How can I convert images to video without using FFmpeg or JCodec, only with the Android MediaCodec API? The images for the video are bitmaps that can be ARGB_8888 or YUV420 (my choice). The most important requirement is that the video has to be playable on Android devices, and the maximum API level is 16. I know all about the API 18 MediaMuxer and I cannot use it.
Please help me; I've been stuck on this for many days.
(JCodec is too slow, and FFmpeg is very complicated to use.)
There is no simple way to do this in API 16 that works across all devices.
You will encounter problems with buffer alignment, color spaces, and the need to use different YUV layouts on different devices.
Consider the buffer alignment issue. On pre-API 18 devices with Qualcomm SOCs, you had to align the CbCr planes at a 2K offset from the start of the buffer. On API 18, all devices use the same layout; this is enforced by CTS tests that were added in Android 4.3.
Even with API 18 you still have to detect at runtime whether the encoder wants planar or semi-planar values. (It's probably not relevant for your situation, but none of the YUV formats output by the camera are accepted by MediaCodec.) Note there is no RGB input to MediaCodec.
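To illustrate, a minimal sketch of what that layout branch looks like when filling the encoder's input buffer (fillInputFrame and the plane arrays are hypothetical; real code would also handle the packed variants):
// Sketch: write one YUV420 frame into the encoder input buffer, branching
// on the color format the codec reported. Assumes yPlane holds width*height
// luma bytes and uPlane/vPlane each hold width*height/4 chroma bytes.
static void fillInputFrame(ByteBuffer buf, int colorFormat,
        byte[] yPlane, byte[] uPlane, byte[] vPlane) {
    buf.put(yPlane);
    if (colorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar) {
        // Planar (I420): all U samples, then all V samples.
        buf.put(uPlane);
        buf.put(vPlane);
    } else if (colorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar) {
        // Semi-planar (NV12): U and V samples interleaved.
        for (int i = 0; i < uPlane.length; i++) {
            buf.put(uPlane[i]);
            buf.put(vPlane[i]);
        }
    } else {
        throw new IllegalArgumentException("unsupported color format " + colorFormat);
    }
}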
If portability is not a concern, i.e. you're targeting a specific device, your code will be much simpler.
There are code snippets in the SO pages linked above. The closest "official" example is the buffer-to-buffer / buffer-to-surface tests in EncodeDecodeTest. These are API 18 tests, which means they don't do the "if QC and API16 then change buffer alignment" dance, but they do show how to do the planar vs. semi-planar layout detection. It doesn't include an RGB-to-YUV color converter, but there are examples of such around the web.
On the bright side, the encoder output seems to be just fine on any device and API version.
Converting the raw H.264 stream to a .mp4 file requires a 3rd-party library, since as you noted MediaMuxer is not available. I believe some people have installed ffmpeg as a command-line utility and executed it that way (maybe like this?).
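As a rough sketch of that route (the binary location and file paths here are hypothetical; ffmpeg's stream copy just rewraps the H.264 without re-encoding):
// Sketch: wrap a raw H.264 elementary stream into an .mp4 container by
// executing a bundled ffmpeg binary. All paths are placeholders.
static int muxWithFfmpeg(String ffmpegPath, String rawH264, String outMp4)
        throws IOException, InterruptedException {
    Process p = Runtime.getRuntime().exec(new String[] {
            ffmpegPath,
            "-framerate", "10",  // frame rate of the raw stream
            "-i", rawH264,
            "-c", "copy",        // no re-encode, just remux
            outMp4
    });
    return p.waitFor();          // 0 on success
}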
import android.app.Activity;
import android.app.ProgressDialog;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.os.Environment;
import android.os.Handler;
import android.util.Log;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.text.DateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
public class MMediaMuxer {
private static final String MIME_TYPE = "video/avc"; // H.264 Advanced Video Coding
private static int _width = 512;
private static int _height = 512;
private static final int BIT_RATE = 800000;
private static final int INFLAME_INTERVAL = 1;
private static final int FRAME_RATE = 10;
private static boolean DEBUG = false;
private MediaCodec mediaCodec;
private MediaMuxer mediaMuxer;
private boolean mRunning;
private int generateIndex = 0;
private int mTrackIndex;
private int MAX_FRAME_VIDEO = 0;
private List<byte[]> bitList;
private List<byte[]> bitFirst;
private List<byte[]> bitLast;
private int current_index_frame = 0;
private static final String TAG = "CODEC";
private String outputPath;
private Activity _activity;
private ProgressDialog pd;
private String _title;
private String _mess;
public void Init(Activity activity, int width, int height, String title, String mess) {
_title = title;
_mess = mess;
_activity = activity;
_width = width;
_height = height;
Logd("MMediaMuxer Init");
ShowProgressBar();
}
private Handler aHandler = new Handler();
public void AddFrame(final byte[] byteFrame) {
CheckDataListState();
new Thread(new Runnable() {
@Override
public void run() {
Logd("Android get Frame");
Bitmap bit = BitmapFactory.decodeByteArray(byteFrame, 0, byteFrame.length);
Logd("Android convert Bitmap");
byte[] byteConvertFrame = getNV21(bit.getWidth(), bit.getHeight(), bit);
Logd("Android convert getNV21");
bitList.add(byteConvertFrame);
}
}).start();
}
public void AddFrame(byte[] byteFrame, int count, boolean isLast) {
CheckDataListState();
Logd("Android get Frames = " + count);
Bitmap bit = BitmapFactory.decodeByteArray(byteFrame, 0, byteFrame.length);
Logd("Android convert Bitmap");
byteFrame = getNV21(bit.getWidth(), bit.getHeight(), bit);
Logd("Android convert getNV21");
for (int i = 0; i < count; i++) {
if (isLast) {
bitLast.add(byteFrame);
} else {
bitFirst.add(byteFrame);
}
}
}
public void CreateVideo() {
current_index_frame = 0;
Logd("Prepare Frames Data");
bitFirst.addAll(bitList);
bitFirst.addAll(bitLast);
MAX_FRAME_VIDEO = bitFirst.size();
Logd("CreateVideo");
mRunning = true;
bufferEncoder();
}
public boolean GetStateEncoder() {
return mRunning;
}
public String GetPath() {
return outputPath;
}
public void onBackPressed() {
mRunning = false;
}
public void ShowProgressBar() {
_activity.runOnUiThread(new Runnable() {
public void run() {
pd = new ProgressDialog(_activity);
pd.setTitle(_title);
pd.setCancelable(false);
pd.setMessage(_mess);
pd.setCanceledOnTouchOutside(false);
pd.show();
}
});
}
public void HideProgressBar() {
new Thread(new Runnable() {
@Override
public void run() {
_activity.runOnUiThread(new Runnable() {
@Override
public void run() {
pd.dismiss();
}
});
}
}).start();
}
private void bufferEncoder() {
Runnable runnable = new Runnable() {
@Override
public void run() {
try {
Logd("PrepareEncoder start");
PrepareEncoder();
Logd("PrepareEncoder end");
} catch (IOException e) {
Loge(e.getMessage());
}
try {
while (mRunning) {
Encode();
}
} finally {
Logd("release");
Release();
HideProgressBar();
bitFirst = null;
bitLast = null;
}
}
};
Thread thread = new Thread(runnable);
thread.start();
}
public void ClearTask() {
bitList = null;
bitFirst = null;
bitLast = null;
}
private void PrepareEncoder() throws IOException {
MediaCodecInfo codecInfo = selectCodec(MIME_TYPE);
if (codecInfo == null) {
Loge("Unable to find an appropriate codec for " + MIME_TYPE);
}
Logd("found codec: " + codecInfo.getName());
int colorFormat;
try {
colorFormat = selectColorFormat(codecInfo, MIME_TYPE);
} catch (Exception e) {
colorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar;
}
mediaCodec = MediaCodec.createByCodecName(codecInfo.getName());
MediaFormat mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, _width, _height);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, INFLAME_INTERVAL);
mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mediaCodec.start();
try {
String currentDateTimeString = DateFormat.getDateTimeInstance().format(new Date());
outputPath = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_MOVIES),
"pixel"+currentDateTimeString+".mp4").toString();
mediaMuxer = new MediaMuxer(outputPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
} catch (IOException ioe) {
Loge("MediaMuxer creation failed");
}
}
private void Encode() {
while (true) {
if (!mRunning) {
break;
}
Logd("Encode start");
long TIMEOUT_USEC = 5000;
int inputBufIndex = mediaCodec.dequeueInputBuffer(TIMEOUT_USEC);
long ptsUsec = computePresentationTime(generateIndex, FRAME_RATE);
if (inputBufIndex >= 0) {
byte[] input = bitFirst.get(current_index_frame);
final ByteBuffer inputBuffer = mediaCodec.getInputBuffer(inputBufIndex);
inputBuffer.clear();
inputBuffer.put(input);
mediaCodec.queueInputBuffer(inputBufIndex, 0, input.length, ptsUsec, 0);
generateIndex++;
}
MediaCodec.BufferInfo mBufferInfo = new MediaCodec.BufferInfo();
int encoderStatus = mediaCodec.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
// no output available yet
Loge("No output from encoder available");
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// not expected for an encoder
MediaFormat newFormat = mediaCodec.getOutputFormat();
mTrackIndex = mediaMuxer.addTrack(newFormat);
mediaMuxer.start();
} else if (encoderStatus < 0) {
Loge("unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
} else if (mBufferInfo.size != 0) {
ByteBuffer encodedData = mediaCodec.getOutputBuffer(encoderStatus);
if (encodedData == null) {
Loge("encoderOutputBuffer " + encoderStatus + " was null");
} else {
encodedData.position(mBufferInfo.offset);
encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
mediaMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
mediaCodec.releaseOutputBuffer(encoderStatus, false);
}
}
current_index_frame++;
if (current_index_frame > MAX_FRAME_VIDEO - 1) {
Log.d(TAG, "mRunning = false;");
mRunning = false;
}
Logd("Encode end");
}
}
private void Release() {
if (mediaCodec != null) {
mediaCodec.stop();
mediaCodec.release();
mediaCodec = null;
Logd("RELEASE CODEC");
}
if (mediaMuxer != null) {
mediaMuxer.stop();
mediaMuxer.release();
mediaMuxer = null;
Logd("RELEASE MUXER");
}
}
/**
* Returns the first codec capable of encoding the specified MIME type, or
* null if no match was found.
*/
private static MediaCodecInfo selectCodec(String mimeType) {
int numCodecs = MediaCodecList.getCodecCount();
for (int i = 0; i < numCodecs; i++) {
MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
if (!codecInfo.isEncoder()) {
continue;
}
String[] types = codecInfo.getSupportedTypes();
for (int j = 0; j < types.length; j++) {
if (types[j].equalsIgnoreCase(mimeType)) {
return codecInfo;
}
}
}
return null;
}
/**
* Returns a color format that is supported by the codec and by this test
* code. If no match is found, this throws a test failure -- the set of
* formats known to the test should be expanded for new platforms.
*/
private static int selectColorFormat(MediaCodecInfo codecInfo,
String mimeType) {
MediaCodecInfo.CodecCapabilities capabilities = codecInfo
.getCapabilitiesForType(mimeType);
for (int i = 0; i < capabilities.colorFormats.length; i++) {
int colorFormat = capabilities.colorFormats[i];
if (isRecognizedFormat(colorFormat)) {
return colorFormat;
}
}
return 0; // no recognized format found
}
/**
* Returns true if this is a color format that this test code understands
* (i.e. we know how to read and generate frames in this format).
*/
private static boolean isRecognizedFormat(int colorFormat) {
switch (colorFormat) {
// these are the formats we know how to handle
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
return true;
default:
return false;
}
}
private byte[] getNV21(int inputWidth, int inputHeight, Bitmap scaled) {
int[] argb = new int[inputWidth * inputHeight];
scaled.getPixels(argb, 0, inputWidth, 0, 0, inputWidth, inputHeight);
byte[] yuv = new byte[inputWidth * inputHeight * 3 / 2];
encodeYUV420SP(yuv, argb, inputWidth, inputHeight);
scaled.recycle();
return yuv;
}
private void encodeYUV420SP(byte[] yuv420sp, int[] argb, int width, int height) {
final int frameSize = width * height;
int yIndex = 0;
int uvIndex = frameSize;
int a, R, G, B, Y, U, V;
int index = 0;
for (int j = 0; j < height; j++) {
for (int i = 0; i < width; i++) {
a = (argb[index] & 0xff000000) >> 24; // a is not used obviously
R = (argb[index] & 0xff0000) >> 16;
G = (argb[index] & 0xff00) >> 8;
B = (argb[index] & 0xff) >> 0;
Y = ((66 * R + 129 * G + 25 * B + 128) >> 8) + 16;
U = ((-38 * R - 74 * G + 112 * B + 128) >> 8) + 128;
V = ((112 * R - 94 * G - 18 * B + 128) >> 8) + 128;
yuv420sp[yIndex++] = (byte) ((Y < 0) ? 0 : ((Y > 255) ? 255 : Y));
if (j % 2 == 0 && index % 2 == 0) {
yuv420sp[uvIndex++] = (byte) ((U < 0) ? 0 : ((U > 255) ? 255 : U));
yuv420sp[uvIndex++] = (byte) ((V < 0) ? 0 : ((V > 255) ? 255 : V));
}
index++;
}
}
}
private void CheckDataListState() {
if (bitList == null) {
bitList = new ArrayList<>();
}
if (bitFirst == null) {
bitFirst = new ArrayList<>();
}
if (bitLast == null) {
bitLast = new ArrayList<>();
}
}
private long computePresentationTime(long frameIndex, int framerate) {
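// starts at a small fixed offset (matching the CTS EncodeDecodeTest this snippet follows), then advances one frame duration per index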
return 132 + frameIndex * 1000000 / framerate;
}
private static void Logd(String Mess) {
if (DEBUG) {
Log.d(TAG, Mess);
}
}
private static void Loge(String Mess) {
Log.e(TAG, Mess);
}
}
This works with SDK 21. Don't pay attention to the several lists; they exist because the frame data is converted piece by piece at runtime.
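For context, a minimal usage sketch of the class above (the byte arrays here are compressed images, e.g. JPEG, and all names are placeholders):
// Sketch: queue frames, then encode. GetStateEncoder() reports whether
// the background encode loop is still running.
MMediaMuxer muxer = new MMediaMuxer();
muxer.Init(activity, 512, 512, "Encoding", "Creating video...");
muxer.AddFrame(firstJpeg, 10, false);   // repeat the first frame 10 times
for (byte[] jpeg : middleJpegs) {
    muxer.AddFrame(jpeg);               // decoded and converted on a worker thread
}
muxer.AddFrame(lastJpeg, 10, true);     // repeat the last frame 10 times
muxer.CreateVideo();                    // encodes everything queued so far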
Related
I'm working on a video conference app using OpenVidu. We are trying to include a Wikitude AR session in the call.
The problem is that both of them require access to the camera, so I have the following scenario: if I instantiate the local participant video first, I can't start the Wikitude AR session because the video doesn't load. If I instantiate the Wikitude session first, the other participants of the call don't see the device video.
I was able to create a custom video capturer for OpenVidu that imitates the camera. Every frame has to be sent to it for it to work.
package org.webrtc;
import android.content.Context;
import android.graphics.Bitmap;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.atomic.AtomicReference;
public class CustomVideoCapturer implements VideoCapturer {
private final static String TAG = "FileVideoCapturer";
//private final FileVideoCapturer.VideoReader videoReader;
private final Timer timer = new Timer();
private CapturerObserver capturerObserver;
private AtomicReference<Bitmap> image = new AtomicReference<Bitmap>();
private final TimerTask tickTask = new TimerTask() {
@Override
public void run() {
tick();
}
};
public CustomVideoCapturer() {
}
public void tick() {
Bitmap frame = image.get();
if (frame != null && !frame.isRecycled()) {
NV21Buffer nv21Buffer = new NV21Buffer(getNV21(frame),frame.getWidth(),frame.getHeight(), null);
VideoFrame videoFrame = new VideoFrame(nv21Buffer, 0, System.nanoTime());
capturerObserver.onFrameCaptured(videoFrame);
}
}
byte [] getNV21(Bitmap image) {
int [] argb = new int[image.getWidth() * image.getHeight()];
image.getPixels(argb, 0, image.getWidth(), 0, 0, image.getWidth(), image.getHeight());
byte [] yuv = new byte[image.getWidth()*image.getHeight()*3/2];
encodeYUV420SP(yuv, argb, image.getWidth(), image.getHeight());
image.recycle();
return yuv;
}
void encodeYUV420SP(byte[] yuv420sp, int[] argb, int width, int height) {
final int frameSize = width * height;
int yIndex = 0;
int uvIndex = frameSize;
int a, R, G, B, Y, U, V;
int index = 0;
for (int j = 0; j < height; j++) {
for (int i = 0; i < width; i++) {
a = (argb[index] & 0xff000000) >> 24; // a is not used obviously
R = (argb[index] & 0xff0000) >> 16;
G = (argb[index] & 0xff00) >> 8;
B = (argb[index] & 0xff) >> 0;
// well known RGB to YUV algorithm
Y = ( ( 66 * R + 129 * G + 25 * B + 128) >> 8) + 16;
U = ( ( -38 * R - 74 * G + 112 * B + 128) >> 8) + 128;
V = ( ( 112 * R - 94 * G - 18 * B + 128) >> 8) + 128;
// NV21 has a plane of Y and interleaved planes of VU each sampled by a factor of 2
// meaning for every 4 Y pixels there are 1 V and 1 U. Note the sampling is every other
// pixel AND every other scanline.
yuv420sp[yIndex++] = (byte) ((Y < 0) ? 0 : ((Y > 255) ? 255 : Y));
if (j % 2 == 0 && index % 2 == 0) {
yuv420sp[uvIndex++] = (byte)((V<0) ? 0 : ((V > 255) ? 255 : V));
yuv420sp[uvIndex++] = (byte)((U<0) ? 0 : ((U > 255) ? 255 : U));
}
index ++;
}
}
}
public void sendFrame(Bitmap bitmap) {
image.set(bitmap);
}
@Override
public void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext,
CapturerObserver capturerObserver) {
this.capturerObserver = capturerObserver;
}
@Override
public void startCapture(int width, int height, int framerate) {
//timer.schedule(tickTask, 0, 1000 / framerate);
threadCV().start();
}
Thread threadCV() {
return new Thread() {
@Override
public void run() {
while (true) {
if (image.get() != null) {
tick();
}
try {
Thread.sleep(10);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
};
}
@Override
public void stopCapture() throws InterruptedException {
timer.cancel();
}
@Override
public void changeCaptureFormat(int width, int height, int framerate) {
// Empty on purpose
}
@Override
public void dispose() {
//videoReader.close();
}
@Override
public boolean isScreencast() {
return false;
}
private interface VideoReader {
VideoFrame getNextFrame();
void close();
}
/**
* Read video data from file for the .y4m container.
*/
}
On the local participant I then use this function to send the frame:
public void sendFrame(Bitmap frame) {
customVideoCapturer.sendFrame(frame);
}
But I wasn't able to take the frames from the Wikitude camera. Is there a way to access the frames and resend them?
As of the Native API SDK, version 9.10.0, according to the answer from Wikitude support
(https://support.wikitude.com/support/discussions/topics/5000096719?page=1), a custom plugin has to be created to access the camera frames:
https://www.wikitude.com/external/doc/documentation/latest/androidnative/pluginsapi.html#plugins-api
I'm decoding and encoding a video file using the Android MediaCodec. Both decoding and encoding work fine with the following code, except on a Pixel 3a device. For encoding, the encoder uses a set of bitmaps to create a video file, but only on the Pixel 3a does encoding the bitmaps fail and produce a distorted video file.
Device details:
Name: Pixel 3a, Android version: 11
public class ImageProcessor implements Runnable {
private static final String VIDEO = "video/";
private static final String TAG = "VideoDecoder";
private static final long DEFAULT_TIMEOUT_US = 0;
private final String inputFile;
private final String outputFile;
private MediaCodec mDecoder;
private MediaExtractor mExtractor;
private RenderScript rs;
private ScriptIntrinsicYuvToRGB yuvToRgbIntrinsic;
private int width;
private int height;
private MediaCodec mEncoder;
private MediaMuxer mediaMuxer;
private int mTrackIndex;
private ScriptC_rotators rotateScript;
private int newWidth = 0, newHeight = 0;
private int preRotateHeight;
private int preRotateWidth;
private Allocation fromRotateAllocation;
private Allocation toRotateAllocation;
private int frameIndex;
private int deviceOrientation;
private int sensorOrientation;
private final Handler handler;
boolean sawOutputEOS = false;
boolean sawInputEOS = false;
private static final int SENSOR_ORIENTATION_DEFAULT_DEGREES = 90;
private FrameObject defaultObject;
private int faceBlurCount;
private long startTime;
private float frameRate;
private int generateIndex;
public ImageProcessor(Handler handler, String inputFile, String outputFile) {
this.inputFile = inputFile;
this.outputFile = outputFile;
this.handler = handler;
}
public void setDeviceOrientation(int deviceOrientation) {
this.deviceOrientation = deviceOrientation;
}
public void setSensorOrientation(int sensorOrientation) {
this.sensorOrientation = sensorOrientation;
}
public void setDefaultObject(FrameObject frameObject) {
this.defaultObject = frameObject;
}
private void init() {
try {
mExtractor = new MediaExtractor();
mExtractor.setDataSource(inputFile);
MediaMetadataRetriever retriever = new MediaMetadataRetriever();
retriever.setDataSource(inputFile);
FFmpegMediaMetadataRetriever metadataRetriever = new FFmpegMediaMetadataRetriever();
metadataRetriever.setDataSource(inputFile);
rs = RenderScript.create(Globals.getAppContext());
yuvToRgbIntrinsic = ScriptIntrinsicYuvToRGB.create(rs, Element.U8_4(rs));
rotateScript = new ScriptC_rotators(rs);
for (int i = 0; i < mExtractor.getTrackCount(); i++) {
MediaFormat format = mExtractor.getTrackFormat(i);
String mimeType = format.getString(MediaFormat.KEY_MIME);
width = format.getInteger(MediaFormat.KEY_WIDTH);
height = format.getInteger(MediaFormat.KEY_HEIGHT);
frameRate = Float.parseFloat(metadataRetriever.extractMetadata(
FFmpegMediaMetadataRetriever.METADATA_KEY_FRAMERATE));
int bitRate = Integer.parseInt(retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_BITRATE));
if (mimeType != null && mimeType.startsWith(VIDEO)) {
mExtractor.selectTrack(i);
mDecoder = MediaCodec.createDecoderByType(mimeType);
mDecoder.configure(format, null, null, 0 /* Decoder */);
mDecoder.start();
MediaCodecInfo mediaCodecInfo = selectCodec(mimeType);
if (mediaCodecInfo == null) {
throw new RuntimeException("Failed to initialise codec");
}
switch (deviceOrientation) {
case Surface.ROTATION_0:
case Surface.ROTATION_180:
newWidth = height;
newHeight = width;
break;
case Surface.ROTATION_90:
case Surface.ROTATION_270:
newWidth = width;
newHeight = height;
break;
}
MediaFormat mediaFormat = MediaFormat.createVideoFormat(mimeType, newWidth, newHeight);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
mediaFormat.setFloat(MediaFormat.KEY_FRAME_RATE, frameRate);
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);
mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
mEncoder.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mEncoder.start();
mediaMuxer = new MediaMuxer(outputFile, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
break;
}
}
} catch (IOException e) {
throw new RuntimeException("Failed to initialise codec");
}
}
/**
* Returns the first codec capable of encoding the specified MIME type, or null if no
* match was found.
*/
private MediaCodecInfo selectCodec(String mimeType) throws IOException {
MediaCodecList list = new MediaCodecList(MediaCodecList.ALL_CODECS);
MediaCodecInfo[] codecInfos = list.getCodecInfos();
for (MediaCodecInfo info : codecInfos) {
if (info.isEncoder()) {
mEncoder = MediaCodec.createByCodecName(info.getName());
String[] types = info.getSupportedTypes();
for (String type : types) {
if (type.equalsIgnoreCase(mimeType)) {
return info;
}
}
}
}
return null;
}
public void startProcessing() {
init();
MediaCodec.BufferInfo decoderBufferInfo = new MediaCodec.BufferInfo();
MediaCodec.BufferInfo encoderBufferInfo = new MediaCodec.BufferInfo();
startTime = System.currentTimeMillis();
while (!sawOutputEOS) {
Log.d(TAG, "startProcessing: " + frameIndex);
if (!sawInputEOS && mDecoder != null) {
int inputBufferId = mDecoder.dequeueInputBuffer(DEFAULT_TIMEOUT_US);
if (inputBufferId >= 0) {
ByteBuffer inputBuffer = mDecoder.getInputBuffer(inputBufferId);
int sampleSize = mExtractor.readSampleData(inputBuffer, 0);
if (sampleSize < 0) {
mDecoder.queueInputBuffer(inputBufferId, 0, 0, 0L, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
sawInputEOS = true;
} else {
if (mExtractor != null) {
long presentationTimeUs = mExtractor.getSampleTime();
mDecoder.queueInputBuffer(inputBufferId, 0, sampleSize, presentationTimeUs, 0);
mExtractor.advance();
}
}
}
}
int outputBufferId = mDecoder.dequeueOutputBuffer(decoderBufferInfo, DEFAULT_TIMEOUT_US);
if (outputBufferId >= 0) {
if ((decoderBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
sawOutputEOS = true;
Log.d(TAG, "endProcessing: " + TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis() - startTime));
}
boolean doRender = (decoderBufferInfo.size != 0);
if (doRender && mDecoder != null) {
Image image = mDecoder.getOutputImage(outputBufferId);
if (image != null) {
try {
frameIndex++;
byte[] frameData = quarterNV21(convertYUV420888ToNV21(image), image.getWidth(), image.getHeight());
byte[] data = getDataFromImage(image);
Type.Builder yuvType = new Type.Builder(rs, Element.U8(rs)).setX(data.length);
Allocation in = Allocation.createTyped(rs, yuvType.create(), Allocation.USAGE_SCRIPT);
Type.Builder rgbaType = new Type.Builder(rs, Element.RGBA_8888(rs)).setX(width).setY(height);
Allocation out = Allocation.createTyped(rs, rgbaType.create(), Allocation.USAGE_SCRIPT);
Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
in.copyFromUnchecked(data);
yuvToRgbIntrinsic.setInput(in);
yuvToRgbIntrinsic.forEach(out);
out.copyTo(bitmap);
image.close();
encodeBitmaps(bitmap, encoderBufferInfo);
} catch (Exception e) {
Log.d(TAG, "startProcessing: " + e.getMessage());
}
}
if (mDecoder != null) {
mDecoder.releaseOutputBuffer(outputBufferId, false);
}
}
}
}
}
private long computePresentationTime(int frameIndex) {
return 132 + frameIndex * 1000000 / (int)frameRate;
}
private byte[] convertYUV420888ToNV21(Image image) {
byte[] data;
ByteBuffer buffer0 = image.getPlanes()[0].getBuffer();
ByteBuffer buffer2 = image.getPlanes()[2].getBuffer();
int buffer0_size = buffer0.remaining();
int buffer2_size = buffer2.remaining();
data = new byte[buffer0_size + buffer2_size];
buffer0.get(data, 0, buffer0_size);
buffer2.get(data, buffer0_size, buffer2_size);
return data;
}
private byte[] quarterNV21(byte[] data, int iWidth, int iHeight) {
byte[] yuv = new byte[iWidth * iHeight * 3 / 2];
// copy the luma plane only (the chroma part of the buffer is left zeroed)
int i = 0;
for (int y = 0; y < iHeight; y++) {
for (int x = 0; x < iWidth; x++) {
yuv[i] = data[y * iWidth + x];
i++;
}
}
return yuv;
}
private void release() {
try {
if (mExtractor != null) {
mExtractor.release();
mExtractor = null;
}
if (mDecoder != null) {
mDecoder.stop();
mDecoder.release();
mDecoder = null;
}
if (mEncoder != null) {
mEncoder.stop();
mEncoder.release();
mEncoder = null;
}
if (mediaMuxer != null) {
mediaMuxer.stop();
mediaMuxer.release();
mediaMuxer = null;
}
} catch (Exception e) {
Log.d(TAG, "imageprocessor release: " + e.fillInStackTrace());
}
Message message = handler.obtainMessage();
Bundle bundle = new Bundle();
bundle.putString(FrameUtil.COMPUTATION_SUCCESS_KEY, this.outputFile);
bundle.putInt(FrameUtil.FACE_BLUR_COUNT, faceBlurCount);
message.setData(bundle);
handler.sendMessage(message);
}
// encode the bitmap to a new video file
private void encodeBitmaps(Bitmap bitmap, MediaCodec.BufferInfo encoderBufferInfo) {
Bitmap rotatedBitmap = null;
switch (deviceOrientation) {
case Surface.ROTATION_0:
if (sensorOrientation == SENSOR_ORIENTATION_DEFAULT_DEGREES) {
rotatedBitmap = rotateBitmap(bitmap, 270);
} else {
rotatedBitmap = rotateBitmap(bitmap, 90);
}
break;
case Surface.ROTATION_90:
Bitmap newBitmap = rotateBitmap(bitmap, 90);
bitmap.recycle();
rotatedBitmap = rotateBitmap(newBitmap, 90);
break;
default:
rotatedBitmap = bitmap;
}
byte[] bytes = getNV21(rotatedBitmap.getWidth(), rotatedBitmap.getHeight(), rotatedBitmap);
int inputBufIndex = mEncoder.dequeueInputBuffer(DEFAULT_TIMEOUT_US);
long ptsUsec = computePresentationTime(generateIndex);
if (inputBufIndex >= 0) {
ByteBuffer inputBuffer = mEncoder.getInputBuffer(inputBufIndex);
if (inputBuffer != null) {
inputBuffer.clear();
inputBuffer.put(bytes);
mEncoder.queueInputBuffer(inputBufIndex, 0, bytes.length,
ptsUsec, 0);
generateIndex++;
}
}
int encoderStatus = mEncoder.dequeueOutputBuffer(encoderBufferInfo, DEFAULT_TIMEOUT_US);
if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
MediaFormat newFormat = mEncoder.getOutputFormat();
mTrackIndex = mediaMuxer.addTrack(newFormat);
mediaMuxer.start();
} else if (encoderBufferInfo.size != 0) {
ByteBuffer outputBuffer = mEncoder.getOutputBuffer(encoderStatus);
if (outputBuffer != null) {
outputBuffer.position(encoderBufferInfo.offset);
outputBuffer.limit(encoderBufferInfo.offset + encoderBufferInfo.size);
mediaMuxer.writeSampleData(mTrackIndex, outputBuffer, encoderBufferInfo);
mEncoder.releaseOutputBuffer(encoderStatus, false);
}
if ((encoderBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
mEncoder.signalEndOfInputStream();
}
}
}
private Allocation getFromRotateAllocation(Bitmap bitmap) {
int targetHeight = bitmap.getWidth();
int targetWidth = bitmap.getHeight();
if (targetHeight != preRotateHeight || targetWidth != preRotateWidth) {
preRotateHeight = targetHeight;
preRotateWidth = targetWidth;
fromRotateAllocation = Allocation.createFromBitmap(rs, bitmap,
Allocation.MipmapControl.MIPMAP_NONE,
Allocation.USAGE_SCRIPT);
}
return fromRotateAllocation;
}
private Allocation getToRotateAllocation(Bitmap bitmap) {
int targetHeight = bitmap.getWidth();
int targetWidth = bitmap.getHeight();
if (targetHeight != preRotateHeight || targetWidth != preRotateWidth) {
toRotateAllocation = Allocation.createFromBitmap(rs, bitmap,
Allocation.MipmapControl.MIPMAP_NONE,
Allocation.USAGE_SCRIPT);
}
return toRotateAllocation;
}
private Bitmap rotateBitmap(Bitmap bitmap, int angle) {
Bitmap.Config config = bitmap.getConfig();
int targetHeight = bitmap.getWidth();
int targetWidth = bitmap.getHeight();
rotateScript.set_inWidth(bitmap.getWidth());
rotateScript.set_inHeight(bitmap.getHeight());
Allocation sourceAllocation = getFromRotateAllocation(bitmap);
sourceAllocation.copyFrom(bitmap);
rotateScript.set_inImage(sourceAllocation);
Bitmap target = Bitmap.createBitmap(targetWidth, targetHeight, config);
final Allocation targetAllocation = getToRotateAllocation(target);
if (angle == 90) {
rotateScript.forEach_rotate_90_clockwise(targetAllocation, targetAllocation);
} else {
rotateScript.forEach_rotate_270_clockwise(targetAllocation, targetAllocation);
}
targetAllocation.copyTo(target);
return target;
}
private byte[] getNV21(int inputWidth, int inputHeight, Bitmap bitmap) {
int[] argb = new int[inputWidth * inputHeight];
bitmap.getPixels(argb, 0, inputWidth, 0, 0, inputWidth, inputHeight);
byte[] yuv = new byte[inputWidth * inputHeight * 3 / 2];
encodeYUV420SP(yuv, argb, inputWidth, inputHeight);
bitmap.recycle();
return yuv;
}
private void encodeYUV420SP(byte[] yuv420sp, int[] rgb, int width, int height) {
final int frameSize = width * height;
int yIndex = 0;
int uvIndex = frameSize;
int R, G, B, Y, U, V;
int index = 0;
for (int j = 0; j < height; j++) {
for (int i = 0; i < width; i++) {
//a = (aRGB[index] & 0xff000000) >> 24; //not using it right now
R = (rgb[index] & 0xff0000) >> 16;
G = (rgb[index] & 0xff00) >> 8;
B = (rgb[index] & 0xff);
Y = ((66 * R + 129 * G + 25 * B + 128) >> 8) + 16;
U = ((-38 * R - 74 * G + 112 * B + 128) >> 8) + 128;
V = ((112 * R - 94 * G - 18 * B + 128) >> 8) + 128;
yuv420sp[yIndex++] = (byte) ((Y < 0) ? 0 : (Math.min(Y, 255)));
if (j % 2 == 0 && index % 2 == 0) {
yuv420sp[uvIndex++] = (byte) ((U < 0) ? 0 : (Math.min(U, 255)));
yuv420sp[uvIndex++] = (byte) ((V < 0) ? 0 : (Math.min(V, 255)));
}
index++;
}
}
}
private static byte[] getDataFromImage(Image image) {
Rect crop = image.getCropRect();
int format = image.getFormat();
int width = crop.width();
int height = crop.height();
Image.Plane[] planes = image.getPlanes();
byte[] data = new byte[width * height * ImageFormat.getBitsPerPixel(format) / 8];
byte[] rowData = new byte[planes[0].getRowStride()];
int channelOffset = 0;
int outputStride = 1;
for (int i = 0; i < planes.length; i++) {
switch (i) {
case 0:
channelOffset = 0;
outputStride = 1;
break;
case 1:
channelOffset = width * height + 1;
outputStride = 2;
break;
case 2:
channelOffset = width * height;
outputStride = 2;
break;
}
ByteBuffer buffer = planes[i].getBuffer();
int rowStride = planes[i].getRowStride();
int pixelStride = planes[i].getPixelStride();
int shift = (i == 0) ? 0 : 1;
int w = width >> shift;
int h = height >> shift;
buffer.position(rowStride * (crop.top >> shift) + pixelStride * (crop.left >> shift));
for (int row = 0; row < h; row++) {
int length;
if (pixelStride == 1 && outputStride == 1) {
length = w;
buffer.get(data, channelOffset, length);
channelOffset += length;
} else {
length = (w - 1) * pixelStride + 1;
buffer.get(rowData, 0, length);
for (int col = 0; col < w; col++) {
data[channelOffset] = rowData[col * pixelStride];
channelOffset += outputStride;
}
}
if (row < h - 1) {
buffer.position(buffer.position() + rowStride - length);
}
}
}
return data;
}
@Override
public void run() {
try {
startProcessing();
} catch (Exception ex) {
Log.d(TAG, "run: " + ex.getCause());
} finally {
release();
}
}
public void stopProcessing() {
sawOutputEOS = true;
}
}
Kindly have a look at the code and tell me what I am doing wrong.
[Image: distorted video frame]
I am trying to create a video from camera frames using Camera1. Even when I take a long video from the camera, the video plays too fast. I tried changing bitrates and frame rates.
The image byte[] from the camera frames is queued and fed to this class so that we can create a video from bitmaps or byte[].
What we have done:
We opened the camera and took camera frames from the onNextFrame method using Camera1. We could have used MediaRecorder for recording the video, but rendering frames to a SurfaceView and recording video cannot be done simultaneously with Camera1. So we used MediaCodec to create an mp4 video from the frames, so that rendering frames and recording a video can happen at the same time.
package com.sukshi.smartid_demo;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Matrix;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.os.AsyncTask;
import android.os.Build;
import android.support.annotation.RequiresApi;
import android.util.Log;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.CountDownLatch;
import static com.sukshi.smartid_demo.Camera.CameraSource.angle;
public class BitmapToVideoEncoder {
private static final String TAG = BitmapToVideoEncoder.class.getSimpleName();
private IBitmapToVideoEncoderCallback mCallback;
private File mOutputFile;
private Queue<Bitmap> mEncodeQueue = new ConcurrentLinkedQueue();
private MediaCodec mediaCodec;
private MediaMuxer mediaMuxer;
private Object mFrameSync = new Object();
private CountDownLatch mNewFrameLatch;
private static final String MIME_TYPE = "video/avc"; // H.264 Advanced Video Coding
private static int mWidth;
private static int mHeight;
private static final int BIT_RATE = 1000000;
private static final int FRAME_RATE = 30; // Frames per second
private static final int I_FRAME_INTERVAL = 1;
private int mGenerateIndex = 0;
private int mTrackIndex;
private boolean mNoMoreFrames = false;
private boolean mAbort = false;
public interface IBitmapToVideoEncoderCallback {
void onEncodingComplete(File outputFile);
}
public BitmapToVideoEncoder(IBitmapToVideoEncoderCallback callback) {
mCallback = callback;
}
public boolean isEncodingStarted() {
return (mediaCodec != null) && (mediaMuxer != null) && !mNoMoreFrames && !mAbort;
}
public int getActiveBitmaps() {
return mEncodeQueue.size();
}
public void startEncoding(int width, int height, File outputFile) {
mWidth = width;
mHeight = height;
mOutputFile = outputFile;
String outputFileString;
try {
outputFileString = outputFile.getCanonicalPath();
} catch (IOException e) {
Log.e(TAG, "Unable to get path for " + outputFile);
return;
}
MediaCodecInfo codecInfo = selectCodec(MIME_TYPE);
if (codecInfo == null) {
Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
return;
}
Log.d(TAG, "found codec: " + codecInfo.getName());
int colorFormat;
try {
colorFormat = selectColorFormat(codecInfo, MIME_TYPE);
} catch (Exception e) {
colorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar;
}
try {
mediaCodec = MediaCodec.createByCodecName(codecInfo.getName());
} catch (IOException e) {
Log.e(TAG, "Unable to create MediaCodec " + e.getMessage());
return;
}
MediaFormat mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, I_FRAME_INTERVAL);
mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mediaCodec.start();
try {
mediaMuxer = new MediaMuxer(outputFileString, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
} catch (IOException e) {
Log.e(TAG,"MediaMuxer creation failed. " + e.getMessage());
return;
}
Log.d(TAG, "Initialization complete. Starting encoder...");
new EncodingAsync().execute();
}
public void stopEncoding() {
if (mediaCodec == null || mediaMuxer == null) {
Log.d(TAG, "Failed to stop encoding since it never started");
return;
}
Log.d(TAG, "Stopping encoding");
mNoMoreFrames = true;
synchronized (mFrameSync) {
if ((mNewFrameLatch != null) && (mNewFrameLatch.getCount() > 0)) {
mNewFrameLatch.countDown();
}
}
}
public void abortEncoding() {
if (mediaCodec == null || mediaMuxer == null) {
Log.d(TAG, "Failed to abort encoding since it never started");
return;
}
Log.d(TAG, "Aborting encoding");
mNoMoreFrames = true;
mAbort = true;
mEncodeQueue = new ConcurrentLinkedQueue(); // Drop all frames
synchronized (mFrameSync) {
if ((mNewFrameLatch != null) && (mNewFrameLatch.getCount() > 0)) {
mNewFrameLatch.countDown();
}
}
}
public void queueFrame(byte[] nextData) {
if (mediaCodec == null || mediaMuxer == null) {
Log.d(TAG, "Failed to queue frame. Encoding not started");
return;
}
Log.d(TAG, "Queueing frame");
Bitmap OriginalBitmap = BitmapFactory.decodeByteArray(nextData, 0, nextData.length);
Matrix matrix = new Matrix();
matrix.postRotate(angle);
Bitmap rotatedbitmap = Bitmap.createBitmap(OriginalBitmap,0,0,OriginalBitmap.getWidth(),OriginalBitmap.getHeight(),matrix,true);
mEncodeQueue.add(rotatedbitmap);
synchronized (mFrameSync) {
if ((mNewFrameLatch != null) && (mNewFrameLatch.getCount() > 0)) {
mNewFrameLatch.countDown();
}
}
}
class EncodingAsync extends AsyncTask<Void,Void,File>{
@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
@Override
protected File doInBackground(Void... voids) {
Log.d(TAG, "Encoder started");
while(true) {
if (mNoMoreFrames && (mEncodeQueue.size() == 0)) break;
Bitmap bitmap = mEncodeQueue.poll();
if (bitmap == null) {
synchronized (mFrameSync) {
mNewFrameLatch = new CountDownLatch(1);
}
try {
mNewFrameLatch.await();
} catch (InterruptedException e) {}
bitmap = mEncodeQueue.poll();
}
if (bitmap == null) continue;
byte[] byteConvertFrame = getNV21(bitmap.getWidth(), bitmap.getHeight(), bitmap);
long TIMEOUT_USEC = 500000;
int inputBufIndex = mediaCodec.dequeueInputBuffer(TIMEOUT_USEC);
long ptsUsec = computePresentationTime(mGenerateIndex, FRAME_RATE);
if (inputBufIndex >= 0) {
final ByteBuffer inputBuffer = mediaCodec.getInputBuffer(inputBufIndex);
inputBuffer.clear();
inputBuffer.put(byteConvertFrame);
mediaCodec.queueInputBuffer(inputBufIndex, 0, byteConvertFrame.length, ptsUsec, 0);
mGenerateIndex++;
}
MediaCodec.BufferInfo mBufferInfo = new MediaCodec.BufferInfo();
int encoderStatus = mediaCodec.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
// no output available yet
Log.e(TAG, "No output from encoder available");
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// not expected for an encoder
MediaFormat newFormat = mediaCodec.getOutputFormat();
mTrackIndex = mediaMuxer.addTrack(newFormat);
mediaMuxer.start();
} else if (encoderStatus < 0) {
Log.e(TAG, "unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
} else if (mBufferInfo.size != 0) {
ByteBuffer encodedData = mediaCodec.getOutputBuffer(encoderStatus);
if (encodedData == null) {
Log.e(TAG, "encoderOutputBuffer " + encoderStatus + " was null");
} else {
encodedData.position(mBufferInfo.offset);
encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
mediaMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
mediaCodec.releaseOutputBuffer(encoderStatus, false);
}
}
}
release();
if (mAbort) {
mOutputFile.delete();
return null;
} else {
return mOutputFile;
}
}
@Override
protected void onPostExecute(File outputFile) {
super.onPostExecute(outputFile);
mCallback.onEncodingComplete(outputFile);
}
}
private void release() {
if (mediaCodec != null) {
mediaCodec.stop();
mediaCodec.release();
mediaCodec = null;
Log.d(TAG,"RELEASE CODEC");
}
if (mediaMuxer != null) {
mediaMuxer.stop();
mediaMuxer.release();
mediaMuxer = null;
Log.d(TAG,"RELEASE MUXER");
}
}
private static MediaCodecInfo selectCodec(String mimeType) {
int numCodecs = MediaCodecList.getCodecCount();
for (int i = 0; i < numCodecs; i++) {
MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
if (!codecInfo.isEncoder()) {
continue;
}
String[] types = codecInfo.getSupportedTypes();
for (int j = 0; j < types.length; j++) {
if (types[j].equalsIgnoreCase(mimeType)) {
return codecInfo;
}
}
}
return null;
}
private static int selectColorFormat(MediaCodecInfo codecInfo,
String mimeType) {
MediaCodecInfo.CodecCapabilities capabilities = codecInfo
.getCapabilitiesForType(mimeType);
for (int i = 0; i < capabilities.colorFormats.length; i++) {
int colorFormat = capabilities.colorFormats[i];
if (isRecognizedFormat(colorFormat)) {
return colorFormat;
}
}
return 0; // no recognized format found
}
private static boolean isRecognizedFormat(int colorFormat) {
switch (colorFormat) {
// these are the formats we know how to handle
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
return true;
default:
return false;
}
}
private byte[] getNV21(int inputWidth, int inputHeight, Bitmap scaled) {
int[] argb = new int[inputWidth * inputHeight];
scaled.getPixels(argb, 0, inputWidth, 0, 0, inputWidth, inputHeight);
byte[] yuv = new byte[inputWidth * inputHeight * 3 / 2];
encodeYUV420SP(yuv, argb, inputWidth, inputHeight);
scaled.recycle();
return yuv;
}
private void encodeYUV420SP(byte[] yuv420sp, int[] argb, int width, int height) {
final int frameSize = width * height;
int yIndex = 0;
int uvIndex = frameSize;
int a, R, G, B, Y, U, V;
int index = 0;
for (int j = 0; j < height; j++) {
for (int i = 0; i < width; i++) {
a = (argb[index] & 0xff000000) >> 24; // a is not used obviously
R = (argb[index] & 0xff0000) >> 16;
G = (argb[index] & 0xff00) >> 8;
B = (argb[index] & 0xff) >> 0;
Y = ((66 * R + 129 * G + 25 * B + 128) >> 8) + 16;
U = ((-38 * R - 74 * G + 112 * B + 128) >> 8) + 128;
V = ((112 * R - 94 * G - 18 * B + 128) >> 8) + 128;
yuv420sp[yIndex++] = (byte) ((Y < 0) ? 0 : ((Y > 255) ? 255 : Y));
if (j % 2 == 0 && index % 2 == 0) {
yuv420sp[uvIndex++] = (byte) ((U < 0) ? 0 : ((U > 255) ? 255 : U));
yuv420sp[uvIndex++] = (byte) ((V < 0) ? 0 : ((V > 255) ? 255 : V));
}
index++;
}
}
}
private long computePresentationTime(long frameIndex, int framerate) {
return 132 + frameIndex * 1000000 / framerate;
}
}
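For reference, a minimal usage sketch of the class above, assuming the camera delivers JPEG byte arrays from its frame callback (names are placeholders):
// Sketch: start the encoder, feed frames from the camera callback,
// then stop to finalize the file.
BitmapToVideoEncoder encoder = new BitmapToVideoEncoder(outputFile ->
        Log.d("Encoder", "Encoding complete: " + outputFile));
encoder.startEncoding(640, 480, new File(context.getFilesDir(), "out.mp4"));
// inside onNextFrame / onPreviewFrame:
encoder.queueFrame(jpegBytes);
// when recording ends:
encoder.stopEncoding();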
You can see the main ways to use MediaCodec in the following example, which generates a movie using OpenGL ES:
https://bigflake.com/mediacodec/EncodeAndMuxTest.java.txt
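The core of the Surface-input path used there, as a hedged sketch (API 18+; all EGL/GL drawing is omitted):
// Sketch: configure an AVC encoder with a Surface input so frames are
// rendered with OpenGL ES instead of hand-packed YUV buffers.
MediaFormat format = MediaFormat.createVideoFormat("video/avc", 640, 480);
format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
        MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
format.setInteger(MediaFormat.KEY_BIT_RATE, 2000000);
format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
MediaCodec encoder = MediaCodec.createEncoderByType("video/avc");
encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
Surface inputSurface = encoder.createInputSurface(); // bind as the EGL window surface
encoder.start();
// ... draw each frame with GL, set its timestamp, call eglSwapBuffers(),
// and drain the encoder output into MediaMuxer as usual.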
Background
I'm trying to go over the bitmaps of animated GIF and WebP files manually (frame by frame), so that it would work not just for Views but in other cases too (such as a live wallpaper).
The problem
Animated GIF/WebP files are supported only from Android P, using the ImageDecoder API (example here).
For GIF, I wanted to try Glide for the task, but I failed, so I tried to overcome this by using a library that allows loading them (here, solution here). I think it works fine.
For WebP, I thought I had found another library that could work on older Android versions (here, made a fork here), but it seems that it can't handle WebP files well in some cases (reported here). I tried to figure out what the issue is and how to solve it, but I didn't succeed.
So, assuming that some day Google will support GIF and WebP animation for older Android versions via the support library (they wrote about it here), I've decided to try to use ImageDecoder for the task.
Thing is, looking at the entire API of ImageDecoder, it's quite restricted in how we can use it. I don't see how I can overcome its limitations.
What I've found
This is how ImageDecoder can be used to show an animated WebP on an ImageView (just a sample, of course, available here):
class MainActivity : AppCompatActivity() {
#SuppressLint("StaticFieldLeak")
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_main)
val source = ImageDecoder.createSource(resources, R.raw.test)
object : AsyncTask<Void, Void, Drawable?>() {
override fun doInBackground(vararg params: Void?): Drawable? {
return try {
ImageDecoder.decodeDrawable(source)
} catch (e: Exception) {
null
}
}
override fun onPostExecute(result: Drawable?) {
super.onPostExecute(result)
imageView.setImageDrawable(result)
if (result is AnimatedImageDrawable) {
result.start()
}
}
}.execute()
}
}
I've tried to read all of the documentation of ImageDecoder and AnimatedImageDrawable, and to look at their code, but I don't see how it's possible to manually go over each frame and get the time that needs to be waited between frames.
The questions
Is there a way to use the ImageDecoder API to go over each frame manually, getting a Bitmap to draw and knowing how much time to wait between frames? Is any workaround available? Maybe even using AnimatedImageDrawable?
I'd like to do the same on older Android versions. Is it possible? If so, how? Maybe with a different API/library? Google wrote that it is working on a way to use ImageDecoder on older Android versions, but I don't see it mentioned anywhere (except for the link I've provided). It's probably not ready yet; Android P hasn't even reached 0.1% of users. Maybe Fresco can do it? I've tried to check there too, but I don't see that it's capable of such a thing either, and it's a huge library to use just for this task, so I'd prefer to use a different library instead. I also know that libwebp is available, but it's written in C/C++, and I'm not sure whether it's suited for Android or whether there is a Java/Kotlin port of it for Android.
EDIT:
Since I think I got what I wanted, for both a third-party library and for ImageDecoder, to be able to get bitmaps out of animated WebP, I'd still like to know how to get the frame count and the current frame using ImageDecoder, if that's possible. I tried using ImageDecoder.decodeDrawable(source, object : ImageDecoder.OnHeaderDecodedListener..., but it doesn't provide frame-count information, and there is no way in the API, as far as I can see, to go to a specific frame index and start from there, or to know, for a specific frame, how long to wait until the next one. So I made a request about those here.
Sadly, I also could not find that Google has made ImageDecoder available for older Android versions.
It would also be interesting if there were some way to do the same for the relatively new HEIC animation format. Currently it's supported only on Android P.
OK, I got a possible solution, using the Glide library together with the GlideWebpDecoder library.
I'm not sure if that's the best way to do it, but I think it should work fine. The following code shows how it's possible to make the drawable draw into a Bitmap instance that I create for each frame the animation needs to show. It's not exactly what I asked, but it might help others.
Here's the code (project available here):
CallbackEx.kt
abstract class CallbackEx : Drawable.Callback {
override fun unscheduleDrawable(who: Drawable, what: Runnable) {}
override fun invalidateDrawable(who: Drawable) {}
override fun scheduleDrawable(who: Drawable, what: Runnable, `when`: Long) {}
}
MyAppGlideModule.kt
@GlideModule
class MyAppGlideModule : AppGlideModule()
MainActivity.kt
class MainActivity : AppCompatActivity() {
var webpDrawable: WebpDrawable? = null
var gifDrawable: GifDrawable? = null
var callback: Drawable.Callback? = null
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_main)
useFrameByFrameDecoding()
// useNormalDecoding()
}
fun useNormalDecoding() {
//webp url : https://res.cloudinary.com/demo/image/upload/fl_awebp/bored_animation.webp
Glide.with(this)
// .load(R.raw.test)
// .load(R.raw.fast)
.load(R.raw.example2)
// .load("https://res.cloudinary.com/demo/image/upload/fl_awebp/bored_animation.webp")
.into(object : SimpleTarget<Drawable>() {
override fun onResourceReady(drawable: Drawable, transition: Transition<in Drawable>?) {
imageView.setImageDrawable(drawable)
when (drawable) {
is GifDrawable -> {
drawable.start()
}
is WebpDrawable -> {
drawable.start()
}
}
}
})
}
fun useFrameByFrameDecoding() {
//webp url : https://res.cloudinary.com/demo/image/upload/fl_awebp/bored_animation.webp
Glide.with(this)
.load(R.raw.test)
// .load(R.raw.fast)
// .load(R.raw.example2)
// .load("https://res.cloudinary.com/demo/image/upload/fl_awebp/bored_animation.webp")
.into(object : SimpleTarget<Drawable>() {
override fun onResourceReady(drawable: Drawable, transition: Transition<in Drawable>?) {
// val callback
when (drawable) {
is GifDrawable -> {
gifDrawable = drawable
val bitmap = Bitmap.createBitmap(drawable.intrinsicWidth, drawable.intrinsicHeight, Bitmap.Config.ARGB_8888)
val canvas = Canvas(bitmap)
drawable.setBounds(0, 0, bitmap.width, bitmap.height)
drawable.setLoopCount(GifDrawable.LOOP_FOREVER)
callback = object : CallbackEx() {
override fun invalidateDrawable(who: Drawable) {
who.draw(canvas)
imageView.setImageBitmap(bitmap)
Log.d("AppLog", "invalidateDrawable ${drawable.toString().substringAfter('#')} ${drawable.frameIndex}/${drawable.frameCount}")
}
}
drawable.callback = callback
drawable.start()
}
is WebpDrawable -> {
webpDrawable = drawable
val bitmap = Bitmap.createBitmap(drawable.intrinsicWidth, drawable.intrinsicHeight, Bitmap.Config.ARGB_8888)
val canvas = Canvas(bitmap)
drawable.setBounds(0, 0, bitmap.width, bitmap.height)
drawable.setLoopCount(WebpDrawable.LOOP_FOREVER)
callback = object : CallbackEx() {
override fun invalidateDrawable(who: Drawable) {
who.draw(canvas)
imageView.setImageBitmap(bitmap)
Log.d("AppLog", "invalidateDrawable ${drawable.toString().substringAfter('#')} ${drawable.frameIndex}/${drawable.frameCount}")
}
}
drawable.callback = callback
drawable.start()
}
}
}
})
}
override fun onStart() {
super.onStart()
webpDrawable?.start()
gifDrawable?.start()
}
override fun onStop() {
super.onStop()
Log.d("AppLog", "onStop")
webpDrawable?.stop()
gifDrawable?.stop()
}
}
Not sure why SimpleTarget is marked as deprecated, and what I should use instead, though.
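If I recall correctly, SimpleTarget was deprecated in favor of CustomTarget, which forces you to handle onLoadCleared. A hedged sketch (in Java):
// Sketch: CustomTarget as the replacement for the deprecated SimpleTarget.
Glide.with(this)
        .load(R.raw.test)
        .into(new CustomTarget<Drawable>() {
            @Override
            public void onResourceReady(Drawable resource,
                    Transition<? super Drawable> transition) {
                imageView.setImageDrawable(resource);
            }

            @Override
            public void onLoadCleared(Drawable placeholder) {
                // Drop any references to the resource here.
            }
        });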
Using a similar technique, I've also found out how to do it using ImageDecoder, but not with the same functionality, for some reason. A sample project is available here.
Here's the code:
MainActivity.kt
class MainActivity : AppCompatActivity() {
var webpDrawable: AnimatedImageDrawable? = null
#SuppressLint("StaticFieldLeak")
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_main)
val source = ImageDecoder.createSource(resources, R.raw.test)
object : AsyncTask<Void, Void, Drawable?>() {
override fun doInBackground(vararg params: Void?): Drawable? {
return try {
ImageDecoder.decodeDrawable(source)
} catch (e: Exception) {
null
}
}
override fun onPostExecute(drawable: Drawable?) {
super.onPostExecute(drawable)
// imageView.setImageDrawable(result)
if (drawable is AnimatedImageDrawable) {
webpDrawable = drawable
val bitmap =
Bitmap.createBitmap(drawable.intrinsicWidth, drawable.intrinsicHeight, Bitmap.Config.ARGB_8888)
val canvas = Canvas(bitmap)
drawable.setBounds(0, 0, bitmap.width, bitmap.height)
drawable.repeatCount = AnimatedImageDrawable.REPEAT_INFINITE
drawable.callback = object : Drawable.Callback {
val handler = Handler()
override fun unscheduleDrawable(who: Drawable, what: Runnable) {
Log.d("AppLog", "unscheduleDrawable")
}
override fun invalidateDrawable(who: Drawable) {
who.draw(canvas)
imageView.setImageBitmap(bitmap)
Log.d("AppLog", "invalidateDrawable")
}
override fun scheduleDrawable(who: Drawable, what: Runnable, `when`: Long) {
Log.d("AppLog", "scheduleDrawable next frame in ${`when` - SystemClock.uptimeMillis()} ms")
handler.postAtTime(what, `when`)
}
}
drawable.start()
}
}
}.execute()
}
override fun onStart() {
super.onStart()
webpDrawable?.start()
}
override fun onStop() {
super.onStop()
webpDrawable?.stop()
}
}
See ImageDecoder.Source ...
One first needs to create a source, with either:
// source from file
val source = ImageDecoder.createSource(file)
// source from byte buffer
val source = ImageDecoder.createSource(byteBuffer)
// source from resource
val source = ImageDecoder.createSource(resources, resId)
// source from URI
val source = ImageDecoder.createSource(contentResolver, uri)
// source from asset file
val source = ImageDecoder.createSource(assetManager, assetFileName)
and then decode, with either:
// create bitmap
val bitmap = ImageDecoder.decodeBitmap(source)
// create drawable
val drawable = ImageDecoder.decodeDrawable(source)
Update: the problem is that the resulting AnimatedImageDrawable doesn't have the two methods getNumberOfFrames() and getFrame(int) that AnimationDrawable has. As @androiddeveloper pointed out, I mixed up two different classes. I've double-checked the documentation and there seems to be no way. With GIFImageReader it can still be extracted (source):
ArrayList<BufferedImage> getFrames(File gif) throws IOException {
ArrayList<BufferedImage> frames = new ArrayList<BufferedImage>();
ImageReader ir = new GIFImageReader(new GIFImageReaderSpi());
ir.setInput(ImageIO.createImageInputStream(gif));
for(int i = 0; i < ir.getNumImages(true); i++) {
frames.add(ir.read(i));
}
return frames;
}
I just tried to convert it to Kotlin, but javax.imageio.ImageIO is not available on Android.
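One partial workaround on older Android versions is the old android.graphics.Movie class, which decodes GIF but is time-based rather than frame-indexed; a hedged sketch of sampling it into bitmaps:
// Sketch: sample a GIF with android.graphics.Movie (available since API 1,
// deprecated in API 28). Movie exposes a total duration, not per-frame
// delays, so this samples on a fixed time grid.
Movie movie = Movie.decodeStream(inputStream);
int duration = Math.max(movie.duration(), 1); // duration() is 0 for a single frame
Bitmap bitmap = Bitmap.createBitmap(movie.width(), movie.height(),
        Bitmap.Config.ARGB_8888);
Canvas canvas = new Canvas(bitmap);
for (int t = 0; t < duration; t += 40) { // ~25 fps sampling grid
    movie.setTime(t);
    movie.draw(canvas, 0f, 0f);
    // consume/copy 'bitmap' here; it is redrawn on the next iteration
}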
I played with GIF images a few years ago. My idea was to decode the GIF into frames, convert the frames to bitmaps, and then create an animated Drawable from the bitmaps and the delay between frames.
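As a sketch of that final assembly step (assuming the frames and per-frame delays have already been extracted, e.g. by the decoder below):
// Sketch: build an AnimationDrawable from decoded frames and their delays.
AnimationDrawable animation = new AnimationDrawable();
for (int i = 0; i < decoder.getFrameCount(); i++) {
    Bitmap frame = decoder.getFrameImage(i);
    int delayMs = decoder.getDelay(i);
    animation.addFrame(new BitmapDrawable(resources, frame), delayMs);
}
animation.setOneShot(false); // loop forever
imageView.setImageDrawable(animation);
animation.start();
This is the decode class: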
public class GifDecoder extends Thread {
public static final int STATUS_PARSING = 0;
public static final int STATUS_FORMAT_ERROR = 1;
public static final int STATUS_OPEN_ERROR = 2;
public static final int STATUS_FINISH = -1;
private static final int MaxStackSize = 4096;
public int width; // full image width
public int height; // full image height
int[] lastPixels;
int[] dest;
private InputStream in;
private int status;
private boolean gctFlag; // global color table used
private int gctSize; // size of global color table
private int loopCount = 1; // iterations; 0 = repeat forever
private int[] gct; // global color table
private int[] lct; // local color table
private int[] act; // active color table
private int bgIndex; // background color index
private int bgColor; // background color
private int lastBgColor; // previous bg color
private int pixelAspect; // pixel aspect ratio
private boolean lctFlag; // local color table flag
private boolean interlace; // interlace flag
private int lctSize; // local color table size
private int ix, iy, iw, ih; // current image rectangle
private int lrx, lry, lrw, lrh;
private GifFrame currentFrame = null;
private boolean isShow = false;
private byte[] block = new byte[256]; // current data block
private int blockSize = 0; // block size
private int dispose = 0;
private int lastDispose = 0;
private boolean transparency = false; // use transparent color
// max decoder pixel stack size
private int delay = 0; // delay in milliseconds
private int transIndex; // transparent color index
// LZW decoder working arrays
private short[] prefix;
private byte[] suffix;
private byte[] pixelStack;
private byte[] pixels;
private GifFrame gifFrame; // frames read from current file
private int frameCount;
private GifAction action = null;
private byte[] gifData = null;
private int gifDataOffset;
private int gifDataLength;
private GifDecoder() {
}
public GifDecoder(byte[] data, GifAction act) {
this(data, 0, data.length, act);
}
public GifDecoder(byte[] data, int offset, int length, GifAction act) {
gifData = data;
action = act;
gifDataOffset = offset;
gifDataLength = length;
}
public GifDecoder(InputStream is, GifAction act) {
in = is;
action = act;
}
public void run() {
if (in != null) {
readStream();
} else if (gifData != null) {
readByte();
}
}
public void free() {
GifFrame fg = gifFrame;
while (fg != null) {
if (fg.image != null) {
fg.image.recycle();
}
fg.image = null;
fg = null;
gifFrame = gifFrame.nextFrame;
fg = gifFrame;
}
if (in != null) {
try {
in.close();
} catch (Exception ex) {
}
in = null;
}
gifData = null;
}
public int getStatus() {
return status;
}
public boolean parseOk() {
return status == STATUS_FINISH;
}
public int getDelay(int n) {
delay = -1;
if ((n >= 0) && (n < frameCount)) {
GifFrame f = getFrame(n);
if (f != null) delay = f.delay;
}
return delay;
}
public GifFrame getFrame(int n) {
GifFrame frame = gifFrame;
int i = 0;
while (frame != null) {
if (i == n) {
return frame;
} else {
frame = frame.nextFrame;
}
i++;
}
return null;
}
public int[] getDelays() {
GifFrame f = gifFrame;
int[] d = new int[frameCount];
int i = 0;
while (f != null && i < frameCount) {
d[i] = f.delay;
f = f.nextFrame;
i++;
}
return d;
}
public int getFrameCount() {
return frameCount;
}
public Bitmap getImage() {
return getFrameImage(0);
}
public Bitmap getFrameImage(int n) {
GifFrame frame = getFrame(n);
if (frame == null) {
return null;
} else {
return frame.image;
}
}
public int getLoopCount() {
return loopCount;
}
public GifFrame getCurrentFrame() {
return currentFrame;
}
public void reset() {
currentFrame = gifFrame;
}
public GifFrame next() {
if (!isShow) {
isShow = true;
return gifFrame;
} else {
if (status == STATUS_PARSING) {
if (currentFrame.nextFrame != null) currentFrame = currentFrame.nextFrame;
//currentFrame = gifFrame;
} else {
currentFrame = currentFrame.nextFrame;
if (currentFrame == null) {
currentFrame = gifFrame;
}
}
return currentFrame;
}
}
private Bitmap setPixels() {
if (dest == null) dest = new int[width * height];
// fill in starting image contents based on last image's dispose code
if (lastDispose > 0) {
if (lastDispose == 3) {
// use image before last
int n = frameCount - 2;
if (n > 0) {
Bitmap lastImage = getFrameImage(n - 1);
if (lastPixels == null) lastPixels = new int[width * height];
lastImage.getPixels(lastPixels, 0, width, 0, 0, width, height);
} else {
lastPixels = null;
}
}
if (lastPixels != null) {
dest = Arrays.copyOf(lastPixels, lastPixels.length);
// copy pixels
if (lastDispose == 2) {
// fill last image rect area with background color
int c = 0;
if (!transparency) {
c = lastBgColor;
}
for (int i = 0; i < lrh; i++) {
int n1 = (lry + i) * width + lrx;
int n2 = n1 + lrw;
for (int k = n1; k < n2; k++) {
dest[k] = c;
}
}
}
}
}
// copy each source line to the appropriate place in the destination
int pass = 1;
int inc = 8;
int iline = 0;
for (int i = 0; i < ih; i++) {
int line = i;
if (interlace) {
if (iline >= ih) {
pass++;
switch (pass) {
case 2:
iline = 4;
break;
case 3:
iline = 2;
inc = 4;
break;
case 4:
iline = 1;
inc = 2;
}
}
line = iline;
iline += inc;
}
line += iy;
if (line < height) {
int k = line * width;
int dx = k + ix; // start of line in dest
int dlim = dx + iw; // end of dest line
if ((k + width) < dlim) {
dlim = k + width; // past dest edge
}
int sx = i * iw; // start of line in source
while (dx < dlim) {
// map color and insert in destination
int index = ((int) pixels[sx++]) & 0xff;
int c = act[index];
if (c != 0) {
dest[dx] = c;
}
dx++;
}
}
}
return Bitmap.createBitmap(dest, width, height, Config.ARGB_4444);
}
private int readByte() {
in = new ByteArrayInputStream(gifData, gifDataOffset, gifDataLength);
gifData = null;
return readStream();
}
private int readStream() {
init();
if (in != null) {
readHeader();
if (!err()) {
readContents();
if (frameCount <= 0) { // nothing was decoded: treat as a format error
status = STATUS_FORMAT_ERROR;
action.parseOk(false, -1);
} else {
status = STATUS_FINISH;
action.parseOk(true, -1);
}
}
try {
in.close();
} catch (Exception e) {
e.printStackTrace();
}
} else {
status = STATUS_OPEN_ERROR;
action.parseOk(false, -1);
}
return status;
}
private void decodeImageData() {
int NullCode = -1;
int npix = iw * ih;
int available, clear, code_mask, code_size, end_of_information, in_code, old_code, bits,
code,
count, i, datum, data_size, first, top, bi, pi;
if ((pixels == null) || (pixels.length < npix)) {
pixels = new byte[npix]; // allocate new pixel array
}
if (prefix == null) {
prefix = new short[MaxStackSize];
}
if (suffix == null) {
suffix = new byte[MaxStackSize];
}
if (pixelStack == null) {
pixelStack = new byte[MaxStackSize + 1];
}
// Initialize GIF data stream decoder.
data_size = read();
clear = 1 << data_size;
end_of_information = clear + 1;
available = clear + 2;
old_code = NullCode;
code_size = data_size + 1;
code_mask = (1 << code_size) - 1;
for (code = 0; code < clear; code++) {
prefix[code] = 0;
suffix[code] = (byte) code;
}
// Decode GIF pixel stream.
datum = bits = count = first = top = pi = bi = 0;
for (i = 0; i < npix; ) {
if (top == 0) {
if (bits < code_size) {
// Load bytes until there are enough bits for a code.
if (count == 0) {
// Read a new data block.
count = readBlock();
if (count <= 0) {
break;
}
bi = 0;
}
datum += (((int) block[bi]) & 0xff) << bits;
bits += 8;
bi++;
count--;
continue;
}
// Get the next code.
code = datum & code_mask;
datum >>= code_size;
bits -= code_size;
// Interpret the code
if ((code > available) || (code == end_of_information)) {
break;
}
if (code == clear) {
// Reset decoder.
code_size = data_size + 1;
code_mask = (1 << code_size) - 1;
available = clear + 2;
old_code = NullCode;
continue;
}
if (old_code == NullCode) {
pixelStack[top++] = suffix[code];
old_code = code;
first = code;
continue;
}
in_code = code;
if (code == available) {
pixelStack[top++] = (byte) first;
code = old_code;
}
while (code > clear) {
pixelStack[top++] = suffix[code];
code = prefix[code];
}
first = ((int) suffix[code]) & 0xff;
// Add a new string to the string table,
if (available >= MaxStackSize) {
break;
}
pixelStack[top++] = (byte) first;
prefix[available] = (short) old_code;
suffix[available] = (byte) first;
available++;
if (((available & code_mask) == 0) && (available < MaxStackSize)) {
code_size++;
code_mask += available;
}
old_code = in_code;
}
// Pop a pixel off the pixel stack.
top--;
pixels[pi++] = pixelStack[top];
i++;
}
for (i = pi; i < npix; i++) {
pixels[i] = 0; // clear missing pixels
}
}
private boolean err() {
return status != STATUS_PARSING;
}
private void init() {
status = STATUS_PARSING;
frameCount = 0;
gifFrame = null;
gct = null;
lct = null;
}
private int read() {
int curByte = 0;
try {
curByte = in.read();
} catch (Exception e) {
status = STATUS_FORMAT_ERROR;
}
return curByte;
}
private int readBlock() {
blockSize = read();
int n = 0;
if (blockSize > 0) {
try {
int count = 0;
while (n < blockSize) {
count = in.read(block, n, blockSize - n);
if (count == -1) {
break;
}
n += count;
}
} catch (Exception e) {
e.printStackTrace();
}
if (n < blockSize) {
status = STATUS_FORMAT_ERROR;
}
}
return n;
}
private int[] readColorTable(int ncolors) {
int nbytes = 3 * ncolors;
int[] tab = null;
byte[] c = new byte[nbytes];
int n = 0;
try {
n = in.read(c);
} catch (Exception e) {
e.printStackTrace();
}
if (n < nbytes) {
status = STATUS_FORMAT_ERROR;
} else {
tab = new int[256]; // max size to avoid bounds checks
int i = 0;
int j = 0;
while (i < ncolors) {
int r = ((int) c[j++]) & 0xff;
int g = ((int) c[j++]) & 0xff;
int b = ((int) c[j++]) & 0xff;
tab[i++] = 0xff000000 | (r << 16) | (g << 8) | b;
}
}
return tab;
}
private void readContents() {
// read GIF file content blocks
boolean done = false;
while (!(done || err())) {
int code = read();
switch (code) {
case 0x2C: // image separator
readImage();
break;
case 0x21: // extension
code = read();
switch (code) {
case 0xf9: // graphics control extension
readGraphicControlExt();
break;
case 0xff: // application extension
readBlock();
String app = "";
for (int i = 0; i < 11; i++) {
app += (char) block[i];
}
if (app.equals("NETSCAPE2.0")) {
readNetscapeExt();
} else {
skip(); // don't care
}
break;
default: // uninteresting extension
skip();
}
break;
case 0x3b: // terminator
done = true;
break;
case 0x00: // bad byte, but keep going and see what happens
break;
default:
status = STATUS_FORMAT_ERROR;
}
}
}
private void readGraphicControlExt() {
read(); // block size
int packed = read(); // packed fields
dispose = (packed & 0x1c) >> 2; // disposal method
if (dispose == 0) {
dispose = 1; // elect to keep old image if discretionary
}
transparency = (packed & 1) != 0;
delay = readShort() * 10; // delay in milliseconds
transIndex = read(); // transparent color index
read(); // block terminator
}
private void readHeader() {
String id = "";
for (int i = 0; i < 6; i++) {
id += (char) read();
}
if (!id.startsWith("GIF")) {
status = STATUS_FORMAT_ERROR;
return;
}
readLSD();
if (gctFlag && !err()) {
gct = readColorTable(gctSize);
bgColor = gct[bgIndex];
}
}
private void readImage() {
ix = readShort(); // (sub)image position & size
iy = readShort();
iw = readShort();
ih = readShort();
int packed = read();
lctFlag = (packed & 0x80) != 0; // 1 - local color table flag
interlace = (packed & 0x40) != 0; // 2 - interlace flag
// 3 - sort flag
// 4-5 - reserved
lctSize = 2 << (packed & 7); // 6-8 - local color table size
if (lctFlag) {
lct = readColorTable(lctSize); // read table
act = lct; // make local table active
} else {
act = gct; // make global table active
if (bgIndex == transIndex) {
bgColor = 0;
}
}
if (act == null) {
status = STATUS_FORMAT_ERROR; // no color table defined
}
if (err()) {
return;
}
int save = 0;
if (transparency) {
save = act[transIndex]; // safe: act was checked above
act[transIndex] = 0; // set transparent color if specified
}
try {
decodeImageData(); // decode pixel data
skip();
if (err()) {
return;
}
frameCount++;
// create new image to receive frame data
// createImage(width, height);
Bitmap image = setPixels(); // transfer pixel data to image
if (gifFrame == null) {
gifFrame = new GifFrame(image, delay);
currentFrame = gifFrame;
} else {
GifFrame f = gifFrame;
while (f.nextFrame != null) {
f = f.nextFrame;
}
f.nextFrame = new GifFrame(image, delay);
}
// frames.addElement(new GifFrame(image, delay)); // add image to frame
// list
if (transparency) {
act[transIndex] = save;
}
resetFrame();
if (!action.parseOk(true, frameCount)) {
status = STATUS_FINISH;
return;
}
} catch (OutOfMemoryError e) {
Log.e("GifDecoder", ">>> log : " + e.toString());
e.printStackTrace();
}
}
private void readLSD() {
// logical screen size
width = readShort();
height = readShort();
// packed fields
int packed = read();
gctFlag = (packed & 0x80) != 0; // 1 : global color table flag
// 2-4 : color resolution
// 5 : gct sort flag
gctSize = 2 << (packed & 7); // 6-8 : gct size
bgIndex = read(); // background color index
pixelAspect = read(); // pixel aspect ratio
}
private void readNetscapeExt() {
do {
readBlock();
if (block[0] == 1) {
// loop count sub-block
int b1 = ((int) block[1]) & 0xff;
int b2 = ((int) block[2]) & 0xff;
loopCount = (b2 << 8) | b1;
}
} while ((blockSize > 0) && !err());
}
private int readShort() {
// read 16-bit value, LSB first
return read() | (read() << 8);
}
private void resetFrame() {
lastDispose = dispose;
lrx = ix;
lry = iy;
lrw = iw;
lrh = ih;
lastPixels = dest;
lastBgColor = bgColor;
dispose = 0;
transparency = false;
delay = 0;
lct = null;
}
/**
* Skips variable length blocks up to and including next zero length block.
*/
private void skip() {
do {
readBlock();
} while ((blockSize > 0) && !err());
}
}
I uploaded the full demo source here. Hope it can help you.
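To round out the idea described at the top of this answer, here is a minimal sketch (not part of the linked demo) that turns the decoded frames into an AnimationDrawable. gifBytes and imageView are placeholder names, and GifAction is assumed to be a one-method callback interface matching the decoder's call sites:
GifDecoder decoder = new GifDecoder(gifBytes, new GifAction() {
    @Override
    public boolean parseOk(boolean parseStatus, int frameIndex) {
        return true; // keep decoding; a real app could report progress here
    }
});
decoder.run(); // decode on the current thread; call start() to decode in the background
if (decoder.parseOk()) {
    AnimationDrawable anim = new AnimationDrawable();
    for (int i = 0; i < decoder.getFrameCount(); i++) {
        // wrap each frame Bitmap in a BitmapDrawable and keep its delay
        anim.addFrame(new BitmapDrawable(getResources(), decoder.getFrameImage(i)),
                Math.max(decoder.getDelay(i), 20)); // clamp zero-ms delays
    }
    anim.setOneShot(decoder.getLoopCount() != 0);
    imageView.setImageDrawable(anim);
    anim.start();
}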
EDIT: In actually implementing this, I encountered a couple of unexpected problems, but nothing insurmountable:
AnimatedImageDrawable seems to ignore its configured bounds. I scaled the canvas instead.
For reasons I don't understand, AnimatedImageDrawable.draw() occasionally neglects to schedule the next frame. I decided to call the function twice. The second time, I translate the canvas so that all drawing is out of bounds, which should allow most of the work to be optimized away.
Here's the sample code.
import android.annotation.*;
import android.graphics.*;
import android.graphics.drawable.*;
import android.os.*;
import android.service.wallpaper.*;
import android.support.annotation.*;
import android.view.*;
@TargetApi(28)
public class TestWallpaper extends WallpaperService
{
@Override public Engine onCreateEngine()
{
return new Engine();
}
private class Engine extends WallpaperService.Engine implements Drawable.Callback
{
private final Drawable d;
private final Handler h = new Handler();
private float scaleX, scaleY;
private Engine()
{
this.setOffsetNotificationsEnabled(false);
Drawable d = null;
try
{
d = ImageDecoder
.decodeDrawable(ImageDecoder.createSource(getResources(), R.drawable.test));
d.setCallback(this);
// AnimatedImageDrawable seems to ignore its configured bounds and use its
// intrinsic bounds instead.
// In case they fix this bug, we'll go ahead and request the current
// behavior, and then before drawing we'll transform the canvas to compensate
d.setBounds(0, 0, d.getIntrinsicWidth(), d.getIntrinsicHeight());
if (d instanceof AnimatedImageDrawable)
{
final AnimatedImageDrawable anim = (AnimatedImageDrawable) d;
anim.setRepeatCount(AnimatedImageDrawable.REPEAT_INFINITE);
anim.start();
}
}
catch (Throwable t) // should never happen
{
t.printStackTrace();
}
this.d = d;
}
@Override public void invalidateDrawable(@NonNull Drawable _d)
{
if(isVisible())
draw(getSurfaceHolder().getSurface());
}
@Override public void scheduleDrawable(@NonNull Drawable _d, @NonNull Runnable _r, long _at)
{
if(isVisible())
h.postAtTime(_r, _d, _at);
}
@Override public void unscheduleDrawable(@NonNull Drawable _d, @NonNull Runnable _r)
{
h.removeCallbacks(_r, _d);
}
@Override public void onSurfaceChanged(SurfaceHolder _sh, int _format, int _w, int _h)
{
scaleX = (float) _w / d.getIntrinsicWidth();
scaleY = (float) _h / d.getIntrinsicHeight();
draw(_sh.getSurface());
}
@Override public void onSurfaceRedrawNeeded(SurfaceHolder _sh)
{
draw(_sh.getSurface());
}
private void draw(Surface _s)
{
try
{
final Canvas c = _s.lockCanvas(null);
c.scale(scaleX, scaleY);
d.draw(c);
// Sometimes AnimatedImageDrawable neglects to schedule the next frame
// after only one draw() of the current frame, so we'll draw() it again,
// but outside the canvas this time
c.translate(Float.MAX_VALUE, Float.MAX_VALUE);
d.draw(c);
//
_s.unlockCanvasAndPost(c);
}
catch (Throwable t)
{
t.printStackTrace();
// Most likely, the surface was destroyed while we were using it
// The new one will be delivered to onSurfaceChanged and we'll be fine
}
}
@Override public void onVisibilityChanged(boolean _visible)
{
super.onVisibilityChanged(_visible);
if(_visible)
draw(getSurfaceHolder().getSurface());
else
h.removeCallbacksAndMessages(null);
}
}
}
I have this code to show a GIF image with Movie.
public class GIFView extends View{
private Movie movie;
private InputStream is;
private long moviestart;
public GIFView(Context context) {
super(context);
is=getResources().openRawResource(R.drawable.anim_cerca);
movie=Movie.decodeStream(is);
}
@Override
protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
long now = android.os.SystemClock.uptimeMillis();
if (moviestart == 0)
moviestart = now;
int dur = movie.duration();
if (dur == 0) dur = 1000; // some GIFs report 0 duration; avoid % by zero
int relTime = (int) ((now - moviestart) % dur);
movie.setTime(relTime);
movie.draw(canvas, 10, 10);
this.invalidate();
}
}
My problem starts when the GIF is loaded: it draws very badly, only the first frame is shown and the others look corrupted. What can I do?
EDIT: The problem was the emulator! It doesn't show the GIF, but on a real device it's OK! :)
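Note that you can get the same garbled rendering on a real device when the view is hardware accelerated, because Movie only draws through the software pipeline. A minimal sketch of the usual fix, applied to the constructor above (API 11+):
public GIFView(Context context) {
    super(context);
    // Movie is not supported by the hardware-accelerated canvas,
    // so force this view to render through a software layer
    setLayerType(View.LAYER_TYPE_SOFTWARE, null);
    is = getResources().openRawResource(R.drawable.anim_cerca);
    movie = Movie.decodeStream(is);
}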
Good start.
I made it more useful by supporting loading different GIFs after the view has been created, from either assets or resources. Also, on devices with hardware acceleration I was getting blank views, so I turned it off for this GIFView.
Also, be sure to put animated GIFs in the res/drawable-xhdpi directory (or in assets, if loading them that way).
public class GIFView extends View{
Movie movie;
long moviestart;
public GIFView(Context context) throws IOException {
super(context);
}
public GIFView(Context context, AttributeSet attrs) throws IOException{
super(context, attrs);
}
public GIFView(Context context, AttributeSet attrs, int defStyle) throws IOException {
super(context, attrs, defStyle);
}
public void loadGIFResource(Context context, int id)
{
//turn off hardware acceleration
this.setLayerType(View.LAYER_TYPE_SOFTWARE, null);
InputStream is=context.getResources().openRawResource(id);
movie = Movie.decodeStream(is);
}
public void loadGIFAsset(Context context, String filename)
{
InputStream is;
try {
is = context.getResources().getAssets().open(filename);
movie = Movie.decodeStream(is);
} catch (IOException e) {
e.printStackTrace();
}
}
@Override
protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
if (movie == null) {
return;
}
long now = android.os.SystemClock.uptimeMillis();
if (moviestart == 0) moviestart = now;
int dur = movie.duration();
if (dur == 0) dur = 1000; // some GIFs report 0 duration; avoid % by zero
int relTime = (int) ((now - moviestart) % dur);
movie.setTime(relTime);
movie.draw(canvas, 10, 10);
this.invalidate();
}
}
Usage:
imageView.loadGIFResource(this, R.drawable.quickguide_1);
and
<com.eyeverify.GIFView
android:id="#+id/imageView"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_centerHorizontal="true"
android:layout_marginBottom="50dp"
android:layout_marginTop="110dp"
android:src="#drawable/quickguide_1"/>
I solved it this way:
public class GIFView extends View{
private Movie movie;
private long moviestart;
public GIFView(Context context) throws IOException {
super(context);
movie=Movie.decodeStream(getResources().getAssets().open("anim_cerca.gif"));
}
public GIFView(Context context, AttributeSet attrs) throws IOException{
super(context, attrs);
movie=Movie.decodeStream(getResources().getAssets().open("anim_cerca.gif"));
}
public GIFView(Context context, AttributeSet attrs, int defStyle) throws IOException {
super(context, attrs, defStyle);
movie=Movie.decodeStream(getResources().getAssets().open("anim_cerca.gif"));
}
@Override
protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
long now = android.os.SystemClock.uptimeMillis();
if (moviestart == 0)
moviestart = now;
int dur = movie.duration();
if (dur == 0) dur = 1000; // some GIFs report 0 duration; avoid % by zero
int relTime = (int) ((now - moviestart) % dur);
movie.setTime(relTime);
movie.draw(canvas, 0, 0);
this.invalidate();
}
}
and in the layout I put this custom view like this:
<spazio.digitale.com.GIFView
android:layout_marginLeft="30dp" android:layout_gravity="center"
android:layout_width="wrap_content" android:layout_height="220dp"
android:id="#+id/GIFSingle">
</spazio.digitale.com.GIFView>
Create a GifDecoder class:
import java.io.InputStream;
import java.util.Vector;
import android.graphics.Bitmap;
import android.graphics.Bitmap.Config;
public class GifDecoder
{
public static final int STATUS_OK = 0;
/**
* File read status: Error decoding file (may be partially decoded)
*/
public static final int STATUS_FORMAT_ERROR = 1;
/**
* File read status: Unable to open source.
*/
public static final int STATUS_OPEN_ERROR = 2;
protected static final int MAX_STACK_SIZE = 4096*2;
public static final int MIN_DELAY = 100;
public static final int MIN_DELAY_ENFORCE_THRESHOLD = 20;
protected InputStream in;
protected int status;
protected int width; // full image width
protected int height; // full image height
protected boolean gctFlag; // global color table used
protected int gctSize; // size of global color table
protected int loopCount = 1; // iterations; 0 = repeat forever
protected int[] gct; // global color table
protected int[] lct; // local color table
protected int[] act; // active color table
protected int bgIndex; // background color index
protected int bgColor; // background color
protected int lastBgColor; // previous bg color
protected int pixelAspect; // pixel aspect ratio
protected boolean lctFlag; // local color table flag
protected boolean interlace; // interlace flag
protected int lctSize; // local color table size
protected int ix, iy, iw, ih; // current image rectangle
protected int lrx, lry, lrw, lrh;
protected Bitmap image; // current frame
protected Bitmap lastBitmap; // previous frame
protected byte[] block = new byte[256]; // current data block
protected int blockSize = 0; // block size last graphic control extension info
protected int dispose = 0; // 0=no action; 1=leave in place; 2=restore to bg; 3=restore to prev
protected int lastDispose = 0;
protected boolean transparency = false; // use transparent color
protected int delay = 0; // delay in milliseconds
protected int transIndex; // transparent color index
// LZW decoder working arrays
protected short[] prefix;
protected byte[] suffix;
protected byte[] pixelStack;
protected byte[] pixels;
protected Vector<GifFrame> frames;
protected int frameCount;
private boolean readComplete;
public GifDecoder()
{
readComplete = false;
}
private static class GifFrame {
public GifFrame(Bitmap im, int del) {
image = im;
delay = del;
}
public Bitmap image;
public int delay;
}
/**
* Gets display duration for specified frame.
*
* @param n
* int index of frame
* @return delay in milliseconds
*/
public int getDelay(int n) {
delay = -1;
if ((n >= 0) && (n < frameCount)) {
delay = frames.elementAt(n).delay;
//meets browser compatibility standards
if (delay < MIN_DELAY_ENFORCE_THRESHOLD) delay = MIN_DELAY;
}
return delay;
}
/**
* Gets the number of frames read from file.
*
* @return frame count
*/
public int getFrameCount() {
return frameCount;
}
/**
* Gets the first (or only) image read.
*
* @return Bitmap containing first frame, or null if none.
*/
public Bitmap getBitmap() {
return getFrame(0);
}
/**
* Gets the "Netscape" iteration count, if any. A count of 0 means repeat indefinitiely.
*
* #return iteration count if one was specified, else 1.
*/
public int getLoopCount() {
return loopCount;
}
/**
* Creates new frame image from current data (and previous frames as specified by their disposition codes).
*/
protected void setPixels() {
// expose destination image's pixels as int array
int[] dest = new int[width * height];
// fill in starting image contents based on last image's dispose code
if (lastDispose > 0) {
if (lastDispose == 3) {
// use image before last
int n = frameCount - 2;
if (n > 0) {
lastBitmap = getFrame(n - 1);
} else {
lastBitmap = null;
}
}
if (lastBitmap != null) {
lastBitmap.getPixels(dest, 0, width, 0, 0, width, height);
// copy pixels
if (lastDispose == 2) {
// fill last image rect area with background color
int c = 0;
if (!transparency) {
c = lastBgColor;
}
for (int i = 0; i < lrh; i++) {
int n1 = (lry + i) * width + lrx;
int n2 = n1 + lrw;
for (int k = n1; k < n2; k++) {
dest[k] = c;
}
}
}
}
}
// copy each source line to the appropriate place in the destination
int pass = 1;
int inc = 8;
int iline = 0;
for (int i = 0; i < ih; i++) {
int line = i;
if (interlace) {
if (iline >= ih) {
pass++;
switch (pass) {
case 2:
iline = 4;
break;
case 3:
iline = 2;
inc = 4;
break;
case 4:
iline = 1;
inc = 2;
break;
default:
break;
}
}
line = iline;
iline += inc;
}
line += iy;
if (line < height) {
int k = line * width;
int dx = k + ix; // start of line in dest
int dlim = dx + iw; // end of dest line
if ((k + width) < dlim) {
dlim = k + width; // past dest edge
}
int sx = i * iw; // start of line in source
while (dx < dlim) {
// map color and insert in destination
int index = ((int) pixels[sx++]) & 0xff;
int c = act[index];
if (c != 0) {
dest[dx] = c;
}
dx++;
}
}
}
image = Bitmap.createBitmap(dest, width, height, Config.ARGB_4444);
}
/**
* Gets the image contents of frame n.
*
* @return Bitmap representation of frame, or null if n is invalid.
*/
public Bitmap getFrame(int n) {
if (frameCount <= 0)
return null;
n = n % frameCount;
return ((GifFrame) frames.elementAt(n)).image;
}
/**
* Reads GIF image from stream
*
* @param is
* containing GIF file.
* @return read status code (0 = no errors)
*/
public int read(InputStream is)
{
init();
if (is != null) {
in = is;
readHeader();
if (!err()) {
readContents();
if (frameCount <= 0) { // nothing was decoded: treat as a format error
status = STATUS_FORMAT_ERROR;
}
}
} else {
status = STATUS_OPEN_ERROR;
}
readComplete = true;
return status;
}
public void complete()
{
readContents();
try {
in.close();
} catch (Exception e) {
}
}
/**
* Decodes LZW image data into pixel array. Adapted from John Cristy's ImageMagick.
*/
protected void decodeBitmapData() {
int nullCode = -1;
int npix = iw * ih;
int available, clear, code_mask, code_size, end_of_information, in_code, old_code, bits, code, count, i, datum, data_size, first, top, bi, pi;
if ((pixels == null) || (pixels.length < npix)) {
pixels = new byte[npix]; // allocate new pixel array
}
if (prefix == null) {
prefix = new short[MAX_STACK_SIZE];
}
if (suffix == null) {
suffix = new byte[MAX_STACK_SIZE];
}
if (pixelStack == null) {
pixelStack = new byte[MAX_STACK_SIZE + 1];
}
// Initialize GIF data stream decoder.
data_size = read();
clear = 1 << data_size;
end_of_information = clear + 1;
available = clear + 2;
old_code = nullCode;
code_size = data_size + 1;
code_mask = (1 << code_size) - 1;
for (code = 0; code < clear; code++) {
prefix[code] = 0; // XXX ArrayIndexOutOfBoundsException
suffix[code] = (byte) code;
}
// Decode GIF pixel stream.
datum = bits = count = first = top = pi = bi = 0;
for (i = 0; i < npix;) {
if (top == 0) {
if (bits < code_size) {
// Load bytes until there are enough bits for a code.
if (count == 0) {
// Read a new data block.
count = readBlock();
if (count <= 0) {
break;
}
bi = 0;
}
datum += (((int) block[bi]) & 0xff) << bits;
bits += 8;
bi++;
count--;
continue;
}
// Get the next code.
code = datum & code_mask;
datum >>= code_size;
bits -= code_size;
// Interpret the code
if ((code > available) || (code == end_of_information)) {
break;
}
if (code == clear) {
// Reset decoder.
code_size = data_size + 1;
code_mask = (1 << code_size) - 1;
available = clear + 2;
old_code = nullCode;
continue;
}
if (old_code == nullCode) {
pixelStack[top++] = suffix[code];
old_code = code;
first = code;
continue;
}
in_code = code;
if (code == available) {
pixelStack[top++] = (byte) first;
code = old_code;
}
while (code > clear) {
pixelStack[top++] = suffix[code];
code = prefix[code];
}
first = ((int) suffix[code]) & 0xff;
// Add a new string to the string table,
if (available >= MAX_STACK_SIZE) {
break;
}
pixelStack[top++] = (byte) first;
prefix[available] = (short) old_code;
suffix[available] = (byte) first;
available++;
if (((available & code_mask) == 0) && (available < MAX_STACK_SIZE)) {
code_size++;
code_mask += available;
}
old_code = in_code;
}
// Pop a pixel off the pixel stack.
top--;
pixels[pi++] = pixelStack[top];
i++;
}
for (i = pi; i < npix; i++) {
pixels[i] = 0; // clear missing pixels
}
}
/**
* Returns true if an error was encountered during reading/decoding
*/
protected boolean err() {
return status != STATUS_OK;
}
/**
* Initializes or re-initializes reader
*/
protected void init() {
status = STATUS_OK;
frameCount = 0;
frames = new Vector<GifFrame>();
gct = null;
lct = null;
}
/**
* Reads a single byte from the input stream.
*/
protected int read() {
int curByte = 0;
try {
curByte = in.read();
} catch (Exception e) {
status = STATUS_FORMAT_ERROR;
}
return curByte;
}
/**
* Reads next variable length block from input.
*
* @return number of bytes stored in "buffer"
*/
protected int readBlock() {
blockSize = read();
int n = 0;
if (blockSize > 0) {
try {
int count = 0;
while (n < blockSize) {
count = in.read(block, n, blockSize - n);
if (count == -1) {
break;
}
n += count;
}
} catch (Exception e) {
e.printStackTrace();
}
if (n < blockSize) {
status = STATUS_FORMAT_ERROR;
}
}
return n;
}
/**
* Reads color table as 256 RGB integer values
*
* @param ncolors
* int number of colors to read
* @return int array containing 256 colors (packed ARGB with full alpha)
*/
protected int[] readColorTable(int ncolors) {
int nbytes = 3 * ncolors;
int[] tab = null;
byte[] c = new byte[nbytes];
int n = 0;
try {
n = in.read(c);
} catch (Exception e) {
e.printStackTrace();
}
if (n < nbytes) {
status = STATUS_FORMAT_ERROR;
} else {
tab = new int[256]; // max size to avoid bounds checks
int i = 0;
int j = 0;
while (i < ncolors) {
int r = ((int) c[j++]) & 0xff;
int g = ((int) c[j++]) & 0xff;
int b = ((int) c[j++]) & 0xff;
tab[i++] = 0xff000000 | (r << 16) | (g << 8) | b;
}
}
return tab;
}
/**
* Main file parser. Reads GIF content blocks.
*/
protected void readContents() {
// read GIF file content blocks
boolean done = false;
while (!(done || err())) {
int code = read();
switch (code) {
case 0x2C: // image separator
readBitmap();
if(!readComplete) return;
break;
case 0x21: // extension
code = read();
switch (code) {
case 0xf9: // graphics control extension
readGraphicControlExt();
break;
case 0xff: // application extension
readBlock();
String app = "";
for (int i = 0; i < 11; i++) {
app += (char) block[i];
}
if (app.equals("NETSCAPE2.0")) {
readNetscapeExt();
} else {
skip(); // don't care
}
break;
case 0xfe:// comment extension
skip();
break;
case 0x01:// plain text extension
skip();
break;
default: // uninteresting extension
skip();
}
break;
case 0x3b: // terminator
done = true;
break;
case 0x00: // bad byte, but keep going and see what happens
break;
default:
status = STATUS_FORMAT_ERROR;
}
}
}
/**
* Reads Graphics Control Extension values
*/
protected void readGraphicControlExt() {
read(); // block size
int packed = read(); // packed fields
dispose = (packed & 0x1c) >> 2; // disposal method
if (dispose == 0) {
dispose = 1; // elect to keep old image if discretionary
}
transparency = (packed & 1) != 0;
delay = readShort() * 10; // delay in milliseconds
transIndex = read(); // transparent color index
read(); // block terminator
}
/**
* Reads GIF file header information.
*/
protected void readHeader() {
String id = "";
for (int i = 0; i < 6; i++) {
id += (char) read();
}
if (!id.startsWith("GIF")) {
status = STATUS_FORMAT_ERROR;
return;
}
readLSD();
if (gctFlag && !err()) {
gct = readColorTable(gctSize);
bgColor = gct[bgIndex];
}
}
/**
* Reads next frame image
*/
protected void readBitmap() {
ix = readShort(); // (sub)image position & size
iy = readShort();
iw = readShort();
ih = readShort();
int packed = read();
lctFlag = (packed & 0x80) != 0; // 1 - local color table flag
interlace = (packed & 0x40) != 0; // 2 - interlace flag
// 3 - sort flag
// 4-5 - reserved
lctSize = 2 << (packed & 7); // 6-8 - local color table size
if (lctFlag) {
lct = readColorTable(lctSize); // read table
act = lct; // make local table active
} else {
act = gct; // make global table active
if (bgIndex == transIndex) {
bgColor = 0;
}
}
if (act == null) {
status = STATUS_FORMAT_ERROR; // no color table defined
}
if (err()) {
return;
}
int save = 0;
if (transparency) {
save = act[transIndex]; // safe: act was checked above
act[transIndex] = 0; // set transparent color if specified
}
decodeBitmapData(); // decode pixel data
skip();
if (err()) {
return;
}
frameCount++;
// create new image to receive frame data
image = Bitmap.createBitmap(width, height, Config.ARGB_4444);
setPixels(); // transfer pixel data to image
frames.addElement(new GifFrame(image, delay)); // add image to frame
// list
if (transparency) {
act[transIndex] = save;
}
resetFrame();
}
/**
* Reads Logical Screen Descriptor
*/
protected void readLSD() {
// logical screen size
width = readShort();
height = readShort();
// packed fields
int packed = read();
gctFlag = (packed & 0x80) != 0; // 1 : global color table flag
// 2-4 : color resolution
// 5 : gct sort flag
gctSize = 2 << (packed & 7); // 6-8 : gct size
bgIndex = read(); // background color index
pixelAspect = read(); // pixel aspect ratio
}
/**
* Reads Netscape extenstion to obtain iteration count
*/
protected void readNetscapeExt() {
do {
readBlock();
if (block[0] == 1) {
// loop count sub-block
int b1 = ((int) block[1]) & 0xff;
int b2 = ((int) block[2]) & 0xff;
loopCount = (b2 << 8) | b1;
}
} while ((blockSize > 0) && !err());
}
/**
* Reads next 16-bit value, LSB first
*/
protected int readShort() {
// read 16-bit value, LSB first
return read() | (read() << 8);
}
/**
* Resets frame state for reading next image.
*/
protected void resetFrame() {
lastDispose = dispose;
lrx = ix;
lry = iy;
lrw = iw;
lrh = ih;
lastBitmap = image;
lastBgColor = bgColor;
dispose = 0;
transparency = false;
delay = 0;
lct = null;
}
/**
* Skips variable length blocks up to and including next zero length block.
*/
protected void skip() {
do {
readBlock();
} while ((blockSize > 0) && !err());
}
}
Create another class, GifAnimationDrawable:
import java.io.File;
import java.io.FileInputStream;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import android.graphics.drawable.AnimationDrawable;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.Bitmap;
public class GifAnimationDrawable extends AnimationDrawable
{
private boolean decoded;
private GifDecoder mGifDecoder;
private Bitmap mTmpBitmap;
private int height, width;
public GifAnimationDrawable(File f) throws IOException
{
this(f, false);
}
public GifAnimationDrawable(InputStream is) throws IOException
{
this(is, false);
}
public GifAnimationDrawable(File f, boolean inline) throws IOException
{
this(new BufferedInputStream(new FileInputStream(f), 32768), inline);
}
public GifAnimationDrawable(InputStream is, boolean inline) throws IOException
{
super();
InputStream bis = is;
if(!BufferedInputStream.class.isInstance(bis)) bis = new BufferedInputStream(is, 32768);
decoded = false;
mGifDecoder = new GifDecoder();
mGifDecoder.read(bis);
mTmpBitmap = mGifDecoder.getFrame(0);
height = mTmpBitmap.getHeight();
width = mTmpBitmap.getWidth();
addFrame(new BitmapDrawable(mTmpBitmap), mGifDecoder.getDelay(0));
setOneShot(mGifDecoder.getLoopCount() != 0);
setVisible(true, true);
if(inline){
loader.run();
}else{
new Thread(loader).start();
}
}
public boolean isDecoded(){ return decoded; }
private Runnable loader = new Runnable(){
public void run()
{
mGifDecoder.complete();
int i, n = mGifDecoder.getFrameCount(), t;
for(i=1;i<n;i++){
mTmpBitmap = mGifDecoder.getFrame(i);
t = mGifDecoder.getDelay(i);
addFrame(new BitmapDrawable(mTmpBitmap), t);
}
decoded = true;
mGifDecoder = null;
}
};
public int getMinimumHeight(){ return height; }
public int getMinimumWidth(){ return width; }
public int getIntrinsicHeight(){ return height; }
public int getIntrinsicWidth(){ return width; }
}
In your MainActivity, create a GifAnimationDrawable object:
GifAnimationDrawable gif;
try {
gif = new GifAnimationDrawable(getResources().openRawResource(
R.drawable.prof));
} catch (Exception e) {
e.printStackTrace();
}
ImageView im1 = (ImageView) findViewById(R.id.imageView1);
gif.setOneShot(false);
im1.setImageDrawable(gif);
gif.setVisible(true, true);
Using these classes you can add a GIF file to any view.
If you get blank frames on hardware-accelerated devices, set the following attribute on the Application element in your AndroidManifest.xml:
android:hardwareAccelerated="false"
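If you would rather not disable hardware acceleration app-wide, a per-view software layer (the same trick GIFView used earlier) should be enough; a sketch of that alternative:
// Confine the workaround to the one ImageView that shows the GIF
ImageView im1 = (ImageView) findViewById(R.id.imageView1);
im1.setLayerType(View.LAYER_TYPE_SOFTWARE, null);
im1.setImageDrawable(gif);
gif.setVisible(true, true);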
Or you can simply add a WebView to your XML layout and load the GIF image inside the WebView. You do not need to do anything else; the image will loop automatically inside the WebView.
The easiest way would be to use a WebView.
Store your animated GIF in the assets folder. Then load the GIF by using the following code:
WebView wv = (WebView) findViewById(R.id.webView1);
wv.loadUrl("file:///android_asset/anim5.gif");
webView1 is your WebView id, and anim5.gif is your GIF name.
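If the raw file URL leaves the GIF unscaled in the top-left corner, a common variant is to wrap it in a tiny HTML page instead (same anim5.gif asset as above):
WebView wv = (WebView) findViewById(R.id.webView1);
// The base URL makes the relative asset path resolve; the CSS handles scaling
String html = "<html><body style=\"margin:0\">"
        + "<img src=\"anim5.gif\" style=\"width:100%\"/>"
        + "</body></html>";
wv.loadDataWithBaseURL("file:///android_asset/", html, "text/html", "utf-8", null);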