Here is my logcat:
W/System.err: java.lang.IllegalStateException: Failed to add the track to the muxer
W/System.err: at android.media.MediaMuxer.nativeAddTrack(Native Method)
W/System.err: at android.media.MediaMuxer.addTrack(MediaMuxer.java:626)
W/System.err: at com.marvhong.videoeffect.composer.MuxRender.onSetOutputFormat(MuxRender.java:64)
W/System.err: at com.marvhong.videoeffect.composer.VideoComposer.drainEncoder(VideoComposer.java:224)
W/System.err: at com.marvhong.videoeffect.composer.VideoComposer.stepPipeline(VideoComposer.java:113)
W/System.err: at com.marvhong.videoeffect.composer.Mp4ComposerEngine.runPipelines(Mp4ComposerEngine.java:181)
W/System.err: at com.marvhong.videoeffect.composer.Mp4ComposerEngine.compose(Mp4ComposerEngine.java:127)
W/System.err: at com.marvhong.videoeffect.composer.Mp4Composer$1.run(Mp4Composer.java:198)
W/System.err: at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1167)
W/System.err: at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:641)
W/System.err: at java.lang.Thread.run(Thread.java:764)
E/TrimVideoActivity: filterVideo---onFailed()
In my application, I'm applying filters to a video. Sometimes the app crashes and sometimes it works fine; the error is "Failed to add the track to the muxer".
I have debugged the code and found that the crash only happens for videos that contain an audio track: applying a filter and saving works for video-only files, but saving the filtered video fails when audio is present (a possible cause is sketched after the MuxRender class below).
MuxRender Class:
class MuxRender {
private static final String TAG = "MuxRender";
private static final int BUFFER_SIZE = 64 * 1024; // I have no idea whether this value is appropriate or not...
private final MediaMuxer muxer;
private MediaFormat videoFormat;
private MediaFormat audioFormat;
private int videoTrackIndex;
private int audioTrackIndex;
private ByteBuffer byteBuffer;
private final List<SampleInfo> sampleInfoList;
private boolean started;
MuxRender(MediaMuxer muxer) {
this.muxer = muxer;
sampleInfoList = new ArrayList<>();
}
void setOutputFormat(SampleType sampleType, MediaFormat format) {
switch (sampleType) {
case VIDEO:
videoFormat = format;
break;
case AUDIO:
ObLogger.i(TAG, "format > " + format);
audioFormat = format;
break;
default:
throw new AssertionError();
}
}
void onSetOutputFormat() {
if (videoFormat != null && audioFormat != null) {
videoTrackIndex = muxer.addTrack(videoFormat);
ObLogger.v(TAG, "Added track #" + videoTrackIndex + " with " + videoFormat.getString(
MediaFormat.KEY_MIME) + " to muxer");
ObLogger.i(TAG, "audioFormat > " + audioFormat);
audioTrackIndex = muxer.addTrack(audioFormat);
ObLogger.v(TAG, "Added track #" + audioTrackIndex + " with " + audioFormat.getString(
MediaFormat.KEY_MIME) + " to muxer");
} else if (videoFormat != null) {
videoTrackIndex = muxer.addTrack(videoFormat);
ObLogger.v(TAG, "Added track #" + videoTrackIndex + " with " + videoFormat.getString(
MediaFormat.KEY_MIME) + " to muxer");
}
muxer.start();
started = true;
if (byteBuffer == null) {
byteBuffer = ByteBuffer.allocate(0);
}
byteBuffer.flip();
ObLogger.v(TAG, "Output format determined, writing " + sampleInfoList.size() +
" samples / " + byteBuffer.limit() + " bytes to muxer.");
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
int offset = 0;
for (SampleInfo sampleInfo : sampleInfoList) {
sampleInfo.writeToBufferInfo(bufferInfo, offset);
muxer.writeSampleData(getTrackIndexForSampleType(sampleInfo.sampleType), byteBuffer, bufferInfo);
offset += sampleInfo.size;
}
sampleInfoList.clear();
byteBuffer = null;
}
void writeSampleData(SampleType sampleType, ByteBuffer byteBuf, MediaCodec.BufferInfo bufferInfo) {
if (started) {
muxer.writeSampleData(getTrackIndexForSampleType(sampleType), byteBuf, bufferInfo);
return;
}
byteBuf.limit(bufferInfo.offset + bufferInfo.size);
byteBuf.position(bufferInfo.offset);
if (byteBuffer == null) {
byteBuffer = ByteBuffer.allocateDirect(BUFFER_SIZE).order(ByteOrder.nativeOrder());
}
byteBuffer.put(byteBuf);
sampleInfoList.add(new SampleInfo(sampleType, bufferInfo.size, bufferInfo));
}
private int getTrackIndexForSampleType(SampleType sampleType) {
switch (sampleType) {
case VIDEO:
return videoTrackIndex;
case AUDIO:
return audioTrackIndex;
default:
throw new AssertionError();
}
}
public enum SampleType {VIDEO, AUDIO}
private static class SampleInfo {
private final SampleType sampleType;
private final int size;
private final long presentationTimeUs;
private final int flags;
private SampleInfo(SampleType sampleType, int size, MediaCodec.BufferInfo bufferInfo) {
this.sampleType = sampleType;
this.size = size;
presentationTimeUs = bufferInfo.presentationTimeUs;
flags = bufferInfo.flags;
}
private void writeToBufferInfo(MediaCodec.BufferInfo bufferInfo, int offset) {
bufferInfo.set(offset, size, presentationTimeUs, flags);
}
}
}
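A likely cause, judging from the code above: onSetOutputFormat() runs once per composer, and when the source has audio it can fire the first time with only the video format set. That first call takes the else-if branch, adds the video track, and starts the muxer; the second call (once the audio format arrives) then takes the first branch and calls addTrack() on an already started muxer, which throws "Failed to add the track to the muxer". (In the opposite arrival order, muxer.start() can even run before any track was added.) Below is a minimal sketch of a guard, assuming the engine tells MuxRender up front whether an audio track is expected; the expectAudio flag and its setter are not part of the original class:
private boolean expectAudio; // assumption: set by the engine when it creates an audio composer
void setExpectAudio(boolean expectAudio) {
    this.expectAudio = expectAudio;
}
void onSetOutputFormat() {
    if (started) {
        return; // muxer is already running; addTrack() now would throw
    }
    if (videoFormat == null || (expectAudio && audioFormat == null)) {
        return; // still waiting for the other track's format
    }
    videoTrackIndex = muxer.addTrack(videoFormat);
    if (expectAudio) {
        audioTrackIndex = muxer.addTrack(audioFormat);
    }
    muxer.start();
    started = true;
    // ...then flush the queued samples exactly as the original method does.
}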
Mp4ComposerEngine Class:
class Mp4ComposerEngine {
private static final String TAG = "Mp4ComposerEngine";
private static final double PROGRESS_UNKNOWN = -1.0;
private static final long SLEEP_TO_WAIT_TRACK_TRANSCODERS = 10;
private static final long PROGRESS_INTERVAL_STEPS = 10;
private FileDescriptor inputFileDescriptor;
private VideoComposer videoComposer;
private IAudioComposer audioComposer;
private MediaExtractor mediaExtractor;
private MediaMuxer mediaMuxer;
private ProgressCallback progressCallback;
private long durationUs;
void setDataSource(FileDescriptor fileDescriptor) {
inputFileDescriptor = fileDescriptor;
}
void setProgressCallback(ProgressCallback progressCallback) {
this.progressCallback = progressCallback;
}
void compose(
final String destPath,
final Resolution outputResolution,
final GlFilter filter,
final int bitrate,
final boolean mute,
final Rotation rotation,
final Resolution inputResolution,
final FillMode fillMode,
final FillModeCustomItem fillModeCustomItem,
final int timeScale,
final boolean flipVertical,
final boolean flipHorizontal
) throws IOException {
try {
mediaExtractor = new MediaExtractor();
mediaExtractor.setDataSource(inputFileDescriptor);
if (VERSION.SDK_INT >= VERSION_CODES.JELLY_BEAN_MR2) {
mediaMuxer = new MediaMuxer(destPath, OutputFormat.MUXER_OUTPUT_MPEG_4);
}
MediaMetadataRetriever mediaMetadataRetriever = new MediaMetadataRetriever();
mediaMetadataRetriever.setDataSource(inputFileDescriptor);
try {
durationUs = Long
.parseLong(mediaMetadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION)) * 1000;
} catch (NumberFormatException e) {
durationUs = -1;
}
ObLogger.d(TAG, "Duration (us): " + durationUs);
MediaFormat videoOutputFormat = MediaFormat
.createVideoFormat("video/avc", outputResolution.width(), outputResolution.height());
videoOutputFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
videoOutputFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
videoOutputFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
videoOutputFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, CodecCapabilities.COLOR_FormatSurface);
MuxRender muxRender = new MuxRender(mediaMuxer);
// identify track indices
MediaFormat format = mediaExtractor.getTrackFormat(0);
String mime = format.getString(MediaFormat.KEY_MIME);
final int videoTrackIndex;
final int audioTrackIndex;
if (mime.startsWith("video/")) {
videoTrackIndex = 0;
audioTrackIndex = 1;
} else {
videoTrackIndex = 1;
audioTrackIndex = 0;
}
// setup video composer
videoComposer = new VideoComposer(mediaExtractor, videoTrackIndex, videoOutputFormat, muxRender, timeScale);
videoComposer.setUp(filter, rotation, outputResolution, inputResolution, fillMode, fillModeCustomItem, flipVertical, flipHorizontal);
mediaExtractor.selectTrack(videoTrackIndex);
// setup audio if present and not muted
if (mediaMetadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_HAS_AUDIO) != null && !mute) {
// has Audio video
if (timeScale < 2) {
audioComposer = new AudioComposer(mediaExtractor, audioTrackIndex, muxRender);
} else {
audioComposer = new RemixAudioComposer(mediaExtractor, audioTrackIndex, mediaExtractor.getTrackFormat(audioTrackIndex), muxRender, timeScale);
}
audioComposer.setup();
mediaExtractor.selectTrack(audioTrackIndex);
runPipelines();
} else {
// no audio video
runPipelinesNoAudio();
}
if (VERSION.SDK_INT >= VERSION_CODES.JELLY_BEAN_MR2) {
mediaMuxer.stop();
}
} finally {
try {
if (videoComposer != null) {
videoComposer.release();
videoComposer = null;
}
if (audioComposer != null) {
audioComposer.release();
audioComposer = null;
}
if (mediaExtractor != null) {
mediaExtractor.release();
mediaExtractor = null;
}
} catch (RuntimeException e) {
e.printStackTrace();
// Too fatal to make alive the app, because it may leak native resources.
// throw new Error("Could not shutdown mediaExtractor, codecs and mediaMuxer pipeline.", e);
}
try {
if (mediaMuxer != null) {
if (VERSION.SDK_INT >= VERSION_CODES.JELLY_BEAN_MR2) {
mediaMuxer.release();
}
mediaMuxer = null;
}
} catch (RuntimeException e) {
ObLogger.e(TAG, "Failed to release mediaMuxer.", e);
}
}
}
private void runPipelines() {
long loopCount = 0;
if (durationUs <= 0) {
if (progressCallback != null) {
progressCallback.onProgress(PROGRESS_UNKNOWN);
}// unknown
}
while (!(videoComposer.isFinished() && audioComposer.isFinished())) {
boolean stepped = videoComposer.stepPipeline()
|| audioComposer.stepPipeline();
loopCount++;
if (durationUs > 0 && loopCount % PROGRESS_INTERVAL_STEPS == 0) {
double videoProgress = videoComposer.isFinished() ? 1.0 : Math
.min(1.0, (double) videoComposer.getWrittenPresentationTimeUs() / durationUs);
double audioProgress = audioComposer.isFinished() ? 1.0 : Math
.min(1.0, (double) audioComposer.getWrittenPresentationTimeUs() / durationUs);
double progress = (videoProgress + audioProgress) / 2.0;
if (progressCallback != null) {
progressCallback.onProgress(progress);
}
}
if (!stepped) {
try {
Thread.sleep(SLEEP_TO_WAIT_TRACK_TRANSCODERS);
} catch (InterruptedException e) {
// nothing to do
}
}
}
}
private void runPipelinesNoAudio() {
long loopCount = 0;
if (durationUs <= 0) {
if (progressCallback != null) {
progressCallback.onProgress(PROGRESS_UNKNOWN);
} // unknown
}
while (!videoComposer.isFinished()) {
boolean stepped = videoComposer.stepPipeline();
loopCount++;
if (durationUs > 0 && loopCount % PROGRESS_INTERVAL_STEPS == 0) {
double videoProgress = videoComposer.isFinished() ? 1.0 : Math
.min(1.0, (double) videoComposer.getWrittenPresentationTimeUs() / durationUs);
if (progressCallback != null) {
progressCallback.onProgress(videoProgress);
}
}
if (!stepped) {
try {
Thread.sleep(SLEEP_TO_WAIT_TRACK_TRANSCODERS);
} catch (InterruptedException e) {
// nothing to do
}
}
}
}
interface ProgressCallback {
/**
* Called to notify progress. Same thread which initiated transcode is used.
*
* @param progress Progress in [0.0, 1.0] range, or negative value if progress is unknown.
*/
void onProgress(double progress);
}
}
Mp4Composer Class:
public class Mp4Composer {
private final static String TAG = Mp4Composer.class.getSimpleName();
private final String srcPath;
private final String destPath;
private GlFilter filter;
private Resolution outputResolution;
private int bitrate = -1;
private boolean mute = false;
private Rotation rotation = Rotation.NORMAL;
private Listener listener;
private FillMode fillMode = FillMode.PRESERVE_ASPECT_FIT;
private FillModeCustomItem fillModeCustomItem;
private int timeScale = 1;
private boolean flipVertical = false;
private boolean flipHorizontal = false;
private ExecutorService executorService;
public Mp4Composer(@NonNull final String srcPath, @NonNull final String destPath) {
this.srcPath = srcPath;
this.destPath = destPath;
}
public Mp4Composer filter(@NonNull GlFilter filter) {
this.filter = filter;
return this;
}
public Mp4Composer size(int width, int height) {
this.outputResolution = new Resolution(width, height);
return this;
}
public Mp4Composer videoBitrate(int bitrate) {
this.bitrate = bitrate;
return this;
}
public Mp4Composer mute(boolean mute) {
this.mute = mute;
return this;
}
public Mp4Composer flipVertical(boolean flipVertical) {
this.flipVertical = flipVertical;
return this;
}
public Mp4Composer flipHorizontal(boolean flipHorizontal) {
this.flipHorizontal = flipHorizontal;
return this;
}
public Mp4Composer rotation(@NonNull Rotation rotation) {
this.rotation = rotation;
return this;
}
public Mp4Composer fillMode(@NonNull FillMode fillMode) {
this.fillMode = fillMode;
return this;
}
public Mp4Composer customFillMode(@NonNull FillModeCustomItem fillModeCustomItem) {
this.fillModeCustomItem = fillModeCustomItem;
this.fillMode = FillMode.CUSTOM;
return this;
}
public Mp4Composer listener(@NonNull Listener listener) {
this.listener = listener;
return this;
}
public Mp4Composer timeScale(final int timeScale) {
this.timeScale = timeScale;
return this;
}
private ExecutorService getExecutorService() {
if (executorService == null) {
executorService = Executors.newSingleThreadExecutor();
}
return executorService;
}
public Mp4Composer start() {
getExecutorService().execute(new Runnable() {
@Override
public void run() {
Mp4ComposerEngine engine = new Mp4ComposerEngine();
engine.setProgressCallback(new Mp4ComposerEngine.ProgressCallback() {
@Override
public void onProgress(final double progress) {
if (listener != null) {
listener.onProgress(progress);
}
}
});
final File srcFile = new File(srcPath);
final FileInputStream fileInputStream;
try {
fileInputStream = new FileInputStream(srcFile);
} catch (FileNotFoundException e) {
e.printStackTrace();
if (listener != null) {
listener.onFailed(e);
}
return;
}
try {
engine.setDataSource(fileInputStream.getFD());
} catch (IOException e) {
e.printStackTrace();
if (listener != null) {
listener.onFailed(e);
}
return;
}
final int videoRotate = getVideoRotation(srcPath);
final Resolution srcVideoResolution = getVideoResolution(srcPath, videoRotate);
if (filter == null) {
filter = new GlFilter();
}
if (fillMode == null) {
fillMode = FillMode.PRESERVE_ASPECT_FIT;
}
if (fillModeCustomItem != null) {
fillMode = FillMode.CUSTOM;
}
if (outputResolution == null) {
if (fillMode == FillMode.CUSTOM) {
outputResolution = srcVideoResolution;
} else {
Rotation rotate = Rotation.fromInt(rotation.getRotation() + videoRotate);
if (rotate == Rotation.ROTATION_90 || rotate == Rotation.ROTATION_270) {
outputResolution = new Resolution(srcVideoResolution.height(), srcVideoResolution.width());
} else {
outputResolution = srcVideoResolution;
}
}
}
if (filter instanceof IResolutionFilter) {
((IResolutionFilter) filter).setResolution(outputResolution);
}
if (timeScale < 2) {
timeScale = 1;
}
ObLogger.d(TAG, "rotation = " + (rotation.getRotation() + videoRotate));
ObLogger.d(TAG, "inputResolution width = " + srcVideoResolution.width() + " height = " + srcVideoResolution.height());
ObLogger.d(TAG, "outputResolution width = " + outputResolution.width() + " height = " + outputResolution.height());
ObLogger.d(TAG, "fillMode = " + fillMode);
try {
if (bitrate < 0) {
bitrate = calcBitRate(outputResolution.width(), outputResolution.height());
}
engine.compose(
destPath,
outputResolution,
filter,
bitrate,
mute,
Rotation.fromInt(rotation.getRotation() + videoRotate),
srcVideoResolution,
fillMode,
fillModeCustomItem,
timeScale,
flipVertical,
flipHorizontal
);
} catch (Exception e) {
e.printStackTrace();
if (listener != null) {
listener.onFailed(e);
}
executorService.shutdown();
return;
}
if (listener != null) {
listener.onCompleted();
}
executorService.shutdown();
}
});
return this;
}
public void cancel() {
getExecutorService().shutdownNow();
}
public interface Listener {
/**
* Called to notify progress.
*
* @param progress Progress in [0.0, 1.0] range, or negative value if progress is unknown.
*/
void onProgress(double progress);
/**
* Called when transcode completed.
*/
void onCompleted();
/**
* Called when transcode canceled.
*/
void onCanceled();
void onFailed(Exception exception);
}
private int getVideoRotation(String videoFilePath) {
try {
MediaMetadataRetriever mediaMetadataRetriever = new MediaMetadataRetriever();
mediaMetadataRetriever.setDataSource(videoFilePath);
ObLogger.e("MediaMetadataRetriever", "getVideoRotation error");
String orientation = mediaMetadataRetriever.extractMetadata(
MediaMetadataRetriever.METADATA_KEY_VIDEO_ROTATION);
return Integer.valueOf(orientation);
} catch (NumberFormatException e) {
e.printStackTrace();
} catch (IllegalArgumentException e) {
e.printStackTrace();
}
return 0;
}
private int calcBitRate(int width, int height) {
final int bitrate = (int) (0.25 * 30 * width * height);
ObLogger.i(TAG, "bitrate=" + bitrate);
return bitrate;
}
private Resolution getVideoResolution(final String path, final int rotation) {
int width = 0;
int height = 0;
if (path != null && !path.isEmpty()) {
MediaMetadataRetriever retriever = new MediaMetadataRetriever();
retriever.setDataSource(path);
try {
String Strwidth = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_WIDTH);
String Strheight = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_HEIGHT);
if (Strwidth != null && Strheight != null) {
width = Integer.valueOf(Strwidth);
height = Integer.valueOf(Strheight);
}
retriever.release();
} catch (NumberFormatException e) {
retriever.release();
e.printStackTrace();
} catch (IllegalArgumentException e) {
retriever.release();
e.printStackTrace();
}
}
return new Resolution(width, height);
}
}
Main Activity call to Mp4Composer:
private void startMediaCodec(String srcPath,String outputPath) {
mMp4Composer = new Mp4Composer(srcPath, outputPath)
// .rotation(Rotation.ROTATION_270)
//.size(720, 1280)
.fillMode(FillMode.PRESERVE_ASPECT_FIT)
.filter(MagicFilterFactory.getFilter())
.mute(false)
.flipHorizontal(false)
.flipVertical(false)
.listener(new Listener() {
@Override
public void onProgress(double progress) {
ObLogger.d(TAG, "filterVideo---onProgress: " + (int) (progress * 100));
runOnUiThread(new Runnable() {
@Override
public void run() {
//show progress
}
});
}
@Override
public void onCompleted() {
ObLogger.d(TAG, "filterVideo---onCompleted");
runOnUiThread(new Runnable() {
@Override
public void run() {
ObLogger.i(TAG, "run: Editor Screen is >>> ");
Intent intent = new Intent();
intent.putExtra(Extras.EXTRA_FILTER_SCREEN, outputPath);
setResult(RESULT_OK, intent);
finish();
}
});
}
@Override
public void onCanceled() {
ObLogger.e(TAG, "onCanceled");
NormalProgressDialog.stopLoading();
}
@Override
public void onFailed(Exception exception) {
ObLogger.e(TAG, "filterVideo---onFailed()");
NormalProgressDialog.stopLoading();
// Toast.makeText(TrimVideoActivity.this, "Video processing failed", Toast.LENGTH_SHORT).show();
}
})
.start();
}
I tried the links below, but my problem is not solved.
https://stackoverflow.com/a/53140941/11138845
https://stackoverflow.com/a/21759073/11138845
Related
In CameraX image analysis I call setTargetResolution(new Size(2560, 800)), but in the Analyzer imageProxy.getImage().getWidth() = 1280 and getHeight() = 400, and YUVToByte(imageProxy.getImage()).length = 768000. With the old Camera API, parameters.setPreviewSize(2560, 800) gives a byte[] length in onPreviewFrame() of 3072000 (which equals 768000 * (2560/1280) * (800/400)). How can I make the CameraX Analyzer deliver getWidth() = 2560 and getHeight() = 800, with YUVToByte(imageProxy.getImage()).length = 3072000? Also, in my CameraX onPreviewFrame() the result res is always null, while in the Camera onPreviewFrame() res gets the correct value. What is the difference between CameraX and Camera here, and what should I do in CameraX?
CameraX:
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main2);
try {
copyDataBase();
} catch (IOException e) {
e.printStackTrace();
}
getSupportActionBar().hide();
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
viewFinder = findViewById(R.id.previewView);
executor = Executors.newSingleThreadExecutor();
if (!allPermissionGranted()) {
ActivityCompat.requestPermissions(this, REQUIRED_PERMISSIONS, REQUEST_CODE_PERMISSIONS);
}
DisplayMetrics metric = new DisplayMetrics();
getWindowManager().getDefaultDisplay().getMetrics(metric);
width = metric.widthPixels;
height = metric.heightPixels;
l = 100;
r = width - 100;
ntmpH = (r - l) * 58 / 100;
t = (height - ntmpH) / 2;
b = t + ntmpH;
double proportion = (double) width / (double) preHeight;
double hproportion = (double) height / (double) preWidth;
l = (int) (l / proportion);
t = (int) (t / hproportion);
r = (int) (r / proportion);
b = (int) (b / hproportion);
m_ROI[0] = l;
m_ROI[1] = t;
m_ROI[2] = r;
m_ROI[3] = b;
cameraProviderFuture = ProcessCameraProvider.getInstance(this);
cameraProviderFuture.addListener(() -> {
try {
ProcessCameraProvider cameraProvider = cameraProviderFuture.get();
#SuppressLint("RestrictedApi") Preview preview = new Preview.Builder().build();
CameraSelector cameraSelector = new CameraSelector.Builder().
requireLensFacing(CameraSelector.LENS_FACING_BACK).build();
ImageAnalysis imageAnalysis = new ImageAnalysis.Builder()
.setTargetResolution(new Size(2560, 800))
.setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
.setTargetRotation(Surface.ROTATION_90)
.build();
imageAnalysis.setAnalyzer(executor, new ImageAnalysis.Analyzer() {
#SuppressLint("UnsafeExperimentalUsageError")
#Override
public void analyze(#NonNull ImageProxy imageProxy) {
if (imageProxy.getFormat() == ImageFormat.YUV_420_888) {
image = imageProxy.getImage();
bIninKernal();
Log.d("Size ", image.getWidth() + "/" + image.getHeight());
onPreviewFrame(YUVToByte(image));
} else {
Log.d("Status ", "照片格式錯誤" + imageProxy.getFormat());
}
imageProxy.close();
}
});
cameraProvider.bindToLifecycle(this, cameraSelector, preview, imageAnalysis);
preview.setSurfaceProvider(viewFinder.createSurfaceProvider());
} catch (ExecutionException | InterruptedException e) {
e.printStackTrace();
}
}, ContextCompat.getMainExecutor(this));
}
@NonNull
@Override
public CameraXConfig getCameraXConfig() {
return Camera2Config.defaultConfig();
}
private boolean allPermissionGranted() {
for (String permission : REQUIRED_PERMISSIONS) {
if (ContextCompat.checkSelfPermission(this, permission) != PackageManager.PERMISSION_GRANTED) {
return false;
}
}
return true;
}
private byte[] YUVToByte(Image image) {
Image.Plane[] planes = image.getPlanes();
ByteBuffer buffer0 = planes[0].getBuffer();
ByteBuffer buffer1 = planes[1].getBuffer();
ByteBuffer buffer2 = planes[2].getBuffer();
int width = image.getWidth();
int height = image.getHeight();
byte[] data = new byte[image.getWidth() * image.getHeight() * ImageFormat.getBitsPerPixel(ImageFormat.YUV_420_888) / 8];
byte[] rowData1 = new byte[planes[1].getRowStride()];
byte[] rowData2 = new byte[planes[2].getRowStride()];
int bytesPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.YUV_420_888) / 8;
// loop via rows of u/v channels
int offsetY = 0;
int sizeY = width * height * bytesPerPixel;
int sizeUV = (width * height * bytesPerPixel) / 4;
for (int row = 0; row < height; row++) {
// fill data for Y channel, two row
{
int length = bytesPerPixel * width;
buffer0.get(data, offsetY, length);
if (height - row != 1)
buffer0.position(buffer0.position() + planes[0].getRowStride() - length);
offsetY += length;
}
if (row >= height / 2)
continue;
{
int uvlength = planes[1].getRowStride();
if ((height / 2 - row) == 1) {
uvlength = width / 2 - planes[1].getPixelStride() + 1;
}
buffer1.get(rowData1, 0, uvlength);
buffer2.get(rowData2, 0, uvlength);
// fill data for u/v channels
for (int col = 0; col < width / 2; ++col) {
// u channel
data[sizeY + (row * width) / 2 + col] = rowData1[col * planes[1].getPixelStride()];
// v channel
data[sizeY + sizeUV + (row * width) / 2 + col] = rowData2[col * planes[2].getPixelStride()];
}
}
}
return data;
}
private void bIninKernal() {
api = new LPR();
String FilePath = Environment.getExternalStorageDirectory().toString() + "/lpr.key";
int nRet = api.Init(this, m_ROI[0], m_ROI[1], m_ROI[2], m_ROI[3], preHeight, preWidth, FilePath);
if (nRet != 0) {
bInitKernal = false;
Log.d("Status ", "相機開啟失敗");
} else {
bInitKernal = true;
}
}
private void onPreviewFrame(byte[] data) {
bIninKernal();
tackData = data;
Log.d("data length ", data.length + "");
resultStr = "";
if (!leaving && bInitKernal) {
byte[] result;
String res = "";
result = api.VideoRec(tackData, 1280, 400, 1);
try {
res = new String(result, "gb2312");
} catch (UnsupportedEncodingException e) {
e.printStackTrace();
}
if (res != null && !"".equals(res.trim())) {
resultStr = res.trim();
if (resultStr != "") {
leaving = true;
MediaActionSound sound = new MediaActionSound();
sound.play(MediaActionSound.SHUTTER_CLICK);
Log.d("Status ", "辨識成功");
Log.d("車牌號碼", resultStr);
Thread thread = new Thread(Image_update);
thread.start();
try {
thread.join();
} catch (InterruptedException e) {
e.printStackTrace();
}
Intent intent = new Intent(MainActivity2.this, MainActivity.class);
intent.putExtra("result", resultStr);
Log.d("result", resultStr);
setResult(1, intent);
finish();
}
} else {
Log.d("Status ", "未分辨車牌號碼,請重拍");
}
}
}
public void copyDataBase() throws IOException {
// Common common = new Common();
// get the SD card path /lpr.key
String dst = Environment.getExternalStorageDirectory().toString() + "/lpr.key";
File file = new File(dst);
if (!file.exists()) {
// file.createNewFile();
} else {
file.delete();
}
Log.d("File Name", file.toString());
try {
InputStream myInput = getAssets().open("lpr.key");
OutputStream myOutput = new FileOutputStream(dst);
byte[] buffer = new byte[1024];
int length;
while ((length = myInput.read(buffer)) > 0) {
myOutput.write(buffer, 0, length);
}
myOutput.flush();
myOutput.close();
myInput.close();
} catch (Exception e) {
System.out.println("lpr.key" + "is not found");
}
}
private Runnable Image_update = new Runnable() {
Retrofit retrofit = new Retrofit.Builder()
.baseUrl("https://0d9dccd7eac8.ngrok.io/")
.addConverterFactory(GsonConverterFactory.create())
.build();
MyAPIService myAPIService = retrofit.create(MyAPIService.class);
@Override
public void run() {
Log.d("Status ", "run");
Log.d("resultStr ", resultStr);
String url = "D:\\Images\\license_plate\\";
String imgStr = bitmap2base64(toBitmap(image));
LicensePlate licensePlate = new LicensePlate();
licensePlate.setsPlate(resultStr);
licensePlate.setsPicPosition(url + resultStr);
licensePlate.setImgStr(imgStr);
Call<LicensePlate> call = myAPIService.uploadLicensePlate(licensePlate);
call.enqueue(new Callback<LicensePlate>() {
@Override
public void onResponse(Call<LicensePlate> call, Response<LicensePlate> response) {
if (response.isSuccessful()) {
Log.d("Status ", "photo upload succeeded");
} else {
Log.d("Status ", "photo upload failed");
Log.d("response code ", response.code() + "");
}
}
@Override
public void onFailure(Call<LicensePlate> call, Throwable t) {
Log.d("Status ", "onFailure");
Log.d("Message ", t.getMessage());
}
});
}
};
private String bitmap2base64(Bitmap bitmap){
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
bitmap.compress(Bitmap.CompressFormat.PNG, 100, outputStream);
return Base64.encodeToString(outputStream.toByteArray(), Base64.DEFAULT).trim().replaceAll("\n", "").replaceAll("\r", "");
}
private Bitmap toBitmap(Image image) {
Image.Plane[] planes = image.getPlanes();
ByteBuffer yBuffer = planes[0].getBuffer();
ByteBuffer uBuffer = planes[1].getBuffer();
ByteBuffer vBuffer = planes[2].getBuffer();
int ySize = yBuffer.remaining();
int uSize = uBuffer.remaining();
int vSize = vBuffer.remaining();
byte[] nv21 = new byte[ySize + uSize + vSize];
//U and V are swapped
yBuffer.get(nv21, 0, ySize);
vBuffer.get(nv21, ySize, vSize);
uBuffer.get(nv21, ySize + vSize, uSize);
YuvImage yuvImage = new YuvImage(nv21, ImageFormat.NV21, image.getWidth(), image.getHeight(), null);
ByteArrayOutputStream out = new ByteArrayOutputStream();
yuvImage.compressToJpeg(new Rect(0, 0, yuvImage.getWidth(), yuvImage.getHeight()), 75, out);
byte[] imageBytes = out.toByteArray();
return BitmapFactory.decodeByteArray(imageBytes, 0, imageBytes.length);
}
}
Camera
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE); // hide the title bar
DisplayMetrics metric = new DisplayMetrics();
getWindowManager().getDefaultDisplay().getMetrics(metric);
int metricwidth = metric.widthPixels; // screen width (pixels)
int metricheight = metric.heightPixels; // screen height (pixels)
try {
copyDataBase();
} catch (IOException e) {
e.printStackTrace();
}
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT); // portrait
Configuration cf = this.getResources().getConfiguration(); // get the current configuration
int noriention = cf.orientation;
requestWindowFeature(Window.FEATURE_NO_TITLE); // hide the title bar
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN); // set full screen
// keep the screen on
getWindow().setFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON,
WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.activity_lpr2);
findView();
}
private void findView() {
surfaceView = (SurfaceView) findViewById(R.id.surfaceView);
re_c = (RelativeLayout) findViewById(R.id.re_c);
DisplayMetrics metric = new DisplayMetrics();
getWindowManager().getDefaultDisplay().getMetrics(metric);
width = metric.widthPixels; // 屏幕宽度(像素)
height = metric.heightPixels; // 屏幕高度(像素)
if(myView==null)
{
if (isFatty)
{
myView = new LPRfinderView2(LPR2Activity.this, width, height, isFatty);
}
else
{
myView = new LPRfinderView2(LPR2Activity.this, width, height);
}
re_c.addView(myView);
}
surfaceHolder = surfaceView.getHolder();
surfaceHolder.addCallback(LPR2Activity.this);
surfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
surfaceView.setFocusable(true);
//surfaceView.invalidate();
}
public void copyDataBase() throws IOException {
// Common common = new Common();
// get the SD card path /lpr.key
String dst = Environment.getExternalStorageDirectory().toString() + "/lpr.key";
File file = new File(dst);
if (!file.exists()) {
// file.createNewFile();
} else {
file.delete();
}
Log.d("File Name", file.toString());
try {
InputStream myInput = getAssets().open("lpr.key");
OutputStream myOutput = new FileOutputStream(dst);
byte[] buffer = new byte[1024];
int length;
while ((length = myInput.read(buffer)) > 0) {
myOutput.write(buffer, 0, length);
}
myOutput.flush();
myOutput.close();
myInput.close();
} catch (Exception e) {
System.out.println("lpr.key" + "is not found");
}
}
public void surfaceCreated(SurfaceHolder holder) {
if (mycamera == null) {
try {
mycamera = Camera.open();
} catch (Exception e) {
e.printStackTrace();
String mess = "打开摄像头失败";
Toast.makeText(getApplicationContext(), mess, Toast.LENGTH_LONG).show();
return;
}
}
if(mycamera!=null)
{
try {
mycamera.setPreviewDisplay(holder);
timer2 = new Timer();
if (timer == null)
{
timer = new TimerTask()
{
public void run()
{
if (mycamera != null)
{
try
{
mycamera.autoFocus(new AutoFocusCallback()
{
public void onAutoFocus(boolean success, Camera camera)
{
}
});
}
catch (Exception e)
{
e.printStackTrace();
}
}
};
};
}
timer2.schedule(timer, 500, 2500);
initCamera();
//mycamera.startPreview();
//mycamera.autoFocus(null);
} catch (IOException e) {
e.printStackTrace();
}
}
if(api==null)
{
api= new LPR();
String FilePath =Environment.getExternalStorageDirectory().toString()+"/lpr.key";
int nRet = api.Init(this,m_ROI[0], m_ROI[1], m_ROI[2], m_ROI[3], preHeight, preWidth,FilePath);
if(nRet!=0)
{
Toast.makeText(getApplicationContext(), "啟動失敗,請調整時間", Toast.LENGTH_SHORT).show();
Log.d("nRet ", nRet + "");
bInitKernal =false;
}
else
{
bInitKernal=true;
}
}
if(alertDialog==null){
alertDialog = new AlertDialog.Builder(this).create();
alertDialoginfo = new AlertDialog.Builder(this).create();
}
}
@Override
public void surfaceChanged(final SurfaceHolder holder, int format, int width, int height) {
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
try {
if (mycamera != null) {
mycamera.setPreviewCallback(null);
mycamera.stopPreview();
mycamera.release();
mycamera = null;
}
} catch (Exception e) {
}
if(bInitKernal){
bInitKernal=false;
api = null;
}
if(toast!=null){
toast.cancel();
toast = null;
}
if(timer2!=null){
timer2.cancel();
timer2=null;
}
if(alertDialog!=null)
{
alertDialog.dismiss();
alertDialog.cancel();
alertDialog=null;
}
}
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK) {
try {
if (mycamera != null) {
mycamera.setPreviewCallback(null);
mycamera.stopPreview();
mycamera.release();
mycamera = null;
}
} catch (Exception e) {
e.printStackTrace();
}
if(bInitKernal)
{
bInitKernal=false;
api = null;
}
finish();
if(toast!=null){
toast.cancel();
toast=null;
}
if(timer2!=null){
timer2.cancel();
timer2=null;
}
if(alertDialog!=null)
{
alertDialog.cancel();
alertDialog=null;
}
}
return super.onKeyDown(keyCode, event);
}
@TargetApi(14)
private void initCamera() {
Camera.Parameters parameters = mycamera.getParameters();
List<Camera.Size> list = parameters.getSupportedPreviewSizes();
preWidth = list.get(4).width;
preHeight = list.get(4).height;
parameters.setPictureFormat(PixelFormat.JPEG);
parameters.setPreviewSize(preWidth,preHeight);
if (!bROI) {
int l,t,r,b;
l = 100;
r = width-100;
int ntmpH =(r-l)*58/100;
t = (height-ntmpH)/2;
b = t+ntmpH;
double proportion = (double) width / (double) preHeight;
double hproportion=(double)height/(double) preWidth;
l = (int) (l /proportion);
t = (int) (t /hproportion);
r = (int) (r /proportion);
b = (int) (b / hproportion);
m_ROI[0]=l;
m_ROI[1]=t;
m_ROI[2]=r;
m_ROI[3]=b;
bROI = true;
}
if (parameters.getSupportedFocusModes().contains(
parameters.FOCUS_MODE_AUTO))
{
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO); // continuous autofocus
}
mycamera.setPreviewCallback(LPR2Activity.this);
mycamera.setParameters(parameters);
mycamera.setDisplayOrientation(90); // typical devices
mycamera.startPreview();
}
public void onPreviewFrame(byte[] data, Camera camera) {
tackData = data;
Log.d("data length ", data.length + "");
ByteArrayInputStream bis = new ByteArrayInputStream(data);
resultStr = "";
if (!leaving&& bInitKernal ) {
Log.d("Status ", "開始判斷");
byte result[];//[] = new byte[10];
String res="";
result = api.VideoRec(tackData, preWidth, preHeight, 1);
Log.d("preWidth ", preWidth + "");
Log.d("preHeight", preHeight + "");
Log.d("width ", width + "");
Log.d("height", height + "");
try {
res = new String(result,"gb2312");
Log.d("try ", res);
} catch (UnsupportedEncodingException e) {
// TODO Auto-generated catch block
Log.d("Exception ", e.getMessage());
e.printStackTrace();
}
Log.d("res ", res);
if(res!=null&&!"".equals(res.trim()))
{
Camera.Parameters parameters = mycamera.getParameters();
resultStr =res.trim();
if (resultStr != "") {
leaving = true;
//拍照音效
MediaActionSound sound = null;
if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.JELLY_BEAN) {
sound = new MediaActionSound();
sound.play(MediaActionSound.SHUTTER_CLICK);
}
Intent intent = new Intent();
intent.putExtra("strPltNo",resultStr);
setResult(2,intent);
finish();
}else{
Log.d("Status ", "未分配車牌號碼,請重拍");
}
}
}
}
@RequiresApi(api = Build.VERSION_CODES.CUPCAKE)
public String pictureName() {
String str = "";
Time t = new Time();
t.setToNow(); // get the system time
int year = t.year;
int month = t.month + 1;
int date = t.monthDay;
int hour = t.hour; // 0-23
int minute = t.minute;
int second = t.second;
if (month < 10)
str = String.valueOf(year) + "0" + String.valueOf(month);
else {
str = String.valueOf(year) + String.valueOf(month);
}
if (date < 10)
str = str + "0" + String.valueOf(date + "_");
else {
str = str + String.valueOf(date + "_");
}
if (hour < 10)
str = str + "0" + String.valueOf(hour);
else {
str = str + String.valueOf(hour);
}
if (minute < 10)
str = str + "0" + String.valueOf(minute);
else {
str = str + String.valueOf(minute);
}
if (second < 10)
str = str + "0" + String.valueOf(second);
else {
str = str + String.valueOf(second);
}
return str;
}
public void Leave(View view) {
Intent intent = new Intent();
intent.putExtra("strPltNo","");
setResult(3,intent);
finish();
}
}
CameraX Log
Camera Log
With regards to the image analysis resolution, the documentation of ImageAnalysis.Builder.setTargetResolution() states that:
The maximum available resolution that could be selected for an
ImageAnalysis is limited to be under 1080p.
So setting a size of 2560x800 won't work as you expect. Instead, CameraX seems to select the largest ImageAnalysis resolution that has the same aspect ratio you requested (2560/800 = 1280/400).
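For reference, a minimal sketch of the matching builder call: requesting 1280x400 (the size CameraX already falls back to) keeps the analysis stream under the documented cap and makes the buffer size predictable, so a call like api.VideoRec(tackData, 1280, 400, 1) matches the frames actually delivered:
ImageAnalysis imageAnalysis = new ImageAnalysis.Builder()
        .setTargetResolution(new Size(1280, 400)) // at or below the 1080p analysis cap, same 16:5 ratio
        .setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
        .setTargetRotation(Surface.ROTATION_90)
        .build();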
Please help me! I used the example in https://github.com/pchab/AndroidRTC to stream video and audio from one Android device to another. The example uses two libraries, libjingle_peerConnection and a Socket.IO client, but I don't know how to save the streaming data in H.264 format.
After a lot of attempts and hard work on this project, I found a solution for saving the video as MP4 without any problems.
Add this VideoFileRenderer.java to your project:
package org.webrtc;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import android.view.Surface;
import org.webrtc.audio.AudioDeviceModule;
import org.webrtc.audio.JavaAudioDeviceModule;
import org.webrtc.audio.JavaAudioDeviceModule.SamplesReadyCallback;
import java.io.IOException;
import java.nio.ByteBuffer;
public class VideoFileRenderer implements VideoSink, SamplesReadyCallback {
private static final String TAG = "VideoFileRenderer";
private final HandlerThread renderThread;
private final Handler renderThreadHandler;
private final HandlerThread audioThread;
private final Handler audioThreadHandler;
private int outputFileWidth = -1;
private int outputFileHeight = -1;
private ByteBuffer[] encoderOutputBuffers;
private ByteBuffer[] audioInputBuffers;
private ByteBuffer[] audioOutputBuffers;
private EglBase eglBase;
private EglBase.Context sharedContext;
private VideoFrameDrawer frameDrawer;
// TODO: these ought to be configurable as well
private static final String MIME_TYPE = "video/avc"; // H.264 Advanced Video Coding
private static final int FRAME_RATE = 30; // 30fps
private static final int IFRAME_INTERVAL = 5; // 5 seconds between I-frames
private MediaMuxer mediaMuxer;
private MediaCodec encoder;
private MediaCodec.BufferInfo bufferInfo, audioBufferInfo;
private int trackIndex = -1;
private int audioTrackIndex;
private boolean isRunning = true;
private GlRectDrawer drawer;
private Surface surface;
private MediaCodec audioEncoder;
private AudioDeviceModule audioDeviceModule;
public VideoFileRenderer(String outputFile, final EglBase.Context sharedContext, boolean withAudio) throws IOException {
renderThread = new HandlerThread(TAG + "RenderThread");
renderThread.start();
renderThreadHandler = new Handler(renderThread.getLooper());
if (withAudio) {
audioThread = new HandlerThread(TAG + "AudioThread");
audioThread.start();
audioThreadHandler = new Handler(audioThread.getLooper());
} else {
audioThread = null;
audioThreadHandler = null;
}
bufferInfo = new MediaCodec.BufferInfo();
this.sharedContext = sharedContext;
// Create a MediaMuxer. We can't add the video track and start() the muxer here,
// because our MediaFormat doesn't have the Magic Goodies. These can only be
// obtained from the encoder after it has started processing data.
mediaMuxer = new MediaMuxer(outputFile,
MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
audioTrackIndex = withAudio ? -1 : 0;
}
private void initVideoEncoder() {
MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, outputFileWidth, outputFileHeight);
// Set some properties. Failing to specify some of these can cause the MediaCodec
// configure() call to throw an unhelpful exception.
format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
format.setInteger(MediaFormat.KEY_BIT_RATE, 6000000);
format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
// Create a MediaCodec encoder and configure it with our format. Get a Surface
// we can use for input and wrap it with a class that handles the EGL work.
try {
encoder = MediaCodec.createEncoderByType(MIME_TYPE);
encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
renderThreadHandler.post(() -> {
eglBase = EglBase.create(sharedContext, EglBase.CONFIG_RECORDABLE);
surface = encoder.createInputSurface();
eglBase.createSurface(surface);
eglBase.makeCurrent();
drawer = new GlRectDrawer();
});
} catch (Exception e) {
Log.wtf(TAG, e);
}
}
@Override
public void onFrame(VideoFrame frame) {
frame.retain();
if (outputFileWidth == -1) {
outputFileWidth = frame.getRotatedWidth();
outputFileHeight = frame.getRotatedHeight();
initVideoEncoder();
}
renderThreadHandler.post(() -> renderFrameOnRenderThread(frame));
}
private void renderFrameOnRenderThread(VideoFrame frame) {
if (frameDrawer == null) {
frameDrawer = new VideoFrameDrawer();
}
frameDrawer.drawFrame(frame, drawer, null, 0, 0, outputFileWidth, outputFileHeight);
frame.release();
drainEncoder();
eglBase.swapBuffers();
}
/**
* Release all resources. All already posted frames will be rendered first.
*/
public void release() {
isRunning = false;
if (audioThreadHandler != null)
audioThreadHandler.post(() -> {
if (audioEncoder != null) {
audioEncoder.stop();
audioEncoder.release();
}
audioThread.quit();
});
renderThreadHandler.post(() -> {
if (encoder != null) {
encoder.stop();
encoder.release();
}
eglBase.release();
mediaMuxer.stop();
mediaMuxer.release();
renderThread.quit();
});
}
private boolean encoderStarted = false;
private volatile boolean muxerStarted = false;
private long videoFrameStart = 0;
private void drainEncoder() {
if (!encoderStarted) {
encoder.start();
encoderOutputBuffers = encoder.getOutputBuffers();
encoderStarted = true;
return;
}
while (true) {
int encoderStatus = encoder.dequeueOutputBuffer(bufferInfo, 10000);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
break;
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
// not expected for an encoder
encoderOutputBuffers = encoder.getOutputBuffers();
Log.e(TAG, "encoder output buffers changed");
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// not expected for an encoder
MediaFormat newFormat = encoder.getOutputFormat();
Log.e(TAG, "encoder output format changed: " + newFormat);
trackIndex = mediaMuxer.addTrack(newFormat);
if (audioTrackIndex != -1 && !muxerStarted) {
mediaMuxer.start();
muxerStarted = true;
}
if (!muxerStarted)
break;
} else if (encoderStatus < 0) {
Log.e(TAG, "unexpected result fr om encoder.dequeueOutputBuffer: " + encoderStatus);
} else { // encoderStatus >= 0
try {
ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
if (encodedData == null) {
Log.e(TAG, "encoderOutputBuffer " + encoderStatus + " was null");
break;
}
// It's usually necessary to adjust the ByteBuffer values to match BufferInfo.
encodedData.position(bufferInfo.offset);
encodedData.limit(bufferInfo.offset + bufferInfo.size);
if (videoFrameStart == 0 && bufferInfo.presentationTimeUs != 0) {
videoFrameStart = bufferInfo.presentationTimeUs;
}
bufferInfo.presentationTimeUs -= videoFrameStart;
if (muxerStarted)
mediaMuxer.writeSampleData(trackIndex, encodedData, bufferInfo);
isRunning = isRunning && (bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) == 0;
encoder.releaseOutputBuffer(encoderStatus, false);
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
break;
}
} catch (Exception e) {
Log.wtf(TAG, e);
break;
}
}
}
}
private long presTime = 0L;
private void drainAudio() {
if (audioBufferInfo == null)
audioBufferInfo = new MediaCodec.BufferInfo();
while (true) {
int encoderStatus = audioEncoder.dequeueOutputBuffer(audioBufferInfo, 10000);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
break;
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
// not expected for an encoder
audioOutputBuffers = audioEncoder.getOutputBuffers();
Log.w(TAG, "encoder output buffers changed");
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// not expected for an encoder
MediaFormat newFormat = audioEncoder.getOutputFormat();
Log.w(TAG, "encoder output format changed: " + newFormat);
audioTrackIndex = mediaMuxer.addTrack(newFormat);
if (trackIndex != -1 && !muxerStarted) {
mediaMuxer.start();
muxerStarted = true;
}
if (!muxerStarted)
break;
} else if (encoderStatus < 0) {
Log.e(TAG, "unexpected result fr om encoder.dequeueOutputBuffer: " + encoderStatus);
} else { // encoderStatus >= 0
try {
ByteBuffer encodedData = audioOutputBuffers[encoderStatus];
if (encodedData == null) {
Log.e(TAG, "encoderOutputBuffer " + encoderStatus + " was null");
break;
}
// It's usually necessary to adjust the ByteBuffer values to match BufferInfo.
encodedData.position(audioBufferInfo.offset);
encodedData.limit(audioBufferInfo.offset + audioBufferInfo.size);
if (muxerStarted)
mediaMuxer.writeSampleData(audioTrackIndex, encodedData, audioBufferInfo);
isRunning = isRunning && (audioBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) == 0;
audioEncoder.releaseOutputBuffer(encoderStatus, false);
if ((audioBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
break;
}
} catch (Exception e) {
Log.wtf(TAG, e);
break;
}
}
}
}
@Override
public void onWebRtcAudioRecordSamplesReady(JavaAudioDeviceModule.AudioSamples audioSamples) {
if (!isRunning)
return;
audioThreadHandler.post(() -> {
if (audioEncoder == null) try {
audioEncoder = MediaCodec.createEncoderByType("audio/mp4a-latm");
MediaFormat format = new MediaFormat();
format.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm");
format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, audioSamples.getChannelCount());
format.setInteger(MediaFormat.KEY_SAMPLE_RATE, audioSamples.getSampleRate());
format.setInteger(MediaFormat.KEY_BIT_RATE, 64 * 1024);
format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
audioEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
audioEncoder.start();
audioInputBuffers = audioEncoder.getInputBuffers();
audioOutputBuffers = audioEncoder.getOutputBuffers();
} catch (IOException exception) {
Log.wtf(TAG, exception);
}
int bufferIndex = audioEncoder.dequeueInputBuffer(0);
if (bufferIndex >= 0) {
ByteBuffer buffer = audioInputBuffers[bufferIndex];
buffer.clear();
byte[] data = audioSamples.getData();
buffer.put(data);
audioEncoder.queueInputBuffer(bufferIndex, 0, data.length, presTime, 0);
presTime += data.length * 125 / 12; // µs = bytes / 2 (16-bit samples) / 48000 Hz * 1e6 = bytes * 125 / 12 (assumes 48 kHz mono)
}
drainAudio();
});
}
}
Then add this MediaRecorderImpl.java implementation for the recording:
package com.vedja.hassan.kavandeh_master.utils;
import android.support.annotation.Nullable;
import android.util.Log;
import com.vedja.hassan.kavandeh_master.utils.utils.EglUtils;
import org.webrtc.VideoFileRenderer;
import org.webrtc.VideoTrack;
import org.webrtc.audio.AudioDeviceModule;
import org.webrtc.audio.JavaAudioDeviceModule;
import java.io.File;
public class MediaRecorderImpl {
private final Integer id;
private final VideoTrack videoTrack;
private final AudioSamplesInterceptor audioInterceptor;
private VideoFileRenderer videoFileRenderer;
private boolean isRunning = false;
private File recordFile;
public MediaRecorderImpl(Integer id, @Nullable VideoTrack videoTrack, @Nullable AudioSamplesInterceptor audioInterceptor) {
this.id = id;
this.videoTrack = videoTrack;
this.audioInterceptor = audioInterceptor;
}
public void startRecording(File file) throws Exception {
recordFile = file;
if (isRunning)
return;
isRunning = true;
//noinspection ResultOfMethodCallIgnored
file.getParentFile().mkdirs();
if (videoTrack != null) {
videoFileRenderer = new VideoFileRenderer(
file.getAbsolutePath(),
EglUtils.getRootEglBaseContext(),
audioInterceptor != null
);
videoTrack.addSink(videoFileRenderer);
if (audioInterceptor != null)
audioInterceptor.attachCallback(id, videoFileRenderer);
} else {
Log.e(TAG, "Video track is null");
if (audioInterceptor != null) {
//TODO(rostopira): audio only recording
throw new Exception("Audio-only recording not implemented yet");
}
}
}
public File getRecordFile() { return recordFile; }
public void stopRecording() {
isRunning = false;
if (audioInterceptor != null)
audioInterceptor.detachCallback(id);
if (videoTrack != null && videoFileRenderer != null) {
videoTrack.removeSink(videoFileRenderer);
videoFileRenderer.release();
videoFileRenderer = null;
}
}
private static final String TAG = "MediaRecorderImpl";
}
Then use the classes above with this code:
final AudioSamplesInterceptor inputSamplesInterceptor = new AudioSamplesInterceptor();
private OutputAudioSamplesInterceptor outputSamplesInterceptor = null;
private final SparseArray<MediaRecorderImpl> mediaRecorders = new SparseArray<>();
void startRecordingToFile(String path, Integer id, @Nullable VideoTrack videoTrack, @Nullable AudioChannel audioChannel) throws Exception {
AudioSamplesInterceptor interceptor = null;
if (audioChannel == AudioChannel.INPUT)
interceptor = inputSamplesInterceptor;
else if (audioChannel == AudioChannel.OUTPUT) {
if (outputSamplesInterceptor == null)
outputSamplesInterceptor = new OutputAudioSamplesInterceptor(audioDeviceModule);
interceptor = outputSamplesInterceptor;
}
MediaRecorderImpl mediaRecorder = new MediaRecorderImpl(id, videoTrack, interceptor);
mediaRecorder.startRecording(new File(path));
mediaRecorders.append(id, mediaRecorder);
}
void stopRecording(Integer id) {
MediaRecorderImpl mediaRecorder = mediaRecorders.get(id);
if (mediaRecorder != null) {
mediaRecorder.stopRecording();
mediaRecorders.remove(id);
File file = mediaRecorder.getRecordFile();
if (file != null) {
ContentValues values = new ContentValues(3);
values.put(MediaStore.Video.Media.TITLE, file.getName());
values.put(MediaStore.Video.Media.MIME_TYPE, "video/mp4");
values.put(MediaStore.Video.Media.DATA, file.getAbsolutePath());
getContentResolver().insert(MediaStore.Video.Media.EXTERNAL_CONTENT_URI, values);
}
}
}
Finally, use it like this:
try {
VedjaSharedPreference sharedPreference = new VedjaSharedPreference(getContext());
final File dir = new File(sharedPreference.getStringParam(StaticParameter.SAVING_URL) + "/audio/");
dir.mkdirs(); //create folders where write files
final File file = new File(dir, "Vedja-".concat(String.valueOf(System.currentTimeMillis())).concat(".mp3"));
VideoTrack videoTrack = null;
MediaStreamTrack track = slaveManagerActivity.remoteStream.videoTracks.get(0);
if (track instanceof VideoTrack)
videoTrack = (VideoTrack) track;
AudioChannel audioChannel = AudioChannel.OUTPUT;
slaveManagerActivity.startRecordingToFile(file.getPath(), 1, videoTrack, audioChannel);
} catch (Exception e) {
e.printStackTrace();
throw new RuntimeException(
"Failed to open video file for output: ", e);
}
After copying this code, some classes may not be found in your project; you can search for them on the internet. The project below also contains a VideoFileRenderer class that you can use to save video to a file:
https://github.com/Piasy/AppRTC-Android
I have a device info app on Google Play, and within the app I show storage information. I know that in Android 4.4 there have been some changes regarding access to external SD cards. Internal storage doesn't seem to give me a problem. My question is: how can I reliably get the size of SD cards on KitKat?
I have the required permissions listed, as this worked fine on earlier versions of Android. I have searched here on SO, and I always seem to get one of the same errors. I do not need to write to SD cards, only read their size availability.
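For KitKat specifically, a minimal sketch of the API 19 route, assuming a Context is in scope: Context.getExternalFilesDirs(null) returns one entry per storage volume (index 0 is the primary, usually emulated, storage; later entries are typically removable SD cards), and reading sizes from those paths needs no extra permission:
File[] dirs = context.getExternalFilesDirs(null);
for (File dir : dirs) {
    if (dir == null) continue; // volume not currently mounted
    StatFs stat = new StatFs(dir.getPath());
    long total = stat.getBlockCountLong() * stat.getBlockSizeLong();
    long free = stat.getAvailableBlocksLong() * stat.getBlockSizeLong();
    Log.d("StorageUtils", dir.getPath() + " total=" + total + " free=" + free);
}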
I have a StorageUtils class that came from SO, sorry I can't remember the link.
public class StorageUtils {
private static final String TAG = "StorageUtils";
public static class StorageInfo {
public final String path;
public final boolean internal;
public final boolean readonly;
public final int display_number;
StorageInfo(String path, boolean internal, boolean readonly,
int display_number) {
this.path = path;
this.internal = internal;
this.readonly = readonly;
this.display_number = display_number;
}
}
public static ArrayList<StorageInfo> getStorageList() {
ArrayList<StorageInfo> list = new ArrayList<StorageInfo>();
String def_path = Environment.getExternalStorageDirectory().getPath();
boolean def_path_internal = !Environment.isExternalStorageRemovable();
String def_path_state = Environment.getExternalStorageState();
boolean def_path_available = def_path_state
.equals(Environment.MEDIA_MOUNTED)
|| def_path_state.equals(Environment.MEDIA_MOUNTED_READ_ONLY);
boolean def_path_readonly = Environment.getExternalStorageState()
.equals(Environment.MEDIA_MOUNTED_READ_ONLY);
BufferedReader buf_reader = null;
try {
HashSet<String> paths = new HashSet<String>();
buf_reader = new BufferedReader(new FileReader("/proc/mounts"));
String line;
int cur_display_number = 1;
Log.d(TAG, "/proc/mounts");
while ((line = buf_reader.readLine()) != null) {
Log.d(TAG, line);
if (line.contains("vfat") || line.contains("/mnt")) {
StringTokenizer tokens = new StringTokenizer(line, " ");
String unused = tokens.nextToken(); // device
String mount_point = tokens.nextToken(); // mount point
if (paths.contains(mount_point)) {
continue;
}
unused = tokens.nextToken(); // file system
List<String> flags = Arrays.asList(tokens.nextToken()
.split(",")); // flags
boolean readonly = flags.contains("ro");
if (mount_point.equals(def_path)) {
paths.add(def_path);
list.add(new StorageInfo(def_path, def_path_internal,
readonly, -1));
} else if (line.contains("/dev/block/vold")) {
if (!line.contains("/mnt/secure")
&& !line.contains("/mnt/asec")
&& !line.contains("/mnt/obb")
&& !line.contains("/dev/mapper")
&& !line.contains("tmpfs")) {
paths.add(mount_point);
list.add(new StorageInfo(mount_point, false,
readonly, cur_display_number++));
}
}
}
}
if (!paths.contains(def_path) && def_path_available) {
list.add(new StorageInfo(def_path, def_path_internal,
def_path_readonly, -1));
}
} catch (FileNotFoundException ex) {
ex.printStackTrace();
} catch (IOException ex) {
ex.printStackTrace();
} finally {
if (buf_reader != null) {
try {
buf_reader.close();
} catch (IOException ex) {
}
}
}
return list;
}
public static String getReadableFileSize(long bytes, boolean si) {
int unit = si ? 1000 : 1024;
if (bytes < unit)
return bytes + " B";
int exp = (int) (Math.log(bytes) / Math.log(unit));
String pre = (si ? "kMGTPE" : "KMGTPE").charAt(exp - 1)
+ (si ? "" : "i");
return String.format("%.1f %sB", bytes / Math.pow(unit, exp), pre);
}
#SuppressLint("NewApi")
public static long getFreeSpace(String path) {
StatFs statFs = new StatFs(path);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
long sdAvailSize = statFs.getFreeBlocksLong()
* statFs.getBlockSizeLong();
return sdAvailSize;
} else {
#SuppressWarnings("deprecation")
double sdAvailSize = (double) statFs.getFreeBlocks()
* (double) statFs.getBlockSize();
return (long) sdAvailSize;
}
}
public static long getUsedSpace(String path) {
return getTotalSpace(path) - getFreeSpace(path);
}
#SuppressLint("NewApi")
public static long getTotalSpace(String path) {
StatFs statFs = new StatFs(path);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
long sdTotalSize = statFs.getBlockCountLong()
* statFs.getBlockSizeLong();
return sdTotalSize;
} else {
#SuppressWarnings("deprecation")
double sdTotalSize = (double) statFs.getBlockCount()
* statFs.getBlockSize();
return (long) sdTotalSize;
}
}
/**
* getSize()[0] is /mnt/sdcard. getSize()[1] is size of sd (example 12.0G),
* getSize()[2] is used, [3] is free, [4] is blksize
*
* @return
* @throws IOException
*/
public static String[] getSize() throws IOException {
String memory = "";
Process p = Runtime.getRuntime().exec("df /mnt/sdcard");
InputStream is = p.getInputStream();
int by = -1;
while ((by = is.read()) != -1) {
memory += new String(new byte[] { (byte) by });
}
for (String df : memory.split("/n")) {
if (df.startsWith("/mnt/sdcard")) {
String[] par = df.split(" ");
List<String> pp = new ArrayList<String>();
for (String pa : par) {
if (!pa.isEmpty()) {
pp.add(pa);
}
}
return pp.toArray(new String[pp.size()]);
}
}
return null;
}
}
And here is my fragment in which I try to display the SDcard path and size.
public class CpuMemFragment extends Fragment {
// CPU
String devCpuInfo;
TextView tvCpuInfo;
// RAM
String devRamInfo;
TextView tvRamInfo;
// Storage
String devStorageA, devStorageB;
TextView tvStorageAName, tvStorageA, tvStorageB, tvStorageBName;
AdView adView;
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View rootView = inflater.inflate(R.layout.cpu_mem, container, false);
return rootView;
}
@Override
public void onActivityCreated(Bundle savedInstanceState) {
// TODO Auto-generated method stub
super.onActivityCreated(savedInstanceState);
// *** CPU ***
//
devCpuInfo = readCpuInfo();
//
// #################################
// *** RAM ***
//
devRamInfo = readTotalRam();
//
// #################################
// *** STORAGE ***
//
ArrayList<StorageInfo> storageInfoList = StorageUtils.getStorageList();
tvStorageAName = (TextView) getView().findViewById(R.id.tvStorageAName);
tvStorageBName = (TextView) getView().findViewById(R.id.tvStorageBName);
if (storageInfoList.size() > 0) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT
&& !storageInfoList.get(0).internal) {
kitKatWorkaround(0);
}
tvStorageAName.setText(storageInfoList.get(0).path);
devStorageA = StorageUtils.getReadableFileSize(
(StorageUtils.getUsedSpace(storageInfoList.get(0).path)),
true)
+ "/"
+ StorageUtils.getReadableFileSize((StorageUtils
.getTotalSpace(storageInfoList.get(0).path)), true);
if (storageInfoList.size() > 1) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT
&& !storageInfoList.get(1).internal) {
kitKatWorkaround(1);
}
tvStorageBName.setText(storageInfoList.get(1).path);
devStorageB = StorageUtils.getReadableFileSize(
StorageUtils.getUsedSpace(storageInfoList.get(1).path)
+ (StorageUtils.getUsedSpace("system/")), true)
+ "/"
+ StorageUtils.getReadableFileSize((StorageUtils
.getTotalSpace(storageInfoList.get(1).path)),
true);
} else {
devStorageB = "N/A";
}
} else {
devStorageA = "N/A";
devStorageB = "N/A";
}
//
// #################################
// CPU
tvCpuInfo = (TextView) getView().findViewById(R.id.tvCpuInfo);
tvCpuInfo.setText(devCpuInfo);
//
// #################################
// RAM
tvRamInfo = (TextView) getView().findViewById(R.id.tvRamInfo);
tvRamInfo.setText(devRamInfo);
//
// #################################
// STORAGE
tvStorageA = (TextView) getView().findViewById(R.id.tvStorageA);
tvStorageA.setText(devStorageA);
tvStorageB = (TextView) getView().findViewById(R.id.tvStorageB);
tvStorageB.setText(devStorageB);
//
// #################################
// Look up the AdView as a resource and load a request.
adView = (AdView) getActivity().findViewById(R.id.adCpuMemBanner);
AdRequest adRequest = new AdRequest.Builder().build();
adView.loadAd(adRequest);
}
@Override
public void onPause() {
if (adView != null) {
adView.pause();
}
super.onPause();
}
@Override
public void onResume() {
super.onResume();
if (adView != null) {
adView.resume();
}
}
@Override
public void onDestroy() {
if (adView != null) {
adView.destroy();
}
super.onDestroy();
}
private static synchronized String readCpuInfo() {
ProcessBuilder cmd;
String result = "";
try {
String[] args = { "/system/bin/cat", "/proc/cpuinfo" };
cmd = new ProcessBuilder(args);
Process process = cmd.start();
InputStream in = process.getInputStream();
byte[] re = new byte[1024];
int len;
// only convert the bytes actually read, otherwise stale buffer contents leak in
while ((len = in.read(re)) != -1) {
System.out.println(new String(re, 0, len));
result = result + new String(re, 0, len);
}
in.close();
} catch (IOException ex) {
ex.printStackTrace();
}
return result;
}
public static synchronized String readTotalRam() {
String load = "";
try {
RandomAccessFile reader = new RandomAccessFile("/proc/meminfo", "r");
load = reader.readLine();
reader.close();
} catch (IOException ex) {
ex.printStackTrace();
}
return load;
}
public void kitKatWorkaround(int index) {
String path1 = Environment.getExternalStorageDirectory().getPath();
//Storage A
if (index == 0) {
tvStorageAName.setText(path1);
devStorageA = StorageUtils.getReadableFileSize(
(StorageUtils.getUsedSpace(path1)), true)
+ "/"
+ StorageUtils.getReadableFileSize(
(StorageUtils.getTotalSpace(path1)), true);
}
//Storage B
if (index == 1) {
tvStorageBName.setText(path1);
devStorageB = StorageUtils.getReadableFileSize(
(StorageUtils.getUsedSpace(path1)), true)
+ "/"
+ StorageUtils.getReadableFileSize(
(StorageUtils.getTotalSpace(path1)), true);
}
}
}
This results in an EACCES (access denied) error or an invalid path on KitKat. Please help, and thank you for your time.
You can find all shared storage devices by calling Context.getExternalFilesDirs() or Context.getExternalCacheDirs(). Then, you can call File.getUsableSpace() or use android.os.StatFs to check available disk space at each location.
These APIs are also available in ContextCompat in the support library to seamlessly work on pre-KitKat devices.
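A minimal sketch of that approach (StorageSpaceUtil and logStorageSpace are illustrative names, not part of any API; ContextCompat comes from the support library):
import java.io.File;
import android.content.Context;
import android.support.v4.content.ContextCompat;
import android.util.Log;

public class StorageSpaceUtil {
    // Logs the free and total bytes of every shared storage volume.
    public static void logStorageSpace(Context context) {
        // getExternalFilesDirs() can contain null entries for volumes that
        // are currently unmounted or otherwise unavailable.
        for (File dir : ContextCompat.getExternalFilesDirs(context, null)) {
            if (dir == null) continue;
            long free = dir.getUsableSpace();  // bytes this app can actually write
            long total = dir.getTotalSpace();  // total size of the volume
            Log.d("Storage", dir.getPath() + ": " + free + " / " + total + " bytes free");
        }
    }
}
You can pass those values to the getReadableFileSize() helper from your question to format them for display.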
Hi, I am asking this question with reference to the code at https://github.com/google/grafika .
I am trying to re-initialize my encoder during an orientation change in order to adjust the aspect ratio of the encoded frames.
UPDATE:
Here is my GLSurfaceView.Renderer class
class CameraSurfaceRenderer implements GLSurfaceView.Renderer {
private static final String TAG = MainActivity.TAG;
private static final boolean VERBOSE = false;
private static final boolean ROSE_COLORED_GLASSES = false; // experiment
private MainActivity.CameraHandler mCameraHandler;
private TextureMovieEncoder mVideoEncoder;
private File mOutputFile;
TextView tv;
private TextureRender mTextureRender;
private SurfaceTexture mSurfaceTexture;
private boolean mRecordingEnabled;
Context cntex;
public CameraSurfaceRenderer(MainActivity.CameraHandler cameraHandler, TextureMovieEncoder movieEncoder, File outputFile, TextView tx, Context c) {
mOutputFile = outputFile;
mVideoEncoder = movieEncoder;
mRecordingEnabled = false;
mCameraHandler = cameraHandler;
tv = tx;
cntex = c;
}
public void notifyPausing() {
if (mSurfaceTexture != null) {
Log.d(TAG, "renderer pausing -- releasing SurfaceTexture");
mSurfaceTexture.release();
mSurfaceTexture = null;
}
}
public void changeRecordingState(boolean isRecording) {
Log.d(TAG, "changeRecordingState: was " + mRecordingEnabled + " now " + isRecording);
mRecordingEnabled = isRecording;
}
@Override
public void onSurfaceCreated(GL10 unused, EGLConfig config) {
System.out.println("onSurfaceCreated start");
Log.d(TAG, "onSurfaceCreated");
mTextureRender = new TextureRender(cntex);
mTextureRender.surfaceCreated();
if (ROSE_COLORED_GLASSES) {
String rosyFragment =
"#extension GL_OES_EGL_image_external : require\n" +
"precision mediump float;\n" +
"varying vec2 vTextureCoord;\n" +
"uniform samplerExternalOES sTexture;\n" +
"void main() {\n" +
" vec4 tc = texture2D(sTexture, vTextureCoord);\n" +
" gl_FragColor.r = tc.r * 0.3 + tc.g * 0.59 + tc.b * 0.11;\n" +
"}\n";
// assign value to gl_FragColor.g and .b as well to get simple B&W
mTextureRender.changeFragmentShader(rosyFragment);
}
mSurfaceTexture = new SurfaceTexture(mTextureRender.getTextureId());
// Tell the UI thread to enable the camera preview.
mCameraHandler.sendMessage(mCameraHandler.obtainMessage(
MainActivity.CameraHandler.MSG_SET_SURFACE_TEXTURE, mSurfaceTexture));
if (cntex.getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT) {
mVideoEncoder.startRecording(new TextureMovieEncoder.EncoderConfig(
mOutputFile, 480, 640, 1400000, EGL14.eglGetCurrentContext()));
} else {
mVideoEncoder.startRecording(new TextureMovieEncoder.EncoderConfig(
mOutputFile, 640, 480, 1400000, EGL14.eglGetCurrentContext()));
}
mVideoEncoder.updateSharedContext(EGL14.eglGetCurrentContext());
System.out.println("onSurfaceCreated end");
}
boolean is_stpd = false;
int count = 0;
@Override
public void onSurfaceChanged(GL10 unused, int width, int height) {
System.out.println("onSurfaceChanged start");
Log.d(TAG, "onSurfaceChanged " + width + "x" + height);
GLES20.glViewport(0, 0, width, height);
mCameraHandler.sendEmptyMessage(2); // to display toast message of surface change.
int orientation = cntex.getResources().getConfiguration().orientation;
// System.out.println("onSurfaceChanged before reinit");
mVideoEncoder.encoder_reinitialisation(EGL14.eglGetCurrentContext(), orientation);
// System.out.println("onSurfaceChanged after reinit");
System.out.println("onSurfaceChanged end");
}
int _frm_cnt = 0;
double _strt_tm = 0;
@Override
public void onDrawFrame(GL10 unused) {
System.out.println("onDrawFrame start");
if (VERBOSE) Log.d(TAG, "onDrawFrame tex=" + mTextureRender.getTextureId());
++_frm_cnt;
if(_frm_cnt == 1) {
_strt_tm = System.currentTimeMillis();
}
if((System.currentTimeMillis() - _strt_tm) >= 1000) {
System.out.println("fps = " + _frm_cnt );
//tx.setText("fps = " + Integer.toString(frm_cnt));
_frm_cnt = 0;
}
mSurfaceTexture.updateTexImage();
//
mVideoEncoder.setTextureId(mTextureRender.getTextureId());
//
if(mVideoEncoder.is_prepared == true) {
// // This will be ignored if we're not actually recording.
mVideoEncoder.frameAvailable(mSurfaceTexture);
}
// Draw the video frame.
mTextureRender.drawFrame(mSurfaceTexture);
System.out.println("onDrawFrame end");
}
}
Here is my TextureMovieEncoder class
public class TextureMovieEncoder implements Runnable {
private static final String TAG = MainActivity.TAG;
private static final boolean VERBOSE = false;
private static final String MIME_TYPE = "video/avc"; // H.264 Advanced Video Coding
private static final int FRAME_RATE = 5; // frames per second
private static final int IFRAME_INTERVAL = 1; // seconds between I-frames
private static final int MSG_START_RECORDING = 0;
private static final int MSG_STOP_RECORDING = 1;
private static final int MSG_FRAME_AVAILABLE = 2;
private static final int MSG_SET_TEXTURE_ID = 3;
private static final int MSG_UPDATE_SHARED_CONTEXT = 4;
private static final int MSG_QUIT = 5;
private static final int MSG_REINIT = 6;
// ----- accessed by encoder thread -----
private EglCore mEglBase;
private WindowSurface mInputWindowSurface;
private MediaMuxer mMuxer;
private MediaCodec mEncoder;
private MediaCodec.BufferInfo mBufferInfo;
private int mTrackIndex;
private boolean mMuxerStarted;
private TextureRender mTextureRender;
private int mTextureId;
private int mFrameNum;
File enc_file = new File(Environment.getExternalStorageDirectory().getPath() + "/encoded_preview.webm");
FileOutputStream fp_enc = null;
PrintWriter enc_len = null;
// ----- accessed by multiple threads -----
private volatile EncoderHandler mHandler;
private Object mReadyFence = new Object(); // guards ready/running
private boolean mReady;
public boolean mRunning;
public boolean is_prepared = false;
public TextureMovieEncoder(Context cntxt) {
context = cntxt;
}
public static class EncoderConfig {
final File mOutputFile;
final int mWidth;
final int mHeight;
final int mBitRate;
final EGLContext mEglContext;
public EncoderConfig(File outputFile, int width, int height, int bitRate,
EGLContext sharedEglContext) {
System.out.println("EncoderConfig start");
mOutputFile = outputFile;
mWidth = width;
mHeight = height;
mBitRate = bitRate;
mEglContext = sharedEglContext;
System.out.println("EncoderConfig end");
}
@Override
public String toString() {
return "EncoderConfig: " + mWidth + "x" + mHeight + " #" + mBitRate +
" to '" + mOutputFile.toString() + "' ctxt=" + mEglContext;
}
}
public void startRecording(EncoderConfig config) {
System.out.println("startRecording start");
Log.d(TAG, "Encoder: startRecording()");
synchronized (mReadyFence) {
if (mRunning) {
Log.w(TAG, "Encoder thread already running");
return;
}
mRunning = true;
new Thread(this, "TextureMovieEncoder").start();
while (!mReady) {
try {
mReadyFence.wait();
} catch (InterruptedException ie) {
// ignore
}
}
}
mHandler.sendMessage(mHandler.obtainMessage(MSG_START_RECORDING, config));
System.out.println("startRecording end");
}
int orientation_local = -1;
public void encoder_reinitialisation(EGLContext eglContext, int orientation) {
System.out.println("encoder_reinitialisation start");
is_prepared = false;
System.out.println("encoder_reinitialisation before message oriebta = " + orientation);
mHandler.sendMessage(mHandler.obtainMessage(MSG_REINIT, eglContext));
System.out.println("encoder_reinitialisation after message");
orientation_local = orientation;
System.out.println("encoder_reinitialisation end");
}
public void stopRecording() {
System.out.println("stopRecording start");
if(mHandler != null) {
mHandler.sendMessage(mHandler.obtainMessage(MSG_STOP_RECORDING));
mHandler.sendMessage(mHandler.obtainMessage(MSG_QUIT));
}
// We don't know when these will actually finish (or even start). We don't want to
// delay the UI thread though, so we return immediately.
System.out.println("stopRecording end");
}
/**
* Returns true if recording has been started.
*/
public boolean isRecording() {
synchronized (mReadyFence) {
return mRunning;
}
}
public void updateSharedContext(EGLContext sharedContext) {
mHandler.sendMessage(mHandler.obtainMessage(MSG_UPDATE_SHARED_CONTEXT, sharedContext));
}
public void frameAvailable(SurfaceTexture st) {
System.out.println("frameAvailable start");
synchronized (mReadyFence) {
if (!mReady) {
return;
}
}
float[] transform = new float[16]; // TODO - avoid alloc every frame
st.getTransformMatrix(transform);
long timestamp = st.getTimestamp();
if (timestamp == 0) {
Log.w(TAG, "HEY: got SurfaceTexture with timestamp of zero");
return;
}
mHandler.sendMessage(mHandler.obtainMessage(MSG_FRAME_AVAILABLE,
(int) (timestamp >> 32), (int) timestamp, transform));
System.out.println("frameAvailable end");
}
public void setTextureId(int id) {
System.out.println("setTextureId start");
synchronized (mReadyFence) {
if (!mReady) {
return;
}
}
mHandler.sendMessage(mHandler.obtainMessage(MSG_SET_TEXTURE_ID, id, 0, null));
System.out.println("setTextureId end");
}
@Override
public void run() {
System.out.println("run start");
// Establish a Looper for this thread, and define a Handler for it.
Looper.prepare();
synchronized (mReadyFence) {
mHandler = new EncoderHandler(this);
mReady = true;
mReadyFence.notify();
}
Looper.loop();
Log.d(TAG, "Encoder thread exiting");
synchronized (mReadyFence) {
mReady = mRunning = false;
mHandler = null;
System.out.println("run end");
}
}
/**
* Handles encoder state change requests.
*/
private static class EncoderHandler extends Handler {
private WeakReference<TextureMovieEncoder> mWeakEncoder;
public EncoderHandler(TextureMovieEncoder encoder) {
mWeakEncoder = new WeakReference<TextureMovieEncoder>(encoder);
}
@Override
public void handleMessage(Message inputMessage) {
int what = inputMessage.what;
Object obj = inputMessage.obj;
TextureMovieEncoder encoder = mWeakEncoder.get();
if (encoder == null) {
Log.w(TAG, "EncoderHandler.handleMessage: encoder is null");
return;
}
switch (what) {
case MSG_START_RECORDING:
encoder.handleStartRecording((EncoderConfig) obj);
break;
case MSG_REINIT:
encoder.encoder_reinit((EGLContext) inputMessage.obj);
break;
case MSG_STOP_RECORDING:
encoder.handleStopRecording();
break;
case MSG_FRAME_AVAILABLE:
long timestamp = (((long) inputMessage.arg1) << 32) |
(((long) inputMessage.arg2) & 0xffffffffL);
encoder.handleFrameAvailable((float[]) obj, timestamp);
break;
case MSG_SET_TEXTURE_ID:
encoder.handleSetTexture(inputMessage.arg1);
break;
case MSG_UPDATE_SHARED_CONTEXT:
encoder.handleUpdateSharedContext((EGLContext) inputMessage.obj);
break;
case MSG_QUIT:
Looper.myLooper().quit();
break;
default:
throw new RuntimeException("Unhandled msg what=" + what);
}
}
}
/**
* Start recording.
*/
private void handleStartRecording(EncoderConfig config) {
Log.d(TAG, "handleStartRecording " + config);
mFrameNum = 0;
prepareEncoder(config.mEglContext, config.mWidth, config.mHeight, config.mBitRate,
config.mOutputFile);
}
private void encoder_reinit(EGLContext obj) {
System.out.println("encoder_reinit start ");
drainEncoder(true);
releaseEncoder();
prepareEncoder(obj, video_width, video_height, 1400000,
null);
System.out.println("encoder_reinit end ");
}
private void handleFrameAvailable(float[] transform, long timestamp) {
System.out.println("handleFrameAvailable start");
if (VERBOSE) Log.d(TAG, "handleFrameAvailable tr=" + transform);
if(is_prepared == true) {
drainEncoder(false);
mTextureRender.setTextureId(mTextureId);
mTextureRender.drawFrame(transform);
mInputWindowSurface.setPresentationTime(timestamp);
mInputWindowSurface.swapBuffers();
}
System.out.println("handleFrameAvailable end");
}
private void handleStopRecording() {
Log.d(TAG, "handleStopRecording");
drainEncoder(true);
releaseEncoder();
}
private void handleSetTexture(int id) {
//Log.d(TAG, "handleSetTexture " + id);
mTextureId = id;
}
/**
* Tears down the EGL surface and context we've been using to feed the MediaCodec input
* surface, and replaces it with a new one that shares with the new context.
*/
private void handleUpdateSharedContext(EGLContext newSharedContext) {
System.out.println("handleUpdateSharedContext start");
Log.d(TAG, "handleUpdatedSharedContext " + newSharedContext);
// Release the EGLSurface and EGLContext.
if(mInputWindowSurface != null) {
mInputWindowSurface.releaseEglSurface();
mEglBase.release();
}
// Create a new EGLContext and recreate the window surface.
mEglBase = new EglCore(newSharedContext, EglCore.FLAG_RECORDABLE);
mInputWindowSurface.recreate(mEglBase);
mInputWindowSurface.makeCurrent();
// Create new programs and such for the new context.
mTextureRender.surfaceCreated();
System.out.println("handleUpdateSharedContext end");
}
boolean created =false;
EGLContext sharedContext_local;
private Context context;
private void prepareEncoder(EGLContext sharedContext, int width, int height, int bitRate,
File outputFile) {
System.out.println("prepareEncoder start width = " + width + "height = " + height);
sharedContext_local = sharedContext;
enc_strm = new byte[width * height * 3 / 2];
encoded_data_buffer = new ofi_vc_buffer();
video_width = width;
video_height = height;
if(!created) {
try {
fp_enc = new FileOutputStream(enc_file);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
try {
enc_len = new PrintWriter(Environment.getExternalStorageDirectory().getPath() + "/encoded_len.xls");
} catch (FileNotFoundException e) {
e.printStackTrace();
}
created = true;
}
mBufferInfo = new MediaCodec.BufferInfo();
MediaFormat format = null;
if(orientation_local == 1) {
System.out.println("videoformatting portrait");
format = MediaFormat.createVideoFormat(MIME_TYPE, width, height);
} else {
System.out.println("videoformatting landscape");
format = MediaFormat.createVideoFormat(MIME_TYPE, height, width);
}
// Set some properties. Failing to specify some of these can cause the MediaCodec
// configure() call to throw an unhelpful exception.
format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
if (VERBOSE) Log.d(TAG, "format: " + format);
try {
mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
} catch (IOException ioe) {
throw new RuntimeException(ioe); // createEncoderByType declares IOException
}
mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mEglBase = new EglCore(sharedContext, EglCore.FLAG_RECORDABLE);
mInputWindowSurface = new WindowSurface(mEglBase, mEncoder.createInputSurface());
mEncoder.start();
mInputWindowSurface.makeCurrent();
mTextureRender = new TextureRender(context);
mTextureRender.surfaceCreated();
mTrackIndex = -1;
mMuxerStarted = false;
is_prepared = true;
System.out.println("prepareEncoder end");
}
/**
* Releases encoder resources.
*/
private void releaseEncoder() {
System.out.println("releaseEncoder end");
if (VERBOSE) Log.d(TAG, "releasing encoder objects");
if (mEncoder != null) {
mEncoder.stop();
mEncoder.release();
mEncoder = null;
}
if (mInputWindowSurface != null) {
mInputWindowSurface.release();
mInputWindowSurface = null;
}
if (mEglBase != null) {
mEglBase.release();
mEglBase = null;
}
if (mMuxer != null) {
mMuxer.stop();
mMuxer.release();
mMuxer = null;
}
System.out.println("releaseEncoder start");
}
byte[] enc_strm = null;
byte sps_pps_nal[] = null;
int sps_pps_nal_size = 0;
ofi_vc_buffer encoded_data_buffer = null;
private int encod_len = 0;
private int frame_type;
private encoded_stream_info enc_buffer_global;
private int video_width;
private int video_height;
private void drainEncoder(boolean endOfStream) {
System.out.println("drainEncoder start");
final int TIMEOUT_USEC = 10000;
if (VERBOSE) Log.d(TAG, "drainEncoder(" + endOfStream + ")");
if (endOfStream) {
if (VERBOSE) Log.d(TAG, "sending EOS to encoder");
mEncoder.signalEndOfInputStream();
}
//encoded_stream_info enc_buffer = new encoded_stream_info(video_width * video_height * 3 / 2);
ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
while (true) {
int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
// no output available yet
if (!endOfStream) {
break; // out of while
} else {
if (VERBOSE) Log.d(TAG, "no output available, spinning to await EOS");
}
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
// not expected for an encoder
encoderOutputBuffers = mEncoder.getOutputBuffers();
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// should happen before receiving buffers, and should only happen once
if (mMuxerStarted) {
throw new RuntimeException("format changed twice");
}
MediaFormat newFormat = mEncoder.getOutputFormat();
Log.d(TAG, "encoder output format changed: " + newFormat);
// now that we have the Magic Goodies, start the muxer
// mTrackIndex = mMuxer.addTrack(newFormat);
// mMuxer.start();
// mMuxerStarted = true;
} else if (encoderStatus < 0) {
Log.w(TAG, "unexpected result from encoder.dequeueOutputBuffer: " +
encoderStatus);
// let's ignore it
} else {
ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
if (encodedData == null) {
throw new RuntimeException("encoderOutputBuffer " + encoderStatus +
" was null");
}
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
// The codec config data was pulled out and fed to the muxer when we got
// the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
if (VERBOSE) Log.d("Main", "ignoring BUFFER_FLAG_CODEC_CONFIG");
encodedData.position(mBufferInfo.offset);
encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
System.out.println("Encode SPS PPS buffer size" + mBufferInfo.size );
sps_pps_nal_size = mBufferInfo.size;
sps_pps_nal = new byte[sps_pps_nal_size];
encodedData.get(sps_pps_nal, 0, sps_pps_nal_size);
mBufferInfo.size = 0;
}
if (mBufferInfo.size != 0) {
// if (!mMuxerStarted) {
// throw new RuntimeException("muxer hasn't started");
// }
// adjust the ByteBuffer values to match BufferInfo (not needed?)
encodedData.position(mBufferInfo.offset);
encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
encodedData.get(enc_strm, sps_pps_nal_size, mBufferInfo.size);
System.arraycopy(sps_pps_nal, 0, enc_strm, 0, sps_pps_nal_size);
encod_len = mBufferInfo.size + sps_pps_nal_size;
if ((enc_strm[sps_pps_nal_size + 4] & 0x1F) == 5) { // NAL unit type 5 = IDR (key) frame
frame_type = 2;
} else {
frame_type = 0;
}
//mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
if (VERBOSE) {
Log.d("Main", "sent " + mBufferInfo.size + " bytes to muxer, ts=" +
mBufferInfo.presentationTimeUs);
}
enc_buffer_global.encoded_len = 0; // frame sending is disabled.
if(enc_buffer_global.encoded_len == 0) {
enc_buffer_global.encoded_data = enc_strm;
enc_buffer_global.encoded_len = encod_len;
enc_buffer_global.frame_type = frame_type;
// System.out.println("encoded Wrote stream len =" + enc_buffer_global.encoded_len);
try {
fp_enc.write(enc_strm, 0, encod_len);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
fp_enc.flush();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
enc_len.format("%d\n", encod_len);
enc_len.flush();
}
}
mEncoder.releaseOutputBuffer(encoderStatus, false);
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
if (!endOfStream) {
Log.w(TAG, "reached end of stream unexpectedly");
} else {
if (VERBOSE) Log.d(TAG, "end of stream reached");
}
break; // out of while
}
}
}
System.out.println("drainEncoder end");
}
public void set_buffer(encoded_stream_info bufer) {
enc_buffer_global = bufer;
}
}
Other than these, I have added the
EglCore.java
EglSurfaceBase.java
TextureRender.java
WindowSurface.java
classes from the grafika link given above.
Still, I am not able to figure out why my app freezes during re-initialization, though sometimes it works.
Can anyone help?
Otherwise, what steps should I take to change the encoding resolution?
Thanks in advance.
I got it working with the following code:
public void onConfigurationChanged(Configuration newConfig) {
System.out.println("On config change start ");
super.onConfigurationChanged(newConfig);
mGLView.queueEvent(new Runnable() {
@Override public void run() {
// Tell the renderer that it's about to be paused so it can clean up.
mRenderer.notifyPausing();
}
});
mGLView.queueEvent(new Runnable() {
@Override public void run() {
mRenderer.re_init();
}
});
}
where the re_init function is:
public void re_init() {
mTextureRender = new TextureRender(cntex);
mTextureRender.surfaceCreated();
if (ROSE_COLORED_GLASSES) {
String rosyFragment =
"#extension GL_OES_EGL_image_external : require\n" +
"precision mediump float;\n" +
"varying vec2 vTextureCoord;\n" +
"uniform samplerExternalOES sTexture;\n" +
"void main() {\n" +
" vec4 tc = texture2D(sTexture, vTextureCoord);\n" +
" gl_FragColor.r = tc.r * 0.3 + tc.g * 0.59 + tc.b * 0.11;\n" +
"}\n";
mTextureRender.changeFragmentShader(rosyFragment);
}
mSurfaceTexture = new SurfaceTexture(mTextureRender.getTextureId());
mCameraHandler.sendMessage(mCameraHandler.obtainMessage(
MainActivity.CameraHandler.MSG_SET_SURFACE_TEXTURE, mSurfaceTexture));
if (cntex.getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT) {
mVideoEncoder.startRecording(new TextureMovieEncoder.EncoderConfig(
mOutputFile, 640, 480, 1400000, EGL14.eglGetCurrentContext()));
} else {
mVideoEncoder.startRecording(new TextureMovieEncoder.EncoderConfig(
mOutputFile, 480, 640, 1400000, EGL14.eglGetCurrentContext()));
}
//mVideoEncoder.updateSharedContext(EGL14.eglGetCurrentContext());
}
In mRenderer.notifyPausing() I have also added a videoEncoder.stopRecording() call, which waits until the whole recorder is stopped (I did an object-based synchronisation there).
But the whole re-initialisation takes 250-400 ms.
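For reference, here is a minimal sketch of that kind of object-based wait (mStopFence, mStopped, stopRecordingAndWait() and signalStopped() are illustrative names, not actual grafika code):
private final Object mStopFence = new Object(); // guards mStopped
private boolean mStopped;

// Called from the GL thread, e.g. inside notifyPausing().
public void stopRecordingAndWait() {
    mHandler.sendMessage(mHandler.obtainMessage(MSG_STOP_RECORDING));
    mHandler.sendMessage(mHandler.obtainMessage(MSG_QUIT));
    synchronized (mStopFence) {
        while (!mStopped) {
            try {
                mStopFence.wait(); // woken by signalStopped() on the encoder thread
            } catch (InterruptedException ie) {
                // ignore and keep waiting
            }
        }
    }
}

// Called at the end of handleStopRecording(), after releaseEncoder().
private void signalStopped() {
    synchronized (mStopFence) {
        mStopped = true;
        mStopFence.notifyAll();
    }
}
Blocking the GL thread like this is what guarantees the old encoder is fully released before re_init() creates the new one.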
I have the following BaseClass:
public class BaseClass extends Activity implements MusicUtils.Defs,
View.OnTouchListener, View.OnLongClickListener {
private static final int USE_AS_RINGTONE = CHILD_MENU_BASE;
private boolean mOneShot = false;
private boolean mSeeking = false;
private boolean mDeviceHasDpad;
private long mStartSeekPos = 0;
private long mLastSeekEventTime;
private IMediaPlaybackService mService = null;
private RepeatingImageButton mPrevButton;
private ImageButton mPauseButton;
private RepeatingImageButton mNextButton;
private Worker mAlbumArtWorker;
private AlbumArtHandler mAlbumArtHandler;
private Toast mToast;
private int mTouchSlop;
private ServiceToken mToken;
public BaseClass() {
}
@Override
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
setVolumeControlStream(AudioManager.STREAM_MUSIC);
mAlbumArtWorker = new Worker("album art worker");
mAlbumArtHandler = new AlbumArtHandler(mAlbumArtWorker.getLooper());
mCurrentTime = (TextView) findViewById(R.id.currenttime);
mTotalTime = (TextView) findViewById(R.id.totaltime);
mPrevButton = (RepeatingImageButton) findViewById(R.id.prev);
mPrevButton.setOnClickListener(mPrevListener);
mPrevButton.setRepeatListener(mRewListener, 260);
mPauseButton = (ImageButton) findViewById(R.id.pause);
mPauseButton.requestFocus();
mPauseButton.setOnClickListener(mPauseListener);
mNextButton = (RepeatingImageButton) findViewById(R.id.next);
mNextButton.setOnClickListener(mNextListener);
mNextButton.setRepeatListener(mFfwdListener, 260);
seekmethod = 1;
mDeviceHasDpad = (getResources().getConfiguration().navigation ==
Configuration.NAVIGATION_DPAD);
if (icicle != null) {
mOneShot = icicle.getBoolean("oneshot");
} else {
mOneShot = getIntent().getBooleanExtra("oneshot", false);
}
mTouchSlop = ViewConfiguration.get(BaseClass.this).getScaledTouchSlop();
}
Handler mLabelScroller = new Handler() {
@Override
public void handleMessage(Message msg) {
TextView tv = (TextView) msg.obj;
int x = tv.getScrollX();
x = x * 3 / 4;
tv.scrollTo(x, 0);
if (x == 0) {
tv.setEllipsize(TruncateAt.END);
} else {
Message newmsg = obtainMessage(0, tv);
mLabelScroller.sendMessageDelayed(newmsg, 15);
}
}
};
public boolean onLongClick(View view) {
// (locals such as audioid, artist, album, song, title, query and mime are
// initialised from mService in the original AOSP source; omitted here)
Cursor c = MusicUtils.query(this, ContentUris.withAppendedId(
MediaStore.Audio.Media.EXTERNAL_CONTENT_URI, audioid),
new String[] { MediaStore.Audio.Media.IS_MUSIC }, null,
null,
null);
boolean ismusic = true;
if (c != null) {
if (c.moveToFirst()) {
ismusic = c.getInt(0) != 0;
}
c.close();
}
if (!ismusic) {
return false;
}
boolean knownartist = (artist != null)
&& !MediaStore.UNKNOWN_STRING.equals(artist);
boolean knownalbum = (album != null)
&& !MediaStore.UNKNOWN_STRING.equals(album);
title = getString(R.string.mediasearch, title);
Intent i = new Intent();
i.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
i.setAction(MediaStore.INTENT_ACTION_MEDIA_SEARCH);
i.putExtra(SearchManager.QUERY, query);
if (knownartist) {
i.putExtra(MediaStore.EXTRA_MEDIA_ARTIST, artist);
}
if (knownalbum) {
i.putExtra(MediaStore.EXTRA_MEDIA_ALBUM, album);
}
i.putExtra(MediaStore.EXTRA_MEDIA_TITLE, song);
i.putExtra(MediaStore.EXTRA_MEDIA_FOCUS, mime);
startActivity(Intent.createChooser(i, title));
return true;
}
private OnSeekBarChangeListener mSeekListener = new OnSeekBarChangeListener() {
public void onStartTrackingTouch(SeekBar bar) {
mLastSeekEventTime = 0;
mFromTouch = true;
}
public void onProgressChanged(SeekBar bar, int progress,
boolean fromuser) {
if (!fromuser || (mService == null))
return;
long now = SystemClock.elapsedRealtime();
if ((now - mLastSeekEventTime) > 250) {
mLastSeekEventTime = now;
updateTrackInfo();
mPosOverride = mDuration * progress / 1000;
try {
mService.seek(mPosOverride);
} catch (RemoteException ex) {
}
// trackball event, allow progress updates
if (!mFromTouch) {
refreshNow();
mPosOverride = -1;
}
}
}
public void onStopTrackingTouch(SeekBar bar) {
mPosOverride = -1;
mFromTouch = false;
}
};
private View.OnClickListener mPauseListener = new View.OnClickListener() {
public void onClick(View v) {
doPauseResume();
}
};
private View.OnClickListener mPrevListener = new View.OnClickListener() {
public void onClick(View v) {
if (mService == null)
return;
try {
if (mService.position() < 2000) {
mService.prev();
} else {
mService.seek(0);
mService.play();
}
} catch (RemoteException ex) {
}
}
};
@Override
public boolean onKeyUp(int keyCode, KeyEvent event) {
try {
switch (keyCode) {
case KeyEvent.KEYCODE_DPAD_LEFT:
if (!useDpadMusicControl()) {
break;
}
if (mService != null) {
if (!mSeeking && mStartSeekPos >= 0) {
mPauseButton.requestFocus();
if (mStartSeekPos < 1000) {
mService.prev();
} else {
mService.seek(0);
}
} else {
scanBackward(-1,
event.getEventTime() -
event.getDownTime());
mPauseButton.requestFocus();
mStartSeekPos = -1;
}
}
mSeeking = false;
mPosOverride = -1;
return true;
case KeyEvent.KEYCODE_DPAD_RIGHT:
if (!useDpadMusicControl()) {
break;
}
if (mService != null) {
if (!mSeeking && mStartSeekPos >= 0) {
mPauseButton.requestFocus();
mService.next();
} else {
scanForward(-1,
event.getEventTime() -
event.getDownTime());
mPauseButton.requestFocus();
mStartSeekPos = -1;
}
}
mSeeking = false;
mPosOverride = -1;
return true;
}
} catch (RemoteException ex) {
}
return super.onKeyUp(keyCode, event);
}
private boolean useDpadMusicControl() {
if (mDeviceHasDpad
&& (mPrevButton.isFocused() || mNextButton.isFocused()
|| mPauseButton
.isFocused())) {
return true;
}
return false;
}
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
int repcnt = event.getRepeatCount();
if ((seekmethod == 0) ? seekMethod1(keyCode) : seekMethod2(keyCode))
return true;
switch (keyCode) {
/*
* // image scale case KeyEvent.KEYCODE_Q: av.adjustParams(-0.05, 0.0,
* 0.0, 0.0, 0.0,-1.0); break; case KeyEvent.KEYCODE_E: av.adjustParams(
* 0.05, 0.0, 0.0, 0.0, 0.0, 1.0); break; // image translate case
* KeyEvent.KEYCODE_W: av.adjustParams( 0.0, 0.0,-1.0, 0.0, 0.0, 0.0);
* break; case KeyEvent.KEYCODE_X: av.adjustParams( 0.0, 0.0, 1.0, 0.0,
* 0.0, 0.0); break; case KeyEvent.KEYCODE_A: av.adjustParams( 0.0,-1.0,
* 0.0, 0.0, 0.0, 0.0); break; case KeyEvent.KEYCODE_D: av.adjustParams(
* 0.0, 1.0, 0.0, 0.0, 0.0, 0.0); break; // camera rotation case
* KeyEvent.KEYCODE_R: av.adjustParams( 0.0, 0.0, 0.0, 0.0, 0.0,-1.0);
* break; case KeyEvent.KEYCODE_U: av.adjustParams( 0.0, 0.0, 0.0, 0.0,
* 0.0, 1.0); break; // camera translate case KeyEvent.KEYCODE_Y:
* av.adjustParams( 0.0, 0.0, 0.0, 0.0,-1.0, 0.0); break; case
* KeyEvent.KEYCODE_N: av.adjustParams( 0.0, 0.0, 0.0, 0.0, 1.0, 0.0);
* break; case KeyEvent.KEYCODE_G: av.adjustParams( 0.0, 0.0, 0.0,-1.0,
* 0.0, 0.0); break; case KeyEvent.KEYCODE_J: av.adjustParams( 0.0, 0.0,
* 0.0, 1.0, 0.0, 0.0); break;
*/
case KeyEvent.KEYCODE_SLASH:
seekmethod = 1 - seekmethod;
return true;
case KeyEvent.KEYCODE_DPAD_LEFT:
if (!useDpadMusicControl()) {
break;
}
if (!mPrevButton.hasFocus()) {
mPrevButton.requestFocus();
}
scanBackward(repcnt, event.getEventTime() -
event.getDownTime());
return true;
case KeyEvent.KEYCODE_DPAD_RIGHT:
if (!useDpadMusicControl()) {
break;
}
if (!mNextButton.hasFocus()) {
mNextButton.requestFocus();
}
scanForward(repcnt, event.getEventTime() - event.getDownTime());
return true;
case KeyEvent.KEYCODE_S:
toggleShuffle();
return true;
case KeyEvent.KEYCODE_DPAD_CENTER:
case KeyEvent.KEYCODE_SPACE:
doPauseResume();
return true;
}
return super.onKeyDown(keyCode, event);
}
private void scanBackward(int repcnt, long delta) {
if (mService == null)
return;
try {
if (repcnt == 0) {
mStartSeekPos = mService.position();
mLastSeekEventTime = 0;
mSeeking = false;
} else {
mSeeking = true;
if (delta < 5000) {
// seek at 10x speed for the first 5 seconds
delta = delta * 10;
} else {
// seek at 40x after that
delta = 50000 + (delta - 5000) * 40;
}
long newpos = mStartSeekPos - delta;
if (newpos < 0) {
// move to previous track
mService.prev();
long duration = mService.duration();
mStartSeekPos += duration;
newpos += duration;
}
if (((delta - mLastSeekEventTime) > 250) || repcnt < 0)
{
mService.seek(newpos);
mLastSeekEventTime = delta;
}
if (repcnt >= 0) {
mPosOverride = newpos;
} else {
mPosOverride = -1;
}
refreshNow();
}
} catch (RemoteException ex) {
}
}
private void scanForward(int repcnt, long delta) {
if (mService == null)
return;
try {
if (repcnt == 0) {
mStartSeekPos = mService.position();
mLastSeekEventTime = 0;
mSeeking = false;
} else {
mSeeking = true;
if (delta < 5000) {
// seek at 10x speed for the first 5 seconds
delta = delta * 10;
} else {
// seek at 40x after that
delta = 50000 + (delta - 5000) * 40;
}
long newpos = mStartSeekPos + delta;
long duration = mService.duration();
if (newpos >= duration) {
// move to next track
mService.next();
mStartSeekPos -= duration; // is OK to go negative
newpos -= duration;
}
if (((delta - mLastSeekEventTime) > 250) || repcnt < 0) {
mService.seek(newpos);
mLastSeekEventTime = delta;
}
if (repcnt >= 0) {
mPosOverride = newpos;
} else {
mPosOverride = -1;
}
refreshNow();
}
} catch (RemoteException ex) {
}
}
private void doPauseResume() {
try {
if (mService != null) {
if (mService.isPlaying()) {
mService.pause();
} else {
mService.play();
}
updateTrackInfo();
refreshNow();
setPauseButtonImage();
}
} catch (RemoteException ex) {
}
}
private void toggleShuffle() {
if (mService == null) {
return;
}
try {
int shuffle = mService.getShuffleMode();
if (shuffle == MediaPlaybackService.SHUFFLE_NONE) {
mService.setShuffleMode(MediaPlaybackService.SHUFFLE_NORMAL);
if (mService.getRepeatMode() ==
MediaPlaybackService.REPEAT_CURRENT) {
mService.setRepeatMode(MediaPlaybackService.REPEAT_ALL);
}
showToast(R.string.shuffle_on_notif);
} else if (shuffle == MediaPlaybackService.SHUFFLE_NORMAL
|| shuffle == MediaPlaybackService.SHUFFLE_AUTO) {
mService.setShuffleMode(MediaPlaybackService.SHUFFLE_NONE);
showToast(R.string.shuffle_off_notif);
} else {
Log.e("MediaPlaybackActivity", "Invalid shuffle mode: "
+ shuffle);
}
} catch (RemoteException ex) {
}
}
private void cycleRepeat() {
if (mService == null) {
return;
}
try {
int mode = mService.getRepeatMode();
if (mode == MediaPlaybackService.REPEAT_NONE) {
mService.setRepeatMode(MediaPlaybackService.REPEAT_ALL);
showToast(R.string.repeat_all_notif);
} else if (mode == MediaPlaybackService.REPEAT_ALL) {
mService.setRepeatMode(MediaPlaybackService.REPEAT_CURRENT);
if (mService.getShuffleMode() != MediaPlaybackService.SHUFFLE_NONE) {
mService.setShuffleMode(MediaPlaybackService.SHUFFLE_NONE);
}
showToast(R.string.repeat_current_notif);
} else {
mService.setRepeatMode(MediaPlaybackService.REPEAT_NONE);
showToast(R.string.repeat_off_notif);
}
} catch (RemoteException ex) {
}
}
private void showToast(int resid) {
if (mToast == null) {
mToast = Toast.makeText(this, "", Toast.LENGTH_SHORT);
}
mToast.setText(resid);
mToast.show();
}
private void startPlayback() {
if (mService == null)
return;
Intent intent = getIntent();
String filename = "";
Uri uri = intent.getData();
if (uri != null && uri.toString().length() > 0) {
// If this is a file:// URI, just use the path directly instead
// of going through the open-from-filedescriptor codepath.
String scheme = uri.getScheme();
if ("file".equals(scheme)) {
filename = uri.getPath();
} else {
filename = uri.toString();
}
try {
if (!ContentResolver.SCHEME_CONTENT.equals(scheme)
|| !MediaStore.AUTHORITY.equals(uri.getAuthority())) {
mOneShot = true;
}
mService.stop();
mService.openFile(filename, mOneShot);
mService.play();
setIntent(new Intent());
} catch (Exception ex) {
Log.d("MediaPlaybackActivity", "couldn't start playback: " + ex);
}
}
updateTrackInfo();
long next = refreshNow();
queueNextRefresh(next);
}
private ServiceConnection osc = new ServiceConnection() {
public void onServiceConnected(ComponentName classname, IBinder obj) {
mService = IMediaPlaybackService.Stub.asInterface(obj);
startPlayback();
try {
// Assume something is playing when the service says it is,
// but also if the audio ID is valid but the service is paused.
if (mService.getAudioId() >= 0 || mService.isPlaying()
|| mService.getPath() != null) {
// something is playing now, we're done
setPauseButtonImage();
return;
}
} catch (RemoteException ex) {
}
// Service is dead or not playing anything. If we got here as part
// of a "play this file" Intent, exit. Otherwise go to the Music
// app start screen.
if (getIntent().getData() == null) {
Intent intent = new Intent(Intent.ACTION_MAIN);
intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
intent.setClass(BaseClass.this, MusicBrowserActivity.class);
startActivity(intent);
}
finish();
}
public void onServiceDisconnected(ComponentName classname) {
mService = null;
}
};
private void setPauseButtonImage() {
try {
if (mService != null && mService.isPlaying()) {
mPauseButton
.setImageResource(android.R.drawable.ic_media_pause);
} else {
mPauseButton.setImageResource(android.R.drawable.ic_media_play);
}
} catch (RemoteException ex) {
}
}
private TextView mCurrentTime;
private TextView mTotalTime;
private long mPosOverride = -1;
private boolean mFromTouch = false;
private long mDuration;
private int seekmethod;
private boolean paused;
private static final int REFRESH = 1;
private static final int QUIT = 2;
private static final int GET_ALBUM_ART = 3;
private static final int ALBUM_ART_DECODED = 4;
private void queueNextRefresh(long delay) {
if (!paused) {
Message msg = mHandler.obtainMessage(REFRESH);
mHandler.removeMessages(REFRESH);
mHandler.sendMessageDelayed(msg, delay);
}
}
private long refreshNow() {
if (mService == null)
return 500;
try {
long pos = mPosOverride < 0 ? mService.position() : mPosOverride;
long remaining = 1000 - (pos % 1000);
if ((pos >= 0) && (mService.duration() > 0)) {
mCurrentTime.setText(MusicUtils
.makeTimeString(this, pos / 1000));
if (mService.isPlaying()) {
mCurrentTime.setVisibility(View.VISIBLE);
} else {
// blink the counter
int vis = mCurrentTime.getVisibility();
mCurrentTime
.setVisibility(vis == View.INVISIBLE ? View.VISIBLE
: View.INVISIBLE);
remaining = 500;
}
} else {
mCurrentTime.setText("--:--");
}
// return the number of milliseconds until the next full second, so
// the counter can be updated at just the right time
return remaining;
} catch (RemoteException ex) {
}
return 500;
}
private final Handler mHandler = new Handler() {
@Override
public void handleMessage(Message msg) {
switch (msg.what) {
case REFRESH:
updateTrackInfo();
long next = refreshNow();
queueNextRefresh(next);
break;
case QUIT:
// This can be moved back to onCreate once the bug that prevents
// Dialogs from being started from onCreate/onResume is fixed.
new AlertDialog.Builder(BaseClass.this)
.setTitle(R.string.service_start_error_title)
.setMessage(R.string.service_start_error_msg)
.setPositiveButton(R.string.service_start_error_button,
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog,
int whichButton) {
finish();
}
}).setCancelable(false).show();
break;
default:
break;
}
}
};
private BroadcastReceiver mStatusListener = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
String action = intent.getAction();
if (action.equals(MediaPlaybackService.META_CHANGED)) {
// redraw the artist/title info and
// set new max for progress bar
updateTrackInfo();
setPauseButtonImage();
queueNextRefresh(1);
} else if (action.equals(MediaPlaybackService.PLAYBACK_COMPLETE)) {
if (mOneShot) {
finish();
} else {
setPauseButtonImage();
}
} else if (action.equals(MediaPlaybackService.PLAYSTATE_CHANGED)) {
setPauseButtonImage();
}
}
};
private static class AlbumSongIdWrapper {
public long albumid;
public long songid;
AlbumSongIdWrapper(long aid, long sid) {
albumid = aid;
songid = sid;
}
}
private void updateTrackInfo() {
if (mService == null) {
return;
}
try {
String path = mService.getPath();
if (path == null) {
finish();
return;
}
long songid = mService.getAudioId();
if (songid < 0 && path.toLowerCase().startsWith("http://")) {
// Once we can get album art and meta data from MediaPlayer, we
// can show that info again when streaming.
mAlbumArtHandler.removeMessages(GET_ALBUM_ART);
mAlbumArtHandler.obtainMessage(GET_ALBUM_ART,
new AlbumSongIdWrapper(-1, -1)).sendToTarget();
} else {
String artistName = mService.getArtistName();
if (MediaStore.UNKNOWN_STRING.equals(artistName)) {
artistName = getString(R.string.unknown_artist_name);
}
String albumName = mService.getAlbumName();
long albumid = mService.getAlbumId();
if (MediaStore.UNKNOWN_STRING.equals(albumName)) {
albumName = getString(R.string.unknown_album_name);
albumid = -1;
}
mAlbumArtHandler.removeMessages(GET_ALBUM_ART);
mAlbumArtHandler.obtainMessage(GET_ALBUM_ART,
new AlbumSongIdWrapper(albumid, songid)).sendToTarget();
}
mDuration = mService.duration();
mTotalTime.setText(MusicUtils
.makeTimeString(this, mDuration / 1000));
} catch (RemoteException ex) {
finish();
}
}
public class AlbumArtHandler extends Handler {
private long mAlbumId = -1;
public AlbumArtHandler(Looper looper) {
super(looper);
}
@Override
public void handleMessage(Message msg) {
long albumid = ((AlbumSongIdWrapper) msg.obj).albumid;
long songid = ((AlbumSongIdWrapper) msg.obj).songid;
if (msg.what == GET_ALBUM_ART
&& (mAlbumId != albumid || albumid < 0)) {
// while decoding the new image, show the default album art
Message numsg = mHandler.obtainMessage(ALBUM_ART_DECODED, null);
mHandler.removeMessages(ALBUM_ART_DECODED);
mHandler.sendMessageDelayed(numsg, 300);
Bitmap bm = MusicUtils.getArtwork(BaseClass.this, songid,
albumid);
if (bm == null) {
bm = MusicUtils.getArtwork(BaseClass.this, songid, -1);
albumid = -1;
}
if (bm != null) {
numsg = mHandler.obtainMessage(ALBUM_ART_DECODED, bm);
mHandler.removeMessages(ALBUM_ART_DECODED);
mHandler.sendMessage(numsg);
}
mAlbumId = albumid;
}
}
}
public static class Worker implements Runnable {
private final Object mLock = new Object();
private Looper mLooper;
/**
* Creates a worker thread with the given name. The thread then runs a
* {@link android.os.Looper}.
*
* @param name
* A name for the new thread
*/
Worker(String name) {
Thread t = new Thread(null, this, name);
t.setPriority(Thread.MIN_PRIORITY);
t.start();
synchronized (mLock) {
while (mLooper == null) {
try {
mLock.wait();
} catch (InterruptedException ex) {
}
}
}
}
public Looper getLooper() {
return mLooper;
}
public void run() {
synchronized (mLock) {
Looper.prepare();
mLooper = Looper.myLooper();
mLock.notifyAll();
}
Looper.loop();
}
public void quit() {
mLooper.quit();
}
}
}
I want to extend this class, but the program crashes with a NullPointerException in the base class. The base class doesn't set any content view, yet it looks up the buttons (e.g. the previous and next buttons), so those lookups return null. Please guide me in solving this problem.
Set the content view in the child class; the parent class can then access it.
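A minimal sketch of that fix (PlayerActivity, R.layout.player and initViews() are illustrative names): move the findViewById() wiring out of BaseClass.onCreate() into a protected initViews() method, then have the child supply the layout and call it:
public class PlayerActivity extends BaseClass {
    @Override
    public void onCreate(Bundle icicle) {
        super.onCreate(icicle);          // BaseClass must not call findViewById() here
        setContentView(R.layout.player); // the child supplies the content view
        initViews();                     // hypothetical protected BaseClass method
                                         // containing the findViewById() calls
    }
}
Until setContentView() is called, every findViewById() in the base class returns null, which is exactly what produces the NullPointerException.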