Setting an app to output ALL audio on a single channel - Android

I want to play all audio from one app on only one channel of a device, e.g. only on the left speaker.
I can do this with MediaPlayer playing an MP3 file, like this, to play only on the left speaker:
MediaPlayer mediaPlayer=MediaPlayer.create(this,R.raw.bitter_sweet);
mediaPlayer.setVolume(1.0f, 0f);
mediaPlayer.start();
But I want ALL audio from the app to play on the left speaker, i.e. if I add a WebView and load a YouTube URL, the audio should still play on one channel.

Using ExoPlayer I can achieve this by creating a custom AudioProcessor, adapted from the ChannelMappingAudioProcessor:
public class StereoVolumeProcessor implements AudioProcessor {
private int channelCount;
private int sampleRateHz;
private int[] pendingOutputChannels;
private boolean active;
private int[] outputChannels;
private ByteBuffer buffer;
private ByteBuffer outputBuffer;
private boolean inputEnded;
private float[] volume;
private static final int LEFT_SPEAKER = 0;
private static final int RIGHT_SPEAKER = 1;
public StereoVolumeProcessor() {
buffer = EMPTY_BUFFER;
outputBuffer = EMPTY_BUFFER;
channelCount = Format.NO_VALUE;
sampleRateHz = Format.NO_VALUE;
}
public void setChannelMap(int[] outputChannels) {
pendingOutputChannels = outputChannels;
}
@Override
public boolean configure(int sampleRateHz, int channelCount, @C.Encoding int encoding)
throws UnhandledFormatException {
if(volume == null){
throw new IllegalStateException("volume has not been set! Call setVolume(float left,float right)");
}
boolean outputChannelsChanged = !Arrays.equals(pendingOutputChannels, outputChannels);
outputChannels = pendingOutputChannels;
if (outputChannels == null) {
active = false;
return outputChannelsChanged;
}
if (encoding != C.ENCODING_PCM_16BIT) {
throw new UnhandledFormatException(sampleRateHz, channelCount, encoding);
}
if (!outputChannelsChanged && this.sampleRateHz == sampleRateHz
&& this.channelCount == channelCount) {
return false;
}
this.sampleRateHz = sampleRateHz;
this.channelCount = channelCount;
active = true;
return true;
}
@Override
public boolean isActive() {
return active;
}
@Override
public int getOutputChannelCount() {
return outputChannels == null ? channelCount : outputChannels.length;
}
@Override
public int getOutputEncoding() {
return C.ENCODING_PCM_16BIT;
}
/**
* Returns the sample rate of audio output by the processor, in hertz. The value may change as a
* result of calling {@link #configure(int, int, int)} and is undefined if the instance is not
* active.
*/
@Override
public int getOutputSampleRateHz() {
return sampleRateHz;
}
@Override
public void queueInput(ByteBuffer inputBuffer) {
int position = inputBuffer.position();
int limit = inputBuffer.limit();
int size = limit - position;
if (buffer.capacity() < size) {
buffer = ByteBuffer.allocateDirect(size).order(ByteOrder.nativeOrder());
} else {
buffer.clear();
}
if(isActive()){
int ch = 0;
for(int i = position;i<limit;i+=2){
short sample = (short) (inputBuffer.getShort(i)* volume[ch++]);
buffer.putShort(sample);
ch%=channelCount;
}
}else{
throw new IllegalStateException();
}
inputBuffer.position(limit);
buffer.flip();
outputBuffer = buffer;
}
@Override
public void queueEndOfStream() {
inputEnded = true;
}
/**
* Sets the volume of the right and left channels/speakers.
* The values are between 0.0 and 1.0.
*
* @param left
* @param right
*/
public void setVolume(float left,float right){
volume = new float[]{left,right};
}
public float getLeftVolume(){
return volume[LEFT_SPEAKER];
}
public float getRightVolume(){
return volume[RIGHT_SPEAKER];
}
@Override
public ByteBuffer getOutput() {
ByteBuffer outputBuffer = this.outputBuffer;
this.outputBuffer = EMPTY_BUFFER;
return outputBuffer;
}
@SuppressWarnings("ReferenceEquality")
@Override
public boolean isEnded() {
return inputEnded && outputBuffer == EMPTY_BUFFER;
}
@Override
public void flush() {
outputBuffer = EMPTY_BUFFER;
inputEnded = false;
}
@Override
public void reset() {
flush();
buffer = EMPTY_BUFFER;
channelCount = Format.NO_VALUE;
sampleRateHz = Format.NO_VALUE;
outputChannels = null;
active = false;
}
}
Volume can be set to a value between 0.0 and 1.0 on either the left, the right, or both speakers.
Use the processor as follows:
stereoVolumeProcessor = new StereoVolumeProcessor();
stereoVolumeProcessor.setChannelMap(new int[]{0,1});
stereoVolumeProcessor.setVolume(1,0);
RenderersFactory factory = new DefaultRenderersFactory(this){
/**
* Builds an array of {@link AudioProcessor}s that will process PCM audio before output.
*/
@Override
protected AudioProcessor[] buildAudioProcessors() {
return new AudioProcessor[] {stereoVolumeProcessor};
}
};
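To wire this up, the factory is passed when the player is created. A minimal sketch, assuming the ExoPlayer 2.x ExoPlayerFactory API used elsewhere on this page; mediaSource stands for whatever source you play:
SimpleExoPlayer player = ExoPlayerFactory.newSimpleInstance(factory, new DefaultTrackSelector());
player.prepare(mediaSource);
player.setPlayWhenReady(true);
// The processor keeps its volume settings, so you can remap at runtime,
// e.g. send all audio to the right speaker instead:
stereoVolumeProcessor.setVolume(0f, 1f);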


ExoPlayer HLS quality

I have ExoPlayer playing HLS videos, and I need to give the user the ability to change the video quality (auto/1080/720/480).
I figured out that playing around with AdaptiveTrackSelection.Factory does set the quality, but the setting only lasts until the object is killed.
I have also tried using MappingTrackSelector; I know that my video has 4 tracks, but I did not figure out how to select one of them manually. Will this selection make it work?
Thanks for any ideas.
MappingTrackSelector.MappedTrackInfo trackInfo = mDefaultTrackSelector.getCurrentMappedTrackInfo();
mDefaultTrackSelector.selectTracks(
//what should go here?
, trackInfo.getTrackGroups(4));
Regarding this thread: https://github.com/google/ExoPlayer/issues/2250, I managed to change the ExoPlayer video quality while the previous one is still playing, so it does not go into buffering instantly.
So I have the following classes:
public enum HLSQuality {
Auto, Quality1080, Quality720, Quality480, NoValue
}
class HLSUtil {
private HLSUtil() {
}
@NonNull
static HLSQuality getQuality(@NonNull Format format) {
switch (format.height) {
case 1080: {
return HLSQuality.Quality1080;
}
case 720: {
return HLSQuality.Quality720;
}
case 480:
case 486: {
return HLSQuality.Quality480;
}
default: {
return HLSQuality.NoValue;
}
}
}
static boolean isQualityPlayable(@NonNull Format format) {
return format.height <= 1080;
}
}
public class ClassAdaptiveTrackSelection extends BaseTrackSelection {
public static final class Factory implements TrackSelection.Factory {
private final BandwidthMeter bandwidthMeter;
private final int maxInitialBitrate = 2000000;
private final int minDurationForQualityIncreaseMs = 10000;
private final int maxDurationForQualityDecreaseMs = 25000;
private final int minDurationToRetainAfterDiscardMs = 25000;
private final float bandwidthFraction = 0.75f;
private final float bufferedFractionToLiveEdgeForQualityIncrease = 0.75f;
public Factory(BandwidthMeter bandwidthMeter) {
this.bandwidthMeter = bandwidthMeter;
}
@Override
public ClassAdaptiveTrackSelection createTrackSelection(TrackGroup group, int... tracks) {
Log.d(ClassAdaptiveTrackSelection.class.getSimpleName(), " Video player quality reset to Auto");
sHLSQuality = HLSQuality.Auto;
return new ClassAdaptiveTrackSelection(
group,
tracks,
bandwidthMeter,
maxInitialBitrate,
minDurationForQualityIncreaseMs,
maxDurationForQualityDecreaseMs,
minDurationToRetainAfterDiscardMs,
bandwidthFraction,
bufferedFractionToLiveEdgeForQualityIncrease
);
}
}
private static HLSQuality sHLSQuality = HLSQuality.Auto;
private final BandwidthMeter bandwidthMeter;
private final int maxInitialBitrate;
private final long minDurationForQualityIncreaseUs;
private final long maxDurationForQualityDecreaseUs;
private final long minDurationToRetainAfterDiscardUs;
private final float bandwidthFraction;
private final float bufferedFractionToLiveEdgeForQualityIncrease;
private int selectedIndex;
private int reason;
private ClassAdaptiveTrackSelection(TrackGroup group,
int[] tracks,
BandwidthMeter bandwidthMeter,
int maxInitialBitrate,
long minDurationForQualityIncreaseMs,
long maxDurationForQualityDecreaseMs,
long minDurationToRetainAfterDiscardMs,
float bandwidthFraction,
float bufferedFractionToLiveEdgeForQualityIncrease) {
super(group, tracks);
this.bandwidthMeter = bandwidthMeter;
this.maxInitialBitrate = maxInitialBitrate;
this.minDurationForQualityIncreaseUs = minDurationForQualityIncreaseMs * 1000L;
this.maxDurationForQualityDecreaseUs = maxDurationForQualityDecreaseMs * 1000L;
this.minDurationToRetainAfterDiscardUs = minDurationToRetainAfterDiscardMs * 1000L;
this.bandwidthFraction = bandwidthFraction;
this.bufferedFractionToLiveEdgeForQualityIncrease = bufferedFractionToLiveEdgeForQualityIncrease;
selectedIndex = determineIdealSelectedIndex(Long.MIN_VALUE);
reason = C.SELECTION_REASON_INITIAL;
}
@Override
public void updateSelectedTrack(long playbackPositionUs, long bufferedDurationUs, long availableDurationUs) {
long nowMs = SystemClock.elapsedRealtime();
// Stash the current selection, then make a new one.
int currentSelectedIndex = selectedIndex;
selectedIndex = determineIdealSelectedIndex(nowMs);
if (selectedIndex == currentSelectedIndex) {
return;
}
if (!isBlacklisted(currentSelectedIndex, nowMs)) {
// Revert back to the current selection if conditions are not suitable for switching.
Format currentFormat = getFormat(currentSelectedIndex);
Format selectedFormat = getFormat(selectedIndex);
if (selectedFormat.bitrate > currentFormat.bitrate
&& bufferedDurationUs < minDurationForQualityIncreaseUs(availableDurationUs)) {
// The selected track is a higher quality, but we have insufficient buffer to safely switch
// up. Defer switching up for now.
selectedIndex = currentSelectedIndex;
} else if (selectedFormat.bitrate < currentFormat.bitrate
&& bufferedDurationUs >= maxDurationForQualityDecreaseUs) {
// The selected track is a lower quality, but we have sufficient buffer to defer switching
// down for now.
selectedIndex = currentSelectedIndex;
}
}
// If we adapted, update the trigger.
if (selectedIndex != currentSelectedIndex) {
reason = C.SELECTION_REASON_ADAPTIVE;
}
}
@Override
public int getSelectedIndex() {
return selectedIndex;
}
@Override
public int getSelectionReason() {
return reason;
}
@Override
public Object getSelectionData() {
return null;
}
@Override
public int evaluateQueueSize(long playbackPositionUs, List<? extends MediaChunk> queue) {
if (queue.isEmpty()) {
return 0;
}
int queueSize = queue.size();
long bufferedDurationUs = queue.get(queueSize - 1).endTimeUs - playbackPositionUs;
if (bufferedDurationUs < minDurationToRetainAfterDiscardUs) {
return queueSize;
}
int idealSelectedIndex = determineIdealSelectedIndex(SystemClock.elapsedRealtime());
Format idealFormat = getFormat(idealSelectedIndex);
// If the chunks contain video, discard from the first SD chunk beyond
// minDurationToRetainAfterDiscardUs whose resolution and bitrate are both lower than the ideal
// track.
for (int i = 0; i < queueSize; i++) {
MediaChunk chunk = queue.get(i);
Format format = chunk.trackFormat;
long durationBeforeThisChunkUs = chunk.startTimeUs - playbackPositionUs;
if (durationBeforeThisChunkUs >= minDurationToRetainAfterDiscardUs
&& format.bitrate < idealFormat.bitrate
&& format.height != Format.NO_VALUE && format.height < 720
&& format.width != Format.NO_VALUE && format.width < 1280
&& format.height < idealFormat.height) {
return i;
}
}
return queueSize;
}
private int determineIdealSelectedIndex(long nowMs) {
if (sHLSQuality != HLSQuality.Auto) {
Log.d(ClassAdaptiveTrackSelection.class.getSimpleName(), " Video player quality seeking for " + String.valueOf(sHLSQuality));
for (int i = 0; i < length; i++) {
Format format = getFormat(i);
if (HLSUtil.getQuality(format) == sHLSQuality) {
Log.d(ClassAdaptiveTrackSelection.class.getSimpleName(), " Video player quality set to " + String.valueOf(sHLSQuality));
return i;
}
}
}
Log.d(ClassAdaptiveTrackSelection.class.getSimpleName(), " Video player quality seeking for auto quality " + String.valueOf(sHLSQuality));
long bitrateEstimate = bandwidthMeter.getBitrateEstimate();
long effectiveBitrate = bitrateEstimate == BandwidthMeter.NO_ESTIMATE
? maxInitialBitrate : (long) (bitrateEstimate * bandwidthFraction);
int lowestBitrateNonBlacklistedIndex = 0;
for (int i = 0; i < length; i++) {
if (nowMs == Long.MIN_VALUE || !isBlacklisted(i, nowMs)) {
Format format = getFormat(i);
if (format.bitrate <= effectiveBitrate && HLSUtil.isQualityPlayable(format)) {
Log.d(ClassAdaptiveTrackSelection.class.getSimpleName(), " Video player quality auto quality found " + String.valueOf(sHLSQuality));
return i;
} else {
lowestBitrateNonBlacklistedIndex = i;
}
}
}
return lowestBitrateNonBlacklistedIndex;
}
private long minDurationForQualityIncreaseUs(long availableDurationUs) {
boolean isAvailableDurationTooShort = availableDurationUs != C.TIME_UNSET
&& availableDurationUs <= minDurationForQualityIncreaseUs;
return isAvailableDurationTooShort
? (long) (availableDurationUs * bufferedFractionToLiveEdgeForQualityIncrease)
: minDurationForQualityIncreaseUs;
}
static void setHLSQuality(HLSQuality HLSQuality) {
sHLSQuality = HLSQuality;
}
}
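For completeness, this is roughly how the factory is installed and how a quality menu could drive it; a sketch using the ExoPlayer 2.x calls that appear elsewhere on this page (context and the player variable are placeholders):
DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter();
ClassAdaptiveTrackSelection.Factory videoTrackSelectionFactory = new ClassAdaptiveTrackSelection.Factory(bandwidthMeter);
DefaultTrackSelector trackSelector = new DefaultTrackSelector(videoTrackSelectionFactory);
SimpleExoPlayer player = ExoPlayerFactory.newSimpleInstance(context, trackSelector, new DefaultLoadControl());
// From the quality picker; the new value takes effect on the next
// updateSelectedTrack()/determineIdealSelectedIndex() pass:
ClassAdaptiveTrackSelection.setHLSQuality(HLSQuality.Quality720);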
Hope it helps someone.
You can also check out ExoPlayer_TrackSelection on GitHub for changing video quality manually.

Refresh media source in ExoPlayer

I am using ExoPlayer's ExtractorMediaSource for playing video in my Android app. I download media from a server, save it in a local database, and at a specific alarm time I play this media in ExoPlayer using ConcatenatingMediaSource. First I check whether every video file has been downloaded, and start the player with the downloaded media sources. If any video is not downloaded, I want to download it in the background and, once it is downloaded, add it to the already created playlist.
This is the sample code:
private void playAndUpdateVideo(ArrayList<String> mediaSourc) {
simpleExoPlayerView.setVisibility(View.VISIBLE);
simpleExoPlayerView.setDefaultArtwork(null);
mainHandler = new Handler();
DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter();
TrackSelection.Factory videoTrackSelectionFactory = new AdaptiveVideoTrackSelection.Factory(bandwidthMeter);
TrackSelector trackSelector = new DefaultTrackSelector( videoTrackSelectionFactory);
dataSourceFactory = new DefaultDataSourceFactory(context,
Util.getUserAgent(context, "com.cloveritservices.hype"), bandwidthMeter);
// 2. Create a default LoadControl
extractorsFactory = new DefaultExtractorsFactory();
LoadControl loadControl = new DefaultLoadControl();
// 3. Create the player
player = ExoPlayerFactory.newSimpleInstance(this, trackSelector, loadControl);
player.addListener(this);
//Set media controller
simpleExoPlayerView.setUseController(false);
simpleExoPlayerView.requestFocus();
// Bind the player to the view.
simpleExoPlayerView.setPlayer(player);
MediaSource[] mediaSources = new MediaSource[mediaSourc.size()];
for (int i=0;i<mediaSourc.size();i++)
{
mediaSources[i]= buildMediaSource(Uri.parse(mediaSourc.get(i)));
}
MediaSource mediaSource = mediaSources.length == 1 ? mediaSources[0]
: new ConcatenatingMediaSource(mediaSources);
LoopingMediaSource loopingSource = new LoopingMediaSource(mediaSource);
player.prepare(loopingSource);
SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(this);
boolean isChecked = settings.getBoolean("switch", false);
if (!isChecked)
player.setVolume(0f);
else player.setVolume(2f);
player.setPlayWhenReady(true);
}
And here I am checking whether each video file has been downloaded:
if (CommonUtils.isExternalStorageExistAndWritable()) {
for (int i = 0; i < videoUrl.size(); i++) {
if (!new File(Environment.getExternalStorageDirectory().toString() + Constants.PROFILE_VIDEO_FOLDER + CommonUtils.fileFromUrl(videoUrl.get(i))).exists() && !CommonUtils.currentlyDownloading(context,CommonUtils.fileFromUrl(videoUrl.get(i)))) {
downloadByDownloadManager(videoUrl.get(i), CommonUtils.fileFromUrl(videoUrl.get(i)));
if (flag==Constants.FLAG_PLAY){downloadFlag=true;}
}
}
} else {
Toast.makeText(getApplicationContext(), "SD Card not mounted.Please Mount SD Card", Toast.LENGTH_SHORT).show();
}
if (flag==Constants.FLAG_PLAY && !downloadFlag)
{
playAndUpdateVideo(videoUrl);
}
public void downloadByDownloadManager(String url, String fileName1) {
downloadUrl=url;
fileName=fileName1;
request = new DownloadManager.Request(Uri.parse(url));
request.setDescription("video file");
request.setTitle(fileName);
request.setNotificationVisibility(2);
request.allowScanningByMediaScanner();
request.setNotificationVisibility(DownloadManager.Request.VISIBILITY_HIDDEN);
request.setDestinationInExternalPublicDir(Constants.PROFILE_VIDEO_FOLDER, fileName);
DownloadManager manager = (DownloadManager) getSystemService(Context.DOWNLOAD_SERVICE);
manager.enqueue(request);
// get download service and enqueue file
}
Please help: how can I add a missing video file to the playlist later, once it has been downloaded?
To add new video files to your playlist, you need a new MediaSource implementation which can handle a resizable list of sources. This is fairly simple to achieve: the easiest way is to create a modified implementation of ConcatenatingMediaSource which uses lists instead of arrays to store and iterate over media sources. You then replace the ConcatenatingMediaSource in playAndUpdateVideo with the new list-based implementation. This will allow you to add to and remove from your playlist as you wish; you can append new media files when your download-complete listener is triggered. Here is the full class for a media source implementation using lists:
public final class DynamicMediaSource implements MediaSource {
private List<MediaSource> mediaSources;
private SparseArray<Timeline> timelines;
private SparseArray<Object> manifests;
private Map<MediaPeriod, Integer> sourceIndexByMediaPeriod;
private SparseArray<Boolean> duplicateFlags;
private boolean isRepeatOneAtomic;
private Listener listener;
private DynamicTimeline timeline;
/**
* @param mediaSources The {@link MediaSource}s to concatenate. It is valid for the same
* {@link MediaSource} instance to be present more than once in the array.
*/
public DynamicMediaSource(List<MediaSource> mediaSources) {
this(false, mediaSources);
}
/**
* @param isRepeatOneAtomic Whether the concatenated media source shall be treated as atomic
* (i.e., repeated in its entirety) when repeat mode is set to {@code Player.REPEAT_MODE_ONE}.
* @param mediaSources The {@link MediaSource}s to concatenate. It is valid for the same
* {@link MediaSource} instance to be present more than once in the array.
*/
public DynamicMediaSource(boolean isRepeatOneAtomic, List<MediaSource> mediaSources) {
for (MediaSource mediaSource : mediaSources) {
Assertions.checkNotNull(mediaSource);
}
this.mediaSources = mediaSources;
this.isRepeatOneAtomic = isRepeatOneAtomic;
timelines = new SparseArray<Timeline>();
manifests = new SparseArray<Object>();
sourceIndexByMediaPeriod = new HashMap<>();
duplicateFlags = buildDuplicateFlags(mediaSources);
}
@Override
public void prepareSource(ExoPlayer player, boolean isTopLevelSource, Listener listener) {
this.listener = listener;
for (int i = 0; i < mediaSources.size(); i++) {
if (!duplicateFlags.get(i)) {
final int index = i;
mediaSources.get(i).prepareSource(player, false, new Listener() {
@Override
public void onSourceInfoRefreshed(Timeline timeline, Object manifest) {
handleSourceInfoRefreshed(index, timeline, manifest);
}
});
}
}
}
@Override
public void maybeThrowSourceInfoRefreshError() throws IOException {
for (int i = 0; i < mediaSources.size(); i++) {
if (!duplicateFlags.get(i)) {
mediaSources.get(i).maybeThrowSourceInfoRefreshError();
}
}
}
@Override
public MediaPeriod createPeriod(MediaPeriodId id, Allocator allocator) {
int sourceIndex = timeline.getChildIndexByPeriodIndex(id.periodIndex);
MediaPeriodId periodIdInSource =
new MediaPeriodId(id.periodIndex - timeline.getFirstPeriodIndexByChildIndex(sourceIndex));
MediaPeriod mediaPeriod = mediaSources.get(sourceIndex).createPeriod(periodIdInSource, allocator);
sourceIndexByMediaPeriod.put(mediaPeriod, sourceIndex);
return mediaPeriod;
}
@Override
public void releasePeriod(MediaPeriod mediaPeriod) {
int sourceIndex = sourceIndexByMediaPeriod.get(mediaPeriod);
sourceIndexByMediaPeriod.remove(mediaPeriod);
mediaSources.get(sourceIndex).releasePeriod(mediaPeriod);
}
@Override
public void releaseSource() {
for (int i = 0; i < mediaSources.size(); i++) {
if (!duplicateFlags.get(i)) {
mediaSources.get(i).releaseSource();
}
}
}
private void handleSourceInfoRefreshed(int sourceFirstIndex, Timeline sourceTimeline,
Object sourceManifest) {
// Set the timeline and manifest.
timelines.put(sourceFirstIndex, sourceTimeline);
manifests.put(sourceFirstIndex, sourceManifest);
// Also set the timeline and manifest for any duplicate entries of the same source.
for (int i = sourceFirstIndex + 1; i < mediaSources.size(); i++) {
if (mediaSources.get(i).equals(mediaSources.get(sourceFirstIndex))) {
timelines.put(i, sourceTimeline);
manifests.put(i, sourceManifest);
}
}
for(int i= 0; i<mediaSources.size(); i++){
if(timelines.get(i) == null){
// Don't invoke the listener until all sources have timelines.
return;
}
}
timeline = new DynamicTimeline(timelines, isRepeatOneAtomic);
listener.onSourceInfoRefreshed(timeline, new ArrayList(asList(manifests)));
}
private static SparseArray<Boolean> buildDuplicateFlags(List<MediaSource> mediaSources) {
SparseArray<Boolean> duplicateFlags = new SparseArray<Boolean>();
IdentityHashMap<MediaSource, Void> sources = new IdentityHashMap<>(mediaSources.size());
for (int i = 0; i < mediaSources.size(); i++) {
MediaSource source = mediaSources.get(i);
if (!sources.containsKey(source)) {
sources.put(source, null);
duplicateFlags.append(i,false);
} else {
duplicateFlags.append(i,true);
}
}
return duplicateFlags;
}
/**
* A {@link Timeline} that is the concatenation of one or more {@link Timeline}s.
*/
public static final class DynamicTimeline extends AbstractConcatenatedTimeline {
private final SparseArray<Timeline> timelines;
private final int[] sourcePeriodOffsets;
private final int[] sourceWindowOffsets;
private final boolean isRepeatOneAtomic;
public DynamicTimeline(SparseArray<Timeline> timelines, boolean isRepeatOneAtomic) {
super(timelines.size());
int[] sourcePeriodOffsets = new int[timelines.size()];
int[] sourceWindowOffsets = new int[timelines.size()];
long periodCount = 0;
int windowCount = 0;
for (int i = 0; i < timelines.size(); i++) {
Timeline timeline = timelines.get(i);
periodCount += timeline.getPeriodCount();
Assertions.checkState(periodCount <= Integer.MAX_VALUE,
"ConcatenatingMediaSource children contain too many periods");
sourcePeriodOffsets[i] = (int) periodCount;
windowCount += timeline.getWindowCount();
sourceWindowOffsets[i] = windowCount;
}
this.timelines = timelines;
this.sourcePeriodOffsets = sourcePeriodOffsets;
this.sourceWindowOffsets = sourceWindowOffsets;
this.isRepeatOneAtomic = isRepeatOneAtomic;
}
@Override
public int getWindowCount() {
return sourceWindowOffsets[sourceWindowOffsets.length - 1];
}
@Override
public int getPeriodCount() {
return sourcePeriodOffsets[sourcePeriodOffsets.length - 1];
}
@Override
public int getNextWindowIndex(int windowIndex, @Player.RepeatMode int repeatMode) {
if (isRepeatOneAtomic && repeatMode == Player.REPEAT_MODE_ONE) {
repeatMode = Player.REPEAT_MODE_ALL;
}
return super.getNextWindowIndex(windowIndex, repeatMode);
}
@Override
public int getPreviousWindowIndex(int windowIndex, @Player.RepeatMode int repeatMode) {
if (isRepeatOneAtomic && repeatMode == Player.REPEAT_MODE_ONE) {
repeatMode = Player.REPEAT_MODE_ALL;
}
return super.getPreviousWindowIndex(windowIndex, repeatMode);
}
@Override
public int getChildIndexByPeriodIndex(int periodIndex) {
return Util.binarySearchFloor(sourcePeriodOffsets, periodIndex, true, false) + 1;
}
@Override
protected int getChildIndexByWindowIndex(int windowIndex) {
return Util.binarySearchFloor(sourceWindowOffsets, windowIndex, true, false) + 1;
}
@Override
protected int getChildIndexByChildUid(Object childUid) {
if (!(childUid instanceof Integer)) {
return C.INDEX_UNSET;
}
return (Integer) childUid;
}
@Override
protected Timeline getTimelineByChildIndex(int childIndex) {
return timelines.get(childIndex);
}
@Override
public int getFirstPeriodIndexByChildIndex(int childIndex) {
return childIndex == 0 ? 0 : sourcePeriodOffsets[childIndex - 1];
}
@Override
protected int getFirstWindowIndexByChildIndex(int childIndex) {
return childIndex == 0 ? 0 : sourceWindowOffsets[childIndex - 1];
}
@Override
protected Object getChildUidByChildIndex(int childIndex) {
return childIndex;
}
}
}
To check when the file is downloaded and set a download completed listener, you can use a BroadcastReceiver. A detailed example of how to set up the BroadcastReceiver is provided here.
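For illustration, a minimal sketch of such a receiver, assuming the list-backed source exposes an append method like the addSource(MediaSource) used later on this page; dynamicMediaSource, buildMediaSource and uriForDownload are placeholders:
private final BroadcastReceiver onDownloadComplete = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
// Fired once per completed DownloadManager request.
long id = intent.getLongExtra(DownloadManager.EXTRA_DOWNLOAD_ID, -1);
// Append the newly downloaded file to the running playlist.
dynamicMediaSource.addSource(buildMediaSource(uriForDownload(id)));
}
};
// Register for completion events, e.g. in onCreate():
registerReceiver(onDownloadComplete, new IntentFilter(DownloadManager.ACTION_DOWNLOAD_COMPLETE));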

Dynamic playlists with ExoPlayer 2

I'd like to use ExoPlayer 2 with playlists, with the possibility to dynamically change the tracks (add or remove them from the playlist) and change the loop settings.
Since ConcatenatingMediaSource uses static arrays (not lists), I'm implementing a DynamicMediaSource, like the concatenating one but with lists instead of arrays and one more method, addSource, to add a media source to the list:
public void addSource(MediaSource mediaSource) {
this.mediaSources.add(mediaSource);
duplicateFlags = buildDuplicateFlags(this.mediaSources);
if(!mediaSources.isEmpty())
prepareSource(mediaSources.size() -1);
else
prepareSource(0);
}
When I invoke addSource
MediaSource ms = buildMediaSource(mynewuri, null);
mediaSource.addSource(ms);
the track is added to the list, but it seems something is missing, because I always get an IndexOutOfBoundsException in the createPeriod method.
In createPeriod, the call
mediaSources.get(sourceIndex)...
is trying to access index = mediaSources.size().
Can you help me?
I eventually managed it.
It was my fault during the conversion from arrays to lists.
I had to use SparseArrays for the timelines and manifests, and everything began to work.
In the DynamicMediaSource simply set the following types:
private final List<MediaSource> mediaSources;
private final SparseArray<Timeline> timelines;
private final SparseArray<Object> manifests;
private final Map<MediaPeriod, Integer> sourceIndexByMediaPeriod;
private SparseArray<Boolean> duplicateFlags;
You have to use the sparse arrays to set the proper values for the timelines and manifests in the method:
private void handleSourceInfoRefreshed(int sourceFirstIndex, Timeline sourceTimeline,
Object sourceManifest) {
// Set the timeline and manifest.
timelines.put(sourceFirstIndex, sourceTimeline);
manifests.put(sourceFirstIndex, sourceManifest);
// Also set the timeline and manifest for any duplicate entries of the same source.
for (int i = sourceFirstIndex + 1; i < mediaSources.size(); i++) {
if (mediaSources.get(i).equals(mediaSources.get(sourceFirstIndex))) {
timelines.put(i, sourceTimeline);
manifests.put(i, sourceManifest);
}
}
for(int i= 0; i<mediaSources.size(); i++){
if(timelines.get(i) == null){
// Don't invoke the listener until all sources have timelines.
return;
}
}
timeline = new DynamicTimeline(new ArrayList(asList(timelines)));
listener.onSourceInfoRefreshed(timeline, new ArrayList(asList(manifests)));
}
Here is the complete code of the DynamicMediaSource class:
public final class DynamicMediaSource implements MediaSource {
private static final String TAG = "DynamicSource";
private final List<MediaSource> mediaSources;
private final List<Timeline> timelines;
private final List<Object> manifests;
private final Map<MediaPeriod, Integer> sourceIndexByMediaPeriod;
private SparseArray<Boolean> duplicateFlags;
private Listener listener;
private DynamicTimeline timeline;
/**
* @param mediaSources The {@link MediaSource}s to concatenate. It is valid for the same
* {@link MediaSource} instance to be present more than once in the array.
*/
public DynamicMediaSource(MediaSource... mediaSources) {
this.mediaSources = new ArrayList<MediaSource>(Arrays.asList(mediaSources));
timelines = new ArrayList<Timeline>();
manifests = new ArrayList<Object>();
sourceIndexByMediaPeriod = new HashMap<>();
duplicateFlags = buildDuplicateFlags(this.mediaSources);
}
public void addSource(MediaSource mediaSource) {
this.mediaSources.add(mediaSource);
duplicateFlags = buildDuplicateFlags(this.mediaSources);
/*if(!mediaSources.isEmpty())
prepareSource(mediaSources.size() -1);
else
prepareSource(0);*/
}
@Override
public void prepareSource(Listener listener) {
this.listener = listener;
for (int i = 0; i < mediaSources.size(); i++) {
prepareSource(i);
/*if (duplicateFlags.get(i) == null || !duplicateFlags.get(i)) {
final int index = i;
mediaSources.get(i).prepareSource(new Listener() {
@Override
public void onSourceInfoRefreshed(Timeline timeline, Object manifest) {
handleSourceInfoRefreshed(index, timeline, manifest);
}
});
}*/
}
}
private void prepareSource(int sourceindex) {
if (duplicateFlags.get(sourceindex) == null || !duplicateFlags.get(sourceindex)) {
final int index = sourceindex;
mediaSources.get(sourceindex).prepareSource(new Listener() {
@Override
public void onSourceInfoRefreshed(Timeline timeline, Object manifest) {
handleSourceInfoRefreshed(index, timeline, manifest);
}
});
}
}
@Override
public void maybeThrowSourceInfoRefreshError() throws IOException {
for (int i = 0; i < mediaSources.size(); i++) {
if (duplicateFlags.get(i) == null || !duplicateFlags.get(i)) {
mediaSources.get(i).maybeThrowSourceInfoRefreshError();
}
}
}
@Override
public MediaPeriod createPeriod(int index, Callback callback, Allocator allocator,
long positionUs) {
int sourceIndex = timeline.getSourceIndexForPeriod(index);
int periodIndexInSource = index - timeline.getFirstPeriodIndexInSource(sourceIndex);
MediaPeriod mediaPeriod = mediaSources.get(sourceIndex).createPeriod(periodIndexInSource, callback,
allocator, positionUs);
sourceIndexByMediaPeriod.put(mediaPeriod, sourceIndex);
return mediaPeriod;
}
@Override
public void releasePeriod(MediaPeriod mediaPeriod) {
int sourceIndex = sourceIndexByMediaPeriod.get(mediaPeriod);
sourceIndexByMediaPeriod.remove(mediaPeriod);
mediaSources.get(sourceIndex).releasePeriod(mediaPeriod);
}
@Override
public void releaseSource() {
for (int i = 0; i < mediaSources.size(); i++) {
if (duplicateFlags.get(i) == null || !duplicateFlags.get(i)) {
mediaSources.get(i).releaseSource();
}
}
}
private void handleSourceInfoRefreshed(int sourceFirstIndex, Timeline sourceTimeline,
Object sourceManifest) {
// Set the timeline and manifest.
timelines.add(sourceFirstIndex, sourceTimeline);
manifests.add(sourceFirstIndex, sourceManifest);
// Also set the timeline and manifest for any duplicate entries of the same source.
for (int i = sourceFirstIndex + 1; i < mediaSources.size(); i++) {
if (mediaSources.get(i).equals(mediaSources.get(sourceFirstIndex))) {
timelines.add(i, sourceTimeline);
manifests.add(i, sourceManifest);
}
}
for (Timeline timeline : timelines) {
if (timeline == null) {
// Don't invoke the listener until all sources have timelines.
return;
}
}
timeline = new DynamicTimeline(new ArrayList(timelines));
listener.onSourceInfoRefreshed(timeline, new ArrayList(manifests));
}
private static SparseArray<Boolean> buildDuplicateFlags(List<MediaSource> mediaSources) {
SparseArray<Boolean> duplicateFlags = new SparseArray<Boolean>();
IdentityHashMap<MediaSource, Void> sources = new IdentityHashMap<>(mediaSources.size());
for (int i = 0; i < mediaSources.size(); i++) {
MediaSource mediaSource = mediaSources.get(i);
if (!sources.containsKey(mediaSource)) {
sources.put(mediaSource, null);
} else {
duplicateFlags.setValueAt(i, true);
}
}
return duplicateFlags;
}
/**
* A {@link Timeline} that is the concatenation of one or more {@link Timeline}s.
*/
private static final class DynamicTimeline extends Timeline {
private final List<Timeline> timelines;
private final List<Integer> sourcePeriodOffsets;
private final List<Integer> sourceWindowOffsets;
public DynamicTimeline(List<Timeline> timelines) {
List<Integer> sourcePeriodOffsets = new ArrayList<>();
List<Integer> sourceWindowOffsets = new ArrayList<>();
int periodCount = 0;
int windowCount = 0;
for (Timeline timeline : timelines) {
periodCount += timeline.getPeriodCount();
windowCount += timeline.getWindowCount();
sourcePeriodOffsets.add(periodCount);
sourceWindowOffsets.add(windowCount);
}
this.timelines = timelines;
this.sourcePeriodOffsets = sourcePeriodOffsets;
this.sourceWindowOffsets = sourceWindowOffsets;
}
@Override
public int getWindowCount() {
return sourceWindowOffsets.get(sourceWindowOffsets.size() - 1);
}
@Override
public Window getWindow(int windowIndex, Window window, boolean setIds) {
int sourceIndex = getSourceIndexForWindow(windowIndex);
int firstWindowIndexInSource = getFirstWindowIndexInSource(sourceIndex);
int firstPeriodIndexInSource = getFirstPeriodIndexInSource(sourceIndex);
timelines.get(sourceIndex).getWindow(windowIndex - firstWindowIndexInSource, window, setIds);
window.firstPeriodIndex += firstPeriodIndexInSource;
window.lastPeriodIndex += firstPeriodIndexInSource;
return window;
}
@Override
public int getPeriodCount() {
return sourcePeriodOffsets.get(sourcePeriodOffsets.size() - 1);
}
@Override
public Period getPeriod(int periodIndex, Period period, boolean setIds) {
int sourceIndex = getSourceIndexForPeriod(periodIndex);
int firstWindowIndexInSource = getFirstWindowIndexInSource(sourceIndex);
int firstPeriodIndexInSource = getFirstPeriodIndexInSource(sourceIndex);
timelines.get(sourceIndex).getPeriod(periodIndex - firstPeriodIndexInSource, period, setIds);
period.windowIndex += firstWindowIndexInSource;
if (setIds) {
period.uid = Pair.create(sourceIndex, period.uid);
}
return period;
}
@Override
public int getIndexOfPeriod(Object uid) {
if (!(uid instanceof Pair)) {
return C.INDEX_UNSET;
}
Pair<?, ?> sourceIndexAndPeriodId = (Pair<?, ?>) uid;
if (!(sourceIndexAndPeriodId.first instanceof Integer)) {
return C.INDEX_UNSET;
}
int sourceIndex = (Integer) sourceIndexAndPeriodId.first;
Object periodId = sourceIndexAndPeriodId.second;
if (sourceIndex < 0 || sourceIndex >= timelines.size()) {
return C.INDEX_UNSET;
}
int periodIndexInSource = timelines.get(sourceIndex).getIndexOfPeriod(periodId);
return periodIndexInSource == C.INDEX_UNSET ? C.INDEX_UNSET
: getFirstPeriodIndexInSource(sourceIndex) + periodIndexInSource;
}
private int getSourceIndexForPeriod(int periodIndex) {
return Util.binarySearchFloor(sourcePeriodOffsets, periodIndex, true, false) + 1;
}
private int getFirstPeriodIndexInSource(int sourceIndex) {
return sourceIndex == 0 ? 0 : sourcePeriodOffsets.get(sourceIndex - 1);
}
private int getSourceIndexForWindow(int windowIndex) {
return Util.binarySearchFloor(sourceWindowOffsets, windowIndex, true, false) + 1;
}
private int getFirstWindowIndexInSource(int sourceIndex) {
return sourceIndex == 0 ? 0 : sourceWindowOffsets.get(sourceIndex - 1);
}
}
}
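For context, this is roughly how the class is meant to be driven (a sketch; buildMediaSource and the URIs are placeholders, and note that a source added after prepare() still needs to be prepared, e.g. by re-enabling the prepareSource call that is commented out inside addSource):
DynamicMediaSource playlist = new DynamicMediaSource(buildMediaSource(firstUri), buildMediaSource(secondUri));
player.prepare(playlist);
// Later, while playing: append one more track. Once its timeline arrives,
// handleSourceInfoRefreshed() pushes the enlarged timeline to the player.
playlist.addSource(buildMediaSource(extraUri));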

OpenCV4Android MediaRecorder.getSurface() no View display

I want to use the OpenCV4Android sample Tutorial1 to record video.
I used this solution, but my smartphone does not display anything on screen.
Just a black view. Can anyone help me?
Here is my code.
Tutorial1Activity
public class Tutorial1Activity extends Activity implements CvCameraViewListener2 {
private static final String TAG = "OCVSample::Activity";
private CameraBridgeViewBase mOpenCvCameraView;
private boolean mIsJavaCamera = true;
private MenuItem mItemSwitchCamera = null;
//*****************writetoSD***********************//
public FileWriter fw; // = new FileWriter(folder_path, false);
public BufferedWriter bw;// = new BufferedWriter(fw);
boolean first_in = true;
String showTimefile = null;
String showTime = null;
String folder_path = Environment.getExternalStorageDirectory().getAbsolutePath();
String folder_name = "Face Detection Signal";
String folder_pathforfile = null;
SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd HH_mm_ss");
SimpleDateFormat sdf_fileintxt = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
//*****************writetoSD***********************//
//---------------MediaRecorder-------------------//
public MediaRecorder mediaRecorder;
Button bt_Record;
boolean isRecord = false;
Handler mThreadHandler;
HandlerThread mHandlerThread;
//---------------MediaRecorder-------------------//
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
mOpenCvCameraView.enableView();
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
public Tutorial1Activity() {
Log.i(TAG, "Instantiated new " + this.getClass());
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.tutorial1_surface_view);
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.tutorial1_activity_java_surface_view);
mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
mOpenCvCameraView.setCvCameraViewListener(this);
//---------------------------------------------------------//
bt_Record = (Button)findViewById(R.id.bt_recorder);
folder_pathforfile = folder_path + File.separator + folder_name
+ File.separator + "opencv" + "_";
CreateSDfolder();
ongetTime();
//---------------------------------------------------------//
}
@Override
public void onPause()
{
super.onPause();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
public void onResume()
{
super.onResume();
if (!OpenCVLoader.initDebug()) {
Log.d(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_0_0, this, mLoaderCallback);
} else {
Log.d(TAG, "OpenCV library found inside package. Using it!");
mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
}
}
public void onDestroy() {
super.onDestroy();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
public void onCameraViewStarted(int width, int height) {
}
public void onCameraViewStopped() {
}
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
return inputFrame.rgba();
}
private void CreateSDfolder() {
String filefolderpath = folder_path + File.separator + folder_name;
File dir = new File(filefolderpath);
if (!dir.exists()){
Log.e("folder", "not exist");
try{
//dir.createNewFile(true);
dir.mkdir();
Log.e("folder", "creat exist");
}catch(Exception e){
Log.e("folder", "creat not exist");
e.printStackTrace();
}
}
else{
Log.e("folder", "exist");
}
}
private void ongetTime() {
Date dt=new Date();
showTime=sdf_fileintxt.format(dt);
showTimefile =sdf.format(dt);
}
private void WritetoSD(String data) {
try {
fw = new FileWriter(folder_pathforfile + showTimefile+".txt", true);
bw = new BufferedWriter(fw);
if (first_in == true) {
first_in = false;
bw.append(showTime);
bw.newLine();
}
bw.append(data);
bw.newLine();
bw.flush();
bw.close();
} catch (IOException e) {
Log.e("WriteToSD", "Write To SD ERROR");
e.printStackTrace();
}
}
public void onRecordSignal (View v){
if(!isRecord){
isRecord = true;
Log.e(TAG, "button click " + isRecord);
bt_Record.setText("Stop");
//new MediaPrepareTask().execute(null, null, null);
if (prepareMediaRecorder()) {
// Camera is available and unlocked, MediaRecorder is prepared,
// now you can start recording
Log.e("debug_mediarecorder", "prepareMediaRecorder in if");
mOpenCvCameraView.setRecorder(mediaRecorder);
mediaRecorder.start();
} else {
// prepare didn't work, release the camera
Log.e("debug_mediarecorder", "prepareMediaRecorder in else");
// mediaRecorder.stop();
releaseMediaRecorder();
}
} else{
isRecord = false;
Log.e(TAG, "button click " + isRecord);
bt_Record.setText("Record");
try {
if(mediaRecorder != null)
mediaRecorder.stop(); // stop the recording
else
Log.e(TAG,"onRecordSignal mediaRecorder is null");
} catch (RuntimeException e) {
// RuntimeException is thrown when stop() is called immediately after start().
// In this case the output file is not properly constructed and should be deleted.
Log.d(TAG, "RuntimeException: stop() is called immediately after start()");
//noinspection ResultOfMethodCallIgnored
}
releaseMediaRecorder(); // release the MediaRecorder object
}
}
public void releaseMediaRecorder() {
Log.e("debug","releaseMediaRecorder");
if (mediaRecorder != null) {
mediaRecorder.reset(); // clear recorder configuration
mediaRecorder.release(); // release the recorder object
mediaRecorder = null;
JavaCameraView.mCamera.lock();
mOpenCvCameraView.releaseRecord();
}
}
private String recordfilepath() {
// TODO Auto-generated method stub
ongetTime();
File sddir = Environment.getExternalStorageDirectory();
File vrdir = new File(sddir, folder_name);
File file = new File(vrdir, showTimefile+"_.mp4");
String filepath = file.getAbsolutePath();
Log.e("debug mediarecorder", filepath);
return filepath;
}
public boolean prepareMediaRecorder() {
// TODO Auto-generated method stub
Log.e("debug mediarecorder", "in prepareMediaRecorder");
mediaRecorder = new MediaRecorder();
try {
JavaCameraView.mCamera.lock();
JavaCameraView.mCamera.unlock();
}catch (RuntimeException e){
Log.e("debug mediarecorder","JavaCameraView.mCamera.unlock() fail");
}
/*mediaRecorder.reset();
mediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
mediaRecorder.setProfile(CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH));
//mediaRecorder.setPreviewDisplay(CameraBridgeViewBase.mSurfaceHolder.getSurface());
mediaRecorder.setOutputFile(recordfilepath());
//mediaRecorder.setOnInfoListener((MediaRecorder.OnInfoListener) this);
//mediaRecorder.setOnErrorListener((MediaRecorder.OnErrorListener) this);*/
mediaRecorder.reset();
mediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
mediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
CamcorderProfile cpHigh = CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH);
mediaRecorder.setProfile(cpHigh);
//mediaRecorder.setOutputFile("out.mp4");
mediaRecorder.setOutputFile(recordfilepath());
mediaRecorder.setVideoSize(mOpenCvCameraView.mFrameWidth, mOpenCvCameraView.mFrameHeight);
//mediaRecorder.setOnInfoListener(this);
//mediaRecorder.setOnErrorListener(this);
try {
mediaRecorder.prepare();
} catch (IllegalStateException e) {
Log.e("debug mediarecorder", "not prepare");
releaseMediaRecorder();
return false;
} catch (IOException e) {
Log.e("debug mediarecorder", "not prepare IOException");
//releaseMediaRecorder();
}
return true;
}
}
CameraBridgeViewBase
public abstract class CameraBridgeViewBase extends SurfaceView implements SurfaceHolder.Callback {
private static final String TAG = "CameraBridge";
private static final int MAX_UNSPECIFIED = -1;
private static final int STOPPED = 0;
private static final int STARTED = 1;
private int mState = STOPPED;
private Bitmap mCacheBitmap;
private CvCameraViewListener2 mListener;
private boolean mSurfaceExist;
private Object mSyncObject = new Object();
public int mFrameWidth;
public int mFrameHeight;
protected int mMaxHeight;
protected int mMaxWidth;
protected float mScale = 0;
protected int mPreviewFormat = RGBA;
protected int mCameraIndex = CAMERA_ID_ANY;
protected boolean mEnabled;
protected FpsMeter mFpsMeter = null;
public static final int CAMERA_ID_ANY = -1;
public static final int CAMERA_ID_BACK = 99;
public static final int CAMERA_ID_FRONT = 98;
public static final int RGBA = 1;
public static final int GRAY = 2;
//-------------------------------------//
protected MediaRecorder mRecorder;
protected Surface mSurface = null;
public void setRecorder(MediaRecorder rec) {
mRecorder = rec;
//Log.e(TAG,mRecorder.toString());
if (mRecorder != null) {
mSurface = mRecorder.getSurface();
Log.e(TAG,"mRecorder is not null");
Log.e(TAG,"mSurface = "+mSurface.toString());
}
else{
Log.e(TAG,"mRecorder is null");
}
}
public void releaseRecord(){
mSurface.release();
}
//-------------------------------------//
public CameraBridgeViewBase(Context context, int cameraId) {
super(context);
mCameraIndex = cameraId;
getHolder().addCallback(this);
mMaxWidth = MAX_UNSPECIFIED;
mMaxHeight = MAX_UNSPECIFIED;
}
public CameraBridgeViewBase(Context context, AttributeSet attrs) {
super(context, attrs);
int count = attrs.getAttributeCount();
Log.d(TAG, "Attr count: " + Integer.valueOf(count));
TypedArray styledAttrs = getContext().obtainStyledAttributes(attrs, R.styleable.CameraBridgeViewBase);
if (styledAttrs.getBoolean(R.styleable.CameraBridgeViewBase_show_fps, false))
enableFpsMeter();
mCameraIndex = styledAttrs.getInt(R.styleable.CameraBridgeViewBase_camera_id, -1);
getHolder().addCallback(this);
mMaxWidth = MAX_UNSPECIFIED;
mMaxHeight = MAX_UNSPECIFIED;
styledAttrs.recycle();
}
/**
* Sets the camera index
* @param cameraIndex new camera index
*/
public void setCameraIndex(int cameraIndex) {
this.mCameraIndex = cameraIndex;
}
public interface CvCameraViewListener {
/**
* This method is invoked when camera preview has started. After this method is invoked
* the frames will start to be delivered to client via the onCameraFrame() callback.
* @param width - the width of the frames that will be delivered
* @param height - the height of the frames that will be delivered
*/
public void onCameraViewStarted(int width, int height);
/**
* This method is invoked when camera preview has been stopped for some reason.
* No frames will be delivered via onCameraFrame() callback after this method is called.
*/
public void onCameraViewStopped();
/**
* This method is invoked when delivery of the frame needs to be done.
* The returned values - is a modified frame which needs to be displayed on the screen.
* TODO: pass the parameters specifying the format of the frame (BPP, YUV or RGB and etc)
*/
public Mat onCameraFrame(Mat inputFrame);
}
public interface CvCameraViewListener2 {
/**
* This method is invoked when camera preview has started. After this method is invoked
* the frames will start to be delivered to client via the onCameraFrame() callback.
* @param width - the width of the frames that will be delivered
* @param height - the height of the frames that will be delivered
*/
public void onCameraViewStarted(int width, int height);
/**
* This method is invoked when camera preview has been stopped for some reason.
* No frames will be delivered via onCameraFrame() callback after this method is called.
*/
public void onCameraViewStopped();
/**
* This method is invoked when delivery of the frame needs to be done.
* The returned values - is a modified frame which needs to be displayed on the screen.
* TODO: pass the parameters specifying the format of the frame (BPP, YUV or RGB and etc)
*/
public Mat onCameraFrame(CvCameraViewFrame inputFrame);
};
protected class CvCameraViewListenerAdapter implements CvCameraViewListener2 {
public CvCameraViewListenerAdapter(CvCameraViewListener oldStypeListener) {
mOldStyleListener = oldStypeListener;
}
public void onCameraViewStarted(int width, int height) {
mOldStyleListener.onCameraViewStarted(width, height);
}
public void onCameraViewStopped() {
mOldStyleListener.onCameraViewStopped();
}
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
Mat result = null;
switch (mPreviewFormat) {
case RGBA:
result = mOldStyleListener.onCameraFrame(inputFrame.rgba());
break;
case GRAY:
result = mOldStyleListener.onCameraFrame(inputFrame.gray());
break;
default:
Log.e(TAG, "Invalid frame format! Only RGBA and Gray Scale are supported!");
};
return result;
}
public void setFrameFormat(int format) {
mPreviewFormat = format;
}
private int mPreviewFormat = RGBA;
private CvCameraViewListener mOldStyleListener;
};
/**
* This class interface is abstract representation of single frame from camera for onCameraFrame callback
* Attention: Do not use objects, that represents this interface out of onCameraFrame callback!
*/
public interface CvCameraViewFrame {
/**
* This method returns RGBA Mat with frame
*/
public Mat rgba();
/**
* This method returns single channel gray scale Mat with frame
*/
public Mat gray();
};
public void surfaceChanged(SurfaceHolder arg0, int arg1, int arg2, int arg3) {
Log.d(TAG, "call surfaceChanged event");
synchronized(mSyncObject) {
if (!mSurfaceExist) {
mSurfaceExist = true;
checkCurrentState();
} else {
/** Surface changed. We need to stop camera and restart with new parameters */
/* Pretend that old surface has been destroyed */
mSurfaceExist = false;
checkCurrentState();
/* Now use new surface. Say we have it now */
mSurfaceExist = true;
checkCurrentState();
}
}
}
public void surfaceCreated(SurfaceHolder holder) {
/* Do nothing. Wait until surfaceChanged delivered */
}
public void surfaceDestroyed(SurfaceHolder holder) {
synchronized(mSyncObject) {
mSurfaceExist = false;
checkCurrentState();
}
}
/**
* This method is provided for clients, so they can enable the camera connection.
* The actual onCameraViewStarted callback will be delivered only after both this method is called and surface is available
*/
public void enableView() {
synchronized(mSyncObject) {
mEnabled = true;
checkCurrentState();
}
}
/**
* This method is provided for clients, so they can disable camera connection and stop
* the delivery of frames even though the surface view itself is not destroyed and still stays on the scren
*/
public void disableView() {
synchronized(mSyncObject) {
mEnabled = false;
checkCurrentState();
}
}
/**
* This method enables label with fps value on the screen
*/
public void enableFpsMeter() {
if (mFpsMeter == null) {
mFpsMeter = new FpsMeter();
mFpsMeter.setResolution(mFrameWidth, mFrameHeight);
}
}
public void disableFpsMeter() {
mFpsMeter = null;
}
/**
*
* @param listener
*/
public void setCvCameraViewListener(CvCameraViewListener2 listener) {
mListener = listener;
}
public void setCvCameraViewListener(CvCameraViewListener listener) {
CvCameraViewListenerAdapter adapter = new CvCameraViewListenerAdapter(listener);
adapter.setFrameFormat(mPreviewFormat);
mListener = adapter;
}
/**
* This method sets the maximum size that camera frame is allowed to be. When selecting
* size - the biggest size which less or equal the size set will be selected.
* As an example - we set setMaxFrameSize(200,200) and we have 176x152 and 320x240 sizes. The
* preview frame will be selected with 176x152 size.
* This method is useful when need to restrict the size of preview frame for some reason (for example for video recording)
* @param maxWidth - the maximum width allowed for camera frame.
* @param maxHeight - the maximum height allowed for camera frame
*/
public void setMaxFrameSize(int maxWidth, int maxHeight) {
mMaxWidth = maxWidth;
mMaxHeight = maxHeight;
}
public void SetCaptureFormat(int format)
{
mPreviewFormat = format;
if (mListener instanceof CvCameraViewListenerAdapter) {
CvCameraViewListenerAdapter adapter = (CvCameraViewListenerAdapter) mListener;
adapter.setFrameFormat(mPreviewFormat);
}
}
/**
* Called when mSyncObject lock is held
*/
private void checkCurrentState() {
Log.d(TAG, "call checkCurrentState");
int targetState;
if (mEnabled && mSurfaceExist && getVisibility() == VISIBLE) {
targetState = STARTED;
} else {
targetState = STOPPED;
}
if (targetState != mState) {
/* The state change detected. Need to exit the current state and enter target state */
processExitState(mState);
mState = targetState;
processEnterState(mState);
}
}
private void processEnterState(int state) {
Log.d(TAG, "call processEnterState: " + state);
switch(state) {
case STARTED:
onEnterStartedState();
if (mListener != null) {
mListener.onCameraViewStarted(mFrameWidth, mFrameHeight);
}
break;
case STOPPED:
onEnterStoppedState();
if (mListener != null) {
mListener.onCameraViewStopped();
}
break;
};
}
private void processExitState(int state) {
Log.d(TAG, "call processExitState: " + state);
switch(state) {
case STARTED:
onExitStartedState();
break;
case STOPPED:
onExitStoppedState();
break;
};
}
private void onEnterStoppedState() {
/* nothing to do */
}
private void onExitStoppedState() {
/* nothing to do */
}
// NOTE: The order of bitmap constructor and camera connection is important for android 4.1.x
// Bitmap must be constructed before surface
private void onEnterStartedState() {
Log.d(TAG, "call onEnterStartedState");
/* Connect camera */
if (!connectCamera(getWidth(), getHeight())) {
AlertDialog ad = new AlertDialog.Builder(getContext()).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("It seems that you device does not support camera (or it is locked). Application will be closed.");
ad.setButton(DialogInterface.BUTTON_NEUTRAL, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
((Activity) getContext()).finish();
}
});
ad.show();
}
}
private void onExitStartedState() {
disconnectCamera();
if (mCacheBitmap != null) {
mCacheBitmap.recycle();
}
}
/**
* This method shall be called by the subclasses when they have valid
* object and want it to be delivered to external client (via callback) and
* then displayed on the screen.
* @param frame - the current frame to be delivered
*/
protected void deliverAndDrawFrame(CvCameraViewFrame frame) {
Mat modified;
if (mListener != null) {
modified = mListener.onCameraFrame(frame);
} else {
modified = frame.rgba();
}
boolean bmpValid = true;
if (modified != null) {
try {
Utils.matToBitmap(modified, mCacheBitmap);
} catch(Exception e) {
Log.e(TAG, "Mat type: " + modified);
Log.e(TAG, "Bitmap type: " + mCacheBitmap.getWidth() + "*" + mCacheBitmap.getHeight());
Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
bmpValid = false;
}
}
if (bmpValid && mCacheBitmap != null) {
Canvas canvas;
if (mRecorder != null) {
canvas = mSurface.lockCanvas(null);
canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR);
Log.d(TAG, "mStretch value: " + mScale);
if (mScale != 0) {
canvas.drawBitmap(mCacheBitmap, new Rect(0,0,mCacheBitmap.getWidth(), mCacheBitmap.getHeight()),
new Rect((int)((canvas.getWidth() - mScale*mCacheBitmap.getWidth()) / 2),
(int)((canvas.getHeight() - mScale*mCacheBitmap.getHeight()) / 2),
(int)((canvas.getWidth() - mScale*mCacheBitmap.getWidth()) / 2 + mScale*mCacheBitmap.getWidth()),
(int)((canvas.getHeight() - mScale*mCacheBitmap.getHeight()) / 2 + mScale*mCacheBitmap.getHeight())), null);
} else {
canvas.drawBitmap(mCacheBitmap, new Rect(0,0,mCacheBitmap.getWidth(), mCacheBitmap.getHeight()),
new Rect((canvas.getWidth() - mCacheBitmap.getWidth()) / 2,
(canvas.getHeight() - mCacheBitmap.getHeight()) / 2,
(canvas.getWidth() - mCacheBitmap.getWidth()) / 2 + mCacheBitmap.getWidth(),
(canvas.getHeight() - mCacheBitmap.getHeight()) / 2 + mCacheBitmap.getHeight()), null);
}
if (mFpsMeter != null) {
mFpsMeter.measure();
mFpsMeter.draw(canvas, 20, 30);
}
mSurface.unlockCanvasAndPost(canvas);
}
}
}
/**
* This method is invoked shall perform concrete operation to initialize the camera.
* CONTRACT: as a result of this method variables mFrameWidth and mFrameHeight MUST be
* initialized with the size of the Camera frames that will be delivered to external processor.
* @param width - the width of this SurfaceView
* @param height - the height of this SurfaceView
*/
protected abstract boolean connectCamera(int width, int height);
/**
* Disconnects and release the particular camera object being connected to this surface view.
* Called when syncObject lock is held
*/
protected abstract void disconnectCamera();
// NOTE: On Android 4.1.x the function must be called before SurfaceTextre constructor!
protected void AllocateCache()
{
mCacheBitmap = Bitmap.createBitmap(mFrameWidth, mFrameHeight, Bitmap.Config.ARGB_8888);
}
public interface ListItemAccessor {
public int getWidth(Object obj);
public int getHeight(Object obj);
};
/**
* This helper method can be called by subclasses to select camera preview size.
* It goes over the list of the supported preview sizes and selects the maximum one which
* fits both values set via setMaxFrameSize() and surface frame allocated for this view
* @param supportedSizes
* @param surfaceWidth
* @param surfaceHeight
* @return optimal frame size
*/
protected Size calculateCameraFrameSize(List<?> supportedSizes, ListItemAccessor accessor, int surfaceWidth, int surfaceHeight) {
int calcWidth = 0;
int calcHeight = 0;
int maxAllowedWidth = (mMaxWidth != MAX_UNSPECIFIED && mMaxWidth < surfaceWidth)? mMaxWidth : surfaceWidth;
int maxAllowedHeight = (mMaxHeight != MAX_UNSPECIFIED && mMaxHeight < surfaceHeight)? mMaxHeight : surfaceHeight;
for (Object size : supportedSizes) {
int width = accessor.getWidth(size);
int height = accessor.getHeight(size);
if (width <= maxAllowedWidth && height <= maxAllowedHeight) {
if (width >= calcWidth && height >= calcHeight) {
calcWidth = (int) width;
calcHeight = (int) height;
}
}
}
return new Size(calcWidth, calcHeight);
}
}
And I added these permissions to the manifest:
<uses-feature android:name="android.hardware.camera" android:required="false"/>
<uses-feature android:name="android.hardware.camera.autofocus" android:required="false"/>
<uses-feature android:name="android.hardware.camera.front" android:required="false"/>
<uses-feature android:name="android.hardware.camera.front.autofocus" android:required="false"/>
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.WAKE_LOCK" />
<uses-permission android:name="android.permission.RECORD_VIDEO" />
<uses-permission android:name="android.permission.RECORD_AUDIO"/>

AudioRecord records intermittent sound in Android L Developer Preview

I'm recording sound with AudioRecord in PCM16LE format, 8000 Hz, 1 channel. It records fine on Android versions 2.3.3-4.4.4, but records strange intermittent sound on the Android L (5.0) Developer Preview (on Nexus 5, Nexus 7, and the emulator).
Here is the sample of recorded sound (the first half - recording, the second half - playback):
https://www.dropbox.com/s/3wcgufua5pphwtt/android_l_sound_record_error.m4a?dl=0
I tried to play the recorded sound using different sample rates (4000, 16000) and as 8-bit, but the sound stays intermittent. What could the problem be with this sound?
I'm using this AudioRecordTask to record audio, with getAudioRecord() initializing the input (no errors are returned during operation; I receive audio chunks sized equal to the internalBufferSize value):
public final int SAMPLING_RATE = 8000;
private AudioRecord getAudioRecord() {
int internalBufferSize = AudioRecord.getMinBufferSize(SAMPLING_RATE,
AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT); //returns 640
internalBufferSize = 8000; //also tried returned value (640) and values 2560, 30000 - no changes
final int SOURCE;
if (Build.VERSION.SDK_INT < 11) {
SOURCE = MediaRecorder.AudioSource.MIC;
} else {
SOURCE = MediaRecorder.AudioSource.VOICE_COMMUNICATION;
}
AudioRecord record = new AudioRecord(SOURCE,
SAMPLING_RATE,
AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT,
internalBufferSize);
int state = record.getState();
if (state != AudioRecord.STATE_INITIALIZED) {
try {
record.release();
} catch (Exception e) {
}
return null;
}
if (record.getState() == android.media.AudioRecord.STATE_INITIALIZED) {
record.startRecording();
} else {
record.release();
return null;
}
return record;
}
private class AudioRecordTask extends AsyncTask<Void, Void, Void> {
final int PARTIAL_BUFFER_SIZE = SAMPLING_RATE;
final int NECESSARY_BUFFER_SIZE = 15 * PARTIAL_BUFFER_SIZE * Short.SIZE / 8;
final int FULL_BUFFER_SIZE = NECESSARY_BUFFER_SIZE * 2; //XXX: * 2 for the case when system returns more data than needed
short[] mBuffer;
int mTotalSize;
int mTotalSizeInBytes;
boolean mResult;
private Object mLock = new Object();
@Override
protected void onPreExecute()
{
mIsRecording = true;
mBuffer = new short[FULL_BUFFER_SIZE];
mTotalSize = 0;
mTotalSizeInBytes = 0;
mResult = false;
}
@Override
protected Void doInBackground(Void... arg0) {
synchronized (mLock) {
android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
AudioRecord record = getAudioRecord();
if (record == null) {
mResult = false;
return null;
}
for (int i = 0; i < 15 * 100; i++) { //XXX: * 100 to record enough data (system can return lesser than needed)
int datalen = record.read(mBuffer, mTotalSize, PARTIAL_BUFFER_SIZE);
if (datalen > 0) {
mTotalSize += datalen;
mTotalSizeInBytes = mTotalSize*2;
} else {
Log.w("", "error " + datalen + " in AudioRecord.read");
}
if (isCancelled() || mTotalSizeInBytes > NECESSARY_BUFFER_SIZE) {
break;
}
}
if (record.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING) {
record.stop();
}
record.release();
mResult = true;
return null;
}
}
@Override
protected void onPostExecute(Void r) {
synchronized (mLock) {
mIsRecording = false;
fin();
}
}
@Override
protected void onCancelled() {
//XXX: on old Androids (e.g. 2.3.3) onCancelled being called while doInBackground is still running
synchronized (mLock) {
mIsRecording = false;
if (mAbort) {
return;
}
fin();
}
}
private void fin() {
if (mResult && mTotalSizeInBytes > 0) {
sendRecordedAudioToServer(mBuffer, mTotalSize, mTotalSizeInBytes);
} else {
showError(null);
}
}
}
It's a bug in the Android L Developer Preview: https://code.google.com/p/android-developer-preview/issues/detail?id=1492
AudioRecord.read with a short[] buffer argument returns the value in bytes instead of in shorts.
As a workaround, use AudioRecord.read with a byte[] buffer.
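For example, a sketch of the workaround inside the read loop above, reading into a byte[] and converting to shorts (byteBuffer is new; the other names come from the code above):
byte[] byteBuffer = new byte[PARTIAL_BUFFER_SIZE * 2]; // 2 bytes per 16-bit sample
int bytesRead = record.read(byteBuffer, 0, byteBuffer.length); // the byte[] variant returns bytes on all versions
if (bytesRead > 0) {
// Reinterpret the PCM16 bytes as shorts (samples are in native byte order).
ByteBuffer.wrap(byteBuffer, 0, bytesRead).order(ByteOrder.nativeOrder()).asShortBuffer().get(mBuffer, mTotalSize, bytesRead / 2);
mTotalSize += bytesRead / 2;
mTotalSizeInBytes += bytesRead;
}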
