I am developing an application that has to detect the loudness of a voice: someone shouts at the phone and the app shows the loudness level on the screen.
Which API can be used for this?
Look at http://developer.android.com/reference/android/media/AudioRecord.html
When you read the recording buffer, the sample values represent the amplitude: the larger the (absolute) value, the louder the sound.
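For example, a rough loudness figure in dBFS can be derived from a buffer of 16-bit samples like this (a minimal sketch, not part of the original answer; computeDbFs is just an illustrative name, and buffer/readCount would come from AudioRecord.read()):
// Rough loudness estimate from a 16-bit PCM buffer; drop this into the activity.
double computeDbFs(short[] buffer, int readCount) {
    long sumOfSquares = 0;
    for (int i = 0; i < readCount; i++) {
        sumOfSquares += (long) buffer[i] * buffer[i];
    }
    double rms = Math.sqrt(sumOfSquares / (double) Math.max(readCount, 1));
    // 32767 is full scale for 16-bit PCM; the result is <= 0 dBFS, more negative = quieter.
    return 20.0 * Math.log10(Math.max(rms, 1) / 32767.0);
}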
Here is a scaled-down version of something I used in an app I wrote a while back:
Add this to your AndroidManifest.xml:
<uses-permission android:name="android.permission.RECORD_AUDIO" />
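Note that on Android 6.0 (API 23) and higher, RECORD_AUDIO is a dangerous permission that must also be requested at runtime; the manifest entry alone is not enough. A minimal sketch, assuming the support-library (or AndroidX) ContextCompat/ActivityCompat helpers; the request-code constant and method name are just illustrative:
// Runtime permission check for RECORD_AUDIO (call it e.g. from onCreate()).
private static final int REQUEST_RECORD_AUDIO = 1;

private void ensureRecordAudioPermission() {
    if (ContextCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO)
            != PackageManager.PERMISSION_GRANTED) {
        ActivityCompat.requestPermissions(this,
                new String[] { Manifest.permission.RECORD_AUDIO }, REQUEST_RECORD_AUDIO);
    }
}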
soundlevel.xml
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:orientation="vertical" >
<ToggleButton
android:id="#+id/togglebutton_record"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="ToggleButton" />
<ProgressBar
android:id="#+id/progressbar_level"
style="?android:attr/progressBarStyleHorizontal"
android:layout_width="match_parent"
android:layout_height="wrap_content" />
</LinearLayout>
SoundLevel.java
import android.app.Activity;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.Handler;
import android.os.SystemClock;
import android.widget.CompoundButton;
import android.widget.ProgressBar;
import android.widget.ToggleButton;
public class SoundLevel extends Activity {
private static final int sampleRate = 11025;
private static final int bufferSizeFactor = 10;
private AudioRecord audio;
private int bufferSize;
private ProgressBar level;
private Handler handler = new Handler();
private int lastLevel = 0;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.soundlevel);
level = (ProgressBar) findViewById(R.id.progressbar_level);
level.setMax(32767); // 32767 is the largest 16-bit PCM sample value
ToggleButton record = (ToggleButton) findViewById(R.id.togglebutton_record);
record.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
if (isChecked) {
bufferSize = AudioRecord.getMinBufferSize(sampleRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT) * bufferSizeFactor;
audio = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
audio.startRecording();
Thread thread = new Thread(new Runnable() {
public void run() {
readAudioBuffer();
}
});
thread.setPriority(Thread.currentThread().getThreadGroup().getMaxPriority());
thread.start();
handler.removeCallbacks(update);
handler.postDelayed(update, 25);
} else if (audio != null) {
audio.stop();
audio.release();
audio = null;
handler.removeCallbacks(update);
}
}
});
}
private void readAudioBuffer() {
try {
short[] buffer = new short[bufferSize];
int bufferReadResult;
do {
bufferReadResult = audio.read(buffer, 0, bufferSize);
for (int i = 0; i < bufferReadResult; i++){
if (buffer[i] > lastLevel) {
lastLevel = buffer[i];
}
}
} while (bufferReadResult > 0 && audio.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING);
if (audio != null) {
audio.release();
audio = null;
handler.removeCallbacks(update);
}
} catch (Exception e) {
e.printStackTrace();
}
}
private Runnable update = new Runnable() {
public void run() {
SoundLevel.this.level.setProgress(lastLevel);
lastLevel *= .5;
handler.postAtTime(this, SystemClock.uptimeMillis() + 500);
}
};
}
Related
I want to get the audio from a Bluetooth headset and play it back on the same Bluetooth headset. I am able to do that on Lollipop 5.1.1 (Samsung Note 3 Neo), but it is not working on Android 7.0 (Redmi Note 4).
I first create an AudioTrack and then start a new thread that reads audio from the mic. At first it reads audio from the phone mic; after clicking the Bluetooth button it starts Bluetooth SCO.
Can anyone help?
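In outline, the routing sequence described above looks like this (a sketch using only the standard AudioManager SCO calls, to be run inside an Activity; the complete code follows below):
// Sketch of the SCO hand-off described in the question.
AudioManager am = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
if (am.isBluetoothScoAvailableOffCall()) {
    am.startBluetoothSco();       // ask the system to bring up the SCO link
    am.setBluetoothScoOn(true);   // route capture/playback over SCO once connected
}
// Then wait for AudioManager.ACTION_SCO_AUDIO_STATE_UPDATED to report
// SCO_AUDIO_STATE_CONNECTED before creating the AudioRecord, so that the
// headset microphone (not the phone mic) is captured.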
package surya.com.audiorecord;
import android.Manifest;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.pm.PackageManager;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioRecord;
import android.media.AudioTrack;
import android.media.MediaRecorder;
import android.os.Build;
import android.os.Bundle;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.ContextCompat;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.Toast;
import java.nio.ByteBuffer;
import java.util.concurrent.atomic.AtomicBoolean;
/**
* Sample that demonstrates how to record from a Bluetooth HFP microphone using {@link AudioRecord}.
*/
public class BluetoothRecordActivity extends AppCompatActivity {
private static final String TAG = BluetoothRecordActivity.class.getCanonicalName();
private static final int SAMPLING_RATE_IN_HZ = 16000;
private static final int CHANNEL_CONFIG = AudioFormat.CHANNEL_IN_MONO;
private static final int AUDIO_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
/**
* Factor by which the minimum buffer size is multiplied. The bigger the factor, the less
* likely it is that samples will be dropped, but more memory will be used. The minimum buffer
* size is determined by {@link AudioRecord#getMinBufferSize(int, int, int)} and depends on the
* recording settings.
*/
private static final int BUFFER_SIZE_FACTOR = 2;
/**
* Size of the buffer where the audio data is stored by Android
*/
private static final int BUFFER_SIZE = AudioRecord.getMinBufferSize(SAMPLING_RATE_IN_HZ,
CHANNEL_CONFIG, AUDIO_FORMAT) * BUFFER_SIZE_FACTOR;
/**
* Signals whether a recording is in progress (true) or not (false).
*/
private final AtomicBoolean recordingInProgress = new AtomicBoolean(false);
private AudioRecord recorder = null;
private AudioManager audioManager;
private Thread recordingThread = null;
private Button startButton;
private Button stopButton;
private Button bluetoothButton;
AudioTrack mAudioTrack;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.bluetooth);
audioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
try {
outputBufferSize = AudioTrack.getMinBufferSize(16000,
AudioFormat.CHANNEL_IN_STEREO,
AudioFormat.ENCODING_PCM_16BIT);
mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, 16000, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT, outputBufferSize, AudioTrack.MODE_STREAM);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
mAudioTrack.setVolume(100);
}
mAudioTrack.play();
} catch (Exception e) {
e.printStackTrace();
}
startButton = (Button) findViewById(R.id.btnStart);
startButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
startRecording();
}
});
stopButton = (Button) findViewById(R.id.btnStop);
stopButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
stopRecording();
}
});
bluetoothButton = (Button) findViewById(R.id.btnBluetooth);
bluetoothButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
activateBluetoothSco();
}
});
requestAudioPermissions();
}
int outputBufferSize;
@Override
protected void onResume() {
super.onResume();
ButtonEnableSetters();
registerReceiver(bluetoothStateReceiver, new IntentFilter(
AudioManager.ACTION_SCO_AUDIO_STATE_UPDATED));
}
private void ButtonEnableSetters() {
runOnUiThread(new Runnable() {
@Override
public void run() {
bluetoothButton.setEnabled(calculateBluetoothButtonState());
startButton.setEnabled(calculateStartRecordButtonState());
stopButton.setEnabled(calculateStopRecordButtonState());
}
});
}
@Override
protected void onPause() {
super.onPause();
stopRecording();
unregisterReceiver(bluetoothStateReceiver);
}
private void startRecording() {
// Depending on the device one might have to change the AudioSource, e.g. to DEFAULT
// or VOICE_COMMUNICATION
recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
SAMPLING_RATE_IN_HZ, CHANNEL_CONFIG, AUDIO_FORMAT, BUFFER_SIZE);
recorder.startRecording();
recordingInProgress.set(true);
try {
recordingThread = new Thread(new RecordingRunnable(), "Recording Thread");
recordingThread.start();
} catch (Exception e) {
e.printStackTrace();
}
ButtonEnableSetters();
}
private void stopRecording() {
if (null == recorder) {
return;
}
recordingInProgress.set(false);
recorder.stop();
recorder.release();
recorder = null;
recordingThread = null;
ButtonEnableSetters();
}
private void activateBluetoothSco() {
if (!audioManager.isBluetoothScoAvailableOffCall()) {
Log.e(TAG, "SCO is not available, recording is not possible");
return;
}
if (!audioManager.isBluetoothScoOn()) {
audioManager.startBluetoothSco();
audioManager.setBluetoothScoOn(true);
}
}
private void bluetoothStateChanged(BluetoothState state) {
Log.i(TAG, "Bluetooth state changed to:" + state);
if (BluetoothState.UNAVAILABLE == state && recordingInProgress.get()) {
stopRecording();
}
ButtonEnableSetters();
}
private boolean calculateBluetoothButtonState() {
return !audioManager.isBluetoothScoOn();
}
private boolean calculateStartRecordButtonState() {
return audioManager.isBluetoothScoOn() && !recordingInProgress.get();
}
private boolean calculateStopRecordButtonState() {
return audioManager.isBluetoothScoOn() && recordingInProgress.get();
}
private class RecordingRunnable implements Runnable {
@Override
public void run() {
if (mAudioTrack != null) {
if (mAudioTrack.getPlayState() != AudioTrack.PLAYSTATE_PLAYING) {
mAudioTrack.play();
} else {
mAudioTrack.stop();
mAudioTrack.flush();
mAudioTrack.play();
}
}
// final File file = new File(Environment.getExternalStorageDirectory(), "recording.pcm");
final ByteBuffer buffer = ByteBuffer.allocateDirect(BUFFER_SIZE);
while (recordingInProgress.get()) {
int result = recorder.read(buffer, BUFFER_SIZE);
if (result < 0) {
// NOTE: the post is truncated from here down to onRequestPermissionsResult();
// the error check, the write-back to the AudioTrack and the permission plumbing
// below are a plausible reconstruction, not the poster's exact code.
throw new RuntimeException("Reading of audio buffer failed: " + result);
}
// Play the captured audio straight back out through the AudioTrack.
byte[] out = new byte[result];
buffer.rewind();
buffer.get(out, 0, result);
mAudioTrack.write(out, 0, result);
buffer.clear();
}
}
}
// Assumed: simple state enum used by the SCO broadcast receiver below.
private enum BluetoothState {
AVAILABLE, UNAVAILABLE
}
private static final int PERMISSIONS_REQUEST_RECORD_AUDIO = 1;
private void requestAudioPermissions() {
if (ContextCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO)
!= PackageManager.PERMISSION_GRANTED) {
ActivityCompat.requestPermissions(this,
new String[]{Manifest.permission.RECORD_AUDIO},
PERMISSIONS_REQUEST_RECORD_AUDIO);
} else {
activateBluetoothSco();
startRecording();
}
}
@Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
if (requestCode == PERMISSIONS_REQUEST_RECORD_AUDIO) {
if (grantResults.length > 0
&& grantResults[0] == PackageManager.PERMISSION_GRANTED) {
// permission was granted, yay!
activateBluetoothSco();
startRecording();
} else {
// permission denied, boo! Disable the
// functionality that depends on this permission.
Toast.makeText(this, "Permissions Denied to record audio", Toast.LENGTH_LONG).show();
}
return;
}
}
private final BroadcastReceiver bluetoothStateReceiver = new BroadcastReceiver() {
private BluetoothState bluetoothState = BluetoothState.UNAVAILABLE;
@Override
public void onReceive(Context context, Intent intent) {
int state = intent.getIntExtra(AudioManager.EXTRA_SCO_AUDIO_STATE, -1);
switch (state) {
case AudioManager.SCO_AUDIO_STATE_CONNECTED:
Log.i(TAG, "Bluetooth HFP Headset is connected");
handleBluetoothStateChange(BluetoothState.AVAILABLE);
break;
case AudioManager.SCO_AUDIO_STATE_CONNECTING:
Log.i(TAG, "Bluetooth HFP Headset is connecting");
handleBluetoothStateChange(BluetoothState.UNAVAILABLE);
break;
case AudioManager.SCO_AUDIO_STATE_DISCONNECTED:
Log.i(TAG, "Bluetooth HFP Headset is disconnected");
handleBluetoothStateChange(BluetoothState.UNAVAILABLE);
break;
case AudioManager.SCO_AUDIO_STATE_ERROR:
Log.i(TAG, "Bluetooth HFP Headset is in error state");
handleBluetoothStateChange(BluetoothState.UNAVAILABLE);
break;
}
}
private void handleBluetoothStateChange(BluetoothState state) {
if (bluetoothState == state) {
return;
}
bluetoothState = state;
bluetoothStateChanged(state);
}
};
}
This is the project source code
https://bitbucket.org/surya945/audiorecord
Welcome to Stack Overflow.
I think your issue is related to the targetSdkVersion in build.gradle (Module: app). Check this.
I am writing an Android program that streams the mic directly to the phone's speaker. The code works, but the UI hangs and the app freezes; the audio transfer keeps working even while the app hangs. Where is the error?
RecordBufferSize=AudioRecord.getMinBufferSize(sampleRateInHz,AudioFormat.CHANNEL_IN_MONO,AudioFormat.ENCODING_PCM_16BIT);
TrackBufferSize= AudioTrack.getMinBufferSize(sampleRateInHz,AudioFormat.CHANNEL_OUT_MONO,AudioFormat.ENCODING_PCM_16BIT);
am = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
Record record = new Record();
record.run();
}
public class Record extends Thread
{
final short[] buffer = new short[RecordBufferSize];
short[] readBuffer = new short[TrackBufferSize];
public void run() {
isRecording = true;
android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
AudioRecord arec = new AudioRecord(MediaRecorder.AudioSource.MIC,sampleRateInHz,AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT,RecordBufferSize);
AudioTrack atrack = new AudioTrack(AudioManager.STREAM_MUSIC,sampleRateInHz,AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT, TrackBufferSize, AudioTrack.MODE_STREAM);
//am.setRouting(AudioManager.MODE_NORMAL, AudioManager.ROUTE_EARPIECE, AudioManager.ROUTE_ALL);
atrack.setPlaybackRate(sampleRateInHz);
byte[] buffer = new byte[RecordBufferSize];
arec.startRecording();
atrack.play();
while(isRecording) {
AudioLenght=arec.read(buffer, 0, RecordBufferSize);
atrack.write(buffer, 0, AudioLenght);
}
arec.stop();
atrack.stop();
isRecording = false;
}
}
This is my code.
I tried this and got a result. Try this.
Java code I used:
package com.example.root.akuvo;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.graphics.PorterDuff;
import android.media.AudioAttributes;
import android.media.AudioDeviceInfo;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioRecord;
import android.media.AudioTrack;
import android.media.MediaRecorder;
import android.media.audiofx.AcousticEchoCanceler;
import android.media.audiofx.AutomaticGainControl;
import android.media.audiofx.BassBoost;
import android.media.audiofx.NoiseSuppressor;
import android.os.Build;
import android.support.annotation.RequiresApi;
import android.support.v7.app.AlertDialog;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.Button;
public class MicToSpeakerActivity extends AppCompatActivity {
//Audio
private Button mOn;
private boolean isOn;
private boolean isRecording;
private AudioRecord record;
private AudioTrack player;
private AudioManager manager;
private int recordState, playerState;
private int minBuffer;
//Audio Settings
private final int source = MediaRecorder.AudioSource.CAMCORDER;
private final int channel_in = AudioFormat.CHANNEL_IN_MONO;
private final int channel_out = AudioFormat.CHANNEL_OUT_MONO;
private final int format = AudioFormat.ENCODING_PCM_16BIT;
private final static int REQUEST_ENABLE_BT = 1;
private boolean IS_HEADPHONE_AVAILABLE = false;
@RequiresApi(api = Build.VERSION_CODES.M)
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_mic_to_speaker);
//Reduce latency
setVolumeControlStream(AudioManager.MODE_IN_COMMUNICATION);
mOn = (Button) findViewById(R.id.button);
isOn = false;
isRecording = false;
manager = (AudioManager) this.getSystemService(Context.AUDIO_SERVICE);
manager.setMode(AudioManager.MODE_IN_COMMUNICATION);
//Check for headset availability
AudioDeviceInfo[] audioDevices = manager.getDevices(AudioManager.GET_DEVICES_ALL);
for(AudioDeviceInfo deviceInfo : audioDevices) {
if (deviceInfo.getType() == AudioDeviceInfo.TYPE_WIRED_HEADPHONES || deviceInfo.getType() == AudioDeviceInfo.TYPE_WIRED_HEADSET || deviceInfo.getType() == AudioDeviceInfo.TYPE_USB_HEADSET) {
IS_HEADPHONE_AVAILABLE = true;
}
}
if (!IS_HEADPHONE_AVAILABLE) {
// get delete_audio_dialog.xml view
LayoutInflater layoutInflater = LayoutInflater.from(MicToSpeakerActivity.this);
View promptView = layoutInflater.inflate(R.layout.insert_headphone_dialog, null);
AlertDialog.Builder alertDialogBuilder = new AlertDialog.Builder(MicToSpeakerActivity.this);
alertDialogBuilder.setView(promptView);
// setup a dialog window
alertDialogBuilder.setCancelable(false)
.setPositiveButton("Try Again", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
startActivity(new Intent(getIntent()));
}
})
.setNegativeButton("Cancel",
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
startActivity(new Intent(MicToSpeakerActivity.this,MainActivity.class));
dialog.cancel();
}
});
// create an alert dialog
AlertDialog alert = alertDialogBuilder.create();
alert.show();
}
initAudio();
mOn.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
mOn.getBackground().setColorFilter(getResources().getColor(!isOn ? R.color.colorOn : R.color.colorOff), PorterDuff.Mode.SRC_ATOP);
isOn = !isOn;
if(isOn) {
(new Thread() {
@Override
public void run()
{
startAudio();
}
}).start();
} else {
endAudio();
}
}
});
}
public void initAudio() {
//Tests all sample rates before selecting one that works
int sample_rate = getSampleRate();
minBuffer = AudioRecord.getMinBufferSize(sample_rate, channel_in, format);
record = new AudioRecord(source, sample_rate, channel_in, format, minBuffer);
recordState = record.getState();
int id = record.getAudioSessionId();
Log.d("Record", "ID: " + id);
playerState = 0;
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
player = new AudioTrack(
new AudioAttributes.Builder().setUsage(AudioAttributes.USAGE_MEDIA).setContentType(AudioAttributes.CONTENT_TYPE_MUSIC).build(),
new AudioFormat.Builder().setEncoding(format).setSampleRate(sample_rate).setChannelMask(channel_out).build(),
minBuffer,
AudioTrack.MODE_STREAM,
AudioManager.AUDIO_SESSION_ID_GENERATE);
playerState = player.getState();
// Formatting Audio
if(AcousticEchoCanceler.isAvailable()) {
AcousticEchoCanceler echo = AcousticEchoCanceler.create(id);
echo.setEnabled(true);
Log.d("Echo", "Off");
}
if(NoiseSuppressor.isAvailable()) {
NoiseSuppressor noise = NoiseSuppressor.create(id);
noise.setEnabled(true);
Log.d("Noise", "Off");
}
if(AutomaticGainControl.isAvailable()) {
AutomaticGainControl gain = AutomaticGainControl.create(id);
gain.setEnabled(false);
Log.d("Gain", "Off");
}
BassBoost base = new BassBoost(1, player.getAudioSessionId());
base.setStrength((short) 1000);
}
}
public void startAudio() {
int read = 0, write = 0;
if(recordState == AudioRecord.STATE_INITIALIZED && playerState == AudioTrack.STATE_INITIALIZED) {
record.startRecording();
player.play();
isRecording = true;
Log.d("Record", "Recording...");
}
while(isRecording) {
short[] audioData = new short[minBuffer];
if(record != null)
read = record.read(audioData, 0, minBuffer);
else
break;
Log.d("Record", "Read: " + read);
if(player != null)
write = player.write(audioData, 0, read);
else
break;
Log.d("Record", "Write: " + write);
}
}
public void endAudio() {
if(record != null) {
if(record.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING)
record.stop();
isRecording = false;
Log.d("Record", "Stopping...");
}
if(player != null) {
if(player.getPlayState() == AudioTrack.PLAYSTATE_PLAYING)
player.stop();
isRecording = false;
Log.d("Player", "Stopping...");
}
}
public int getSampleRate() {
//Find a sample rate that works with the device
for (int rate : new int[] {8000, 11025, 16000, 22050, 44100, 48000}) {
int buffer = AudioRecord.getMinBufferSize(rate, channel_in, format);
if (buffer > 0)
return rate;
}
return -1;
}
}
XML code I used:
<?xml version="1.0" encoding="utf-8"?>
<android.support.constraint.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context="com.example.root.akuvo.MicToSpeakerActivity">
<Button
android:id="#+id/button"
android:layout_width="104dp"
android:layout_height="102dp"
android:layout_alignParentBottom="true"
android:layout_centerHorizontal="true"
android:layout_marginBottom="8dp"
android:layout_marginEnd="8dp"
android:layout_marginStart="8dp"
android:layout_marginTop="8dp"
android:background="@android:drawable/ic_lock_power_off"
android:backgroundTint="@color/colorOff"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toTopOf="parent"
app:layout_constraintVertical_bias="0.396" />
</android.support.constraint.ConstraintLayout>
I'm trying to add subtitles to a VideoView. I've used this example project:
package com.example.media.timedtexttest;
import java.io.Closeable;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Locale;
import android.app.Activity;
import android.media.MediaPlayer;
import android.media.MediaPlayer.OnTimedTextListener;
import android.media.MediaPlayer.TrackInfo;
import android.media.TimedText;
import android.os.Bundle;
import android.os.Handler;
import android.util.Log;
import android.widget.TextView;
public class MainActivity extends Activity implements OnTimedTextListener {
private static final String TAG = "TimedTextTest";
private TextView txtDisplay;
private static Handler handler = new Handler();
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
txtDisplay = (TextView) findViewById(R.id.txtDisplay);
MediaPlayer player = MediaPlayer.create(this, R.raw.video);
try {
player.addTimedTextSource(getSubtitleFile(R.raw.sub),
MediaPlayer.MEDIA_MIMETYPE_TEXT_SUBRIP);
int textTrackIndex = findTrackIndexFor(
TrackInfo.MEDIA_TRACK_TYPE_TIMEDTEXT, player.getTrackInfo());
if (textTrackIndex >= 0) {
player.selectTrack(textTrackIndex);
} else {
Log.w(TAG, "Cannot find text track!");
}
player.setOnTimedTextListener(this);
player.start();
} catch (Exception e) {
e.printStackTrace();
}
}
private int findTrackIndexFor(int mediaTrackType, TrackInfo[] trackInfo) {
int index = -1;
for (int i = 0; i < trackInfo.length; i++) {
if (trackInfo[i].getTrackType() == mediaTrackType) {
return i;
}
}
return index;
}
private String getSubtitleFile(int resId) {
String fileName = getResources().getResourceEntryName(resId);
File subtitleFile = getFileStreamPath(fileName);
if (subtitleFile.exists()) {
Log.d(TAG, "Subtitle already exists");
return subtitleFile.getAbsolutePath();
}
Log.d(TAG, "Subtitle does not exists, copy it from res/raw");
// Copy the file from the res/raw folder to your app folder on the
// device
InputStream inputStream = null;
OutputStream outputStream = null;
try {
inputStream = getResources().openRawResource(resId);
outputStream = new FileOutputStream(subtitleFile, false);
copyFile(inputStream, outputStream);
return subtitleFile.getAbsolutePath();
} catch (Exception e) {
e.printStackTrace();
} finally {
closeStreams(inputStream, outputStream);
}
return "";
}
private void copyFile(InputStream inputStream, OutputStream outputStream)
throws IOException {
final int BUFFER_SIZE = 1024;
byte[] buffer = new byte[BUFFER_SIZE];
int length = -1;
while ((length = inputStream.read(buffer)) != -1) {
outputStream.write(buffer, 0, length);
}
}
// A handy method I use to close all the streams
private void closeStreams(Closeable... closeables) {
if (closeables != null) {
for (Closeable stream : closeables) {
if (stream != null) {
try {
stream.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
}
@Override
public void onTimedText(final MediaPlayer mp, final TimedText text) {
if (text != null) {
handler.post(new Runnable() {
@Override
public void run() {
int seconds = mp.getCurrentPosition() / 1000;
txtDisplay.setText("[" + secondsToDuration(seconds) + "] "
+ text.getText());
}
});
}
}
// To display the seconds in the duration format 00:00:00
public String secondsToDuration(int seconds) {
return String.format(Locale.US, "%02d:%02d:%02d", seconds / 3600,
(seconds % 3600) / 60, seconds % 60);
}
}
and created exactly the same implementation, and it somehow works, but if I stop the video and resume, the subtitles don't continue, and if I seek the video to some position the subtitles don't continue either.
What I have tried:
Every time before player.start() I call player.selectTrack(textTrackIndex);
I've tried to re-register the listener when calling player.start(): player.setOnTimedTextListener(this);
Please help, I've spent 4 days on the subtitle feature... If you have a project example or snippet it would be nice :)
We had the same problem, and you must implement OnSeekCompleteListener, even if you have nothing to do in the overridden method.
This is our code:
@Override
public void onSeekComplete(MediaPlayer mediaPlayer) {
}
and it works!
I hope that helps you.
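For reference, a minimal sketch of the wiring, assuming the activity also implements MediaPlayer.OnSeekCompleteListener and that player is the MediaPlayer from the question:
// Register the (possibly empty) seek-complete callback alongside the timed-text listener.
player.setOnTimedTextListener(this);
player.setOnSeekCompleteListener(this);  // without this, timed text can stop after pause/seek
player.start();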
I tried to create an Android application that connects to an HC-06 module and shows the data detected by a sensor in a TextView.
But when I click the button to start the connection, my application stops and shows a toast saying that I have to activate Bluetooth.
package com.example.admin.app_sniff;
import android.app.Activity;
import android.os.Bundle;
import android.view.View;
import android.widget.ImageButton;
import android.widget.TextView;
import android.bluetooth.BluetoothAdapter;
import android.bluetooth.BluetoothDevice;
import android.bluetooth.BluetoothSocket;
import android.content.Intent;
import android.os.Handler;
import android.widget.Toast;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Set;
import java.util.UUID;
public class Activity_2 extends Activity
{ private static final int REQUEST_ENABLE_BT = 1;
TextView myLabel;
BluetoothSocket mmSocket;
BluetoothDevice mmDevice;
OutputStream mmOutputStream;
InputStream mmInputStream;
Thread workerThread;
byte[] readBuffer;
int readBufferPosition;
volatile boolean stopWorker;
BluetoothAdapter bluetoothAdapter ;
Set<BluetoothDevice> pairedDevices = bluetoothAdapter.getBondedDevices();
@Override
public void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_2);
ImageButton bt1 = (ImageButton) findViewById(R.id.demarrer);
ImageButton bt2 = (ImageButton)findViewById(R.id.arreter);
bluetoothAdapter = BluetoothAdapter.getDefaultAdapter();
myLabel = (TextView)findViewById(R.id.text1);
//Open Button
bt1.setOnClickListener(new View.OnClickListener()
{
public void onClick(View v)
{
F1();
}
});
//Close button
bt2.setOnClickListener(new View.OnClickListener()
{
public void onClick(View v)
{
while (!stopWorker)
{
try
{
closeBT();
}
catch (IOException ex) {
stopWorker=true;}
}}
});
}
public void F1(){
while (!stopWorker)
{
try
{
findBT();
openBT();
}
catch (IOException ex) {
stopWorker=true ;
}
}
}
public void findBT()
{
if (bluetoothAdapter == null) {
myLabel.setText(R.string.Bluetooth_NOT_support);
} else {
myLabel.setText(R.string.Bluetooth_support);
if (!bluetoothAdapter.isEnabled()) {
myLabel.setText(R.string.not_enabled);
Intent enableBluetooth = new Intent(BluetoothAdapter.ACTION_REQUEST_ENABLE);
startActivityForResult(enableBluetooth,REQUEST_ENABLE_BT);
myLabel.setText(R.string.enabled);
}
}
if(pairedDevices.size() > 0)
{
for(BluetoothDevice device : pairedDevices)
{
if(device.getName().equals("HC-06"))
{
mmDevice = device;
break;
}
}
}
myLabel.setText(R.string.Bluetooth_Device_not_found);
}
public void openBT() throws IOException
{
UUID uuid = UUID.fromString("00001101-0000-1000-8000-00805F9B34FB"); //Standard SerialPortService ID
mmSocket = mmDevice.createRfcommSocketToServiceRecord(uuid);
mmSocket.connect();
mmOutputStream = mmSocket.getOutputStream();
mmInputStream = mmSocket.getInputStream();
beginListenForData();
Toast.makeText(this, "Bluetooth Opened", Toast.LENGTH_LONG)
.show();
}
public void beginListenForData()
{
final Handler handler = new Handler();
final byte delimiter = 10; //This is the ASCII code for a newline character
stopWorker = false;
readBufferPosition = 0;
readBuffer = new byte[1024];
workerThread = new Thread(new Runnable()
{
public void run()
{
while(!Thread.currentThread().isInterrupted() && !stopWorker)
{
try
{
int bytesAvailable = mmInputStream.available();
if(bytesAvailable > 0)
{
byte[] packetBytes = new byte[bytesAvailable];
mmInputStream.read(packetBytes);
for(int i=0;i<bytesAvailable;i++)
{
byte b = packetBytes[i];
if(b == delimiter)
{
byte[] encodedBytes = new byte[readBufferPosition];
System.arraycopy(readBuffer, 0, encodedBytes, 0, encodedBytes.length);
final String data = new String(encodedBytes, "US-ASCII");
readBufferPosition = 0;
handler.post(new Runnable()
{
public void run()
{
myLabel.setText(data);
}
});
}
else
{
readBuffer[readBufferPosition++] = b;
}
}
}
}
catch (IOException ex)
{ stopWorker = true;
}
}
}
});
workerThread.start();
}
public void closeBT() throws IOException
{
stopWorker = true;
mmOutputStream.close();
mmInputStream.close();
mmSocket.close();
myLabel.setText(R.string.Bluetooth_Closed);
}
}
This is my XML layout:
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:paddingBottom="@dimen/activity_vertical_margin"
android:paddingLeft="@dimen/activity_horizontal_margin"
android:paddingRight="@dimen/activity_horizontal_margin"
android:paddingTop="@dimen/activity_vertical_margin"
android:background="@drawable/background2"
tools:context="com.example.admin.app_sniff.Activity_2">
<GridLayout
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_above="@+id/demarrer"
android:layout_alignParentLeft="true"
android:layout_alignParentStart="true">
<TextView
android:layout_width="wrap_content"
android:layout_height="382dp"
android:textAppearance="?android:attr/textAppearanceLarge"
android:id="#+id/text1"
android:layout_row="2"
android:layout_column="0" />
</GridLayout>
<ImageButton
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:id="#+id/demarrer"
android:layout_alignParentBottom="true"
android:layout_alignParentLeft="true"
android:layout_alignParentStart="true"
android:layout_marginBottom="53dp"
android:src="@drawable/btn1"
android:contentDescription="@string/de"
/>
<ImageButton
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:id="#+id/arreter"
android:layout_alignBottom="#+id/demarrer"
android:layout_toRightOf="#+id/demarrer"
android:layout_toEndOf="#+id/demarrer"
android:src="#drawable/btn2"
android:contentDescription="#string/ar"
/>
</RelativeLayout>
Have you added the permissions to use Bluetooth in your AndroidManifest?
<uses-permission android:name="android.permission.BLUETOOTH" />
<uses-permission android:name="android.permission.BLUETOOTH_ADMIN" />
My app is voice chat between a server and a client. The client side works well, but the server side only works in the emulator, not on a real phone.
Sometimes it shows no error.
What's wrong with my code?
import android.os.Build;
import android.os.Bundle;
import android.os.StrictMode;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.app.Activity;
import java.io.IOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.SocketException;
import android.content.Context;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioRecord;
import android.media.AudioTrack;
import android.media.MediaRecorder;
import android.util.Base64;
import android.util.Log;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;
@TargetApi(Build.VERSION_CODES.GINGERBREAD) @SuppressLint("NewApi") public class MainActivity extends Activity {
private Button receiveButton,stopButton;
private TextView receive_label;
private EditText port;
public static DatagramSocket socket;
private AudioTrack speaker;
private int port_num=50005;
//private int sampleRate =8000;//Integer.parseInt(audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE));
private int channelConfig = AudioFormat.CHANNEL_OUT_MONO;
private int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
int minBufSize=4096;
private boolean status = true;
@SuppressLint("NewApi") @TargetApi(Build.VERSION_CODES.GINGERBREAD) @Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
if (android.os.Build.VERSION.SDK_INT > 9) {
StrictMode.ThreadPolicy policy = new StrictMode.ThreadPolicy.Builder().permitAll().build();
StrictMode.setThreadPolicy(policy);
}
receiveButton = (Button) findViewById (R.id.receive_button);
stopButton = (Button) findViewById (R.id.stop_button);
receive_label= (TextView) findViewById(R.id.receive_label);
receiveButton.setOnClickListener(receiveListener);
stopButton.setOnClickListener(stopListener);
port=(EditText) findViewById(R.id.editText1);
//AudioManager audioManager = (AudioManager) this.getSystemService(Context.AUDIO_SERVICE);
// sampleRate =Integer.parseInt( audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE));
}
private final OnClickListener stopListener = new OnClickListener() {
@Override
public void onClick(View v) {
status = false;
speaker.release();
Log.d("VR","Speaker released");
}
};
private final OnClickListener receiveListener = new OnClickListener() {
@Override
public void onClick(View arg0) {
status = true;
receive_label.setText("socket...1");
startReceiving();
}
};
public void startReceiving() {
Thread receiveThread = new Thread (new Runnable() {
@Override
public void run() {
try {
DatagramSocket socket = new DatagramSocket(50005);
Log.d("VR", "Socket Created");
byte[] buffer = new byte[1024];
for (int sampleRate : new int[] {8000, 11025, 16000, 22050,
32000, 37800, 44056, 44100}) { // add the rates you wish to check against
minBufSize = 4096;//AudioRecord.getMinBufferSize(sampleRate, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
if (minBufSize != AudioRecord.ERROR_BAD_VALUE) {
speaker = new AudioTrack(AudioManager.STREAM_MUSIC,sampleRate,channelConfig,audioFormat,minBufSize,AudioTrack.MODE_STREAM);
speaker.play();
}
}
//minimum buffer size. need to be careful. might cause problems. try setting manually if any problems faced
// int minBufSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat);
while(status == true) {
DatagramPacket packet = new DatagramPacket(buffer,buffer.length);
socket.receive(packet);
Log.d("VR", "Packet Received");
//reading content from packet
buffer=packet.getData();
Log.d("VR", "Packet data read into buffer");
//sending data to the Audiotrack obj i.e. speaker
speaker.write(buffer, 0, minBufSize);
Log.d("VR", "Writing buffer content to speaker");
}
} catch (SocketException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
});
receiveThread.start();
}}
Please give me your best help.
You need to run both apps on the same local network.
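For example, the server's LAN address can be printed and then used as the client's target for port 50005 (a sketch using only standard java.net calls; printLanAddresses is just an illustrative name):
// Print the device's site-local (LAN) IPv4 addresses so the client can target the right host.
import java.net.InetAddress;
import java.net.NetworkInterface;
import java.util.Enumeration;

static void printLanAddresses() throws Exception {
    Enumeration<NetworkInterface> ifaces = NetworkInterface.getNetworkInterfaces();
    while (ifaces.hasMoreElements()) {
        Enumeration<InetAddress> addrs = ifaces.nextElement().getInetAddresses();
        while (addrs.hasMoreElements()) {
            InetAddress addr = addrs.nextElement();
            if (addr.isSiteLocalAddress()) {
                System.out.println("Server reachable at " + addr.getHostAddress() + ":50005");
            }
        }
    }
}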