manager.setSpeakerphoneOn(true);
works on mobile phones but does not work on a tablet.
The sound does not play from the speaker. Does anybody know why?
Please help.
The MODIFY_AUDIO_SETTINGS permission is also added in the manifest file.
My code
Path for the audio file
String mainpath = "android.resource://com.example.texttospeech/raw/";
public void makeSound(String amount) {
AudioManager m_amAudioManager;
m_amAudioManager = (AudioManager)getSystemService(Context.AUDIO_SERVICE);
m_amAudioManager.setMode(AudioManager.MODE_CURRENT);
m_amAudioManager.setSpeakerphoneOn(true);
int numInput = Integer.parseInt(amount);
String amt = amount;
Log.d("The amount is : ", amt);
int length = amt.length();
Log.d("Length of the String : ", Integer.toString(length));
thousandsDigit = numInput / 1000;
ths = numInput % 1000;
hundredsDigit = ths / 100;
hs = ths % 100;
tensDigit = hs / 10;
onesDigit = hs % 10;
System.out.println(thousandsDigit);
System.out.println(hundredsDigit);
System.out.println(tensDigit);
System.out.println(onesDigit);
Log.d("Transaction type:",transactionType);
if (transactionType.equals("WITHDRAW")){
// withdraw();
final MediaPlayer mp11 = new MediaPlayer();
try {
mp11.setDataSource(AgentTransaction.this,
Uri.parse(mainpath + "withdraw"));
mp11.prepare();
} catch (IllegalArgumentException | SecurityException
| IllegalStateException | IOException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
}
mp11.start();
mp11.setOnCompletionListener(new MediaPlayer.OnCompletionListener() {
@Override
public void onCompletion(MediaPlayer mp) {
mp11.release();
}
});
}
}
Related
I have a project that plays IPTV. I would like to detect whether there is a video signal, so that I can show an AlertDialog when there isn't one. I could set a timer to wait for the signal to return, but I don't know how to detect whether there is a signal or not. Could you please help me? Below is the code I am working with. SdkVersion 22.
public class Television extends Activity {
VideoView videoView;
ImageView volmas, volmenos, voloff;
SeekBar seekbar;
TextView textview, tvvolumen;
AudioManager audioManager;
int i = 50;
int volumen = 7;
int volumenporcentaje;
Toast toast;
Exception e;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
this.requestWindowFeature(Window.FEATURE_NO_TITLE);
setContentView(R.layout.activity_television);
try {
String canal = getIntent().getStringExtra("Canal");
volmas = (ImageView) findViewById(R.id.volmas);
volmenos = (ImageView) findViewById(R.id.volmenos);
voloff = (ImageView) findViewById(R.id.voloff);
tvvolumen = (TextView) findViewById(R.id.tvvolumen);
volumenporcentaje = (volumen * 100) / 15;
tvvolumen.setText("" + volumenporcentaje + "%");
e = null;
videoView = (VideoView) this.findViewById(R.id.videoViewTV);
videoView.setVideoURI(Uri.parse(canal));
videoView.requestFocus();
videoView.start();
voloff.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
volumen = 0;
audioManager.setStreamVolume(AudioManager.STREAM_MUSIC, volumen, 0);
volumenporcentaje = (volumen * 100) / 15;
tvvolumen.setText("" + volumenporcentaje + "%");
}
});
volmas.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (volumen < 15) {
volumen = volumen + 1;
audioManager.setStreamVolume(AudioManager.STREAM_MUSIC, volumen, 0);
volumenporcentaje = (volumen * 100) / 15;
tvvolumen.setText("" + volumenporcentaje + "%");
}
if (volumen >= 15) {
Toast.makeText(Television.this, "Volumen al máximo", Toast.LENGTH_SHORT).show();
}
}
});
volmenos.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (volumen > 0) {
volumen = volumen - 1;
audioManager.setStreamVolume(AudioManager.STREAM_MUSIC, volumen, 0);
volumenporcentaje = (volumen * 100) / 15;
tvvolumen.setText("" + volumenporcentaje + "%");
}
if (volumen == 0) {
Toast.makeText(Television.this, "Volumen al mínimo", Toast.LENGTH_SHORT).show();
}
}
});
seekbar = (SeekBar) findViewById(R.id.seekBar1);
audioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
seekbar.setMax(audioManager.getStreamMaxVolume(AudioManager.STREAM_MUSIC));
audioManager.setStreamVolume(AudioManager.STREAM_MUSIC, volumen, 0);
} catch (Exception e) {
Errores.exception(e, this.getApplicationContext());
}
}
}
I recommend changing your approach: use the ExoMedia or ExoPlayer library. They are very simple.
Depending on your choice, you can use their callbacks to solve your problem :)
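For example, with ExoPlayer you can attach a listener and react when the stream stops delivering data. This is only a rough sketch, assuming a recent ExoPlayer 2.x release (the exact listener interface differs between versions) and reusing the canal URL from your activity; it is not a drop-in replacement for the VideoView code above:
SimpleExoPlayer player = new SimpleExoPlayer.Builder(this).build();
player.addListener(new Player.Listener() {
    @Override
    public void onPlayerError(PlaybackException error) {
        // The stream failed (e.g. no signal), so show the dialog here.
        new AlertDialog.Builder(Television.this)
                .setMessage("No signal")
                .setPositiveButton("OK", null)
                .show();
    }
});
player.setMediaItem(MediaItem.fromUri(canal));
player.prepare();
player.play();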
I am using a Raspberry Pi 3 and a DHT11 sensor for a temperature-monitoring project.
I have the following pin positions:
VCC to pin 2
Ground to pin 6
Output to GPIO BCM22, i.e. pin 15
The code I have used:
public class WeatherStationActivity extends Activity {
private Handler mHandler = new Handler();
private TextView mTxtStatus;
private PeripheralManagerService service = new PeripheralManagerService();
private Gpio tempGpio;
private int i = 0;
int[] dht11_dat = {0, 0, 0, 0, 0};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
Log.d("Weather station", "Started Weather Station");
setContentView(R.layout.activity_main);
mTxtStatus = (TextView) findViewById(R.id.txtStatus);
try {
tempGpio = service.openGpio("BCM22");
Handler handler = new Handler();
handler.postDelayed(new Runnable() {
@Override
public void run() {
if (i == 10) {
handler.removeCallbacks(this);
} else {
getTemp();
handler.postDelayed(this, 5000);
}
i++;
}
}, 5000);
} catch (Exception e) {
e.printStackTrace();
}
}
private void getTemp() {
boolean laststate = false;
try {
laststate = tempGpio.getValue();
} catch (IOException e) {
e.printStackTrace();
}
int j = 0;
final int MAXTIMINGS = 85;
dht11_dat[0] = dht11_dat[1] = dht11_dat[2] = dht11_dat[3] = dht11_dat[4] = 0;
try {
tempGpio.setDirection(Gpio.DIRECTION_OUT_INITIALLY_LOW);
// tempGpio.setActiveType(Gpio.ACTIVE_LOW);
tempGpio.setValue(false);
// Thread.sleep(18);
TimeUnit.MILLISECONDS.sleep(18);
// tempGpio.setActiveType(Gpio.ACTIVE_HIGH);
// tempGpio.setActiveType(Gpio.ACTIVE_HIGH);
tempGpio.setValue(true);
TimeUnit.MICROSECONDS.sleep(40);
tempGpio.setDirection(Gpio.DIRECTION_IN);
/* tempGpio.setActiveType(Gpio.ACTIVE_HIGH);
tempGpio.setValue(true);*/
// tempGpio.setValue(true);
StringBuilder value = new StringBuilder();
for (int i = 0; i < MAXTIMINGS; i++) {
int counter = 0;
while (tempGpio.getValue() == laststate) {
counter++;
TimeUnit.MICROSECONDS.sleep(1);
if (counter == 255) {
break;
}
}
laststate = tempGpio.getValue();
mTxtStatus.append("\nLast State of Sensor " + laststate);
if (counter == 255) {
break;
}
// ignore the first 3 transitions
if ((i >= 4) && (i % 2 == 0)) {
// shove each bit into the storage bytes
dht11_dat[j / 8] <<= 1;
if (counter > 16) {
dht11_dat[j / 8] |= 1;
}
j++;
}
}
// check we read 40 bits (8 bits x 5) and verify the checksum in the last byte
if ((j >= 40) && checkParity()) {
value.append(dht11_dat[2]).append(".").append(dht11_dat[3]);
Log.i("Logger", "temperature value readed: " + value.toString());
mTxtStatus.append("\nTemp " + value.toString());
} else {
mTxtStatus.append("\nNothing is working ");
Log.i("Logger", "Nothing is working ");
}
} catch (IOException e) {
e.printStackTrace();
} catch (InterruptedException e) {
e.printStackTrace();
} catch (Exception ex) {
ex.printStackTrace();
}
}
private boolean checkParity() {
return dht11_dat[4] == (dht11_dat[0] + dht11_dat[1] + dht11_dat[2] + dht11_dat[3] & 0xFF);
}
}
The above code gives me "Nothing is working" as output.
Any suggestions as to where I might be going wrong?
You can't read data from a DHT11 using a Raspberry Pi 3 with Android Things, because the DHT11's response pulses last from 26-28 µs up to 70 µs, while the maximum GPIO frequency of the Pi 3 under Android Things is around 3 kHz, which means a pulse duration of roughly 300 µs. Take a look at the answers to this question. A rough timing check is sketched below.
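As a quick check (just a sketch, reusing the tempGpio handle from the code above), you can time how long a single getValue() call takes; if it is anywhere near 300 µs, the individual 26-70 µs DHT11 bits cannot be sampled reliably:
try {
    tempGpio.setDirection(Gpio.DIRECTION_IN);
    int reads = 1000;
    long start = System.nanoTime();
    for (int k = 0; k < reads; k++) {
        tempGpio.getValue();                 // one GPIO poll per iteration
    }
    long perReadUs = (System.nanoTime() - start) / reads / 1000L;
    Log.d("GpioBench", "~" + perReadUs + " µs per read; DHT11 bits are only 26-70 µs wide");
} catch (IOException e) {
    e.printStackTrace();
}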
What I am doing: I am playing media with MediaPlayer from a URL.
What is happening: At the beginning, the screen is unresponsive and hangs while the song is buffering.
Question:
How do I overcome this?
Should I use an AsyncTask here instead of a Handler? If so, how should I modify the code I have posted?
Or should I just show a progress dialog here, in the handler? Is that possible? If so, how?
Code:
protected void newValue(TextView txtSrcId, TextView txtDstId) {
// TODO Auto-generated method stub
do {
current = mediaPlayer.getCurrentPosition();
System.out.println("duration - " + duration + " current- "
+ current);
int dSeconds = (int) (duration / 1000) % 60 ;
int dMinutes = (int) ((duration / (1000*60)) % 60);
int dHours = (int) ((duration / (1000*60*60)) % 24);
int cSeconds = (int) (current / 1000) % 60 ;
int cMinutes = (int) ((current / (1000*60)) % 60);
int cHours = (int) ((current / (1000*60*60)) % 24);
if(dHours == 0){
txtSrcId.setText(String.format("%02d:%02d", cMinutes, cSeconds));
txtDstId.setText(String.format("%02d:%02d", dMinutes, dSeconds));
//txtSrcId.setText("jodwjdwudwudguwdgwugduwgduwgduwgd");
}else{
txtSrcId.setText(String.format("%02d:%02d:%02d",cHours, cMinutes, cSeconds));
txtDstId.setText(String.format("%02d:%02d:%02d",dHours, dMinutes, dSeconds));
//txtSrcId.setText("jodwjdwudwudguwdgwugduwgduwgduwgd");
}
try{
Log.d("Value: ", String.valueOf((int) (current * 100 / duration)));
if(seekBarProgress.getProgress() >= 100){
break;
}
}catch (Exception e) {}
}while (seekBarProgress.getProgress() <= 100);
}
private Runnable onEverySecond = new Runnable() {
@Override
public void run(){
if(true == running){
if(seekBarProgress != null) {
seekBarProgress.setProgress(mediaPlayer.getCurrentPosition());
}
if(mediaPlayer.isPlaying()) {
seekBarProgress.postDelayed(onEverySecond, 1000);
//updateTime();
newValue(txtSrcId,txtDstId);
}
}
}
};
public void play() {
try {
mediaPlayer.setDataSource(url);
mediaPlayer.prepare();
} catch (IllegalStateException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
mediaPlayer.start();
seekBarProgress.postDelayed(onEverySecond, 1000);
}
@Override
public void onPrepared(MediaPlayer arg0) {
// TODO Auto-generated method stub
duration = mediaPlayer.getDuration();
seekBarProgress.setMax(duration);
seekBarProgress.postDelayed(onEverySecond, 1000);
}
@Override
public void onBufferingUpdate(MediaPlayer mp, int percent) {
/** Method which updates the SeekBar secondary progress by current song loading from URL position*/
seekBarProgress.setSecondaryProgress(percent);
}
Note: if more code is needed, let me know and I will edit the question.
You are looking for MediaPlayer#prepareAsync.
Prepares the player for playback, asynchronously. After setting the datasource and the display surface, you need to either call prepare() or prepareAsync(). For streams, you should call prepareAsync(), which returns immediately, rather than blocking until enough data has been buffered.
Use this method instead of prepare(). You will be notified when the media source is ready for playback via MediaPlayer.OnPreparedListener. A minimal sketch of the change is below.
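A minimal sketch of that change, keeping the fields from your question (mediaPlayer, url, duration, seekBarProgress, onEverySecond); playback is started in onPrepared rather than immediately after prepare:
public void play() {
    try {
        mediaPlayer.setDataSource(url);
        mediaPlayer.setOnPreparedListener(this);
        mediaPlayer.prepareAsync();          // returns immediately, UI stays responsive
    } catch (IllegalStateException | IOException e) {
        e.printStackTrace();
    }
}

@Override
public void onPrepared(MediaPlayer mp) {
    duration = mediaPlayer.getDuration();
    seekBarProgress.setMax(duration);
    mediaPlayer.start();                     // start only once buffering is done
    seekBarProgress.postDelayed(onEverySecond, 1000);
}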
My app loads two audio files from a database and stores them in an array. The user can play either of them by selecting one from a radio button group. Both are MP3s. One plays fine and its elapsed and total durations are displayed correctly. But the same functions display 00:00 as the total duration for the other. The seek bar also jumps its progress to 100% in this case, although the elapsed time is displayed correctly and the audio plays fine. Can someone tell me what the problem is? Why is this happening, and how can I resolve it?
audio_urdu's time is fine; the error is with audio_eng.
private void updateView(int i) throws JSONException
{
idx=0;
_imgBtnPlay.setClickable(false);
_imgBtnStop.setClickable(false);
JSONObject jObject=null;
jObject=Jarray.getJSONObject(i);
audioUrl_eng=jObject.getString("audio_eng");
audioUrl_urdu=jObject.getString("audio_urdu");
lbl_tDuration.setText("00:00");
lbl_cDuration.setText("00:00");
lbl_loading.setText("Loading audio files...");
loadAudio(audioUrl_eng);
}
// Loading audio files from URL
private void loadAudio(String url)
{
// TODO Auto-generated method stub
mMediaPlayer=new MediaPlayer();
mMediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
try
{
mMediaPlayer.setDataSource(url);
mMediaPlayer.prepareAsync();
}
catch(IOException e)
{
e.printStackTrace();
}
catch (IllegalArgumentException e)
{
e.printStackTrace();
}
catch (IllegalStateException e)
{
e.printStackTrace();
}
mMediaPlayer.setOnPreparedListener(this);
}
// Notify when audio is ready to be played
@Override
public void onPrepared(MediaPlayer mp)
{
// TODO Auto-generated method stub
audioPlayerList[idx]=mp;
idx++;
if (idx == 1)
{
audioPlayer = mp;
lbl_tDuration.setText(mSecToTimer(mp.getDuration()));
lbl_cDuration.setText(mSecToTimer(mp.getCurrentPosition()));
updateSeekBar();
loadAudio(audioUrl_urdu);
}
if (idx == 2)
{
// Enabling the media control buttons
_imgBtnPlay.setClickable(true);
_imgBtnStop.setClickable(true);
rdGrp.setClickable(true);
lbl_loading.setText("");
idx = 0;
}
}
public void onClick(View v)
{
switch(v.getId())
{
// calling search function
case R.id.imgBtnSearch:
onSearchRequested();
break;
// when play/pause button is tapped
case R.id.imgBtnPlay:
if (audioPlayer.isPlaying())
{
audioPlayer.pause();
_imgBtnPlay.setImageResource(R.drawable.ic_action_play);
}
else if (audioPlayer!=null)
{
audioPlayer.start();
_imgBtnPlay.setImageResource(R.drawable.ic_action_pause);
durationHandler.postDelayed(updateSeekBarTime, 100);
}
break;
// when stop button is tapped
case R.id.imgBtnStop:
audioPlayer.pause();
_imgBtnPlay.setImageResource(R.drawable.ic_action_play);
resetProgress();
break;
default:
break;
}
}
// Updating the seek bar's time after every 100 milliseconds
public void updateSeekBar()
{
durationHandler.postDelayed(updateSeekBarTime, 100);
}
// Updating the progress of seek bar
private Runnable updateSeekBarTime = new Runnable()
{
public void run()
{
long tDuration = audioPlayer.getDuration();
long cDuration = audioPlayer.getCurrentPosition();
lbl_tDuration.setText(mSecToTimer(tDuration));
lbl_cDuration.setText(mSecToTimer(cDuration));
int progress = (int) getProgressPercentage(cDuration, tDuration);
_seekbar.setProgress(progress);
durationHandler.postDelayed(this, 100);
}
};
// Converting milliseconds into min:sec format
public String mSecToTimer(long ms)
{
String finalTimerString = "";
String secString = "";
String minString = "";
// Convert total duration into minutes and seconds
int min = (int)(ms % (1000*60*60)) / (1000*60);
int sec = (int) ((ms % (1000*60*60)) % (1000*60) / 1000);
// Prepending 0 to seconds if it is one digit
if(sec < 10)
secString = "0" + sec;
else
secString = "" + sec;
// Prepending 0 to minutes if it is one digit
if(min < 10)
minString = "0" + min;
else
minString = "" + min;
finalTimerString = minString + ":" + secString;
return finalTimerString;
}
// calculating the percentage progress of seek bar
public int getProgressPercentage(long cDuration, long tDuration)
{
Double percentage = (double) 0;
long cSeconds = (int) (cDuration / 1000);
long tSeconds = (int) (tDuration / 1000);
percentage =(((double)cSeconds)/tSeconds)*100;
return percentage.intValue();
}
// Converting progress of seek bar into time duration in milliseconds
public int progressToTimer(int progress, int tDuration)
{
int cDuration = 0;
tDuration = (int) (tDuration / 1000);
cDuration = (int) ((((double)progress) / 100) * tDuration);
return cDuration * 1000;
}
// Reseting the progress of seek bar when stop button is tapped
public void resetProgress()
{
audioPlayer.seekTo(0);
lbl_cDuration.setText(mSecToTimer(0));
_seekbar.setProgress(0);
}
I have one audio file in English and one in Urdu; both are in the array audioPlayerList. The user can select the language using radio buttons, and idx is the variable that tells which audio file is to be played: audio_eng is at index 0 (idx = 0) and audio_urdu is at index 1 (idx = 1). The audio is selected as audioPlayer = audioPlayerList[idx].
The code for the radio button selection is this:
rdGrp.setOnCheckedChangeListener(new OnCheckedChangeListener()
{
@Override
public void onCheckedChanged(RadioGroup group, int checkedId)
{
// Find which radio button is selected
if (audioPlayer!=null)
{
if(audioPlayer.isPlaying())
audioPlayer.pause();
_imgBtnPlay.setImageResource(R.drawable.ic_action_play);
resetProgress();
if (checkedId == R.id.rdEng)
audioPlayer = audioPlayerList[0];
else if (checkedId == R.id.rdUrdu)
audioPlayer = audioPlayerList[1];
}
}
});
The duration of your 'audio_eng' file might be less than 1 second. When calculating the percentage you convert milliseconds to seconds, which results in a total length of 0. That's why the progress bar is set to 100% from the beginning (actually, an exception might even be thrown in this case; did you check that?).
When calculating the percentage, try not to convert cDuration and tDuration into seconds; divide the durations themselves in getProgressPercentage, as sketched below.
I couldn't find any other reason why you would get such a result.
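A minimal sketch of that change, keeping the method signature from your question:
// calculating the percentage progress of the seek bar from the raw millisecond values
public int getProgressPercentage(long cDuration, long tDuration)
{
    if (tDuration <= 0) {
        return 0;   // duration not reported yet (or shorter than 1 ms)
    }
    return (int) ((cDuration * 100.0) / tDuration);
}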
Is it possible to record voice from a Bluetooth headset and play it on the Android speaker simultaneously? I have finished recording audio from the headset with the code below, and I am now on the second step: playing this sound on the Android speaker. Please help me resolve it. Thank you so much.
_audioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
_audioManager.startBluetoothSco();
_recorder = new MediaRecorder();
_recorder.setAudioSource(MediaRecorder.AudioSource.DEFAULT);
_recorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
_recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
_recorder.setOutputFile(file.toString());
_recorder.prepare();
_recorder.start();
Recording using AudioRecord
public class MainActivity extends Activity {
public static final int SAMPLE_RATE = 16000;
private AudioRecord mRecorder;
private File mRecording;
private short[] mBuffer;
private final String startRecordingLabel = "Start recording";
private final String stopRecordingLabel = "Stop recording";
private boolean mIsRecording = false;
private ProgressBar mProgressBar;
float iGain = 1.0f;
CheckBox gain;
Button showPref;
OnBluetoothRecording bluetoothRecording;
protected int bitsPerSamples = 16;
@Override
public void onCreate(final Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.fragment_main);
initRecorder();
Button bluetooth = (Button)findViewById(R.id.blue);
showPref = (Button)findViewById(R.id.showPreferece);
gain = (CheckBox) findViewById(R.id.checkBox1);
mProgressBar = (ProgressBar) findViewById(R.id.progressBar);
showPref.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
startActivity(new Intent(getApplicationContext(),BluetoothPreferenceActivity.class));
}
});
final Button button = (Button) findViewById(R.id.start);
button.setText(startRecordingLabel);
bluetooth.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
// TODO Auto-generated method stub
Intent i = new Intent("");
}
});
gain.setOnCheckedChangeListener(new OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton buttonView,
boolean isChecked) {
if (gain.isChecked()) {
iGain = 5.0f;
} else {
iGain = 2.0f;
}
}
});
button.setOnClickListener(new OnClickListener() {
@Override
public void onClick(final View v) {
BluetoothRecordingManager.checkAndRecord(getApplicationContext(), new OnBluetoothRecording() {
@Override
public void onStartRecording(boolean state, boolean bluetoothFlag) {
Log.d("CallBack","starting Recording");
if (!mIsRecording) {
button.setText(stopRecordingLabel);
mIsRecording = true;
mRecorder.startRecording();
mRecording = getFile("raw");
startBufferedWrite(mRecording);
} else {
button.setText(startRecordingLabel);
mIsRecording = false;
mRecorder.stop();
File waveFile = getFile("wav");
try {
rawToWave(mRecording, waveFile);
} catch (IOException e) {
Toast.makeText(MainActivity.this, e.getMessage(),
Toast.LENGTH_SHORT).show();
}
Toast.makeText(MainActivity.this,
"Recorded to " + waveFile.getName(),
Toast.LENGTH_SHORT).show();
}
}
@Override
public void onCancelRecording() {
}
}, true);
}
});
}
@Override
public void onDestroy() {
mRecorder.release();
super.onDestroy();
}
private void initRecorder() {
int bufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
mBuffer = new short[bufferSize];
mRecorder = new AudioRecord(MediaRecorder.AudioSource.MIC, SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT,
bufferSize);
}
private void startBufferedWrite(final File file) {
new Thread(new Runnable() {
@Override
public void run() {
DataOutputStream output = null;
try {
output = new DataOutputStream(new BufferedOutputStream(
new FileOutputStream(file)));
while (mIsRecording) {
double sum = 0;
int readSize = mRecorder.read(mBuffer, 0,
mBuffer.length);
final int bytesPerSample = bitsPerSamples / 8;
final int emptySpace = 64 - bitsPerSamples;
int byteIndex = 0;
int byteIndex2 = 0;
int temp = 0;
int mLeftTemp = 0;
int mRightTemp = 0;
int a = 0;
int x = 0;
for (int frameIndex = 0; frameIndex < readSize; frameIndex++) {
for (int c = 0; c < 1; c++) {
if (iGain != 1) {
long accumulator = 0;
for (int b = 0; b < bytesPerSample; b++) {
accumulator += ((long) (mBuffer[byteIndex++] & 0xFF)) << (b * 8 + emptySpace);
}
double sample = ((double) accumulator / (double) Long.MAX_VALUE);
sample *= iGain;
int intValue = (int) ((double) sample * (double) Integer.MAX_VALUE);
for (int i = 0; i < bytesPerSample; i++) {
mBuffer[i + byteIndex2] = (byte) (intValue >>> ((i + 2) * 8) & 0xff);
}
byteIndex2 += bytesPerSample;
}
}// end for(channel)
// mBuffer[frameIndex] *=iGain;
if (mBuffer[frameIndex] > 32765) {
mBuffer[frameIndex] = 32767;
} else if (mBuffer[frameIndex] < -32767) {
mBuffer[frameIndex] = -32767;
}
output.writeShort(mBuffer[frameIndex]);
sum += mBuffer[frameIndex] * mBuffer[frameIndex];
}
if (readSize > 0) {
final double amplitude = sum / readSize;
mProgressBar.setProgress((int) Math.sqrt(amplitude));
}
}
} catch (IOException e) {
Toast.makeText(MainActivity.this, e.getMessage(),
Toast.LENGTH_SHORT).show();
} finally {
mProgressBar.setProgress(0);
if (output != null) {
try {
output.flush();
} catch (IOException e) {
Toast.makeText(MainActivity.this, e.getMessage(),
Toast.LENGTH_SHORT).show();
} finally {
try {
output.close();
} catch (IOException e) {
Toast.makeText(MainActivity.this, e.getMessage(),
Toast.LENGTH_SHORT).show();
}
}
}
}
}
}).start();
}
private void rawToWave(final File rawFile, final File waveFile)
throws IOException {
byte[] rawData = new byte[(int) rawFile.length()];
DataInputStream input = null;
try {
input = new DataInputStream(new FileInputStream(rawFile));
input.read(rawData);
} finally {
if (input != null) {
input.close();
}
}
DataOutputStream output = null;
try {
output = new DataOutputStream(new FileOutputStream(waveFile));
// WAVE header
// see http://ccrma.stanford.edu/courses/422/projects/WaveFormat/
writeString(output, "RIFF"); // chunk id
writeInt(output, 36 + rawData.length); // chunk size
writeString(output, "WAVE"); // format
writeString(output, "fmt "); // subchunk 1 id
writeInt(output, 16); // subchunk 1 size
writeShort(output, (short) 1); // audio format (1 = PCM)
writeShort(output, (short) 1); // number of channels
writeInt(output, SAMPLE_RATE); // sample rate
writeInt(output, SAMPLE_RATE * 2); // byte rate
writeShort(output, (short) 2); // block align
writeShort(output, (short) 16); // bits per sample
writeString(output, "data"); // subchunk 2 id
writeInt(output, rawData.length); // subchunk 2 size
// Audio data (conversion big endian -> little endian)
short[] shorts = new short[rawData.length / 2];
ByteBuffer.wrap(rawData).order(ByteOrder.LITTLE_ENDIAN)
.asShortBuffer().get(shorts);
ByteBuffer bytes = ByteBuffer.allocate(shorts.length * 2);
for (short s : shorts) {
// Apply Gain
/*
* s *= iGain; if(s>32767) { s=32767; } else if(s<-32768) {
* s=-32768; }
*/
bytes.putShort(s);
}
output.write(bytes.array());
} finally {
if (output != null) {
output.close();
}
}
}
private File getFile(final String suffix) {
Time time = new Time();
time.setToNow();
return new File(Environment.getExternalStorageDirectory(),
time.format("%Y%m%d%H%M%S") + "." + suffix);
}
private void writeInt(final DataOutputStream output, final int value)
throws IOException {
output.write(value >> 0);
output.write(value >> 8);
output.write(value >> 16);
output.write(value >> 24);
}
private void writeShort(final DataOutputStream output, final short value)
throws IOException {
output.write(value >> 0);
output.write(value >> 8);
}
private void writeString(final DataOutputStream output, final String value)
throws IOException {
for (int i = 0; i < value.length(); i++) {
output.write(value.charAt(i));
}
}
}