I have a problem with the Android Wear microphone.
A Bluetooth headset works with Android Wear, and my app is a VoIP application.
When I play voice received from the network in my app with a Bluetooth headset paired, the headset plays the audio. But when I try to record voice from the microphone, the Android Wear device's built-in microphone turns on, not the headset's.
How can I capture voice from the Bluetooth headset microphone?
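From what I have read, on phones the usual approach is to bring the Bluetooth SCO link up before recording and to use the voice-communication audio source, but I have not confirmed whether this behaves the same on Wear. A minimal sketch of what I mean, assuming RECORD_AUDIO and MODIFY_AUDIO_SETTINGS are granted:

AudioManager am = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
am.setMode(AudioManager.MODE_IN_COMMUNICATION);
am.startBluetoothSco();      // asynchronous; completion is broadcast as ACTION_SCO_AUDIO_STATE_UPDATED
am.setBluetoothScoOn(true);  // route audio over the SCO link once it is up
// Once SCO is connected, record with the voice-communication source so the
// platform should pick the headset microphone:
AudioRecord scoRecord = new AudioRecord.Builder()
        .setAudioSource(MediaRecorder.AudioSource.VOICE_COMMUNICATION)
        .setAudioFormat(new AudioFormat.Builder()
                .setSampleRate(16000) // SCO links are limited to 8/16 kHz
                .setChannelMask(AudioFormat.CHANNEL_IN_MONO)
                .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
                .build())
        .build();
// When finished: am.stopBluetoothSco(); am.setBluetoothScoOn(false);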
Attached is my Player class:
public class Player {
private static final String TAG = Player.class.getName();
private AudioTrack audioTrack;
private boolean isWorking;
public Player() {
try {
audioTrack = new AudioTrack(
new AudioAttributes.Builder()
.setUsage(AudioAttributes.USAGE_VOICE_COMMUNICATION)
.setContentType(AudioAttributes.CONTENT_TYPE_SPEECH)
.setLegacyStreamType(AudioManager.STREAM_MUSIC)
.build(),
new AudioFormat.Builder()
.setChannelMask(AudioFormat.CHANNEL_OUT_MONO)
.setEncoding(AudioFormat.ENCODING_PCM_16BIT)
.setSampleRate(AudioConsts.SAMPLERATE)
.build(),
AudioConsts.GetPlayerBufferSize(),
AudioTrack.MODE_STREAM,
AudioManager.AUDIO_SESSION_ID_GENERATE);
} catch (Exception e) {
Log.e(TAG, e.toString());
}
}
public void play() {
audioTrack.play();
}
public void stopReading() {
if (!isWorking)
return;
audioTrack.release();
isWorking = false;
}
public void appendForPlayback(byte[] audioMessage, int size) {
new Executor().doInBackground(audioMessage);
}
private class Executor extends AsyncTask<byte[], Void, Void> {
@Override
protected Void doInBackground(byte[]... bytes) {
if (bytes != null) {
if (bytes.length > 0) {
byte[] audioMessage = bytes[0];
if (audioMessage.length != 0) {
int written = audioTrack.write(audioMessage, 0, audioMessage.length);
if (written != audioMessage.length) {
Log.d(TAG, "WTF");
}
}
}
}
return null;
}
}}
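One thing worth flagging in Player: new Executor().doInBackground(audioMessage) invokes the method directly on the calling thread, so nothing actually runs in the background. If asynchronous playback is intended, the task should be scheduled instead; the default AsyncTask executor is serial, which also keeps the audio chunks in order:

new Executor().execute(audioMessage); // runs doInBackground on AsyncTask's worker thread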
And here is my Recorder class:
public class Recorder {
private static final String TAG = Recorder.class.getName();
private boolean isAlive;
private Thread recordThread;
private IRecorderBytesListener listener;
private AudioRecord audioRecord;
public Recorder() {
isAlive = true;
audioRecord = new AudioRecord.Builder()
.setAudioSource(MediaRecorder.AudioSource.MIC)
.setAudioFormat(new AudioFormat.Builder()
.setSampleRate(AudioConsts.SAMPLERATE)
.setEncoding(AudioConsts.ENCODING_PCM_16BIT)
.build())
.setBufferSizeInBytes(AudioConsts.GetRecorderBufferSize())
.build();
//audioRecord.setPreferredDevice(audioDeviceInfo);
recordThread = new Thread(() -> {
ByteBuffer buffer = ByteBuffer.allocateDirect(AudioConsts.GetRecorderBufferSize());
byte[] audioMsg = new byte[AudioConsts.FRAME_SIZE * AudioConsts.ENCODING_PCM_16BIT];
while (isAlive) {
if (audioRecord.getRecordingState() == AudioRecord.RECORDSTATE_STOPPED) {
try {
Thread.sleep(50);
} catch (Exception e) {
Log.d(TAG, "hz");
}
continue;
}
buffer = (ByteBuffer) buffer.rewind();
int len = audioRecord.read(buffer, AudioConsts.GetRecorderBufferSize());
if (len != AudioConsts.GetRecorderBufferSize())
Log.d(TAG, "WTF LEN");
len -= AudioConsts.OFFSET_AUDIO_RECORDER;
if (len > 0) {
try {
System.arraycopy(buffer.array(), AudioConsts.OFFSET_AUDIO_RECORDER,
audioMsg, 0, len);
if (listener != null)
listener.bytesReceived(audioMsg, len);
} catch (Exception e) {
Log.e(TAG, e.toString());
}
} else {
Log.d(TAG, "WTF");
}
}
audioRecord.stop();
});
recordThread.start();
}
public void startRecording() {
audioRecord.startRecording();
}
public void stopRecording() {
audioRecord.stop();
}
public void setListener(IRecorderBytesListener listener) {
this.listener = listener;
}
public void dispose() {
isAlive = false;
}}
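About the commented-out setPreferredDevice line in the constructor: a sketch I have been considering (API 23+, not verified on Wear) is to look up the Bluetooth SCO input device and pin the AudioRecord to it:

AudioManager am = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
for (AudioDeviceInfo device : am.getDevices(AudioManager.GET_DEVICES_INPUTS)) {
    if (device.getType() == AudioDeviceInfo.TYPE_BLUETOOTH_SCO) {
        audioRecord.setPreferredDevice(device); // route capture to the headset mic
        break;
    }
}

Here context is whatever Context the Recorder is given; the Recorder above does not currently take one, so this is only a sketch.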
Sorry for my English.
In my Android app, videos play with subtitles in a VideoView. The problem: when I press forward or drag the seek bar forward, the subtitles display, but when I press backward or drag the seek bar backwards, the subtitles stop displaying. The full code is below.
public class MainActivity extends AppCompatActivity implements View.OnTouchListener,MediaPlayer.OnInfoListener,MediaPlayer.OnSeekCompleteListener {
MediaController ctlr;
VideoView videoview;
FrameLayout playercontrolerview;
View view_full_cc;
TextView tv_subtitleText;
private static Handler handler = new Handler();
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
videoview = (VideoView) findViewById(R.id.VideoView);
tv_subtitleText=(TextView) findViewById(R.id.tv_subtitleText);
try {
// Start the MediaController
setVideoView();
preparedvideo();
} catch (Exception e) {
Log.e("Error", e.getMessage());
e.printStackTrace();
}
}
private void setVideoView() {
ctlr = new MediaController(MainActivity.this) {
@Override
public void show() {
repositionSubtitle(true);
super.show();
}
@Override
public void hide() {
repositionSubtitle(false);
super.hide();
}
};
playercontrolerview = (FrameLayout) ctlr.getParent();
ctlr.setMediaPlayer(videoview);
ctlr.setAnchorView(videoview);
videoview.setMediaController(ctlr);
videoview.requestFocus();
}
private void preparedvideo(){
videoview.setVideoURI(Uri.parse("android.resource://" + getPackageName() + "/" + R.raw.samplevideo));
videoview.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
@Override
public void onPrepared(MediaPlayer mp) {
try {
mp.addTimedTextSource(getSubtitleFile(R.raw.sub1), MediaPlayer.MEDIA_MIMETYPE_TEXT_SUBRIP);
int textTrackIndex = findTrackIndexFor(MediaPlayer.TrackInfo.MEDIA_TRACK_TYPE_TIMEDTEXT, mp.getTrackInfo());
if (textTrackIndex >= 0) {
try {
mp.selectTrack(textTrackIndex);
}catch (Exception e){
e.printStackTrace();
}
} else {
Log.w("subtitles", "Cannot find text track!");
}
mp.setOnTimedTextListener(new MediaPlayer.OnTimedTextListener() {
@Override
public void onTimedText(final MediaPlayer mp, final TimedText text) {
if(text!=null){
handler.post(new Runnable() {
@Override
public void run() {
try {
int seconds = mp.getCurrentPosition() / 1000;
Log.e("Subtitle", "subtitle Info " + text.getText());
tv_subtitleText.setText(text.getText());
}catch (Exception w){
w.printStackTrace();
}
}
});
}else{
Log.e("Subtitle", "subtitle Info null ");
}
}
});
}catch (Exception e){
e.printStackTrace();
}
try {
videoview.start();
}catch (Exception e){
e.printStackTrace();
}
}
});
}
public void repositionSubtitle(boolean isShown) {
FrameLayout.LayoutParams params = (FrameLayout.LayoutParams) tv_subtitleText.getLayoutParams();
if (params != null) {
if (isShown)
params.bottomMargin = 150;
else
params.bottomMargin = 0;
tv_subtitleText.setLayoutParams(params);
}
}
private int findTrackIndexFor(int mediaTrackType, MediaPlayer.TrackInfo[] trackInfo) {
int index = -1;
for (int i = 0; i < trackInfo.length; i++) {
if (trackInfo[i].getTrackType() == mediaTrackType) {
return i;
}
}
return index;
}
private String getSubtitleFile(int resId) {
String fileName = getResources().getResourceEntryName(resId);
File subtitleFile = getFileStreamPath(fileName);
if (subtitleFile.exists()) {
Log.d("subtitle", "Subtitle already exists");
return subtitleFile.getAbsolutePath();
}
Log.d("subtitle", "Subtitle does not exists, copy it from res/raw");
// Copy the file from the res/raw folder to your app folder on the
// device
InputStream inputStream = null;
OutputStream outputStream = null;
try {
inputStream = getResources().openRawResource(resId);
outputStream = new FileOutputStream(subtitleFile, false);
copyFile(inputStream, outputStream);
return subtitleFile.getAbsolutePath();
} catch (Exception e) {
e.printStackTrace();
} finally {
closeStreams(inputStream, outputStream);
}
return "";
}
private void copyFile(InputStream inputStream, OutputStream outputStream)
throws IOException {
final int BUFFER_SIZE = 1024;
byte[] buffer = new byte[BUFFER_SIZE];
int length = -1;
while ((length = inputStream.read(buffer)) != -1) {
outputStream.write(buffer, 0, length);
}
}
// A handy method I use to close all the streams
private void closeStreams(Closeable... closeables) {
if (closeables != null) {
for (Closeable stream : closeables) {
if (stream != null) {
try {
stream.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
}
@Override
public boolean onInfo(MediaPlayer mp, int what, int extra) {
videoview.getCurrentPosition();
// Collection<Caption> subtitles = srt.captions.values();
return false;
}
@Override
public boolean onTouch(View v, MotionEvent event) {
return false;
}
@Override
public void onSeekComplete(MediaPlayer mp) {
}
}
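A workaround sketch that may help (I have not verified it is the proper fix): register an OnSeekCompleteListener on the MediaPlayer inside onPrepared, clear the stale cue, and re-select the timed-text track so cue delivery restarts after a backward seek:

// Inside onPrepared(mp), after selecting the text track:
mp.setOnSeekCompleteListener(new MediaPlayer.OnSeekCompleteListener() {
    @Override
    public void onSeekComplete(MediaPlayer player) {
        handler.post(new Runnable() {
            @Override
            public void run() {
                tv_subtitleText.setText(""); // drop the stale subtitle
            }
        });
        try {
            // Hypothetical nudge: toggling the track selection can restart
            // onTimedText callbacks after seeking backwards.
            int idx = findTrackIndexFor(MediaPlayer.TrackInfo.MEDIA_TRACK_TYPE_TIMEDTEXT,
                    player.getTrackInfo());
            if (idx >= 0) {
                player.deselectTrack(idx);
                player.selectTrack(idx);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
});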
I am working on a live-streaming application using RTMP. I am at an early stage; I have gotten a lot working, but I am now stuck in one place. I am using a surface view, and my video is slightly stretched vertically. The code is below.
This is my SrsCameraView:
public class SrsCameraView extends GLSurfaceView implements GLSurfaceView.Renderer {
private GPUImageFilter magicFilter;
private SurfaceTexture surfaceTexture;
private int mOESTextureId = OpenGLUtils.NO_TEXTURE;
private int mSurfaceWidth;
private int mSurfaceHeight;
private int mPreviewWidth;
private int mPreviewHeight;
private boolean mIsEncoding;
private boolean mIsTorchOn = false;
private float mInputAspectRatio;
private float mOutputAspectRatio;
private float[] mProjectionMatrix = new float[16];
private float[] mSurfaceMatrix = new float[16];
private float[] mTransformMatrix = new float[16];
private Camera mCamera;
private ByteBuffer mGLPreviewBuffer;
private final static int PIXEL_FORMAT = ImageFormat.NV21;
private int mCamId = -1;
private int mPreviewRotation = 90;
private int mPreviewOrientation = Configuration.ORIENTATION_PORTRAIT;
private Thread worker;
private final Object writeLock = new Object();
private ConcurrentLinkedQueue<IntBuffer> mGLIntBufferCache = new
ConcurrentLinkedQueue<>();
private PreviewCallback mPrevCb;
public SrsCameraView(Context context) {
this(context, null);
}
public SrsCameraView(Context context, AttributeSet attrs) {
super(context, attrs);
setEGLContextClientVersion(2);
setRenderer(this);
setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
}
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
GLES20.glDisable(GL10.GL_DITHER);
GLES20.glClearColor(0, 0, 0, 0);
magicFilter = new GPUImageFilter(MagicFilterType.NONE);
magicFilter.init(getContext().getApplicationContext());
magicFilter.onInputSizeChanged(mPreviewWidth, mPreviewHeight);
mOESTextureId = OpenGLUtils.getExternalOESTextureID();
surfaceTexture = new SurfaceTexture(mOESTextureId);
surfaceTexture.setOnFrameAvailableListener(new
SurfaceTexture.OnFrameAvailableListener() {
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
requestRender();
}
});
// For camera preview on activity creation
if (mCamera != null) {
try {
mCamera.setPreviewTexture(surfaceTexture);
} catch (IOException ioe) {
ioe.printStackTrace();
}
}
}
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
GLES20.glViewport(0, 0, width, height);
mSurfaceWidth = width;
mSurfaceHeight = height;
magicFilter.onDisplaySizeChanged(width, height);
mOutputAspectRatio = width > height ? (float) width / height : (float)
height / width;
float aspectRatio = mOutputAspectRatio / mInputAspectRatio;
if (width > height) {
Matrix.orthoM(mProjectionMatrix, 0, -1.0f, 1.0f, -aspectRatio,
aspectRatio, -1.0f, 1.0f);
} else {
Matrix.orthoM(mProjectionMatrix, 0, -aspectRatio, aspectRatio, -1.0f, 1.0f, -1.0f, 1.0f);
}
}
@Override
public void onDrawFrame(GL10 gl) {
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
surfaceTexture.updateTexImage();
surfaceTexture.getTransformMatrix(mSurfaceMatrix);
Matrix.multiplyMM(mTransformMatrix, 0, mSurfaceMatrix, 0,
mProjectionMatrix, 0);
magicFilter.setTextureTransformMatrix(mTransformMatrix);
magicFilter.onDrawFrame(mOESTextureId);
if (mIsEncoding) {
mGLIntBufferCache.add(magicFilter.getGLFboBuffer());
synchronized (writeLock) {
writeLock.notifyAll();
}
}
}
public void setPreviewCallback(PreviewCallback cb) {
mPrevCb = cb;
}
public int[] setPreviewResolution(int width, int height) {
getHolder().setFixedSize(width, height);
mCamera = openCamera();
mPreviewWidth = width;
mPreviewHeight = height;
Camera.Size rs = adaptPreviewResolution(mCamera.new Size(width,
height));
if (rs != null) {
mPreviewWidth = rs.width;
mPreviewHeight = rs.height;
}
mCamera.getParameters().setPreviewSize(mPreviewWidth, mPreviewHeight);
mGLPreviewBuffer = ByteBuffer.allocateDirect(mPreviewWidth *
mPreviewHeight * 4);
mInputAspectRatio = mPreviewWidth > mPreviewHeight ?
(float) mPreviewWidth / mPreviewHeight : (float) mPreviewHeight /
mPreviewWidth;
return new int[] { mPreviewWidth, mPreviewHeight };
}
public boolean setFilter(final MagicFilterType type) {
if (mCamera == null) {
return false;
}
queueEvent(new Runnable() {
@Override
public void run() {
if (magicFilter != null) {
magicFilter.destroy();
}
magicFilter = MagicFilterFactory.initFilters(type);
if (magicFilter != null) {
magicFilter.init(getContext().getApplicationContext());
magicFilter.onInputSizeChanged(mPreviewWidth,
mPreviewHeight);
magicFilter.onDisplaySizeChanged(mSurfaceWidth,
mSurfaceHeight);
}
}
});
requestRender();
return true;
}
private void deleteTextures() {
if (mOESTextureId != OpenGLUtils.NO_TEXTURE) {
queueEvent(new Runnable() {
@Override
public void run() {
GLES20.glDeleteTextures(1, new int[]{ mOESTextureId }, 0);
mOESTextureId = OpenGLUtils.NO_TEXTURE;
}
});
}
}
public void setCameraId(int id) {
mCamId = id;
setPreviewOrientation(mPreviewOrientation);
}
public void setPreviewOrientation(int orientation) {
mPreviewOrientation = orientation;
Camera.CameraInfo info = new Camera.CameraInfo();
Camera.getCameraInfo(mCamId, info);
if (orientation == Configuration.ORIENTATION_PORTRAIT) {
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
mPreviewRotation = info.orientation % 360;
mPreviewRotation = (360 - mPreviewRotation) % 360; // compensate the mirror
} else {
mPreviewRotation = (info.orientation + 360) % 360;
}
} else if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
mPreviewRotation = (info.orientation + 90) % 360;
mPreviewRotation = (360 - mPreviewRotation) % 360; // compensate the mirror
} else {
mPreviewRotation = (info.orientation + 270) % 360;
}
}
}
public int getCameraId() {
return mCamId;
}
public void enableEncoding() {
worker = new Thread(new Runnable() {
@Override
public void run() {
while (!Thread.interrupted()) {
while (!mGLIntBufferCache.isEmpty()) {
IntBuffer picture = mGLIntBufferCache.poll();
mGLPreviewBuffer.asIntBuffer().put(picture.array());
mPrevCb.onGetRgbaFrame(mGLPreviewBuffer.array(),
mPreviewWidth, mPreviewHeight);
}
// Waiting for next frame
synchronized (writeLock) {
try {
// isEmpty() may take some time, so we set timeout to detect next frame
writeLock.wait(500);
} catch (InterruptedException ie) {
worker.interrupt();
}
}
}
}
});
worker.start();
mIsEncoding = true;
}
public void disableEncoding() {
mIsEncoding = false;
mGLIntBufferCache.clear();
if (worker != null) {
worker.interrupt();
try {
worker.join();
} catch (InterruptedException e) {
e.printStackTrace();
worker.interrupt();
}
worker = null;
}
}
public boolean startCamera() {
if (mCamera == null) {
mCamera = openCamera();
if (mCamera == null) {
return false;
}
}
Camera.Parameters params = mCamera.getParameters();
params.setPictureSize(mPreviewWidth, mPreviewHeight);
params.setPreviewSize(mPreviewWidth, mPreviewHeight);
int[] range = adaptFpsRange(SrsEncoder.VFPS,
params.getSupportedPreviewFpsRange());
params.setPreviewFpsRange(range[0], range[1]);
params.setPreviewFormat(ImageFormat.NV21);
params.setFlashMode(Camera.Parameters.FLASH_MODE_OFF);
params.setWhiteBalance(Camera.Parameters.WHITE_BALANCE_AUTO);
params.setSceneMode(Camera.Parameters.SCENE_MODE_AUTO);
List<String> supportedFocusModes = params.getSupportedFocusModes();
if (supportedFocusModes != null && !supportedFocusModes.isEmpty()) {
if (supportedFocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) {
    params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
} else if (supportedFocusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
params.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
mCamera.autoFocus(null);
} else {
params.setFocusMode(supportedFocusModes.get(0));
}
}
List<String> supportedFlashModes = params.getSupportedFlashModes();
if (supportedFlashModes != null && !supportedFlashModes.isEmpty()) {
if (supportedFlashModes.contains(Camera.Parameters.FLASH_MODE_TORCH)) {
if (mIsTorchOn) {
params.setFlashMode(Camera.Parameters.FLASH_MODE_TORCH);
}
} else {
params.setFlashMode(supportedFlashModes.get(0));
}
}
mCamera.setParameters(params);
mCamera.setDisplayOrientation(mPreviewRotation);
try {
mCamera.setPreviewTexture(surfaceTexture);
} catch (IOException e) {
e.printStackTrace();
}
mCamera.startPreview();
return true;
}
public void stopCamera() {
disableEncoding();
if (mCamera != null) {
mCamera.stopPreview();
mCamera.release();
mCamera = null;
}
}
private Camera openCamera() {
Camera camera;
if (mCamId < 0) {
Camera.CameraInfo info = new Camera.CameraInfo();
int numCameras = Camera.getNumberOfCameras();
int frontCamId = -1;
int backCamId = -1;
for (int i = 0; i < numCameras; i++) {
Camera.getCameraInfo(i, info);
if (info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
backCamId = i;
} else if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT)
{
frontCamId = i;
break;
}
}
if (frontCamId != -1) {
mCamId = frontCamId;
} else if (backCamId != -1) {
mCamId = backCamId;
} else {
mCamId = 0;
}
}
camera = Camera.open(mCamId);
return camera;
}
private Camera.Size adaptPreviewResolution(Camera.Size resolution) {
float diff = 100f;
float xdy = (float) resolution.width / (float) resolution.height;
Camera.Size best = null;
for (Camera.Size size :
mCamera.getParameters().getSupportedPreviewSizes()) {
if (size.equals(resolution)) {
return size;
}
float tmp = Math.abs(((float) size.width / (float) size.height) -
xdy);
if (tmp < diff) {
diff = tmp;
best = size;
}
}
return best;
}
private int[] adaptFpsRange(int expectedFps, List<int[]> fpsRanges) {
expectedFps *= 1000;
int[] closestRange = fpsRanges.get(0);
int measure = Math.abs(closestRange[0] - expectedFps) +
Math.abs(closestRange[1] - expectedFps);
for (int[] range : fpsRanges) {
if (range[0] <= expectedFps && range[1] >= expectedFps) {
int curMeasure = Math.abs(range[0] - expectedFps) +
Math.abs(range[1] - expectedFps);
if (curMeasure < measure) {
closestRange = range;
measure = curMeasure;
}
}
}
return closestRange;
}
public interface PreviewCallback {
void onGetRgbaFrame(byte[] data, int width, int height);
}
}
This is my MainActivity:
public class MainActivity extends AppCompatActivity implements RtmpHandler.RtmpListener,
SrsRecordHandler.SrsRecordListener, SrsEncodeHandler.SrsEncodeListener {
private static final String TAG = "Yasea";
Button btnPublish = null;
Button btnSwitchCamera = null;
Button btnRecord = null;
Button btnSwitchEncoder = null;
private SharedPreferences sp;
private String rtmpUrl = "rtmp://00.00.000.00/live/" + getRandomAlphaString(3) + '/' + getRandomAlphaDigitString(5);
private String recPath = Environment.getExternalStorageDirectory().getPath() + "/test.mp4";
private SrsPublisher mPublisher;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.activity_main);
// response screen rotation event
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_FULL_SENSOR);
// restore data.
sp = getSharedPreferences("Yasea", MODE_PRIVATE);
rtmpUrl = sp.getString("rtmpUrl", rtmpUrl);
// initialize url.
final EditText efu = (EditText) findViewById(R.id.url);
efu.setText(rtmpUrl);
btnPublish = (Button) findViewById(R.id.publish);
btnSwitchCamera = (Button) findViewById(R.id.swCam);
btnRecord = (Button) findViewById(R.id.record);
btnSwitchEncoder = (Button) findViewById(R.id.swEnc);
mPublisher = new SrsPublisher((SrsCameraView) findViewById(R.id.glsurfaceview_camera));
mPublisher.setEncodeHandler(new SrsEncodeHandler(this));
mPublisher.setRtmpHandler(new RtmpHandler(this));
mPublisher.setRecordHandler(new SrsRecordHandler(this));
mPublisher.setPreviewResolution(640, 480);
mPublisher.setOutputResolution(720, 1280);
mPublisher.setVideoHDMode();
mPublisher.startCamera();
btnPublish.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (btnPublish.getText().toString().contentEquals("publish")) {
rtmpUrl = efu.getText().toString();
SharedPreferences.Editor editor = sp.edit();
editor.putString("rtmpUrl", rtmpUrl);
editor.apply();
mPublisher.startPublish(rtmpUrl);
mPublisher.startCamera();
if (btnSwitchEncoder.getText().toString().contentEquals("soft encoder")) {
Toast.makeText(getApplicationContext(), "Use hard encoder", Toast.LENGTH_SHORT).show();
} else {
Toast.makeText(getApplicationContext(), "Use soft encoder", Toast.LENGTH_SHORT).show();
}
btnPublish.setText("stop");
btnSwitchEncoder.setEnabled(false);
} else if (btnPublish.getText().toString().contentEquals("stop")) {
mPublisher.stopPublish();
mPublisher.stopRecord();
btnPublish.setText("publish");
btnRecord.setText("record");
btnSwitchEncoder.setEnabled(true);
}
}
});
btnSwitchCamera.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
mPublisher.switchCameraFace((mPublisher.getCamraId() + 1) % Camera.getNumberOfCameras());
}
});
btnRecord.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (btnRecord.getText().toString().contentEquals("record")) {
if (mPublisher.startRecord(recPath)) {
btnRecord.setText("pause");
}
} else if (btnRecord.getText().toString().contentEquals("pause")) {
mPublisher.pauseRecord();
btnRecord.setText("resume");
} else if (btnRecord.getText().toString().contentEquals("resume")) {
mPublisher.resumeRecord();
btnRecord.setText("pause");
}
}
});
btnSwitchEncoder.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (btnSwitchEncoder.getText().toString().contentEquals("soft encoder")) {
mPublisher.switchToSoftEncoder();
btnSwitchEncoder.setText("hard encoder");
} else if (btnSwitchEncoder.getText().toString().contentEquals("hard encoder")) {
mPublisher.switchToHardEncoder();
btnSwitchEncoder.setText("soft encoder");
}
}
});
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.menu_main, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
//noinspection SimplifiableIfStatement
if (id == R.id.action_settings) {
return true;
} else {
switch (id) {
case R.id.cool_filter:
mPublisher.switchCameraFilter(MagicFilterType.COOL);
break;
case R.id.beauty_filter:
mPublisher.switchCameraFilter(MagicFilterType.BEAUTY);
break;
case R.id.early_bird_filter:
mPublisher.switchCameraFilter(MagicFilterType.EARLYBIRD);
break;
case R.id.evergreen_filter:
mPublisher.switchCameraFilter(MagicFilterType.EVERGREEN);
break;
case R.id.n1977_filter:
mPublisher.switchCameraFilter(MagicFilterType.N1977);
break;
case R.id.nostalgia_filter:
mPublisher.switchCameraFilter(MagicFilterType.NOSTALGIA);
break;
case R.id.romance_filter:
mPublisher.switchCameraFilter(MagicFilterType.ROMANCE);
break;
case R.id.sunrise_filter:
mPublisher.switchCameraFilter(MagicFilterType.SUNRISE);
break;
case R.id.sunset_filter:
mPublisher.switchCameraFilter(MagicFilterType.SUNSET);
break;
case R.id.tender_filter:
mPublisher.switchCameraFilter(MagicFilterType.TENDER);
break;
case R.id.toast_filter:
mPublisher.switchCameraFilter(MagicFilterType.TOASTER2);
break;
case R.id.valencia_filter:
mPublisher.switchCameraFilter(MagicFilterType.VALENCIA);
break;
case R.id.walden_filter:
mPublisher.switchCameraFilter(MagicFilterType.WALDEN);
break;
case R.id.warm_filter:
mPublisher.switchCameraFilter(MagicFilterType.WARM);
break;
case R.id.original_filter:
default:
mPublisher.switchCameraFilter(MagicFilterType.NONE);
break;
}
}
setTitle(item.getTitle());
return super.onOptionsItemSelected(item);
}
@Override
protected void onResume() {
super.onResume();
final Button btn = (Button) findViewById(R.id.publish);
btn.setEnabled(true);
mPublisher.resumeRecord();
}
@Override
protected void onPause() {
super.onPause();
mPublisher.pauseRecord();
}
@Override
protected void onDestroy() {
super.onDestroy();
mPublisher.stopPublish();
mPublisher.stopRecord();
}
@Override
public void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
mPublisher.stopEncode();
mPublisher.stopRecord();
btnRecord.setText("record");
mPublisher.setScreenOrientation(newConfig.orientation);
if (btnPublish.getText().toString().contentEquals("stop")) {
mPublisher.startEncode();
}
mPublisher.startCamera();
}
private static String getRandomAlphaString(int length) {
String base = "abcdefghijklmnopqrstuvwxyz";
Random random = new Random();
StringBuilder sb = new StringBuilder();
for (int i = 0; i < length; i++) {
int number = random.nextInt(base.length());
sb.append(base.charAt(number));
}
return sb.toString();
}
private static String getRandomAlphaDigitString(int length) {
String base = "abcdefghijklmnopqrstuvwxyz0123456789";
Random random = new Random();
StringBuilder sb = new StringBuilder();
for (int i = 0; i < length; i++) {
int number = random.nextInt(base.length());
sb.append(base.charAt(number));
}
return sb.toString();
}
private void handleException(Exception e) {
try {
Toast.makeText(getApplicationContext(), e.getMessage(), Toast.LENGTH_SHORT).show();
mPublisher.stopPublish();
mPublisher.stopRecord();
btnPublish.setText("publish");
btnRecord.setText("record");
btnSwitchEncoder.setEnabled(true);
} catch (Exception e1) {
//
}
}
// Implementation of SrsRtmpListener.
@Override
public void onRtmpConnecting(String msg) {
Toast.makeText(getApplicationContext(), msg, Toast.LENGTH_SHORT).show();
}
@Override
public void onRtmpConnected(String msg) {
Toast.makeText(getApplicationContext(), msg, Toast.LENGTH_SHORT).show();
}
@Override
public void onRtmpVideoStreaming() {
}
@Override
public void onRtmpAudioStreaming() {
}
@Override
public void onRtmpStopped() {
Toast.makeText(getApplicationContext(), "Stopped", Toast.LENGTH_SHORT).show();
}
@Override
public void onRtmpDisconnected() {
Toast.makeText(getApplicationContext(), "Disconnected", Toast.LENGTH_SHORT).show();
}
@Override
public void onRtmpVideoFpsChanged(double fps) {
Log.i(TAG, String.format("Output Fps: %f", fps));
}
@Override
public void onRtmpVideoBitrateChanged(double bitrate) {
int rate = (int) bitrate;
if (rate / 1000 > 0) {
Log.i(TAG, String.format("Video bitrate: %f kbps", bitrate / 1000));
} else {
Log.i(TAG, String.format("Video bitrate: %d bps", rate));
}
}
@Override
public void onRtmpAudioBitrateChanged(double bitrate) {
int rate = (int) bitrate;
if (rate / 1000 > 0) {
Log.i(TAG, String.format("Audio bitrate: %f kbps", bitrate / 1000));
} else {
Log.i(TAG, String.format("Audio bitrate: %d bps", rate));
}
}
@Override
public void onRtmpSocketException(SocketException e) {
handleException(e);
}
@Override
public void onRtmpIOException(IOException e) {
handleException(e);
}
@Override
public void onRtmpIllegalArgumentException(IllegalArgumentException e) {
handleException(e);
}
@Override
public void onRtmpIllegalStateException(IllegalStateException e) {
handleException(e);
}
// Implementation of SrsRecordHandler.
@Override
public void onRecordPause() {
Toast.makeText(getApplicationContext(), "Record paused", Toast.LENGTH_SHORT).show();
}
@Override
public void onRecordResume() {
Toast.makeText(getApplicationContext(), "Record resumed", Toast.LENGTH_SHORT).show();
}
@Override
public void onRecordStarted(String msg) {
Toast.makeText(getApplicationContext(), "Recording file: " + msg, Toast.LENGTH_SHORT).show();
}
@Override
public void onRecordFinished(String msg) {
Toast.makeText(getApplicationContext(), "MP4 file saved: " + msg, Toast.LENGTH_SHORT).show();
}
@Override
public void onRecordIOException(IOException e) {
handleException(e);
}
@Override
public void onRecordIllegalArgumentException(IllegalArgumentException e) {
handleException(e);
}
// Implementation of SrsEncodeHandler.
@Override
public void onNetworkWeak() {
Toast.makeText(getApplicationContext(), "Network weak", Toast.LENGTH_SHORT).show();
}
@Override
public void onNetworkResume() {
Toast.makeText(getApplicationContext(), "Network resume", Toast.LENGTH_SHORT).show();
}
@Override
public void onEncodeIllegalArgumentException(IllegalArgumentException e) {
handleException(e);
}
}
I am not sure this is the issue (a screenshot would have helped), but there might be a precision problem here (or in other places in the code):
mOutputAspectRatio = width > height ? (float) width / height : (float) height / width;
Both width and height are integers, and plain integer division truncates, losing precision. To make absolutely sure both operands are promoted to float before the division, I suggest casting them both explicitly:
mOutputAspectRatio = width > height ? ((float) width) / ((float) height) : ((float) height) / ((float) width);
And the same for the computation of mInputAspectRatio.
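Beyond precision, it is also worth checking that the output resolution has the same aspect ratio as the preview: the activity sets a 640x480 (4:3) preview but a 720x1280 (9:16) output, and that kind of mismatch would show up as vertical stretching. A hedged sketch (the exact values depend on what your encoder accepts):

mPublisher.setPreviewResolution(640, 480);
mPublisher.setOutputResolution(480, 640); // same 4:3 shape, rotated for portrait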
I am using the musicg library for clap detection, but whenever the activity starts it continuously reports claps without any clap happening.
I thought there might be a value issue with the bit rate or the frame size.
This is my code:
RecorderThread.java
public class RecorderThread extends Thread {
private AudioRecord audioRecord;
private boolean isRecording;
private int channelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_MONO;
private int audioEncoding = AudioFormat.ENCODING_PCM_16BIT;
private int sampleRate = 44100;
private int frameByteSize = 2048; // for 1024 fft size (16bit sample size)
byte[] buffer;
public RecorderThread() {
int recBufSize = AudioRecord.getMinBufferSize(sampleRate, channelConfiguration, audioEncoding); // need to be larger than size of a frame
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleRate, channelConfiguration, audioEncoding, recBufSize);
buffer = new byte[frameByteSize];
}
public AudioRecord getAudioRecord() {
return audioRecord;
}
public boolean isRecording() {
return this.isAlive() && isRecording;
}
public void startRecording() {
try {
audioRecord.startRecording();
isRecording = true;
} catch (Exception e) {
e.printStackTrace();
}
}
public void stopRecording() {
try {
audioRecord.stop();
isRecording = false;
} catch (Exception e) {
e.printStackTrace();
}
}
public byte[] getFrameBytes() {
audioRecord.read(buffer, 0, frameByteSize);
// analyze sound
int totalAbsValue = 0;
short sample = 0;
float averageAbsValue = 0.0f;
for (int i = 0; i < frameByteSize; i += 2) {
sample = (short) ((buffer[i]) | buffer[i + 1] << 8);
totalAbsValue += Math.abs(sample);
}
averageAbsValue = totalAbsValue / frameByteSize / 2;
//System.out.println(averageAbsValue);
// no input
if (averageAbsValue < 30) {
return null;
}
return buffer;
}
public void run() {
startRecording();
}
}
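One detail in getFrameBytes() above that can cause exactly this symptom: when the 16-bit sample is assembled, the low byte is ORed in without masking, so any negative byte sign-extends and corrupts the sample, inflating averageAbsValue until every frame looks like sound. A hedged fix, assuming the little-endian PCM that AudioRecord delivers on Android devices:

sample = (short) ((buffer[i] & 0xFF) | (buffer[i + 1] << 8)); // mask the low byte before the OR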
DetectorThread.java
public class DetectorThread extends Thread {
private RecorderThread recorder;
private WaveHeader waveHeader;
private ClapApi clapApi;
private volatile Thread _thread;
private LinkedList<Boolean> clapResultList = new LinkedList<Boolean>();
private int numClaps;
private int totalClapsDetected = 0;
private int clapCheckLength = 3;
private int clapPassScore = 3;
public DetectorThread(RecorderThread recorder) {
this.recorder = recorder;
AudioRecord audioRecord = recorder.getAudioRecord();
int bitsPerSample = 0;
if (audioRecord.getAudioFormat() == AudioFormat.ENCODING_PCM_16BIT) {
bitsPerSample = 16;
} else if (audioRecord.getAudioFormat() == AudioFormat.ENCODING_PCM_8BIT) {
bitsPerSample = 8;
}
int channel = 0;
// whistle detection only supports mono channel
//if (audioRecord.getChannelConfiguration() == AudioFormat.CHANNEL_CONFIGURATION_MONO) {
channel = 1;
//}
waveHeader = new WaveHeader();
waveHeader.setChannels(channel);
waveHeader.setBitsPerSample(bitsPerSample);
waveHeader.setSampleRate(audioRecord.getSampleRate());
clapApi = new ClapApi(waveHeader);
}
private void initBuffer() {
numClaps = 0;
clapResultList.clear();
// init the first frames
for (int i = 0; i < clapCheckLength; i++) {
clapResultList.add(false);
}
// end init the first frames
}
public void start() {
_thread = new Thread(this);
_thread.start();
}
public void stopDetection() {
_thread = null;
}
public void run() {
try {
byte[] buffer;
initBuffer();
Thread thisThread = Thread.currentThread();
while (_thread == thisThread) {
// detect sound
buffer = recorder.getFrameBytes();
// audio analyst
if (buffer != null) {
// sound detected
MainActivity.clapsValue = numClaps;
// whistle detection
//System.out.println("*Whistle:");
boolean isClap = clapApi.isClap(buffer);
if (clapResultList.getFirst()) {
numClaps--;
}
clapResultList.removeFirst();
clapResultList.add(isClap);
if (isClap) {
numClaps++;
}
//System.out.println("num:" + numWhistles);
if (numClaps >= clapPassScore) {
// clear buffer
initBuffer();
totalClapsDetected++;
}
// end whistle detection
} else {
// no sound detected
if (clapResultList.getFirst()) {
numClaps--;
}
clapResultList.removeFirst();
clapResultList.add(false);
MainActivity.clapsValue = numClaps;
}
// end audio analyst
}
} catch (Exception e) {
e.printStackTrace();
}
}
public int getTotalClapsDetected() {
return totalClapsDetected;
}
}
MainActivity.java
public class MainActivity extends Activity {
public static final int DETECT_NONE = 0;
public static final int DETECT_CLAP = 1;
public static int selectedDetection = DETECT_NONE;
private DetectorThread detectorThread;
private RecorderThread recorderThread;
private Thread detectedTextThread;
public static int clapsValue = 0;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
startVoiceDetection();
}
@Override
protected void onPause() {
super.onPause();
stopVoiceDetection();
}
@Override
protected void onDestroy() {
super.onDestroy();
android.os.Process.killProcess(android.os.Process.myPid());
}
private void startVoiceDetection() {
selectedDetection = DETECT_CLAP;
recorderThread = new RecorderThread();
recorderThread.start();
detectorThread = new DetectorThread(recorderThread);
detectorThread.start();
goListeningView();
}
private void stopVoiceDetection() {
if (recorderThread != null) {
recorderThread.stopRecording();
recorderThread = null;
}
if (detectorThread != null) {
detectorThread.stopDetection();
detectorThread = null;
}
selectedDetection = DETECT_NONE;
}
private void goListeningView() {
if (detectedTextThread == null) {
detectedTextThread = new Thread() {
public void run() {
try {
while (recorderThread != null && detectorThread != null) {
runOnUiThread(new Runnable() {
public void run() {
if (detectorThread != null) {
Log.e("Clap", "Detected");
}
}
});
sleep(100);
}
} catch (Exception e) {
e.printStackTrace();
} finally {
detectedTextThread = null;
}
}
};
detectedTextThread.start();
}
}
}
Change
AudioFormat.CHANNEL_CONFIGURATION_MONO
to
AudioFormat.CHANNEL_IN_MONO
That fixed it for me :)
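A possible explanation: CHANNEL_CONFIGURATION_MONO is a deprecated constant whose value differs from CHANNEL_IN_MONO, so on some devices the AudioRecord may not initialize properly and read() returns an error code without filling the buffer, which the detector then "analyzes". A defensive check before reading (a sketch using only the standard AudioRecord API):

if (audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
    return null; // bad channel/encoding configuration, nothing to analyze
}
int read = audioRecord.read(buffer, 0, frameByteSize);
if (read <= 0) {
    return null; // read error; do not analyze a stale buffer
}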
I want to detect a whistle sound. For that I have implemented http://code.google.com/p/musicg/
The source code itself has an issue: when you start the app it is ready to listen, but when you go back and then restart the detector thread, it no longer triggers whistle detection.
DetectorThread.java
package weetech.wallpaper.services;
import java.util.LinkedList;
import weetech.wallpaper.utils.Debug;
import android.media.AudioFormat;
import android.media.AudioRecord;
import com.musicg.api.WhistleApi;
import com.musicg.wave.WaveHeader;
public class DetectorThread extends Thread {
private RecorderThread recorder;
private WaveHeader waveHeader;
private WhistleApi whistleApi;
private Thread _thread;
private LinkedList<Boolean> whistleResultList = new LinkedList<Boolean>();
private int numWhistles;
private int totalWhistlesDetected = 0;
private int whistleCheckLength = 3;
private int whistlePassScore = 3;
public DetectorThread(RecorderThread recorder) {
this.recorder = recorder;
AudioRecord audioRecord = recorder.getAudioRecord();
int bitsPerSample = 0;
if (audioRecord.getAudioFormat() == AudioFormat.ENCODING_PCM_16BIT) {
bitsPerSample = 16;
} else if (audioRecord.getAudioFormat() == AudioFormat.ENCODING_PCM_8BIT) {
bitsPerSample = 8;
}
int channel = 0;
// whistle detection only supports mono channel
if (audioRecord.getChannelConfiguration() == AudioFormat.CHANNEL_IN_MONO) {
channel = 1;
}
waveHeader = new WaveHeader();
waveHeader.setChannels(channel);
waveHeader.setBitsPerSample(bitsPerSample);
waveHeader.setSampleRate(audioRecord.getSampleRate());
whistleApi = new WhistleApi(waveHeader);
}
private void initBuffer() {
numWhistles = 0;
whistleResultList.clear();
// init the first frames
for (int i = 0; i < whistleCheckLength; i++) {
whistleResultList.add(false);
}
// end init the first frames
}
public void start() {
_thread = new Thread(this);
_thread.start();
}
public void stopDetection() {
_thread = null;
}
@Override
public void run() {
Debug.e("", "DetectorThread started...");
try {
byte[] buffer;
initBuffer();
Thread thisThread = Thread.currentThread();
while (_thread == thisThread) {
// detect sound
buffer = recorder.getFrameBytes();
// audio analyst
if (buffer != null) {
// sound detected
// MainActivity.whistleValue = numWhistles;
// whistle detection
// System.out.println("*Whistle:");
try {
boolean isWhistle = whistleApi.isWhistle(buffer);
Debug.e("", "isWhistle : " + isWhistle + " "
+ buffer.length);
if (whistleResultList.getFirst()) {
numWhistles--;
}
whistleResultList.removeFirst();
whistleResultList.add(isWhistle);
if (isWhistle) {
numWhistles++;
}
// Debug.e("", "numWhistles : " + numWhistles);
if (numWhistles >= whistlePassScore) {
// clear buffer
initBuffer();
totalWhistlesDetected++;
Debug.e("", "totalWhistlesDetected : "
+ totalWhistlesDetected);
if (onWhistleListener != null) {
onWhistleListener.onWhistle();
}
}
} catch (Exception e) {
Debug.w("", "" + e.getCause());
}
// end whistle detection
} else {
// Debug.e("", "no sound detected");
// no sound detected
if (whistleResultList.getFirst()) {
numWhistles--;
}
whistleResultList.removeFirst();
whistleResultList.add(false);
// MainActivity.whistleValue = numWhistles;
}
// end audio analyst
}
Debug.e("", "Terminating detector thread...");
} catch (Exception e) {
e.printStackTrace();
}
}
private OnWhistleListener onWhistleListener;
public void setOnWhistleListener(OnWhistleListener onWhistleListener) {
this.onWhistleListener = onWhistleListener;
}
public interface OnWhistleListener {
void onWhistle();
}
public int getTotalWhistlesDetected() {
return totalWhistlesDetected;
}
}
RecorderThread.java
public class RecorderThread {
private AudioRecord audioRecord;
private int channelConfiguration;
private int audioEncoding;
private int sampleRate;
private int frameByteSize; // for 1024 fft size (16bit sample size)
byte[] buffer;
public RecorderThread() {
sampleRate = 44100;
frameByteSize = 1024 * 2;
channelConfiguration = AudioFormat.CHANNEL_IN_MONO;
audioEncoding = AudioFormat.ENCODING_PCM_16BIT;
int recBufSize = AudioRecord.getMinBufferSize(sampleRate,
channelConfiguration, audioEncoding); // need to be larger than
// size of a frame
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC,
sampleRate, channelConfiguration, audioEncoding, recBufSize);
buffer = new byte[frameByteSize];
}
public AudioRecord getAudioRecord() {
return audioRecord;
}
public boolean isRecording() {
if (audioRecord.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING) {
return true;
}
return false;
}
public void startRecording() {
try {
audioRecord.startRecording();
} catch (Exception e) {
e.printStackTrace();
}
}
public void stopRecording() {
try {
audioRecord.stop();
} catch (Exception e) {
e.printStackTrace();
}
}
public byte[] getFrameBytes() {
audioRecord.read(buffer, 0, frameByteSize);
// analyze sound
int totalAbsValue = 0;
short sample = 0;
float averageAbsValue = 0.0f;
for (int i = 0; i < frameByteSize; i += 2) {
sample = (short) ((buffer[i]) | buffer[i + 1] << 8);
totalAbsValue += Math.abs(sample);
}
averageAbsValue = totalAbsValue / frameByteSize / 2;
Debug.e("", "averageAbsValue : " + averageAbsValue);
// no input
if (averageAbsValue < 30) {
return null;
}
return buffer;
}
}
Usage
public class DetectionService extends Service implements
OnWhistleListener {
Handler handler;
private DetectorThread detectorThread;
private RecorderThread recorderThread;
@Override
public void onCreate() {
super.onCreate();
handler = new Handler();
}
@Override
public IBinder onBind(Intent intent) {
return null;
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
try {
if (intent != null && intent.getExtras() != null) {
if (intent.getExtras().containsKey("action")) {
Debug.e("", "action : " + intent.getStringExtra("action"));
if (intent.getStringExtra("action").equals("start")) {
startWhistleDetection();
}
if (intent.getStringExtra("action").equals("stop")) {
stopWhistleDetection();
stopSelf();
}
}
} else {
startWhistleDetection();
Debug.e("", "intent is null OR intent.getExtras() is null");
}
} catch (Exception e) {
e.printStackTrace();
}
return super.onStartCommand(intent, flags, startId);
}
private void startWhistleDetection() {
try {
stopWhistleDetection();
} catch (Exception e) {
e.printStackTrace();
}
recorderThread = new RecorderThread();
recorderThread.startRecording();
detectorThread = new DetectorThread(recorderThread);
detectorThread.setOnWhistleListener(this);
detectorThread.start();
}
private void stopWhistleDetection() {
if (detectorThread != null) {
detectorThread.stopDetection();
detectorThread.setOnWhistleListener(null);
detectorThread = null;
}
if (recorderThread != null) {
recorderThread.stopRecording();
recorderThread = null;
}
}
@Override
public void onDestroy() {
super.onDestroy();
}
@Override
public void onWhistle() {
Debug.e("", "onWhistle()");
}
It detects whistles the first time, until you stop the service. But after stopping and starting again, it does not detect anything (the listener is never called). I failed to trace what the issue could be.
Is there any issue with the recording?
I invested six hours. :D Unbelievable: the audio recorder is not released when it is stopped. I simply released the recorder after stopping it.
The source code has a minor silly mistake: it never releases the recorder.
public void stopRecording() {
try {
audioRecord.stop();
audioRecord.release();
} catch (Exception e) {
e.printStackTrace();
}
}
And this shutdown sequence works for me:
if (detectorThread != null) {
detectorThread.stopDetection();
recorderThread.stopRecording();
}
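For completeness, a teardown helper along those lines, a sketch combining the two fixes above and using only names from this code:

private void stopAndRelease() {
    if (detectorThread != null) {
        detectorThread.stopDetection();        // lets the detection loop exit
        detectorThread.setOnWhistleListener(null);
        detectorThread = null;
    }
    if (recorderThread != null) {
        recorderThread.stopRecording();        // now stop()s and release()s the AudioRecord
        recorderThread = null;                 // a fresh RecorderThread is created on restart
    }
}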