I am trying to make a text scanner application with the camera that recognizes text and shows it on screen. But instead of showing the camera it shows a black screen. How can I solve this?
My code is here:
package com.myapp.game.easynepalirecharge;
import android.Manifest;
import android.app.ActionBar;
import android.content.pm.PackageManager;
import android.graphics.Camera;
import android.support.annotation.NonNull;
import android.support.v4.app.ActivityCompat;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.Log;
import android.util.SparseArray;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.ViewGroup;
import android.widget.TextView;
import com.google.android.gms.vision.CameraSource;
import com.google.android.gms.vision.Detector;
import com.google.android.gms.vision.text.TextBlock;
import com.google.android.gms.vision.text.TextRecognizer;
import java.io.IOException;
public class MainActivity extends AppCompatActivity {
SurfaceView cameraView;
TextView textView;
CameraSource cameraSource;
final int REQUESTCAMERAPERMISSION = 105;
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull final int[] grantResults) {
switch (requestCode) {
case REQUESTCAMERAPERMISSION:
if (grantResults[0] == PackageManager.PERMISSION_GRANTED) {
if (ActivityCompat.checkSelfPermission(getApplication(), Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
return;
}
try {
cameraSource.start(cameraView.getHolder());
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
cameraView = (SurfaceView) findViewById(R.id.surfaceView);
textView = (TextView) findViewById(R.id.textView);
TextRecognizer textRecognizer = new TextRecognizer.Builder(getApplicationContext()).build();
if (!textRecognizer.isOperational()) {
Log.v("haha", "error not operational");
} else {
cameraSource = new CameraSource.Builder(getApplicationContext(), textRecognizer).
setFacing(CameraSource.CAMERA_FACING_BACK)
.setRequestedPreviewSize(3840, 2160)
.setRequestedFps(2.0f)
.setAutoFocusEnabled(true)
.build();
cameraView.getHolder().addCallback(new SurfaceHolder.Callback() {
@Override
public void surfaceCreated(SurfaceHolder holder) {
if (ActivityCompat.checkSelfPermission(getApplicationContext(), Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
// Request the permission and let onRequestPermissionsResult start the camera once it is granted.
ActivityCompat.requestPermissions(MainActivity.this, new String[]{Manifest.permission.CAMERA}, REQUESTCAMERAPERMISSION);
return;
}
try {
cameraSource.start(cameraView.getHolder());
} catch (IOException e) {
e.printStackTrace();
}
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
cameraSource.stop();
}
});
textRecognizer.setProcessor(new Detector.Processor<TextBlock>() {
@Override
public void release() {
}
@Override
public void receiveDetections(Detector.Detections<TextBlock> detections) {
final SparseArray<TextBlock> items = detections.getDetectedItems();
if (items.size() != 0) {
textView.post(new Runnable() {
@Override
public void run() {
StringBuilder stringBuilder = new StringBuilder();
for (int i = 0; i < items.size(); i++) {
TextBlock item = items.valueAt(i);
stringBuilder.append(item.getValue());
stringBuilder.append("\n");
}
textView.setText(stringBuilder.toString());
}
});
}
}
}
);
}
}
}
And another thing I want to ask: which one is better, this or the tess-two library?
Have you stopped the camera properly? Try to do the camera work on a separate thread so that a single expensive task does not block the UI. I think the reason for the problem is how you get your permission at run time.
Try this:
https://developer.android.com/training/permissions/requesting.html
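For reference, a minimal sketch of that runtime-permission flow (startCameraSource is a hypothetical helper wrapping the cameraSource.start(...) call from the question):
// Check first; only start the camera when the permission is already granted,
// otherwise request it and wait for the onRequestPermissionsResult callback.
if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA)
== PackageManager.PERMISSION_GRANTED) {
startCameraSource();
} else {
ActivityCompat.requestPermissions(this,
new String[]{Manifest.permission.CAMERA}, REQUESTCAMERAPERMISSION);
}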
Related
I am using CameraX for preview, image capture, and video capture. I have created a fragment class that binds the use cases to the lifecycle to avoid all manual camera preview state management etc. (as recommended by Google). The problem is that sometimes, if I put the app into the background and then bring it back to the foreground, the camera preview is black and does not show anything. There is no way to restore it apart from recreating the fragment. Any hints on what I may be doing wrong?
import android.Manifest;
import android.content.Context;
import android.content.pm.PackageManager;
import android.net.Uri;
import android.os.Bundle;
import android.os.Handler;
import android.util.Size;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.WindowManager;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.camera.core.AspectRatio;
import androidx.camera.core.Camera;
import androidx.camera.core.CameraSelector;
import androidx.camera.core.ImageAnalysis;
import androidx.camera.core.ImageCapture;
import androidx.camera.core.ImageCaptureException;
import androidx.camera.core.Preview;
import androidx.camera.core.UseCaseGroup;
import androidx.camera.core.VideoCapture;
import androidx.camera.lifecycle.ProcessCameraProvider;
import androidx.camera.view.PreviewView;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.mlkit.vision.face.Face;
import
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
public class CameraViewFragment extends BaseFragment {
private static final int REQUEST_CAMERA_PERMISSIONS = 100;
private CameraViewFragmentListener listener;
private final Executor executor = Executors.newSingleThreadExecutor();
private Camera camera;
private Handler handler = new Handler();
private boolean isRecording;
private Preview preview;
private ImageCapture imageCapture;
private VideoCapture videoCapture;
private ImageAnalysis imageAnalysis;
private FaceDetector faceDetector;
public interface CameraViewFragmentListener {
void onCameraPictureTaken(byte[] buffer);
void onCameraPermissionsRejected();
void onCameraVideoRecorded(Uri file);
void onCameraVideoRecordError(Throwable ex);
void onCameraFacesDetect(List<Face> faces);
void onCameraFacesDetectError(Exception e);
enum CameraFeature {
PREVIEW,
IMAGE_CAPTURE,
VIDEO_CAPTURE,
FACE_DETECTION
}
Set<CameraFeature> cameraGetRequestedFeatures();
}
private FragmentCameraViewBinding binding;
@Nullable
@Override
public View onCreateView(@NonNull LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
binding = FragmentCameraViewBinding.inflate(inflater, container, false);
return binding.getRoot();
}
@Override
public void onViewCreated(View view, @Nullable Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
int rc = ActivityCompat.checkSelfPermission(requireContext(), Manifest.permission.CAMERA);
if (rc == PackageManager.PERMISSION_GRANTED) {
startCamera();
} else {
requestCameraPermission();
}
}
private void requestCameraPermission() {
requestPermissions(new String[]{Manifest.permission.CAMERA}, REQUEST_CAMERA_PERMISSIONS);
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
if (requestCode == REQUEST_CAMERA_PERMISSIONS) {
if (permissions.length == 1 && permissions[0].equals(Manifest.permission.CAMERA) && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
Logger.log(Logger.info, "[VRM] Camera permission granted.");
startCamera();//createCameraSource();
} else {
Logger.log(Logger.error, "[VRM] Camera permission NOT granted.");
if (listener != null) {
listener.onCameraPermissionsRejected();
}
requestCameraPermission();
}
}
}
/**
* Restarts the camera.
*/
@Override
public void onResume() {
super.onResume();
//startCamera();
}
/**
* Stops the camera.
*/
@Override
public void onPause() {
super.onPause();
}
/**
* Releases the resources associated with the camera source, the associated detector, and the
* rest of the processing pipeline.
*/
@Override
public void onDestroy() {
super.onDestroy();
}
@Override
public void onAttach(Context context) {
super.onAttach(context);
if (context instanceof CameraViewFragmentListener) {
listener = (CameraViewFragmentListener) context;
} else if (getParentFragment() instanceof CameraViewFragmentListener) {
listener = (CameraViewFragmentListener) getParentFragment();
}
}
@Override
public void onDetach() {
super.onDetach();
listener = null;
}
private void startCamera() {
final ListenableFuture<ProcessCameraProvider> cameraProviderFuture = ProcessCameraProvider.getInstance(requireContext());
cameraProviderFuture.addListener(() -> {
try {
ProcessCameraProvider cameraProvider = cameraProviderFuture.get();
Set<CameraViewFragmentListener.CameraFeature> features = listener.cameraGetRequestedFeatures();
bindUseCases(cameraProvider, features);
} catch (ExecutionException | InterruptedException e) {
// No errors need to be handled for this Future.
// This should never be reached.
Logger.log(Logger.error, "Exception while initializing CameraX: {}", e);
}
}, ContextCompat.getMainExecutor(requireContext()));
}
void bindUseCases(#NonNull ProcessCameraProvider cameraProvider, Set<CameraViewFragmentListener.CameraFeature> features) {
cameraProvider.unbindAll();
final CameraSelector cameraSelector = new CameraSelector.Builder()
.requireLensFacing(CameraSelector.LENS_FACING_FRONT)
.build();
final WindowManager wm = (WindowManager) requireContext().getSystemService(Context.WINDOW_SERVICE);
final UseCaseGroup.Builder useCaseGroupBuilder = new UseCaseGroup.Builder();
if (features.contains(CameraViewFragmentListener.CameraFeature.PREVIEW)) {
preview = new Preview.Builder()
.setTargetAspectRatio(AspectRatio.RATIO_4_3)
//.setTargetResolution(new Size(480, 640))
.build();
binding.preview.setScaleType(PreviewView.ScaleType.FILL_CENTER);
preview.setSurfaceProvider(binding.preview.getSurfaceProvider());
useCaseGroupBuilder.addUseCase(preview);
} else {
preview = null;
}
if (features.contains(CameraViewFragmentListener.CameraFeature.IMAGE_CAPTURE)) {
imageCapture = new ImageCapture.Builder()
.setCameraSelector(cameraSelector)
.setCaptureMode(ImageCapture.CAPTURE_MODE_MINIMIZE_LATENCY)
//.setTargetAspectRatio(AspectRatio.RATIO_4_3)
.setTargetResolution(new Size(480, 640))
.setTargetRotation(wm.getDefaultDisplay().getRotation())
.build();
useCaseGroupBuilder.addUseCase(imageCapture);
} else {
imageCapture = null;
}
if (features.contains(CameraViewFragmentListener.CameraFeature.VIDEO_CAPTURE)) {
videoCapture = new VideoCapture.Builder()
.setCameraSelector(cameraSelector)
//.setTargetResolution(new Size(480, 640))
.setBitRate(1000)
.setTargetAspectRatio(AspectRatio.RATIO_4_3)
.setTargetRotation(wm.getDefaultDisplay().getRotation())
.build();
useCaseGroupBuilder.addUseCase(videoCapture);
} else {
videoCapture = null;
}
if (features.contains(CameraViewFragmentListener.CameraFeature.FACE_DETECTION)) {
imageAnalysis = new ImageAnalysis.Builder()
.setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
.build();
faceDetector = new FaceDetector(new FaceDetector.FaceDetectorListener() {
@Override
public void onFaceDetectSuccess(List<Face> faces) {
if (listener != null)
listener.onCameraFacesDetect(faces);
}
@Override
public void onFaceDetectError(Exception e) {
if (listener!=null)
listener.onCameraFacesDetectError(e);
}
});
imageAnalysis.setAnalyzer(executor, faceDetector.createAnalyzer());
useCaseGroupBuilder.addUseCase(imageAnalysis);
} else {
imageAnalysis = null;
}
camera = cameraProvider.bindToLifecycle(this, cameraSelector, useCaseGroupBuilder.build());
}
public void requestTakePicture() {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
ImageCapture.OutputFileOptions outputFileOptions = new ImageCapture.OutputFileOptions.Builder(bos).build();
imageCapture.takePicture(outputFileOptions, executor, new ImageCapture.OnImageSavedCallback() {
@Override
public void onImageSaved(@NonNull ImageCapture.OutputFileResults outputFileResults) {
handler.post(() -> {
//Toast.makeText(MainActivity.this, "Image Saved successfully", Toast.LENGTH_SHORT).show();
if (listener != null) {
listener.onCameraPictureTaken(bos.toByteArray());
}
});
}
@Override
public void onError(@NonNull ImageCaptureException error) {
error.printStackTrace();
}
});
}
public void startVideoRecording(File file) {
VideoCapture.OutputFileOptions outputFileOptions = new VideoCapture.OutputFileOptions
.Builder(file)
.build();
videoCapture.startRecording(outputFileOptions, executor, new VideoCapture.OnVideoSavedCallback() {
@Override
public void onVideoSaved(@NonNull VideoCapture.OutputFileResults outputFileResults) {
handler.post(() -> {
isRecording = false;
//Toast.makeText(MainActivity.this, "Image Saved successfully", Toast.LENGTH_SHORT).show();
if (listener != null) {
listener.onCameraVideoRecorded(outputFileResults.getSavedUri());
}
});
}
@Override
public void onError(int videoCaptureError, @NonNull String message, @Nullable Throwable cause) {
isRecording = false;
if (listener != null) {
listener.onCameraVideoRecordError(cause);
}
}
});
isRecording = true;
}
public void stopVideoRecording() {
videoCapture.stopRecording();
isRecording = false;
}
public boolean isRecording() {
return isRecording;
}
}
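One thing worth trying (an assumption based on the commented-out startCamera() call in onResume, not a confirmed diagnosis): if the PreviewView surface is released while the app is in the background, re-binding the use cases on resume usually restores the preview. A minimal sketch:
@Override
public void onResume() {
super.onResume();
// Re-bind on every return to the foreground; bindUseCases() already calls
// cameraProvider.unbindAll(), so repeating the binding is safe.
if (ActivityCompat.checkSelfPermission(requireContext(), Manifest.permission.CAMERA)
== PackageManager.PERMISSION_GRANTED) {
startCamera();
}
}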
I'm trying to get the camera to autofocus using the CameraSource class from the Android Mobile Vision API.
I've activated autofocus as follows:
cameraSource = new CameraSource
.Builder(this, barcodeDetector)
.setRequestedPreviewSize(640, 480)
.setAutoFocusEnabled(true)
.setRequestedFps(24.0f)
.build();
But the SurfaceView rendering the camera is often blurred.
Here is my full Activity's code:
import android.Manifest;
import android.app.Activity;
import android.content.pm.PackageManager;
import android.os.Build;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.v4.app.ActivityCompat;
import android.util.Log;
import android.util.SparseArray;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.widget.TextView;
import com.google.android.gms.vision.CameraSource;
import com.google.android.gms.vision.Detector;
import com.google.android.gms.vision.barcode.Barcode;
import com.google.android.gms.vision.barcode.BarcodeDetector;
import java.io.IOException;
import java.util.Arrays;
public class MainActivity extends Activity {
private BarcodeDetector barcodeDetector;
private CameraSource cameraSource;
private SurfaceView cameraView;
private TextView barcodeInfo;
public static int REQUEST_PERMISSION_CAMERA = 1;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
cameraView = (SurfaceView) findViewById(R.id.camera_view);
barcodeInfo = (TextView) findViewById(R.id.code_info);
barcodeDetector =
new BarcodeDetector.Builder(this)
.setBarcodeFormats(Barcode.QR_CODE)
.build();
cameraSource = new CameraSource
.Builder(this, barcodeDetector)
.setRequestedPreviewSize(640, 480)
.setAutoFocusEnabled(true)
.setRequestedFps(24.0f)
.build();
cameraView.getHolder().addCallback(new SurfaceHolder.Callback() {
@Override
public void surfaceCreated(SurfaceHolder holder) {
try {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
if (ActivityCompat.checkSelfPermission(MainActivity.this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
requestPermissions(new String[]{Manifest.permission.CAMERA}, REQUEST_PERMISSION_CAMERA);
return;
} else {
cameraSource.start(cameraView.getHolder());
}
} else {
cameraSource.start(cameraView.getHolder());
}
} catch (IOException ie) {
Log.e("CAMERA SOURCE", ie.getMessage());
}
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
cameraSource.stop();
}
});
barcodeDetector.setProcessor(new Detector.Processor<Barcode>() {
@Override
public void release() {
}
@Override
public void receiveDetections(Detector.Detections<Barcode> detections) {
final SparseArray<Barcode> barcodes = detections.getDetectedItems();
if (barcodes.size() != 0) {
barcodeInfo.post(new Runnable() { // Use the post method of the TextView
public void run() {
barcodeInfo.setText( // Update the TextView
barcodes.valueAt(0).displayValue
);
}
});
}
}
});
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
int[] checkValues = new int[1];
if (requestCode == REQUEST_PERMISSION_CAMERA) {
if (Arrays.equals(grantResults, checkValues)) {
if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
return;
}
try {
cameraSource.start(cameraView.getHolder());
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
@Override
protected void onDestroy() {
super.onDestroy();
cameraSource.release();
barcodeDetector.release();
}
}
I'm trying to create a barcode reader. The program works perfectly when the barcode is big. To read small barcodes I need autofocus enabled.
I added setAutoFocusEnabled(true) to the CameraSource, but it doesn't work.
How can I fix the following code to get autofocus working?
My testing device is a Samsung Galaxy J1 sm111m, Android 5.1.1 (API 22).
package com.gutimore.android.pdf417;
import android.Manifest;
import android.content.Context;
import android.content.pm.PackageManager;
import android.hardware.Camera;
import android.os.Vibrator;
import android.support.annotation.NonNull;
import android.support.v4.app.ActivityCompat;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.SparseArray;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.widget.TextView;
import com.google.android.gms.vision.CameraSource;
import com.google.android.gms.vision.Detector;
import com.google.android.gms.vision.barcode.Barcode;
import com.google.android.gms.vision.barcode.BarcodeDetector;
import java.io.IOException;
public class MainActivity extends AppCompatActivity {
SurfaceView cameraPreview;
TextView txtResult;
BarcodeDetector barcodeDetector;
CameraSource cameraSource;
final int RequestCameraPermissionID = 1001;
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
switch (requestCode) {
case RequestCameraPermissionID: {
if (grantResults[0] == PackageManager.PERMISSION_GRANTED) {
if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
return;
}
try {
cameraSource.start(cameraPreview.getHolder());
} catch (IOException e) {
e.printStackTrace();
}
}
}
break;
}
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
cameraPreview = findViewById(R.id.cameraPreview);
txtResult = findViewById(R.id.txtResult);
barcodeDetector = new BarcodeDetector.Builder(this)
.setBarcodeFormats(Barcode.PDF417)
.build();
cameraSource = new CameraSource
.Builder(this, barcodeDetector)
.setRequestedPreviewSize(640, 480)
.setAutoFocusEnabled(true)
.build();
//Add Event
cameraPreview.getHolder().addCallback(new SurfaceHolder.Callback() {
@Override
public void surfaceCreated(SurfaceHolder surfaceHolder) {
if (ActivityCompat.checkSelfPermission(getApplicationContext(), android.Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
//Request permission
ActivityCompat.requestPermissions(MainActivity.this,
new String[]{Manifest.permission.CAMERA},RequestCameraPermissionID);
return;
}
try {
cameraSource.start(cameraPreview.getHolder());
} catch (IOException e) {
e.printStackTrace();
}
}
@Override
public void surfaceChanged(SurfaceHolder surfaceHolder, int i, int i1, int i2) {
}
@Override
public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
cameraSource.stop();
}
});
barcodeDetector.setProcessor(new Detector.Processor<Barcode>() {
@Override
public void release() {
}
@Override
public void receiveDetections(Detector.Detections<Barcode> detections) {
final SparseArray<Barcode> qrcodes = detections.getDetectedItems();
if(qrcodes.size() != 0)
{
txtResult.post(new Runnable() {
@Override
public void run() {
//Create vibrate
Vibrator vibrator = (Vibrator)getApplicationContext().getSystemService(Context.VIBRATOR_SERVICE);
vibrator.vibrate(1000);
txtResult.setText(qrcodes.valueAt(0).displayValue);
}
});
}
}
});
}
}
Use CameraSource from (link)
package com.google.android.gms.samples.vision.barcodereader.ui.camera;
And initialize it as below:
CameraSource camera = new CameraSource.Builder(requireContext(), barcodeDetector)
.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)
.build();
But on some old Samsung devices autofocus still won't work. Better to use ZXing.
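For instance, with the zxing-android-embedded wrapper (an assumed dependency; the answer only names ZXing), the scan is delegated to ZXing's own capture activity, which manages focus itself:
import com.google.zxing.integration.android.IntentIntegrator;
import com.google.zxing.integration.android.IntentResult;
import java.util.Collections;
// Launch the scanner from the activity, restricted to PDF417 barcodes.
new IntentIntegrator(MainActivity.this)
.setDesiredBarcodeFormats(Collections.singleton("PDF_417"))
.initiateScan();
// Receive the decoded text back in the same activity:
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
IntentResult result = IntentIntegrator.parseActivityResult(requestCode, resultCode, data);
if (result != null && result.getContents() != null) {
txtResult.setText(result.getContents());
} else {
super.onActivityResult(requestCode, resultCode, data);
}
}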
I am using the AudioDispatcher from the TarsosDSP library.
The pitch detection is used to detect sounds from the mic. Once a sound is detected, the app switches to the next activity (which is a maths quiz). After completing the quiz on the next activity, it returns to this activity and starts the process all over again.
What is bugging me is that my app works 90% of the time when using the pitch detection function. However, sometimes it doesn't work and throws an error as follows:
E/AudioRecord: start() status -38
and the app no longer switches to the next activity.
package com.humanfactorsandappliedcognitionlab.research.mathsapp;
import android.content.Context;
import android.content.DialogInterface;
import android.media.MediaPlayer;
import android.os.AsyncTask;
import android.os.IBinder;
import android.speech.tts.TextToSpeech;
import android.speech.tts.UtteranceProgressListener;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.content.Intent;
import android.util.Log;
import android.widget.ImageButton;
import android.widget.TextView;
import java.util.HashMap;
import java.util.Locale;
import java.util.concurrent.RunnableFuture;
import be.tarsos.dsp.AudioDispatcher;
import be.tarsos.dsp.AudioEvent;
import be.tarsos.dsp.io.android.AudioDispatcherFactory;
import be.tarsos.dsp.pitch.PitchDetectionHandler;
import be.tarsos.dsp.pitch.PitchDetectionResult;
import be.tarsos.dsp.pitch.PitchProcessor;
public class MainActivity extends AppCompatActivity implements TextToSpeech.OnInitListener {
MediaPlayer notifySound;
MediaPlayer endSound;
AudioDispatcher dispatcherMAIN;
PitchProcessor pitchProcessorMAIN;
public boolean isListening = false;
TextToSpeech tts;
private int sensitivity = 100;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
notifySound = MediaPlayer.create(this, R.raw.samsung);
endSound = MediaPlayer.create(this, R.raw.ding);
OPTION = dbHandler.getOPTION();
tts = new TextToSpeech(this, this);
tts.setOnUtteranceProgressListener(new UtteranceProgressListener() {
@Override
public void onStart(String utteranceId) {
runOnUiThread(new Runnable() {
@Override
public void run(){
}
});
}
@Override
public void onDone(String utteranceId) {
runOnUiThread(new Runnable() {
@Override
public void run(){
startListenToTalk();
}
});
}
@Override
public void onError(String utteranceId) {
}
});
}
private void speakOut() {
Log.e("TTS", "SPEAKING...");
String text = "Please Say Continue to Proceed ";
HashMap<String, String> map = new HashMap<String, String>();
map.put(TextToSpeech.Engine.KEY_PARAM_UTTERANCE_ID, "");
tts.speak(text, TextToSpeech.QUEUE_FLUSH, map);
}
private void startListenToTalk() {
dispatcherMAIN = AudioDispatcherFactory.fromDefaultMicrophone(22050, 1024, 0);
pitchProcessorMAIN = new PitchProcessor(PitchProcessor.PitchEstimationAlgorithm.FFT_YIN, 22050, 1024, new PitchDetectionHandler() {
@Override
public void handlePitch(PitchDetectionResult pitchDetectionResult,
AudioEvent audioEvent) {
final float pitchInHz = pitchDetectionResult.getPitch();
runOnUiThread(new Runnable() {
@Override
public void run() {
ImageButton buttonOK = (ImageButton) findViewById(R.id.buttonOK);
TextView textINPUT = (TextView)findViewById(R.id.textINPUT);
if (pitchInHz > sensitivity) {
Log.e("pitch : ", pitchInHz + "");
if (isListening) {
try {
dispatcherMAIN.stop();
Intent gotoMaths = new Intent(MainActivity.this, MathsActivity.class);
startActivity(gotoMaths);
} catch (Exception e) {
e.printStackTrace();
}
}
}
}
});
}
});
dispatcherMAIN.addAudioProcessor(pitchProcessorMAIN);
new Thread(dispatcherMAIN, "Audio Dispatcher").start();
isListening = true;
}
@Override
protected void onPause() {
super.onPause();
if (notifySound != null) {
notifySound.release();
}
if (endSound != null) {
endSound.release();
}
if (isListening) {
try {
dispatcherMAIN.stop();
} catch (Exception e) {
e.printStackTrace();
}
isListening = false;
}
finish();
}
@Override
public void onStop(){
super.onStop();
if (tts != null) {
tts.shutdown();
}
}
@Override
public void onInit(int status) {
if (status == TextToSpeech.SUCCESS) {
int result = tts.setLanguage(Locale.US);
if (result == TextToSpeech.LANG_MISSING_DATA
|| result == TextToSpeech.LANG_NOT_SUPPORTED) {
Log.e("TTS", "This Language is not supported");
} else {
if("3".equals(OPTION)) {
speakOut();
}
}
} else {
Log.e("TTS", "Initilization Failed!");
}
}
}
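AudioRecord's start() status -38 generally means the native recorder could not start, often because the microphone is still held by a previous AudioRecord instance (an assumption based on the error code; it is not confirmed by this code alone). A defensive sketch that frees the old dispatcher before creating a new one:
private void startListenToTalk() {
// Stop any dispatcher left over from a previous round so its AudioRecord
// releases the microphone before a new one is created.
if (dispatcherMAIN != null && !dispatcherMAIN.isStopped()) {
dispatcherMAIN.stop();
}
dispatcherMAIN = AudioDispatcherFactory.fromDefaultMicrophone(22050, 1024, 0);
// ... attach the PitchProcessor and start the thread as before ...
}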
I want to do some image-processing jobs, and I want to get real-time image data from my camera (during the preview state) instead of after taking pictures.
I looked at this post but don't know how to use it.
After setting this,
SurfaceHolder.Callback surfaceCallback = new SurfaceHolder.Callback()
{
public void surfaceCreated(SurfaceHolder holder) {
camera.setPreviewCallback(previewCallback);
}
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
}
public void surfaceDestroyed(SurfaceHolder holder) {
}
};
and this
private Camera.PreviewCallback previewCallback= new Camera.PreviewCallback()
{
@Override
public void onPreviewFrame(byte[] data,Camera cam)
{
Camera.Size previewSize = cam.getParameters().getPreviewSize();
YuvImage yuvImage = new YuvImage(data, ImageFormat.NV21,previewSize.width,previewSize.height, null);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
yuvImage.compressToJpeg(new Rect(0,0,previewSize.width,previewSize.height),80,baos);
byte[] jdata = baos.toByteArray();
Bitmap bitmap = BitmapFactory.decodeByteArray(jdata,0,jdata.length);
}
};
How can I get my image data in real time? Could anyone please kindly give me a short example?
Thanks
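For context, here is how the two snippets above fit together with the (deprecated) android.hardware.Camera API; once startPreview() runs, onPreviewFrame is invoked for every preview frame, which is where the real-time data arrives:
public void surfaceCreated(SurfaceHolder holder) {
try {
camera = Camera.open(); // open the rear camera
camera.setPreviewDisplay(holder); // render the preview into the SurfaceView
camera.setPreviewCallback(previewCallback); // deliver NV21 frames to onPreviewFrame
camera.startPreview(); // frames start flowing from here on
} catch (IOException e) {
e.printStackTrace();
}
}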
For image-processing operations you can use the OpenCV library.
Here is sample code to process the frames directly from the camera using OpenCV:
import android.Manifest;
import android.content.pm.PackageManager;
import android.os.Bundle;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.ContextCompat;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.View;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.JavaCameraView;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Mat;
import static org.opencv.core.CvType.CV_8UC4;
public class MainActivity extends AppCompatActivity implements CameraBridgeViewBase.CvCameraViewListener2 {
private static final String TAG = "MainActivity";
JavaCameraView javaCameraView;
Mat frame;
public static final int CAMERA_PERMISSION_REQUEST_CODE = 3;
static {
System.loadLibrary("MyOpenCVLibs");
}
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch(status){
case LoaderCallbackInterface.SUCCESS:
{
javaCameraView.enableView();
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
if (ContextCompat.checkSelfPermission(this,Manifest.permission.CAMERA)!= PackageManager.PERMISSION_GRANTED) {
ActivityCompat.requestPermissions(this,new String[]{Manifest.permission.CAMERA},CAMERA_PERMISSION_REQUEST_CODE);
}
javaCameraView = (JavaCameraView) findViewById(R.id.java_camera_view);
javaCameraView.setVisibility(View.VISIBLE);
javaCameraView.setCvCameraViewListener(this);
}
@Override
protected void onPause(){
super.onPause();
if(javaCameraView!=null)
javaCameraView.disableView();
}
@Override
protected void onDestroy(){
super.onDestroy();
if(javaCameraView!=null)
javaCameraView.disableView();
}
@Override
protected void onResume(){
super.onResume();
if (OpenCVLoader.initDebug()) {
Log.i(TAG, "OpenCV loaded successfully.");
mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
} else {
Log.i(TAG, "OpenCV not loaded.");
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_1_0, this, mLoaderCallback);
}
}
@Override
public void onCameraViewStarted(int width, int height) {
frame = new Mat(height, width, CV_8UC4);
}
@Override
public void onCameraViewStopped() {
frame.release();
}
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
frame = inputFrame.rgba();
return frame;
}
}
This code checks whether OpenCV was imported properly and checks the camera permission.
Each input frame is saved in the frame variable.