Android CameraX throwing exception when enabling video capture use case

I'm using CameraX for the first time and following the Android documentation guide, but I'm having issues. I searched a lot but did not find anything helpful. Basically, I'm trying to capture video using CameraX, and my takeVideo() method code is:
@RequiresApi(api = Build.VERSION_CODES.P)
public void takeVideo() {
    if (videoCapture == null) {
        return;
    }
    Recording curRecording = recording;
    if (curRecording != null) {
        // Stop the current recording session.
        curRecording.stop();
        recording = null;
        return;
    }
    // Create and start a new recording session.
    String name = DateFormat.getInstance().format(new Date().getTime());
    ContentValues contentValues = new ContentValues();
    contentValues.put(MediaStore.MediaColumns.DISPLAY_NAME, name);
    contentValues.put(MediaStore.MediaColumns.MIME_TYPE, "video/mp4");
    if (Build.VERSION.SDK_INT > Build.VERSION_CODES.P) {
        contentValues.put(MediaStore.Video.Media.RELATIVE_PATH, "Movies/CameraX-Video");
    }
    MediaStoreOutputOptions mediaStoreOutputOptions =
            new MediaStoreOutputOptions.Builder(getContentResolver(), MediaStore.Video.Media.EXTERNAL_CONTENT_URI)
                    .setContentValues(contentValues)
                    .build();
    PendingRecording pendingRecording = videoCapture.getOutput()
            .prepareRecording(this, mediaStoreOutputOptions);
    if (ContextCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO) == PackageManager.PERMISSION_GRANTED) {
        pendingRecording.withAudioEnabled();
        recording = pendingRecording.start(getMainExecutor(), new Consumer<VideoRecordEvent>() {
            @Override
            public void accept(VideoRecordEvent videoRecordEvent) {
                if (videoRecordEvent instanceof VideoRecordEvent.Start) {
                    btnTakeVide.setText("Stop Video");
                } else if (videoRecordEvent instanceof VideoRecordEvent.Pause) {
                    // Handle the case where the active recording is paused.
                } else if (videoRecordEvent instanceof VideoRecordEvent.Resume) {
                    // Handle the case where the active recording is resumed.
                } else if (videoRecordEvent instanceof VideoRecordEvent.Finalize) {
                    btnTakeVide.setText("Start Video");
                    VideoRecordEvent.Finalize finalizeEvent = (VideoRecordEvent.Finalize) videoRecordEvent;
                    // Handle the finalize event for the active recording, checking Finalize.getError().
                    if (!finalizeEvent.hasError()) {
                        String msg = "Video capture succeeded: " + finalizeEvent.getOutputResults().getOutputUri();
                        Toast.makeText(MainActivity.this, msg, Toast.LENGTH_SHORT).show();
                    } else {
                        if (recording != null) {
                            recording.close();
                            recording = null;
                            Log.e("TAG", "Video capture ends with error: " + finalizeEvent.getError());
                        }
                    }
                }
            }
        });
    }
}
The code inside startCameraPreview() is:
private void startCameraPreview() {
    listenableFuture = ProcessCameraProvider.getInstance(MainActivity.this);
    listenableFuture.addListener(new Runnable() {
        @Override
        public void run() {
            try {
                cameraProvider = listenableFuture.get();
                preview = new Preview.Builder().build();
                preview.setSurfaceProvider(cameraView.getSurfaceProvider());
                recorder = new Recorder.Builder()
                        .setQualitySelector(QualitySelector.from(Quality.LOWEST))
                        .build();
                videoCapture = VideoCapture.withOutput(recorder);
                cameraSelector = CameraSelector.DEFAULT_BACK_CAMERA;
                cameraProvider.unbindAll();
                cameraProvider.bindToLifecycle(MainActivity.this, cameraSelector, preview, videoCapture);
            } catch (ExecutionException | InterruptedException e) {
                e.printStackTrace();
            }
        }
    }, ContextCompat.getMainExecutor(this));
}
The exception being thrown is:
I added the following dependencies:
def camerax_version = "1.2.0-alpha04"
implementation "androidx.camera:camera-lifecycle:$camerax_version"
implementation "androidx.camera:camera-core:$camerax_version"
implementation "androidx.camera:camera-camera2:$camerax_version"
implementation "androidx.camera:camera-view:1.1.0"

Sorry developers, I forgot to add the video dependency:
implementation "androidx.camera:camera-video:${camerax_version}"
After adding the dependency, it works fine.
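For reference, the complete dependency block with the video artifact included looks like this:
def camerax_version = "1.2.0-alpha04"
implementation "androidx.camera:camera-core:$camerax_version"
implementation "androidx.camera:camera-camera2:$camerax_version"
implementation "androidx.camera:camera-lifecycle:$camerax_version"
implementation "androidx.camera:camera-video:$camerax_version"
implementation "androidx.camera:camera-view:1.1.0"
The Recorder, VideoCapture, and PendingRecording classes used above live in camera-video, which is why binding the video capture use case failed without it.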

Related

How can I make other apps play/pause music?

I'm making a media controller app similar to this example made by Google: https://github.com/googlesamples/android-media-controller
However, I want to make a function that can resume or pause music playback given a package name. I managed to return a list of package names.
PS: I'm using React Native, which is why I need a function that I can call from the React side.
public void getMediaApps(Callback callback) {
    // = getPackageManager();
    ArrayList<MediaAppDetails> mediaApps = new ArrayList<MediaAppDetails>();
    Intent mediaBrowserIntent = new Intent(MediaBrowserServiceCompat.SERVICE_INTERFACE);
    List<ResolveInfo> services = packageManager.queryIntentServices(
            mediaBrowserIntent,
            PackageManager.GET_RESOLVED_FILTER
    );
    if (services != null && !services.isEmpty()) {
        for (ResolveInfo info : services) {
            mediaApps.add(new MediaAppDetails(info.serviceInfo, packageManager, resources));
        }
    }
    WritableArray waPackagenames = Arguments.createArray();
    // ArrayList<String> packagenames = ArrayList<String>()
    if (!mediaApps.isEmpty()) {
        for (MediaAppDetails mediaApp : mediaApps) {
            waPackagenames.pushString(mediaApp.packageName);
        }
    }
    callback.invoke(waPackagenames);
}
I've been trying to do this for three days now, but no luck.
It probably won't make much of a difference, but this is how far I got with the play function:
@ReactMethod
public void play(String packageName) {
    PackageManager pm = this.packageManager;
    Resources res = this.resources;
    ServiceInfo serviceInfo = MediaAppDetails.findServiceInfo(packageName, pm);
    mMediaAppDetails = new MediaAppDetails(serviceInfo, pm, res);
    MediaSessionCompat.Token token = mMediaAppDetails.sessionToken;
    if (token == null) {
        if (mMediaAppDetails.componentName != null) {
            mBrowser = new MediaBrowserCompat(this.reactContext, mMediaAppDetails.componentName,
                    new MediaBrowserCompat.ConnectionCallback() {
                        @Override
                        public void onConnected() {
                            setupMediaController();
                            // mBrowseMediaItemsAdapter.setRoot(mBrowser.getRoot());
                        }

                        @Override
                        public void onConnectionSuspended() {
                            // TODO(rasekh): shut down browser.
                            // mBrowseMediaItemsAdapter.setRoot(null);
                        }

                        @Override
                        public void onConnectionFailed() {
                            showToastAndFinish("connection failed .. shit!");
                        }
                    }, null);
            mBrowser.connect();
        } else if (mMediaAppDetails.sessionToken != null) {
            setupMediaController();
        }
        token = mBrowser.getSessionToken();
        Toast.makeText(this.reactContext, "no token can't open controller", Toast.LENGTH_SHORT).show();
    }
    // Toast.makeText(this.reactContext, "found token", Toast.LENGTH_SHORT).show();
    if (mBrowser == null) {
        mBrowser = new MediaBrowserCompat(this.reactContext, new ComponentName(packageName, "MainActivity"), null, null);
    }
    MediaControllerCompat.TransportControls transportControls;
    try {
        mController = new MediaControllerCompat(this.reactContext, token);
        if (mController != null) {
            transportControls = mController.getTransportControls();
            transportControls.play();
        }
    } catch (Exception e) {
        Log.w("Error", e);
        Log.w("Error", "couldn't create mediaControllerCompat");
    }
}
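One probable issue in the play() code above: MediaBrowserCompat.connect() is asynchronous, so the mBrowser.getSessionToken() call right after connect() runs before the browser is actually connected (getSessionToken() throws IllegalStateException in that state). A minimal sketch of the usual pattern, assuming the same reactContext and fields as above, creates the controller inside onConnected() instead:
// Sketch (not the asker's final code): create the controller once connected,
// since getSessionToken() is only valid after onConnected() fires.
mBrowser = new MediaBrowserCompat(this.reactContext, mMediaAppDetails.componentName,
        new MediaBrowserCompat.ConnectionCallback() {
            @Override
            public void onConnected() {
                try {
                    MediaControllerCompat controller = new MediaControllerCompat(
                            reactContext, mBrowser.getSessionToken());
                    controller.getTransportControls().play();
                } catch (Exception e) {
                    Log.w("Error", e);
                }
            }
        }, null);
mBrowser.connect();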

While taking video, camera.startCapturingVideo(null) automatically calls public void onVideoTaken(File video)

I use the following library for camera and video:
https://github.com/natario1/CameraView
Taking video usually works fine for me, but sometimes calling camera.startCapturingVideo(null) immediately triggers public void onVideoTaken(File video).
Camera activity code, onCreate():
setContentView(R.layout.activity_home);
ButterKnife.bind(this);
Fabric.with(this, new Crashlytics());
logUser();
view_group_photo.performClick();
if (BuildConfig.DEBUG) {
    CameraLogger.setLogLevel(CameraLogger.LEVEL_VERBOSE);
}
recorder = new MediaRecorder();
camera = findViewById(R.id.camera);
camera.destroy();
camera.start();
camera.addCameraListener(new CameraListener() {
    @Override
    public void onCameraError(@NonNull CameraException exception) {
        super.onCameraError(exception);
        camera.stop();
        camera.start();
    }

    @Override
    public void onPictureTaken(byte[] jpeg) {
        super.onPictureTaken(jpeg);
        Long tsLong = System.currentTimeMillis() / 1000;
        String ts = tsLong.toString();
        try {
            File root = new File(FILE_PATH_IMAGE);
            if (!root.exists()) {
                root.mkdirs();
            }
            @SuppressLint("SdCardPath") File myFile = new File(FILE_PATH_IMAGE + ts + FILE_EXTENSION_IMAGE);
            myFile.createNewFile();
            FileOutputStream stream = new FileOutputStream(myFile);
            stream.write(jpeg);
            stream.close();
            Intent webViewIntent = new Intent(HomeActivity.this, ImageCropActivity.class);
            Bundle bundle = new Bundle();
            bundle.putString("file_path", FILE_PATH_IMAGE + ts + FILE_EXTENSION_IMAGE);
            webViewIntent.putExtras(bundle);
            webViewIntent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
            startActivity(webViewIntent);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    @Override
    public void onVideoTaken(File video) {
        super.onVideoTaken(video);
        Long tsLong = System.currentTimeMillis() / 1000;
        String ts = tsLong.toString();
        try {
            File root = new File(FILE_PATH_VIDEO);
            if (!root.exists()) {
                root.mkdirs();
            }
            @SuppressLint("SdCardPath") File myFile = new File(FILE_PATH_VIDEO + ts + FILE_EXTENSION_VIDEO);
            myFile.createNewFile();
            try (InputStream in = new FileInputStream(video);
                 OutputStream out = new FileOutputStream(myFile)) {
                // Transfer bytes from in to out.
                byte[] buf = new byte[1024];
                int len;
                while ((len = in.read(buf)) > 0) {
                    out.write(buf, 0, len);
                }
            }
            AddFileToPojo("2", FILE_TYPE_VIDEO, FILE_PATH_VIDEO + ts + FILE_EXTENSION_VIDEO);
            IntentFunction(HomeActivity.this, ReportIncidentActivity.class);
        } catch (IOException e) {
            Log.e("error ", "video error" + e.getMessage());
        }
    }
});
mGoogleApiClient = new GoogleApiClient
        .Builder(this)
        .enableAutoManage(this, 0, HomeActivity.this)
        .addApi(Places.GEO_DATA_API)
        .addApi(Places.PLACE_DETECTION_API)
        .addConnectionCallbacks(HomeActivity.this)
        .addOnConnectionFailedListener(this)
        .build();
guessCurrentPlace();
// Get the gesture detector.
mDetector = new GestureDetector(this, new MyGestureListener());
if (camera.getFlash() == Flash.ON) {
    imageViewFlash.setImageDrawable(getResources().getDrawable(R.drawable.flash));
} else {
    imageViewFlash.setImageDrawable(getResources().getDrawable(R.drawable.flash_off));
}
viewGroupProgressLoad.setOnTouchListener(touchListener);
ConnectivityManager connectivityManager = (ConnectivityManager) getSystemService(Context.CONNECTIVITY_SERVICE);
assert connectivityManager != null;
NetworkInfo networkInfo = connectivityManager.getActiveNetworkInfo();
if (networkInfo != null && networkInfo.isConnected()) {
    ConstantVariables.INTERNET_CONNECTION = true;
    if (DEBUG) {
        Log.i("", ":: " + ConstantVariables.INTERNET_CONNECTION);
    }
} else {
    ConstantFunctions.IntentFunction(getApplicationContext(), NoInternetActivity.class);
    ConstantVariables.INTERNET_CONNECTION = false;
    if (DEBUG) {
        Log.i("", ":: " + ConstantVariables.INTERNET_CONNECTION);
    }
}
Camera video record function:
if (camera.getSessionType() == SessionType.PICTURE) {
    camera.setSessionType(SessionType.VIDEO);
}
if (camera.getSessionType() != SessionType.VIDEO) {
    return;
}
if (mCapturingPicture || mCapturingVideo) return;
mCapturingVideo = true;
/*camera.setVideoMaxSize(1000000);*/
camera.setVideoMaxDuration(300000);
camera.startCapturingVideo(null);
Camera video stop function:
camera.stopCapturingVideo();
camera.destroy();
myCountDownTimer.cancel();
isSpeakButtonLongPressed = false;
Log output:
I/CameraController: Restart: returned from start. Dispatching. State: STATE_STARTED
I/CameraCallbacks: dispatchOnCameraOpened com.otaliastudios.cameraview.CameraOptions#61755df
W/MediaRecorder: mediarecorder went away with unhandled events
W/MediaRecorder: mediarecorder went away with unhandled events
I/CameraController: Stop: executing. State: STATE_STARTED
Stop: about to call onStop()
I/Camera1: onStop: About to clean up.
onStop: Clean up. Ending video.
endVideoImmediately: is capturing: false
onStop: Clean up. Stopping preview.
I/Camera1: onStop: Clean up. Stopped preview.
onStop: Clean up. Releasing camera.
I/Camera1: onStop: Clean up. Released camera.
W/Camera1: onStop: Clean up. Returning.
I/CameraController: Stop: returned from onStop(). Dispatching.
I/CameraCallbacks: dispatchOnCameraClosed
E/MediaMetadataRetrieverJNI: getFrameAtTime: videoFrame is a NULL pointer
I/Choreographer: Skipped 67 frames! The application may be doing too much work on its main thread.
I/CameraPreview: crop: applied scaleX= 1.1407409
crop: applied scaleY= 1.0
I updated the code with:
final Handler handler = new Handler();
handler.postDelayed(new Runnable() {
    @Override
    public void run() {
        camera.startCapturingVideo(null);
    }
}, 1000);
I give a one-second delay because startCapturingVideo() does not always initialize and start recording right away; on some occasions the hardware does not respond quickly, so execution of the next lines failed because startCapturingVideo() had not actually started yet.
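A fixed delay is only a guess at how long the hardware needs. A less fragile sketch, and this is a suggestion of mine rather than the answerer's code, is to wait for the onCameraOpened callback that the CameraView 1.x CameraListener provides (the log above shows dispatchOnCameraOpened firing) and only start capturing then:
// Sketch: start recording from onCameraOpened instead of after a fixed delay.
// Assumes the same camera, mCapturingPicture and mCapturingVideo fields as in
// the onCreate() code above.
camera.addCameraListener(new CameraListener() {
    @Override
    public void onCameraOpened(CameraOptions options) {
        super.onCameraOpened(options);
        // The camera has finished starting, so startCapturingVideo() can initialize.
        if (!mCapturingPicture && !mCapturingVideo) {
            mCapturingVideo = true;
            camera.setVideoMaxDuration(300000);
            camera.startCapturingVideo(null);
        }
    }
});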

Watson Speech to Text Android

I am using the example code from IBM's GitHub for Speech to Text, but this line is giving me problems. Android Studio throws an error saying that I don't need the "capture" argument, but when I remove it, I get a runtime error that the audio cannot be null.
speechService.recognizeUsingWebSocket(capture, getRecognizeOptions(), new MicrophoneRecognizeDelegate());
It is used in this part:
private void recordMessage() {
    //mic.setEnabled(false);
    speechService = new SpeechToText();
    speechService.setUsernameAndPassword(STT_username, STT_password);
    speechService.setEndPoint("https://stream.watsonplatform.net/speech-to-text/api");
    if (!listening) {
        capture = microphoneHelper.getInputStream(true);
        InputStream myInputStream = new MicrophoneInputStream(true);
        new Thread(new Runnable() {
            @Override
            public void run() {
                try {
                    speechService.recognizeUsingWebSocket(capture, getRecognizeOptions(), new MicrophoneRecognizeDelegate());
                } catch (Exception e) {
                    showError(e);
                }
            }
        }).start();
        listening = true;
        Toast.makeText(MainActivity.this, "Listening....Click to Stop", Toast.LENGTH_LONG).show();
    } else {
        try {
            microphoneHelper.closeInputStream();
            listening = false;
            Toast.makeText(MainActivity.this, "Stopped Listening....Click to Start", Toast.LENGTH_LONG).show();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
This is a very late answer, but just in case anyone needs this: update your call to:
speechService.recognizeUsingWebSocket(getRecognizeOptions(capture), new MicrophoneRecognizeDelegate());
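For context: newer versions of the Watson Java SDK moved the audio stream into RecognizeOptions, which is why recognizeUsingWebSocket no longer accepts a separate capture argument. A sketch of what getRecognizeOptions(capture) might look like against such an SDK version (the content type and interim-results settings are illustrative, not from the original post):
// Sketch: in newer Watson SDKs the audio stream is carried inside RecognizeOptions.
private RecognizeOptions getRecognizeOptions(InputStream captureStream) {
    return new RecognizeOptions.Builder()
            .audio(captureStream)                      // the microphone stream ("capture")
            .contentType(ContentType.OPUS.toString())  // matches MicrophoneInputStream(true)
            .interimResults(true)                      // stream partial transcripts
            .build();
}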

Chromecast RemoteMediaPlayer resumes at seek position 0 when using NanoHTTPD

The Chromecast remote player resumes at seek position 0 when using a NanoHTTPD server. The main issue: when I seek in the video player on the device it works fine, but on the TV the seek bar resets to position 0 and the media starts from the beginning.
When I call mRemoteMediaPlayer.seek() in onSeekChanged() I get a success result, but on the TV the seek bar is still set to position 0 and the media starts from the beginning.
public class webserver extends NanoHTTPD {
    FileInputStream fileInputStream;

    public webserver() {
        super(8080);
    }

    @Override
    public Response serve(String uri, Method method, Map<String, String> header, Map<String, String> parameters, Map<String, String> files) {
        String mediasend = " ";
        long size = 0;
        FileInputStream fis = null;
        try {
            fis = new FileInputStream(path);
            //byte[] buffer = new byte[(int) fis.getChannel().size()];
            size = fis.getChannel().size();
        } catch (Exception e) {
            e.printStackTrace();
        }
        switch (mediatype) {
            case "photo":
                mediasend = "image/jpeg";
                break;
            case "audio":
                mediasend = "audio/mp3";
                break;
            case "video":
                mediasend = "video/mp4";
                break;
        }
        return new NanoHTTPD.Response(com.castoffline.castActivity.NanoHTTPD.Response.Status.OK, mediasend, fis, size);
    }
}
Cast connection code:
Cast.CastApi.launchApplication(mApiClient, getString(R.string.app_id), false).setResultCallback(new ResultCallback<Cast.ApplicationConnectionResult>() {
    @Override
    public void onResult(ApplicationConnectionResult result) {
        Status status = result.getStatus();
        if (status.isSuccess()) {
            ApplicationMetadata applicationMetadata = result.getApplicationMetadata();
            mSessionId = result.getSessionId();
            String applicationStatus = result.getApplicationStatus();
            boolean wasLaunched = result.getWasLaunched();
            Log.d(TAG, "application name: " + applicationMetadata.getName() + ", status: " + applicationStatus + ", sessionId: " + mSessionId + ", wasLaunched: " + wasLaunched);
            mApplicationStarted = true;
            mRemoteMediaPlayer = new RemoteMediaPlayer();
            /*
             * Identify the mediatype and send the metadata details to the media info.
             */
            switch (mediatype) {
                case "audio":
                    mediaMetadata = new MediaMetadata(MediaMetadata.MEDIA_TYPE_MUSIC_TRACK);
                    mediaMetadata.putString(MediaMetadata.KEY_TITLE, "MY MUSIC TRACK" + ": " + audioTitle);
                    mediaMetadata.putString(MediaMetadata.KEY_ARTIST, audioArtist);
                    mediaMetadata.addImage(new WebImage(Uri.parse("https://www.googledrive.com/host/0B61ekPEN_94sZ21mcnQtbVU2RHM/media.png")));
                    mediaInfo = new MediaInfo.Builder(ipdevice).setContentType(mimetype).setStreamType(MediaInfo.STREAM_TYPE_BUFFERED).setMetadata(mediaMetadata).build();
                    break;
                case "video":
                    mediaMetadata = new MediaMetadata(MediaMetadata.MEDIA_TYPE_MOVIE);
                    mediaMetadata.addImage(new WebImage(Uri.parse("https://www.googledrive.com/host/0B61ekPEN_94sZ21mcnQtbVU2RHM/film_reel.png")));
                    mediaMetadata.putString(MediaMetadata.KEY_TITLE, "My MOVIE" + ": " + videoTitle);
                    mediaInfo = new MediaInfo.Builder(ipdevice).setContentType(mimetype).setStreamType(MediaInfo.STREAM_TYPE_BUFFERED).setMetadata(mediaMetadata).build();
                    break;
                case "photo":
                    mediaMetadata = new MediaMetadata(MediaMetadata.MEDIA_TYPE_PHOTO);
                    mediaMetadata.putString(MediaMetadata.KEY_TITLE, "My PHOTO" + ": ");
                    mediaInfo = new MediaInfo.Builder(ipdevice).setContentType(mimetype).setStreamType(MediaInfo.STREAM_TYPE_BUFFERED).setMetadata(mediaMetadata).build();
                    break;
                default:
            }
            try {
                Cast.CastApi.setMessageReceivedCallbacks(mApiClient, mRemoteMediaPlayer.getNamespace(), mRemoteMediaPlayer);
            } catch (IOException e) {
                Log.d(TAG, "Exception while creating media channel", e);
            }
            try {
                mRemoteMediaPlayer.load(mApiClient, mediaInfo, false, 0).setResultCallback(new ResultCallback<RemoteMediaPlayer.MediaChannelResult>() {
                    @Override
                    public void onResult(MediaChannelResult result) {
                        if (result.getStatus().isSuccess()) {
                            Log.d(TAG, "Media loaded successfully");
                        }
                    }
                });
                /*
                 * Checks whether the video is playing or paused and accordingly plays/pauses it on the receiver.
                 */
                videoview.setPlayPauseListener(new CustomVideoView.PlayPauseListener() {
                    AudioManager amanager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);

                    @Override
                    public void onPlay() {
                        playbackPaused = false; // videoView is playing
                        if (mSelectedDevice != null && mApiClient != null && mRemoteMediaPlayer != null) {
                            // Volume is muted locally while media is casting to the Chromecast.
                            amanager.setStreamMute(AudioManager.STREAM_MUSIC, true);
                            sendMediaControl(playbackPaused, false);
                        } else {
                            amanager.setStreamVolume(AudioManager.STREAM_MUSIC, 3, 1);
                        }
                    }

                    @Override
                    public void onPause() {
                        playbackPaused = true; // videoView is paused
                        if (mSelectedDevice != null && mApiClient != null && mRemoteMediaPlayer != null) {
                            amanager.setStreamMute(AudioManager.STREAM_MUSIC, false);
                            sendMediaControl(playbackPaused, false);
                        } else {
                            amanager.setStreamVolume(AudioManager.STREAM_MUSIC, 3, 1);
                        }
                    }

                    /* Currently the seek function is not working for media playback while casting.
                     * (non-Javadoc)
                     * @see com.castoffline.castActivity.CustomVideoView.PlayPauseListener#onSeekChanged(int)
                     */
                    @Override
                    public void onSeekChanged(int pos) {
                        Log.d(String.valueOf(videoview.getCurrentPosition()), "seekinsie");
                        // seek(videoview.getCurrentPosition());
                        Log.d("mimetype ", mimetype);
                        Log.d("seek1", "" + pos);
                        if (mSelectedDevice != null && mApiClient != null && mRemoteMediaPlayer != null) {
                            videoview.pause();
                            final long position = videoview.getCurrentPosition();
                            Log.d("seek", "" + position);
                            mRemoteMediaPlayer.seek(mApiClient, position, RemoteMediaPlayer.RESUME_STATE_UNCHANGED).setResultCallback(new ResultCallback<RemoteMediaPlayer.MediaChannelResult>() {
                                @Override
                                public void onResult(MediaChannelResult result) {
                                    if (result.getStatus().isSuccess()) {
                                        Log.d(String.valueOf("State Code " + result.getStatus().getStatusCode()), "" + mRemoteMediaPlayer.getApproximateStreamPosition());
                                    }
                                }
                            });
                            mRemoteMediaPlayer.setOnStatusUpdatedListener(new RemoteMediaPlayer.OnStatusUpdatedListener() {
                                @Override
                                public void onStatusUpdated() {
                                    @SuppressWarnings("unused")
                                    MediaStatus mediaStatus = mRemoteMediaPlayer.getMediaStatus();
                                    Log.d("seek state update", "" + mediaStatus);
                                }
                            });
                        }
                    }
                });
            } catch (IllegalStateException e) {
                Log.d(TAG, "Problem occurred with media during loading", e);
            } catch (Exception e) {
                Log.d(TAG, "Problem opening media during loading", e);
            }
        } else {
            Log.e(TAG, "application could not launch");
            teardown();
        }
    }
});
}
Remote player control code:
private void sendMediaControl(final boolean playbackPaused, final boolean change) {
    if (mApiClient != null && mRemoteMediaPlayer != null) {
        mRemoteMediaPlayer.requestStatus(mApiClient).setResultCallback(new ResultCallback<RemoteMediaPlayer.MediaChannelResult>() {
            @Override
            public void onResult(RemoteMediaPlayer.MediaChannelResult mediaChannelResult) {
                if (playbackPaused) {
                    mRemoteMediaPlayer.pause(mApiClient);
                } else {
                    mRemoteMediaPlayer.play(mApiClient);
                }
            }
        });
    }
}
There can be two things here:
1. I don't know whether it is a limitation of NanoHTTPD or a configuration issue, but what you are seeing happens because NanoHTTPD (at least the way you have configured it) doesn't support seeking. When you seek, your receiver calls into your HTTP server (NanoHTTPD in this case), passes a position, and asks the web server to start streaming from there. If the web server doesn't support that, you will not be able to seek successfully. As a test, set up an Apache server on your laptop, just for testing, point to that instead of your embedded web server, and see whether seeking works.
2. There might be a mismatch between the units of position: if, say, your local player uses seconds and reports 60 when it is a minute into the content, and you send that to the cast receiver, it will be interpreted as 60 milliseconds, which is practically the same as 0 seconds. Check on that too.
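On the first point: seeking over plain HTTP relies on Range requests, so the embedded server's serve() method has to honor the Range header and stream from the requested byte offset. A rough sketch against the same old NanoHTTPD API used in the webserver class above (the path field and the fixed-length Response constructor are taken from that class; details vary by NanoHTTPD version):
// Sketch: honor HTTP Range requests so the receiver can seek.
@Override
public Response serve(String uri, Method method, Map<String, String> header, Map<String, String> parameters, Map<String, String> files) {
    try {
        FileInputStream fis = new FileInputStream(path);
        long total = fis.getChannel().size();
        String range = header.get("range"); // NanoHTTPD lowercases header names, e.g. "bytes=1048576-"
        if (range != null && range.startsWith("bytes=")) {
            long start = Long.parseLong(range.substring("bytes=".length()).split("-")[0]);
            fis.skip(start); // begin streaming from the requested offset
            Response res = new NanoHTTPD.Response(Response.Status.PARTIAL_CONTENT, "video/mp4", fis, total - start);
            res.addHeader("Content-Range", "bytes " + start + "-" + (total - 1) + "/" + total);
            res.addHeader("Accept-Ranges", "bytes");
            return res;
        }
        Response res = new NanoHTTPD.Response(Response.Status.OK, "video/mp4", fis, total);
        res.addHeader("Accept-Ranges", "bytes");
        return res;
    } catch (Exception e) {
        return new NanoHTTPD.Response(Response.Status.INTERNAL_ERROR, "text/plain", e.toString());
    }
}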

Android VpnService block packets

Edit: I'm able to start the internet through the VPN. The other issue is that I'm now receiving packets in my VpnService in the piece of code below, but I can't think of a proper way to block a particular website. I've tried name resolution using InetAddress, but that's not giving the expected result:
@Override
public void run() {
    Log.i(TAG, "Started");
    FileChannel vpnInput = new FileInputStream(vpnFileDescriptor).getChannel();
    FileChannel vpnOutput = new FileOutputStream(vpnFileDescriptor).getChannel();
    try {
        ByteBuffer bufferToNetwork = null;
        boolean dataSent = true;
        boolean dataReceived;
        while (!Thread.interrupted()) {
            if (dataSent)
                bufferToNetwork = ByteBufferPool.acquire();
            int readBytes = vpnInput.read(bufferToNetwork);
            if (readBytes > 0) {
                dataSent = true;
                bufferToNetwork.flip();
                Packet packet = new Packet(bufferToNetwork);
                Log.e("loggg packet", packet.toString());
                if (packet.isUDP()) {
                    deviceToNetworkUDPQueue.offer(packet);
                } else if (packet.isTCP()) {
                    deviceToNetworkTCPQueue.offer(packet);
                } else {
                    Log.w(TAG, "Unknown packet type");
                    dataSent = false;
                }
            } else {
                dataSent = false;
            }
            ByteBuffer bufferFromNetwork = networkToDeviceQueue.poll();
            if (bufferFromNetwork != null) {
                bufferFromNetwork.flip();
                vpnOutput.write(bufferFromNetwork);
                dataReceived = true;
                ByteBufferPool.release(bufferFromNetwork);
            } else {
                dataReceived = false;
            }
            if (!dataSent && !dataReceived)
                Thread.sleep(10);
        }
    } catch (InterruptedException e) {
        Log.i(TAG, "Stopping");
    } catch (IOException e) {
        Log.w(TAG, e.toString(), e);
    } finally {
        closeResources(vpnInput, vpnOutput);
    }
}
I'm receiving a packet in this format:
Packet{ip4Header=IP4Header{version=4, totalLength=40, protocol=TCP, headerChecksum=14192, sourceAddress=10.0.8.1, destinationAddress=216.58.196.100}, tcpHeader=TCPHeader{sourcePort=39217, destinationPort=443, sequenceNumber=800911985, acknowledgementNumber=823271551, headerLength=20, window=29596, checksum=32492, flags= ACK}, payloadSize=0}
I'm using THIS CODE as a starter and am unable to block packets.
Apps like GreyShirts No Root Firewall and Mobiwol No Root Firewall work perfectly, and they are also VPN-based. Any suggestion is most welcome.
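As an illustration of the packet-dropping idea: the packet dump above already exposes the field needed for a crude block, the IP4Header destination address. A hedged sketch (the blockedAddresses set is hypothetical, and the packet.ip4Header accessors are assumed from the Packet class in the linked sample) drops matching TCP packets in the run() loop instead of queueing them:
// Sketch: drop TCP packets destined for blocked addresses instead of queueing them.
// blockedAddresses would live as a field; 216.58.196.100 is the address from the
// packet dump above, used purely as an example.
Set<String> blockedAddresses = new HashSet<>(Arrays.asList("216.58.196.100"));

if (packet.isTCP()) {
    String dest = packet.ip4Header.destinationAddress.getHostAddress();
    if (blockedAddresses.contains(dest)) {
        // Do not offer the packet to deviceToNetworkTCPQueue; the connection
        // never gets forwarded, which effectively blocks the site.
        Log.i(TAG, "Dropped packet to blocked address " + dest);
    } else {
        deviceToNetworkTCPQueue.offer(packet);
    }
}
Blocking by destination IP is only as good as the name resolution behind it; large sites rotate addresses across CDNs, which may be why the InetAddress lookup did not give the expected result.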
