I'm trying to open a .vcf file I've created from data received via a QR code.
My code for opening the file:
/**
 * Opens the saved vCard file with the system contacts importer.
 *
 * On API 24+ passing a file:// Uri to another app throws
 * FileUriExposedException, so the file is exposed through FileProvider as a
 * content:// Uri instead. This requires a matching <provider> entry in the
 * manifest whose authority is "<applicationId>.fileprovider".
 */
private fun openBackup(savedVCard: File) {
try {
val vcfMimeType = MimeTypeMap.getSingleton().getMimeTypeFromExtension("vcf")
// Use a content:// Uri instead of Uri.fromFile() — file:// Uris handed to
// other apps throw FileUriExposedException on API 24+.
val vcardUri = androidx.core.content.FileProvider.getUriForFile(
mContext!!, mContext!!.packageName + ".fileprovider", savedVCard)
val openVcfIntent = Intent(Intent.ACTION_VIEW)
openVcfIntent.setDataAndType(vcardUri, vcfMimeType)
// The receiving activity needs temporary permission to read our content Uri.
openVcfIntent.addFlags(Intent.FLAG_GRANT_READ_URI_PERMISSION)
// Try to explicitly specify activity in charge of opening the vCard so that the user doesn't have to choose
// https://stackoverflow.com/questions/6827407/how-to-customize-share-intent-in-android/9229654#9229654
try {
if (mContext!!.packageManager != null) {
val resolveInfos = mContext!!.packageManager.queryIntentActivities(openVcfIntent, 0)
if (resolveInfos != null) {
for (resolveInfo in resolveInfos) {
val activityInfo = resolveInfo.activityInfo
if (activityInfo != null) {
val packageName = activityInfo.packageName
val name = activityInfo.name
// Find the needed Activity based on Android source files: http://grepcode.com/search?query=ImportVCardActivity&start=0&entity=type&n=
if (packageName != null && packageName == "com.android.contacts" && name != null && name.contains("ImportVCardActivity")) {
openVcfIntent.`package` = packageName
break
}
}
}
}
}
} catch (ignored: Exception) {
}
startActivity(openVcfIntent)
} catch (exception: Exception) {
Log.d("DEBUG", exception.toString())
}
}
And I receive this exception :
android.os.FileUriExposedException:
file:///storage/emulated/0/Android/data/pathformyapplication/files/qr.vcf exposed beyond app through Intent.getData()
I think the problem is Uri.fromFile. I also tried Uri.parse with my vCard in String format, but that didn't work either.
Related
I'm new to WebRTC; I'm using the AWS WebRTC demo with the Android Navigation Component. When I exit the app with the back button, I can see that WebRTC is still running, as shown by the following log:
EglRenderer: cameraSurfaceViewDropping frame - No surface
In my fragment's onStop method, my code is as follows:
// Tear down the WebRTC session: release renderers, dispose native peers and
// sources, and drop every reference so nothing keeps rendering after stop.
Thread.setDefaultUncaughtExceptionHandler(null)
rootEglBase?.release()
rootEglBase = null
remoteView?.release()
remoteView = null
localPeer?.dispose()
localPeer = null
videoSource?.dispose()
videoSource = null
videoCapturer?.let { capturer ->
    try {
        capturer.stopCapture()
        capturer.dispose()
    } catch (e: InterruptedException) {
        Timber.e("Failed to stop webrtc video capture. $e ")
    }
    videoCapturer = null
}
client?.disconnect()
client = null
peerConnectionFoundMap.clear()
pendingIceCandidatesMap.clear()
However, I only see the problem when I exit the app through the back button; if I kill the app, I don't get the log. Has anyone experienced this?
Thank you.
This is the way you should destroy your WebRTC session on onDestroy() or onStop().
// Orderly WebRTC shutdown: stop capture first, then close the peer, dispose
// media sources/tracks/streams, release both renderers, and finally release
// the shared EGL context.
videoCaptureAndroid?.stopCapture()
videoCaptureAndroid = null
localPeer?.close()
localPeer = null
videoSource?.dispose()
videoSource = null
audioSource?.dispose()
audioSource = null
localAudioTrack?.dispose()
localAudioTrack = null
currentRemoteMediaStream?.dispose()
currentRemoteMediaStream = null
localVideoView?.release()
localVideoView = null
remoteVideoView?.release()
remoteVideoView = null
rootEglBase.release()
I am trying to capture an image every second. I use a handler for the task. Here is the code for that part.
// Repeating-capture toggle: one Handler drives a self-rescheduling Runnable
// that takes a dual-cam still every second until the button is pressed again.
// NOTE(review): Handler() with no Looper argument is deprecated; presumably
// this runs on the main thread — consider Handler(Looper.getMainLooper()).
// Also confirm the callbacks are removed in onStop(), otherwise the loop keeps
// firing after the UI is gone.
val handler = Handler()
button.setOnClickListener {
prepareUIForCapture()
// Second press: cancel the pending runnable and reset the screen.
if(isRunning){
handler.removeCallbacksAndMessages(null)
restartActivity()
}else{
button.text = "Stop"
// First press: schedule the first shot 1 s out; the Runnable re-posts itself.
handler.postDelayed(object : Runnable {
override fun run(){
twoLens.reset()
twoLens.isTwoLensShot = true
MainActivity.cameraParams.get(dualCamLogicalId).let {
if (it?.isOpen == true) {
Logd("In onClick. Taking Dual Cam Photo on logical camera: " + dualCamLogicalId)
takePicture(this@MainActivity, it)
Toast.makeText(applicationContext, "Captured", Toast.LENGTH_LONG).show()
}
}
// Re-arm unconditionally every 1000 ms. NOTE(review): this does not wait
// for the previous capture to complete — if a capture takes longer than
// 1 s, requests pile up, which matches the reported lag/crash after a
// few minutes of running.
handler.postDelayed(this, 1000)
}
}, 1000)
}
isRunning = !isRunning
}
}
This works as intended; however, after around 3 minutes the camera preview lags, the capture stops, and eventually the application crashes.
Here is the code for takePicture()
/**
 * Issues a single still-capture request on the session held in [params].
 * For the dual-cam logical camera it targets both physical lenses'
 * ImageReader surfaces so one request yields two JPEGs.
 */
fun takePicture(activity: MainActivity, params: CameraParams) {
if (!params.isOpen) {
return
}
try {
Logd("In captureStillPicture.")
val camera = params.captureSession?.getDevice()
if (null != camera) {
// Build a fresh still-capture request for every shot.
params.captureBuilder = camera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE)
// params.captureBuilder?.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE)
params.captureBuilder?.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO)
if (params.id.equals(dualCamLogicalId) && twoLens.isTwoLensShot) {
val normalParams: CameraParams? = MainActivity.cameraParams.get(normalLensId)
val wideParams: CameraParams? = MainActivity.cameraParams.get(wideAngleId)
if (null == normalParams || null == wideParams)
return
Logd("In captureStillPicture. This is a Dual Cam shot.")
// Route the capture to both physical cameras' ImageReader surfaces.
params.captureBuilder?.addTarget(normalParams.imageReader?.surface!!)
params.captureBuilder?.addTarget(wideParams.imageReader?.surface!!)
}
params.captureBuilder?.set(CaptureRequest.JPEG_QUALITY, 50)
// Orientation
val rotation = activity.getWindowManager().getDefaultDisplay().getRotation()
var capturedImageRotation = getOrientation(params, rotation)
params.captureBuilder?.set(CaptureRequest.JPEG_ORIENTATION, capturedImageRotation)
// NOTE(review): stopRepeating() halts the preview stream and nothing in this
// function restarts it. Called once per second from the capture loop, this is
// a likely cause of the frozen preview — confirm whether
// StillCaptureSessionCallback re-arms the repeating preview request.
try {
params.captureSession?.stopRepeating()
} catch (e: CameraAccessException) {
e.printStackTrace()
}
//Do the capture
// API 28+ uses the Executor-based overload; older releases use the Handler one.
if (28 <= Build.VERSION.SDK_INT )
params.captureSession?.captureSingleRequest(params.captureBuilder?.build(), params.backgroundExecutor, StillCaptureSessionCallback(activity, params))
else
params.captureSession?.capture(params.captureBuilder?.build(), StillCaptureSessionCallback(activity, params),
params.backgroundHandler)
}
} catch (e: CameraAccessException) {
e.printStackTrace()
} catch (e: IllegalStateException) {
Logd("captureStillPicture IllegalStateException, aborting: " + e)
}
}
Here is the part to create the preview.
/**
 * Builds the preview capture session for [camera]. On API 28+ with the
 * dual-cam logical id it configures three outputs (wide preview surface plus
 * one ImageReader per physical lens), pinning each output to its physical
 * camera id; otherwise it opens a plain preview + ImageReader session.
 */
fun createCameraPreviewSession(activity: MainActivity, camera: CameraDevice, params: CameraParams) {
Logd("In createCameraPreviewSession.")
if (!params.isOpen) {
return
}
try {
if (Build.VERSION.SDK_INT >= 28 && params.id.equals(MainActivity.dualCamLogicalId)) {
val normalParams: CameraParams? = MainActivity.cameraParams.get(normalLensId)
val wideParams: CameraParams? = MainActivity.cameraParams.get(wideAngleId)
Logd("In createCameraPreview. This is a Dual Cam stream. Starting up simultaneous streams.")
if (null == normalParams || null == wideParams)
return
// val normalTexture = normalParams.previewTextureView?.surfaceTexture
val wideTexture = wideParams.previewTextureView?.surfaceTexture
// if (null == normalTexture || null == wideTexture)
// return
// normalTexture.setDefaultBufferSize(400,400)
// NOTE(review): wideTexture!! throws if the TextureView is not ready yet;
// the commented-out null checks above used to guard this — confirm intent.
wideTexture!!.setDefaultBufferSize(400,400)
// val normalSurface = Surface(normalTexture)
val wideSurface = Surface(wideTexture)
// Pin each output to its physical camera id so both lenses stream together.
val wideOutputConfigPreview = OutputConfiguration(wideSurface)
val wideOutputConfigImageReader = OutputConfiguration(wideParams.imageReader?.surface!!)
wideOutputConfigPreview.setPhysicalCameraId(wideAngleId)
wideOutputConfigImageReader.setPhysicalCameraId(wideAngleId)
// val normalOutputConfigPreview = OutputConfiguration(normalSurface)
val normalOutputConfigImageReader = OutputConfiguration(normalParams.imageReader?.surface!!)
normalOutputConfigImageReader.setPhysicalCameraId(normalLensId)
val sessionConfig = SessionConfiguration(SessionConfiguration.SESSION_REGULAR,
Arrays.asList( normalOutputConfigImageReader, wideOutputConfigPreview, wideOutputConfigImageReader),
params.backgroundExecutor, PreviewSessionStateCallback(activity, params))
params.previewBuilder = camera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW)
// params.previewBuilder?.addTarget(normalSurface)
params.previewBuilder?.addTarget(wideSurface)
camera.createCaptureSession(sessionConfig)
} else {
val texture = params.previewTextureView?.surfaceTexture
if (null == texture)
return
val surface = Surface(texture)
// NOTE(review): Surface(texture) returns a non-null type, so this check is dead.
if (null == surface)
return
params.previewBuilder = camera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW)
params.previewBuilder?.addTarget(surface)
val imageSurface = params.imageReader?.surface
if (null == imageSurface)
return
// SessionConfiguration path on P+; legacy createCaptureSession otherwise.
if (Build.VERSION.SDK_INT >= 28) {
val sessionConfig = SessionConfiguration(SessionConfiguration.SESSION_REGULAR,
Arrays.asList(OutputConfiguration(surface), OutputConfiguration(imageSurface)),
params.backgroundExecutor, PreviewSessionStateCallback(activity, params))
camera.createCaptureSession(sessionConfig)
} else {
camera.createCaptureSession(Arrays.asList(surface, imageSurface),
PreviewSessionStateCallback(activity, params), params.backgroundHandler)
}
}
} catch (e: CameraAccessException) {
e.printStackTrace()
} catch (e: IllegalStateException) {
Logd("createCameraPreviewSession IllegalStateException, aborting: " + e)
}
}
I tried clearing the buffers and bitmaps, but the issue persists. I am thinking I might not be using the correct approach towards continuous capture. Is this way correct?
Any idea why the preview freezes?
Here is the original repo: https://github.com/google/basicbokeh
I did changes to suit my needs since it did not run on my device (HWMate20Pro) off the shelf and I needed continuous captures from both lenses.
Thanks.
I'm making a media controller app similar to this example made by google. https://github.com/googlesamples/android-media-controller
However, I want to make a function that can resume or pause music playback for a given package name. I managed to return a list of package names.
PS: I'm using React Native, which is why I need a function that I can call from the React side.
/**
 * Collects the package name of every installed app that exposes a
 * MediaBrowserService (i.e. can be controlled as a media app) and hands the
 * list to the React Native callback as a WritableArray of strings.
 *
 * NOTE(review): presumably this needs @ReactMethod to be callable from JS,
 * like play() below — confirm.
 *
 * @param callback invoked exactly once with the array of package names
 */
public void getMediaApps (Callback callback) {
    ArrayList<MediaAppDetails> mediaApps = new ArrayList<MediaAppDetails>();
    // Every service answering this intent is a media browser service.
    Intent mediaBrowserIntent = new Intent(MediaBrowserServiceCompat.SERVICE_INTERFACE);
    List<ResolveInfo> services = packageManager.queryIntentServices(
            mediaBrowserIntent,
            PackageManager.GET_RESOLVED_FILTER
    );
    if (services != null && !services.isEmpty()) {
        for (ResolveInfo info : services) {
            mediaApps.add(
                    new MediaAppDetails(info.serviceInfo, packageManager, resources)
            );
        }
    }
    WritableArray waPackagenames = Arguments.createArray();
    // mediaApps is constructed above and can never be null, so the original
    // `mediaApps != null` guard was dead; iterating an empty list is a no-op.
    for (MediaAppDetails mediaApp : mediaApps) {
        waPackagenames.pushString(mediaApp.packageName);
    }
    callback.invoke(waPackagenames);
}
I've been trying to do this for 3 days now, but no luck.
It probably won't make much of a difference, but this is how far I've got with the play function.
#ReactMethod
public void play (String packageName) {
PackageManager pm = this.packageManager;
Resources res = this.resources;
ServiceInfo serviceInfo = MediaAppDetails.findServiceInfo(packageName, pm);
mMediaAppDetails = new MediaAppDetails(serviceInfo, pm, res);
MediaSessionCompat.Token token = mMediaAppDetails.sessionToken;
if (token == null) {
if (mMediaAppDetails.componentName != null) {
mBrowser = new MediaBrowserCompat(this.reactContext, mMediaAppDetails.componentName,
new MediaBrowserCompat.ConnectionCallback() {
#Override
public void onConnected() {
setupMediaController();
// mBrowseMediaItemsAdapter.setRoot(mBrowser.getRoot());
}
#Override
public void onConnectionSuspended() {
//TODO(rasekh): shut down browser.
// mBrowseMediaItemsAdapter.setRoot(null);
}
#Override
public void onConnectionFailed() {
showToastAndFinish("connection failed .. shit!");
}
}, null);
mBrowser.connect();
} else if (mMediaAppDetails.sessionToken != null) {
setupMediaController();
}
token = mBrowser.getSessionToken();
Toast.makeText(this.reactContext, "no token can't open controller", Toast.LENGTH_SHORT).show();
// toast
}
// Toast.makeText(this.reactContext, "found token", Toast.LENGTH_SHORT).show();
if(mBrowser == null )mBrowser = new MediaBrowserCompat(this.reactContext, new ComponentName(packageName, "MainActivity"), null, null);
MediaControllerCompat.TransportControls transportControls;
try{
mController = new MediaControllerCompat(this.reactContext, token);
if(mController!= null) {
transportControls = mController.getTransportControls();
transportControls.play();
}
}catch(Exception E){
Log.w("Error",E);
Log.w("Error","couldn't create mediaControllerCompat");
// System.out.println(E);
// System.out.println("couldn't create mediaControllerCompat");
}
}
I am using CSipSimple to add a new feature: call transfer. For this feature I need the call ID of the call.
I see that when I place a call, the following function is called.
public void placeCallWithOption(Bundle b) {
if (service == null) {
return;
}
String toCall = "";
Long accountToUse = SipProfile.INVALID_ID;
// Find account to use
SipProfile acc = accountChooserButton.getSelectedAccount();
if(acc == null) {
return;
}
accountToUse = acc.id;
// Find number to dial
toCall = digits.getText().toString();
if(isDigit) {
toCall = PhoneNumberUtils.stripSeparators(toCall);
}
if(accountChooserFilterItem != null && accountChooserFilterItem.isChecked()) {
toCall = rewriteNumber(toCall);
}
if (TextUtils.isEmpty(toCall)) {
return;
}
// Well we have now the fields, clear theses fields
digits.getText().clear();
// -- MAKE THE CALL --//
if (accountToUse >= 0) {
// It is a SIP account, try to call service for that
try {
service.makeCallWithOptions(toCall, accountToUse.intValue(), b);
// service.xfer(callId,"01628105601");
} catch (RemoteException e) {
Log.e(THIS_FILE, "Service can't be called to make the call");
}
} else if (accountToUse != SipProfile.INVALID_ID) {
// It's an external account, find correct external account
CallHandlerPlugin ch = new CallHandlerPlugin(getActivity());
ch.loadFrom(accountToUse, toCall, new OnLoadListener() {
#Override
public void onLoad(CallHandlerPlugin ch) {
placePluginCall(ch);
}
});
}
}
But from this, I can't get the callId of the call. How can I get the callId of each call? Any advice is of great help.
Using SipCallSession to fetch a call id
// Fetch the id of the current call from a SipCallSession instance.
SipCallSession callInfo = new SipCallSession();
// Was `callinfo.getCallid()` — the variable name's case did not match the
// declaration above and would not compile; CSipSimple's SipCallSession
// exposes getCallId().
callInfo.getCallId();
I want to upload multiple images that are present in my internal storage: when I select a folder, I want to upload that folder to Google Drive. I have tried the Google Drive API for Android, https://developers.google.com/drive/android/create-file,
and I have used the code below, but it shows an error at getGoogleApiClient.
the code is
ResultCallback<DriveContentsResult> contentsCallback = new
ResultCallback<DriveContentsResult>() {
#Override
public void onResult(DriveContentsResult result) {
if (!result.getStatus().isSuccess()) {
// Handle error
return;
}
MetadataChangeSet metadataChangeSet = new MetadataChangeSet.Builder()
.setMimeType("text/html").build();
IntentSender intentSender = Drive.DriveApi
.newCreateFileActivityBuilder()
.setInitialMetadata(metadataChangeSet)
.setInitialDriveContents(result.getDriveContents())
.build(getGoogleApiClient());
try {
startIntentSenderForResult(intentSender, 1, null, 0, 0, 0);
} catch (SendIntentException e) {
// Handle the exception
}
}
}
Is there any approach to send images to Drive or Gmail?
I can't give you the exact code that does what you need, but you may try to modify the code I use for testing the Google Drive Android API (GDAA). It creates folders and uploads files to Google Drive. It is up to you whether you choose the REST or GDAA flavor; each has its specific advantages.
This covers only a half of your question, though. Selecting and enumerating files on your Android device should be covered elsewhere.
UPDATE: (per Frank's comment below)
The example I mentioned above would give you a full solution from scratch, but let's address the points of your question I could decipher:
The hurdle behind 'some error' is getGoogleApiClient(), a method that should return a GoogleApiClient object initialized prior to your code sequence. It would look something like:
// Build a GoogleApiClient scoped to the Drive file API. callerContext receives
// the connection and connection-failure callbacks, so it must implement both
// listener interfaces.
GoogleApiClient mGAC = new GoogleApiClient.Builder(appContext)
.addApi(Drive.API).addScope(Drive.SCOPE_FILE)
.addConnectionCallbacks(callerContext)
.addOnConnectionFailedListener(callerContext)
.build();
If you have this cleared out, let's assume your folder is represented by a java.io.File object. Here is the code that:
1/ enumerates the files in you local folder
2/ sets the name, content and MIME type of each file (using jpeg for simplicity here).
3/ uploads each file to the root folder of Google drive
(the create() method must run off-UI thread)
// enumerating files in a folder, uploading to Google Drive
// NOTE(review): listFiles() returns null when `folder` is not a directory or
// is unreadable — guard before iterating in real code.
java.io.File folder = ...;
for (java.io.File file : folder.listFiles()) {
    create("root", file.getName(), "image/jpeg", file2Bytes(file));  // was missing ';'
}
/******************************************************
* create file/folder in GOODrive
* @param prnId parent's ID, (null or "root") for root
* @param titl file name
* @param mime file mime type
* @param buf file contents (optional, if null, create folder)
* @return file id / null on fail
*/
static String create(String prnId, String titl, String mime, byte[] buf) {
DriveId dId = null;
if (mGAC != null && mGAC.isConnected() && titl != null) try {
// Resolve the parent folder: the Drive root, or decode the supplied id string.
DriveFolder pFldr = (prnId == null || prnId.equalsIgnoreCase("root")) ?
Drive.DriveApi.getRootFolder(mGAC):
Drive.DriveApi.getFolder(mGAC, DriveId.decodeFromString(prnId));
if (pFldr == null) return null; //----------------->>>
MetadataChangeSet meta;
if (buf != null) { // create file
// Allocate new contents, create the file, then reopen it to write the bytes.
DriveContentsResult r1 = Drive.DriveApi.newDriveContents(mGAC).await();
if (r1 == null || !r1.getStatus().isSuccess()) return null; //-------->>>
meta = new MetadataChangeSet.Builder().setTitle(titl).setMimeType(mime).build();
DriveFileResult r2 = pFldr.createFile(mGAC, meta, r1.getDriveContents()).await();
DriveFile dFil = r2 != null && r2.getStatus().isSuccess() ? r2.getDriveFile() : null;
if (dFil == null) return null; //---------->>>
r1 = dFil.open(mGAC, DriveFile.MODE_WRITE_ONLY, null).await();
if ((r1 != null) && (r1.getStatus().isSuccess())) try {
// Stream the payload into the opened contents and commit the change.
Status stts = bytes2Cont(r1.getDriveContents(), buf).commit(mGAC, meta).await();
if ((stts != null) && stts.isSuccess()) {
MetadataResult r3 = dFil.getMetadata(mGAC).await();
if (r3 != null && r3.getStatus().isSuccess()) {
dId = r3.getMetadata().getDriveId();
}
}
} catch (Exception e) { /* error handling*/ }
} else {
// No contents supplied: create a folder instead of a file.
meta = new MetadataChangeSet.Builder().setTitle(titl).setMimeType("application/vnd.google-apps.folder").build();
DriveFolderResult r1 = pFldr.createFolder(mGAC, meta).await();
DriveFolder dFld = (r1 != null) && r1.getStatus().isSuccess() ? r1.getDriveFolder() : null;
if (dFld != null) {
MetadataResult r2 = dFld.getMetadata(mGAC).await();
if ((r2 != null) && r2.getStatus().isSuccess()) {
dId = r2.getMetadata().getDriveId();
}
}
}
} catch (Exception e) { /* error handling*/ }
// Encode the DriveId to a string, or return null if any step failed.
return dId == null ? null : dId.encodeToString();
}
//-----------------------------
/** Reads the given file fully into memory; a null file or any error yields null. */
static byte[] file2Bytes(File file) {
    if (file == null) return null;
    try {
        return is2Bytes(new FileInputStream(file));
    } catch (Exception e) {
        return null;
    }
}
//----------------------------
/**
 * Drains an InputStream into a byte array.
 *
 * @param is source stream; always closed (via the buffered wrapper) before returning
 * @return the stream's bytes, or null when the stream is null, empty,
 *         or an I/O error occurs
 */
static byte[] is2Bytes(InputStream is) {
    if (is == null) return null;
    BufferedInputStream bufIS = null;
    try {
        ByteArrayOutputStream byteBuffer = new ByteArrayOutputStream();
        bufIS = new BufferedInputStream(is);
        // BUG FIX: the original reused one `buf` variable as both the scratch
        // read buffer and the result, so an exception thrown mid-read returned
        // the raw 2048-byte scratch buffer (garbage) instead of null. Separate
        // variables make every failure path return null.
        byte[] chunk = new byte[2048];
        int cnt;
        while ((cnt = bufIS.read(chunk)) >= 0) {
            byteBuffer.write(chunk, 0, cnt);
        }
        return byteBuffer.size() > 0 ? byteBuffer.toByteArray() : null;
    } catch (Exception e) {
        return null;
    } finally {
        try {
            if (bufIS != null) bufIS.close();
        } catch (Exception e) {}
    }
}
//--------------------------
/**
 * Writes buf into the DriveContents' output stream and hands the same
 * contents object back, closing the stream even when the write fails.
 */
private static DriveContents bytes2Cont(DriveContents driveContents, byte[] buf) {
    final OutputStream stream = driveContents.getOutputStream();
    try {
        stream.write(buf);
    } catch (IOException e) {
        /* error handling */
    } finally {
        try {
            stream.close();
        } catch (Exception e) {
            /* error handling */
        }
    }
    return driveContents;
}
Needless to say, the code here is taken directly from the GDAA wrapper mentioned at the beginning, so if you need to resolve any references, you will have to look up the code there.
Good Luck