Hi, I tried the getUserMedia API below to capture the Android screen, but it's not working. Any help?
const constraints = {
  audio: false, // mandatory
  video: { mandatory: { chromeMediaSource: 'screen' } }
};
const successCallback = (stream) => {
  const video = document.querySelector('video');
  video.srcObject = stream;
  video.onloadedmetadata = function (e) {
    video.play();
  };
};

const errorCallback = (error) => {
  // We don't have access to the API
  console.log('getUserMedia error: ' + error);
};

navigator.getUserMedia(constraints, successCallback, errorCallback);
I am getting the error "Requested device not found".
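For what it's worth, the mandatory chromeMediaSource: 'screen' constraint is a legacy, Chrome-only mechanism that required special flags or an extension, so getUserMedia typically fails with "Requested device not found" when it is used directly. In browsers that support it, screen capture is exposed through navigator.mediaDevices.getDisplayMedia instead. A minimal sketch, assuming a page served over HTTPS and a browser that implements getDisplayMedia (support on Android browsers is limited):

// Minimal screen-capture sketch using the standard Screen Capture API.
// getDisplayMedia must be called from a user gesture (e.g. a button click).
async function captureScreen() {
  try {
    const stream = await navigator.mediaDevices.getDisplayMedia({ video: true, audio: false });
    const video = document.querySelector('video');
    video.srcObject = stream;
    video.onloadedmetadata = () => video.play();
  } catch (err) {
    console.log('getDisplayMedia error: ' + err);
  }
}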
I am trying to connect the react-native-webrtc library to my React Native project. I can navigate to the screen normally, but when I press the button to start the call I receive the following error:
[Unhandled promise rejection: TypeError: undefined is not an object (evaluating 'navigator.mediaDevices.enumerateDevices')]
This was tested on both an Android emulator and an Android phone; both had the same issue.
const startLocalStream = async () => {
  // isFront will determine if the initial camera should face user or environment
  const isFront = true;
  let devices = await navigator.mediaDevices.enumerateDevices(); // this is where I get the error
  console.log(devices);
  const facing = isFront ? 'front' : 'environment';
  const videoSourceId = devices.find(device => device.kind === 'videoinput' && device.facing === facing);
  const facingMode = isFront ? 'user' : 'environment';
  const constraints = {
    audio: true,
    video: {
      mandatory: {
        minWidth: 500, // Provide your own width, height and frame rate here
        minHeight: 300,
        minFrameRate: 30,
      },
      facingMode,
      optional: videoSourceId ? [{ sourceId: videoSourceId }] : [],
    },
  };
  const newStream = await mediaDevices.getUserMedia(constraints);
  setLocalStream(newStream);
};
const startCall = async id => {
  const localPC = new RTCPeerConnection(configuration);
  localPC.addStream(localStream);

  const roomRef = await db.collection('rooms').doc(id);
  const callerCandidatesCollection = roomRef.collection('callerCandidates');

  localPC.onicecandidate = e => {
    if (!e.candidate) {
      console.log('Got final candidate!');
      return;
    }
    callerCandidatesCollection.add(e.candidate.toJSON());
  };

  localPC.onaddstream = e => {
    if (e.stream && remoteStream !== e.stream) {
      console.log('RemotePC received the stream call', e.stream);
      setRemoteStream(e.stream);
    }
  };

  const offer = await localPC.createOffer();
  await localPC.setLocalDescription(offer);

  const roomWithOffer = { offer };
  await roomRef.set(roomWithOffer);

  roomRef.onSnapshot(async snapshot => {
    const data = snapshot.data();
    if (!localPC.currentRemoteDescription && data.answer) {
      const rtcSessionDescription = new RTCSessionDescription(data.answer);
      await localPC.setRemoteDescription(rtcSessionDescription);
    }
  });

  roomRef.collection('calleeCandidates').onSnapshot(snapshot => {
    snapshot.docChanges().forEach(async change => {
      if (change.type === 'added') {
        let data = change.doc.data();
        await localPC.addIceCandidate(new RTCIceCandidate(data));
      }
    });
  });

  setCachedLocalPC(localPC);
};
How do I fix it? I am using the official react-native-webrtc library.
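In React Native there is no browser navigator.mediaDevices global, which is why navigator.mediaDevices.enumerateDevices evaluates to undefined. react-native-webrtc ships its own mediaDevices export, which provides both enumerateDevices and getUserMedia. A minimal sketch of the likely fix (the rest of the code stays as it is):

import { mediaDevices, RTCPeerConnection, RTCSessionDescription, RTCIceCandidate } from 'react-native-webrtc';

const startLocalStream = async () => {
  // Use the library's mediaDevices module instead of the nonexistent browser global.
  const devices = await mediaDevices.enumerateDevices();
  console.log(devices);
  // ... build the constraints as before, then:
  // const newStream = await mediaDevices.getUserMedia(constraints);
};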
I am using the react-native-audio-recorder-player library in my project to record audio.
When I start recording everything works fine, but the application closes when I call the function to stop recording the audio, audioRecorderPlayer.stopRecorder().
It works perfectly on iOS (emulator and real device) and on the Android emulator, but it doesn't work on a real Android device.
What could be the cause of this error and how could it be resolved?
"react-native": "0.63.3",
"react-native-audio-recorder-player": "^2.6.0-rc3",
Hook for controlling the recording state:
const [record, setRecord] = useState(false);
Start recording function:
async function handleRecord() {
  try {
    const permissions = await getPermissions();
    if (permissions) {
      const path = Platform.select({
        ios: `audio-${new Date().getTime()}.m4a`,
        android: `sdcard/audio-${new Date().getTime()}.mp3`
      });
      const audioSet = {
        AudioEncoderAndroid: AudioEncoderAndroidType.AAC,
        AudioSourceAndroid: AudioSourceAndroidType.MIC,
        AVEncoderAudioQualityKeyIOS: AVEncoderAudioQualityIOSType.high,
        AVNumberOfChannelsKeyIOS: 2,
        AVFormatIDKeyIOS: AVEncodingOption.aac
      };
      const result = await audioRecorderPlayer.startRecorder(path, audioSet);
      setRecord(true);
      audioRecorderPlayer.addRecordBackListener(e => {
        return;
      });
    }
  } catch (err) {
    console.warn(err); // surface the error instead of silently swallowing it
  }
}
Stop recording function:
async function onStopRecord() {
  try {
    const result = await audioRecorderPlayer.stopRecorder();
    audioRecorderPlayer.removeRecordBackListener();
    setRecord(false);
    setFiles([
      ...files,
      {
        uri: result,
        type: "audio/mpeg",
        name: `audio-${new Date().getTime()}.mp3`
      }
    ]);
  } catch (error) {
    // Alert expects string arguments, not an Error object
    Alert.alert("Error", String(error));
  }
}
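One plausible cause, offered as an assumption since the post does not include the native error: on real devices running Android 10+, scoped storage blocks direct writes to paths like sdcard/..., so the recorder may never produce a file and stopRecorder fails. Recording into an app-private directory sidesteps that. A sketch using react-native-fs to build the path (an extra dependency, not part of the original code):

import { Platform } from 'react-native';
import RNFS from 'react-native-fs'; // assumed addition; any way to get an app-private dir works

// Record into the app's cache directory, which needs no storage permission.
const recordPath = Platform.select({
  ios: `audio-${Date.now()}.m4a`,
  android: `${RNFS.CachesDirectoryPath}/audio-${Date.now()}.mp3`,
});

// const result = await audioRecorderPlayer.startRecorder(recordPath, audioSet);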
I am trying to stream a video element via my phone camera using WebRTC. I do this as follows (snippets):
<video id="yourVideo" autoplay muted playsinline></video>
var yourVideo = document.getElementById("yourVideo");
// ...
navigator.mediaDevices.getUserMedia({ audio: false, video: true })
  .then(function (stream) {
    console.log(stream);
    yourVideo.srcObject = stream;
    pc.addStream(stream);
  })
  .catch(function (error) {
    console.log(error);
  });
This works fine in the browser and my video/camera is displayed. However, on the phone it returns the error DOMException. I cannot find any information that explains this.
I am running it on Ionic v1.x:
ionic cordova run android
When I log navigator.mediaDevices, this is what I see: [screenshot omitted]
Is it perhaps permission related? If so, how can I fix this?
You will have to first get the device source and then request the stream. Try it this way:
var videoElement = document.getElementById("yourVideo");
var videoSrc = undefined;

navigator.mediaDevices.enumerateDevices()
  .then(getDevices)
  .then(getStream)
  .catch(handleError);

// Pick the first available video input device.
function getDevices(deviceInfos) {
  for (var i = 0; i !== deviceInfos.length; ++i) {
    var deviceInfo = deviceInfos[i];
    if (deviceInfo.kind === 'videoinput') {
      videoSrc = deviceInfo.deviceId;
      break;
    }
  }
}

// Request a stream from exactly that device.
function getStream() {
  navigator.mediaDevices.getUserMedia({
    video: { deviceId: { exact: videoSrc } }
  })
    .then(gotStream)
    .catch(handleError);
}

function gotStream(stream) {
  videoElement.srcObject = stream;
}

function handleError(error) {
  console.log('Error: ', error);
}
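Since the question asks whether it is permission-related: in a Cordova WebView, getUserMedia can also reject with a DOMException when the native CAMERA permission has not been granted at the OS level. A sketch using cordova-plugin-android-permissions, assuming that plugin is installed (it is not part of the original project):

// Ask for the native CAMERA permission before calling getUserMedia.
var permissions = cordova.plugins.permissions;
permissions.requestPermission(
  permissions.CAMERA,
  function (status) {
    if (status.hasPermission) {
      // safe to call navigator.mediaDevices.getUserMedia(...) now
    }
  },
  function () {
    console.log('Camera permission was denied');
  }
);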
I am trying to use the $cordovaFileTransfer plugin from ngCordova to upload images in my Ionic app.
However, the images never seem to upload. The app is running on my Android phone and I am debugging with the Chrome remote inspector; the network tab does not show any request for the upload.
This method gets images from the device:
$scope.getImages = function () {
  var options = {
    maximumImagesCount: 2,
    width: 800,
    height: 800,
    quality: 80
  };
  $cordovaImagePicker.getPictures(options)
    .then(function (results) {
      for (var i = 0; i < results.length; i++) {
        $scope.queue.push({
          filepath: results[i],
          progress: 0.00
        });
      }
      $scope.$emit('process:queue');
    }, function (error) {
      // error getting photos
    });
};
And I am handling the 'process:queue' event with this method:
$scope.startUploadQueue = function () {
  var server = encodeURI(serviceUrl());
  var options = {
    fileKey: "image",
    httpMethod: 'PUT',
    headers: {
      "Authorization": "Bearer " + $user.getToken()
    }
  };
  angular.forEach($scope.queue, function (item) {
    $cordovaFileTransfer.upload(server, item.filepath, options, true)
      .then(function (result) {
        console.log(result);
        $scope.media.push(result.data);
      }, function (error) {
        // upload failed
      }, function (progress) {
        item.progress = (progress.loaded / progress.total) * 100;
      });
  });
};
Am I doing something incorrectly? Any help will be appreciated.
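One thing worth checking, offered as an assumption since the listener is not shown in the post: $scope.$emit('process:queue') only dispatches the event; startUploadQueue will never run unless something is actually subscribed to it. A minimal wiring sketch:

// Subscribe to the event so the queue handler actually runs.
$scope.$on('process:queue', function () {
  $scope.startUploadQueue();
});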
I'm new to Ionic and Cordova, so I'm sure I'm missing something basic, but my problem is that a packaged APK does not play sounds on an Android device. I can get the sound to play in the Ripple emulator just fine with the following code:
.controller('MainCtrl', ['$scope', function ($scope) {
  $scope.playStartBell = function () {
    var media = new Media('media/startBell.mp3', function () {
      console.log('good');
    }, function (err) {
      console.log('bad: ', err);
    });
    media.play();
  };

  $scope.playStopBell = function () {
    var media = new Media('media/stopBell.mp3', function () {
      console.log('good');
    }, function (err) {
      console.log('bad: ', err);
    });
    media.play();
  };
}])
I've used Cordova to install the media plugin: $ cordova plugin add org.apache.cordova.media
According to this SO post, a value needs to be added to the config.xml, but I'm not sure how to do it properly for Ionic/Cordova.
It turns out that you have to specify the path starting with the /android_asset/www prefix, like so:
/android_asset/www/
So changing my code to the following worked. Note you'll want to detect which device you're running on to determine the appropriate location, as sketched after the code.
.controller('MainCtrl', ['$scope', function ($scope) {
  $scope.playStartBell = function () {
    var media = new Media('/android_asset/www/media/startBell.mp3', function () {
      console.log('good');
    }, function (err) {
      console.log('bad: ', err);
    });
    media.play();
  };

  $scope.playStopBell = function () {
    var media = new Media('/android_asset/www/media/stopBell.mp3', function () {
      console.log('good');
    }, function (err) {
      console.log('bad: ', err);
    });
    media.play();
  };
}])
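For the device detection mentioned above, one common approach is to branch on device.platform when building the media path. A sketch, assuming cordova-plugin-device is installed so that window.device exists after deviceready:

// Prefix the path only on Android; other platforms resolve 'media/...' relative to www.
function mediaPath(file) {
  var isAndroid = window.device && device.platform === 'Android';
  return (isAndroid ? '/android_asset/www/' : '') + file;
}

var media = new Media(mediaPath('media/startBell.mp3'));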