I am trying to play YouTube videos on Android devices using a WebView and the YouTube iframe API. While testing on 4.2 devices it worked on a Nexus 4 running 4.2.2, but when I tried it on a Sony Xperia L it shows the buffering indicator and the video never loads (screenshots attached). What could fix this? Any suggestions?
My code is as follows.
JAVA CODE
wvVideo = (WebView)findViewById(R.id.wvVideo);
wvVideo.setWebChromeClient(new WebChromeClient());
WebSettings webSettings = wvVideo.getSettings();
webSettings.setJavaScriptEnabled(true);
webSettings.setMediaPlaybackRequiresUserGesture(false);
wvVideo.addJavascriptInterface(new WebAppInterface(this), "Android");
wvVideo.loadUrl("http://test.com/iframeYouTube.html?videoid="+videoURL);
wvVideo.loadUrl("javascript:playVideo();");
XML
<WebView
android:id="#+id/wvVideo"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_centerInParent="true"
android:background="#212121"></WebView>
HTML
<!DOCTYPE html>
<html style="height:100%;margin:0;padding:0;width:100%">
<head>
<meta name="viewport" content="width=device-width, initial-scale=1.0">
</head>
<body style="background:#212121;height:100%;margin:0;padding:0">
<!-- 1. The <iframe> (and video player) will replace this <div> tag. -->
<div id="player" style="margin:0;padding:0"></div>
<script>
var Query = function () {
var query_string = {};
var query = window.location.search.substring(1);
var vars = query.split("&");
for (var i=0;i<vars.length;i++) {
var pair = vars[i].split("=");
if (typeof query_string[pair[0]] === "undefined") {
query_string[pair[0]] = pair[1];
} else if (typeof query_string[pair[0]] === "string") {
var arr = [ query_string[pair[0]], pair[1] ];
query_string[pair[0]] = arr;
} else {
query_string[pair[0]].push(pair[1]);
}
}
return query_string;
} ();
var tag = document.createElement('script');
tag.src = "https://www.youtube.com/iframe_api";
var firstScriptTag = document.getElementsByTagName('script')[0];
firstScriptTag.parentNode.insertBefore(tag, firstScriptTag);
var player;
function onYouTubeIframeAPIReady() {
player = new YT.Player('player', {
height:'100%',width:'100%',
videoId: Query.videoid,
playerVars: { 'autoplay': 0, 'controls': 0, 'showinfo':0, 'modestbranding':1},
events: {
'onReady': onPlayerReady,
'onStateChange': onPlayerStateChange
}
});
}
function onPlayerReady(event) {
cueCheck();
timerFunction();
videoLength();
}
var done = false;
function onPlayerStateChange(event) {
Android.playState(event.data+"");
if (event.data == YT.PlayerState.PLAYING && !done) {
done = true;
}
}
function cueCheck()
{
Android.playState("cued");
}
function timerFunction(){
console.log("function");
Android.updateTimer(player.getCurrentTime()+"");
setTimeout(timerFunction, 20);
}
function videoLength(){
Android.getVideoLength(player.getDuration()+"");
}
function seekTo(val) {
player.seekTo(val);
// player.playVideo();
}
function pauseVideo(){
player.pauseVideo()
}
function playVideo(){
player.playVideo()
}
</script>
</body>
</html>
String html = getHTML(url);
MainActivity.webview.getSettings().setJavaScriptEnabled(true);
MainActivity.webview.getSettings().setPluginState(PluginState.ON);
final String mimeType = "text/html";
final String encoding = "UTF-8";
MainActivity.webview.setWebViewClient(new WebViewClient());
MainActivity.webview.setWebChromeClient(new WebChromeClient());
webview.loadDataWithBaseURL("", html, mimeType,
encoding, "");
public static String getHTML(String str) {
String html = "<iframe class=\"youtube-player\" style=\"overflow:hidden; width: 100%; height: 95%; scrolling=\"no\" padding:0px; margin:0px\" id=\"ytplayer\" type=\"text/html\" src=\"http://www.youtube.com/embed/"
+ str
+ "?html5=1&autoplay=1 & fs=0\" frameborder=\"0\" >\n"
+ "</iframe>\n ";
return html;
}
I faced the same issue, and the only thing I can say is that the error is related to the Android version. In particular, as I posted here:
On Android 4 versions autoplay doesn't work; the user has to press the play button inside the iframe to start the video. Specifically:
On Android 4.1 it plays the audio but not the video.
On Android 4.3 I tested devices that play audio but no video, and devices that play nothing at all.
On Android 4.4 nothing plays at all.
From Android 5 onward everything works fine.
I spent a lot of time trying to get around it, but I never found a way :(
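As a minimal sketch (not a guaranteed fix), you can at least make sure the JavaScript call runs only after the page has finished loading and is tied to an explicit user tap. Here btnPlay is a hypothetical native Button, and playVideo() is the helper from the question's iframeYouTube.html; note that on some 4.x WebViews even this may not satisfy the in-iframe gesture requirement described above.
wvVideo.setWebViewClient(new WebViewClient() {
    @Override
    public void onPageFinished(WebView view, String url) {
        btnPlay.setEnabled(true); // only allow the tap once the iframe page is ready
    }
});
btnPlay.setOnClickListener(new View.OnClickListener() {
    @Override
    public void onClick(View v) {
        if (Build.VERSION.SDK_INT >= 19) {
            wvVideo.evaluateJavascript("playVideo();", null); // API 19+
        } else {
            wvVideo.loadUrl("javascript:playVideo();");
        }
    }
});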
Related
I am trying to play a video from YouTube and get the current time via the YT API.
The video plays successfully and functions inside the HTML content work too,
but I can't figure out how to get data back from this web-view element.
I want to be informed about events from the web-view;
this is my code:
<template>
<Page @loaded="pageLoaded">
<GridLayout>
<web-view id="webView"></web-view>
</GridLayout>
</Page>
</template>
let webViewInterfaceModule = require('nativescript-webview-interface');
@Component
export default class someComponent extends Vue {
oWebViewInterface;
pageLoaded(args){
let page = args.object;
this.setupWebViewInterface(page);
}
setupWebViewInterface(page){
let webView = page.getViewById('webView');
this.oWebViewInterface = new webViewInterfaceModule.WebViewInterface(webView, this.html);
this.oWebViewInterface.on( '*', function(eventData){
console.log("some change!!"); // but never fires!
});
}
}
and I define this.html as:
html = `
<!DOCTYPE html>
<html>
<body>
<div id="player"></div>
<script>
var tag = document.createElement('script');
tag.src = "https://www.youtube.com/iframe_api";
var firstScriptTag = document.getElementsByTagName('script')[0];
firstScriptTag.parentNode.insertBefore(tag, firstScriptTag);
var player;
function onYouTubeIframeAPIReady() {
player = new YT.Player('player', {
height: '280',
width: '100%',
videoId: 'k3On6DLPGLA',
events: {
'onReady': onPlayerReady,
'onStateChange': onPlayerStateChange
}
});
}
function onPlayerReady(event) {
event.target.playVideo();
}
var done = false;
function onPlayerStateChange(event) {
console.log("state changed!"); // it fires!
}
function stopVideo() {
player.stopVideo();
}
<\/script>
<\/body>
<\/html>
`;
but I just get results like these:
chromium: [ERROR:web_contents_delegate.cc(224)] WebContentsDelegate::CheckMediaAccessPermission: Not supported.
chromium: [INFO:CONSOLE(35)] "state changed!", source: ..../ (35)
OK, I got the answer. There were a few issues here:
#1. Including nativescript-webview-interface.js:
I need to copy this file:
cp node_modules/nativescript-webview-interface/www/nativescript-webview-interface.js app/assets/www/lib/
#2. Permissions!
#1 causes another problem: net::ERR_ACCESS_DENIED.
To solve this one I add this:
setupWebViewInterface(page){
let webView = page.getViewById('webView');
webView.android.getSettings().setAllowFileAccess(true);
}
and now I have access to the folder, so I moved the content of the this.html variable to a new file: /assets/www/index.html
#3. Setting src
Since we now have access to the folder, we need to (re)set the src of the web-view, so I add this to the setupWebViewInterface function:
this.oWebViewInterface = new webViewInterfaceModule.WebViewInterface( webView, '~/assets/www/index.html' );
note: I removed src from <web-view /> since it causes a problem (if the old and new src are the same, it will not reload!)
#4. Defining variables:
In the HTML file I need to add this line:
var oWebViewInterface = window.nsWebViewInterface;
#5. Defining the listener:
I add oWebViewInterface.emit('anyEvent', event); inside this function:
function onPlayerStateChange(event) {
console.log("state changed!");
oWebViewInterface.emit('anyEvent', event);
}
Now it works :)
It is easy to create a custom play button for an embedded YouTube video using their API; however, it seems that the button cannot start the video in a mobile environment, especially Android. When you click it there, the screen simply stays black forever with an ever-spinning loading animation.
<div id="wrapper">
<div id="video">
</div>
<a href="#" id="play">Play</a>
</div>
<script>
var player, tag = document.createElement('script');
tag.src = "https://www.youtube.com/iframe_api";
var firstScriptTag = document.getElementsByTagName('script')[0];
firstScriptTag.parentNode.insertBefore(tag, firstScriptTag);
window.onYouTubeIframeAPIReady = function() {
player = new YT.Player('video', {
height: '320',
width: '240',
videoId: '5oxIQe3XngY',
playerVars: {
modestbranding: 1,
controls: 0,
rel: 0
}
});
player.addEventListener('onReady', function(e) {
document.getElementById('play').onclick = function() {
player.playVideo();
};
});
player.addEventListener('onStateChange', function(e) {
if (e.data == 1) {
document.getElementById('play').style.display = 'none';
}
});
};
</script>
Here's the fiddle: http://jsfiddle.net/HArsN/
Any solution to that? Is it necessary to launch the video through the player's own bundled button only?
When I use the HTML5 getUserMedia API to access a camera on an Android (4.0) phone, it opens the front camera, but I want to open the back camera. Sample code:
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width">
<title>Html5 Mobile Carema</title>
<script src="js/jquery.js"></script>
<script>
$(document).ready(init);
function init() {
try {
window.URL = window.URL || window.webkitURL || window.msURL
|| window.oURL;
navigator.getUserMedia = navigator.getUserMedia
|| navigator.webkitGetUserMedia
|| navigator.mozGetUserMedia || navigator.msGetUserMedia;
navigator.getUserMedia({
video : true
}, successsCallback, errorCallback);
} catch (err) {
// Tries it with old spec of string syntax
navigator.getUserMedia('video', successsCallback, errorCallback);
}
$(":button").click(function() {
slap();
});
}
function slap() {
var video = $("#myVideo")[0];
var canvas = capture(video);
$("#result").empty();
$("#result").append(canvas);
//alert();
var imgData = canvas.toDataURL('image/png;base64,');
//var imgData = canvas.toDataURL("image/png");
var imgData = imgData.substring(22);
//blb = dataURItoBlob(imgData);
//sendMsg(blb);
}
function errorCallback(err) {
}
function successsCallback(stream) {
$("#myVideo").attr("src", window.webkitURL.createObjectURL(stream));
}
function capture(video) {
var canvas = document.createElement('canvas');
var width = video.videoWidth;
var height = video.videoHeight;
canvas.width = video.videoWidth;
canvas.height = video.videoHeight;
var context = canvas.getContext('2d');
context.drawImage(video, 0, 0, 160, 120);
return canvas;
}
</script>
</head>
<body>
<video id="myVideo" autoplay="autoplay"></video>
<br> <input type="button" value="capture" />
<br><div id="result" style="width: 145px"></div>
<div>
<p id="resultMsg" style="color: red"></p>
<p id="decodeTime" style="color: green"></p>
</div>
</body>
</html>
I don't know how to access a specific camera on an Android phone. Does anyone know? Thanks.
There is now the ability to specify a camera in the latest specification with the facingMode property: http://www.w3.org/TR/mediacapture-streams/#idl-def-VideoFacingModeEnum
This property is an optional part of the MediaStreamConstraints object that is the first argument of the getUserMedia method.
Here's a simplified example from the spec:
var supports = navigator.mediaDevices.getSupportedConstraints();
if (!supports["facingMode"]) {
// Handle lack of browser support if necessary
}
var gotten = navigator.mediaDevices.getUserMedia({
video: {
facingMode: {exact: "environment"}
}
});
The value environment means the back camera of the device. Other values are user, left and right.
Note that support for this varies depending on the browser/browser version.
See function gotSources(sourceInfos) in the code below
<!--
Based on Motion Detector Demo Created by Ákos Nikházy.
If you use this app please link this demo http://motion-detector.nikhazy-dizajn.hu/
-->
<!DOCTYPE html>
<html>
<head>
<meta charset=utf-8 />
<title>Frame capture demo</title>
</head>
<body>
<header>
<h1>Motion Detection</h1>
<h4>with HTML5 API using .getUserMedia()</h4>
</header>
<video autoplay></video>
<hr>
<canvas id="savePhoto"></canvas>
<script>
function hasGetUserMedia() {
//returns true if supported
return !!(navigator.getUserMedia || navigator.webkitGetUserMedia
|| navigator.mozGetUserMedia || navigator.msGetUserMedia);
}
function onSuccess(stream) {
//If we can stream from camera.
var source;
//Get the stream. This goes to the video tag
if (window.URL) {
source = window.URL.createObjectURL(stream);
} else if (window.webkitURL) {
source = window.webkitURL.createObjectURL(stream);
} else {
source = stream; // Opera and Firefox
}
//Set up video tag
video.autoplay = true;
video.src = source;
//We try to find motion in every X second
setInterval(function() {
motionDetector();
}, sampling);
}
function onError() {
//if we fail (not supported, no camera etc.)
alert('No stream, no win. Refresh.');
}
function saveImage(canvasToSave) {
//create image from canvas
dataUrl = canvasToSave.toDataURL();
imageFound = document.createElement('img');
imageFound.src = dataUrl;
document.body.appendChild(imageFound);
}
function motionDetector() {
ctxSave.drawImage(video, 0, 0, savePhoto.width, savePhoto.height);
}
/*After all those functions lets start setting up the program*/
//Set up elements. Should be a ini() but I don't care right now
var video = document.querySelector('video'); //the video tag
var savePhoto = document.getElementById('savePhoto'); //the possible saved image's canvas
var ctxSave = savePhoto.getContext('2d'); //the latest image from video in full size and color
var sampling = 1000; //how much time needed between samples in milliseconds
var videoSourceInfo = null;
//We need this so we can use the videoWidth and ...Height, also we setup canvas sizes here, after we have video data
video.addEventListener("loadedmetadata", function() {
console.log(video.videoWidth + ":" + video.videoHeight)
savePhoto.width = video.videoWidth;
savePhoto.height = video.videoHeight;
});
function start() { //Start the whole magic
if (hasGetUserMedia()) {
//it is working?
navigator.getUserMedia
|| (navigator.getUserMedia = navigator.mozGetUserMedia
|| navigator.webkitGetUserMedia
|| navigator.msGetUserMedia);
var videoSourceInfoId = videoSourceInfo.id;
var constraints = {
video : {
optional: [{sourceId: videoSourceInfoId}]
},
toString : function() {
return "video";
}
};
navigator.getUserMedia(constraints, onSuccess, onError);
} else {
//no support
alert('getUserMedia() is not supported in your browser. Try Chrome.');
}
}
function gotSources(sourceInfos) {
for (var i = sourceInfos.length-1 ; i >= 0; i--) { // get last camera index (supposed to back camera)
var sourceInfo = sourceInfos[i];
if (sourceInfo.kind === 'video') {
videoSourceInfo = sourceInfo;
console.log('SourceId: ', videoSourceInfo.id);
start();
break;
} else {
console.log('Some other kind of source: ', sourceInfo);
}
}
}
if (typeof MediaStreamTrack === 'undefined') {
alert('This browser does not support MediaStreamTrack.\n\nTry Chrome Canary.');
} else {
MediaStreamTrack.getSources(gotSources); // async task
}
</script>
</body>
</html>
Hi, I think this will work for you:
<script>
var gum = mode =>
navigator.mediaDevices.getUserMedia({video: {facingMode: {exact: mode}}})
.then(stream => (video.srcObject = stream))
.catch(e => log(e));
var stop = () => video.srcObject && video.srcObject.getTracks().forEach(t => t.stop());
var log = msg => div.innerHTML += msg + "<br>";
</script>
<button onclick="stop();gum('user')">Front</button>
<button onclick="stop();gum('environment')">Back</button>
<div id="div"></div><br>
<video id="video" height="320" autoplay></video>
<script src="https://webrtc.github.io/adapter/adapter-latest.js"></script>
For more on facingMode, see:
https://github.com/webrtcHacks/adapter/issues/820
https://developer.mozilla.org/en-US/docs/Web/API/MediaTrackConstraints/facingMode
public class MainActivity extends Activity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
WebView wv=(WebView)findViewById(R.id.webView1);
wv.getSettings().setJavaScriptEnabled(true);
wv.getSettings().setAppCacheEnabled(true);
wv.getSettings().setDomStorageEnabled(true);
// how plugin is enabled change in API 8
if (Build.VERSION.SDK_INT < 8) {
wv.getSettings().setPluginsEnabled(true);
} else {
wv.getSettings().setPluginState(PluginState.ON);
}
String venkat="<iframe src=\"http://player.vimeo.com/video/27244727?portrait=0&color=333\" width=\"WIDTH\" height=\"HEIGHT\" frameborder=\"0\" webkitAllowFullScreen mozallowfullscreen allowFullScreen></iframe>";
wv.loadData(venkat,"text/html","UTF-8");
}
}
After researching on Google I wrote the above code, but it is not working. No errors occur, but when I click the play button a progress bar is displayed for some time, then it disappears and the play button is shown again. Could anyone please suggest how to solve this problem?
Do this:
<iframe src="//player.vimeo.com/video/VIDEO_ID"
width="515"
height="340"
frameborder="0"
webkitallowfullscreen mozallowfullscreen allowfullscreen>
</iframe>
You have to enable the hardware acceleration feature in the Android manifest. Vimeo serves HTML5 video, so hardware acceleration is needed for it to work on all devices.
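For reference, a minimal sketch of what enabling hardware acceleration usually means; the manifest attribute (API 11+) is the common approach, and the window flag is a per-activity alternative:
// In AndroidManifest.xml (API 11+):
//   <application android:hardwareAccelerated="true" ...>
// Or per window in the Activity, which must be set before setContentView():
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    getWindow().setFlags(
            WindowManager.LayoutParams.FLAG_HARDWARE_ACCELERATED,
            WindowManager.LayoutParams.FLAG_HARDWARE_ACCELERATED);
    setContentView(R.layout.activity_main);
    // ... WebView setup as in the answer above ...
}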
And here is a Vimeo player snippet which works perfectly:
const TheVideo = (props) => {
const getVimeoUrl = (uri: string): string => {
const value = uri.split("/");
return value[value.length - 1];
};
return (<>
<iframe
src={
props.specificVideo?.url
?.split("/")[2]
.startsWith("vimeo")
? `https://player.vimeo.com/video/${getVimeoUrl(
props.specificVideo?.url
)}`
: props.specificVideo?.url ?? ""
}
width="{video_width}"
height="{video_height}"
frameBorder="0"
title={props.specificVideo?.title ?? ""}
className="specvideoview-frame"
></iframe>
</>)
}
webView = (WebView)findViewById(R.id.presentation_webview);
webView.getSettings().setJavaScriptEnabled(true);
webView.getSettings().setAllowFileAccess(true);
webView.getSettings().setPluginsEnabled(true);
webView.setScrollBarStyle(View.SCROLLBARS_OUTSIDE_OVERLAY);
DisplayMetrics displaymetrics = new DisplayMetrics();
getWindowManager().getDefaultDisplay().getMetrics(displaymetrics);
int height = displaymetrics.heightPixels;
int wwidth = displaymetrics.widthPixels;
Log.e("h & w",wwidth+"-"+height);
String data_html = "<!DOCTYPE HTML> <html xmlns=\"http://www.w3.org/1999/xhtml\" xmlns:og=\"http://opengraphprotocol.org/schema/\" xmlns:fb=\"http://www.facebook.com/2008/fbml\"> <head></head> <body style=\"margin:0 0 0 0; padding:0 0 0 0;\"> <iframe width='"+wwidth+"' height='"+height+"' src=\"http://player.vimeo.com/video/"+VIDEO_ID+"\" frameborder=\"0\"></iframe> </body> </html> ";
webView.setWebViewClient(new MyWebViewClient());
webView.loadDataWithBaseURL("http://vimeo.com", data_html, "text/html", "UTF-8", null);
Try the above code and use device width and height to play the video.
I am trying to create an offline video player that downloads video content from my site for later offline viewing via an HTML5 video element. The code below works fine in Chrome on the desktop, but not on mobile (Nexus S smartphone and Nexus 7 tablet, both on 4.1, since only that runs Chrome, which is required for the filesystem API). I am using the filesystem API, which is supported by Chrome on both desktop and mobile.
I have confirmed it is correctly storing the file on the mobile device and I can retrieve the file correctly, but for some reason, after retrieving the video from the local filesystem, Chrome does not want to play it. This is true whether I use the HTML5 video element or navigate directly to the filesystem URL. When I use the HTML5 video element it returns the error media_err_not_supported. I have confirmed that the device can play the video if I navigate directly to it on my server (without first storing it using the filesystem API), so the issue is not a codec or video format problem. I am also using the video/mp4 MIME type in both cases.
Again, this works on desktop but not mobile. Any ideas?
Here is the code we are using:
<!DOCTYPE html >
<html>
<head>
<script src="http://ajax.googleapis.com/ajax/libs/jquery/1.7.2/jquery.min.js" type="text/javascript"> </script>
<script type="text/javascript">
var _fs;
var filename = "test3.mp4";
var diskSpaceRequired = 10 * 1024 * 1024;
$(document).ready(function () {
window.requestFileSystem = window.requestFileSystem || window.webkitRequestFileSystem;
function onInitFs(fs) {
_fs = fs;
getVideo(fs);
}
if (!!window.requestFileSystem) {
window.webkitStorageInfo.requestQuota(
window.webkitStorageInfo.PERSISTENT,
diskSpaceRequired, // amount of bytes you need
function () { },
function () {}
);
window.requestFileSystem(window.PERSISTENT, diskSpaceRequired, onInitFs, function () { alert('error'); });
} else {
alert('not supported');
}
$("#play").on('click', playVideo);
$("#ourVideo").on('error', function(e) { console.log('ERROR!!!', e, arguments);
console.log($("#ourVideo")[0].error);
});
});
function playVideo() {
_fs.root.getFile(filename, {}, function (fileEntry) {
$("#ourVideo").attr('src', fileEntry.toURL());
fileEntry.file(function (file) {
var reader = new FileReader();
reader.onloadend = function (e) {
$("#ourVideo").get(0).play();
};
reader.readAsText(file);
}, errorHandler);
}, errorHandler);
}
function getVideo(fs) {
fs.root.getFile(filename, { create: true }, function (fileEntry) {
fileEntry.createWriter(function (fileWriter) {
fetchResource(fileWriter);
}, errorHandler);
}, errorHandler);
}
function errorHandler(e) {
console.log('error', e);
}
function fetchResource(fileWriter) {
console.log('fetchresource');
var xhr = new XMLHttpRequest();
xhr.responseType = "arraybuffer";
xhr.open("GET", "http://mydomain.com/trailer.mp4", true);
xhr.onload = function(e) {
if (this.status == 200) {
var bb = new WebKitBlobBuilder();
bb.append(this.response);
var blob = bb.getBlob("video\/mp4");
fileWriter.write(blob);
} else {
console.log(this.status);
}
};
xhr.send();
}
</script>
<title>foo</title>
</head>
<body>
<input type="button" value="Play Video" id="play"/>
<video id="ourVideo" controls="">
<source id="vidSource" type="video/mp4"/>
</video>
</body>
</html>
It looks like Android Chrome couldn't access the Android file system correctly. For that you can use a NanoHttpd server to serve local files from the device or sdcard.
For the NanoHttpd server, use that one class in your application and pass the media file location as http://localhost:8081/sdcard/(your_media_location).mp4,
or get NanoHttpd from https://gist.github.com/1893396.
I think this is a more reliable way to access sdcard files than requesting them directly.
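A minimal sketch of such a class, assuming the NanoHTTPD 2.x API (fi.iki.elonen.NanoHTTPD); the single-file version from the gist has a different serve() signature, and the class name and example path here are just illustrative:
import java.io.FileInputStream;
import java.io.IOException;
import fi.iki.elonen.NanoHTTPD;

public class LocalVideoServer extends NanoHTTPD {

    public LocalVideoServer() {
        super(8081); // listens on http://localhost:8081
    }

    @Override
    public Response serve(IHTTPSession session) {
        // Map the request URI straight onto a file path, e.g.
        // http://localhost:8081/sdcard/Movies/trailer.mp4 -> /sdcard/Movies/trailer.mp4
        // (no path sanitization here; a sketch only, not production code)
        String path = session.getUri();
        try {
            FileInputStream fis = new FileInputStream(path);
            // Chunked response so the <video> element can start playing while streaming
            return newChunkedResponse(Response.Status.OK, "video/mp4", fis);
        } catch (IOException e) {
            return newFixedLengthResponse(Response.Status.NOT_FOUND, MIME_PLAINTEXT, "Not found");
        }
    }
}
Usage would be roughly new LocalVideoServer().start() before loading the page in the WebView; start() throws IOException, so wrap it in a try/catch.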
Try changing the HTML part to:
</head>
<body>
<input type="button" value="Play Video" id="play"/>
<video id="ourVideo" controls="">
<source src="video1.mp4" type= "video/mp4">
<source src="video1.ogv" type= "video/ogg">
</video>
</body>
You can convert your mp4 to ogv using
http://video.online-convert.com/convert-to-ogg
and put the ogv file in the same location as the mp4.
For more information check out these:
http://www.broken-links.com/2010/07/08/making-html5-video-work-on-android-phones/
HTML5 <video> element on Android does not play