I have a very specific problem with jPlayer. I need to run it on Android/iPhone, which is no problem thanks to the "jPlayer Android Fix" in the documentation. However, I also need it to play a ShoutCast stream served as an .mp3 file, not a bare ShoutCast stream URL. All the documentation I've read points to a generic URL/IP/port, but not to a file.
I have got it all working with a static MP3 using the following code:
<script type="text/javascript">
//<![CDATA[
$(document).ready(function() {
  var id = "#jquery_jplayer_1";
  var bubble = {
    mp3: "http://www.jplayer.org/audio/mp3/Miaow-07-Bubble.mp3"
  };
  var options = {
    swfPath: "/jplayer2-4-0",
    supplied: "mp3",
    wmode: "window",
    smoothPlayBar: true,
    keyEnabled: true
  };
  var myAndroidFix = new jPlayerAndroidFix(id, bubble, options);
  $('#setMedia-Bubble').click(function() {
    myAndroidFix.setMedia(bubble);
    $('.jp-title ul li').text('Bubble');
  });
  $('#setMedia-Bubble-play').click(function() {
    myAndroidFix.setMedia(bubble).play();
    $('.jp-title ul li').text('Bubble');
  });
  $("#jplayer_inspector").jPlayerInspector({ jPlayer: $("#jquery_jplayer_1") });
});
//]]>
</script>
... but the moment I change
mp3:"http://www.jplayer.org/audio/mp3/Miaow-07-Bubble.mp3"
to
mp3:"http://live.station.ca:8000/stream.mp3"
... I get nothing. I've even tried
mp3:"http://live.station.ca:8000/stream.mp3;stream/1"
Can anybody help? Thanks in advance!
I am trying to play a YouTube video and get the current time via the YT API.
The video plays successfully, and functions on the HTML content work too,
but I can't figure out how to get data from this webView element.
I want to be informed about events from the webView.
This is my code:
<template>
  <Page @loaded="pageLoaded">
    <GridLayout>
      <web-view id="webView"></web-view>
    </GridLayout>
  </Page>
</template>
let webViewInterfaceModule = require('nativescript-webview-interface');

@Component
export default class someComponent extends Vue {
  oWebViewInterface;

  pageLoaded(args) {
    let page = args.object;
    this.setupWebViewInterface(page);
  }

  setupWebViewInterface(page) {
    let webView = page.getViewById('webView');
    this.oWebViewInterface = new webViewInterfaceModule.WebViewInterface(webView, this.html);
    this.oWebViewInterface.on('*', function (eventData) {
      console.log("some change!!"); // but never fires!
    });
  }
}
and I define this.html as:
html = `
<!DOCTYPE html>
<html>
  <body>
    <div id="player"></div>
    <script>
      var tag = document.createElement('script');
      tag.src = "https://www.youtube.com/iframe_api";
      var firstScriptTag = document.getElementsByTagName('script')[0];
      firstScriptTag.parentNode.insertBefore(tag, firstScriptTag);

      var player;
      function onYouTubeIframeAPIReady() {
        player = new YT.Player('player', {
          height: '280',
          width: '100%',
          videoId: 'k3On6DLPGLA',
          events: {
            'onReady': onPlayerReady,
            'onStateChange': onPlayerStateChange
          }
        });
      }

      function onPlayerReady(event) {
        event.target.playVideo();
      }

      var done = false;
      function onPlayerStateChange(event) {
        console.log("state changed!"); // it fires!
      }

      function stopVideo() {
        player.stopVideo();
      }
    <\/script>
  <\/body>
<\/html>
`;
but I just get these results:
chromium: [ERROR:web_contents_delegate.cc(224)] WebContentsDelegate::CheckMediaAccessPermission: Not supported.
chromium: [INFO:CONSOLE(35)] "state changed!", source: ..../ (35)
OK, I got the answer. There are a few issues here:
#1. Including nativescript-webview-interface.js:
I need to copy this file into the app's assets:
cp node_modules/nativescript-webview-interface/www/nativescript-webview-interface.js app/assets/www/lib/
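(For the bridge to work, the page loaded inside the web-view also has to load that copied script. Assuming the HTML ends up under app/assets/www/ as described below, a script tag like this would do it; the exact src path is an assumption based on the copy destination above:)
<script src="lib/nativescript-webview-interface.js"></script>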
#2. Permissions!
But #1 causes another problem: net::ERR_ACCESS_DENIED.
To solve this one I add this:
setupWebViewInterface(page) {
  let webView = page.getViewById('webView');
  webView.android.getSettings().setAllowFileAccess(true);
}
Now I have access to the folder, so I moved the content of the this.html variable to a new file: /assets/www/index.html
#3. Setting src
Now that we have access to the folder, we need to (re)set the src of the web-view, so I add this line to the setupWebViewInterface function:
this.oWebViewInterface = new webViewInterfaceModule.WebViewInterface( webView, '~/assets/www/index.html' );
Note: I removed src from <web-view /> since it causes a problem (if the old and new src are the same, it will not reload).
#4. Defining variables:
In the HTML file I need to add this line:
var oWebViewInterface = window.nsWebViewInterface;
#5. Defining the listener:
I add this call: oWebViewInterface.emit('anyEvent', event);
inside this function:
function onPlayerStateChange(event) {
  console.log("state changed!");
  oWebViewInterface.emit('anyEvent', event);
}
Now it works :)
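For completeness, a minimal sketch of the NativeScript side that receives this event, assuming the same oWebViewInterface instance created in setupWebViewInterface (the handler body is only illustrative):
this.oWebViewInterface.on('anyEvent', function (eventData) {
  // fires whenever the page calls oWebViewInterface.emit('anyEvent', ...)
  console.log('got event from web-view', eventData);
});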
This code returns:
Cannot read property 'getPicture' of undefined
I have no idea what I'm doing wrong; can you please help me with the code?
My App:
angular.module('Todo', ['ionic', 'Todo.controllers','ngStorage',
'Todo.services', 'ngCordova'])
my Controller:
.controller('profileEditCtrl', function($scope, Camera, $localStorage, $cordovaCamera) {
  $scope.$storage = $localStorage.$default({ data: [] });

  $scope.takePicture = function() {
    navigator.camera.getPicture(onSuccess, onFail, {
      quality: 50,
      destinationType: Camera.DestinationType.DATA_URL
    });

    function onSuccess(imageData) {
      var image = document.getElementById('myImage');
      image.src = "data:image/jpeg;base64," + imageData;
    }

    function onFail(message) {
      alert('Failed because: ' + message);
    }
  };
});
Your code is correct; just add an HTML button with ng-click="takePicture()".
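For example, a minimal sketch (the CSS class is optional; the button can go anywhere in the view bound to profileEditCtrl):
<button class="button" ng-click="takePicture()">Take picture</button>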
There is no problem here. It's expected that the browser "cannot read
property 'getPicture' of undefined", because the browser has no access
to the mobile camera you configured, which means you should test your application on
a real device using:
> ionic run android
Notice that a recent update of Google Chrome added a feature that
helps you test your device from the browser when it is connected to the
PC/laptop. For testing, go to Chrome's navigation panel >> More tools >> Inspect devices,
or just go to this link:
chrome://inspect/#devices
I'm sure your camera will function normally if you have the camera plugin (cordova plugin add org.apache.cordova.camera) installed in the app.
I hope this helps you.
After trying various solutions with no luck for my Cordova project, I simply went ahead and used the built-in JavaScript APIs. Essentially:
async function startCapturing() { // get ready to shoot
  await getPermission('android.permission.CAMERA');
  let stream = await navigator.mediaDevices.getUserMedia({ video: { width: 480, height: 320, facingMode: 'environment' }, audio: false });
  let video = document.getElementById("pVideo"); // a <video> element
  video.srcObject = stream;
  video.play();
  video.style.display = "block";
}

function shootPhoto() { // take a snapshot
  let video = document.getElementById("pVideo");
  let canvas = document.getElementById("pCanvas"); // a <canvas> element
  let context = canvas.getContext('2d');
  context.drawImage(video, 0, 0, 480, 320);
  document.getElementById('fsPhotoI').src = Photo.current.src = canvas.toDataURL('image/png');
  Photo.current.changed = Profile.current.changed = true;
  video.style.display = "none";
}
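getPermission isn't defined in the snippet above; a hypothetical sketch of it using the cordova-plugin-android-permissions plugin (an assumption — the original doesn't say how it was implemented) might look like this:
function getPermission(name) {
  // hypothetical helper, assuming cordova-plugin-android-permissions is installed
  return new Promise(function (resolve, reject) {
    var permissions = cordova.plugins.permissions;
    permissions.requestPermission(name, function (status) {
      if (status.hasPermission) { resolve(); } else { reject(new Error('Permission denied: ' + name)); }
    }, reject);
  });
}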
In particular, some plugins did not work for me because they couldn't use the Android rear camera right away. The following in getUserMedia(...) does the trick:
facingMode:'environment'
Also make sure you have the CAMERA permission in your AndroidManifest.xml.
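That is the standard entry:
<uses-permission android:name="android.permission.CAMERA" />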
I made an HTML page and put an audio tag in it, then I tried to use my Android browser to play some music.
But it doesn't work. Here is my code:
<audio id='simple-audio-player' src="http://m.html5/baidu.mp3" controls="controls" preload="metadata"></audio>
window.onload = function () {
  var au = document.getElementById("simple-audio-player");
  au.load();
  if (au.networkState != au.NETWORK_NO_SOURCE) {
    alert('au.networkState != au.NETWORK_NO_SOURCE');
    try {
      au.play();
    } catch (err) {
      alert(err);
    }
  } else {
    alert('au.networkState == au.NETWORK_NO_SOURCE');
  }
};
I think it might be something wrong with my IIS config, but I'm not sure.
You can't play audio from the window load event; it has to happen in response to a user interaction.
Like this...
window.addEventListener('touchstart', onStart); // note: with addEventListener the event name is 'touchstart', not 'ontouchstart'

function onStart() {
  var audio = new Audio(); // --or-- document.getElementById('id-of-audio-tag');
  audio.src = "test.mp3";
  audio.play();
}
I am trying to create an offline video player that downloads video content from my site for later offline viewing via an HTML5 video element. The code below works fine in Chrome on the desktop, but not on mobile (Nexus S smartphone and Nexus 7 tablet, both on Android 4.1, since only that version runs Chrome, which is required for the FileSystem API). I am using the FileSystem API, which is supported by Chrome on both desktop and mobile.
I have confirmed that it is correctly storing the file on the mobile device and I can retrieve the file correctly, but for some reason, after retrieving the video from the local filesystem, Chrome does not want to play it. This is true whether I am using the HTML5 video element or navigating directly to the filesystem URL. When I use the HTML5 video element it returns the error media_err_not_supported. I have confirmed that the device can play the video if I navigate directly to it on my server (without first storing it using the FileSystem API), so the issue is not a codec or video format problem. I am also using the video/mp4 MIME type in both cases.
Again, this works on desktop, but not mobile. Any ideas?
Here is the code we are using:
<!DOCTYPE html>
<html>
<head>
  <script src="http://ajax.googleapis.com/ajax/libs/jquery/1.7.2/jquery.min.js" type="text/javascript"></script>
  <script type="text/javascript">
    var _fs;
    var filename = "test3.mp4";
    var diskSpaceRequired = 10 * 1024 * 1024;

    $(document).ready(function () {
      window.requestFileSystem = window.requestFileSystem || window.webkitRequestFileSystem;

      function onInitFs(fs) {
        _fs = fs;
        getVideo(fs);
      }

      if (!!window.requestFileSystem) {
        window.webkitStorageInfo.requestQuota(
          window.webkitStorageInfo.PERSISTENT,
          diskSpaceRequired, // amount of bytes you need
          function () { },
          function () { }
        );
        window.requestFileSystem(window.PERSISTENT, diskSpaceRequired, onInitFs, function () { alert('error'); });
      } else {
        alert('not supported');
      }

      $("#play").on('click', playVideo);
      $("#ourVideo").on('error', function (e) {
        console.log('ERROR!!!', e, arguments);
        console.log($("#ourVideo")[0].error);
      });
    });

    function playVideo() {
      _fs.root.getFile(filename, {}, function (fileEntry) {
        $("#ourVideo").attr('src', fileEntry.toURL());
        fileEntry.file(function (file) {
          var reader = new FileReader();
          reader.onloadend = function (e) {
            $("#ourVideo").get(0).play();
          };
          reader.readAsText(file);
        }, errorHandler);
      }, errorHandler);
    }

    function getVideo(fs) {
      fs.root.getFile(filename, { create: true }, function (fileEntry) {
        fileEntry.createWriter(function (fileWriter) {
          fetchResource(fileWriter);
        }, errorHandler);
      }, errorHandler);
    }

    function errorHandler(e) {
      console.log('error', e);
    }

    function fetchResource(fileWriter) {
      console.log('fetchresource');
      var xhr = new XMLHttpRequest();
      xhr.responseType = "arraybuffer";
      xhr.open("GET", "http://mydomain.com/trailer.mp4", true);
      xhr.onload = function (e) {
        if (this.status == 200) {
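          // note: WebKitBlobBuilder has since been deprecated; new Blob([this.response], { type: 'video/mp4' }) achieves the same thing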
          var bb = new WebKitBlobBuilder();
          bb.append(this.response);
          var blob = bb.getBlob("video\/mp4");
          fileWriter.write(blob);
        } else {
          console.log(this.status);
        }
      };
      xhr.send();
    }
  </script>
  <title>foo</title>
</head>
<body>
  <input type="button" value="Play Video" id="play"/>
  <video id="ourVideo" controls="">
    <source id="vidSource" type="video/mp4"/>
  </video>
</body>
</html>
The problem looks like your Android Chrome couldn't access the Android file system correctly. For that you can use a NanoHttpd server to access local files on the device or SD card.
For the NanoHttpd server, use that one class in your application and pass the media file location as http://localhost:8081/sdcard/(your_media_location).mp4
You can also get NanoHttpd from https://gist.github.com/1893396
I think this is a more reliable way to access SD card files than requesting them directly.
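For instance, once NanoHttpd is listening on port 8081 on the device, the player source in the question's playVideo() could be pointed at the local HTTP URL instead of the filesystem URL (the path below is only an illustrative placeholder):
// hypothetical local URL served by NanoHttpd; adjust the path to wherever the video was saved
$("#ourVideo").attr('src', 'http://localhost:8081/sdcard/Download/trailer.mp4');
$("#ourVideo").get(0).play();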
Try changing the HTML part to:
</head>
<body>
  <input type="button" value="Play Video" id="play"/>
  <video id="ourVideo" controls="">
    <source src="video1.mp4" type="video/mp4">
    <source src="video1.ogv" type="video/ogg">
  </video>
</body>
You can convert your .mp4 to .ogv using
http://video.online-convert.com/convert-to-ogg
and put the .ogv file in the same location as the .mp4.
For more information, check out these:
http://www.broken-links.com/2010/07/08/making-html5-video-work-on-android-phones/
HTML5 <video> element on Android does not play
I am converting some HTML5 to work with Android. Android does not support the <audio> element, so the play method does not function.
I am using PhoneGap's Media API, and that does play audio. What I want is a way to find the first source of the audio tag and then use that with Media's play, but this does not seem to work.
function playSound(whatToPlay) {
  // removed since <audio> does not work
  // var snd = document.getElementById(whatToPlay);
  // snd.play();
  var toPlaySrc = $("#" + whatToPlay + ":first-child").eq(0).attr("src");
  // next I will have to deal with /android_asset and my current partial path
  var myMedia = new Media("/android_asset/" + toPlaySrc, onSuccess, onError);
  if (myMedia)
    myMedia.play();
}
playSound("clockticking");
This is my audio element:
<audio id="clockticking" preload="auto" autobuffer onEnded="instructionFinsihed()">
<source src="../sound/tictoc.mp3" type="audio/mp3" />
<source src="../sound/tictoc.ogg" type="audio/ogg" />
</audio>
I have many existing audio elements, so a general solution would really be important; I cannot, for instance, just add id tags to all the mp3 elements and solve it that way.
Okay, I'm working on some monkey patch code for the Android WebView; you can see the progress over on my corinthian GitHub project. In the meantime, what you'd want to do is set up a deviceready listener:
document.addEventListener("deviceready", monkeypunch, true);
create an array to hold the media objects:
mediaObjs = [];
and in the monkeypunch method you would do:
function monkeypunch() {
  // copy the live collection into an array, since we replace nodes while iterating
  var audioclips = Array.prototype.slice.call(document.getElementsByTagName("audio"));
  for (var i = 0; i < audioclips.length; i++) {
    // Create new Media object.
    // (let scopes audioSrc/newImg per iteration, so each click handler controls its own clip)
    let audioSrc = audioclips[i].firstElementChild.src;
    if (audioSrc.indexOf("file:///android_asset") == 0) {
      audioSrc = audioSrc.substring(7);
    }
    mediaObjs[audioSrc] = new Media(audioSrc);
    // Create the HTML
    let newAudio = document.createElement('div');
    let newImg = document.createElement('img');
    newImg.setAttribute('src', 'images/play.png');
    newAudio.appendChild(newImg);
    // Set the onclick listener
    newAudio.addEventListener("click", function() {
      // figure out what image is displayed
      if (newImg.src.indexOf("images/play.png", newImg.src.length - "images/play.png".length) !== -1) {
        newImg.src = "images/pause.png";
        mediaObjs[audioSrc].play();
      } else {
        newImg.src = "images/play.png";
        mediaObjs[audioSrc].pause();
      }
    });
    // replace the audio tag with our div
    audioclips[i].parentNode.replaceChild(newAudio, audioclips[i]);
  }
}
You can grab the play/pause images from my github project. I'm planning on adding more functionality when I have some more time.