I cannot get AnalyserNode to work with AudioContext.createMediaElementSource on mobile devices. It does not work in Safari on iOS or in Chrome on Android. When I call analyser.getByteFrequencyData(buffer); I always get an array of zeros. Is this a bug? Where should I report it? Is there any workaround?
Full code below or in Codepen: http://codepen.io/elranu/pen/WQOerB
var initialized = false;
var audio = new Audio();
audio.src = "http://blogtalk.vo.llnwd.net/o23/show/6/884/show_6884921_2014_09_08_16_11_36.mp3";
audio.crossOrigin = "anonymous";
audio.preload = "none"; // preload expects a string ("none"/"metadata"/"auto"), not a boolean

function init(){
    // Safari still uses the prefixed constructor
    var context = new (window.AudioContext || window.webkitAudioContext)();
    var source = context.createMediaElementSource(audio);
    var analyser = context.createAnalyser();
    analyser.fftSize = 1024;
    var buffer = new Uint8Array(analyser.frequencyBinCount);
    var lines = [];
    var eq = document.getElementById('eq');
    // one div per frequency bin
    for(var index = 0; index < buffer.length; index++){
        var line = document.createElement('div');
        line.className = 'line';
        lines.push(line);
        eq.appendChild(line);
    }
    // media element -> analyser -> speakers
    source.connect(analyser);
    analyser.connect(context.destination);
    setInterval(function(){
        analyser.getByteFrequencyData(buffer);
        for(var index = 0; index < buffer.length; index++){
            lines[index].style.height = buffer[index] + 'px';
        }
    }, 50);
}

function startAudio(){
    if(!initialized){
        init();
        initialized = true;
    }
    audio.play();
}

function stopAudio(){
    audio.pause();
}
.line {
width: 2px;
background-color: blue;
margin-left:3px;
display:inline-block;
}
#eq {
height: 500px;
}
<button onclick="startAudio()">Start Audio</button>
<button onclick="stopAudio()">Stop Audio</button>
<div id="eq">
</div>
Update: I have noticed that the AnalyserNode works on mobile if the source of the AudioContext is createBufferSource(). But this is not a viable way to solve the problem, because to create a buffer I have to make the following request:
var request = new XMLHttpRequest();
request.open("GET", url, true);
request.responseType = "arraybuffer";
request.onload = function(){ /* stuff... */ };
And if I make this request, the mobile device downloads the full audio file just to render its frequencies. With large audio files this is not a viable solution on mobile, because the user could wait a long time just for playback to start.
An example to test this: http://webaudioapi.com/samples/visualizer/
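For reference, a minimal sketch of the buffer-based approach that does work on mobile (url stands for the MP3 to analyse; the analyser wiring mirrors my code above):
var context = new (window.AudioContext || window.webkitAudioContext)();
var analyser = context.createAnalyser();
analyser.fftSize = 1024;
var request = new XMLHttpRequest();
request.open("GET", url, true); // url: the MP3 to analyse
request.responseType = "arraybuffer";
request.onload = function(){
    // the whole file has to be downloaded and decoded before playback can start
    context.decodeAudioData(request.response, function(decoded){
        var source = context.createBufferSource();
        source.buffer = decoded;
        source.connect(analyser);          // buffer -> analyser -> speakers
        analyser.connect(context.destination);
        source.start(0);
        // analyser.getByteFrequencyData(...) now returns real data on mobile
    });
};
request.send();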
I found the answer; unfortunately it is a bug. More information here:
https://code.google.com/p/chromium/issues/detail?id=419446
Related
My project works normally in a web browser; the HTML5 version is fine.
But after compiling to an APK with the command 'cocos run -p android', socket.io no longer works: the server side cannot receive any information.
I know something is wrong, but I cannot find a concrete solution. Please help me, thanks.
server.js is as follows:
var io = require('socket.io').listen(3000);
console.log('Server on port 3000.');

var player = []; // list of connected players

io.sockets.on('connection', function (socket){
    console.log("come in");
    socket.on('join_success', function (name){
        console.log("name is " + name);
        // create a fresh object per player; otherwise every entry
        // shares and overwrites the same player_data reference
        var player_data = {
            name: name,
            kill: 0,
            dead: 0
        };
        player.push(player_data);
        io.sockets.emit("updateAllPlayerData", player);
    });
});
app.js is as follows:
var basicLayer = cc.Layer.extend({
    ctor: function(){
        this._super();
        var size = cc.director.getWinSize();
        var ball = new cc.Sprite(res.png1);
        ball.x = 100;
        ball.y = 100;
        var ui = new GameUI();

        // note: once the project runs as an APK, "localhost" resolves to the device itself
        sio_client = SocketIO.connect("http://localhost:3000");

        sio_client.on("updateAllPlayerData", function(data){
            alert(data);
            playerNum = data.length;
            for(var i = 0; i < data.length; i++){
                playerName.push(data[i].name);
                killNum.push(data[i].kill);
                deadNum.push(data[i].dead);
            }
            ui.updateInfo();
        });

        sio_client.on("connect", function(){
            console.log("connect_success");
            var name = prompt("input your name", "");
            while(name == null || name == ""){
                name = prompt("input your name", "");
            }
            sio_client.emit("join_success", name);
        });

        this.addChild(ball, 10, 100);
        this.addChild(ui, 2);
        return true;
    }
});
I have the jQuery below, which works fine in desktop browsers, but when I load it in Chrome on Android and start typing, the second letter appears before the first letter and the rest follow the second letter.
Here is an image of how it behaves.
Can anyone help me solve this, please?
$("#textbox").on('input', function(evt) {
if(this.lengh == 0) return $(this);
var input = $(this);
var start = input[0].selectionStart;
$(this).val($(this).val().replace(/[^a-zA-Z0-9 +:%=\\/-]/gi,""))
$(this).val(function (_, val) {
return val.toUpperCase();
});
input[0].selectionStart = input[0].selectionEnd = start;
});
The problem occurs when, on Android, selectionStart is zero.
Test it with your phone: http://www.vixed.it/st/34947263/
$("#textbox").on('input', function(e) {
e.preventDefault();
var input = $(this);
var start = input[0].selectionStart;
var inputTextChanged = input.val().replace(/[^a-zA-Z0-9 +:%=\\/-]/gi,"").toUpperCase();
input.val(inputTextChanged);
if (start>=1) input[0].selectionEnd = start;
});
I am trying to display the image with the URL content://com.android.contacts/contacts/1/photo in an img element of an HTML page.
Although it's an old problem, can anyone please provide a complete example of displaying a contact image in an img element using PhoneGap?
Thanks in advance,
prodeveloper.
I read a lot about this problem, and it seems to be solved as of Cordova 3.2.0.
Here is my code.
You do not need to create a temporary image or anything else.
var init = function () {
    var options = new ContactFindOptions();
    options.filter = "";     // empty search string returns all contacts
    options.multiple = true; // return multiple results
    var filter = ["displayName",
                  "phoneNumbers",
                  "photos"];
    navigator.contacts.find(filter, onSuccess, onError, options); // onError: your error callback
};

function onSuccess(contacts) {
    var contactPhoto;
    for (var i = 0; i < contacts.length; i++) {
        if (contacts[i].displayName && contacts[i].phoneNumbers) {
            contactPhoto = defaultvalue; // defaultvalue: your fallback image, defined elsewhere
            if (contacts[i].photos) {
                // use the first photo entry that actually has a value
                for (var j = 0; j < contacts[i].photos.length; j++) {
                    if (contacts[i].photos[j].value) {
                        contactPhoto = contacts[i].photos[j].value;
                        break;
                    }
                }
            }
            showContactsModel.Contacts.add({
                displayName: contacts[i].displayName,
                phoneNumbers: contacts[i].phoneNumbers,
                photo: contactPhoto
            });
        }
    }
}
And my binding:
<img data-bind="attr:{src: photo}" alt="something.png" />
You can directly use
<img src="content://com.android.contacts/contacts/1502/photo" alt="Oops!!">
once you have the URL of the image.
How to pull to refresh?
In Titanium Appcelerator I need to show a list of content in a TableView. When I pull down on the view it needs to update. On iPhone this works, but on Android it doesn't. Can anyone please help me solve this problem on Android?
My Android code:
tableView.addEventListener('scroll', function(e)
{
    var offset = e.contentOffset.y;
    if (offset < -65.0 && !pulling && !reloading)
    {
        var t = Ti.UI.create2DMatrix();
        t = t.rotate(-180);
        pulling = true;
        arrow.animate({transform:t, duration:180});
        statusLabel.text = "Release to refresh...";
    }
    else if((offset > -65.0 && offset < 0) && pulling && !reloading)
    {
        pulling = false;
        var t = Ti.UI.create2DMatrix();
        arrow.animate({transform:t, duration:180});
        statusLabel.text = "Pull down to refresh...";
    }
});

tableView.addEventListener('dragEnd', function(e)
{
    if(pulling && !reloading)
    {
        reloading = true;
        pulling = false;
        arrow.hide();
        actInd.show();
        statusLabel.text = "Reloading...";
        tableView.setContentInsets({top:60}, {animated:true});
        tableView.scrollToTop(-60, true);
        arrow.transform = Ti.UI.create2DMatrix();
        beginReloading();
    }
});
Titanium now supports pull to refresh for BOTH Android (> v6.2.0) and iOS (>3.2.0) with a Titanium.UI.TableView, Titanium.UI.ListView or Titanium.UI.ScrollView object.
See the docs:
https://docs.appcelerator.com/platform/latest/#!/api/Titanium.UI.ListView
https://docs.appcelerator.com/platform/latest/#!/api/Titanium.UI.RefreshControl
Sample code taken from the docs:
var win = Ti.UI.createWindow({
    fullscreen: true
});
var counter = 0;

function genData() {
    var data = [];
    for (var i = 1; i <= 3; i++) {
        data.push({properties: {title: 'ROW ' + (counter + i)}});
    }
    counter += 3;
    return data;
}

var section = Ti.UI.createListSection();
section.setItems(genData());

var control = Ti.UI.createRefreshControl({
    tintColor: 'red'
});

var listView = Ti.UI.createListView({
    sections: [section],
    refreshControl: control
});

control.addEventListener('refreshstart', function(e){
    Ti.API.info('refreshstart');
    setTimeout(function(){
        Ti.API.debug('Timeout');
        section.appendItems(genData());
        control.endRefreshing();
    }, 2000);
});

win.add(listView);
win.open();
Is this just the iOS code from the Kitchen Sink example?
There are a couple of attempts at getting this working on Android, though I haven't confirmed that any of them work as expected. From what I understand, the problem is that you can't get the offset the same way on Android as on iOS.
A quick Google search turned up this link, which was referenced from the official Appcelerator forums:
https://gist.github.com/903895
Is there a way to get true streaming mic input?
The example code I have at the moment appears to grab the mic input data, save it into a Sound object and play it right away.
Is there a way to stream it properly?
If not, in my example, is there a way to get the mic input data but mute the audio output, as it is causing a feedback loop (despite setLoopBack being set to false)?
Code below:
import flash.display.*;
import flash.events.*;
import flash.media.*;
import flash.media.SoundTransform;
import flash.utils.*;

var _soundBytes:ByteArray = new ByteArray();
var _micBytes:ByteArray;
var son:Sound;
var sc:SoundChannel;
var pow:int = 0;
var myBar:Sprite;

stage.quality = "LOW";

// this code ended up muting the mic input oddly?
//SoundMixer.soundTransform = new SoundTransform(0);

init();

function init()
{
    myBar = new Sprite();
    addChild(myBar); // add the visualiser sprite once, not on every frame
    micInit();
    soundInit();
    addEventListener(Event.ENTER_FRAME, drawLines);
}

function micInit()
{
    var mic:Microphone = Microphone.getMicrophone();
    if (mic != null) {
        //mic.setUseEchoSuppression(true);
        mic.setLoopBack(false);
        mic.setSilenceLevel(0);
        mic.rate = 44;
        mic.gain = 60;
        mic.addEventListener(SampleDataEvent.SAMPLE_DATA, micSampleDataHandler);
    }
}

// grab the captured mic samples and (re)start playback of the Sound object
function micSampleDataHandler(event:SampleDataEvent):void
{
    _micBytes = event.data;
    sc = son.play();
}

function soundInit():void {
    son = new Sound();
    son.addEventListener(SampleDataEvent.SAMPLE_DATA, soundSampleDataHandler);
}

// feed the mic samples back out through the Sound object (duplicated for stereo)
function soundSampleDataHandler(event:SampleDataEvent):void {
    for (var i:int = 0; i < 8192 && _micBytes.bytesAvailable > 0; i++) {
        var sample:Number = _micBytes.readFloat();
        event.data.writeFloat(sample);
        event.data.writeFloat(sample);
    }
}

function drawLines(e:Event):void {
    SoundMixer.computeSpectrum(_soundBytes, true);
    myBar.graphics.clear();
    myBar.graphics.lineStyle(2, 0xabc241);
    for (var i:int = 0; i < 256; i++) {
        pow = _soundBytes.readFloat() * 200;
        pow = Math.abs(pow);
        myBar.graphics.drawRect(i * 2, 0, 2, pow);
    }
}
Hope you can help!
To use acoustic echo cancellation, call Microphone.getEnhancedMicrophone() to get a reference to an enhanced Microphone object, and set the Microphone.enhancedOptions property to an instance of the MicrophoneEnhancedOptions class. Here is an article that discusses it all: "Article about enhanced microphone options" at Adobe.
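A minimal sketch of that setup (assuming the target runtime supports the enhanced microphone; the mode value is only illustrative, and micSampleDataHandler is the handler from the question above):
import flash.events.SampleDataEvent;
import flash.media.Microphone;
import flash.media.MicrophoneEnhancedMode;
import flash.media.MicrophoneEnhancedOptions;

var mic:Microphone = Microphone.getEnhancedMicrophone(); // AEC-capable microphone
if (mic != null) {
    var options:MicrophoneEnhancedOptions = new MicrophoneEnhancedOptions();
    options.mode = MicrophoneEnhancedMode.FULL_DUPLEX; // illustrative; pick the mode you need
    mic.enhancedOptions = options;
    // with AEC active, looping the mic back to the speakers should no longer feed back
    mic.setLoopBack(true);
    mic.addEventListener(SampleDataEvent.SAMPLE_DATA, micSampleDataHandler);
}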
EDIT: I spoke too soon. I have used the enhanced mic many times before, but I decided to read the article myself to see if there was anything new I could learn from it... and I found this near the end:
AEC is computationally expensive. Currently, only desktop platforms are supported for Flash Player and AIR
Although I just looked at the date... that was last year, so maybe give it a try and it is supported now?