It looks like a pretty common problem, but I still can't find any solution.
The goal is to play an AAC stream in an Adobe AIR mobile app.
I've used the transcoder from https://code.google.com/p/project-thunder-snow/. It adds FLV headers to the AAC data so the stream can be played through a standard AS3 NetStream object (as I understand it). It works fine on Windows and Mac, but the same app launched on Android or iOS produces neither sound nor any error. I've figured out that the transcoder itself works fine; the cause seems to lie in the different behavior of NetStream on each platform.
So, is there any solution, or at least any documentation describing the difference between NetStream on PC and on mobile platforms?
Okay... I might have a clue for you. I tried running the NBAAC code you linked to in your other question from inside Flash CS5 and got this error:
onChannelReady : MetaData
Error: Error #2067: The ExternalInterface is not available in this container. ExternalInterface requires Internet Explorer ActiveX, Firefox, Mozilla 1.7.5 and greater, or other browsers that support NPRuntime.
So it seems the player is designed to be used only inside an HTML page and uses ExternalInterface (JavaScript) to get metadata from the browser into the SWF.
I removed the ExternalInterface stuff and it played OK in Flash Player and Device Central without a browser/HTML. Try this code in your AIR app (I don't have AIR installed anymore to confirm):
package
{
import com.thebitstream.flv.CodecFactory;
import com.thebitstream.flv.Transcoder;
import com.thebitstream.flv.codec.*;
import com.thebitstream.ice.*;
import com.thebitstream.nsv.*;
import flash.display.Sprite;
import flash.events.Event;
import flash.events.IOErrorEvent;
import flash.events.ProgressEvent;
import flash.events.SecurityErrorEvent;
//import flash.external.externalInterface;
import flash.media.SoundTransform;
import flash.net.NetConnection;
import flash.net.NetStream;
import flash.net.NetStreamAppendBytesAction;
import flash.net.URLRequest;
import flash.net.URLRequestMethod;
import flash.net.URLStream;
import flash.utils.ByteArray;
import flash.utils.setTimeout;
[SWF (width="250",height="250")]
public dynamic class NBAAC_AIR_v2 extends Sprite
{
public var request:URLRequest;
public var transcoder :Transcoder;
public var transport :NetConnection;
public var transportStream :NetStream;
public var serverConnection:URLStream;
//public var host:String=" "; //now not used in line... request=new URLRequest(resource);
public var resource:String="http://aacplus-ac-64.timlradio.co.uk/;"; //test station
public function NBAAC_AIR_v2 ()
{
super();
/*
if(loaderInfo.parameters.host)
host=loaderInfo.parameters.host;
if(loaderInfo.parameters.resource)
resource=loaderInfo.parameters.resource;
*/
//CodecFactory.ImportCodec(MetaData);
CodecFactory.ImportCodec(AAC);
CodecFactory.ImportCodec(AACP);
transcoder = new Transcoder();
transcoder.addEventListener(CodecEvent.STREAM_READY,onChannelReady);
transcoder.addEventListener(StreamDataEvent.DATA, onTag);
transcoder.initiate();
transcoder.loadCodec("AAC");
transport = new NetConnection();
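// connect(null) creates a local NetConnection (no server); it is required before a NetStream can be used with appendBytes()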
transport.connect(null);
flash.utils.setTimeout(boot,500);
}
public function boot():void
{
//externalInterface.addCallback('nbaac.setVolume', onVolume);
//externalInterface.addCallback('nbaac.togglePause', onTogglePause);
//externalInterface.addCallback('nbaac.setBuffer', setBufferLength);
//externalInterface.addCallback('nbaac.getBuffer', getBufferLength);
//externalInterface.addCallback('nbaac.getTime', getTime);
/*
var meta:Object={};
meta.uri="";
meta.StreamTitle="";
*/
//externalInterface.call('logit','start up');
transportStream = new NetStream(transport);
transportStream.bufferTime=2;
transportStream.client = this;
transportStream.soundTransform=new SoundTransform(.5,0);
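// play(null) switches the NetStream into Data Generation Mode, so the FLV data built by the transcoder can be fed in with appendBytes()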
transportStream.play(null);
transportStream.appendBytesAction(NetStreamAppendBytesAction.RESET_BEGIN);
var headerTag:ByteArray=transcoder.createHeader(false,true);
transportStream.appendBytes(headerTag);
headerTag.clear();
//transcoder.readMetaObject(meta,0);
serverConnection=new URLStream();
serverConnection.addEventListener(ProgressEvent.PROGRESS,loaded);
serverConnection.addEventListener(IOErrorEvent.IO_ERROR, onIo);
serverConnection.addEventListener(SecurityErrorEvent.SECURITY_ERROR, onNoPolicy);
serverConnection.addEventListener(Event.CLOSE, onClose);
request=new URLRequest(resource); //removed "host" from url
//request.requestHeaders=[new URLRequestHeader("GET",resource+" HTTP/1.0")];
//request.requestHeaders=[new URLRequestHeader("Icy-MetaData","1")];
request.method=URLRequestMethod.GET;
serverConnection.load(request);
}
private function getTime() :Number
{ return transportStream.time; }
private function getBufferLength() :Number
{ return transportStream.bufferLength; }
private function setBufferLength (val:Number) :void
{ transportStream.bufferTime = val; }
private function onTogglePause():void
{ transportStream.togglePause(); }
private function onVolume (val:Number) :void
{ transportStream.soundTransform = new SoundTransform(val, 0); }
private function onIo(pe:IOErrorEvent) :void
{ /* externalInterface.call('logit','IOErrorEvent') */ }
private function onTag(sde:StreamDataEvent) :void
{
sde.tag.position=0;
transportStream.appendBytes(sde.tag);
}
private function onChannelReady(ce:CodecEvent) :void
{ trace('onChannelReady :',ce.codec.type); }
private function onClose(e:Event):void
{ /* externalInterface.call('logit','onClose') */ }
public function onMetaData(e:Object):void
{ /* externalInterface.call('logit','onMetaData') */ }
private function loaded(e:ProgressEvent):void
{
var chunk:ByteArray=new ByteArray();
while(serverConnection.bytesAvailable)
{ chunk.writeByte( serverConnection.readByte() ); }
chunk.position=0;
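// hand the raw AAC bytes to the transcoder; it wraps them into FLV tags and returns them via StreamDataEvent.DATA (see the onTag handler, registered in the constructor)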
transcoder.addRawData( chunk, 0, "AAC" );
}
private function onNoPolicy(se:SecurityErrorEvent):void
{ /* externalInterface.call('logit','SecurityErrorEvent'+host+resource+'<br />'+se.text); */ }
}
}
Related
I have written a Dart web app that retrieves .mp3 files from a server and plays them back; I am trying to write a mobile version using Flutter. I know dart:web_audio is the main option for a web app, but Flutter can't find it in my SDK. I know it's there because I can compile the following to JavaScript:
import 'dart:html';
import 'dart:convert';
import 'dart:web_audio';
AudioContext audioContext;
main() async {
audioContext = new AudioContext();
var ul = (querySelector('#songs') as UListElement);
var signal = await HttpRequest.getString('http://10.0.0.6:8000/api/filelist');
// Map json = JSON.decode(signal);
// for (Map file in json['songs']) {
print("signal: $signal");
Map json = JSON.decode(signal);
for (Map file in json['songs']) {
var li = new LIElement()
..appendText(file['title']);
var button = new ButtonElement();
button.setAttribute("id", "#${file['file']}");
button.appendText("Play");
li.append(button);
new Song(button, file['file']);
ul.append(li);
}
}
class Song {
ButtonElement button;
bool _playing = false;
// AudioContext _audioContext;
AudioBufferSourceNode _source;
String title;
Song(this.button, this.title) {
button..onClick.listen((e) => _toggle());
}
_toggle() {
_playing = !_playing;
_playing ? _start() : _stop();
}
_start() {
return HttpRequest
.request("http://10.0.0.6:8000/music/$title", responseType: "arraybuffer")
.then((HttpRequest httpRequest) {
return audioContext
.decodeAudioData(httpRequest.response)
.then((AudioBuffer buffer) {
_source = audioContext.createBufferSource();
_source.buffer = buffer;
_source.connectNode(audioContext.destination);
_source.start(0);
button.text = "Stop";
_source.onEnded.listen((e){
_playing = false;
button.text = "Play";
});
});
});
}
_stop() {
_source.stop(0);
button.text = "Play";
}
}
How would I rewrite the dart:web_audio parts of my code for a Flutter app? Can Flutter access MediaPlayer? And if so, how would I refer to it in pubspec.yaml?
As raju-bitter noted above, Flutter used to provide some built-in audio wrappers in its core engine, but those have since been removed: https://github.com/flutter/flutter/issues/1364.
Apps built with Flutter are just iOS or Android apps, so anything the underlying iOS/Android can do is possible from Flutter by writing some Java or Obj-C code following the hello_services model (https://github.com/flutter/flutter/tree/master/examples/hello_services). This model is documented at https://flutter.io/platform-services. It's not nearly as easy as we'd like it to be yet. Many improvements are coming soon.
I know it's 4 years late, but I have found the audioplayers package, which can be used as follows:
import 'package:audioplayers/audio_cache.dart';
import 'package:audioplayers/audioplayers.dart';
//Call this function from an event
void playRemoteFile() {
AudioPlayer player = new AudioPlayer();
player.play("https://luan.xyz/files/audio/ambient_c_motion.mp3");
}
I'd like to know if it's possible to play a YouTube video inside an AIR app using AS3 code.
It seems that the YouTube API is deprecated...
If anybody knows a way or a good tutorial, please share.
Thanks
----------------EDIT
I've tried this code :
var wt:uint;
var ht:uint ;
var webview:StageWebView ;
var rect:Rectangle;
var url:String;
url = "https://www.youtube.com/watch?v=hFupHx5G44s";
trace(url);
wt = this.stage.stageWidth;
ht = this.stage.stageHeight;
webview = new StageWebView();
rect = new Rectangle(0,300,wt,ht/2);
webview.stage = this.stage;
webview.viewPort = rect;
webview.loadURL(url);
But it's loading the whole YouTube page (the video plus the recommended videos, etc.) with a scrollbar on the right.
I'd like to load only the video (not the whole YouTube page).
Edit :
I'd like to load only the video (not the whole YouTube page).
Use this link format:
url = "https://www.youtube.com/v/hFupHx5G44s";
For example, choose one of the formats below:
https://www.youtube.com/v/hFupHx5G44s - use /v/ to get the SWF version
https://www.youtube.com/embed/hFupHx5G44s - use /embed/ to get the HTML5 version
It's for an ANDROID AIR app.
You should have mentioned that detail in the question. You could try adding:
Security.allowDomain("www.youtube.com");
Security.loadPolicyFile("https://www.youtube.com/crossdomain.xml");
//////////# End of Edit
Why not use StageWebView to load the embed (HTML5) player as a web page?
The YouTube AS3 API being deprecated likely means you cannot create your own user interface or use features like search. You can re-create the search function easily enough, though (load the "search results" page source into a String and extract the links by parsing the source code).
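To illustrate the StageWebView idea with the /embed/ URL, here is a rough, untested sketch (it reuses the viewport setup and video ID from your snippet, and assumes the same document-class/frame-script context); it should show only the embedded HTML5 player rather than the full watch page:
import flash.media.StageWebView;
import flash.geom.Rectangle;

var embedView:StageWebView = new StageWebView();
embedView.stage = this.stage;
// size the viewport as needed; here it fills the lower half of the screen, as in the question
embedView.viewPort = new Rectangle(0, 300, this.stage.stageWidth, this.stage.stageHeight / 2);
// the /embed/ URL serves just the HTML5 player page, without recommendations or scrollbars
embedView.loadURL("https://www.youtube.com/embed/hFupHx5G44s");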
Anyway, I just tried the code below and it worked for an AIR desktop app. Maybe you can use it as a starting point...
package
{
import flash.display.MovieClip;
import flash.display.BitmapData;
import flash.display.Bitmap;
import flash.display.Sprite;
import flash.display.Loader;
import flash.display.LoaderInfo;
import flash.net.URLRequest;
import flash.events.*;
import flash.system.*;
public class Youtube_AIR_code extends MovieClip
{
public var YT_Loader : Loader;
public var my_VIDEO_ID : String = "hFupHx5G44s"; //# the YouTube ID
public function Youtube_AIR_code ()
{
Security.loadPolicyFile("http://www.youtube.com/crossdomain.xml");
YT_Loader = new Loader();
YT_Loader.contentLoaderInfo.addEventListener(Event.COMPLETE, YT_Ready );
YT_Loader.load( new URLRequest("http://www.youtube.com/v/" + my_VIDEO_ID) );
}
function YT_Ready (e:Event) : void
{
trace("YouTube SWF :: [status] :: Loading Completed");
//addChild(YT_Loader);
//# Wait for Youtube Player to initialise
YT_Loader.content.addEventListener("onReady", on_YT_PlayerLoaded );
}
private function on_YT_PlayerLoaded (e:Event) : void
{
//# Check Original Width/Height - Scale it proportionally
trace( "YT_Loader content Width : " + YT_Loader.content.width );
trace( "YT_Loader content Height : " + YT_Loader.content.height );
//# Testing changed size and position
YT_Loader.width = 320; YT_Loader.height = 240;
YT_Loader.x = 30; YT_Loader.y = 100;
addChild(YT_Loader); //# can add to stage only
}
} //# End Public Class
} //# End Package
Have you seen the Video Player ANE from My Flash Labs?
http://www.myflashlabs.com/product/video-player-ane-adobe-air-native-extension/
I use this code to view my web page. It works with AIR 3.2 for desktop; how do I make it work with AIR 3.2 for Android? It doesn't load anything, only a white screen!
package {
import flash.display.Sprite;
import flash.html.HTMLLoader;
import flash.net.URLRequest;
public class HTMLLoaderExample extends Sprite
{
public function HTMLLoaderExample()
{
var html:HTMLLoader = new HTMLLoader();
var urlReq:URLRequest = new URLRequest("http://www.doomanco.com/");
html.width = stage.stageWidth;
html.height = stage.stageHeight;
html.load(urlReq);
addChild(html);
html.x = 0;
html.y = 0;
}
}
}
As Adobe says about HTMLLoader: "AIR profile support: This feature is supported on all desktop operating systems, but is not supported on mobile devices or on AIR for TV devices. You can test for support at run time using the HTMLLoader.isSupported property. See AIR Profile Support for more information regarding API support across multiple profiles." So I think it's simply not supported on your Android device; you can verify that with the HTMLLoader.isSupported property. For more details you can take a look here: Adobe.com: HTMLLoader, and here: Adobe.com: Device profiles for AIR.
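For example, a quick check at startup (just a sketch) makes the situation obvious before you pick an approach:
import flash.html.HTMLLoader;

// traces false on Android and other mobile profiles, true on desktop
trace("HTMLLoader supported here: " + HTMLLoader.isSupported);
If it traces false, StageWebView is the usual alternative on mobile, as in the follow-up code below.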
Thanks @DodgerThud & @akmozo ... it is done with this:
package {
import flash.display.MovieClip;
import flash.media.StageWebView;
import flash.geom.Rectangle;
import flash.events.KeyboardEvent;
import flash.ui.Keyboard;
import flash.desktop.NativeApplication;
import flash.display.Stage;
import flash.display.StageAlign;
import flash.display.StageScaleMode;
import flash.events.Event;
public class StageWebViewExample extends MovieClip{
private var webView:StageWebView = new StageWebView();
public function StageWebViewExample()
{
stage.scaleMode = StageScaleMode.NO_SCALE;
stage.align = StageAlign.TOP_LEFT;
webView.stage = this.stage;
webView.viewPort = new Rectangle( 0, 0, stage.fullScreenWidth, stage.fullScreenHeight );
webView.loadURL( "http://www.google.com" );
stage.addEventListener( KeyboardEvent.KEY_DOWN, onKey );
}
private function onKey( event:KeyboardEvent ):void
{
if( event.keyCode == Keyboard.BACK && webView.isHistoryBackEnabled )
{
trace("Back.");
webView.historyBack();
event.preventDefault();
}
if( event.keyCode == Keyboard.SEARCH && webView.isHistoryForwardEnabled )
{
trace("Forward.");
webView.historyForward();
}
}
}
}
I have a simple button that plays a small MP3 file, looping it 30 times. The MP3 is streamed from a server (urlMP3).
I can see on my Galaxy S2 that it accesses the server for each of the 30 loops. Is it downloading the MP3 each time it loops or downloading once and playing from the phone's memory?
//Button 'audioYes' to play audio loop x 30
var soundLoop:Sound = new Sound();
var soundChannel:SoundChannel;
var soundLoopUrl:URLRequest = new URLRequest(urlMP3);
audioYes.addEventListener(MouseEvent.CLICK, f2_MouseClickHandler);
function f2_MouseClickHandler(event:MouseEvent):void {
soundLoop.load(soundLoopUrl);
soundLoop.play(0, 30);
}
If it is downloading each time, what would be a good way to download it once and then play? Thanks for your help.
Edit: Sep 1 2012
I've created a simple Flash file and added the following, provided by @Rytis. I'm getting an error from the last line, this.mySound.play: "Error #1009: Cannot access a property or method of a null object reference." What do I do with that?
import flash.media.Sound;
import flash.net.URLLoader;
import flash.display.Loader;
var mySound:Sound = new Sound();
var myurlLoader:URLLoader = new URLLoader();
myurlLoader.addEventListener(Event.COMPLETE, this.onSoundLoadComplete)
myurlLoader.load(new URLRequest("01.mp3"))
function onSoundLoadComplete(event:Event):void{
this.mySound = URLLoader(event.target).data as Sound;
this.mySound.play(0,30);
}
Short answer: download the sound and store it in a variable before playing it.
Example:
package {
    import flash.events.Event;
    import flash.media.Sound;
    import flash.net.URLLoader;
    import flash.net.URLLoaderDataFormat;
    import flash.net.URLRequest;
    import flash.utils.ByteArray;

    public class SoundLoadTest {
        protected var sound:Sound;

        public function SoundLoadTest() {
            var urlLoader:URLLoader = new URLLoader();
            // load the MP3 as raw bytes (URLLoader.data is text, binary or variables - never a Sound)
            urlLoader.dataFormat = URLLoaderDataFormat.BINARY;
            urlLoader.addEventListener(Event.COMPLETE, this.onSoundLoadComplete);
            urlLoader.load(new URLRequest("path/to/sound.file"));
        }

        protected function onSoundLoadComplete(event:Event):void {
            // keep the downloaded MP3 bytes in a Sound stored on a class field
            var bytes:ByteArray = URLLoader(event.target).data as ByteArray;
            this.sound = new Sound();
            this.sound.loadCompressedDataFromByteArray(bytes, bytes.length); // requires Flash Player 11 / AIR 3
            // start playing the in-memory sound, looping 30 times
            this.sound.play(0, 30);
        }
    }
}
I am trying to play a video on an Android tablet using StageVideo, but any time I click play and add the video to the stage, the whole app flickers and then the video is added to the stage. The video then starts off all pixelated; then that goes away and it starts playing properly, with only a few jumps. I am wondering what is causing this to happen. Is there a better way to load the video? This can also happen when just using the Video object in Flex.
The video is stored locally in file:///mnt/sdcard.
The video type is H.264. Thanks for your help! If I missed something that you need to know, please comment and I will edit my question.
Here is the view for the video (I am using a view-based mobile app).
<?xml version="1.0" encoding="utf-8"?>
<s:View xmlns:fx="http://ns.adobe.com/mxml/2009" xmlns:s="library://ns.adobe.com/flex/spark" title="stageVidPage" backKeyPressed="0" xmlns:mx="library://ns.adobe.com/flex/mx" backgroundAlpha="0" alpha="1">
<fx:Script>
<![CDATA[
import ios.iOSStageVideo;
import mx.core.UIComponent;
import mx.events.FlexEvent;
protected function backClick(event:MouseEvent):void
{
navigator.pushView(SliderAppHomeView);
}
protected function playVideo(event:MouseEvent):void
{
var path:String = new String(new File("file:///mnt/sdcard/Movies/Video_test_11.mp4").url);
var vid:iOSStageVideo = new iOSStageVideo( path , 1280 , 720 );
vid.addEventListener('videoDone' , videoStop);
var container:UIComponent = new UIComponent();
container.width = stage.stageWidth;
container.height = stage.stageHeight;
addElement( container );
container.addChild( vid );
}
private function videoStop(e:Event):void {
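// note: 'vid' and 'container' are local variables of playVideo(), so they are not visible here;
// promoting them to instance variables would let this handler stop and remove the video cleanly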
//vid.stopVideo();
//container.removeChild( vid );
//removeElement( container );
}
]]>
</fx:Script>
<fx:Declarations>
<!-- Place non-visual elements (e.g., services, value objects) here -->
</fx:Declarations>
<s:actionContent>
<s:Button click="backClick(event)" label="Back"/>
</s:actionContent>
<s:Button left="10" bottom="10" label="Play" alpha="1" click="playVideo(event)"/>
</s:View>
Here is the AS class I found online to help play the video (I really don't use much of it, and since it gives some errors when the video ends I will need to rewrite it; I commented out those parts).
package ios
{
import flash.display.Sprite;
import flash.display.StageAlign;
import flash.display.StageQuality;
import flash.display.StageScaleMode;
import flash.events.Event;
import flash.events.NetStatusEvent;
import flash.events.StageVideoAvailabilityEvent;
import flash.events.StageVideoEvent;
import flash.geom.Rectangle;
import flash.media.StageVideo;
import flash.media.StageVideoAvailability;
import flash.media.Video;
import flash.net.NetConnection;
import flash.net.NetStream;
[Bindable]
public class iOSStageVideo extends Sprite
{
private var videoPath:String;
private var videoWidth:Number;
private var videoHeight:Number;
private var _sv:StageVideo;
private var _vd:Video;
private var _obj:Object;
private var _ns:NetStream;
public function iOSStageVideo( path:String , w:Number , h:Number ){
videoPath = path;
videoWidth = w;
videoHeight = h;
addEventListener(Event.ADDED_TO_STAGE, onAddedToStage);
}
//stage is ready
private function onAddedToStage(e:Event):void{
stage.scaleMode = StageScaleMode.NO_SCALE;
stage.align = StageAlign.TOP_LEFT;
var nc:NetConnection = new NetConnection();
nc.connect(null);
_ns = new NetStream(nc);
_obj = new Object();
_ns.client = _obj;
_ns.bufferTime = 2;
_obj.onMetaData = MetaData;
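// note: stage.stageVideos can be empty when stage video is not available; waiting for
// StageVideoAvailabilityEvent.STAGE_VIDEO_AVAILABILITY (imported above but unused here) before attaching would be safer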
_sv = stage.stageVideos[0];
_sv.viewPort = new Rectangle(0, 0, videoWidth , videoHeight );
_sv.attachNetStream(_ns);
playVideo();
}
//video is ready, play it
//public, can be called externally
public function playVideo():void{
_ns.play( videoPath );
_ns.addEventListener(NetStatusEvent.NET_STATUS, videoStatus);
}
//required metadata for stagevideo, even if not used
private function MetaData(info:Object):void{ }
//get video status
private function videoStatus(e:NetStatusEvent):void{
switch(e.info.code){
case "NetStream.Play.StreamNotFound":
//do something
break;
case "NetStream.Play.Start":
//do something
break
case "NetStream.Play.Stop":
stopVideo();
break;
case "NetStream.Buffer.Empty":
//do something
break;
case "NetStream.Buffer.Full":
//do something
break;
case "NetStream.Buffer.Flush":
//do something
break;
}
}
//stop and clear the video
//public, can be called externally
public function stopVideo():void{
_ns.close();
_ns.dispose();
dispatchEvent( new Event('videoDone', true ) );
}
}
}
The video is located on the tablet in the local file system. Thank you for any help!