1
votes

I have a client .ipa file that i test on my iOS device, now i have successfully got the app to run on iPhone by using Adobe Air for IOS and for this i am using Adobe Flash CC.

When i launch the app on the iPhone the video connection does not connect to the red5 streaming server and therefore can not broadcast the stream from camera to server.

I have used stagevideo. When i launch the app on a local pc with web cam and launch another app on the iOS to receive the stream on iPhone i can see the live broadcast from my pc webcam.

But i want to test the iPhone camera and send receive live streaming from red5 server.

How can i achieve this. I have placed the current code below.

                import flash.display.Sprite;

  import flash.display.MovieClip;

import flash.events.NetStatusEvent;

import flash.net.NetConnection;

 import flash.net.NetStream;

 import flash.media.Camera;

 import flash.media.Microphone;

 import flash.media.Video;

 import flash.net.Responder;

 import flash.media.StageVideo;
import flash.events.StageVideoAvailabilityEvent;
import flash.events.StageVideoEvent;
 import flash.geom.Rectangle;

     var nc:NetConnection;
     var good:Boolean;

     var netOut:NetStream;
     var netIn:NetStream;
     var cam:Camera;
     var mic:Microphone;
     var responder:Responder;
     var r:Responder;
     var vidOut:Video;
     var vidIn:Video;
     var outStream:String;
     var inStream:String;

     var sv:StageVideo;

        stage.addEventListener(StageVideoAvailabilityEvent.STAGE_VIDEO_AVAILABILITY, onAvail);
    var sva:Boolean;

    function onAvail(e:StageVideoAvailabilityEvent):void{
        sva = (e.availability == StageVideoAvailability.AVAILABLE);
        trace(sva);
        var rtmpNow:String="rtmp://192.168.1.7/test1";

         nc=new NetConnection;

         nc.client = this;

         nc.connect(rtmpNow,"trik");

         nc.addEventListener(NetStatusEvent.NET_STATUS,getStream);



    }


    function onRender(e:StageVideoEvent):void{
        sv.viewPort = new Rectangle(0,0, 240, 180);

    }

     function getStream(e:NetStatusEvent):void
    {
         good=e.info.code == "NetConnection.Connect.Success";
         if(good)
         {
        trace("hello");
              // Here we call functions in our Java Application


         setCam();
         //setMic();
         //Play streamed video
         netIn = new NetStream(nc);

        if(sva){


         //Publish local video
         netOut=new NetStream(nc);
         //netOut.attachAudio(mic);
         netOut.attachCamera(cam);
         netOut.publish("tester", "live");
        sv = stage.stageVideos[0];

        sv.addEventListener(StageVideoEvent.RENDER_STATE, onRender);
            sv.attachNetStream(netIn);

            netIn.play("tester");

        }else{
         setVid();

         vidIn.attachNetStream(netIn);

         netIn.play("tester");
        }






         }
    }


     function streamNow(streamSelect:Object):void
    {
        trace("hello");


    }

     function setCam():void
    {
         cam=Camera.getCamera();
         cam.setMode(240,180,15);
         cam.setQuality(0,85);
    }

     function setMic():void
    {
        trace("hello");
         mic=Microphone.getMicrophone();
        trace("hello");
         mic.rate =11;
        trace("hello");
         //mic.setSilenceLevel(12,2000);
        trace("hello");
    }

     function setVid():void
    {
        trace("vid");

         vidIn=new Video(240,180);
         addChild(vidIn);
         vidIn.x=0;
         vidIn.y=0;
    }
1

1 Answers

0
votes

Your code mostly looks fine, but I would separate the ns.publish and ns.play parts. IMHO you shouldn't try to play until the publish is successful. Also if you're not just testing the roundtrip to the server, I would just attach the camera to the StageVideo; if that's allowed in iOS.