[ https://issues.apache.org/jira/browse/FLEX-33410?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=13583147#comment-13583147 ]
Gaius Coffey commented on FLEX-33410: ------------------------------------- Sorry, I have found a better place to post this. I have created a bug report here: https://bugbase.adobe.com/index.cfm?event=bug&id=3504086 G > NetStream jumps when playing on Android devices > ----------------------------------------------- > > Key: FLEX-33410 > URL: https://issues.apache.org/jira/browse/FLEX-33410 > Project: Apache Flex > Issue Type: Bug > Components: Spark: VideoPlayer > Environment: Android mobile > Reporter: Gaius Coffey > Priority: Blocker > Labels: netstream, osmf, video > > When playing videos using OSMF on Android devices, NetStream will skip the > final few seconds. This makes it impossible to use video commercially. > Steps to reproduce: > 1. Create a Mobile project similar to the below > 2. Run it on an Android device and watch the traces as the video gets to the > end > Replicated on all of Motorola Xoom, Nexus 7, HTC Desire... > package > { > import flash.display.Sprite; > import flash.display.StageAlign; > import flash.display.StageScaleMode; > import flash.events.Event; > import flash.events.NetDataEvent; > import flash.events.NetStatusEvent; > import flash.net.NetStream; > import flash.text.TextField; > import flash.utils.Dictionary; > import flash.utils.getTimer; > > import org.osmf.elements.ProxyElement; > import org.osmf.elements.SerialElement; > import org.osmf.elements.VideoElement; > import org.osmf.events.MediaErrorEvent; > import org.osmf.events.MediaPlayerStateChangeEvent; > import org.osmf.media.MediaElement; > import org.osmf.media.MediaPlayerSprite; > import org.osmf.media.URLResource; > import org.osmf.net.NetStreamLoadTrait; > import org.osmf.net.NetStreamSwitchManagerBase; > import org.osmf.traits.MediaTraitType; > import org.osmf.traits.TimeTrait; > [SWF(backgroundColor="#272727")] > public class DebugAdvert extends Sprite > { > public function DebugAdvert() > { > super(); > > // support autoOrients > stage.align = StageAlign.TOP_LEFT; > 
stage.scaleMode = StageScaleMode.NO_SCALE; > stage.addEventListener(Event.RESIZE,doResize); > > // Textfield for logging > tf = new TextField; > tf.backgroundColor = 0xffffff; > tf.multiline = true; > tf.background = true; > addChild(tf); > > // MediaPlayerSprite for video display > mp = new MediaPlayerSprite(); > addChild(mp); > > // Position everything for the first time. > doResize(null); > > // Add OEF listener to update traces > addEventListener(Event.ENTER_FRAME,everyFrame); > > // Start the process going by adding a video > var ur:URLResource = new > URLResource(TEST_VIDEO_CONTENT); > var me:MediaElement = > mp.mediaFactory.createMediaElement(ur); > mp.media = me; > } > public const TEST_VIDEO_CONTENT:String = > "videos/TOYOTA_YARIS_MORE_VER2_30_TV.mp4"; > /** > * The threshold for a step between frames that will be logged > as an error. > * This should ideally be the same as frame interval, but will > * vary with device performance. However, even allowing for > that, > * it should NEVER be as high as half a second, let alone the > * three or more seconds seen on many Android devices. > */ > public const ERROR_THRESHOLD:Number = 0.5; > /** > * Number of log records to maintain for display in the > textfield > */ > public const NUM_ERROR_RECORDS:uint = 10; > /** > * On resize, keep the textfield visible and media player sized. > */ > protected function doResize(event:Event):void { > var w:Number = > Math.min(stage.fullScreenWidth,stage.fullScreenHeight); > var h:Number = w; > if(stage.width>stage.height) { > mp.width = w; > mp.height = h; > tf.x = w; > tf.y = 0; > } else { > mp.width = w; > mp.height = h; > tf.x = 0; > tf.y = h; > } > tf.height = stage.fullScreenHeight-tf.y; > tf.width = stage.fullScreenWidth-tf.x; > } > // Ref to media player > protected var mp:MediaPlayerSprite; > // Ref to tf for logging. 
> protected var tf:TextField; > /** > * Ensure that, whatever the media we are playing, we can find > * the relevant VideoElement to locate NetStream > */ > protected function > recurseForProxies(element:MediaElement):MediaElement { > // if(element is RTEParallelAdTagElement) return (element > as RTEParallelAdTagElement).displayElement; > if(element is ProxyElement) return > recurseForProxies((element as ProxyElement).proxiedElement); > if(element is SerialElement) return > recurseForProxies((element as SerialElement).currentChild); > return element; > } > /** > * On every frame, compare position to previous and log any > jumps that are > * exceptional. > */ > protected function everyFrame(event:Event):void { > // Find underlying media element. Return if empty. > var proxiedElement:MediaElement = > recurseForProxies(mp.media); > if(!proxiedElement) return; > // Check it has a time trait. Return if not. > var tt:TimeTrait = > proxiedElement.getTrait(MediaTraitType.TIME) as TimeTrait; > if(!tt) return; > // Return if tt is not yet inited. > if(isNaN(tt.duration)) return; > // Store current times for reference. > var newTimes:Object = > {currentTime:tt.currentTime,duration:tt.duration,percents:0}; > // Show trace only if there is a previous record to > compare against. 
> var showTrace:Boolean = false; > // If last times is not set, then set it to current > record and initialise for first trace of this media > if(!lastTimes) { > showTrace = true; > lastTimes = newTimes; > } > // If time stamps are different to previous, then we > have a change, so ensure showTrace is true > if(tt.currentTime!=lastTimes.currentTime || > tt.duration!=lastTimes.duration) showTrace = true; > // If show trace, then generate a log record > if(showTrace) { > // Gap between current and previous trait values > var tGap:Number = > tt.currentTime-lastTimes.currentTime; > var dGap:Number = > tt.duration-lastTimes.duration; > > var remaining:Number = > tt.duration-tt.currentTime; > var stepTime:Number = > tt.currentTime-lastTimes.currentTime; > var gap:Number = 0; > var nstime:Number = -1; > if(proxiedElement is VideoElement) { > var ve:VideoElement = proxiedElement as > VideoElement; > var nslt:NetStreamLoadTrait = > ve.getTrait(MediaTraitType.LOAD) as NetStreamLoadTrait; > var ns:NetStream = nslt ? > nslt.netStream : null; > if(ns) { > if(!listenedNs[ns]) { > // Clear database and > add listeners if found > for each(var > nss:NetStream in listenedNs) { > > nss.removeEventListener(NetStatusEvent.NET_STATUS,traceNetStatus); > > nss.removeEventListener(NetDataEvent.MEDIA_TYPE_DATA,traceNetData); > delete > listenedNs[nss]; > } > > ns.addEventListener(NetStatusEvent.NET_STATUS,traceNetStatus,false,1,false); > > ns.addEventListener(NetDataEvent.MEDIA_TYPE_DATA,traceNetData); > > listenedNs[ns] = ns; > } > nstime = ns.time; > } > } > // Check for an error that we need to report. 
> var errorStr:String = ""; > if(stepTime>ERROR_THRESHOLD) errorStr = > "\n[ERROR NETSTREAM HAS JUMPED BY "+stepTime+" SECONDS FOR NO ADEQUATELY > EXPLAINED REASON.]"; > // Build the log record > var log:String = "[trait > d:\t"+tt.duration.toFixed(3)+"\tt:\t"+tt.currentTime.toFixed(3)+"\t]\t[NetStream > > t:\t"+nstime.toFixed(3)+"\t]\t[Step:\t"+stepTime.toFixed(3)+"\tremaining:\t"+remaining.toFixed(3)+"]"; > > if(previousNetStatus||previousNetData||errorStr) log += > "\n"+previousNetStatus+"\t"+previousNetData+errorStr; > // Log it and update text field > registerLog(log,errorStr!=""); > updateTf(); > previousNetData = previousNetStatus = ""; > // Store current times for comparison later. > lastTimes = newTimes; > } > } > /** > * Listens for NetDataEvent and adds a trace value when it > occurs. > */ > protected function traceNetData(event:NetDataEvent):void { > var ns:NetStream = event.target as NetStream; > var addStr:String = event.info.handler+" @ > "+getTimer()+" ms"; > addStr += JSON.stringify(event.info.args); > if(previousNetData) previousNetData += " | "; > previousNetData += addStr; > } > protected var previousNetData:String = ""; > /** > * Listens for NetStatusEvent and adds a trace value when it > occurs. > */ > protected function traceNetStatus(event:NetStatusEvent):void { > var addStr:String = event.info.code+" @ "+getTimer()+" > ms"; > if(previousNetStatus) previousNetStatus = > previousNetStatus+","+addStr; > else previousNetStatus = addStr; > } > protected var previousNetStatus:String = ""; > /** > * List of NetStreams that we are monitoring. > */ > protected var listenedNs:Dictionary = new Dictionary; > /** > * Previous time stamps for comparison. > */ > protected var lastTimes:Object; > /** > * Display log records with errors at the top. > */ > protected function updateTf():void { > trace("latestResultsStr: "+logs[0]); > tf.text = > errorLogs.join("\n")+"\n---\n"+logs.join("\n"); > } > /** > * List of last NUM_ERROR_RECORDS records. 
*/ > protected var logs:Vector.<String> = new Vector.<String>; > /** > * List of error records. > */ > protected var errorLogs:Vector.<String> = new Vector.<String>; > protected function registerLog(s:String,isError:Boolean):void { > if(isError) { > errorLogs.unshift("ERROR "+s); > } > logs.unshift(s); > logs.length = NUM_ERROR_RECORDS; > } > } > } -- This message is automatically generated by JIRA. If you think it was sent incorrectly, please contact your JIRA administrators. For more information on JIRA, see: http://www.atlassian.com/software/jira