Rectangle 27 1

The popup window does not stay opened. So you should try playing the file in your background page. Here are two posts on this subject:

I do not think the issue comes from your audio source.

html5 - Chrome Extension and streaming

html5 google-chrome audio google-chrome-extension html5-audio
Rectangle 27 5

Utilize timeupdate event of <audio> element, which is fired three to four times per second, to perform precise animations during streaming of media by checking .currentTime of <audio> element. Where animations or transitions can be started or stopped up to several times per second.

fetch()
response.body.getReader()
ReadableStream
new Audio()
.src
objectURL
.read()
.then()
.mode
"sequence"
sourceBuffer
updateend

If fetch() response.body.getReader() is not available at browser, you can still use timeupdate or progress event of <audio> element to check .currentTime, start or stop animations or transitions at required second of streaming media playback.

canplay
<audio>
MediaSource

You can use an object with properties set to numbers corresponding to .currentTime of <audio> where animation should occur, and values set to css property of element which should be animated to perform precise animations.

javascript
<!DOCTYPE html>
<html xmlns="http://www.w3.org/1999/xhtml">    
<head>
  <meta charset="utf-8" />
  <title></title>
  <style>
    body {
      width: 90vw;
      height: 90vh;
      background: #000;
      transition: background 1s;
    }

    span {
      font-family: Georgia;
      font-size: 36px;
      opacity: 0;
    }
  </style>
</head>

<body>
  <audio controls></audio>
  <br>
  <span></span>
  <script type="text/javascript">
    window.onload = function() {
      var url = "/path/to/audio";
      // given 240 seconds total duration of audio 
      // 240/12 = 20
      // properties correspond to `<audio>` `.currentTime`,
      // values correspond to color to set at element
      var colors = {
        0: "red",
        20: "blue",
        40: "green",
        60: "yellow",
        80: "orange",
        100: "purple",
        120: "violet",
        140: "brown",
        160: "tan",
        180: "gold",
        200: "sienna",
        220: "skyblue"
      };
      var body = document.querySelector("body");
      var mediaSource = new MediaSource;
      var audio = document.querySelector("audio");
      var span = document.querySelector("span");
      var color = window.getComputedStyle(body)
                  .getPropertyValue("background-color");
      //console.log(mediaSource.readyState); // closed
      var mimecodec = "audio/mpeg";

      audio.oncanplay = function() {
        this.play();
      }

      audio.ontimeupdate = function() {         
        // 240/12 = 20
        var curr = Math.round(this.currentTime);

        if (colors.hasOwnProperty(curr)) {
          // set `color` to `colors[curr]`
          color = colors[curr]
        }
        // animate `<span>` every 60 seconds
        if (curr % 60 === 0 && span.innerHTML === "") {
          var t = curr / 60;
          // round the duration *after* converting to minutes;
          // `Math.round(this.duration) / 60` printed a fractional
          // minute count whenever duration was not a multiple of 60
          span.innerHTML = t + " minute" + (t === 1 ? "" : "s") 
                           + " of " + Math.round(this.duration / 60) 
                          + " minutes of audio";
          span.animate([{
              opacity: 0
            }, {
              opacity: 1
            }, {
              opacity: 0
            }], {
              duration: 2500,
              iterations: 1
            })
            .onfinish = function() {
              span.innerHTML = ""
            }
        }
        // change `background-color` of `body` every 20 seconds
        body.style.backgroundColor = color;
        console.log("current time:", curr
                   , "current background color:", color
                  , "duration:", this.duration);
      }
      // set `<audio>` `.src` to `mediaSource`
      audio.src = URL.createObjectURL(mediaSource);
      mediaSource.addEventListener("sourceopen", sourceOpen);

      function sourceOpen(event) {
        // if the media type is supported by `mediaSource`
        // fetch resource, begin stream read, 
        // append stream to `sourceBuffer`
        if (MediaSource.isTypeSupported(mimecodec)) {
          var sourceBuffer = mediaSource.addSourceBuffer(mimecodec);
          // set `sourceBuffer` `.mode` to `"sequence"`
          sourceBuffer.mode = "sequence";

          fetch(url)
          // return `ReadableStream` reader of `response`
          .then(response => response.body.getReader())
          .then(reader => {

            var processStream = (data) => {
              if (data.done) {
                  return;
              }
              // append chunk of stream to `sourceBuffer`
              sourceBuffer.appendBuffer(data.value);
            }
            // at `sourceBuffer` `updateend` call `reader.read()`,
            // to read next chunk of stream, append chunk to 
            // `sourceBuffer`
            sourceBuffer.addEventListener("updateend", function() {
              reader.read().then(processStream);
            });
            // start processing stream
            reader.read().then(processStream);
            // when `reader` is closed, 
            // read of stream is complete
            return reader.closed.then(() => {
              // signal end of stream to `mediaSource`;
              // `endOfStream()` throws `InvalidStateError` while
              // `sourceBuffer.updating` is `true`, so defer it to the
              // final `updateend` when an append is still in flight
              if (sourceBuffer.updating) {
                sourceBuffer.addEventListener("updateend", function onEnd() {
                  sourceBuffer.removeEventListener("updateend", onEnd);
                  mediaSource.endOfStream();
                });
              } else {
                mediaSource.endOfStream();
              }
              return mediaSource.readyState;
            })
          })
          // do stuff when `reader.closed`, `mediaSource` stream ended
          .then(msg => console.log(msg))
          // surface network / append errors instead of silently
          // leaving the stream stalled
          .catch(err => console.error(err));
        } 
        // if `mimecodec` is not supported by `MediaSource`  
        else {
          alert(mimecodec + " not supported");
        }
      };
    }
  </script>
</body>
</html>

javascript - HTML5 audio streaming: precisely measure latency? - Stack...

javascript html5 streaming html5-audio
Rectangle 27 5

Utilize timeupdate event of <audio> element, which is fired three to four times per second, to perform precise animations during streaming of media by checking .currentTime of <audio> element. Where animations or transitions can be started or stopped up to several times per second.

fetch()
response.body.getReader()
ReadableStream
new Audio()
.src
objectURL
.read()
.then()
.mode
"sequence"
sourceBuffer
updateend

If fetch() response.body.getReader() is not available at browser, you can still use timeupdate or progress event of <audio> element to check .currentTime, start or stop animations or transitions at required second of streaming media playback.

canplay
<audio>
MediaSource

You can use an object with properties set to numbers corresponding to .currentTime of <audio> where animation should occur, and values set to css property of element which should be animated to perform precise animations.

javascript
<!DOCTYPE html>
<html xmlns="http://www.w3.org/1999/xhtml">    
<head>
  <meta charset="utf-8" />
  <title></title>
  <style>
    body {
      width: 90vw;
      height: 90vh;
      background: #000;
      transition: background 1s;
    }

    span {
      font-family: Georgia;
      font-size: 36px;
      opacity: 0;
    }
  </style>
</head>

<body>
  <audio controls></audio>
  <br>
  <span></span>
  <script type="text/javascript">
    window.onload = function() {
      var url = "/path/to/audio";
      // given 240 seconds total duration of audio 
      // 240/12 = 20
      // properties correspond to `<audio>` `.currentTime`,
      // values correspond to color to set at element
      var colors = {
        0: "red",
        20: "blue",
        40: "green",
        60: "yellow",
        80: "orange",
        100: "purple",
        120: "violet",
        140: "brown",
        160: "tan",
        180: "gold",
        200: "sienna",
        220: "skyblue"
      };
      var body = document.querySelector("body");
      var mediaSource = new MediaSource;
      var audio = document.querySelector("audio");
      var span = document.querySelector("span");
      var color = window.getComputedStyle(body)
                  .getPropertyValue("background-color");
      //console.log(mediaSource.readyState); // closed
      var mimecodec = "audio/mpeg";

      audio.oncanplay = function() {
        this.play();
      }

      audio.ontimeupdate = function() {         
        // 240/12 = 20
        var curr = Math.round(this.currentTime);

        if (colors.hasOwnProperty(curr)) {
          // set `color` to `colors[curr]`
          color = colors[curr]
        }
        // animate `<span>` every 60 seconds
        if (curr % 60 === 0 && span.innerHTML === "") {
          var t = curr / 60;
          // round the duration *after* converting to minutes;
          // `Math.round(this.duration) / 60` printed a fractional
          // minute count whenever duration was not a multiple of 60
          span.innerHTML = t + " minute" + (t === 1 ? "" : "s") 
                           + " of " + Math.round(this.duration / 60) 
                          + " minutes of audio";
          span.animate([{
              opacity: 0
            }, {
              opacity: 1
            }, {
              opacity: 0
            }], {
              duration: 2500,
              iterations: 1
            })
            .onfinish = function() {
              span.innerHTML = ""
            }
        }
        // change `background-color` of `body` every 20 seconds
        body.style.backgroundColor = color;
        console.log("current time:", curr
                   , "current background color:", color
                  , "duration:", this.duration);
      }
      // set `<audio>` `.src` to `mediaSource`
      audio.src = URL.createObjectURL(mediaSource);
      mediaSource.addEventListener("sourceopen", sourceOpen);

      function sourceOpen(event) {
        // if the media type is supported by `mediaSource`
        // fetch resource, begin stream read, 
        // append stream to `sourceBuffer`
        if (MediaSource.isTypeSupported(mimecodec)) {
          var sourceBuffer = mediaSource.addSourceBuffer(mimecodec);
          // set `sourceBuffer` `.mode` to `"sequence"`
          sourceBuffer.mode = "sequence";

          fetch(url)
          // return `ReadableStream` reader of `response`
          .then(response => response.body.getReader())
          .then(reader => {

            var processStream = (data) => {
              if (data.done) {
                  return;
              }
              // append chunk of stream to `sourceBuffer`
              sourceBuffer.appendBuffer(data.value);
            }
            // at `sourceBuffer` `updateend` call `reader.read()`,
            // to read next chunk of stream, append chunk to 
            // `sourceBuffer`
            sourceBuffer.addEventListener("updateend", function() {
              reader.read().then(processStream);
            });
            // start processing stream
            reader.read().then(processStream);
            // when `reader` is closed, 
            // read of stream is complete
            return reader.closed.then(() => {
              // signal end of stream to `mediaSource`;
              // `endOfStream()` throws `InvalidStateError` while
              // `sourceBuffer.updating` is `true`, so defer it to the
              // final `updateend` when an append is still in flight
              if (sourceBuffer.updating) {
                sourceBuffer.addEventListener("updateend", function onEnd() {
                  sourceBuffer.removeEventListener("updateend", onEnd);
                  mediaSource.endOfStream();
                });
              } else {
                mediaSource.endOfStream();
              }
              return mediaSource.readyState;
            })
          })
          // do stuff when `reader.closed`, `mediaSource` stream ended
          .then(msg => console.log(msg))
          // surface network / append errors instead of silently
          // leaving the stream stalled
          .catch(err => console.error(err));
        } 
        // if `mimecodec` is not supported by `MediaSource`  
        else {
          alert(mimecodec + " not supported");
        }
      };
    }
  </script>
</body>
</html>

javascript - HTML5 audio streaming: precisely measure latency? - Stack...

javascript html5 streaming html5-audio
Rectangle 27 4

Returning a file stream is not the same as "streaming". Technically, the server is just pushing the video file in one go. The client is then just downloading the file the same as if they were literally downloading it. If the client (web browser) is capable of interpreting the video file and the video file is capable of being streamed (front-loads the headers), then, the browser may begin playing it before it fully completes, but this too is not really streaming. It's more akin to loading a progressive JPEG: the browser is proactively trying to display information as soon as possible before the full set of information is available. If the connection slows, the video will stop. There's no concept of buffering.

If you want to really stream a video, you need a true streaming server. There's a variety of options out there, but generally the way they work is by adapting the video file to the connection with the client: less or more bitrate. It's this interaction you won't have just pushing a file via MVC or even directly via IIS.

Regardless of whether you go with a true streaming solution or just rely on the browser playing the video as it downloads, you should still segregate this from your actual MVC site. Web servers have a finite number of requests they can handle, and they are designed to clear those requests as quick as possible (seconds or even milliseconds). They are not suited for sending large amounts of data in a single response that make take minutes or more. If a large enough number of requests comes through for the videos, you could end up deadlocking your web server and taking down your whole site. By virtue of being a different server, a streaming server would not pose this problem. Alternatively, you can host the files on a CDN.

c# - Video Streaming with HTML 5 and ASP.NET MVC 5 - Stack Overflow

c# asp.net-mvc html5 video streaming
Rectangle 27 43

The spirit of the question, I think, was not truly answered. No, you cannot use a video tag to play rtsp streams as of now. The other answer regarding the link to Chromium guy's "never" is a bit misleading as the linked thread / answer is not directly referring to Chrome playing rtsp via the video tag. Read the entire linked thread, especially the comments at the very bottom and links to other threads.

The real answer is this: No, you cannot just put a video tag on an html 5 page and play rtsp. You need to use a Javascript library of some sort (unless you want to get into playing things with flash and silverlight players) to play streaming video. {IMHO} At the rate the html 5 video discussion and implementation is going, the various vendors of proprietary video standards are not interested in helping this move forward so don't count of the promised ease of use of the video tag unless the browser makers take it upon themselves to somehow solve the problem...again, not likely.{/IMHO}

video - Streaming via RTSP or RTP in HTML5 - Stack Overflow

video html5 streaming rtsp rtp
Rectangle 27 8

There is a little trick I think about: allow access to the music through a one-time unique url:

// Page generation
// Generate unique token and put it into database
<audio controls="controls">
  <source src="song.ogg?token=UNIQUETOKEN" type="audio/ogg" />
</audio>

// On access
- Check for corresponding UNIQUETOKEN in database
- Remove token from database
- Send audio data

It won't prevent users from downloading your song through CURL (for instance), but they won't be able to view the page AND navigate the source to download the song again.

Also, it should be possible to download separate samples of your track and play them continuously with a custom player. These are just ideas, I'm not sure they would be efficient enough for what you request.

This idea is a good theory, but I'm not quite sure how this would be implemented in practice? Yes I could set up a database and get/set a token, but I don't understand how to grab the song using this url with a token as a query string using HTML.

In database, you'll have a table "tokens" with an id and a corresponding filename. You may have several ids for a single filename. On sever-side, your script will find UNIQUETOKEN, get the corresponding filename, delete the line in database, and output the song data. How to read and output a file content depends on your server-side language. In PHP, it would be: echo file_get_contents($filename).

Well the website will be hosted by a 3rd party and I don't have the ability to set any type of file permissions. So what would stop a user from navigating to the URL (without token) and still accessing the file?

Files won't be accessible directly, but only by your script. For instance, if your webdirectory is www/public_html/, put your files in www/files/. Your script www/public_html/song.ext will read the file ../files/file.ext but this one is not accessible directly.

The problem with that, is that I don't have the ability to make files read-only if I am hosting them on this 3rd party server directly. I guess I can set up some ASP.NET permissions on that folder...but then I would have to pass credentials through my javascript.

security - How to secure music when streaming through HTML5 audio tag ...

security html5 audio
Rectangle 27 1

We use Web Audio for streaming via Aurora.js using a protocol very similar to HTTP Live Streaming. We did this because we wanted the same streaming backend to serve iPhone, Android and the web.

It was all a very long and painful process that took over 6 months of effort, but now that it's all finished, it's all good.

Have a look at http://radioflote.com and feel free to shoot questions or clarifications regarding anything. Go ahead and disassemble the code if you want to. Not a problem.

We created apps for mobiles because we weren't sure if it was a good idea to tax the mobile processors with software decoding. But it works on newer Android phones, and while the iPhone didn't support HTML Audio for a long while, I heard it's working on the latest devices. I haven't confirmed this yet.

html5 - Does web based radio and audio streaming services use the Web ...

html5 audio audio-streaming webradio
Rectangle 27 4

I don't know of any internet radio services playing back their streams with the Web Audio API currently, but I wouldn't be surprised to find one. I've been working on one myself using Audiocog's excellent Aurora.js library, which enables codecs in-browser that wouldn't normally be available, by decoding the audio with JavaScript. However, for compatibility reasons as you have pointed out, this would be considered a bit experimental today.

Most internet radio stations use progressive HTTP streaming (SHOUTcast/Icecast style) which can be played back within an <audio> element or Flash. This works well but can be hard to get right, especially if you use SHOUTcast servers as they are not quite 100% compatible with HTTP, hurting browser support in some versions of Firefox and a lot of mobile browsers. I ended up writing my own server called AudioPump Server to get better browser and mobile browser support with HTTP progressive.

Depending on your Flash code and ActionScript version available, you might also have to deal with memory leaks in creative ways, since by default Flash will keep all of your stream data in memory indefinitely as it was never built to stream over HTTP. Many use RTMP with Flash (with Wowza or similar on the server), which Flash was built to stream with to get around this problem.

iOS supports HLS which is basically a collection of static files served by an HTTP server. The encoder writes a chunk of the stream to each file as the encoding occurs, and the client just downloads them and plays them back seamlessly. The benefit here is that the client can choose a bitrate to stream and, raising quality up and down as network conditions change. This also means that you can completely switch networks (say from WiFi to 3G) and still maintain the stream since chunks are downloaded independently and statelessly. Android "supports" HLS, but it is buggy. Safari is the only browser currently supporting HLS.

Compatibility detection is not something you need to solve yourself. There are many players, such as jPlayer and JW Player which wrangle HTML5 audio support detection, codec support detection, and provide a common API between playback for HTML5 audio and Flash. They also provide an optional UI if you want to get up and running quickly.

Finally, most stations do offer a link to allow you to play the stream in your own media player. This is done by linking to a playlist file (usually M3U or PLS) which is downloaded and often immediately opened (as configured by the user and their browser). The player software loads this playlist and then connects directly to the streaming server to begin playback. On Android, you simply link to the stream URL. It will detect the Content-Type response header, disconnect, and open its configured media player for playback. These days you have to hunt to find these direct links, but they are out there.

If you ever want to know what a station is using without digging around in their compiled and minified source code, simply use a tool like Fiddler or Wireshark and watch the traffic. You will find that it is very straightforward under the hood.

html5 - Does web based radio and audio streaming services use the Web ...

html5 audio audio-streaming webradio
Rectangle 27 6

It is not possible to use the RTMP protocol in HTML5, because the RTMP protocol is only used between the server and the flash player. So you can use the other streaming protocols for viewing the streaming videos in HTML5.

HTML5 live streaming - Stack Overflow

html5 video-streaming html5-video
Rectangle 27 4

You might want to try:

response.reset();
response.setStatus(206);
response.setHeader("Accept-Ranges", "bytes");
response.setHeader("Content-length", Integer.toString(length + 1));
response.setHeader("Content-range", "bytes " + start.toString() + "-" + end.toString() + "/" + Long.toString(f.size()));
response.setContentType(...);

And this type of output should only be done if the client specifically asked for a range. You can check by using:

String range = request.getHeader("range");

if range is not null, then you'll have to parse the range for the start and end byte requests. Note that you can have "0-" as a range In some cases, you'll see "0-1" as a request to see if your service knows how to handle range requests.

html5 - How to serve audio file for streaming from Grails with code 20...

html5 http grails audio-streaming http-response-codes
Rectangle 27 7

With VLC i'm able to transcode a live RTSP stream (mpeg4) to an HTTP stream in a OGG format (Vorbis/Theora). The quality is poor but the video work in Chrome 9. I have also tested with a trancoding in WEBM (VP8) but it's don't seem to work (VLC have the option but i don't know if it's really implemented for now..)

The first to have a doc on this should notify us ;)

"C:\Program Files\VideoLAN\VLC\vlc.exe" -I dummy screen:// :screen-fps=16.000000 :screen-caching=100 :sout=#transcode{vcodec=theo,vb=800,scale=1,width=600,height=480,acodec=mp3}:http{mux=ogg,dst=127.0.0.1:8080/desktop.ogg} :no-sout-rtp-sap :no-sout-standard-sap :ttl=1 :sout-keep
<video id="video" src="http://localhost:8080/desktop.ogg" autoplay="autoplay">

But the performance is unfortunately pretty poor and would be great if it could also be done with MP4 container. AFAIK more browsers have support for MP4 than for OGG.

my_ip:port
<video>
<video width="640"><source src="http://my_ip:port/test" type="video/ogg">HTML5 not supported</video>

video - Streaming via RTSP or RTP in HTML5 - Stack Overflow

video html5 streaming rtsp rtp
Rectangle 27 6

Live streaming in HTML5 is possible via the use of Media Source Extensions (MSE) - the relatively new W3C standard: https://www.w3.org/TR/media-source/ MSE is an an extension of HTML5 <video> tag; the javascript on webpage can fetch audio/video segments from the server and push them to MSE for playback. The fetching mechanism can be done via HTTP requests (MPEG-DASH) or via WebSockets. As of September 2016 all major browsers on all devices support MSE. iOS is the only exception.

For high latency (5+ seconds) HTML5 live video streaming you can consider MPEG-DASH implementations by video.js or Wowza streaming engine.

Actually, Windows7 with IE11 is also a very real exception to MSE ...

HTML5 live streaming - Stack Overflow

html5 video-streaming html5-video
Rectangle 27 1

There is no way for you to measure latency directly, but any AudioElement generates events like 'playing' if it just played (fired quite often), or 'stalled' if it stopped streaming, or 'waiting' if data is loading. So what you can do is manipulate your video based on these events.

So play while stalled or waiting is fired, then continue playing video if playing fired again.

But I advise you to check other events that might affect your flow (error, for example, would be important for you).

javascript - HTML5 audio streaming: precisely measure latency? - Stack...

javascript html5 streaming html5-audio
Rectangle 27 1

There is no way for you to measure latency directly, but any AudioElement generates events like 'playing' if it just played (fired quite often), or 'stalled' if it stopped streaming, or 'waiting' if data is loading. So what you can do is manipulate your video based on these events.

So play while stalled or waiting is fired, then continue playing video if playing fired again.

But I advise you to check other events that might affect your flow (error, for example, would be important for you).

javascript - HTML5 audio streaming: precisely measure latency? - Stack...

javascript html5 streaming html5-audio
Rectangle 27 2

For your use case, you should also take advantage of the fact that you know the sample rate of the PCM samples and you know how many sample you've read. This determines how long it will take to play out the buffer. Use that to figure out when to schedule the next buffer.

(But note that if the PCM sample rate is not the same as audioCtx.sampleRate, the data will be resampled, which might mess up your timing.

Thanks, this is what I did, rather than simply "play right now". I now have something which sounds way better!

html5 - Cracks in webaudio playback during streaming of raw audio data...

html5 html5-audio audio-streaming web-audio web-audio-api
Rectangle 27 2

For your use case, you should also take advantage of the fact that you know the sample rate of the PCM samples and you know how many sample you've read. This determines how long it will take to play out the buffer. Use that to figure out when to schedule the next buffer.

(But note that if the PCM sample rate is not the same as audioCtx.sampleRate, the data will be resampled, which might mess up your timing.

Thanks, this is what I did, rather than simply "play right now". I now have something which sounds way better!

html5 - Cracks in webaudio playback during streaming of raw audio data...

html5 html5-audio audio-streaming web-audio web-audio-api
Rectangle 27 3

This is a very common misconception. There is no live HTML5 video support (except for HLS on iOS and Mac Safari). You may be able to 'hack' it using a webm container, but I would not expect that to be universally supported. What you are looking for is included in the Media Source Extensions, where you can feed the fragments to the browser one at a time. but you will need to write some client side javascript.

There are solutions, but there is no support for live streaming. This is directly referring to my comment seen above. And webm is supported on major browsers, mostly in the latest stable versions.

I'd really prefer not to transcode from H.264 to webm and it shouldn't be necessary. Also as I have to support IE11 and Safari, MediaSource extensions won't help. But I think if I simulate a file stream on the server side (which works!) then it should work, but I'll have to simulate a file buffer on node.js.

As other suggested, I would seek a possibility to use WebRTC which is native unlike VLC or flash plugin. I know this technology is still hard to implement. Good luck.

I got this to work by updating to the latest version of FFMPEG as it appears there was corruption in the mp4 when using fragmented mode (necessary for MP4 live streaming so the client isn't waiting for the moov index file which will never come when live streaming). And my node.js code to redirect the FFMPEG stream directly to the browser now works.

Yes, works fine on IE11 (my preferred browser). I get jumpy response in Chrome.

node.js - Best approach to real time http streaming to HTML5 video cli...

html5 node.js ffmpeg streaming
Rectangle 27 1

The Icecast and Shoutcast servers themselves have internal buffers. I know the shoutcast one can be configured (look in the advanced directives in the docs).

html5 - Minimizing latency in streaming audio with html 5 - Stack Over...

html5 oggvorbis html5-audio icecast
Rectangle 27 6

Using negative values is currently not supported so you will have to load and reverse the buffers manually.

Note that this will require CORS enabled audio source (the one in the example isn't, so I couldn't set up a live demo). Here is one way of doing this:

  • Load the data via AJAX (this requires CORS enabled for the audio file)
  • Let the browser parse the buffer into an audio buffer
  • Get the channel buffer(s) (references)
  • Initialize the audio buffer and play

This will of course limit you some as you cannot use the Audio element anymore. You will have to support the features you want by adding controls and code for them manually.

// load audio as a raw array buffer:
// NOTE: `fetch` here is the XHR-based helper defined below in this
// snippet (signature `fetch(url, callback)`), not the standard
// Promise-returning `window.fetch()` — the local declaration shadows it
fetch("http://mathweirdo.com/bingo/audio/buzzer.mp3", process);

// then process the buffer using decoder:
// decode the raw ArrayBuffer, reverse every channel in place,
// and play the result through the Web Audio API
function process(file) {
  var actx = new (window.AudioContext || window.webkitAudioContext);
  actx.decodeAudioData(file, function(buffer) {

      var src = actx.createBufferSource();  // enable using loaded data as source

      // reverse the samples of every channel so playback runs backwards
      reverseAudioChannels(buffer);

      // play
      src.buffer = buffer;
      src.connect(actx.destination);
      if (!src.start) src.start = src.noteOn;  // legacy Web Audio API fallback
      src.start(0);
    },
    function() {alert("Could not decode audio!")}
  )
}

// reverse each channel of `buffer` in place;
// `buffer` only needs `numberOfChannels` and `getChannelData(n)`
function reverseAudioChannels(buffer) {
  var channel, tmp, i, t = 0, last, half;
  while (t < buffer.numberOfChannels) {     // iterate each channel
    channel = buffer.getChannelData(t++);   // reference to channel data, not a copy
    last = channel.length - 1;              // index of the final sample
    // swap up to the true midpoint: the original computed
    // `(length - 1) >>> 1`, which skipped the middle swap pair
    // for even-length channels (e.g. [1,2,3,4] -> [4,2,3,1])
    half = channel.length >>> 1;
    for (i = 0; i < half; i++) {            // loop to center
      tmp = channel[last - i];              // from end -> tmp
      channel[last - i] = channel[i];       // end = from beginning
      channel[i] = tmp;                     // tmp -> beginning
    }
  }
}

// ajax loader: GETs `url` as an ArrayBuffer and hands it to `callback`
// (this local declaration shadows the standard `window.fetch()`)
function fetch(url, callback) {
  var request = new XMLHttpRequest();
  try {
    request.open("GET", url);
    request.responseType = "arraybuffer";
    request.onerror = function() {
      alert("Network error");
    };
    request.onload = function() {
      if (request.status !== 200) {
        alert(request.statusText);
        return;
      }
      callback(request.response);
    };
    request.send();
  } catch (err) {
    alert(err.message);
  }
}

javascript - Playing audio backwards with HTMLMediaElement - Stack Ove...

javascript html5 audio streaming html5-audio
Rectangle 27 5

No, you can't reuse an AudioBufferSourceNode, and you cant push onto an AudioBuffer. Their lengths are immutable.

This article (http://www.html5rocks.com/en/tutorials/audio/scheduling/) has some good information about scheduling with the Web Audio API. But you're on the right track.

javascript - Web Audio API: How to play a stream of MP3 chunks - Stack...

javascript html5 audio streaming web-audio