It plays fine in Firefox, but not in Chrome.
Google Chrome version: 61.0.3163.100 (Official Build) (64-bit)
Mozilla Firefox version: 56.0.2 (64-bit)
My video is a fragmented MP4 streamed over WebSocket to a client HTML page, where it is fed into MSE. The video codec is H.264 Main Profile. The video info has been checked with ffprobe and other inspectors to make sure the data integrity is OK. The following flags are used when muxing the fMP4:
"empty_moov+default_base_moof+frag_keyframe"
I also double-checked that the first fragment is the so-called "initialization segment", 24 bytes in size. As I said, Firefox playback is fine.
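(For context, that check can be done on the client along these lines; a rough sketch only, the function name is just illustrative, and it is called on the first WebSocket message:)
function describeInitSegment(arrayBuffer) {
    var v = new DataView(arrayBuffer);
    // Top-level MP4 boxes start with a 32-bit big-endian size followed by a 4-character type;
    // an initialization segment should begin with an 'ftyp' box followed by a 'moov' box.
    var size = v.getUint32(0);
    var type = String.fromCharCode(v.getUint8(4), v.getUint8(5), v.getUint8(6), v.getUint8(7));
    console.log('first box: ' + type + ', ' + size + ' bytes (segment total: ' + arrayBuffer.byteLength + ' bytes)');
}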
Here is the client code (mostly borrowed from here):
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>MSE Demo</title>
</head>
<body>
<h1>MSE Demo</h1>
<div>
<video id="video1" controls width="80%" autoplay="true"> </video>
</div>
<script type="text/javascript">
(function () {
var mime = 'video/mp4; codecs="avc1.4D401E"';
if (!MediaSource.isTypeSupported(mime)) {
document.querySelector('h1').append(' - Unsupported mime type :(');
return;
}
var buffer;
var websocket;
var buffer_size = 4 * 1024 * 1024;
var buffer_index = 0;
var frag_mp4_buffer = new Uint8Array(buffer_size);
var video = document.querySelector('video');
var mediaSource = new MediaSource();
mediaSource.addEventListener('sourceended', function (e) { console.log('sourceended: ' + mediaSource.readyState); });
mediaSource.addEventListener('sourceclose', function (e) { console.log('sourceclose: ' + mediaSource.readyState); });
mediaSource.addEventListener('error', function (e) { console.log('error: ' + mediaSource.readyState); });
video.src = window.URL.createObjectURL(mediaSource);
video.crossOrigin = 'anonymous';
mediaSource.addEventListener('sourceopen', function (e) {
console.log('sourceopen: ' + mediaSource.readyState);
//doesn't help:
// var playPromise = video.play();
// In browsers that don’t yet support this functionality,
// playPromise won’t be defined.
/*
if (playPromise !== undefined) {
playPromise.then(function () {
// Automatic playback started!
}).catch(function (error) {
// Automatic playback failed.
// Show a UI element to let the user manually start playback.
});
}
*/
buffer = mediaSource.addSourceBuffer(mime);
buffer.addEventListener('updateend', function (e) {
if (video.duration && !video.currentTime) {
video.currentTime = video.duration;
}
});
websocket = new WebSocket('ws://' + document.location.hostname + ':8080');
websocket.binaryType = 'arraybuffer';
websocket.addEventListener('message', function (e) {
var data = new Uint8Array(e.data);
console.log("got packet! size:" + data.length);
if (data.length) {
if ((buffer_index + data.length) <= buffer_size) {
frag_mp4_buffer.set(data, buffer_index);
buffer_index = buffer_index + data.length;
if (!buffer.updating && mediaSource.readyState == 'open')
{
var appended = frag_mp4_buffer.slice(0, buffer_index);
buffer.appendBuffer(appended);
frag_mp4_buffer.fill(0);
buffer_index = 0;
}
}
}
}, false);
}, false);
})();
</script>
</body>
</html>
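For reference, the message handler above only flushes the local buffer when the SourceBuffer isn't updating; the more common MSE pattern is to queue each incoming message and drain the queue on updateend. A minimal sketch of that variant, meant to sit inside the sourceopen handler and reuse the same buffer/mediaSource/websocket variables (the queue and appendNext names are just illustrative, and I'm not claiming this is what breaks Chrome):
var queue = [];
function appendNext() {
    // Append the next queued segment only when the SourceBuffer is idle and the MediaSource is open.
    if (queue.length && !buffer.updating && mediaSource.readyState === 'open') {
        buffer.appendBuffer(queue.shift());
    }
}
buffer.addEventListener('updateend', appendNext);
websocket.addEventListener('message', function (e) {
    queue.push(new Uint8Array(e.data));
    appendNext();
});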
Another important piece of info: as you can see, I commented out the video.play()
call. That's actually the only place that throws an error when the app starts:
Uncaught (in promise) DOMException: Failed to load because no supported source was found
I tried the following solution from here:
var playPromise = video.play();
if (playPromise !== undefined) {
playPromise.then(function () {
// Automatic playback started!
}).catch(function (error) {
// Automatic playback failed.
$(document).on('click', '#video1', function (e) {
var video = $(this).get(0);
if (video.paused === false) {
video.pause();
} else {
video.play();
}
return false;
});
});
}
But it changed nothing. The video area is always white.
3 Answers
I just ran into a similar issue where I could only get my fragmented mp4 to play in Firefox, but not in Chrome, using this MSE example. I was delivering a complete file first instead of sending it via WebSocket, because I wanted to make sure that my fragmented mp4 was formatted properly. The issue I found was that if my ffmpeg command included -an to remove the audio, then my file would not play in Chrome, but still worked in Firefox. Using no audio flag, or specifically using -c:a libfdk_aac, allowed my mp4 to play in both Chrome and Firefox. Also, you should only need -movflags +dash instead of all of those other movflags. For reference, in the snippets below I am using an RTSP feed from an IP camera that is encoded with h264 video, hence the -c:v copy.
Tested on Mac; worked in Firefox AND Safari, but NOT Chrome:
ffmpeg -i input_source -an -c:v copy -f mp4 -movflags +dash dash.mp4
Tested on Mac; worked in Firefox AND Safari AND Chrome:
ffmpeg -i input_source -c:v copy -f mp4 -movflags +dash dash.mp4
ffmpeg -i input_source -c:a libfdk_aac -c:v copy -f mp4 -movflags +dash dash.mp4
*Edit: I just found out more about why the audio encoding was relevant to playback in Chrome. I had the mime type/codec wrong, and Chrome was not as forgiving as Firefox and Safari. I had
var mimeCodec = 'video/mp4; codecs="avc1.42E01E, mp4a.40.2"';
and changed it to
var mimeCodec = 'video/mp4; codecs="avc1.42E01E"';
and my mp4 that was encoded without audio played in Chrome, as well as the other browsers. Perhaps you have audio but did not include the audio portion of the codec string? Or maybe your video codec string is not the right one for your video? It is hard to say without seeing the full ffmpeg command that was used to create the file.*
2nd edit: I made a little project to test live streaming mp4 into Media Source Extensions using ffmpeg, nodejs, express, and socket.io. It's a little rough around the edges, but it mostly works.
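To put it concretely: the codecs string handed to addSourceBuffer has to match the tracks actually present in the init segment. A rough sketch of picking it (hasAudioTrack is an illustrative flag you would set based on how the file was muxed, it is not detected automatically, and mediaSource is assumed to already be open as in the question's code):
var hasAudioTrack = false; // true if the file was muxed with audio (e.g. -c:a libfdk_aac), false with -an
var mimeCodec = hasAudioTrack
    ? 'video/mp4; codecs="avc1.42E01E, mp4a.40.2"'
    : 'video/mp4; codecs="avc1.42E01E"';
if (!MediaSource.isTypeSupported(mimeCodec)) {
    console.log('Unsupported mime/codec: ' + mimeCodec);
}
var sourceBuffer = mediaSource.addSourceBuffer(mimeCodec);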
This works for me on Firefox and Chrome:
ffmpeg -i input.any \
-f mp4 \
-movflags faststart+separate_moof+empty_moov+default_base_moof \
-acodec aac -b:a 256000 \
-frag_duration 500K \
output.mp4
which should also work with a SourceBuffer from the Media Source Extensions API. The MIME type you're looking for here is audio/mp4; codecs="mp4a.40.2".
At this point, however, I am not sure how to seek inside this file (if that's even possible), e.g. if you don't want to send the entire file to a client but instead start from a particular timestamp and jump directly to a [moof][mdat] pair to start loading from.
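If someone wants to experiment with that, one approach would be to index the moof boxes yourself; a rough sketch (plain DataView parsing of top-level ISO BMFF boxes; the function name is illustrative, and 64-bit 'largesize' boxes are not handled):
function findMoofOffsets(arrayBuffer) {
    var view = new DataView(arrayBuffer);
    var offsets = [];
    var offset = 0;
    while (offset + 8 <= view.byteLength) {
        var size = view.getUint32(offset); // 32-bit big-endian box size
        var type = String.fromCharCode(
            view.getUint8(offset + 4), view.getUint8(offset + 5),
            view.getUint8(offset + 6), view.getUint8(offset + 7));
        if (type === 'moof') {
            offsets.push(offset);
        }
        if (size < 8) { break; } // malformed or 64-bit size; stop rather than loop forever
        offset += size;
    }
    return offsets;
}
// Append the init segment (ftyp + moov) first, then any [moof][mdat] pair starting at one of these offsets.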
Chrome does not like it when videos don't have audio.
If you're generating an HLS playlist using m4s segments (which are in essence chunks of a fragmented mp4), then you'll also need to make sure that there is audio, or it will simply not play.
This one should work for all browsers:
ffmpeg -y -i SOURCE.mp4 -f lavfi -i aevalsrc=0 -af apad -shortest -c:v copy -movflags empty_moov+default_base_moof+frag_keyframe+omit_tfhd_offset -write_tmcd off FRAGMENTED_WITH_PADDED.mp4