I'm working on a concept to show thumbnails of a large number of images from the local drive.
With the HTML5 File API it seems quite possible, but when I try to load a big number of images the browser's memory usage goes through the roof and it collapses.
I think the problem is that the FileReader doesn't release the memory after a file has been read.
Originally I had a new instance of FileReader and a simple loop to iterate through the images.
To solve this memory problem I replaced that with a single FileReader, but it did not really help.
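For reference, the original version looked roughly like this (a simplified sketch from memory, not the exact code; showImagesOld is just an illustrative name):

function showImagesOld() {
    var files = document.getElementById("uploadInput").files;
    for (var i = 0; i < files.length; i++) {
        (function (file) {
            var reader = new FileReader();            // a new FileReader for every file
            var canvas = document.createElement("canvas");
            preview.appendChild(canvas);              // preview is the container element, as in the code below
            reader.onload = function (event) {
                var img = new Image();
                img.onload = function () {
                    canvas.width = img.width / 100;
                    canvas.height = img.height / 100;
                    canvas.getContext("2d").drawImage(img, 0, 0, canvas.width, canvas.height);
                };
                img.src = event.target.result;        // base64 data URL
            };
            reader.readAsDataURL(file);
        })(files[i]);
    }
}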
Here is the relevant code block of the current single-FileReader version:
<script>
    var areader = new FileReader();
    var counter = 0;

    function loadImage(file) {
        var canvas = document.createElement("canvas");
        areader.onload = function (event) {
            var img = new Image();
            img.onload = function () {
                canvas.width = img.width / 100;
                canvas.height = img.height / 100;
                var ctx = canvas.getContext("2d");
                ctx.drawImage(img, 0, 0, img.width / 100, img.height / 100);
                var browse = document.getElementById("uploadInput");
                if (browse.files.length > counter) {
                    counter++;
                    areader.result = null; // I don't think this makes any difference
                    loadImage(browse.files[counter]);
                }
            };
            img.src = event.target.result;
        };
        areader.readAsDataURL(file);
        preview.appendChild(canvas);
    }

    function showImages() {
        loadImage(document.getElementById("uploadInput").files[0]);
    }
</script>
If anybody has come across this problem, or I'm doing something very stupid, could you reply?
Thanks,
Tamas
asked Nov 18, 2012 at 17:28 by sneci

Comments:
- How many images are we talking about here? – Pointy, Nov 18, 2012 at 17:37
- And how big are the images, too? Is this a problem in all browsers or just one? – Sean Redmond, Nov 18, 2012 at 17:50
1 Answer
It's not the FileReader: you are using the entire image's data, base64-encoded, as the src
property of the image, which actually takes about 133% of the image's size in memory.
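A quick way to see that overhead in the console (just a sketch; it assumes an image has already been selected in the uploadInput element):

var file = document.getElementById("uploadInput").files[0];
var reader = new FileReader();
reader.onload = function (event) {
    // base64 encodes every 3 bytes as 4 characters, plus the "data:<type>;base64," prefix,
    // so the resulting data URL string is roughly 4/3 of the file's size
    console.log("file size:", file.size, "data URL length:", event.target.result.length);
};
reader.readAsDataURL(file);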
You should use Blob URLs instead:
var URL = window.URL || window.webkitURL;

function loadImage(file) {
    var canvas = document.createElement("canvas"),
        img = new Image();
    img.onload = function () {
        canvas.width = img.width / 100;
        canvas.height = img.height / 100;
        var ctx = canvas.getContext("2d");
        ctx.drawImage(img, 0, 0, img.width / 100, img.height / 100);
        URL.revokeObjectURL(img.src);
        img = null;
        var browse = document.getElementById("uploadInput");
        if (browse.files.length > counter) {
            counter++;
            loadImage(browse.files[counter]);
        }
    };
    img.src = URL.createObjectURL(file);
    preview.appendChild(canvas);
}
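The wiring stays the same as in the question; for completeness, a minimal sketch (it assumes counter is a global and that preview refers to a container element with the id "preview", which isn't shown in the question):

var counter = 0;
var preview = document.getElementById("preview");

function showImages() {
    counter = 0; // start again from the first file on every click
    loadImage(document.getElementById("uploadInput").files[0]);
}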