我正在使用JavaScript为唯一文件值生成File HASH VALUE.请检查下面的哈希生成机制代码,该机制运行良好.
<script type="text/javascript">
// Reference: https://code.google.com/p/crypto-js/#MD5
// Compute an MD5 hash for each file chosen in the file <input>.
// NOTE(review): this reads the ENTIRE file into memory at once
// (readAsBinaryString), which is why files beyond ~30 MB crash the
// browser tab — the chunked/worker solution further down fixes this.
function handleFileSelect(evt)
{
var files = evt.target.files; // FileList object from the file <input>
// Hash every selected file in turn.
for (var i = 0, f; f = files[i]; i++)
{
var reader = new FileReader();
// Closure (IIFE) to capture the current file — avoids the classic
// loop-variable-shared-by-all-callbacks bug.
reader.onload = (function(theFile)
{
return function(e)
{
var span = document.createElement('span'); // unused leftover from the sample this was copied from
var test = e.target.result; // the WHOLE file content as a binary string
//var hash = hex_md5(test);
var hash = CryptoJS.MD5(test); // one big synchronous hash call — blocks the UI thread
var elem = document.getElementById("hashValue");
elem.value = hash; // NOTE(review): each file overwrites the previous hash in the same field
};
})(f);
// Read the whole file as a binary string (deprecated API; this is the memory hog).
reader.readAsBinaryString(f);
}
}
// Recompute hashes whenever the user picks new files.
document.getElementById('videoupload').addEventListener('change', handleFileSelect, false);
</script>
但是,在为大型文件生成HASH VALUE时遇到问题,如在客户端浏览器崩溃时.
文件在30MB以内时哈希计算运行良好,但如果我尝试上传更大的文件,浏览器就会崩溃.
我的问题是:
Can I generate the hash value for part of the file, rather than reading the entire large file and crashing? If yes, how can I do that with
‘FileReader’? Can I specify an amount of bytes, such as the first 2000 characters of a file, to generate the hash value instead of hashing the whole large file?
我希望上面两个解决方案适用于大小文件.还有其他选择吗?
解决方法:
- Can I generate the hash value for part of the file, rather than reading the entire large file and crashing? If yes, how can I do that with ‘FileReader’?
是的,你可以这样做,它被称为Progressive Hashing.
// Progressive (incremental) hashing: the MD5 state accepts its input
// piecewise via update(); finalize() then yields the same digest as
// hashing the concatenation of all parts in one call.
var md5 = CryptoJS.algo.MD5.create();
["file part 1", "file part 2", "file part 3"].forEach(function (part) {
    md5.update(part);
});
var hash = md5.finalize();
- Can I specify an amount of bytes, such as the first 2000 characters of a file, to generate the hash value instead of hashing the whole large file?
关于如何使用File.slice将切片后的文件传递给FileReader,有一篇HTML5Rocks article:
// Hash only a slice of the file: cut [startingByte, endingByte) out of the
// File (a Blob) and hand just that piece to the reader — only the slice is
// ever loaded into memory.
var blob = file.slice(startingByte, endingByte); // fixed typo: was `endindByte`
reader.readAsArrayBuffer(blob);
完整解决方案
我把两者结合起来了.棘手的部分是同步文件读取,因为FileReader.readAsArrayBuffer()
是异步的.我编写了一个小型系列函数,它是在series
function of async.js之后建模的.它必须一个接一个地完成,因为没有办法进入CryptoJS的散列函数的内部状态.
另外,CryptoJS无法理解ArrayBuffer是什么,因此必须将其转换为其本机数据表示形式,即所谓的WordArray:
// Convert an ArrayBuffer into CryptoJS's native WordArray: pack each group
// of 4 bytes into one big-endian 32-bit word.  Past the end of the buffer,
// Uint8Array indexing yields `undefined`, which the bitwise operators
// coerce to 0; the true byte count is preserved via the sigBytes argument.
function arrayBufferToWordArray(ab) {
    var bytes = new Uint8Array(ab);
    var words = [];
    for (var offset = 0; offset < bytes.length; offset += 4) {
        words.push(bytes[offset] << 24 | bytes[offset + 1] << 16 | bytes[offset + 2] << 8 | bytes[offset + 3]);
    }
    return CryptoJS.lib.WordArray.create(words, bytes.length);
}
另一件事是散列是一种同步操作,其中没有产量可以继续在别处执行.因此,浏览器将冻结,因为JavaScript是单线程的.解决方案是使用Web Workers将散列卸载到不同的线程,以便UI线程保持响应.
Web工作者期望脚本文件在其构造函数中,因此我使用Rob W的this solution来创建内联脚本.
// Run an array of asynchronous tasks strictly one after another, then call
// `done`.  Each task receives a `next` callback it must invoke on completion.
// (Modeled on async.js's `series`; required here because FileReader is async
// and CryptoJS's incremental hasher must receive the chunks in order.)
//
// Improvement over the original: walks an index instead of re-slicing the
// array on every step, avoiding O(n^2) array copying for many chunks.
function series(tasks, done) {
    var index = 0;
    function next() {
        // Treat a missing/empty task list the same as "all tasks finished".
        if (!tasks || index >= tasks.length) {
            done();
            return;
        }
        tasks[index++](next);
    }
    next();
}
// Worker-side message dispatcher.  Protocol: the UI thread sends
// {type: "create" | "update" | "finish", ...}; we reply with a message of
// the same type so the caller knows the step completed.  The MD5 state
// lives in the worker-global `md5` between messages.
function webWorkerOnMessage(e) {
    var msg = e.data;
    if (msg.type === "create") {
        // Start a fresh incremental MD5 state.
        md5 = CryptoJS.algo.MD5.create();
        postMessage({type: "create"});
    } else if (msg.type === "update") {
        // Redeclared locally because Function.prototype.toString() (used to
        // build the worker script) does not carry closures along.
        function arrayBufferToWordArray(ab) {
            var bytes = new Uint8Array(ab);
            var words = [];
            for (var offset = 0; offset < bytes.length; offset += 4) {
                words.push(bytes[offset] << 24 | bytes[offset + 1] << 16 | bytes[offset + 2] << 8 | bytes[offset + 3]);
            }
            return CryptoJS.lib.WordArray.create(words, bytes.length);
        }
        // Fold this chunk into the running hash.
        md5.update(arrayBufferToWordArray(msg.chunk));
        postMessage({type: "update"});
    } else if (msg.type === "finish") {
        // Stringify the digest so a plain string crosses the worker boundary.
        postMessage({type: "finish", hash: ""+md5.finalize()});
    }
}
// --- Combined solution: chunked file reading + progressive MD5 in a Web Worker ---
// NOTE(review): this fragment references `evt` below, so it is clearly meant
// to run inside a file-input 'change' handler (like handleFileSelect above).
// URL.createObjectURL
window.URL = window.URL || window.webkitURL;
// Build the worker's source as a string: pull in CryptoJS's MD5 rollup from
// a CDN and install webWorkerOnMessage (stringified, so it must be
// self-contained — see that function's local arrayBufferToWordArray copy).
// "Server response", used in all examples
var response =
"importScripts('https://cdn.rawgit.com/CryptoStore/crypto-js/3.1.2/build/rollups/md5.js');"+
"var md5;"+
"self.onmessage = "+webWorkerOnMessage.toString();
var blob;
try {
blob = new Blob([response], {type: 'application/javascript'});
} catch (e) { // Backwards-compatibility: older WebKit/Gecko BlobBuilder API
window.BlobBuilder = window.BlobBuilder || window.WebKitBlobBuilder || window.MozBlobBuilder;
blob = new BlobBuilder();
blob.append(response);
blob = blob.getBlob();
}
// Spin up the worker from an in-memory script URL (the inline-worker trick).
var worker = new Worker(URL.createObjectURL(blob));
var files = evt.target.files; // FileList object
var chunksize = 1000000; // the chunk size doesn't make a difference
// Only the first selected file is hashed here.
var i = 0,
f = files[i],
chunks = Math.ceil(f.size / chunksize),
chunkTasks = [],
startTime = (new Date()).getTime();
// Handshake protocol: the worker replies once per request, and
// `worker.onmessage` is re-assigned at every stage so each reply is handled
// by the callback of the step that requested it.  This first handler fires
// when the "create" message (sent at the bottom) is acknowledged.
worker.onmessage = function(e) {
// create callback
// Build one sequential task per chunk; the IIFE pins `j` and `f` per closure.
for(var j = 0; j < chunks; j++){
(function(j, f){
chunkTasks.push(function(next){
// Slice just this chunk out of the File — only one chunk is in memory at a time.
var blob = f.slice(j * chunksize, Math.min((j+1) * chunksize, f.size));
var reader = new FileReader();
reader.onload = function(e) {
var chunk = e.target.result;
worker.onmessage = function(e) {
// update callback: worker has folded this chunk into the hash state.
document.getElementById('num').innerHTML = ""+(j+1)+"/"+chunks;
next();
};
worker.postMessage({type: "update", chunk: chunk});
};
reader.readAsArrayBuffer(blob);
});
})(j, f);
}
// Run the chunk tasks strictly in order (FileReader is async and the hash
// state must receive chunks sequentially), then request the final digest.
series(chunkTasks, function(){
var elem = document.getElementById("hashValueSplit");
var telem = document.getElementById("time");
worker.onmessage = function(e) {
// finish callback: display the digest and the elapsed time.
elem.value = e.data.hash;
telem.innerHTML = "in " + Math.ceil(((new Date()).getTime() - startTime) / 1000) + " seconds";
};
worker.postMessage({type: "finish"});
});
// blocking way ahead... (optional single-shot hash on the UI thread, for
// comparison — this is the variant that freezes the tab on large files)
if (document.getElementById("singleHash").checked) {
var reader = new FileReader();
// Closure to capture the file information.
reader.onloadend = (function(theFile) {
// Local copy of the ArrayBuffer -> WordArray converter (see above).
function arrayBufferToWordArray(ab) {
var i8a = new Uint8Array(ab);
var a = [];
for (var i = 0; i < i8a.length; i += 4) {
a.push(i8a[i] << 24 | i8a[i + 1] << 16 | i8a[i + 2] << 8 | i8a[i + 3]);
}
return CryptoJS.lib.WordArray.create(a, i8a.length);
}
return function(e) {
var test = e.target.result;
var hash = CryptoJS.MD5(arrayBufferToWordArray(test));
//var hash = "none";
var elem = document.getElementById("hashValue");
elem.value = hash;
};
})(f);
// Read the whole file as one ArrayBuffer (the memory-heavy path).
reader.readAsArrayBuffer(f);
}
};
// Kick everything off: ask the worker to initialize its MD5 state.
worker.postMessage({type: "create"});
DEMO似乎适用于大文件,但需要花费很多时间.也许这可以使用更快的MD5实现来改进.散列3 GB文件花了大约23分钟.
This answer of mine显示了没有用于SHA-256的webworkers的示例.