0

我正在尝试使用 Resumable.js 为上传附加哈希,并挂钩了 "fileAdded" 事件。我们如何为每个分块(chunk)附加哈希?

4

1 回答 1

0
  1. 按如下方式挂钩 fileAdded 事件:

         r.on('fileAdded', function computeHashes(resumableFile, offset, fileReader) {
             // Compute an MD5 checksum for every chunk of the file BEFORE starting
             // the upload, storing them in resumableFile.hashes (index = chunk number).
             // The chunk boundaries below must mirror resumable.js's own chunking
             // (Math.max(Math.floor(size / chunkSize), 1) chunks, with the final
             // chunk absorbing any short tail) so the hashes line up with the uploads.
             var chunkSize = resumableFile.resumableObj.opts.chunkSize;
             var numChunks = Math.max(Math.floor(resumableFile.file.size / chunkSize), 1);
             var forceChunkSize = false; // resumable.js default: last chunk absorbs the remainder
             var startByte, endByte;
             // Pick whichever Blob.slice variant this browser supports.
             var func = (resumableFile.file.slice ? 'slice' : (resumableFile.file.mozSlice ? 'mozSlice' : (resumableFile.file.webkitSlice ? 'webkitSlice' : 'slice')));
             var bytes;

             resumableFile.hashes = resumableFile.hashes || [];
             fileReader = fileReader || new FileReader();
             offset = offset || 0; // recursion state: index of the chunk being hashed

             startByte = offset * chunkSize;
             endByte = Math.min(resumableFile.file.size, (offset + 1) * chunkSize);

             // Fold a short tail into the final chunk, exactly as resumable.js does.
             if (resumableFile.file.size - endByte < chunkSize && !forceChunkSize) {
                 endByte = resumableFile.file.size;
             }
             bytes = resumableFile.file[func](startByte, endByte);

             // Install the query hook once; with the patched resumable.js (step 2)
             // it is invoked per chunk with the chunk offset as the 2nd argument,
             // so each upload request carries that chunk's checksum.
             resumableFile.resumableObj.opts.query = function (resumableFile, chunkOffset) {
                 return { 'checksum': resumableFile.hashes[chunkOffset] };
             };

             fileReader.onloadend = function (e) {
                 // Hash the chunk we just read and record it by chunk index.
                 resumableFile.hashes.push(SparkMD5.ArrayBuffer.hash(e.target.result));

                 if (numChunks > offset + 1) {
                     // More chunks remain: recurse for the next one, reusing the reader.
                     computeHashes(resumableFile, offset + 1, fileReader);
                 } else if (numChunks === offset + 1) {
                     // All chunks hashed: start the actual upload.
                     r.upload();
                 }
             };

             fileReader.readAsArrayBuffer(bytes);

             // One-time UI setup — only on the initial (event-driven) call, otherwise
             // the recursion would append one duplicate list item per chunk.
             if (offset === 0) {
                 // Show progress bar
                 $('.resumable-progress, .resumable-list').show();
                 // Show pause, hide resume
                 $('.resumable-progress .progress-resume-link').hide();
                 $('.resumable-progress .progress-pause-link').show();
                 // Add the file to the list
                 $('.resumable-list').append('<li class="resumable-file-' + resumableFile.uniqueIdentifier + '">Uploading <span class="resumable-file-name"></span> <span class="resumable-file-progress"></span>');
                 $('.resumable-file-' + resumableFile.uniqueIdentifier + ' .resumable-file-name').html(resumableFile.fileName);
             }
         });
    
  2. 将 resumable.js 源码中读取 `query` 选项的那一行代码更改为

(typeof $.resumableObj.opts.query == "function") ? $.resumableObj.opts.query($.fileObj, offset) : $.resumableObj.opts.query;

这样 `query` 回调在每个分块上传时都会收到当前分块的偏移量,从而能返回对应分块的校验和。

于 2015-08-11T14:32:03.233 回答