md5_bak.js

import SparkMD5 from 'spark-md5'

/**
 * Compute the MD5 of a file incrementally, chunk by chunk.
 * @param file {File} - a native File, or an uploader wrapper that exposes the native File as `file.file`
 * @param options {Object} - onProgress | onSuccess | onError callbacks
 */
export function generateMD5(file, options = {}) {
  const fileReader = new FileReader()
  const time = new Date().getTime()
  const blobSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice
  const chunkSize = 10 * 1024 * 1000 // ~10 MB per chunk
  const chunks = Math.ceil(file.size / chunkSize)
  let currentChunk = 0
  const spark = new SparkMD5.ArrayBuffer()

  const loadNext = () => {
    const start = currentChunk * chunkSize
    const end = start + chunkSize >= file.size ? file.size : start + chunkSize
    // Read the next slice; fall back to `file` itself when there is no wrapped native File
    fileReader.readAsArrayBuffer(blobSlice.call(file.file || file, start, end))
  }

  fileReader.onload = (e) => {
    spark.append(e.target.result)
    currentChunk++
    if (options.onProgress && typeof options.onProgress === 'function') {
      options.onProgress(currentChunk, chunks)
    }
    if (currentChunk < chunks) {
      loadNext()
    } else {
      // All chunks appended: finalize the hash
      const md5 = spark.end()
      if (options.onSuccess && typeof options.onSuccess === 'function') {
        options.onSuccess(md5)
      }
      console.log(
        `MD5 computed: ${file.name} \nMD5: ${md5} \nchunks: ${chunks} size: ${file.size} took: ${
          new Date().getTime() - time
        } ms`
      )
    }
  }

  fileReader.onerror = function () {
    console.log('MD5 computation failed')
    if (options.onError && typeof options.onError === 'function') {
      options.onError()
    }
  }

  loadNext()
}
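
/*
 * Usage sketch (assumptions: run in a browser; the file-input selector and callback
 * bodies below are illustrative and not part of this module; a plain File works here
 * because the slice falls back to `file` when no wrapped `file.file` is present):
 *
 *   import { generateMD5 } from './md5_bak'
 *
 *   document.querySelector('input[type="file"]').addEventListener('change', (e) => {
 *     const selected = e.target.files[0]
 *     generateMD5(selected, {
 *       onProgress: (current, total) => console.log(`hashed chunk ${current}/${total}`),
 *       onSuccess: (md5) => console.log('MD5:', md5),
 *       onError: () => console.error('failed to read the file'),
 *     })
 *   })
 */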