Browse Source

first commit

Gitea 1 year ago
commit
e98a9fe23e
5 changed files with 81 additions and 0 deletions
  1. 5 0
      main.js
  2. 9 0
      package.json
  3. 21 0
      tools/createChunk.js
  4. 34 0
      tools/cutFile.js
  5. 12 0
      tools/worker.js

+ 5 - 0
main.js

@@ -0,0 +1,5 @@
// Public entry point: re-export the chunking utilities.
// (worker.js is loaded by cutFile via the Worker constructor, not imported here.)
export { createChunk } from "./tools/createChunk";
export { cutFile } from "./tools/cutFile";

+ 9 - 0
package.json

@@ -0,0 +1,9 @@
{
  "name": "tools",
  "version": "1.0.0",
  "type": "module",
  "main": "main.js",
  "license": "MIT",
  "dependencies": {
    "spark-md5": "^3.0.2"
  }
}

+ 21 - 0
tools/createChunk.js

@@ -0,0 +1,21 @@
import SparkMD5 from 'spark-md5';

/**
 * Read one chunk of `file` and compute its MD5 hash.
 *
 * @param {Blob|File} file - the source file (anything with `.slice()`).
 * @param {number} index - zero-based chunk index.
 * @param {number} chunkSize - chunk size in bytes.
 * @returns {Promise<{start: number, end: number, index: number, hash: string, blob: Blob}>}
 *          `end` may exceed `file.size` for the last chunk; `Blob.slice` clamps it.
 */
export function createChunk(file, index, chunkSize) {
    return new Promise((resolve, reject) => {
        const start = index * chunkSize;
        const end = start + chunkSize;
        const spark = new SparkMD5.ArrayBuffer();
        const fileReader = new FileReader();
        const blob = file.slice(start, end);
        fileReader.onload = (e) => {
            spark.append(e.target.result);
            resolve({
                start,
                end,
                index,
                hash: spark.end(),
                blob,
            });
        };
        // Without this, a failed read would leave the promise pending forever.
        fileReader.onerror = () => {
            reject(fileReader.error ?? new Error(`Failed to read chunk ${index}`));
        };
        fileReader.readAsArrayBuffer(blob);
    });
}

+ 34 - 0
tools/cutFile.js

@@ -0,0 +1,34 @@
// 5 MiB per chunk; one worker per available logical core (fallback: 4).
const CHUNK_SIZE = 1024 * 1024 * 5;
const THREAD_COUNT = navigator.hardwareConcurrency || 4;

/**
 * Split `file` into CHUNK_SIZE chunks and hash them in parallel web workers.
 *
 * Fixes over the original version:
 *  - the Promise is now returned (callers previously awaited `undefined`);
 *  - at most THREAD_COUNT workers are spawned (the old loop ran once per
 *    chunk, spawning one worker per chunk while partitioning the work by
 *    thread, and could resolve with a partial result);
 *  - an empty file resolves immediately instead of hanging forever;
 *  - the worker URL is resolved against this module, not the page.
 *
 * @param {File|Blob} file
 * @returns {Promise<Array<{start: number, end: number, index: number, hash: string, blob: Blob}>>}
 *          chunk descriptors ordered by global chunk index.
 */
export async function cutFile(file) {
    return new Promise((resolve) => {
        const chunkCount = Math.ceil(file.size / CHUNK_SIZE);
        if (chunkCount === 0) {
            resolve([]);
            return;
        }
        const threadChunkCount = Math.ceil(chunkCount / THREAD_COUNT);
        const result = [];
        let finishCount = 0;
        let workerCount = 0;
        for (let i = 0; i < THREAD_COUNT; i++) {
            const start = i * threadChunkCount;
            if (start >= chunkCount) break; // no chunks left for this worker
            const end = Math.min((i + 1) * threadChunkCount, chunkCount);
            const worker = new Worker(new URL("./worker.js", import.meta.url), {
                type: "module",
            });
            workerCount++;
            worker.postMessage({
                file,
                chunkSize: CHUNK_SIZE,
                startChunkIndex: start,
                endChunkIndex: end,
            });
            worker.onmessage = (e) => {
                // Place this worker's chunks at their global indices.
                for (let j = start; j < end; j++) {
                    result[j] = e.data[j - start];
                }
                worker.terminate();
                finishCount++;
                // workerCount is final by the time any message arrives.
                if (finishCount === workerCount) {
                    resolve(result);
                }
            };
        }
    });
}

+ 12 - 0
tools/worker.js

@@ -0,0 +1,12 @@
// Explicit .js extension: native module workers resolve import specifiers
// strictly, so an extension-less path would fail outside a bundler.
import { createChunk } from "./createChunk.js";

/**
 * Hash the chunk range [startChunkIndex, endChunkIndex) of `file` in
 * parallel and post the resulting chunk descriptors back to the spawner.
 */
onmessage = async (e) => {
  const { file, chunkSize, startChunkIndex, endChunkIndex } = e.data;
  const proms = [];
  for (let i = startChunkIndex; i < endChunkIndex; i++) {
    // createChunk's signature is (file, index, chunkSize); the original
    // call passed (file, chunkSize, i), swapping index and chunk size.
    proms.push(createChunk(file, i, chunkSize));
  }
  const chunks = await Promise.all(proms);
  postMessage(chunks);
};