
How to implement multipart upload in Node.js? Method introduction

青灯夜游 | Published 2022-07-29

How do you implement multipart upload in Node? The following article introduces how to implement multipart (chunked) upload in Node.js, covering both the front end and the server. I hope it will be helpful to you!


Uploading a large file takes a long time, and the upload can fail partway through. To solve this, the front end and the back end need to cooperate.

Solution steps:

  • Split the file into chunks to reduce the time each request takes; if a chunk fails, only that chunk needs to be re-uploaded instead of starting from scratch

  • Notify the server to merge the chunks once they have all been uploaded

  • Control the number of concurrent requests to avoid running the browser out of memory

  • When a request fails because of the network or for other reasons, resend it

File chunking and merging

In JavaScript, the File object is a subclass of Blob, which provides an important method, slice(). With it we can split a binary file like this:

<!DOCTYPE html>
<html>
<head>
    <meta charset="UTF-8">
    <meta http-equiv="X-UA-Compatible" content="IE=edge">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Document</title>
    <script src="https://cdn.bootcdn.net/ajax/libs/axios/0.24.0/axios.min.js"></script>
</head>
<body>
    <input type="file" multiple="multiple" id="fileInput" />
    <button onclick="SliceUpload()">Upload</button>
    <script>
        // Must be async because we await the chunk uploads and the merge request
        async function SliceUpload() {
            const file = document.getElementById('fileInput').files[0]
            if (!file) return

            // Split the file into chunks
            let size = 1024 * 50; // 50KB chunk size
            let fileChunks = [];
            let index = 0;        // chunk index
            for (let cur = 0; cur < file.size; cur += size) {
                fileChunks.push({
                    hash: index++,
                    chunk: file.slice(cur, cur + size),
                });
            }

            // Upload the chunks
            const uploadList = fileChunks.map((item) => {
                let formData = new FormData();
                formData.append("filename", file.name);
                formData.append("hash", item.hash);
                formData.append("chunk", item.chunk);
                return axios({
                    method: "post",
                    url: "/upload",
                    data: formData,
                });
            });
            await Promise.all(uploadList);

            // All chunks are uploaded; ask the server to merge them
            await axios({
                method: "get",
                url: "/merge",
                params: {
                    filename: file.name,
                },
            });
            console.log("Upload complete");
        }
    </script>
</body>
</html>

Concurrency Control

If the file is large, slicing it this way produces a lot of chunks, and the browser will fire off a large number of requests in a short time, which can exhaust its memory, so the number of concurrent requests has to be controlled.

Here we use the Promise.race() method to limit the number of concurrent requests and avoid running the browser out of memory.
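In isolation, the pattern works like this: keep a pool of in-flight requests, and once the pool is full, wait on Promise.race(), which settles as soon as the fastest request finishes and frees a slot. Below is only a simplified sketch (the names runWithLimit and startTask are illustrative, and error handling is omitted); the article's complete upload function follows after it.

// Minimal sketch of limiting concurrency with Promise.race().
// `tasks` is assumed to be an array of functions that each return a Promise.
async function runWithLimit(tasks, max = 3) {
    const pool = [];
    for (const startTask of tasks) {
        const p = startTask().then((result) => {
            // Remove the settled promise from the pool to free a slot
            pool.splice(pool.indexOf(p), 1);
            return result;
        });
        pool.push(p);
        if (pool.length >= max) {
            // Block until the fastest in-flight task settles
            await Promise.race(pool);
        }
    }
    // Wait for whatever is still in flight
    return Promise.all(pool);
}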

// With concurrency control added
async function SliceUpload() {
    const file = document.getElementById('fileInput').files[0]
    if (!file) return

    // Split the file into chunks
    let size = 1024 * 50; // 50KB chunk size
    let fileChunks = [];
    let index = 0;        // chunk index
    for (let cur = 0; cur < file.size; cur += size) {
        fileChunks.push({
            hash: index++,
            chunk: file.slice(cur, cur + size),
        });
    }

    let pool = []; // concurrency pool
    let max = 3;   // maximum number of concurrent requests
    for (let i = 0; i < fileChunks.length; i++) {
        let item = fileChunks[i];
        let formData = new FormData();
        formData.append("filename", file.name);
        formData.append("hash", item.hash);
        formData.append("chunk", item.chunk);

        // Upload this chunk
        let task = axios({
            method: "post",
            url: "/upload",
            data: formData,
        });
        task.then(() => {
            // Remove the finished request from the pool
            let index = pool.findIndex((t) => t === task);
            pool.splice(index, 1);
        });

        // Put the request into the pool; once the pool is full,
        // wait until the fastest request in it settles
        pool.push(task);
        if (pool.length === max) {
            await Promise.race(pool);
        }
    }
    // Wait for the requests that are still in flight before merging
    await Promise.all(pool);

    // All chunks are uploaded; ask the server to merge them
    await axios({
        method: "get",
        url: "/merge",
        params: {
            filename: file.name,
        },
    });
    console.log("Upload complete");
}

Make code reusable

To make the logic reusable and add retries, the upload below is wrapped in a helper that collects failed chunks, retries them once every request has settled, and only asks the server to merge when there is nothing left to upload.

function SliceUpload() {
    const file = document.getElementById('fileInput').files[0]
    if (!file) return

    // Split the file into chunks
    let size = 1024 * 50;  // 50KB chunk size
    let fileChunks = [];
    let index = 0;         // chunk index
    for (let cur = 0; cur < file.size; cur += size) {
        fileChunks.push({
            hash: index++,
            chunk: file.slice(cur, cur + size),
        });
    }

    const uploadFileChunks = async function (list) {
        if (list.length === 0) {
            // Nothing left to upload: ask the server to merge the chunks
            await axios({
                method: 'get',
                url: '/merge',
                params: {
                    filename: file.name
                }
            });
            console.log('Upload complete')
            return
        }

        let pool = []       // concurrency pool
        let max = 3         // maximum number of concurrent requests
        let finish = 0      // number of settled requests
        let failList = []   // chunks that failed and need to be retried
        for (let i = 0; i < list.length; i++) {
            let item = list[i]
            let formData = new FormData()
            formData.append('filename', file.name)
            formData.append('hash', item.hash)
            formData.append('chunk', item.chunk)

            // Chain catch/finally onto the request so the promise kept in the
            // pool never rejects and always removes itself when it settles
            const task = axios({
                method: 'post',
                url: '/upload',
                data: formData
            }).catch(() => {
                // Remember failed chunks so they can be retried
                failList.push(item)
            }).finally(() => {
                // Remove the settled request from the pool
                pool.splice(pool.indexOf(task), 1)
                finish++
                // Once every request has settled, retry the failed chunks
                // (an empty failList triggers the merge branch above)
                if (finish === list.length) {
                    uploadFileChunks(failList)
                }
            })
            pool.push(task)
            if (pool.length === max) {
                // Wait until the fastest in-flight request settles
                await Promise.race(pool)
            }
        }
    }

    uploadFileChunks(fileChunks)
}

Server interface implementation

The server exposes two endpoints: /upload writes each chunk into a temporary directory named after the file, and /merge reads the chunks back in order, concatenates them, and writes the final file.

const express = require('express')
const multiparty = require('multiparty')
const fs = require('fs')
const path = require('path')
const { Buffer } = require('buffer')
// Directory for merged files
const STATIC_FILES = path.join(__dirname, './static/files')
// Temporary directory for uploaded chunks
const STATIC_TEMPORARY = path.join(__dirname, './static/temporary')
const server = express()
// Static file hosting
server.use(express.static(path.join(__dirname, './dist')))
// Endpoint for uploading chunks
server.post('/upload', (req, res) => {
    const form = new multiparty.Form();
    form.parse(req, function (err, fields, files) {
        if (err) return res.status(500).send('Failed to parse the uploaded chunk')
        let filename = fields.filename[0]
        let hash = fields.hash[0]
        let chunk = files.chunk[0]
        let dir = `${STATIC_TEMPORARY}/${filename}`
        try {
            // Create the chunk directory (and any missing parent directories)
            if (!fs.existsSync(dir)) fs.mkdirSync(dir, { recursive: true })
            // Copy the uploaded temp file into the chunk directory, named by its index
            const buffer = fs.readFileSync(chunk.path)
            const ws = fs.createWriteStream(`${dir}/${hash}`)
            // end() writes the data and then closes the stream
            ws.end(buffer)
            res.send(`${filename}-${hash} chunk uploaded successfully`)
        } catch (error) {
            console.error(error)
            res.status(500).send(`${filename}-${hash} chunk upload failed`)
        }
    })
})

// Endpoint for merging chunks
server.get('/merge', async (req, res) => {
    const { filename } = req.query
    try {
        let len = 0
        // Chunks are named 0..n-1, so reading them by index restores the original order
        const bufferList = fs.readdirSync(`${STATIC_TEMPORARY}/${filename}`).map((_, index) => {
            const buffer = fs.readFileSync(`${STATIC_TEMPORARY}/${filename}/${index}`)
            len += buffer.length
            return buffer
        });
        // Merge the chunks and write the final file
        const buffer = Buffer.concat(bufferList, len);
        if (!fs.existsSync(STATIC_FILES)) fs.mkdirSync(STATIC_FILES, { recursive: true })
        const ws = fs.createWriteStream(`${STATIC_FILES}/${filename}`)
        // end() writes the merged buffer and then closes the stream
        ws.end(buffer);
        res.send(`Chunk merge completed`);
    } catch (error) {
        console.error(error);
        res.status(500).send(`Chunk merge failed`);
    }
})

server.listen(3000, () => {
    console.log('http://localhost:3000/')
})
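One optional follow-up, not part of the original code: after a successful merge, the chunk files are left behind under ./static/temporary. Here is a minimal sketch of cleaning them up, using the fs and path modules already required above; the helper name cleanupChunks is illustrative, and fs.rmSync assumes Node.js 14.14 or later. It could be called at the end of the try block in the /merge handler.

// Hypothetical helper (not in the original article): delete the temporary
// chunk directory for a file once its chunks have been merged.
function cleanupChunks(temporaryRoot, filename) {
    const dir = path.join(temporaryRoot, filename)
    if (fs.existsSync(dir)) {
        // Requires Node.js 14.14+; removes the directory and its chunk files
        fs.rmSync(dir, { recursive: true, force: true })
    }
}

// For example: cleanupChunks(STATIC_TEMPORARY, filename)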


