How to encrypt a stream and then pipe the data to GridFS? (MongoDB)

Asked: 2017-06-12 09:03:41

Tags: node.js mongodb stream gridfs

I am currently building an application that accepts user uploads, which I then want to encrypt and save to MongoDB. I am receiving the file with multer, and I am trying to encrypt it and then save it to MongoDB using the gridfs-stream library. Can someone take a look at this and help me? I don't know how to work with streams in Node.js.
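For context, here is a minimal sketch of the route wiring that a controller like the one below assumes; it is not part of the original post. It assumes multer's memoryStorage (so that req.file.buffer is populated), and the "/upload" path and "file" field name are placeholders:

import express from "express";
import multer from "multer";
import uploadController from "./controllers/uploadController";

// In-memory storage keeps the whole upload in req.file.buffer,
// which the controller relies on. Whatever auth middleware
// populates req.user is omitted here.
const upload = multer({ storage: multer.memoryStorage() });

const app = express();
app.post("/upload", upload.single("file"), uploadController);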

import { createCipher } from "crypto";
import { AES_PASSWORD } from "../config";
import stream from "stream";
import GridFs from "../models/GridFs";

const cipher = createCipher("aes-256-cbc", AES_PASSWORD);
const maxFileSize = 1024 * 1024 * 10; // 10MB
const fileChunkSize = 2 * 255 * 1024; // 510kb

const uploadController = (req,res,next) => {
  // User information
  const user_id = req.user._id;

  // File metadata from the uploaded file
  const {
    mimetype,
    buffer,
    size
  } = req.file;
  const content_type = mimetype;

  // File information provided by Resumable.js
  const {
    resumableChunkNumber,
    resumableChunkSize,
    resumableCurrentChunkSize,
    resumableTotalSize,
    resumableType,
    resumableIdentifier,
    resumableFilename,
    resumableRelativePath,
    resumableTotalChunks
  } = req.body;

  /*
   * Developer's note:
   * Somehow the data being posted automatically by resumable.js
   * is of type string. Thus we have to parse the string here
   * to prevent errors when doing any comparisons
   */
  const data = buffer;
  const chunk_n = parseInt(resumableChunkNumber) - 1;
  const chunk_total = parseInt(resumableTotalChunks);
  const chunk_size = parseInt(resumableChunkSize);
  const file_length = parseInt(resumableTotalSize);
  const uid = resumableIdentifier;
  const file_name = resumableFilename;

  // Verify the chunk size matches the configured value
  if(chunk_size != fileChunkSize){
    return res.status(400).json({
      error: "chunk_size does not match the chunk size configured on the backend"
    });
  }

  // Ensure file size is not larger than max allowable at the moment
  if( file_length && file_length > maxFileSize) {
    return res.status(400).json({
      error: "file length is greater than configured maxFileSize"
    });
  }

  // Basic check that the chunk number does not exceed the expected
  // number of chunks, accounting for a partial final chunk
  let expectedNumberOfChunks = file_length / chunk_size;
  if(file_length % chunk_size){
    expectedNumberOfChunks += 1;
  }
  if(chunk_n > expectedNumberOfChunks){
    return res.status(400).json({
      error: "chunk_n is greater than the number of expected chunks"
    });
  }

  const writeStream = GridFs.createWriteStream({
    content_type,
    chunk_size,
    chunk_n,
    uid,
    file_name,
    file_length,
    user_id
  });

  // All checks passed. Start encryption and stream into GridFS
  const bufferStream = new stream.PassThrough();
  try{
    bufferStream.write(buffer);
    bufferStream.pipe(cipher).pipe(writeStream).on("close", () => {
      buffer.end();
      return res.status(200).send("ok");
    });
  } catch(error){
    console.log(error);
    return res.status(500).json({ error });
  }
};

export default uploadController;

Nothing happens and the request just times out. I'm completely at a loss here.
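For reference, the most likely reason for the timeout is that the PassThrough stream is never ended, so the cipher never flushes and the write stream's "close" event never fires; the module-level cipher is also one-shot and cannot be reused across requests, and buffer.end() would throw since buffer is a Buffer, not a stream. A minimal sketch of the final block of the controller that does terminate, reusing the question's imports and variables and assuming the same GridFs.createWriteStream call as above with a write stream that emits "close" (as gridfs-stream's does):

// Create a fresh cipher per request instead of at module level:
// a cipher stream can only be piped through once.
const cipher = createCipher("aes-256-cbc", AES_PASSWORD);

const bufferStream = new stream.PassThrough();
bufferStream
  .pipe(cipher)
  .pipe(writeStream)
  .on("close", () => res.status(200).send("ok"))
  .on("error", (error) => res.status(500).json({ error: error.message }));

// end(buffer) writes the chunk AND signals end-of-input, which lets
// the cipher flush its final block and the write stream close.
bufferStream.end(buffer);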

0 Answers:

No answers