我想上传大于150mb的文件。
在Dropbox API V2文档中,它表示您应该开始upload session。
文档说你不能发送超过150mb的数据的POST,但我不确定如何使用upload_session API实现这一点。
答案 0 :(得分:2)
虽然任何单个请求都不应超过150 MB(通常您应该使用明显更小的块大小),但您可以使用多个请求上传大于此的文件。
下面是使用上传会话的示例。该示例使用的是Python SDK而不是JavaScript SDK,但由于逻辑相同(它们都使用相同的底层API),它应该可以作为有用的参考。
这段代码使用Dropbox Python SDK,将 file_path 指定的本地文件上传到 dest_path 指定的Dropbox远程路径。它还会根据文件大小决定是否使用上传会话:
# Upload the local file at `file_path` to `dest_path` on Dropbox.
# Files at or below CHUNK_SIZE go up in a single files_upload request;
# larger files use an upload session, sending CHUNK_SIZE bytes per call
# (any single request must stay below the API's 150 MB limit).
# Requires: `dbx` (an authenticated dropbox.Dropbox client), `dropbox`, `os`.
CHUNK_SIZE = 4 * 1024 * 1024  # 4 MB per request

file_size = os.path.getsize(file_path)

# Open in binary mode: the Dropbox API expects raw bytes; text mode would
# corrupt binary data (and fails outright on Python 3). The `with` block
# guarantees the handle is closed even if an upload call raises.
with open(file_path, 'rb') as f:
    if file_size <= CHUNK_SIZE:
        # Small file: one shot.
        print(dbx.files_upload(f.read(), dest_path))
    else:
        # Start the session with the first chunk; the cursor tracks how
        # many bytes the server has received so far (f.tell()).
        upload_session_start_result = dbx.files_upload_session_start(f.read(CHUNK_SIZE))
        cursor = dropbox.files.UploadSessionCursor(
            session_id=upload_session_start_result.session_id,
            offset=f.tell())
        commit = dropbox.files.CommitInfo(path=dest_path)
        while f.tell() < file_size:
            if (file_size - f.tell()) <= CHUNK_SIZE:
                # Final chunk: finish the session and commit to dest_path.
                print(dbx.files_upload_session_finish(f.read(CHUNK_SIZE),
                                                      cursor,
                                                      commit))
            else:
                # Middle chunk: append and advance the cursor offset so the
                # next call tells the server where this data belongs.
                dbx.files_upload_session_append(f.read(CHUNK_SIZE),
                                                cursor.session_id,
                                                cursor.offset)
                cursor.offset = f.tell()
答案 1 :(得分:2)
您可以依次使用 files/upload_session/start、files/upload_session/append_v2 和 files/upload_session/finish 这三个API端点来分块上传文件。下面是一个使用我的小型Dropbox v2 API包装器(dropbox-v2-api)的例子:
// Demo chunk size: 100 bytes. Real uploads would use much larger chunks
// (the API caps any single request at 150 MB).
const CHUNK_LENGTH = 100;
// Factories for mocked read streams, each producing CHUNK_LENGTH
// characters: a run of '1's for the first chunk, '2's for the second.
const firstUploadChunkStream = () => utils.createMockedReadStream('1', CHUNK_LENGTH);
const secondUploadChunkStream = () => utils.createMockedReadStream('2', CHUNK_LENGTH);
// Drive the three-step upload-session flow in order:
// start (first chunk) -> append (second chunk) -> finish (commit).
sessionStart((sessionId) => {
sessionAppend(sessionId, () => {
sessionFinish(sessionId);
});
});
// Step 1: open an upload session by sending the first chunk.
// On success, passes the new session_id to `cb`; on error, logs and stops.
function sessionStart(cb) {
  const request = {
    resource: 'files/upload_session/start',
    parameters: {
      // Keep the session open so more chunks can be appended.
      close: false
    },
    readStream: firstUploadChunkStream()
  };
  dropbox(request, (err, response) => {
    if (err) { return console.log('sessionStart error: ', err) }
    console.log('sessionStart response:', response);
    cb(response.session_id);
  });
}
// Step 2: append the second chunk to the open session.
// The cursor offset is the number of bytes already sent (one chunk).
function sessionAppend(sessionId, cb) {
  const cursor = {
    session_id: sessionId,
    offset: CHUNK_LENGTH
  };
  dropbox({
    resource: 'files/upload_session/append_v2',
    parameters: { cursor, close: false },
    readStream: secondUploadChunkStream()
  }, (err, response) => {
    if (err) { return console.log('sessionAppend error: ', err) }
    console.log('sessionAppend response:', response);
    cb();
  });
}
// Step 3: close the session and commit the uploaded bytes to /result.txt.
// The cursor offset must equal the total bytes sent (two chunks).
function sessionFinish(sessionId) {
  const parameters = {
    cursor: {
      session_id: sessionId,
      offset: CHUNK_LENGTH * 2
    },
    commit: {
      path: "/result.txt",
      mode: "add",
      autorename: true,
      mute: false
    }
  };
  dropbox({ resource: 'files/upload_session/finish', parameters }, (err, response) => {
    if (err) { return console.log('sessionFinish error: ', err) }
    console.log('sessionFinish response:', response);
  });
}
答案 2 :(得分:0)
我有一个例子!
// Upload two small files via upload sessions, then commit both in one batch.
//
// BUG FIX: the original snippet fired every call immediately without
// chaining, so `file1Start`/`file2Start` were still undefined when the
// batch-finish entries were built, and `finishBatch` was undefined when
// the check ran. Chaining the promises enforces the required ordering:
// start both sessions -> finish batch -> poll the async job.
testFile1Data = "test file data 1";
testFile2Data = "test file data 2";

Promise.all([
  // Each start call sends the whole (tiny) file and closes its session,
  // so only the batch commit remains to be done.
  dbx.filesUploadSessionStart({ contents: testFile1Data, close: true }),
  dbx.filesUploadSessionStart({ contents: testFile2Data, close: true }),
])
  .then(function (starts) {
    file1Start = starts[0];
    file2Start = starts[1];
    // Commit both sessions in a single batch call; each cursor offset is
    // the full length of the data sent to that session.
    return dbx.filesUploadSessionFinishBatch({entries: [
      {cursor: {session_id: file1Start.session_id, offset: testFile1Data.length}, commit: {path: "/testFile1.txt"}},
      {cursor: {session_id: file2Start.session_id, offset: testFile2Data.length}, commit: {path: "/testFile2.txt"}},
    ]});
  })
  .then(function (response) {
    finishBatch = response;
    // finish_batch is asynchronous: poll its job id for completion.
    // NOTE(review): small batches may return an immediate 'complete'
    // result with no async_job_id — check response['.tag'] in real code.
    return dbx.filesUploadSessionFinishBatchCheck({async_job_id: finishBatch.async_job_id});
  })
  .then(function (response) {
    finishBatch = response;
  })
  .catch(function (err) {
    console.log(err);
  });
我从github上的问题主题获得了示例 - https://github.com/dropbox/dropbox-sdk-js/issues/80#issuecomment-283189888