Hi, I'm looking for a file upload API, or any custom code or logic I can apply to upload large files to Office 365. I know we can split the file into chunks, but I don't understand how the chunks are combined on the client side, or how they are merged back together on the server side, for Office 365. Please provide some help. I went through this example: https://gist.github.com/beckettkev/cbb4f1e594ef648e06b6287d6af39138
but it doesn't work for me. I am able to split the file into chunks. Now I have two options:
1 - Merge all the chunks, which are basically ArrayBuffer objects, and send the result to the Office 365 upload service.
2 - Send each chunk separately and generate the file on the server. But then the question is how to merge the chunks into a single file on the server.
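To be clear, by option 1 I mean something like the following minimal sketch on the client side, where chunks is the array of ArrayBuffer objects I already have:

// Re-assemble the collected ArrayBuffer chunks into a single Blob on the client.
// `chunks` is the array of ArrayBuffers mentioned above.
const merged = new Blob(chunks, { type: "application/octet-stream" });
// `merged` could then be sent as the body of a single upload request,
// e.g. fetch(uploadUrl, { method: "POST", body: merged });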
Please suggest an approach.
Answer 0 (score: 1)
After a lot of research, I wrote the following code for SharePoint Online. It works using SP.RequestExecutor. I tried it with a 500 MB file and everything worked fine for me. I deliberately minified it, but you can use it as is.
"use strict";var FileUploadService=function(){function e(){}var t=e.prototype;return t.siteUrl=_spPageContextInfo.webAbsoluteUrl,t.siteRelativeUrl="/"!=_spPageContextInfo.webServerRelativeUrl?_spPageContextInfo.webServerRelativeUrl:"",t.fileUpload=function(e,t,n){var r=this;return new Promise(function(o,i){r.createDummyFile(n,t).then(function(a){var u=new FileReader,l=0,c=e.size,s=parseInt(1e6)>c?Math.round(.8*c):parseInt(1e6),f=[];u.readAsArrayBuffer(e),u.onload=function(e){for(;l<c;)l+s>c&&(s=c-l),f.push({offset:l,length:s,method:r.getUploadMethod(l,s,c)}),l+=s;var a=c/f.length/c*100;if(f.length>0){var u=r.guid();r.uploadFile(e.target.result,u,t,n,f,0,0,a,o,i)}}})})},t.createDummyFile=function(e,t){var n=this;return new Promise(function(r,o){var i="decodedurl='"+n.siteRelativeUrl+"/"+t+"'",a=n.siteUrl+"/_api/Web/GetFolderByServerRelativePath("+i+")/files/add(overwrite=true, url='"+e+"')";n.executeAsync(a,n.convertDataBinaryString(2),{accept:"application/json;odata=verbose"}).then(function(e){return r(!0)}).catch(function(e){return o(e)})})},t.convertDataBinaryString=function(e){for(var t="",n=new Uint8Array(e),r=0;r<n.byteLength;r++)t+=String.fromCharCode(n[r]);return t},t.executeAsync=function(e,t,n){var r=this;return new Promise(function(o,i){new SP.RequestExecutor(r.siteUrl).executeAsync({url:e,method:"POST",body:t,binaryStringRequestBody:!0,headers:n,success:function(e){return o(e)},error:function(e){return i(e.responseText)}})})},t.uploadFileChunk=function(e,t,n,r,o,i){var a=this;return new Promise(function(i,u){var l=0===r.offset?"":",fileOffset="+r.offset,c=a.siteUrl+"/_api/web/getfilebyserverrelativeurl('"+a.siteRelativeUrl+"/"+t+"/"+n+"')/"+r.method+"(uploadId=guid'"+e+"'"+l+")";a.executeAsync(c,o,{Accept:"application/json; odata=verbose","Content-Type":"application/octet-stream"}).then(function(e){return i(e)}).catch(function(e){return u(e)})})},t.uploadFile=function(e,t,n,r,o,i,a,u,l,c){var s=this,f=this.convertFileToBlobChunks(e,a,o[i]);this.uploadFileChunk(t,n,r,o[i],f,a).then(function(f){var d=i===o.length-1;i+=1;d||Math.round(i*u);i<o.length?s.uploadFile(e,t,n,r,o,i,a,u,l,c):l(f)}).catch(function(e){console.log("Error in uploadFileChunk! "+e),c(e)})},t.getUploadMethod=function(e,t,n){return e+t+1>n?"finishupload":0===e?"startupload":e<n?"continueupload":null},t.convertFileToBlobChunks=function(e,t,n){var r=e.slice(n.offset,n.offset+n.length);return this.convertDataBinaryString(r)},t.guid=function(){function e(){return Math.floor(65536*(1+Math.random())).toString(16).substring(1)}return e()+e()+"-"+e()+"-"+e()+"-"+e()+"-"+e()+e()+e()},e}();function UploadFile(e){var t=e.target.files;0!=t.length&&FileUploadService.prototype.fileUpload(t[0],"UploadLibrary",t[0].name).then(function(e){console.log("File Uploaded Successfully")}).catch(function(e){console.log(e)})}
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/4.3.1/css/bootstrap.min.css">
<script type="text/javascript" src="/_layouts/15/SP.RequestExecutor.js"></script>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/3.4.1/jquery.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/popper.js/1.14.7/umd/popper.min.js"></script>
<script src="https://maxcdn.bootstrapcdn.com/bootstrap/4.3.1/js/bootstrap.min.js"></script>
<input class="upload form-control" id="DocUploader" placeholder="Upload file" type="file" onchange="UploadFile(event)">
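If you prefer something readable, the core flow is: create an empty placeholder file in the library, then push the content slice by slice through the StartUpload / ContinueUpload / FinishUpload REST endpoints. Below is a simplified sketch of that flow (not a de-minified copy of the snippet above) using fetch instead of SP.RequestExecutor. It assumes a classic SharePoint page where _spPageContextInfo and the __REQUESTDIGEST hidden field are available; the slice size and library name are placeholders, and error handling is omitted for brevity.

// Simplified sliced-upload sketch for SharePoint Online.
// Assumes _spPageContextInfo and the __REQUESTDIGEST hidden field exist on the page.
async function uploadLargeFile(file, libraryName) {
  const webUrl = _spPageContextInfo.webAbsoluteUrl;
  const serverRelative = _spPageContextInfo.webServerRelativeUrl === "/" ? "" : _spPageContextInfo.webServerRelativeUrl;
  const folderUrl = serverRelative + "/" + libraryName;
  const headers = {
    "Accept": "application/json;odata=verbose",
    "X-RequestDigest": document.getElementById("__REQUESTDIGEST").value
  };

  // 1. Create an empty placeholder file in the target library.
  await fetch(webUrl + "/_api/web/GetFolderByServerRelativeUrl('" + folderUrl + "')" +
    "/Files/add(url='" + file.name + "',overwrite=true)", { method: "POST", headers: headers });

  // 2. Push the content slice by slice with StartUpload / ContinueUpload / FinishUpload.
  //    Cap the slice size so there are always at least two slices (same idea as the
  //    0.8 * size trick in the minified code); 10 MB is an arbitrary choice.
  const sliceSize = Math.min(10 * 1024 * 1024, Math.ceil(file.size / 2));
  const uploadId = crypto.randomUUID(); // unique id for this sliced-upload session (modern browsers only)
  const fileApi = webUrl + "/_api/web/GetFileByServerRelativeUrl('" + folderUrl + "/" + file.name + "')";
  let offset = 0;

  while (offset < file.size) {
    const slice = file.slice(offset, Math.min(offset + sliceSize, file.size));
    const isLast = offset + slice.size >= file.size;
    const method = offset === 0
      ? "StartUpload(uploadId=guid'" + uploadId + "')"
      : (isLast ? "FinishUpload" : "ContinueUpload") + "(uploadId=guid'" + uploadId + "',fileOffset=" + offset + ")";

    await fetch(fileApi + "/" + method, {
      method: "POST",
      headers: { ...headers, "Content-Type": "application/octet-stream" },
      body: slice
    });
    offset += slice.size;
  }
}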
Answer 1 (score: -1)
public Microsoft.SharePoint.Client.File UploadFileSlicePerSlice(ClientContext ctx, string libraryName, string fileName, int fileChunkSizeInMB = 3)
{
    // Each sliced upload requires a unique ID.
    Guid uploadId = Guid.NewGuid();

    // Get the name of the file.
    string uniqueFileName = Path.GetFileName(fileName);

    // Ensure that target library exists, and create it if it is missing.
    if (!LibraryExists(ctx, ctx.Web, libraryName))
    {
        CreateLibrary(ctx, ctx.Web, libraryName);
    }

    // Get the folder to upload into.
    List docs = ctx.Web.Lists.GetByTitle(libraryName);
    ctx.Load(docs, l => l.RootFolder);
    // Get the information about the folder that will hold the file.
    ctx.Load(docs.RootFolder, f => f.ServerRelativeUrl);
    ctx.ExecuteQuery();

    // File object.
    Microsoft.SharePoint.Client.File uploadFile;

    // Calculate block size in bytes.
    int blockSize = fileChunkSizeInMB * 1024 * 1024;

    // Get the information about the folder that will hold the file.
    ctx.Load(docs.RootFolder, f => f.ServerRelativeUrl);
    ctx.ExecuteQuery();

    // Get the size of the file.
    long fileSize = new FileInfo(fileName).Length;

    if (fileSize <= blockSize)
    {
        // Use regular approach.
        using (FileStream fs = new FileStream(fileName, FileMode.Open))
        {
            FileCreationInformation fileInfo = new FileCreationInformation();
            fileInfo.ContentStream = fs;
            fileInfo.Url = uniqueFileName;
            fileInfo.Overwrite = true;
            uploadFile = docs.RootFolder.Files.Add(fileInfo);
            ctx.Load(uploadFile);
            ctx.ExecuteQuery();
            // Return the file object for the uploaded file.
            return uploadFile;
        }
    }
    else
    {
        // Use large file upload approach.
        ClientResult<long> bytesUploaded = null;
        FileStream fs = null;
        try
        {
            fs = System.IO.File.Open(fileName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
            using (BinaryReader br = new BinaryReader(fs))
            {
                byte[] buffer = new byte[blockSize];
                Byte[] lastBuffer = null;
                long fileoffset = 0;
                long totalBytesRead = 0;
                int bytesRead;
                bool first = true;
                bool last = false;

                // Read data from file system in blocks.
                while ((bytesRead = br.Read(buffer, 0, buffer.Length)) > 0)
                {
                    totalBytesRead = totalBytesRead + bytesRead;

                    // You've reached the end of the file.
                    if (totalBytesRead == fileSize)
                    {
                        last = true;
                        // Copy to a new buffer that has the correct size.
                        lastBuffer = new byte[bytesRead];
                        Array.Copy(buffer, 0, lastBuffer, 0, bytesRead);
                    }

                    if (first)
                    {
                        using (MemoryStream contentStream = new MemoryStream())
                        {
                            // Add an empty file.
                            FileCreationInformation fileInfo = new FileCreationInformation();
                            fileInfo.ContentStream = contentStream;
                            fileInfo.Url = uniqueFileName;
                            fileInfo.Overwrite = true;
                            uploadFile = docs.RootFolder.Files.Add(fileInfo);

                            // Start upload by uploading the first slice.
                            using (MemoryStream s = new MemoryStream(buffer))
                            {
                                // Call the start upload method on the first slice.
                                bytesUploaded = uploadFile.StartUpload(uploadId, s);
                                ctx.ExecuteQuery();
                                // fileoffset is the pointer where the next slice will be added.
                                fileoffset = bytesUploaded.Value;
                            }

                            // You can only start the upload once.
                            first = false;
                        }
                    }
                    else
                    {
                        // Get a reference to your file.
                        uploadFile = ctx.Web.GetFileByServerRelativeUrl(docs.RootFolder.ServerRelativeUrl + System.IO.Path.AltDirectorySeparatorChar + uniqueFileName);

                        if (last)
                        {
                            // Is this the last slice of data?
                            using (MemoryStream s = new MemoryStream(lastBuffer))
                            {
                                // End sliced upload by calling FinishUpload.
                                uploadFile = uploadFile.FinishUpload(uploadId, fileoffset, s);
                                ctx.ExecuteQuery();

                                // Return the file object for the uploaded file.
                                return uploadFile;
                            }
                        }
                        else
                        {
                            using (MemoryStream s = new MemoryStream(buffer))
                            {
                                // Continue sliced upload.
                                bytesUploaded = uploadFile.ContinueUpload(uploadId, fileoffset, s);
                                ctx.ExecuteQuery();
                                // Update fileoffset for the next slice.
                                fileoffset = bytesUploaded.Value;
                            }
                        }
                    }
                } // while ((bytesRead = br.Read(buffer, 0, buffer.Length)) > 0)
            }
        }
        finally
        {
            if (fs != null)
            {
                fs.Dispose();
            }
        }
    }

    return null;
}
Update
Check the following sample for a JS solution.