Amazon S3 "Stream is too long"

Date: 2016-05-24 23:51:19

Tags: c# amazon-web-services amazon-s3

When I try to download a large file from Amazon S3 (a 2GB zip file), the exception "stream is too long" is thrown. This is how I read the file from Amazon into a stream:

  var s3File = new S3FileInfo(Client, BucketName, ObjectKey);
  var stream = s3File.OpenRead();

Is it possible to read the file contents in small chunks and then merge them locally?

-Alan-

2 answers:

Answer 0: (score: 1)
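The idea is to ask S3 for the object in pieces: issue ranged GET requests (GetObjectRequest.ByteRange) so each response stays small, and append each chunk to a local file until the downloaded size matches the object size.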

    public class BuketObjectResult
    {
        public bool Success { get; set; }

        public long Size { get; set; }
    }


    // Assumed members (not shown in the original answer): the S3 client and
    // the number of chunks to split the object into.
    private readonly AmazonS3Client s3client = new AmazonS3Client();
    private const int CHUNK = 100; // e.g. a 2GB file in ~20MB chunks

    public void GetBucketObjectData()
    {
        try
        {
            BuketObjectResult res = CheckFile();

            // Divide the object into CHUNK pieces, e.g. a 2GB file into ~20MB chunks.
            int chunkSize = (int)(res.Size / CHUNK);

            if (!res.Success || res.Size == 0 || chunkSize <= 0)
            {
                return;
            }

            string fileName = "Your file name";

            long startPosition = 0;
            long endPosition = 0;

            while (startPosition < res.Size)
            {
                byte[] chunk = new byte[chunkSize];

                // ByteRange is inclusive at both ends, so the last byte of
                // this request is startPosition + chunkSize - 1.
                endPosition = startPosition + chunkSize - 1;

                if (endPosition >= res.Size) // clamp the final, partial chunk
                    endPosition = res.Size - 1;

                GetObjectRequest request = new GetObjectRequest
                {
                    BucketName = "your bucket name",
                    Key = "your key",
                    ByteRange = new ByteRange(startPosition, endPosition)
                };

                using (GetObjectResponse response = s3client.GetObject(request))
                using (Stream responseStream = response.ResponseStream)
                // Note: FileMode.Append, so delete any existing file before a fresh run.
                using (FileStream fileStream = File.Open(fileName, FileMode.Append))
                {
                    int readIndex = ReadChunk(responseStream, ref chunk);

                    startPosition += readIndex;

                    if (readIndex != 0)
                    {
                        fileStream.Write(chunk, 0, readIndex);
                    }

                    if (readIndex != chunk.Length) // short read: this was the last chunk
                        break;
                }
            }

            // Verify that the assembled file matches the size reported by S3.
            FileInfo fi = new FileInfo(fileName);

            if (fi.Length == res.Size)
            {
                res.Success = true;
            }
        }
        catch (Exception e)
        {
            // The original answer swallows the exception; at least log it.
            Console.WriteLine(e);
        }
    }

    public BuketObjectResult CheckFile()
    {
        BuketObjectResult res = new BuketObjectResult() { Success = false };

        try
        {
            // List the objects under the prefix and take the first non-empty
            // one to learn the size of the file to download.
            ListObjectsRequest request = new ListObjectsRequest()
            {
                BucketName = "bucketName here",
                Delimiter = "/",
                Prefix = "Location here"
            };

            ListObjectsResponse response = s3client.ListObjects(request);

            if (response.S3Objects != null && response.S3Objects.Count > 0)
            {
                S3Object o = response.S3Objects.Where(x => x.Size != 0).FirstOrDefault();

                if (o != null)
                {
                    res.Success = true;
                    res.Size = o.Size;
                }
            }
        }
        catch (Exception e)
        {
            // The original answer swallows the exception; at least log it.
            Console.WriteLine(e);
        }

        return res;
    }
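ReadChunk is not shown in the original answer. A minimal sketch (signature assumed from the call above) that keeps calling Stream.Read until the buffer is full or the stream ends:

    // Assumed helper (not part of the original answer): a single Stream.Read
    // may return fewer bytes than requested, so loop until the buffer is
    // full or the stream is exhausted.
    public int ReadChunk(Stream stream, ref byte[] chunk)
    {
        int totalRead = 0;

        while (totalRead < chunk.Length)
        {
            int read = stream.Read(chunk, totalRead, chunk.Length - totalRead);

            if (read == 0) // end of stream
                break;

            totalRead += read;
        }

        return totalRead;
    }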

Answer 1: (score: 0)

The Amazon S3 SDK has a TransferUtility class. It supports uploading and downloading large files (>2GB) to and from Amazon S3. Another way to download using C# and the Amazon SDK:

            AmazonS3Config cfg = new AmazonS3Config();
            cfg.RegionEndpoint = Amazon.RegionEndpoint.EUCentral1; // region of your bucket
            string bucketName = "your bucket";
            AmazonS3Client s3Client = new AmazonS3Client("your accesskey", "your secret key", cfg);
            TransferUtility t = new TransferUtility(s3Client);
            // Arguments: local file path, bucket name, object key.
            t.Download(@"c:\ReadFiles\read.txt", bucketName, "testUploadFromTransferUtility.txt");
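Unlike reading the whole object into one stream, TransferUtility.Download writes directly to the target file on disk, so the object never has to fit into a single in-memory stream.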