6

使用 .NET SDK v.1.5.21.0

我正在尝试上传一个大文件(63Mb),我正在按照以下示例进行操作:

http://docs.aws.amazon.com/AmazonS3/latest/dev/LLuploadFileDotNet.html

但是使用助手方法而不是完整的示例代码,并使用 jQuery File Upload

https://github.com/blueimp/jQuery-File-Upload/blob/master/basic-plus.html

我所拥有的是:

// Destination S3 bucket for every part of this multipart upload.
string bucket = "mybucket";

// Chunk bookkeeping derived from custom request headers set by the
// jQuery File Upload client.
// NOTE(review): the header name "X-File-UloadedBytes" looks like a typo
// for "X-File-UploadedBytes" — confirm it matches what the client sends.
long totalSize = long.Parse(context.Request.Headers["X-File-Size"]),
        maxChunkSize = long.Parse(context.Request.Headers["X-File-MaxChunkSize"]),
        uploadedBytes = long.Parse(context.Request.Headers["X-File-UloadedBytes"]),
        // 1-based S3 part number: how many full chunks precede this one, plus one.
        partNumber = uploadedBytes / maxChunkSize + 1,
        // NOTE(review): partNumber * inputStream.Length is only the true file
        // size when every chunk (including this one) has identical length —
        // for the last, smaller chunk this under/over-estimates. Verify intent.
        fileSize = partNumber * inputStream.Length;

// A chunk shorter than the maximum chunk size can only be the final part.
bool lastPart = inputStream.Length < maxChunkSize;

// http://docs.aws.amazon.com/AmazonS3/latest/dev/LLuploadFileDotNet.html
if (partNumber == 1) // initialize upload
{
    iView.Utilities.Amazon_S3.S3MultipartUpload.InitializePartToCloud(fileName, bucket);
}

try
{
    // upload part
    // NOTE(review): this passes `fs`, but the stream read above is
    // `inputStream` — confirm `fs` is the intended chunk stream.
    iView.Utilities.Amazon_S3.S3MultipartUpload.UploadPartToCloud(fs, fileName, bucket, (int)partNumber, uploadedBytes, maxChunkSize);

    if (lastPart)
        // wrap it up and go home
        iView.Utilities.Amazon_S3.S3MultipartUpload.CompletePartToCloud(fileName, bucket);

}
catch (System.Exception ex)
{
    // Huston, we have a problem!
    //Console.WriteLine("Exception occurred: {0}", exception.Message);
    // Abort so S3 discards the already-uploaded parts (otherwise they
    // keep accruing storage charges until aborted).
    iView.Utilities.Amazon_S3.S3MultipartUpload.AbortPartToCloud(fileName, bucket);
}

/// <summary>
/// Thin static wrapper around the low-level Amazon S3 multipart-upload API:
/// initiate, upload parts, then complete (or abort on failure).
/// </summary>
public static class S3MultipartUpload
{
    private static string accessKey = System.Configuration.ConfigurationManager.AppSettings["AWSAccessKey"];
    private static string secretAccessKey = System.Configuration.ConfigurationManager.AppSettings["AWSSecretKey"];
    private static AmazonS3 client = Amazon.AWSClientFactory.CreateAmazonS3Client(accessKey, secretAccessKey);
    // Response of InitiateMultipartUpload; holds the UploadId required by
    // every subsequent part/complete/abort call.
    public static InitiateMultipartUploadResponse initResponse;
    // One response (carrying the part's ETag) per uploaded part, in order.
    public static List<UploadPartResponse> uploadResponses;

    /// <summary>
    /// Step 1: start a multipart upload for the given key and remember its UploadId.
    /// </summary>
    public static void InitializePartToCloud(string destinationFilename, string destinationBucket)
    {
        // 1. Initialize.
        uploadResponses = new List<UploadPartResponse>();

        InitiateMultipartUploadRequest initRequest =
            new InitiateMultipartUploadRequest()
            .WithBucketName(destinationBucket)
            .WithKey(destinationFilename.TrimStart('/'));

        initResponse = client.InitiateMultipartUpload(initRequest);
    }

    /// <summary>
    /// Step 2: upload one chunk. <paramref name="partNumber"/> is 1-based.
    /// <paramref name="uploadedBytes"/> and <paramref name="maxChunkedBytes"/>
    /// are kept for interface compatibility but no longer drive the request
    /// (see bug-fix comment below).
    /// </summary>
    public static void UploadPartToCloud(Stream fileStream, string destinationFilename, string destinationBucket, int partNumber, long uploadedBytes, long maxChunkedBytes)
    {
        // 2. Upload Parts.
        // BUG FIX: PartSize must be the actual byte count of THIS chunk's
        // stream, not the nominal max chunk size — the final part is usually
        // smaller. Also, FilePosition applies only to file-path uploads;
        // combined with WithInputStream it makes the SDK seek past the end of
        // the (chunk-sized) stream and send 0 bytes, which is exactly the
        // "EntityTooSmall / ProposedSize=0" error seen in the question.
        UploadPartRequest request = new UploadPartRequest()
            .WithBucketName(destinationBucket)
            .WithKey(destinationFilename.TrimStart('/'))
            .WithUploadId(initResponse.UploadId)
            .WithPartNumber(partNumber)
            .WithPartSize(fileStream.Length)
            .WithInputStream(fileStream) as UploadPartRequest;

        uploadResponses.Add(client.UploadPart(request));
    }

    /// <summary>
    /// Step 3: complete the upload, handing S3 the collected part ETags so it
    /// can assemble the final object.
    /// </summary>
    public static void CompletePartToCloud(string destinationFilename, string destinationBucket)
    {
        // Step 3: complete.
        CompleteMultipartUploadRequest compRequest =
            new CompleteMultipartUploadRequest()
            .WithBucketName(destinationBucket)
            .WithKey(destinationFilename.TrimStart('/'))
            .WithUploadId(initResponse.UploadId)
            .WithPartETags(uploadResponses);

        CompleteMultipartUploadResponse completeUploadResponse =
            client.CompleteMultipartUpload(compRequest);
    }

    /// <summary>
    /// Abort the in-flight upload so S3 discards already-stored parts
    /// (otherwise they keep incurring storage charges).
    /// </summary>
    public static void AbortPartToCloud(string destinationFilename, string destinationBucket)
    {
        // abort.
        client.AbortMultipartUpload(new AbortMultipartUploadRequest()
                .WithBucketName(destinationBucket)
                .WithKey(destinationFilename.TrimStart('/'))
                .WithUploadId(initResponse.UploadId));
    }
}

maxChunkSize 是 6Mb (6 * (1024*1024)),因为我读到过最小分段大小是 5Mb……

为什么我会出现"Your proposed upload is smaller than the minimum allowed size"异常?我究竟做错了什么?

错误是:

<Error>
  <Code>EntityTooSmall</Code>
  <Message>Your proposed upload is smaller than the minimum allowed size</Message>
  <ETag>d41d8cd98f00b204e9800998ecf8427e</ETag>
  <MinSizeAllowed>5242880</MinSizeAllowed>
  <ProposedSize>0</ProposedSize>
  <RequestId>C70E7A23C87CE5FC</RequestId>
  <HostId>pmhuMXdRBSaCDxsQTHzucV5eUNcDORvKY0L4ZLMRBz7Ch1DeMh7BtQ6mmfBCLPM2</HostId>
  <PartNumber>1</PartNumber>
</Error>

如果我传递了流和流的长度,ProposedSize 怎么会是 0 呢?

4

2 回答 2

1

这是最新 Amazon SDK 的工作解决方案(今天:v.1.5.37.0)

Amazon S3 分段上传的工作方式如下:

  1. 使用初始化请求client.InitiateMultipartUpload(initRequest)
  2. 使用发送文件块(循环直到结束)client.UploadPart(request)
  3. 使用完成请求client.CompleteMultipartUpload(compRequest)
  4. 如果出现任何问题,请记住处理客户端和请求,并使用以下命令触发 abort 命令client.AbortMultipartUpload(abortMultipartUploadRequest)

我将客户端保留在 Session 中,因为每个分块上传时都需要它;同时保留各分块的 ETags,供最后完成整个上传过程时使用。


您可以在Amazon Docs本身中看到一个示例和简单的方法,我最终拥有了一个类来完成所有事情,此外,我还集成了可爱的jQuery File Upload插件(下面还有处理程序代码)。

S3MultipartUpload 类如下:

/// <summary>
/// Per-upload wrapper around Amazon S3 multipart upload. Construction
/// creates the S3 client and immediately initiates the upload; call
/// UploadPartToCloud for each chunk, then CompletePartToCloud (or
/// AbortPartToCloud on failure), and Dispose when done.
/// </summary>
public class S3MultipartUpload : IDisposable
{
    string accessKey = System.Configuration.ConfigurationManager.AppSettings.Get("AWSAccessKey");
    string secretAccessKey = System.Configuration.ConfigurationManager.AppSettings.Get("AWSSecretKey");

    AmazonS3 client;
    // Filename as supplied by the caller (leading '/' stripped).
    public string OriginalFilename { get; set; }
    // Timestamp-prefixed S3 key actually used, to avoid name collisions.
    public string DestinationFilename { get; set; }
    public string DestinationBucket { get; set; }

    // Holds the UploadId required by every part/complete/abort call.
    public InitiateMultipartUploadResponse initResponse;
    // ETag + part number of each uploaded chunk, needed to complete the upload.
    public List<PartETag> uploadPartETags;
    public string UploadId { get; private set; }

    public S3MultipartUpload(string destinationFilename, string destinationBucket)
    {
        // BUG FIX: the original wrapped everything below in
        // "if (client == null)". `client` is an instance field, so that
        // check was always true in a constructor — dead code that, had it
        // ever been false, would have silently skipped all initialization.
        System.Net.WebRequest.DefaultWebProxy = null; // disable proxy to make upload quicker

        client = Amazon.AWSClientFactory.CreateAmazonS3Client(accessKey, secretAccessKey, new AmazonS3Config()
        {
            RegionEndpoint = Amazon.RegionEndpoint.EUWest1,
            CommunicationProtocol = Protocol.HTTP
        });

        this.OriginalFilename = destinationFilename.TrimStart('/');
        // Prefix with a UTC timestamp so repeated uploads of the same file
        // never overwrite each other.
        this.DestinationFilename = string.Format("{0:yyyy}{0:MM}{0:dd}{0:HH}{0:mm}{0:ss}{0:fffff}_{1}", DateTime.UtcNow, this.OriginalFilename);
        this.DestinationBucket = destinationBucket;

        this.InitializePartToCloud();
    }

    /// <summary>Step 1: initiate the multipart upload and record its UploadId.</summary>
    private void InitializePartToCloud()
    {
        // 1. Initialize.
        uploadPartETags = new List<PartETag>();

        InitiateMultipartUploadRequest initRequest = new InitiateMultipartUploadRequest();
        initRequest.BucketName = this.DestinationBucket;
        initRequest.Key = this.DestinationFilename;

        // make it public
        initRequest.AddHeader("x-amz-acl", "public-read");

        initResponse = client.InitiateMultipartUpload(initRequest);
    }

    /// <summary>
    /// Step 2: upload one chunk. The part number is derived from how many
    /// parts have already succeeded; PartSize is the chunk stream's own
    /// length so the (smaller) final part is sized correctly.
    /// </summary>
    public void UploadPartToCloud(Stream fileStream, long uploadedBytes, long maxChunkedBytes)
    {
        int partNumber = uploadPartETags.Count() + 1; // current part

        // 2. Upload Parts.
        UploadPartRequest request = new UploadPartRequest();
        request.BucketName = this.DestinationBucket;
        request.Key = this.DestinationFilename;
        request.UploadId = initResponse.UploadId;
        request.PartNumber = partNumber;
        request.PartSize = fileStream.Length;
        // FilePosition is intentionally NOT set: it only applies to
        // file-path uploads and would make the SDK seek past the end of
        // this chunk-sized stream (sending 0 bytes).
        request.InputStream = fileStream;

        var up = client.UploadPart(request);
        uploadPartETags.Add(new PartETag() { ETag = up.ETag, PartNumber = partNumber });
    }

    /// <summary>
    /// Step 3: complete the upload with the collected part ETags.
    /// Returns S3's response XML (or a fallback message on failure).
    /// </summary>
    public string CompletePartToCloud()
    {
        // Step 3: complete.
        CompleteMultipartUploadRequest compRequest = new CompleteMultipartUploadRequest();
        compRequest.BucketName = this.DestinationBucket;
        compRequest.Key = this.DestinationFilename;
        compRequest.UploadId = initResponse.UploadId;
        compRequest.PartETags = uploadPartETags;

        string r = "Something went badly wrong";

        using (CompleteMultipartUploadResponse completeUploadResponse = client.CompleteMultipartUpload(compRequest))
            r = completeUploadResponse.ResponseXml;

        return r;
    }

    /// <summary>
    /// Abort the in-flight upload so S3 discards already-stored parts
    /// (they keep incurring storage charges until aborted).
    /// </summary>
    public void AbortPartToCloud()
    {
        // abort.
        client.AbortMultipartUpload(new AbortMultipartUploadRequest()
        {
            BucketName = this.DestinationBucket,
            Key = this.DestinationFilename,
            UploadId = initResponse.UploadId
        });
    }

    public void Dispose()
    {
        if (client != null) client.Dispose();
        if (initResponse != null) initResponse.Dispose();
    }
}

我使用 DestinationFilename 作为实际的目标文件名,这样可以避免重名冲突;同时保留 OriginalFilename,以备以后需要时使用。

使用 jQuery File Upload Plugin,所有的工作都在一个 Generic Handler 中,过程是这样的:

// Upload partial file: receives one chunk per request from the jQuery File
// Upload plugin, forwards it to S3 via the session-held S3MultipartUpload,
// and completes (or aborts) the multipart upload on the final chunk.
private void UploadPartialFile(string fileName, HttpContext context, List<FilesStatus> statuses)
{
    if (context.Request.Files.Count != 1)
        throw new HttpRequestValidationException("Attempt to upload chunked file containing more than one fragment per request");

    var inputStream = context.Request.Files[0].InputStream;
    string contentRange = context.Request.Headers["Content-Range"]; // "bytes 0-6291455/14130271"

    // BUG FIX: the original line ended in ";," (a syntax error) —
    // declare all three locals in one comma-separated statement.
    int fileSize = int.Parse(contentRange.Split('/')[1]),
        maxChunkSize = int.Parse(context.Request.Headers["X-Max-Chunk-Size"]),
        uploadedBytes = int.Parse(contentRange.Replace("bytes ", "").Split('-')[0]);

    iView.Utilities.AWS.S3MultipartUpload s3Upload = null;

    try
    {

        // ######################################################################################
        // 1. Initialize Amazon S3 Client (first chunk only)
        if (uploadedBytes == 0)
        {
            HttpContext.Current.Session["s3-upload"] = new iView.Utilities.AWS.S3MultipartUpload(fileName, awsBucket);

            s3Upload = (iView.Utilities.AWS.S3MultipartUpload)HttpContext.Current.Session["s3-upload"];
            // BUG FIX: fileSize / 1024 is KB, not Mb — divide by 1024*1024.
            string msg = System.String.Format("Upload started: {0} ({1:N0}Mb)", s3Upload.DestinationFilename, (fileSize / (1024 * 1024)));
            this.Log(msg);
        }

        // cast current session object
        if (s3Upload == null)
            s3Upload = (iView.Utilities.AWS.S3MultipartUpload)HttpContext.Current.Session["s3-upload"];

        // ######################################################################################
        // 2. Send Chunks
        s3Upload.UploadPartToCloud(inputStream, uploadedBytes, maxChunkSize);

        // ######################################################################################
        // 3. Complete Upload (this chunk reaches or passes the end of the file)
        if (uploadedBytes + maxChunkSize > fileSize)
        {
            string completeRequest = s3Upload.CompletePartToCloud();
            this.Log(completeRequest); // log S3 response

            s3Upload.Dispose(); // dispose all objects
            HttpContext.Current.Session["s3-upload"] = null; // we don't need this anymore
        }

    }
    catch (System.Exception ex)
    {
        // Unwrap to the innermost exception for a meaningful message.
        while (ex.InnerException != null)
            ex = ex.InnerException;

        this.Log(string.Format("{0}\n\n{1}", ex.Message, ex.StackTrace)); // log error

        // BUG FIX: s3Upload is null when the failure occurs before it is
        // assigned (e.g. bad headers) — guard so the real error isn't
        // masked by a NullReferenceException.
        if (s3Upload != null)
        {
            s3Upload.AbortPartToCloud(); // abort current upload
            s3Upload.Dispose(); // dispose all objects
        }

        statuses.Add(new FilesStatus(ex.Message));
        return;
    }

    statuses.Add(new FilesStatus(s3Upload.DestinationFilename, fileSize, ""));
}

请记住,要在 Generic Handler 中使用 Session 对象,您需要实现 IRequiresSessionState,这样您的处理程序将如下所示:

public class UploadHandlerSimple : IHttpHandler, IRequiresSessionState

在 fileupload.js 中(_initXHRData 下)我添加了一个额外的请求头 X-Max-Chunk-Size,这样我就可以把它传给亚马逊,并计算当前是否是上传文件的最后一个分块。


随意发表评论并进行智能编辑以供所有人使用。

于 2013-10-31T19:54:52.350 回答
0

我猜你没有在 UploadPartToCloud() 函数中设置部分的内容长度。

于 2013-06-07T09:32:36.637 回答