Amazon S3 への(低レベルの)マルチパート アップロードが今日うまくいきません。ここ 1 日ほどで失敗し始めました。80 個以上のファイルのアップロード キューがあり、約 60 個のファイルが終わるまでは問題ありませんでしたが、その後は成功するよりも失敗する方が多くなりました。それ以降は、ファイルが 1 つだけのキューでも失敗します。
ドキュメントの低レベルのマルチパート アップロードの例と基本的に同じコードを使用していますが、失敗した場合に単一のパートのアップロードを再試行する do-while ループを除きます。成功したパーツのアップロードのみが List に追加され、後で CompleteMultipartUploadRequest の一部として追加されます。
ただし、パーツのアップロードが失敗することはありません。すべてのパーツのアップロードに続いて送信される CompleteMultipartUploadRequest のみ。これは、すべての失敗で私が見た唯一の例外であり、常に CompleteMultipartUpload 要求に起因します。
S3 側でパーツを連結する「準備」がまだ整っていないというタイミングの問題である場合に備えて、CompleteMultipartUpload オブジェクトの作成とリクエスト送信をループでラップし、かなり長い待ち時間を挟む漸進的なバックオフも試しましたが、役に立ちませんでした。
Exception: Maximum number of retry attempts reached : 3
Exception: at Amazon.S3.AmazonS3Client.pauseOnRetry(Int32 retries, Int32 maxRetries, HttpStatusCode status, String requestAddr, WebHeaderCollection headers, Exception cause)
at Amazon.S3.AmazonS3Client.handleRetry(S3Request userRequest, HttpWebRequest request, WebHeaderCollection respHdrs, Int64 orignalStreamPosition, Int32 retries, HttpStatusCode statusCode, Exception cause)
at Amazon.S3.AmazonS3Client.getResponseCallback[T](IAsyncResult result)
at Amazon.S3.AmazonS3Client.endOperation[T](IAsyncResult result)
at Amazon.S3.AmazonS3Client.EndCompleteMultipartUpload(IAsyncResult asyncResult)
at Amazon.S3.AmazonS3Client.CompleteMultipartUpload(CompleteMultipartUploadRequest request)
これが以下のコードです。何が間違っている可能性があるかについて何か提案はありますか?
// List to store upload part responses (kept for the historical
// .WithPartETags(List<UploadPartResponse>) overload that was used before).
List<UploadPartResponse> uploadResponses = new List<UploadPartResponse>();
// Part-number/ETag pairs actually attached to the CompleteMultipartUploadRequest.
List<PartETag> uploadPartETags = new List<PartETag>();

// 1. Initiate the multipart upload and remember the upload ID;
//    every part upload and the final complete call must carry it.
InitiateMultipartUploadRequest initiateRequest = new InitiateMultipartUploadRequest()
    .WithBucketName(s3bucketName)
    .WithKey(key);
initResponse = s3Client.InitiateMultipartUpload(initiateRequest);
bwLogUploadFiles("multipart upload ID " + initResponse.UploadId);

// 2. Upload the file in parts.
uploadFileSize = new FileInfo(sourceFilepath).Length;
uploadTypicalPartSize = PART_SIZE_DEFAULT; // 5 MB
// Ceiling division: the old "size / partSize + 1" over-counted by one part
// whenever the file size was an exact multiple of the part size.
uploadNumParts = (uploadFileSize + uploadTypicalPartSize - 1) / uploadTypicalPartSize;
Debug.WriteLine("# of parts: " + uploadNumParts);

int retryCount = 0;
long filePosition = 0;
for (int i = 1; filePosition < uploadFileSize; i++)
{
    uploadCurrentPart = i;
    //long percent = (100 * filePosition) / uploadFileSize;
    //reportUploadProgress((int)percent, filePosition);
    bwLogUploadFiles("upload part " + i + " of " + uploadNumParts);
    retryCount = 0;

    // BUG FIX: the declared part size must never extend past the end of the
    // file. Using a fixed 5 MB size for the final (shorter) part makes that
    // part inconsistent with what S3 recorded, and the mismatch surfaces
    // later as a CompleteMultipartUpload failure/retry storm — which matches
    // the observed "Maximum number of retry attempts reached" exception.
    // This is also what the AWS documentation example does.
    long tmpPartSize = Math.Min(uploadTypicalPartSize, uploadFileSize - filePosition);

    // Create request to upload this part directly from the source file.
    UploadPartRequest uploadRequest = new UploadPartRequest()
        .WithBucketName(s3bucketName)
        .WithKey(key)
        .WithUploadId(initResponse.UploadId)
        .WithPartNumber(i)
        .WithPartSize(tmpPartSize)
        .WithFilePosition(filePosition)
        .WithFilePath(sourceFilepath)
        .WithSubscriber(transferUtilityUploadSubscriberLowLevel)
        .WithReadWriteTimeout(PART_TIMEOUT)
        .WithTimeout(UPLOAD_TIMEOUT);

    UploadPartResponse resp = null;
    // Repeat this single part upload until it succeeds or MAX_RETRIES is hit.
    bool anotherPass;
    do
    {
        anotherPass = false; // assume everything is OK
        try
        {
            // Upload part.
            resp = s3Client.UploadPart(uploadRequest);
            // Only successful responses are recorded, so the ETag list sent
            // to CompleteMultipartUpload never contains a failed part.
            uploadResponses.Add(resp);
            uploadPartETags.Add(new PartETag(resp.PartNumber, resp.ETag));
            bwLogUploadFiles("upload part " + resp.PartNumber + " of " + uploadNumParts
                + " success. Part ETag " + resp.ETag);
        }
        catch (Exception e)
        {
            anotherPass = true; // repeat
            retryCount++;
            Debug.WriteLine(e.Message + ": retry part #" + i);
            bwLogUploadFiles("upload part " + i + " of " + uploadNumParts
                + " FAIL. Will retry if attempt #" + retryCount + " < " + MAX_RETRIES);
        }
    } while (anotherPass && retryCount < MAX_RETRIES);

    // Abort the whole upload if one part exhausted its retries; completing
    // with a missing part would fail anyway.
    if (anotherPass)
    {
        throw new Exception("uploading part #" + i + " couldn't upload after "
            + MAX_RETRIES + " attempts. Upload failed");
    }

    // Advance only after the part is confirmed uploaded (was previously
    // advanced even on failure in the corrupted paste).
    filePosition += tmpPartSize;
}
//reportUploadProgress(100, uploadFileSize);

// Step 3: complete the multipart upload.
bool retryCompleteRequest = true;
bool completeSuccess = false;
int completeAttempts = 0;
const int delaySecondsMultiple = 3;
// Retry a few times in case it's just a timing or S3 readiness issue;
// each retry waits progressively longer (3 s, 6 s, ...).
do
{
    retryCompleteRequest = false;
    try
    {
        bwLogUploadFiles("complete the multipart upload, attempt #" + (completeAttempts + 1));
        if (completeAttempts > 0)
        {
            bwLogUploadFiles("delay " + (delaySecondsMultiple * completeAttempts) + " seconds");
            Thread.Sleep(delaySecondsMultiple * 1000 * completeAttempts);
        }
        Debug.WriteLine("now complete the Multipart Upload Request");
        CompleteMultipartUploadRequest completeRequest = new CompleteMultipartUploadRequest()
            .WithBucketName(s3bucketName)
            .WithKey(key)
            .WithUploadId(initResponse.UploadId)
            //.WithPartETags(uploadResponses) // historically we've been attaching a List<UploadPartResponse>
            .WithPartETags(uploadPartETags); // part number + ETag for every uploaded part
        CompleteMultipartUploadResponse completeUploadResponse =
            s3Client.CompleteMultipartUpload(completeRequest);
        completeSuccess = true;
    }
    catch (Exception e)
    {
        completeAttempts++;
        retryCompleteRequest = true;
        Console.WriteLine("Exception occurred: {0}", e.Message);
        Console.WriteLine(e.StackTrace);
        bwLogUploadFiles("Exception: " + e.Message);
        bwLogUploadFiles("Exception: " + e.StackTrace);
    }
}
while (retryCompleteRequest && completeAttempts < MAX_RETRIES);