10

我正在开发一个应用程序:用户将 ZIP 文件上传到我的服务器,服务器上会解压该 ZIP 文件,然后我需要把解压出来的内容上传到 S3。现在我的问题是:如何使用 Java 将包含多个文件和子文件夹的目录上传到 S3 存储桶?有没有这方面的例子?目前我使用 JetS3t 来管理与 S3 的所有通信。

4

3 回答 3

22

你好,这是将目录上传到 S3 存储桶的简单方法。

BasicAWSCredentials awsCreds = new BasicAWSCredentials(access_key_id,
            secret_access_key);
    AmazonS3 s3Client = new AmazonS3Client(awsCreds);

    TransferManager tm = TransferManagerBuilder.standard().withS3Client(s3Client).build();

    MultipleFileUpload upload = tm.uploadDirectory(existingBucketName,
            "BuildNumber#1", "FilePathYouWant", true);
于 2015-04-27T11:32:53.033 回答
4

我构建过非常相似的东西。在服务器上解压 zip 之后调用 FileUtils.listFiles(),它会递归地列出文件夹中的文件。只需迭代该列表,创建 S3Object 并把文件上传到 S3。利用多线程存储服务(ThreadedS3Service),可以同时上传多个文件。还要确保你处理了上传事件:如果某些文件上传失败,JetS3t 库会通过事件告诉你。等我到办公室后,我会把我写的代码贴出来。

编辑:代码:

这是代码:

    // JetS3t credentials/service handles shared by this uploader.
    private static ProviderCredentials credentials;
private static S3Service s3service;
// Multi-threaded wrapper around s3service; delivers event() callbacks to this object.
private static ThreadedS3Service storageService;
private static S3Bucket bucket;
// Objects queued for the current folder upload batch.
private List<S3Object> s3Objs=new ArrayList<S3Object>();
// Keys confirmed uploaded via EVENT_IN_PROGRESS callbacks; compared against
// s3Objs in uploadFilesInList() to decide which objects to retry.
private Set<String> s3ObjsCompleted=new HashSet<String>();
// Set true by event() on EVENT_ERROR; reset at the start of each batch.
private boolean isErrorOccured=true;
// Formatters for human-readable progress logging (bytes/sec, time remaining).
private final ByteFormatter byteFormatter = new ByteFormatter();
private final TimeFormatter timeFormatter = new TimeFormatter();


    // Wires up JetS3t: credentials -> REST service -> bucket -> threaded service.
    // NOTE(review): the angle-bracket placeholders are pseudo-code from the
    // original answer and must be replaced before this compiles.
    private void initialise() throws ServiceException, S3ServiceException {
    credentials=<create your credentials>;
        s3service = new RestS3Service(credentials);
        bucket = new S3Bucket(<bucket details>);
        // 'this' receives the upload event callbacks (see event() below).
        storageService=new ThreadedS3Service(s3service, this);
    }
    // NOTE(review): this extra '}' looks like a paste artifact in the answer —
    // verify the brace balance against the full class.
}

/**
 * Uploads every file found under {@code folder} to the configured bucket:
 * first collects the folder's files into the pending list, then uploads
 * that list (with retries for failed objects).
 */
private void uploadFolder(File folder) throws NoSuchAlgorithmException, IOException {
    readFolderContents(folder);
    uploadFilesInList(folder);
}
/**
 * Walks {@code folder} and queues one {@link S3Object} per file into
 * {@code s3Objs} with a key and a MIME type derived from that key.
 */
private void readFolderContents(File folder) throws NoSuchAlgorithmException, IOException {
    // NOTE(review): with the (IOFileFilter, IOFileFilter) overload that these
    // null arguments select, a null directory filter means subdirectories are
    // NOT visited — confirm this matches the recursive intent described above.
    Iterator<File> filesinFolder=FileUtils.iterateFiles(folder,null,null);

    while(filesinFolder.hasNext()) {
        File file=filesinFolder.next();
        // Placeholder from the original answer — derive the object key here.
        String key = <create your key from the filename or something>;
        S3Object s3Obj=new S3Object(bucket, file);
        s3Obj.setKey(key);
        // Guess the Content-Type from the key's file extension.
        s3Obj.setContentType(Mimetypes.getInstance().getMimetype(s3Obj.getKey()));
        s3Objs.add(s3Obj);  
    }
}
/**
 * Uploads everything in {@code s3Objs} via the threaded service, then — using
 * the completion/error state accumulated by {@link #event} — recursively
 * retries whatever did not complete.
 *
 * NOTE(review): the retry recursion is unbounded; if some object can never be
 * uploaded this will loop forever. Consider a maximum retry count.
 */
private void uploadFilesInList(File folder) {
    logger.debug("Uploading files in folder "+folder.getAbsolutePath());
    // Reset per-batch state consumed by the event() callback.
    isErrorOccured=false;
    s3ObjsCompleted.clear();

    // Blocks until this batch finishes; event() records progress meanwhile.
    storageService.putObjects(bucket.getName(), s3Objs.toArray(new S3Object[s3Objs.size()]));   

    if(isErrorOccured || s3Objs.size()!=s3ObjsCompleted.size()) {
        logger.debug("Have to try uploading a few objects again for folder "+folder.getAbsolutePath()+" - Completed = "+s3ObjsCompleted.size()+" and Total ="+s3Objs.size());
        // Keep only the objects whose keys were never confirmed as uploaded.
        List<S3Object> s3ObjsRemaining=new ArrayList<S3Object>();
        for(S3Object s3Obj : s3Objs) {
            if(!s3ObjsCompleted.contains(s3Obj.getKey())) {
                s3ObjsRemaining.add(s3Obj);
            }
        }
        s3Objs=s3ObjsRemaining;
        uploadFilesInList(folder);
    }
}

/**
 * JetS3t upload-event callback. Tracks per-batch state consumed by
 * {@code uploadFilesInList()}: completed keys (for retry computation),
 * the error flag, and progress logging.
 */
@Override
public void event(CreateObjectsEvent event) {
    super.event(event);
    if (ServiceEvent.EVENT_IGNORED_ERRORS == event.getEventCode()) {
        // Non-fatal errors: log and continue; objects whose keys never reach
        // s3ObjsCompleted will be retried by the caller.
        Throwable[] throwables = event.getIgnoredErrors();
        for (int i = 0; i < throwables.length; i++) {
            logger.error("Ignoring error: " + throwables[i].getMessage());
        }
    }else if(ServiceEvent.EVENT_STARTED == event.getEventCode()) {
        logger.debug("**********************************Upload Event Started***********************************");
    }else if(event.getEventCode()==ServiceEvent.EVENT_ERROR) {
        // Flag checked by uploadFilesInList() to trigger a retry pass.
        isErrorOccured=true;
    }else if(event.getEventCode()==ServiceEvent.EVENT_IN_PROGRESS) {
        // Record each object the service reports as created so far.
        StorageObject[] storeObjs=event.getCreatedObjects();
        for(StorageObject storeObj : storeObjs) {
            s3ObjsCompleted.add(storeObj.getKey());
        }
        ThreadWatcher watcher = event.getThreadWatcher();
        if (watcher.getBytesTransferred() >= watcher.getBytesTotal()) {
            logger.debug("Upload Completed.. Verifying");
        }else {
            // Log percentage, throughput, and (when available) time remaining.
            int percentage = (int) (((double) watcher.getBytesTransferred() / watcher.getBytesTotal()) * 100);

            long bytesPerSecond = watcher.getBytesPerSecond();
            StringBuilder transferDetailsText=new StringBuilder("Uploading.... ");
            transferDetailsText.append("Speed: " + byteFormatter.formatByteSize(bytesPerSecond) + "/s");

            if (watcher.isTimeRemainingAvailable()) {
                long secondsRemaining = watcher.getTimeRemaining();
                if (transferDetailsText.length() > 0) {
                    transferDetailsText.append(" - ");
                }
                transferDetailsText.append("Time remaining: " + timeFormatter.formatTime(secondsRemaining));
            }

            logger.debug(transferDetailsText.toString()+" "+percentage);
        }
    }else if(ServiceEvent.EVENT_COMPLETED==event.getEventCode()) {
        logger.debug("**********************************Upload Event Completed***********************************");
        if(isErrorOccured) {
            logger.debug("**********************But with errors, have to retry failed uploads**************************");
        }
    }
}
于 2012-01-06T03:00:10.253 回答
1

这是我在 2021 年 12 月的做法,因为 AmazonS3Client 的构造函数现在已被弃用,需要改用 AmazonS3ClientBuilder。

        // Read the key pair from the environment/Spring property source.
        // NOTE(review): 'AWSCredentials' and 'env' are declared outside this
        // fragment; the field name violates lowerCamelCase convention.
        AWSCredentials = new BasicAWSCredentials(env.getProperty("AWS_ACCESS_KEY_ID"),
                env.getProperty("AWS_SECRET_ACCESS_KEY"));
        // Builder API replaces the deprecated AmazonS3Client constructor.
        AmazonS3 s3Client = AmazonS3ClientBuilder.standard()
                .withRegion(Regions.US_EAST_1).withCredentials(new AWSStaticCredentialsProvider(AWSCredentials))
                .build();

        TransferManager tm = TransferManagerBuilder.standard().withS3Client(s3Client).build();

// Recursive directory upload; final 'true' includes subdirectories.
// NOTE(review): the returned MultipleFileUpload is asynchronous — callers
// should invoke upload.waitForCompletion() before relying on the result.
MultipleFileUpload upload = tm.uploadDirectory(existingBucketName,
            "BuildNumber#1", "FilePathYouWant", true);
于 2021-12-14T16:42:12.210 回答