Spring Cloud: testing an S3 client with Testcontainers
0 votes / 01 November 2018

I am using the Spring Cloud ResourceLoader to access S3, for example:

public class S3DownUpLoader {

    private final ResourceLoader resourceLoader;
    private String bucket; // bucket name, configured elsewhere (not shown in the question)

    @Autowired
    public S3DownUpLoader(ResourceLoader resourceLoader) {
        this.resourceLoader = resourceLoader;
    }

    public String storeOnS3(String filename, byte[] data) throws IOException {
        String location = "s3://" + bucket + "/" + filename;
        WritableResource writableResource = (WritableResource) this.resourceLoader.getResource(location);
        FileCopyUtils.copy(data, writableResource.getOutputStream());
        return filename;
    }
}
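
Only the upload path is shown above; the download counterpart could presumably go through the same s3:// resource. A minimal sketch, assuming the same resourceLoader and bucket (not part of the original question):

public byte[] loadFromS3(String filename) throws IOException {
    // the same s3:// location resolves to a readable Resource via the resource loader
    Resource resource = this.resourceLoader.getResource("s3://" + bucket + "/" + filename);
    try (InputStream in = resource.getInputStream()) {
        return FileCopyUtils.copyToByteArray(in);
    }
}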

The upload works fine, and I need help testing this code with Localstack / Testcontainers. I tried the following test, but it does not work - my production profile is picked up (the S3 client configured for Localstack is not injected):

@RunWith(SpringRunner.class)
@SpringBootTest
public class S3DownUpLoaderTest {

    @ClassRule
    public static LocalStackContainer localstack = new LocalStackContainer().withServices(S3);

    @Autowired
    S3DownUpLoader s3DownUpLoader;

    @Test
    public void testA() throws IOException {
        s3DownUpLoader.storeOnS3(...);
    }

    @TestConfiguration
    @EnableContextResourceLoader
    public static class S3Configuration {

        @Primary
        @Bean(destroyMethod = "shutdown")
        public AmazonS3 amazonS3() {
            return AmazonS3ClientBuilder
                    .standard()
                    .withEndpointConfiguration(localstack.getEndpointConfiguration(S3))
                    .withCredentials(localstack.getDefaultCredentialsProvider())
                    .build();
        }
    }
}

2 Answers

0 votes / 01 November 2018

It seems the only way to provide a custom amazonS3 bean to the ResourceLoader is to register it manually. The test looks like this:

@RunWith(SpringRunner.class)
@SpringBootTest
@ContextConfiguration(classes = S3DownUpLoaderTest.S3Configuration.class)
public class S3DownUpLoaderTest implements ApplicationContextAware {

    private static final String BUCKET_NAME = "bucket";

    @ClassRule
    public static LocalStackContainer localstack = new LocalStackContainer().withServices(S3);

    @Autowired
    S3DownUpLoader s3DownUpLoader;

    @Autowired
    SimpleStorageProtocolResolver resourceLoader;

    @Autowired
    AmazonS3 amazonS3;

    @Before
    public void setUp() {
        amazonS3.createBucket(BUCKET_NAME);
    }

    @Test
    public void someTestA() throws IOException {
        ....

    }

    @After
    public void tearDown() {
        // delete all objects page by page, then drop the bucket itself
        ObjectListing objectListing = amazonS3.listObjects(BUCKET_NAME);
        while (true) {
            for (S3ObjectSummary summary : objectListing.getObjectSummaries()) {
                amazonS3.deleteObject(BUCKET_NAME, summary.getKey());
            }

            // more objects to retrieve?
            if (objectListing.isTruncated()) {
                objectListing = amazonS3.listNextBatchOfObjects(objectListing);
            } else {
                break;
            }
        }

        amazonS3.deleteBucket(BUCKET_NAME);
    }

    @Override
    public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
        // register the s3:// protocol resolver so the ResourceLoader uses the Localstack-backed client
        if (applicationContext instanceof ConfigurableApplicationContext) {
            ConfigurableApplicationContext configurableApplicationContext = (ConfigurableApplicationContext) applicationContext;
            configurableApplicationContext.addProtocolResolver(this.resourceLoader);
        }
    }

    public static class S3Configuration {

        @Bean
        public S3DownUpLoader s3DownUpLoader(ResourceLoader resourceLoader) {
            return new S3DownUpLoader(resourceLoader);
        }

        @Bean(destroyMethod = "shutdown")
        public AmazonS3 amazonS3() {
            return AmazonS3ClientBuilder
                    .standard()
                    .withEndpointConfiguration(localstack.getEndpointConfiguration(S3))
                    .withCredentials(localstack.getDefaultCredentialsProvider())
                    .build();
        }

        @Bean
        public SimpleStorageProtocolResolver resourceLoader() {
            return new SimpleStorageProtocolResolver(amazonS3());
        }
    }
}
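
For illustration, the elided someTestA body could do a round trip through Localstack roughly like this (a sketch; the doesObjectExist / getObjectAsString verification is an assumption, not part of the original answer):

    @Test
    public void someTestA() throws IOException {
        // upload through the resource loader that now resolves s3:// against Localstack
        String key = s3DownUpLoader.storeOnS3("test.txt", "hello".getBytes());

        // verify the object really landed in the Localstack bucket
        assertTrue(amazonS3.doesObjectExist(BUCKET_NAME, key));
        assertEquals("hello", amazonS3.getObjectAsString(BUCKET_NAME, key));
    }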
0 votes / 01 November 2018

As we discussed on GitHub, we solve this problem a bit differently. I have actually never seen WritableResource used the way you do, which looks very interesting. Nevertheless, here is how we approach it:

@RunWith(SpringRunner.class)
@SpringBootTest(properties = "spring.profiles.active=test")
@ContextConfiguration(classes = AbstractAmazonS3Test.S3Configuration.class)
public abstract class AbstractAmazonS3Test  {

    private static final String REGION = Regions.EU_WEST_1.getName();

    /**
     * Configure S3.
     */
    @TestConfiguration
    public static class S3Configuration {

        @Bean
        public AmazonS3 amazonS3() {
            //localstack docker image is running locally on port 4572 for S3
            final String serviceEndpoint = String.format("http://%s:%s", "127.0.0.1", "4572");
            return AmazonS3Client.builder()
                .withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(serviceEndpoint, REGION))
                .withCredentials(new AWSStaticCredentialsProvider(new BasicAWSCredentials("dummyKey", "dummySecret")))
                .build();
        }
    }

}

And a sample test:

public class CsvS3UploadServiceIntegrationTest extends AbstractAmazonS3Test {

    private static final String SUCCESS_CSV = "a,b";
    private static final String STANDARD_STORAGE = "STANDARD";

    @Autowired
    private AmazonS3 s3;

    @Autowired
    private S3ConfigurationProperties properties;

    @Autowired
    private CsvS3UploadService service;

    @Before
    public void setUp() {
        s3.createBucket(properties.getBucketName());
    }

    @After
    public void tearDown() {
        final String bucketName = properties.getBucketName();
        s3.listObjects(bucketName).getObjectSummaries().stream()
            .map(S3ObjectSummary::getKey)
            .forEach(key -> s3.deleteObject(bucketName, key));
        s3.deleteBucket(bucketName);
    }

    @Test
    public void uploadSuccessfulCsv() {
        service.uploadSuccessfulCsv(SUCCESS_CSV);
        final S3ObjectSummary s3ObjectSummary = getOnlyFileFromS3();
        assertThat(s3ObjectSummary.getKey(), containsString("-success.csv"));
        assertThat(s3ObjectSummary.getETag(), is("b345e1dc09f20fdefdea469f09167892"));
        assertThat(s3ObjectSummary.getStorageClass(), is(STANDARD_STORAGE));
        assertThat(s3ObjectSummary.getSize(), is(3L));
    }

    private S3ObjectSummary getOnlyFileFromS3() {
        final ObjectListing listing = s3.listObjects(properties.getBucketName());
        final List<S3ObjectSummary> objects = listing.getObjectSummaries();
        assertThat(objects, iterableWithSize(1));
        return Iterables.getOnlyElement(objects);
    }
}

And the code under test:

@Service
@RequiredArgsConstructor
@EnableConfigurationProperties(S3ConfigurationProperties.class)
public class CsvS3UploadServiceImpl implements CsvS3UploadService {

    private static final String CSV_MIME_TYPE = CSV_UTF_8.toString();

    private final AmazonS3 amazonS3;
    private final S3ConfigurationProperties properties;
    private final S3ObjectKeyService s3ObjectKeyService;

    @Override
    public void uploadSuccessfulCsv(final String source) {
        final String key = s3ObjectKeyService.getSuccessKey();
        doUpload(source, key, getObjectMetadata(source));
    }

    private void doUpload(final String source, final String key, final ObjectMetadata metadata) {
        try (ReaderInputStream in = new ReaderInputStream(new StringReader(source), UTF_8)) {
            final PutObjectRequest request = new PutObjectRequest(properties.getBucketName(), key, in, metadata);
            amazonS3.putObject(request);
        } catch (final IOException ioe) {
            throw new CsvUploadException("Unable to upload " + key, ioe);
        }
    }

    private ObjectMetadata getObjectMetadata(final String source) {
        final ObjectMetadata metadata = new ObjectMetadata();
        metadata.setContentType(CSV_MIME_TYPE);
        metadata.setContentLength(source.getBytes(UTF_8).length);
        metadata.setContentMD5(getMD5ChecksumAsBase64(source));
        metadata.setSSEAlgorithm(SSEAlgorithm.KMS.getAlgorithm());
        return metadata;
    }

    private String getMD5ChecksumAsBase64(final String source) {
        final HashCode md5 = Hashing.md5().hashString(source, UTF_8);
        return Base64.getEncoder().encodeToString(md5.asBytes());
    }
}
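
The S3ConfigurationProperties and S3ObjectKeyService types referenced above are not shown in the answer. A minimal sketch of what they might look like, assuming Lombok and a made-up property prefix (both are assumptions, not the original code):

@ConfigurationProperties(prefix = "app.s3") // the prefix is a hypothetical placeholder
@Getter
@Setter
public class S3ConfigurationProperties {

    // bucket the CSV files are uploaded to, bound from the active profile's properties
    private String bucketName;
}

public interface S3ObjectKeyService {

    // object key under which the "success" CSV is stored
    String getSuccessKey();
}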