author    gfyoung <gfyoung17@gmail.com>    2016-06-30 23:00:10 -0400
committer Ali Beyad <ali@elastic.co>       2016-06-30 23:00:10 -0400
commit    d24cc65cad2f3152237df8b6c457a2d0a603f13a (patch)
tree      cabec4a52ec125b0e9ddebea63b83d865b997afb
parent    a24d302e73251ae4fd2d41abff1fa2cab419be66 (diff)
Raised IOException on deleteBlob (#18815)
Raise IOException on deleteBlob if the blob doesn't exist

This commit raises an IOException on BlobContainer#deleteBlob if the blob does not exist, in conformance with the BlobContainer interface contract. Each implementation of BlobContainer now conforms to this contract (file system, S3, Azure, HDFS). This commit also contains blob container tests for each of the repository implementations.

Closes #18530
-rw-r--r--  core/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java                                 |   2
-rw-r--r--  plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/blobstore/AzureBlobContainer.java        |  10
-rw-r--r--  plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceMock.java     |   4
-rw-r--r--  plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobStoreContainerTests.java |  47
-rw-r--r--  plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java              |  18
-rw-r--r--  plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsBlobStoreContainerTests.java    | 104
-rw-r--r--  plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/blobstore/S3BlobContainer.java                |   4
-rw-r--r--  plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/blobstore/MockAmazonS3.java                   | 200
-rw-r--r--  plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/blobstore/S3BlobStoreContainerTests.java      |  39
-rw-r--r--  test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreContainerTestCase.java                 |  16
10 files changed, 432 insertions(+), 12 deletions(-)
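For context, a rough caller-side sketch of the new contract (a hypothetical helper, not code from this change): deleting a missing blob now surfaces as an IOException instead of returning silently, so callers that relied on the old deleteIfExists-style behavior either check existence first or catch the exception.

    // Hypothetical caller illustrating the new BlobContainer#deleteBlob contract.
    // Before this change a delete of a missing blob was a silent no-op; now it throws IOException.
    import java.io.IOException;
    import org.elasticsearch.common.blobstore.BlobContainer;

    class BlobCleanup {
        /** Deletes a blob, tolerating the case where it is already gone. */
        static void deleteIfPresent(BlobContainer container, String blobName) throws IOException {
            if (container.blobExists(blobName)) {   // existence check, as the S3/Azure/HDFS containers now do internally
                container.deleteBlob(blobName);     // still throws IOException if the blob vanished in between
            }
        }
    }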
diff --git a/core/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java b/core/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java
index 822f8d1721..67ce298bec 100644
--- a/core/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java
+++ b/core/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java
@@ -85,7 +85,7 @@ public class FsBlobContainer extends AbstractBlobContainer {
@Override
public void deleteBlob(String blobName) throws IOException {
Path blobPath = path.resolve(blobName);
- Files.deleteIfExists(blobPath);
+ Files.delete(blobPath);
}
@Override
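Note on the core change above: Files.delete throws NoSuchFileException (an IOException subclass) when the target is missing, while Files.deleteIfExists simply returns false, which is why the file-system container needs no explicit existence check to satisfy the new contract. A minimal standalone sketch of the difference (illustration only, not code from this commit):

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.NoSuchFileException;
    import java.nio.file.Path;
    import java.nio.file.Paths;

    class DeleteDemo {
        public static void main(String[] args) throws IOException {
            Path missing = Paths.get("does-not-exist.blob");
            System.out.println(Files.deleteIfExists(missing)); // prints false, no exception
            try {
                Files.delete(missing);                         // throws NoSuchFileException
            } catch (NoSuchFileException e) {
                System.out.println("missing: " + e.getFile());
            }
        }
    }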
diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/blobstore/AzureBlobContainer.java b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/blobstore/AzureBlobContainer.java
index e6c3a46907..6117062fc2 100644
--- a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/blobstore/AzureBlobContainer.java
+++ b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/blobstore/AzureBlobContainer.java
@@ -70,6 +70,11 @@ public class AzureBlobContainer extends AbstractBlobContainer {
@Override
public InputStream readBlob(String blobName) throws IOException {
logger.trace("readBlob({})", blobName);
+
+ if (!blobExists(blobName)) {
+ throw new IOException("Blob [" + blobName + "] does not exist");
+ }
+
try {
return blobStore.getInputStream(blobStore.container(), buildKey(blobName));
} catch (StorageException e) {
@@ -116,6 +121,11 @@ public class AzureBlobContainer extends AbstractBlobContainer {
@Override
public void deleteBlob(String blobName) throws IOException {
logger.trace("deleteBlob({})", blobName);
+
+ if (!blobExists(blobName)) {
+ throw new IOException("Blob [" + blobName + "] does not exist");
+ }
+
try {
blobStore.deleteBlob(blobStore.container(), buildKey(blobName));
} catch (URISyntaxException | StorageException e) {
diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceMock.java b/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceMock.java
index 8160c56032..506d574ea6 100644
--- a/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceMock.java
+++ b/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceMock.java
@@ -95,13 +95,13 @@ public class AzureStorageServiceMock extends AbstractLifecycleComponent<AzureSto
MapBuilder<String, BlobMetaData> blobsBuilder = MapBuilder.newMapBuilder();
for (String blobName : blobs.keySet()) {
final String checkBlob;
- if (keyPath != null) {
+ if (keyPath != null && !keyPath.isEmpty()) {
// strip off key path from the beginning of the blob name
checkBlob = blobName.replace(keyPath, "");
} else {
checkBlob = blobName;
}
- if (startsWithIgnoreCase(checkBlob, prefix)) {
+ if (prefix == null || startsWithIgnoreCase(checkBlob, prefix)) {
blobsBuilder.put(blobName, new PlainBlobMetaData(checkBlob, blobs.get(blobName).size()));
}
}
diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobStoreContainerTests.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobStoreContainerTests.java
new file mode 100644
index 0000000000..5b161613c9
--- /dev/null
+++ b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobStoreContainerTests.java
@@ -0,0 +1,47 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.repositories.azure;
+
+import com.microsoft.azure.storage.StorageException;
+import org.elasticsearch.cloud.azure.blobstore.AzureBlobStore;
+import org.elasticsearch.cloud.azure.storage.AzureStorageServiceMock;
+import org.elasticsearch.common.blobstore.BlobStore;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.repositories.ESBlobStoreContainerTestCase;
+import org.elasticsearch.repositories.RepositoryName;
+import org.elasticsearch.repositories.RepositorySettings;
+
+import java.io.IOException;
+import java.net.URISyntaxException;
+
+public class AzureBlobStoreContainerTests extends ESBlobStoreContainerTestCase {
+ @Override
+ protected BlobStore newBlobStore() throws IOException {
+ try {
+ RepositoryName repositoryName = new RepositoryName("azure", "ittest");
+ RepositorySettings repositorySettings = new RepositorySettings(
+ Settings.EMPTY, Settings.EMPTY);
+ AzureStorageServiceMock client = new AzureStorageServiceMock(Settings.EMPTY);
+ return new AzureBlobStore(repositoryName, Settings.EMPTY, repositorySettings, client);
+ } catch (URISyntaxException | StorageException e) {
+ throw new IOException(e);
+ }
+ }
+}
diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java
index 6ba726e2b2..c8b3d9f7e1 100644
--- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java
+++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java
@@ -68,16 +68,16 @@ final class HdfsBlobContainer extends AbstractBlobContainer {
@Override
public void deleteBlob(String blobName) throws IOException {
- try {
- store.execute(new Operation<Boolean>() {
- @Override
- public Boolean run(FileContext fileContext) throws IOException {
- return fileContext.delete(new Path(path, blobName), true);
- }
- });
- } catch (FileNotFoundException ok) {
- // behaves like Files.deleteIfExists
+ if (!blobExists(blobName)) {
+ throw new IOException("Blob [" + blobName + "] does not exist");
}
+
+ store.execute(new Operation<Boolean>() {
+ @Override
+ public Boolean run(FileContext fileContext) throws IOException {
+ return fileContext.delete(new Path(path, blobName), true);
+ }
+ });
}
@Override
diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsBlobStoreContainerTests.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsBlobStoreContainerTests.java
new file mode 100644
index 0000000000..a96a8183e5
--- /dev/null
+++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsBlobStoreContainerTests.java
@@ -0,0 +1,104 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.repositories.hdfs;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.AbstractFileSystem;
+import org.apache.hadoop.fs.FileContext;
+import org.apache.hadoop.fs.UnsupportedFileSystemException;
+import org.elasticsearch.common.blobstore.BlobStore;
+import org.elasticsearch.repositories.ESBlobStoreContainerTestCase;
+
+import javax.security.auth.Subject;
+import java.io.IOException;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationTargetException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.security.AccessController;
+import java.security.Principal;
+import java.security.PrivilegedAction;
+import java.util.Collections;
+
+public class HdfsBlobStoreContainerTests extends ESBlobStoreContainerTestCase {
+
+ @Override
+ protected BlobStore newBlobStore() throws IOException {
+ return AccessController.doPrivileged(
+ new PrivilegedAction<HdfsBlobStore>() {
+ @Override
+ public HdfsBlobStore run() {
+ try {
+ FileContext fileContext = createContext(new URI("hdfs:///"));
+ return new HdfsBlobStore(fileContext, "temp", 1024);
+ } catch (IOException | URISyntaxException e) {
+ throw new RuntimeException(e);
+ }
+ }
+ });
+ }
+
+ public FileContext createContext(URI uri) {
+ // mirrors HdfsRepository.java behaviour
+ Configuration cfg = new Configuration(true);
+ cfg.setClassLoader(HdfsRepository.class.getClassLoader());
+ cfg.reloadConfiguration();
+
+ Constructor<?> ctor;
+ Subject subject;
+
+ try {
+ Class<?> clazz = Class.forName("org.apache.hadoop.security.User");
+ ctor = clazz.getConstructor(String.class);
+ ctor.setAccessible(true);
+ } catch (ClassNotFoundException | NoSuchMethodException e) {
+ throw new RuntimeException(e);
+ }
+
+ try {
+ Principal principal = (Principal) ctor.newInstance(System.getProperty("user.name"));
+ subject = new Subject(false, Collections.singleton(principal),
+ Collections.emptySet(), Collections.emptySet());
+ } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) {
+ throw new RuntimeException(e);
+ }
+
+ // disable file system cache
+ cfg.setBoolean("fs.hdfs.impl.disable.cache", true);
+
+ // set file system to TestingFs to avoid a bunch of security
+ // checks, similar to what is done in HdfsTests.java
+ cfg.set(String.format("fs.AbstractFileSystem.%s.impl", uri.getScheme()),
+ TestingFs.class.getName());
+
+ // create the FileContext with our user
+ return Subject.doAs(subject, new PrivilegedAction<FileContext>() {
+ @Override
+ public FileContext run() {
+ try {
+ TestingFs fs = (TestingFs) AbstractFileSystem.get(uri, cfg);
+ return FileContext.getFileContext(fs, cfg);
+ } catch (UnsupportedFileSystemException e) {
+ throw new RuntimeException(e);
+ }
+ }
+ });
+ }
+}
diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/blobstore/S3BlobContainer.java b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/blobstore/S3BlobContainer.java
index 42df840ce4..5e014ab3ec 100644
--- a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/blobstore/S3BlobContainer.java
+++ b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/blobstore/S3BlobContainer.java
@@ -108,6 +108,10 @@ public class S3BlobContainer extends AbstractBlobContainer {
@Override
public void deleteBlob(String blobName) throws IOException {
+ if (!blobExists(blobName)) {
+ throw new IOException("Blob [" + blobName + "] does not exist");
+ }
+
try {
blobStore.client().deleteObject(blobStore.bucket(), buildKey(blobName));
} catch (AmazonClientException e) {
diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/blobstore/MockAmazonS3.java b/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/blobstore/MockAmazonS3.java
new file mode 100644
index 0000000000..8124f69394
--- /dev/null
+++ b/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/blobstore/MockAmazonS3.java
@@ -0,0 +1,200 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.cloud.aws.blobstore;
+
+import com.amazonaws.AmazonClientException;
+import com.amazonaws.AmazonServiceException;
+import com.amazonaws.services.s3.AbstractAmazonS3;
+import com.amazonaws.services.s3.model.AmazonS3Exception;
+import com.amazonaws.services.s3.model.CopyObjectRequest;
+import com.amazonaws.services.s3.model.CopyObjectResult;
+import com.amazonaws.services.s3.model.DeleteObjectRequest;
+import com.amazonaws.services.s3.model.GetObjectMetadataRequest;
+import com.amazonaws.services.s3.model.GetObjectRequest;
+import com.amazonaws.services.s3.model.ListObjectsRequest;
+import com.amazonaws.services.s3.model.ObjectListing;
+import com.amazonaws.services.s3.model.ObjectMetadata;
+import com.amazonaws.services.s3.model.PutObjectRequest;
+import com.amazonaws.services.s3.model.PutObjectResult;
+import com.amazonaws.services.s3.model.S3Object;
+import com.amazonaws.services.s3.model.S3ObjectInputStream;
+import com.amazonaws.services.s3.model.S3ObjectSummary;
+import com.amazonaws.util.Base64;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.security.DigestInputStream;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+
+class MockAmazonS3 extends AbstractAmazonS3 {
+
+ private Map<String, InputStream> blobs = new ConcurrentHashMap<>();
+
+ // in ESBlobStoreContainerTestCase.java, the maximum
+ // length of the input data is 100 bytes
+ private byte[] byteCounter = new byte[100];
+
+ @Override
+ public boolean doesBucketExist(String bucket) {
+ return true;
+ }
+
+ @Override
+ public ObjectMetadata getObjectMetadata(
+ GetObjectMetadataRequest getObjectMetadataRequest)
+ throws AmazonClientException, AmazonServiceException {
+ String blobName = getObjectMetadataRequest.getKey();
+
+ if (!blobs.containsKey(blobName)) {
+ throw new AmazonS3Exception("[" + blobName + "] does not exist.");
+ }
+
+ return new ObjectMetadata(); // nothing is done with it
+ }
+
+ @Override
+ public PutObjectResult putObject(PutObjectRequest putObjectRequest)
+ throws AmazonClientException, AmazonServiceException {
+ String blobName = putObjectRequest.getKey();
+ DigestInputStream stream = (DigestInputStream) putObjectRequest.getInputStream();
+
+ if (blobs.containsKey(blobName)) {
+ throw new AmazonS3Exception("[" + blobName + "] already exists.");
+ }
+
+ blobs.put(blobName, stream);
+
+ // input and output md5 hashes need to match to avoid an exception
+ String md5 = Base64.encodeAsString(stream.getMessageDigest().digest());
+ PutObjectResult result = new PutObjectResult();
+ result.setContentMd5(md5);
+
+ return result;
+ }
+
+ @Override
+ public S3Object getObject(GetObjectRequest getObjectRequest)
+ throws AmazonClientException, AmazonServiceException {
+ // in ESBlobStoreContainerTestCase.java, the prefix is empty,
+ // so the key and blobName are equivalent to each other
+ String blobName = getObjectRequest.getKey();
+
+ if (!blobs.containsKey(blobName)) {
+ throw new AmazonS3Exception("[" + blobName + "] does not exist.");
+ }
+
+ // the HTTP request attribute is irrelevant for reading
+ S3ObjectInputStream stream = new S3ObjectInputStream(
+ blobs.get(blobName), null, false);
+ S3Object s3Object = new S3Object();
+ s3Object.setObjectContent(stream);
+ return s3Object;
+ }
+
+ @Override
+ public ObjectListing listObjects(ListObjectsRequest listObjectsRequest)
+ throws AmazonClientException, AmazonServiceException {
+ MockObjectListing list = new MockObjectListing();
+ list.setTruncated(false);
+
+ String blobName;
+ String prefix = listObjectsRequest.getPrefix();
+
+ ArrayList<S3ObjectSummary> mockObjectSummaries = new ArrayList<>();
+
+ for (Map.Entry<String, InputStream> blob : blobs.entrySet()) {
+ blobName = blob.getKey();
+ S3ObjectSummary objectSummary = new S3ObjectSummary();
+
+ if (prefix.isEmpty() || blobName.startsWith(prefix)) {
+ objectSummary.setKey(blobName);
+
+ try {
+ objectSummary.setSize(getSize(blob.getValue()));
+ } catch (IOException e) {
+ throw new AmazonS3Exception("Object listing " +
+ "failed for blob [" + blob.getKey() + "]");
+ }
+
+ mockObjectSummaries.add(objectSummary);
+ }
+ }
+
+ list.setObjectSummaries(mockObjectSummaries);
+ return list;
+ }
+
+ @Override
+ public CopyObjectResult copyObject(CopyObjectRequest copyObjectRequest)
+ throws AmazonClientException, AmazonServiceException {
+ String sourceBlobName = copyObjectRequest.getSourceKey();
+ String targetBlobName = copyObjectRequest.getDestinationKey();
+
+ if (!blobs.containsKey(sourceBlobName)) {
+ throw new AmazonS3Exception("Source blob [" +
+ sourceBlobName + "] does not exist.");
+ }
+
+ if (blobs.containsKey(targetBlobName)) {
+ throw new AmazonS3Exception("Target blob [" +
+ targetBlobName + "] already exists.");
+ }
+
+ blobs.put(targetBlobName, blobs.get(sourceBlobName));
+ return new CopyObjectResult(); // nothing is done with it
+ }
+
+ @Override
+ public void deleteObject(DeleteObjectRequest deleteObjectRequest)
+ throws AmazonClientException, AmazonServiceException {
+ String blobName = deleteObjectRequest.getKey();
+
+ if (!blobs.containsKey(blobName)) {
+ throw new AmazonS3Exception("[" + blobName + "] does not exist.");
+ }
+
+ blobs.remove(blobName);
+ }
+
+ private int getSize(InputStream stream) throws IOException {
+ int size = stream.read(byteCounter);
+ stream.reset(); // in case we ever need the size again
+ return size;
+ }
+
+ private class MockObjectListing extends ObjectListing {
+ // the objectSummaries attribute in ObjectListing.java
+ // is read-only, but we need to be able to write to it,
+ // so we create a mock of it to work around this
+ private List<S3ObjectSummary> mockObjectSummaries;
+
+ @Override
+ public List<S3ObjectSummary> getObjectSummaries() {
+ return mockObjectSummaries;
+ }
+
+ private void setObjectSummaries(List<S3ObjectSummary> objectSummaries) {
+ mockObjectSummaries = objectSummaries;
+ }
+ }
+}
diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/blobstore/S3BlobStoreContainerTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/blobstore/S3BlobStoreContainerTests.java
new file mode 100644
index 0000000000..bca1c1d8a1
--- /dev/null
+++ b/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/blobstore/S3BlobStoreContainerTests.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.cloud.aws.blobstore;
+
+import org.elasticsearch.common.blobstore.BlobStore;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.unit.ByteSizeUnit;
+import org.elasticsearch.common.unit.ByteSizeValue;
+import org.elasticsearch.repositories.ESBlobStoreContainerTestCase;
+
+import java.io.IOException;
+import java.util.Locale;
+
+public class S3BlobStoreContainerTests extends ESBlobStoreContainerTestCase {
+ protected BlobStore newBlobStore() throws IOException {
+ MockAmazonS3 client = new MockAmazonS3();
+ String bucket = randomAsciiOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT);
+
+ return new S3BlobStore(Settings.EMPTY, client, bucket, null, false,
+ new ByteSizeValue(10, ByteSizeUnit.MB), 5, "public-read-write", "standard");
+ }
+}
diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreContainerTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreContainerTestCase.java
index 6ff0b71cdc..67ad0eb735 100644
--- a/test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreContainerTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreContainerTestCase.java
@@ -112,6 +112,22 @@ public abstract class ESBlobStoreContainerTestCase extends ESTestCase {
}
}
+ public void testDeleteBlob() throws IOException {
+ try (final BlobStore store = newBlobStore()) {
+ final String blobName = "foobar";
+ final BlobContainer container = store.blobContainer(new BlobPath());
+ expectThrows(IOException.class, () -> container.deleteBlob(blobName));
+
+ byte[] data = randomBytes(randomIntBetween(10, scaledRandomIntBetween(1024, 1 << 16)));
+ final BytesArray bytesArray = new BytesArray(data);
+ container.writeBlob(blobName, bytesArray);
+ container.deleteBlob(blobName); // should not raise
+
+ // blob deleted, so should raise again
+ expectThrows(IOException.class, () -> container.deleteBlob(blobName));
+ }
+ }
+
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/15579")
public void testOverwriteFails() throws IOException {
try (final BlobStore store = newBlobStore()) {