diff --git a/hbase-mapreduce/pom.xml b/hbase-mapreduce/pom.xml
index 9203402880b9..99671e9c6925 100644
--- a/hbase-mapreduce/pom.xml
+++ b/hbase-mapreduce/pom.xml
@@ -220,6 +220,11 @@
junit-vintage-engine
test
+ <dependency>
+ <groupId>org.awaitility</groupId>
+ <artifactId>awaitility</artifactId>
+ <scope>test</scope>
+ </dependency>
org.slf4j
jcl-over-slf4j
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
index 8e34b84c7514..127ff7fdfb09 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
@@ -264,6 +264,8 @@ public void map(BytesWritable key, NullWritable value, Context context)
Path outputPath = getOutputPath(inputInfo);
copyFile(context, inputInfo, outputPath);
+ // inject failure
+ injectTestFailure(context, inputInfo);
}
/**
@@ -290,19 +292,23 @@ private Path getOutputPath(final SnapshotFileInfo inputInfo) throws IOException
return new Path(outputArchive, path);
}
- @SuppressWarnings("checkstyle:linelength")
/**
* Used by TestExportSnapshot to test for retries when failures happen. Failure is injected in
- * {@link #copyFile(Mapper.Context, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo, Path)}.
+ * {@link #map(BytesWritable, NullWritable, org.apache.hadoop.mapreduce.Mapper.Context)}
*/
private void injectTestFailure(final Context context, final SnapshotFileInfo inputInfo)
throws IOException {
- if (!context.getConfiguration().getBoolean(Testing.CONF_TEST_FAILURE, false)) return;
- if (testing.injectedFailureCount >= testing.failuresCountToInject) return;
+ if (!context.getConfiguration().getBoolean(Testing.CONF_TEST_FAILURE, false)) {
+ return;
+ }
+ if (testing.injectedFailureCount >= testing.failuresCountToInject) {
+ return;
+ }
testing.injectedFailureCount++;
context.getCounter(Counter.COPY_FAILED).increment(1);
LOG.debug("Injecting failure. Count: " + testing.injectedFailureCount);
- throw new IOException(String.format("TEST FAILURE (%d of max %d): Unable to copy input=%s",
+ throw new IOException(String.format(
+ context.getTaskAttemptID() + " TEST FAILURE (%d of max %d): Unable to copy input=%s",
testing.injectedFailureCount, testing.failuresCountToInject, inputInfo));
}
@@ -358,8 +364,6 @@ private void copyFile(final Context context, final SnapshotFileInfo inputInfo,
LOG.error("Error copying " + inputPath + " to " + outputPath, e);
context.getCounter(Counter.COPY_FAILED).increment(1);
throw e;
- } finally {
- injectTestFailure(context, inputInfo);
}
}
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/ConsecutiveExportsTestBase.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/ConsecutiveExportsTestBase.java
new file mode 100644
index 000000000000..852602e80c94
--- /dev/null
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/ConsecutiveExportsTestBase.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.snapshot;
+
+import org.apache.hadoop.fs.Path;
+import org.junit.jupiter.api.TestTemplate;
+
+public class ConsecutiveExportsTestBase extends ExportSnapshotTestBase {
+
+ protected ConsecutiveExportsTestBase(boolean mob) {
+ super(mob);
+ }
+
+ @TestTemplate
+ public void testConsecutiveExports() throws Exception {
+ Path copyDir = getLocalDestinationDir(TEST_UTIL);
+ testExportFileSystemState(tableName, snapshotName, snapshotName, tableNumFiles, copyDir, false);
+ testExportFileSystemState(tableName, snapshotName, snapshotName, tableNumFiles, copyDir, true);
+ removeExportDir(copyDir);
+ }
+}
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/ExportFileSystemStateTestBase.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/ExportFileSystemStateTestBase.java
new file mode 100644
index 000000000000..6ddfc211e75d
--- /dev/null
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/ExportFileSystemStateTestBase.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.snapshot;
+
+import org.junit.jupiter.api.TestTemplate;
+
+public class ExportFileSystemStateTestBase extends ExportSnapshotTestBase {
+
+ protected ExportFileSystemStateTestBase(boolean mob) {
+ super(mob);
+ }
+
+ /**
+ * Verify if exported snapshot and copied files matches the original one.
+ */
+ @TestTemplate
+ public void testExportFileSystemState() throws Exception {
+ testExportFileSystemState(tableName, snapshotName, snapshotName, tableNumFiles);
+ }
+
+ @TestTemplate
+ public void testExportFileSystemStateWithSkipTmp() throws Exception {
+ TEST_UTIL.getConfiguration().setBoolean(ExportSnapshot.CONF_SKIP_TMP, true);
+ try {
+ testExportFileSystemState(tableName, snapshotName, snapshotName, tableNumFiles);
+ } finally {
+ TEST_UTIL.getConfiguration().setBoolean(ExportSnapshot.CONF_SKIP_TMP, false);
+ }
+ }
+
+ @TestTemplate
+ public void testEmptyExportFileSystemState() throws Exception {
+ testExportFileSystemState(tableName, emptySnapshotName, emptySnapshotName, 0);
+ }
+}
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/ExportFileSystemStateWithMergeOrSplitRegionTestBase.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/ExportFileSystemStateWithMergeOrSplitRegionTestBase.java
new file mode 100644
index 000000000000..1405e338e560
--- /dev/null
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/ExportFileSystemStateWithMergeOrSplitRegionTestBase.java
@@ -0,0 +1,126 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.snapshot;
+
+import static org.awaitility.Awaitility.await;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.hasSize;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+import java.time.Duration;
+import java.util.List;
+import java.util.stream.Collectors;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.RegionInfo;
+import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
+import org.apache.hadoop.hbase.tool.BulkLoadHFilesTool;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.HFileTestUtil;
+import org.junit.jupiter.api.TestTemplate;
+
+import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
+
+public class ExportFileSystemStateWithMergeOrSplitRegionTestBase extends ExportSnapshotTestBase {
+
+ protected ExportFileSystemStateWithMergeOrSplitRegionTestBase(boolean mob) {
+ super(mob);
+ }
+
+ @TestTemplate
+ public void testExportFileSystemStateWithMergeRegion() throws Exception {
+ // disable compaction
+ admin.compactionSwitch(false,
+ admin.getRegionServers().stream().map(a -> a.getServerName()).collect(Collectors.toList()));
+ // create Table
+ String suffix = mob ? methodName + "-mob" : methodName;
+ TableName tableName0 = TableName.valueOf("testtb-" + suffix + "-1");
+ String snapshotName0 = "snaptb0-" + suffix + "-1";
+ admin.createTable(
+ TableDescriptorBuilder.newBuilder(tableName0)
+ .setColumnFamilies(
+ Lists.newArrayList(ColumnFamilyDescriptorBuilder.newBuilder(FAMILY).build()))
+ .build(),
+ new byte[][] { Bytes.toBytes("2") });
+ // put some data
+ try (Table table = admin.getConnection().getTable(tableName0)) {
+ table.put(new Put(Bytes.toBytes("1")).addColumn(FAMILY, null, Bytes.toBytes("1")));
+ table.put(new Put(Bytes.toBytes("2")).addColumn(FAMILY, null, Bytes.toBytes("2")));
+ }
+ List<RegionInfo> regions = admin.getRegions(tableName0);
+ assertEquals(2, regions.size());
+ tableNumFiles = regions.size();
+ // merge region
+ admin.mergeRegionsAsync(new byte[][] { regions.get(0).getEncodedNameAsBytes(),
+ regions.get(1).getEncodedNameAsBytes() }, true).get();
+ await().atMost(Duration.ofSeconds(30))
+ .untilAsserted(() -> assertThat(admin.getRegions(tableName0), hasSize(1)));
+ // take a snapshot
+ admin.snapshot(snapshotName0, tableName0);
+ // export snapshot and verify
+ testExportFileSystemState(tableName0, snapshotName0, snapshotName0, tableNumFiles);
+ }
+
+ @TestTemplate
+ public void testExportFileSystemStateWithSplitRegion() throws Exception {
+ // disable compaction
+ admin.compactionSwitch(false,
+ admin.getRegionServers().stream().map(a -> a.getServerName()).collect(Collectors.toList()));
+ // create Table
+ String suffix = mob ? methodName + "-mob" : methodName;
+ TableName splitTableName = TableName.valueOf(suffix);
+ String splitTableSnap = "snapshot-" + suffix;
+ admin.createTable(TableDescriptorBuilder.newBuilder(splitTableName).setColumnFamilies(
+ Lists.newArrayList(ColumnFamilyDescriptorBuilder.newBuilder(FAMILY).build())).build());
+
+ Path output = TEST_UTIL.getDataTestDir("output/cf");
+ TEST_UTIL.getTestFileSystem().mkdirs(output);
+ // Create and load a large hfile to ensure the execution time of MR job.
+ HFileTestUtil.createHFile(TEST_UTIL.getConfiguration(), TEST_UTIL.getTestFileSystem(),
+ new Path(output, "test_file"), FAMILY, Bytes.toBytes("q"), Bytes.toBytes("1"),
+ Bytes.toBytes("9"), 9999999);
+ BulkLoadHFilesTool tool = new BulkLoadHFilesTool(TEST_UTIL.getConfiguration());
+ tool.run(new String[] { output.getParent().toString(), splitTableName.getNameAsString() });
+
+ List<RegionInfo> regions = admin.getRegions(splitTableName);
+ assertEquals(1, regions.size());
+ tableNumFiles = regions.size();
+
+ // split region
+ admin.split(splitTableName, Bytes.toBytes("5"));
+ await().atMost(Duration.ofSeconds(30))
+ .untilAsserted(() -> assertThat(admin.getRegions(splitTableName), hasSize(2)));
+
+ // take a snapshot
+ admin.snapshot(splitTableSnap, splitTableName);
+ // export snapshot and verify
+ Configuration tmpConf = TEST_UTIL.getConfiguration();
+ // Decrease the buffer size of copier to avoid the export task finished shortly
+ tmpConf.setInt("snapshot.export.buffer.size", 1);
+ // Decrease the maximum files of each mapper to ensure the three files(1 hfile + 2 reference
+ // files) copied in different mappers concurrently.
+ tmpConf.setInt("snapshot.export.default.map.group", 1);
+ testExportFileSystemState(tmpConf, splitTableName, splitTableSnap, splitTableSnap,
+ tableNumFiles, TEST_UTIL.getDefaultRootDirPath(), getHdfsDestinationDir(), false, false,
+ getBypassRegionPredicate(), true, false);
+ }
+}
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/ExportSnapshotMiscTestBase.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/ExportSnapshotMiscTestBase.java
new file mode 100644
index 000000000000..25c469551335
--- /dev/null
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/ExportSnapshotMiscTestBase.java
@@ -0,0 +1,95 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.snapshot;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Optional;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.SnapshotType;
+import org.apache.hadoop.hbase.util.AbstractHBaseTool;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.junit.jupiter.api.TestTemplate;
+
+public class ExportSnapshotMiscTestBase extends ExportSnapshotTestBase {
+
+ protected ExportSnapshotMiscTestBase(boolean mob) {
+ super(mob);
+ }
+
+ @TestTemplate
+ public void testExportWithTargetName() throws Exception {
+ final String targetName = "testExportWithTargetName";
+ testExportFileSystemState(tableName, snapshotName, targetName, tableNumFiles);
+ }
+
+ @TestTemplate
+ public void testExportWithResetTtl() throws Exception {
+ String suffix = mob ? methodName + "-mob" : methodName;
+ TableName tableName = TableName.valueOf(suffix);
+ String snapshotName = "snaptb-" + suffix;
+ Long ttl = 100000L;
+ // create Table
+ createTable(tableName);
+ SnapshotTestingUtils.loadData(TEST_UTIL, tableName, 50, FAMILY);
+ int tableNumFiles = admin.getRegions(tableName).size();
+ // take a snapshot with TTL
+ Map<String, Object> props = new HashMap<>();
+ props.put("TTL", ttl);
+ admin.snapshot(snapshotName, tableName, props);
+ Optional<Long> ttlOpt =
+ admin.listSnapshots().stream().filter(s -> s.getName().equals(snapshotName))
+ .map(org.apache.hadoop.hbase.client.SnapshotDescription::getTtl).findAny();
+ assertTrue(ttlOpt.isPresent());
+ assertEquals(ttl, ttlOpt.get());
+
+ testExportFileSystemState(tableName, snapshotName, snapshotName, tableNumFiles,
+ getHdfsDestinationDir(), false, true);
+ }
+
+ @TestTemplate
+ public void testExportExpiredSnapshot() throws Exception {
+ String suffix = mob ? methodName + "-mob" : methodName;
+ TableName tableName = TableName.valueOf(suffix);
+ String snapshotName = "snapshot-" + suffix;
+ createTable(tableName);
+ SnapshotTestingUtils.loadData(TEST_UTIL, tableName, 50, FAMILY);
+ Map<String, Object> properties = new HashMap<>();
+ properties.put("TTL", 10);
+ org.apache.hadoop.hbase.client.SnapshotDescription snapshotDescription =
+ new org.apache.hadoop.hbase.client.SnapshotDescription(snapshotName, tableName,
+ SnapshotType.FLUSH, null, EnvironmentEdgeManager.currentTime(), -1, properties);
+ admin.snapshot(snapshotDescription);
+ boolean isExist =
+ admin.listSnapshots().stream().anyMatch(ele -> snapshotName.equals(ele.getName()));
+ assertTrue(isExist);
+ TEST_UTIL.waitFor(60000,
+ () -> SnapshotDescriptionUtils.isExpiredSnapshot(snapshotDescription.getTtl(),
+ snapshotDescription.getCreationTime(), EnvironmentEdgeManager.currentTime()));
+ boolean isExpiredSnapshot =
+ SnapshotDescriptionUtils.isExpiredSnapshot(snapshotDescription.getTtl(),
+ snapshotDescription.getCreationTime(), EnvironmentEdgeManager.currentTime());
+ assertTrue(isExpiredSnapshot);
+ int res = runExportSnapshot(TEST_UTIL.getConfiguration(), snapshotName, snapshotName,
+ TEST_UTIL.getDefaultRootDirPath(), getHdfsDestinationDir(), false, false, false, true, true);
+ assertEquals(AbstractHBaseTool.EXIT_FAILURE, res);
+ }
+}
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/ExportSnapshotRetryTestBase.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/ExportSnapshotRetryTestBase.java
new file mode 100644
index 000000000000..3ce4a3827cd8
--- /dev/null
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/ExportSnapshotRetryTestBase.java
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.snapshot;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.junit.jupiter.api.TestTemplate;
+
+public class ExportSnapshotRetryTestBase extends ExportSnapshotTestBase {
+
+ protected ExportSnapshotRetryTestBase(boolean mob) {
+ super(mob);
+ }
+
+ /**
+ * Check that ExportSnapshot will succeed if something fails but the retry succeed.
+ */
+ @TestTemplate
+ public void testExportRetry() throws Exception {
+ Path copyDir = TestExportSnapshotMisc.getLocalDestinationDir(TEST_UTIL);
+ Configuration conf = new Configuration(TEST_UTIL.getConfiguration());
+ conf.setBoolean(ExportSnapshot.Testing.CONF_TEST_FAILURE, true);
+ conf.setInt(ExportSnapshot.Testing.CONF_TEST_FAILURE_COUNT, 2);
+ conf.setInt("mapreduce.map.maxattempts", 3);
+ testExportFileSystemState(conf, tableName, snapshotName, snapshotName, tableNumFiles,
+ TEST_UTIL.getDefaultRootDirPath(), copyDir, true, false, getBypassRegionPredicate(), true,
+ false);
+ }
+
+ /**
+ * Check that ExportSnapshot will fail if we inject failure more times than MR will retry.
+ */
+ @TestTemplate
+ public void testExportFailure() throws Exception {
+ Path copyDir = TestExportSnapshotMisc.getLocalDestinationDir(TEST_UTIL);
+ FileSystem fs = FileSystem.get(copyDir.toUri(), new Configuration());
+ copyDir = copyDir.makeQualified(fs.getUri(), fs.getWorkingDirectory());
+ Configuration conf = new Configuration(TEST_UTIL.getConfiguration());
+ conf.setBoolean(ExportSnapshot.Testing.CONF_TEST_FAILURE, true);
+ conf.setInt(ExportSnapshot.Testing.CONF_TEST_FAILURE_COUNT, 4);
+ conf.setInt("mapreduce.map.maxattempts", 3);
+ testExportFileSystemState(conf, tableName, snapshotName, snapshotName, tableNumFiles,
+ TEST_UTIL.getDefaultRootDirPath(), copyDir, true, false, getBypassRegionPredicate(), false,
+ false);
+ }
+}
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/ExportSnapshotTestBase.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/ExportSnapshotTestBase.java
new file mode 100644
index 000000000000..68e7f7eb5103
--- /dev/null
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/ExportSnapshotTestBase.java
@@ -0,0 +1,359 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.snapshot;
+
+import static org.apache.hadoop.util.ToolRunner.run;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+import java.io.IOException;
+import java.lang.reflect.Method;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Objects;
+import java.util.Set;
+import java.util.stream.Stream;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.RegionInfo;
+import org.apache.hadoop.hbase.mob.MobUtils;
+import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.CommonFSUtils;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.apache.hadoop.hbase.util.Pair;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.TestInfo;
+import org.junit.jupiter.params.provider.Arguments;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
+
+public class ExportSnapshotTestBase {
+
+ private static final Logger LOG = LoggerFactory.getLogger(ExportSnapshotTestBase.class);
+
+ protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
+
+ protected final static byte[] FAMILY = Bytes.toBytes("cf");
+
+ protected String methodName;
+
+ protected TableName tableName;
+
+ protected String emptySnapshotName;
+
+ protected String snapshotName;
+
+ protected int tableNumFiles;
+
+ protected Admin admin;
+
+ protected boolean mob;
+
+ public static Stream<Arguments> parameters() {
+ return Stream.of(Arguments.of(false), Arguments.of(true));
+ }
+
+ protected ExportSnapshotTestBase(boolean mob) {
+ this.mob = mob;
+ }
+
+ @AfterAll
+ public static void tearDownAfterClass() throws Exception {
+ TEST_UTIL.shutdownMiniMapReduceCluster();
+ TEST_UTIL.shutdownMiniCluster();
+ }
+
+ /**
+ * Create a table and take a snapshot of the table used by the export test.
+ */
+ @BeforeEach
+ public void setUp(TestInfo testInfo) throws Exception {
+ this.admin = TEST_UTIL.getAdmin();
+
+ methodName = testInfo.getTestMethod().map(Method::getName).orElse(getClass().getSimpleName());
+
+ String suffix = mob ? methodName + "-mob" : methodName;
+ tableName = TableName.valueOf("testtb-" + suffix);
+ snapshotName = "snaptb0-" + suffix;
+ emptySnapshotName = "emptySnaptb0-" + suffix;
+
+ // create Table
+ createTable(this.tableName);
+
+ // Take an empty snapshot
+ admin.snapshot(emptySnapshotName, tableName);
+
+ // Add some rows
+ SnapshotTestingUtils.loadData(TEST_UTIL, tableName, 50, FAMILY);
+ tableNumFiles = admin.getRegions(tableName).size();
+
+ // take a snapshot
+ admin.snapshot(snapshotName, tableName);
+ }
+
+ protected final void createTable(TableName tableName) throws Exception {
+ if (mob) {
+ MobSnapshotTestingUtils.createPreSplitMobTable(TEST_UTIL, tableName, 2, FAMILY);
+ } else {
+ SnapshotTestingUtils.createPreSplitTable(TEST_UTIL, tableName, 2, FAMILY);
+ }
+ }
+
+ protected interface RegionPredicate {
+ boolean evaluate(final RegionInfo regionInfo);
+ }
+
+ protected final RegionPredicate getBypassRegionPredicate() {
+ if (mob) {
+ return MobUtils::isMobRegionInfo;
+ } else {
+ return null;
+ }
+ }
+
+ @AfterEach
+ public void tearDown() throws Exception {
+ for (TableName tn : TEST_UTIL.getAdmin().listTableNames()) {
+ TEST_UTIL.deleteTable(tn);
+ }
+ SnapshotTestingUtils.deleteAllSnapshots(TEST_UTIL.getAdmin());
+ SnapshotTestingUtils.deleteArchiveDirectory(TEST_UTIL);
+ }
+
+ protected final void testExportFileSystemState(final TableName tableName,
+ final String snapshotName, final String targetName, int filesExpected) throws Exception {
+ testExportFileSystemState(tableName, snapshotName, targetName, filesExpected,
+ getHdfsDestinationDir(), false);
+ }
+
+ protected final void testExportFileSystemState(final TableName tableName,
+ final String snapshotName, final String targetName, int filesExpected, Path copyDir,
+ boolean overwrite) throws Exception {
+ testExportFileSystemState(tableName, snapshotName, targetName, filesExpected, copyDir,
+ overwrite, false);
+ }
+
+ protected final void testExportFileSystemState(final TableName tableName,
+ final String snapshotName, final String targetName, int filesExpected, Path copyDir,
+ boolean overwrite, boolean resetTtl) throws Exception {
+ testExportFileSystemState(TEST_UTIL.getConfiguration(), tableName, snapshotName, targetName,
+ filesExpected, TEST_UTIL.getDefaultRootDirPath(), copyDir, overwrite, resetTtl,
+ getBypassRegionPredicate(), true, false);
+ }
+
+ /**
+ * Creates destination directory, runs ExportSnapshot() tool, and runs some verifications.
+ */
+ protected static void testExportFileSystemState(final Configuration conf,
+ final TableName tableName, final String snapshotName, final String targetName,
+ final int filesExpected, final Path srcDir, Path rawTgtDir, final boolean overwrite,
+ final boolean resetTtl, final RegionPredicate bypassregionPredicate, final boolean success,
+ final boolean checksumVerify) throws Exception {
+ FileSystem tgtFs = rawTgtDir.getFileSystem(conf);
+ FileSystem srcFs = srcDir.getFileSystem(conf);
+ Path tgtDir = rawTgtDir.makeQualified(tgtFs.getUri(), tgtFs.getWorkingDirectory());
+
+ // Export Snapshot
+ int res = runExportSnapshot(conf, snapshotName, targetName, srcDir, rawTgtDir, overwrite,
+ resetTtl, checksumVerify, true, true);
+ assertEquals(success ? 0 : 1, res, "success " + success + ", res=" + res);
+ if (!success) {
+ final Path targetDir = new Path(HConstants.SNAPSHOT_DIR_NAME, targetName);
+ assertFalse(tgtFs.exists(new Path(tgtDir, targetDir)),
+ tgtDir.toString() + " " + targetDir.toString());
+ return;
+ }
+ LOG.info("Exported snapshot");
+
+ // Verify File-System state
+ FileStatus[] rootFiles = tgtFs.listStatus(tgtDir);
+ assertEquals(filesExpected > 0 ? 2 : 1, rootFiles.length);
+ for (FileStatus fileStatus : rootFiles) {
+ String name = fileStatus.getPath().getName();
+ assertTrue(fileStatus.isDirectory(), fileStatus.toString());
+ assertTrue(name.equals(HConstants.SNAPSHOT_DIR_NAME)
+ || name.equals(HConstants.HFILE_ARCHIVE_DIRECTORY), name.toString());
+ }
+ LOG.info("Verified filesystem state");
+
+ // Compare the snapshot metadata and verify the hfiles
+ final Path snapshotDir = new Path(HConstants.SNAPSHOT_DIR_NAME, snapshotName);
+ final Path targetDir = new Path(HConstants.SNAPSHOT_DIR_NAME, targetName);
+ verifySnapshotDir(srcFs, new Path(srcDir, snapshotDir), tgtFs, new Path(tgtDir, targetDir));
+ Set<String> snapshotFiles =
+ verifySnapshot(conf, tgtFs, tgtDir, tableName, targetName, resetTtl, bypassregionPredicate);
+ assertEquals(filesExpected, snapshotFiles.size());
+ }
+
+ /*
+ * verify if the snapshot folder on file-system 1 match the one on file-system 2
+ */
+ protected static void verifySnapshotDir(final FileSystem fs1, final Path root1,
+ final FileSystem fs2, final Path root2) throws IOException {
+ assertEquals(listFiles(fs1, root1, root1), listFiles(fs2, root2, root2));
+ }
+
+ /*
+ * Verify if the files exists
+ */
+ protected static Set<String> verifySnapshot(final Configuration conf, final FileSystem fs,
+ final Path rootDir, final TableName tableName, final String snapshotName,
+ final boolean resetTtl, final RegionPredicate bypassregionPredicate) throws IOException {
+ final Path exportedSnapshot =
+ new Path(rootDir, new Path(HConstants.SNAPSHOT_DIR_NAME, snapshotName));
+ final Set<String> snapshotFiles = new HashSet<>();
+ final Path exportedArchive = new Path(rootDir, HConstants.HFILE_ARCHIVE_DIRECTORY);
+ SnapshotReferenceUtil.visitReferencedFiles(conf, fs, exportedSnapshot,
+ new SnapshotReferenceUtil.SnapshotVisitor() {
+ @Override
+ public void storeFile(final RegionInfo regionInfo, final String family,
+ final SnapshotRegionManifest.StoreFile storeFile) throws IOException {
+ if (bypassregionPredicate != null && bypassregionPredicate.evaluate(regionInfo)) {
+ return;
+ }
+
+ if (!storeFile.hasReference() && !StoreFileInfo.isReference(storeFile.getName())) {
+ String hfile = storeFile.getName();
+ snapshotFiles.add(hfile);
+ verifyNonEmptyFile(new Path(exportedArchive,
+ new Path(CommonFSUtils.getTableDir(new Path("./"), tableName),
+ new Path(regionInfo.getEncodedName(), new Path(family, hfile)))));
+ } else {
+ Pair<String, String> referredToRegionAndFile =
+ StoreFileInfo.getReferredToRegionAndFile(storeFile.getName());
+ String region = referredToRegionAndFile.getFirst();
+ String hfile = referredToRegionAndFile.getSecond();
+ snapshotFiles.add(hfile);
+ verifyNonEmptyFile(new Path(exportedArchive,
+ new Path(CommonFSUtils.getTableDir(new Path("./"), tableName),
+ new Path(region, new Path(family, hfile)))));
+ }
+ }
+
+ private void verifyNonEmptyFile(final Path path) throws IOException {
+ assertTrue(fs.exists(path), path + " should exists");
+ assertTrue(fs.getFileStatus(path).getLen() > 0, path + " should not be empty");
+ }
+ });
+
+ // Verify Snapshot description
+ SnapshotDescription desc = SnapshotDescriptionUtils.readSnapshotInfo(fs, exportedSnapshot);
+ assertTrue(desc.getName().equals(snapshotName));
+ assertTrue(desc.getTable().equals(tableName.getNameAsString()));
+ if (resetTtl) {
+ assertEquals(HConstants.DEFAULT_SNAPSHOT_TTL, desc.getTtl());
+ }
+ return snapshotFiles;
+ }
+
+ private static Set<String> listFiles(final FileSystem fs, final Path root, final Path dir)
+ throws IOException {
+ Set<String> files = new HashSet<>();
+ LOG.debug("List files in {} in root {} at {}", fs, root, dir);
+ int rootPrefix = root.makeQualified(fs.getUri(), fs.getWorkingDirectory()).toString().length();
+ FileStatus[] list = CommonFSUtils.listStatus(fs, dir);
+ if (list != null) {
+ for (FileStatus fstat : list) {
+ LOG.debug(Objects.toString(fstat.getPath()));
+ if (fstat.isDirectory()) {
+ files.addAll(listFiles(fs, root, fstat.getPath()));
+ } else {
+ files.add(fstat.getPath().makeQualified(fs.getUri(), fs.getWorkingDirectory()).toString()
+ .substring(rootPrefix));
+ }
+ }
+ }
+ return files;
+ }
+
+ protected final Path getHdfsDestinationDir() {
+ Path rootDir = TEST_UTIL.getHBaseCluster().getMaster().getMasterFileSystem().getRootDir();
+ Path path =
+ new Path(new Path(rootDir, "export-test"), "export-" + EnvironmentEdgeManager.currentTime());
+ LOG.info("HDFS export destination path: " + path);
+ return path;
+ }
+
+ protected static Path getLocalDestinationDir(HBaseTestingUtility htu) {
+ Path path = htu.getDataTestDir("local-export-" + EnvironmentEdgeManager.currentTime());
+ try {
+ FileSystem fs = FileSystem.getLocal(htu.getConfiguration());
+ LOG.info("Local export destination path: " + path);
+ return path.makeQualified(fs.getUri(), fs.getWorkingDirectory());
+ } catch (IOException ioe) {
+ throw new RuntimeException(ioe);
+ }
+ }
+
+ protected static void removeExportDir(final Path path) throws IOException {
+ FileSystem fs = FileSystem.get(path.toUri(), new Configuration());
+ fs.delete(path, true);
+ }
+
+ protected static int runExportSnapshot(final Configuration conf, final String sourceSnapshotName,
+ final String targetSnapshotName, final Path srcDir, Path rawTgtDir, final boolean overwrite,
+ final boolean resetTtl, final boolean checksumVerify, final boolean noSourceVerify,
+ final boolean noTargetVerify) throws Exception {
+ FileSystem tgtFs = rawTgtDir.getFileSystem(conf);
+ FileSystem srcFs = srcDir.getFileSystem(conf);
+ Path tgtDir = rawTgtDir.makeQualified(tgtFs.getUri(), tgtFs.getWorkingDirectory());
+ LOG.info("tgtFsUri={}, tgtDir={}, rawTgtDir={}, srcFsUri={}, srcDir={}", tgtFs.getUri(), tgtDir,
+ rawTgtDir, srcFs.getUri(), srcDir);
+ List<String> opts = new ArrayList<>();
+ opts.add("--snapshot");
+ opts.add(sourceSnapshotName);
+ opts.add("--copy-to");
+ opts.add(tgtDir.toString());
+ if (!targetSnapshotName.equals(sourceSnapshotName)) {
+ opts.add("--target");
+ opts.add(targetSnapshotName);
+ }
+ if (overwrite) {
+ opts.add("--overwrite");
+ }
+ if (resetTtl) {
+ opts.add("--reset-ttl");
+ }
+ if (!checksumVerify) {
+ opts.add("--no-checksum-verify");
+ }
+ if (!noSourceVerify) {
+ opts.add("--no-source-verify");
+ }
+ if (!noTargetVerify) {
+ opts.add("--no-target-verify");
+ }
+
+ // Export Snapshot
+ return run(conf, new ExportSnapshot(), opts.toArray(new String[opts.size()]));
+ }
+}
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/ExportSnapshotTestHelpers.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/ExportSnapshotTestHelpers.java
new file mode 100644
index 000000000000..031e0819ac02
--- /dev/null
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/ExportSnapshotTestHelpers.java
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.snapshot;
+
+import java.util.UUID;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.master.snapshot.SnapshotManager;
+import org.apache.hadoop.hbase.mob.MobConstants;
+import org.apache.hadoop.hbase.security.HadoopSecurityEnabledUserProviderForTesting;
+import org.apache.hadoop.hbase.security.UserProvider;
+import org.apache.hadoop.hbase.security.access.PermissionStorage;
+import org.apache.hadoop.hbase.security.access.SecureTestUtil;
+
+final class ExportSnapshotTestHelpers {
+
+ private ExportSnapshotTestHelpers() {
+ }
+
+ private static void setUpBaseConf(Configuration conf) {
+ conf.setBoolean(SnapshotManager.HBASE_SNAPSHOT_ENABLED, true);
+ conf.setInt("hbase.regionserver.msginterval", 100);
+ // If a single node has enough failures (default 3), resource manager will blacklist it.
+ // With only 2 nodes and tests injecting faults, we don't want that.
+ conf.setInt("mapreduce.job.maxtaskfailures.per.tracker", 100);
+ conf.setInt(MobConstants.MOB_FILE_CACHE_SIZE_KEY, 0);
+ }
+
+ static void startCluster(HBaseTestingUtility util, boolean useTmpDir) throws Exception {
+ Configuration conf = util.getConfiguration();
+ setUpBaseConf(conf);
+ if (useTmpDir) {
+ FileSystem localFs = FileSystem.getLocal(conf);
+ Path tmpDir = util.getDataTestDir(UUID.randomUUID().toString())
+ .makeQualified(localFs.getUri(), localFs.getWorkingDirectory());
+ conf.set(SnapshotDescriptionUtils.SNAPSHOT_WORKING_DIR, tmpDir.toUri().toString());
+ }
+ util.startMiniCluster(1);
+ util.startMiniMapReduceCluster();
+ }
+
+ static void startSecureCluster(HBaseTestingUtility util) throws Exception {
+ setUpBaseConf(util.getConfiguration());
+ // Setup separate test-data directory for MR cluster and set corresponding configurations.
+ // Otherwise, different test classes running MR cluster can step on each other.
+ util.getDataTestDir();
+
+ // set the always on security provider
+ UserProvider.setUserProviderForTesting(util.getConfiguration(),
+ HadoopSecurityEnabledUserProviderForTesting.class);
+
+ // setup configuration
+ SecureTestUtil.enableSecurity(util.getConfiguration());
+
+ util.startMiniCluster(3);
+ util.startMiniMapReduceCluster();
+
+ // Wait for the ACL table to become available
+ util.waitTableEnabled(PermissionStorage.ACL_TABLE_NAME);
+ }
+
+}
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/ExportWithChecksumTestBase.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/ExportWithChecksumTestBase.java
new file mode 100644
index 000000000000..8a8bfaca24da
--- /dev/null
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/ExportWithChecksumTestBase.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.snapshot;
+
+import org.apache.hadoop.fs.Path;
+import org.junit.jupiter.api.TestTemplate;
+
+public class ExportWithChecksumTestBase extends ExportSnapshotTestBase {
+
+ protected ExportWithChecksumTestBase(boolean mob) {
+ super(mob);
+ }
+
+ @TestTemplate
+ public void testExportWithChecksum() throws Exception {
+ // Test different schemes: input scheme is hdfs:// and output scheme is file://
+ // The checksum verification will fail
+ Path copyLocalDir = getLocalDestinationDir(TEST_UTIL);
+ testExportFileSystemState(TEST_UTIL.getConfiguration(), tableName, snapshotName, snapshotName,
+ tableNumFiles, TEST_UTIL.getDefaultRootDirPath(), copyLocalDir, false, false,
+ getBypassRegionPredicate(), false, true);
+
+ // Test same schemes: input scheme is hdfs:// and output scheme is hdfs://
+ // The checksum verification will succeed
+ Path copyHdfsDir = getHdfsDestinationDir();
+ testExportFileSystemState(TEST_UTIL.getConfiguration(), tableName, snapshotName, snapshotName,
+ tableNumFiles, TEST_UTIL.getDefaultRootDirPath(), copyHdfsDir, false, false,
+ getBypassRegionPredicate(), true, true);
+ }
+
+}
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestConsecutiveExports.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestConsecutiveExports.java
new file mode 100644
index 000000000000..cb7988922328
--- /dev/null
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestConsecutiveExports.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.snapshot;
+
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.testclassification.MapReduceTests;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+
+@Tag(MapReduceTests.TAG)
+@Tag(LargeTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: mob = {0}")
+public class TestConsecutiveExports extends ConsecutiveExportsTestBase {
+
+ public TestConsecutiveExports(boolean mob) {
+ super(mob);
+ }
+
+ @BeforeAll
+ public static void setUpBeforeClass() throws Exception {
+ ExportSnapshotTestHelpers.startCluster(TEST_UTIL, false);
+ }
+}
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestConsecutiveExportsWithTmpDir.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestConsecutiveExportsWithTmpDir.java
new file mode 100644
index 000000000000..c65e0cda8f97
--- /dev/null
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestConsecutiveExportsWithTmpDir.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.snapshot;
+
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.testclassification.MapReduceTests;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+
+@Tag(MapReduceTests.TAG)
+@Tag(LargeTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: mob = {0}")
+public class TestConsecutiveExportsWithTmpDir extends ConsecutiveExportsTestBase {
+
+ public TestConsecutiveExportsWithTmpDir(boolean mob) {
+ super(mob);
+ }
+
+ @BeforeAll
+ public static void setUpBeforeClass() throws Exception {
+ ExportSnapshotTestHelpers.startCluster(TEST_UTIL, true);
+ }
+}
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportFileSystemState.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportFileSystemState.java
new file mode 100644
index 000000000000..505ca7de2670
--- /dev/null
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportFileSystemState.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.snapshot;
+
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.testclassification.MapReduceTests;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+
+@Tag(MapReduceTests.TAG)
+@Tag(LargeTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: mob = {0}")
+public class TestExportFileSystemState extends ExportFileSystemStateTestBase {
+
+ public TestExportFileSystemState(boolean mob) {
+ super(mob);
+ }
+
+ @BeforeAll
+ public static void setUpBeforeClass() throws Exception {
+ ExportSnapshotTestHelpers.startCluster(TEST_UTIL, false);
+ }
+}
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportFileSystemStateWithMergeOrSplitRegion.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportFileSystemStateWithMergeOrSplitRegion.java
new file mode 100644
index 000000000000..ff673bb46e2d
--- /dev/null
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportFileSystemStateWithMergeOrSplitRegion.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.snapshot;
+
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.testclassification.MapReduceTests;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+
+@Tag(MapReduceTests.TAG)
+@Tag(LargeTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: mob = {0}")
+public class TestExportFileSystemStateWithMergeOrSplitRegion
+ extends ExportFileSystemStateWithMergeOrSplitRegionTestBase {
+
+ public TestExportFileSystemStateWithMergeOrSplitRegion(boolean mob) {
+ super(mob);
+ }
+
+ @BeforeAll
+ public static void setUpBeforeClass() throws Exception {
+ ExportSnapshotTestHelpers.startCluster(TEST_UTIL, false);
+ }
+}
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportFileSystemStateWithMergeOrSplitRegionTmpDir.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportFileSystemStateWithMergeOrSplitRegionTmpDir.java
new file mode 100644
index 000000000000..36c749eb126e
--- /dev/null
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportFileSystemStateWithMergeOrSplitRegionTmpDir.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.snapshot;
+
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.testclassification.MapReduceTests;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+
+@Tag(MapReduceTests.TAG)
+@Tag(LargeTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: mob = {0}")
+public class TestExportFileSystemStateWithMergeOrSplitRegionTmpDir
+ extends ExportFileSystemStateWithMergeOrSplitRegionTestBase {
+
+ public TestExportFileSystemStateWithMergeOrSplitRegionTmpDir(boolean mob) {
+ super(mob);
+ }
+
+ @BeforeAll
+ public static void setUpBeforeClass() throws Exception {
+ ExportSnapshotTestHelpers.startCluster(TEST_UTIL, true);
+ }
+}
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportFileSystemStateWithTmpDir.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportFileSystemStateWithTmpDir.java
new file mode 100644
index 000000000000..922daf9002ca
--- /dev/null
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportFileSystemStateWithTmpDir.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.snapshot;
+
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.testclassification.MapReduceTests;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+
+@Tag(MapReduceTests.TAG)
+@Tag(LargeTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: mob = {0}")
+public class TestExportFileSystemStateWithTmpDir extends ExportFileSystemStateTestBase {
+
+ public TestExportFileSystemStateWithTmpDir(boolean mob) {
+ super(mob);
+ }
+
+ @BeforeAll
+ public static void setUpBeforeClass() throws Exception {
+ ExportSnapshotTestHelpers.startCluster(TEST_UTIL, true);
+ }
+}
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java
deleted file mode 100644
index 0cea6f6b8e97..000000000000
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java
+++ /dev/null
@@ -1,571 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.snapshot;
-
-import static org.apache.hadoop.util.ToolRunner.run;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-import java.util.Optional;
-import java.util.Set;
-import java.util.stream.Collectors;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
-import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.ServerName;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
-import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.client.RegionInfo;
-import org.apache.hadoop.hbase.client.SnapshotType;
-import org.apache.hadoop.hbase.client.Table;
-import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-import org.apache.hadoop.hbase.master.snapshot.SnapshotManager;
-import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
-import org.apache.hadoop.hbase.testclassification.LargeTests;
-import org.apache.hadoop.hbase.testclassification.VerySlowMapReduceTests;
-import org.apache.hadoop.hbase.tool.BulkLoadHFilesTool;
-import org.apache.hadoop.hbase.util.AbstractHBaseTool;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.CommonFSUtils;
-import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-import org.apache.hadoop.hbase.util.HFileTestUtil;
-import org.apache.hadoop.hbase.util.Pair;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.rules.TestName;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
-
-import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
-
-/**
- * Test Export Snapshot Tool
- */
-@Category({ VerySlowMapReduceTests.class, LargeTests.class })
-public class TestExportSnapshot {
-
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestExportSnapshot.class);
-
- private static final Logger LOG = LoggerFactory.getLogger(TestExportSnapshot.class);
-
- protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
-
- protected final static byte[] FAMILY = Bytes.toBytes("cf");
-
- @Rule
- public final TestName testName = new TestName();
-
- protected TableName tableName;
- private byte[] emptySnapshotName;
- private byte[] snapshotName;
- private int tableNumFiles;
- private Admin admin;
-
- public static void setUpBaseConf(Configuration conf) {
- conf.setBoolean(SnapshotManager.HBASE_SNAPSHOT_ENABLED, true);
- conf.setInt("hbase.regionserver.msginterval", 100);
- // If a single node has enough failures (default 3), resource manager will blacklist it.
- // With only 2 nodes and tests injecting faults, we don't want that.
- conf.setInt("mapreduce.job.maxtaskfailures.per.tracker", 100);
- }
-
- @BeforeClass
- public static void setUpBeforeClass() throws Exception {
- setUpBaseConf(TEST_UTIL.getConfiguration());
- TEST_UTIL.startMiniCluster(1);
- TEST_UTIL.startMiniMapReduceCluster();
- }
-
- @AfterClass
- public static void tearDownAfterClass() throws Exception {
- TEST_UTIL.shutdownMiniMapReduceCluster();
- TEST_UTIL.shutdownMiniCluster();
- }
-
- /**
- * Create a table and take a snapshot of the table used by the export test.
- */
- @Before
- public void setUp() throws Exception {
- this.admin = TEST_UTIL.getAdmin();
-
- tableName = TableName.valueOf("testtb-" + testName.getMethodName());
- snapshotName = Bytes.toBytes("snaptb0-" + testName.getMethodName());
- emptySnapshotName = Bytes.toBytes("emptySnaptb0-" + testName.getMethodName());
-
- // create Table
- createTable(this.tableName);
-
- // Take an empty snapshot
- admin.snapshot(emptySnapshotName, tableName);
-
- // Add some rows
- SnapshotTestingUtils.loadData(TEST_UTIL, tableName, 50, FAMILY);
- tableNumFiles = admin.getTableRegions(tableName).size();
-
- // take a snapshot
- admin.snapshot(snapshotName, tableName);
- }
-
- protected void createTable(TableName tableName) throws Exception {
- SnapshotTestingUtils.createPreSplitTable(TEST_UTIL, tableName, 2, FAMILY);
- }
-
- protected interface RegionPredicate {
- boolean evaluate(final RegionInfo regionInfo);
- }
-
- protected RegionPredicate getBypassRegionPredicate() {
- return null;
- }
-
- @After
- public void tearDown() throws Exception {
- TEST_UTIL.deleteTable(tableName);
- SnapshotTestingUtils.deleteAllSnapshots(TEST_UTIL.getAdmin());
- SnapshotTestingUtils.deleteArchiveDirectory(TEST_UTIL);
- }
-
- /**
- * Verify if exported snapshot and copied files matches the original one.
- */
- @Test
- public void testExportFileSystemState() throws Exception {
- testExportFileSystemState(tableName, snapshotName, snapshotName, tableNumFiles);
- }
-
- @Test
- public void testExportFileSystemStateWithMergeRegion() throws Exception {
- // disable compaction
- admin.compactionSwitch(false,
- admin.getRegionServers().stream().map(a -> a.getServerName()).collect(Collectors.toList()));
- // create Table
- TableName tableName0 = TableName.valueOf("testtb-" + testName.getMethodName() + "-1");
- byte[] snapshotName0 = Bytes.toBytes("snaptb0-" + testName.getMethodName() + "-1");
- admin.createTable(
- TableDescriptorBuilder.newBuilder(tableName0)
- .setColumnFamilies(
- Lists.newArrayList(ColumnFamilyDescriptorBuilder.newBuilder(FAMILY).build()))
- .build(),
- new byte[][] { Bytes.toBytes("2") });
- // put some data
- try (Table table = admin.getConnection().getTable(tableName0)) {
- table.put(new Put(Bytes.toBytes("1")).addColumn(FAMILY, null, Bytes.toBytes("1")));
- table.put(new Put(Bytes.toBytes("2")).addColumn(FAMILY, null, Bytes.toBytes("2")));
- }
- List regions = admin.getRegions(tableName0);
- assertEquals(2, regions.size());
- tableNumFiles = regions.size();
- // merge region
- admin.mergeRegionsAsync(new byte[][] { regions.get(0).getEncodedNameAsBytes(),
- regions.get(1).getEncodedNameAsBytes() }, true).get();
- // take a snapshot
- admin.snapshot(snapshotName0, tableName0);
- // export snapshot and verify
- testExportFileSystemState(tableName0, snapshotName0, snapshotName0, tableNumFiles);
- // delete table
- TEST_UTIL.deleteTable(tableName0);
- }
-
- @Test
- public void testExportFileSystemStateWithSplitRegion() throws Exception {
- // disable compaction
- admin.compactionSwitch(false, admin.getRegionServers().stream().map(ServerName::getServerName)
- .collect(Collectors.toList()));
- // create Table
- TableName splitTableName = TableName.valueOf(testName.getMethodName());
- String splitTableSnap = "snapshot-" + testName.getMethodName();
- admin.createTable(TableDescriptorBuilder.newBuilder(splitTableName).setColumnFamilies(
- Lists.newArrayList(ColumnFamilyDescriptorBuilder.newBuilder(FAMILY).build())).build());
-
- Path output = TEST_UTIL.getDataTestDir("output/cf");
- TEST_UTIL.getTestFileSystem().mkdirs(output);
- // Create and load a large hfile to ensure the execution time of MR job.
- HFileTestUtil.createHFile(TEST_UTIL.getConfiguration(), TEST_UTIL.getTestFileSystem(),
- new Path(output, "test_file"), FAMILY, Bytes.toBytes("q"), Bytes.toBytes("1"),
- Bytes.toBytes("9"), 9999999);
- BulkLoadHFilesTool tool = new BulkLoadHFilesTool(TEST_UTIL.getConfiguration());
- tool.run(new String[] { output.getParent().toString(), splitTableName.getNameAsString() });
-
- List regions = admin.getRegions(splitTableName);
- assertEquals(1, regions.size());
- tableNumFiles = regions.size();
-
- // split region
- admin.splitRegionAsync(regions.get(0).getEncodedNameAsBytes(), Bytes.toBytes("5")).get();
- regions = admin.getRegions(splitTableName);
- assertEquals(2, regions.size());
-
- // take a snapshot
- admin.snapshot(splitTableSnap, splitTableName);
- // export snapshot and verify
- Configuration tmpConf = TEST_UTIL.getConfiguration();
- // Decrease the buffer size of copier to avoid the export task finished shortly
- tmpConf.setInt("snapshot.export.buffer.size", 1);
- // Decrease the maximum files of each mapper to ensure the three files(1 hfile + 2 reference
- // files) copied in different mappers concurrently.
- tmpConf.setInt("snapshot.export.default.map.group", 1);
- testExportFileSystemState(tmpConf, splitTableName, Bytes.toBytes(splitTableSnap),
- Bytes.toBytes(splitTableSnap), tableNumFiles, TEST_UTIL.getDefaultRootDirPath(),
- getHdfsDestinationDir(), false, false, getBypassRegionPredicate(), true, false);
- // delete table
- TEST_UTIL.deleteTable(splitTableName);
- }
-
- @Test
- public void testExportFileSystemStateWithSkipTmp() throws Exception {
- TEST_UTIL.getConfiguration().setBoolean(ExportSnapshot.CONF_SKIP_TMP, true);
- try {
- testExportFileSystemState(tableName, snapshotName, snapshotName, tableNumFiles);
- } finally {
- TEST_UTIL.getConfiguration().setBoolean(ExportSnapshot.CONF_SKIP_TMP, false);
- }
- }
-
- @Test
- public void testEmptyExportFileSystemState() throws Exception {
- testExportFileSystemState(tableName, emptySnapshotName, emptySnapshotName, 0);
- }
-
- @Test
- public void testConsecutiveExports() throws Exception {
- Path copyDir = getLocalDestinationDir(TEST_UTIL);
- testExportFileSystemState(tableName, snapshotName, snapshotName, tableNumFiles, copyDir, false);
- testExportFileSystemState(tableName, snapshotName, snapshotName, tableNumFiles, copyDir, true);
- removeExportDir(copyDir);
- }
-
- @Test
- public void testExportWithChecksum() throws Exception {
- // Test different schemes: input scheme is hdfs:// and output scheme is file://
- // The checksum verification will fail
- Path copyLocalDir = getLocalDestinationDir(TEST_UTIL);
- testExportFileSystemState(TEST_UTIL.getConfiguration(), tableName, snapshotName, snapshotName,
- tableNumFiles, TEST_UTIL.getDefaultRootDirPath(), copyLocalDir, false, false,
- getBypassRegionPredicate(), false, true);
-
- // Test same schemes: input scheme is hdfs:// and output scheme is hdfs://
- // The checksum verification will success
- Path copyHdfsDir = getHdfsDestinationDir();
- testExportFileSystemState(TEST_UTIL.getConfiguration(), tableName, snapshotName, snapshotName,
- tableNumFiles, TEST_UTIL.getDefaultRootDirPath(), copyHdfsDir, false, false,
- getBypassRegionPredicate(), true, true);
- }
-
- @Test
- public void testExportWithTargetName() throws Exception {
- final byte[] targetName = Bytes.toBytes("testExportWithTargetName");
- testExportFileSystemState(tableName, snapshotName, targetName, tableNumFiles);
- }
-
- @Test
- public void testExportWithResetTtl() throws Exception {
- String name = "testExportWithResetTtl";
- TableName tableName = TableName.valueOf(name);
- String snapshotNameStr = "snaptb-" + name;
- byte[] snapshotName = Bytes.toBytes(snapshotNameStr);
- Long ttl = 100000L;
-
- try {
- // create Table
- createTable(tableName);
- SnapshotTestingUtils.loadData(TEST_UTIL, tableName, 50, FAMILY);
- int tableNumFiles = admin.getRegions(tableName).size();
- // take a snapshot with TTL
- Map props = new HashMap<>();
- props.put("TTL", ttl);
- admin.snapshot(snapshotNameStr, tableName, props);
- Optional ttlOpt =
- admin.listSnapshots().stream().filter(s -> s.getName().equals(snapshotNameStr))
- .map(org.apache.hadoop.hbase.client.SnapshotDescription::getTtl).findAny();
- assertTrue(ttlOpt.isPresent());
- assertEquals(ttl, ttlOpt.get());
-
- testExportFileSystemState(tableName, snapshotName, snapshotName, tableNumFiles,
- getHdfsDestinationDir(), false, true);
- } catch (Exception e) {
- throw e;
- } finally {
- TEST_UTIL.deleteTable(tableName);
- }
- }
-
- @Test
- public void testExportExpiredSnapshot() throws Exception {
- String name = "testExportExpiredSnapshot";
- TableName tableName = TableName.valueOf(name);
- String snapshotName = "snapshot-" + name;
- createTable(tableName);
- SnapshotTestingUtils.loadData(TEST_UTIL, tableName, 50, FAMILY);
- Map properties = new HashMap<>();
- properties.put("TTL", 10);
- org.apache.hadoop.hbase.client.SnapshotDescription snapshotDescription =
- new org.apache.hadoop.hbase.client.SnapshotDescription(snapshotName, tableName,
- SnapshotType.FLUSH, null, EnvironmentEdgeManager.currentTime(), -1, properties);
- admin.snapshot(snapshotDescription);
- boolean isExist =
- admin.listSnapshots().stream().anyMatch(ele -> snapshotName.equals(ele.getName()));
- assertTrue(isExist);
- int retry = 6;
- while (
- !SnapshotDescriptionUtils.isExpiredSnapshot(snapshotDescription.getTtl(),
- snapshotDescription.getCreationTime(), EnvironmentEdgeManager.currentTime()) && retry > 0
- ) {
- retry--;
- Thread.sleep(10 * 1000);
- }
- boolean isExpiredSnapshot =
- SnapshotDescriptionUtils.isExpiredSnapshot(snapshotDescription.getTtl(),
- snapshotDescription.getCreationTime(), EnvironmentEdgeManager.currentTime());
- assertTrue(isExpiredSnapshot);
- int res = runExportSnapshot(TEST_UTIL.getConfiguration(), snapshotName, snapshotName,
- TEST_UTIL.getDefaultRootDirPath(), getHdfsDestinationDir(), false, false, false, true, true);
- assertTrue(res == AbstractHBaseTool.EXIT_FAILURE);
- }
-
- private void testExportFileSystemState(final TableName tableName, final byte[] snapshotName,
- final byte[] targetName, int filesExpected) throws Exception {
- testExportFileSystemState(tableName, snapshotName, targetName, filesExpected,
- getHdfsDestinationDir(), false);
- }
-
- protected void testExportFileSystemState(final TableName tableName, final byte[] snapshotName,
- final byte[] targetName, int filesExpected, Path copyDir, boolean overwrite) throws Exception {
- testExportFileSystemState(tableName, snapshotName, targetName, filesExpected, copyDir,
- overwrite, false);
- }
-
- protected void testExportFileSystemState(final TableName tableName, final byte[] snapshotName,
- final byte[] targetName, int filesExpected, Path copyDir, boolean overwrite, boolean resetTtl)
- throws Exception {
- testExportFileSystemState(TEST_UTIL.getConfiguration(), tableName, snapshotName, targetName,
- filesExpected, TEST_UTIL.getDefaultRootDirPath(), copyDir, overwrite, resetTtl,
- getBypassRegionPredicate(), true, false);
- }
-
- /**
- * Creates destination directory, runs ExportSnapshot() tool, and runs some verifications.
- */
- protected static void testExportFileSystemState(final Configuration conf,
- final TableName tableName, final byte[] snapshotName, final byte[] targetName,
- final int filesExpected, final Path srcDir, Path rawTgtDir, final boolean overwrite,
- final boolean resetTtl, final RegionPredicate bypassregionPredicate, final boolean success,
- final boolean checksumVerify) throws Exception {
- FileSystem tgtFs = rawTgtDir.getFileSystem(conf);
- FileSystem srcFs = srcDir.getFileSystem(conf);
- Path tgtDir = rawTgtDir.makeQualified(tgtFs.getUri(), tgtFs.getWorkingDirectory());
-
- // Export Snapshot
- int res = runExportSnapshot(conf, Bytes.toString(snapshotName), Bytes.toString(targetName),
- srcDir, rawTgtDir, overwrite, resetTtl, checksumVerify, true, true);
- assertEquals("success " + success + ", res=" + res, success ? 0 : 1, res);
- if (!success) {
- final Path targetDir = new Path(HConstants.SNAPSHOT_DIR_NAME, Bytes.toString(targetName));
- assertFalse(tgtDir.toString() + " " + targetDir.toString(),
- tgtFs.exists(new Path(tgtDir, targetDir)));
- return;
- }
- LOG.info("Exported snapshot");
-
- // Verify File-System state
- FileStatus[] rootFiles = tgtFs.listStatus(tgtDir);
- assertEquals(filesExpected > 0 ? 2 : 1, rootFiles.length);
- for (FileStatus fileStatus : rootFiles) {
- String name = fileStatus.getPath().getName();
- assertTrue(fileStatus.toString(), fileStatus.isDirectory());
- assertTrue(name.toString(), name.equals(HConstants.SNAPSHOT_DIR_NAME)
- || name.equals(HConstants.HFILE_ARCHIVE_DIRECTORY));
- }
- LOG.info("Verified filesystem state");
-
- // Compare the snapshot metadata and verify the hfiles
- final Path snapshotDir = new Path(HConstants.SNAPSHOT_DIR_NAME, Bytes.toString(snapshotName));
- final Path targetDir = new Path(HConstants.SNAPSHOT_DIR_NAME, Bytes.toString(targetName));
- verifySnapshotDir(srcFs, new Path(srcDir, snapshotDir), tgtFs, new Path(tgtDir, targetDir));
- Set snapshotFiles = verifySnapshot(conf, tgtFs, tgtDir, tableName,
- Bytes.toString(targetName), resetTtl, bypassregionPredicate);
- assertEquals(filesExpected, snapshotFiles.size());
- }
-
- /*
- * verify if the snapshot folder on file-system 1 match the one on file-system 2
- */
- protected static void verifySnapshotDir(final FileSystem fs1, final Path root1,
- final FileSystem fs2, final Path root2) throws IOException {
- assertEquals(listFiles(fs1, root1, root1), listFiles(fs2, root2, root2));
- }
-
- /*
- * Verify if the files exists
- */
- protected static Set verifySnapshot(final Configuration conf, final FileSystem fs,
- final Path rootDir, final TableName tableName, final String snapshotName,
- final boolean resetTtl, final RegionPredicate bypassregionPredicate) throws IOException {
- final Path exportedSnapshot =
- new Path(rootDir, new Path(HConstants.SNAPSHOT_DIR_NAME, snapshotName));
- final Set snapshotFiles = new HashSet<>();
- final Path exportedArchive = new Path(rootDir, HConstants.HFILE_ARCHIVE_DIRECTORY);
- SnapshotReferenceUtil.visitReferencedFiles(conf, fs, exportedSnapshot,
- new SnapshotReferenceUtil.SnapshotVisitor() {
- @Override
- public void storeFile(final RegionInfo regionInfo, final String family,
- final SnapshotRegionManifest.StoreFile storeFile) throws IOException {
- if (bypassregionPredicate != null && bypassregionPredicate.evaluate(regionInfo)) {
- return;
- }
-
- if (!storeFile.hasReference() && !StoreFileInfo.isReference(storeFile.getName())) {
- String hfile = storeFile.getName();
- snapshotFiles.add(hfile);
- verifyNonEmptyFile(new Path(exportedArchive,
- new Path(CommonFSUtils.getTableDir(new Path("./"), tableName),
- new Path(regionInfo.getEncodedName(), new Path(family, hfile)))));
- } else {
- Pair referredToRegionAndFile =
- StoreFileInfo.getReferredToRegionAndFile(storeFile.getName());
- String region = referredToRegionAndFile.getFirst();
- String hfile = referredToRegionAndFile.getSecond();
- snapshotFiles.add(hfile);
- verifyNonEmptyFile(new Path(exportedArchive,
- new Path(CommonFSUtils.getTableDir(new Path("./"), tableName),
- new Path(region, new Path(family, hfile)))));
- }
- }
-
- private void verifyNonEmptyFile(final Path path) throws IOException {
- assertTrue(path + " should exists", fs.exists(path));
- assertTrue(path + " should not be empty", fs.getFileStatus(path).getLen() > 0);
- }
- });
-
- // Verify Snapshot description
- SnapshotDescription desc = SnapshotDescriptionUtils.readSnapshotInfo(fs, exportedSnapshot);
- assertTrue(desc.getName().equals(snapshotName));
- assertTrue(desc.getTable().equals(tableName.getNameAsString()));
- if (resetTtl) {
- assertEquals(HConstants.DEFAULT_SNAPSHOT_TTL, desc.getTtl());
- }
- return snapshotFiles;
- }
-
- private static Set listFiles(final FileSystem fs, final Path root, final Path dir)
- throws IOException {
- Set files = new HashSet<>();
- LOG.debug("List files in {} in root {} at {}", fs, root, dir);
- int rootPrefix = root.makeQualified(fs.getUri(), fs.getWorkingDirectory()).toString().length();
- FileStatus[] list = CommonFSUtils.listStatus(fs, dir);
- if (list != null) {
- for (FileStatus fstat : list) {
- LOG.debug(Objects.toString(fstat.getPath()));
- if (fstat.isDirectory()) {
- files.addAll(listFiles(fs, root, fstat.getPath()));
- } else {
- files.add(fstat.getPath().makeQualified(fs).toString().substring(rootPrefix));
- }
- }
- }
- return files;
- }
-
- private Path getHdfsDestinationDir() {
- Path rootDir = TEST_UTIL.getHBaseCluster().getMaster().getMasterFileSystem().getRootDir();
- Path path =
- new Path(new Path(rootDir, "export-test"), "export-" + EnvironmentEdgeManager.currentTime());
- LOG.info("HDFS export destination path: " + path);
- return path;
- }
-
- static Path getLocalDestinationDir(HBaseTestingUtility htu) {
- Path path = htu.getDataTestDir("local-export-" + EnvironmentEdgeManager.currentTime());
- try {
- FileSystem fs = FileSystem.getLocal(htu.getConfiguration());
- LOG.info("Local export destination path: " + path);
- return path.makeQualified(fs.getUri(), fs.getWorkingDirectory());
- } catch (IOException ioe) {
- throw new RuntimeException(ioe);
- }
- }
-
- private static void removeExportDir(final Path path) throws IOException {
- FileSystem fs = FileSystem.get(path.toUri(), new Configuration());
- fs.delete(path, true);
- }
-
- private static int runExportSnapshot(final Configuration conf, final String sourceSnapshotName,
- final String targetSnapshotName, final Path srcDir, Path rawTgtDir, final boolean overwrite,
- final boolean resetTtl, final boolean checksumVerify, final boolean noSourceVerify,
- final boolean noTargetVerify) throws Exception {
- FileSystem tgtFs = rawTgtDir.getFileSystem(conf);
- FileSystem srcFs = srcDir.getFileSystem(conf);
- Path tgtDir = rawTgtDir.makeQualified(tgtFs.getUri(), tgtFs.getWorkingDirectory());
- LOG.info("tgtFsUri={}, tgtDir={}, rawTgtDir={}, srcFsUri={}, srcDir={}", tgtFs.getUri(), tgtDir,
- rawTgtDir, srcFs.getUri(), srcDir);
- List opts = new ArrayList<>();
- opts.add("--snapshot");
- opts.add(sourceSnapshotName);
- opts.add("--copy-to");
- opts.add(tgtDir.toString());
- if (!targetSnapshotName.equals(sourceSnapshotName)) {
- opts.add("--target");
- opts.add(targetSnapshotName);
- }
- if (overwrite) {
- opts.add("--overwrite");
- }
- if (resetTtl) {
- opts.add("--reset-ttl");
- }
- if (!checksumVerify) {
- opts.add("--no-checksum-verify");
- }
- if (!noSourceVerify) {
- opts.add("--no-source-verify");
- }
- if (!noTargetVerify) {
- opts.add("--no-target-verify");
- }
-
- // Export Snapshot
- return run(conf, new ExportSnapshot(), opts.toArray(new String[opts.size()]));
- }
-}
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotAdjunct.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotAdjunct.java
deleted file mode 100644
index 6094f88f89fe..000000000000
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotAdjunct.java
+++ /dev/null
@@ -1,175 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.snapshot;
-
-import static org.junit.Assert.assertFalse;
-
-import java.util.Iterator;
-import java.util.Map;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
-import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.testclassification.LargeTests;
-import org.apache.hadoop.hbase.testclassification.VerySlowMapReduceTests;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Ignore;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.rules.TestName;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Tests that are adjunct to {@link TestExportSnapshot}. They used to be in same test suite but the
- * test suite ran too close to the maximum time limit so we split these out. Uses facility from
- * TestExportSnapshot where possible.
- * @see TestExportSnapshot
- */
-@Ignore // HBASE-24493
-@Category({ VerySlowMapReduceTests.class, LargeTests.class })
-public class TestExportSnapshotAdjunct {
- private static final Logger LOG = LoggerFactory.getLogger(TestExportSnapshotAdjunct.class);
-
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestExportSnapshotAdjunct.class);
- @Rule
- public final TestName testName = new TestName();
-
- protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
-
- protected TableName tableName;
- private String emptySnapshotName;
- private String snapshotName;
- private int tableNumFiles;
- private Admin admin;
-
- @BeforeClass
- public static void setUpBeforeClass() throws Exception {
- TestExportSnapshot.setUpBaseConf(TEST_UTIL.getConfiguration());
- TEST_UTIL.startMiniCluster(3);
- TEST_UTIL.startMiniMapReduceCluster();
- }
-
- /**
- * Check for references to '/tmp'. We are trying to avoid having references to outside of the test
- * data dir when running tests. References outside of the test dir makes it so concurrent tests
- * can stamp on each other by mistake. This check is for references to the 'tmp'. This is a
- * strange place for this test but I want somewhere where the configuration is full -- filed w/
- * hdfs and mapreduce configurations.
- */
- private void checkForReferencesToTmpDir() {
- Configuration conf = TEST_UTIL.getConfiguration();
- for (Iterator> i = conf.iterator(); i.hasNext();) {
- Map.Entry e = i.next();
- if (e.getKey().contains("original.hbase.dir")) {
- continue;
- }
- if (e.getValue().contains("java.io.tmpdir")) {
- continue;
- }
- if (e.getValue().contains("hadoop.tmp.dir")) {
- continue;
- }
- if (e.getValue().contains("hbase.tmp.dir")) {
- continue;
- }
- assertFalse(e.getKey() + " " + e.getValue(), e.getValue().contains("tmp"));
- }
- }
-
- @AfterClass
- public static void tearDownAfterClass() throws Exception {
- TEST_UTIL.shutdownMiniMapReduceCluster();
- TEST_UTIL.shutdownMiniCluster();
- }
-
- /**
- * Create a table and take a snapshot of the table used by the export test.
- */
- @Before
- public void setUp() throws Exception {
- this.admin = TEST_UTIL.getAdmin();
-
- tableName = TableName.valueOf("testtb-" + testName.getMethodName());
- snapshotName = "snaptb0-" + testName.getMethodName();
- emptySnapshotName = "emptySnaptb0-" + testName.getMethodName();
-
- // Create Table
- SnapshotTestingUtils.createPreSplitTable(TEST_UTIL, tableName, 2, TestExportSnapshot.FAMILY);
-
- // Take an empty snapshot
- admin.snapshot(emptySnapshotName, tableName);
-
- // Add some rows
- SnapshotTestingUtils.loadData(TEST_UTIL, tableName, 50, TestExportSnapshot.FAMILY);
- tableNumFiles = admin.getRegions(tableName).size();
-
- // take a snapshot
- admin.snapshot(snapshotName, tableName);
- }
-
- @After
- public void tearDown() throws Exception {
- TEST_UTIL.deleteTable(tableName);
- SnapshotTestingUtils.deleteAllSnapshots(TEST_UTIL.getAdmin());
- SnapshotTestingUtils.deleteArchiveDirectory(TEST_UTIL);
- }
-
- /**
- * Check that ExportSnapshot will succeed if something fails but the retry succeed.
- */
- @Test
- public void testExportRetry() throws Exception {
- Path copyDir = TestExportSnapshot.getLocalDestinationDir(TEST_UTIL);
- Configuration conf = new Configuration(TEST_UTIL.getConfiguration());
- conf.setBoolean(ExportSnapshot.Testing.CONF_TEST_FAILURE, true);
- conf.setInt(ExportSnapshot.Testing.CONF_TEST_FAILURE_COUNT, 2);
- conf.setInt("mapreduce.map.maxattempts", 3);
- TestExportSnapshot.testExportFileSystemState(conf, tableName, Bytes.toBytes(snapshotName),
- Bytes.toBytes(snapshotName), tableNumFiles, TEST_UTIL.getDefaultRootDirPath(), copyDir, true,
- false, null, true, false);
- }
-
- /**
- * Check that ExportSnapshot will fail if we inject failure more times than MR will retry.
- */
- @Test
- public void testExportFailure() throws Exception {
- Path copyDir = TestExportSnapshot.getLocalDestinationDir(TEST_UTIL);
- FileSystem fs = FileSystem.get(copyDir.toUri(), new Configuration());
- copyDir = copyDir.makeQualified(fs.getUri(), fs.getWorkingDirectory());
- Configuration conf = new Configuration(TEST_UTIL.getConfiguration());
- conf.setBoolean(ExportSnapshot.Testing.CONF_TEST_FAILURE, true);
- conf.setInt(ExportSnapshot.Testing.CONF_TEST_FAILURE_COUNT, 4);
- conf.setInt("mapreduce.map.maxattempts", 3);
- TestExportSnapshot.testExportFileSystemState(conf, tableName, Bytes.toBytes(snapshotName),
- Bytes.toBytes(snapshotName), tableNumFiles, TEST_UTIL.getDefaultRootDirPath(), copyDir, true,
- false, null, false, false);
- }
-}
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotHelpers.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotHelpers.java
index 71402d0989de..6617bde2008d 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotHelpers.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotHelpers.java
@@ -17,30 +17,25 @@
*/
package org.apache.hadoop.hbase.snapshot;
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
import java.util.ArrayList;
import java.util.List;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
-import org.apache.hadoop.hbase.testclassification.RegionServerTests;
+import org.apache.hadoop.hbase.testclassification.MapReduceTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Pair;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo;
/**
* Test Export Snapshot Tool helpers
*/
-@Category({ RegionServerTests.class, SmallTests.class })
+@Tag(MapReduceTests.TAG)
+@Tag(SmallTests.TAG)
public class TestExportSnapshotHelpers {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestExportSnapshotHelpers.class);
-
/**
* Verfy the result of getBalanceSplits() method. The result are groups of files, used as input
* list for the "export" mappers. All the groups should have similar amount of data. The input
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotMisc.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotMisc.java
new file mode 100644
index 000000000000..8ac6f4aa0201
--- /dev/null
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotMisc.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.snapshot;
+
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.testclassification.MapReduceTests;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+
+@Tag(MapReduceTests.TAG)
+@Tag(LargeTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: mob = {0}")
+public class TestExportSnapshotMisc extends ExportSnapshotMiscTestBase {
+
+ protected TestExportSnapshotMisc(boolean mob) {
+ super(mob);
+ }
+
+ @BeforeAll
+ public static void setUpBeforeClass() throws Exception {
+ ExportSnapshotTestHelpers.startCluster(TEST_UTIL, true);
+ }
+}
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotMiscWithTmpDir.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotMiscWithTmpDir.java
new file mode 100644
index 000000000000..743bc37cae9e
--- /dev/null
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotMiscWithTmpDir.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.snapshot;
+
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.testclassification.MapReduceTests;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+
+@Tag(MapReduceTests.TAG)
+@Tag(LargeTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: mob = {0}")
+public class TestExportSnapshotMiscWithTmpDir extends ExportSnapshotMiscTestBase {
+
+ protected TestExportSnapshotMiscWithTmpDir(boolean mob) {
+ super(mob);
+ }
+
+ @BeforeAll
+ public static void setUpBeforeClass() throws Exception {
+ ExportSnapshotTestHelpers.startCluster(TEST_UTIL, false);
+ }
+}
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotRetry.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotRetry.java
new file mode 100644
index 000000000000..09f4a3b01ef7
--- /dev/null
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotRetry.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.snapshot;
+
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.testclassification.MapReduceTests;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+
+@Tag(MapReduceTests.TAG)
+@Tag(LargeTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: mob = {0}")
+public class TestExportSnapshotRetry extends ExportSnapshotRetryTestBase {
+
+ public TestExportSnapshotRetry(boolean mob) {
+ super(mob);
+ }
+
+ @BeforeAll
+ public static void setUpBeforeClass() throws Exception {
+ ExportSnapshotTestHelpers.startCluster(TEST_UTIL, false);
+ }
+}
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotRetryWithTmpDir.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotRetryWithTmpDir.java
new file mode 100644
index 000000000000..144b2228d753
--- /dev/null
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotRetryWithTmpDir.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.snapshot;
+
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.testclassification.MapReduceTests;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+
+@Tag(MapReduceTests.TAG)
+@Tag(LargeTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: mob = {0}")
+public class TestExportSnapshotRetryWithTmpDir extends ExportSnapshotRetryTestBase {
+
+ public TestExportSnapshotRetryWithTmpDir(boolean mob) {
+ super(mob);
+ }
+
+ @BeforeAll
+ public static void setUpBeforeClass() throws Exception {
+ ExportSnapshotTestHelpers.startCluster(TEST_UTIL, true);
+ }
+}
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotV1NoCluster.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotV1NoCluster.java
index cd24787e016d..e38d797345a8 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotV1NoCluster.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotV1NoCluster.java
@@ -17,7 +17,7 @@
*/
package org.apache.hadoop.hbase.snapshot;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.IOException;
import java.util.HashSet;
@@ -25,7 +25,6 @@
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
@@ -33,14 +32,12 @@
import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
import org.apache.hadoop.hbase.snapshot.SnapshotTestingUtils.SnapshotMock;
import org.apache.hadoop.hbase.testclassification.MapReduceTests;
-import org.apache.hadoop.hbase.testclassification.MediumTests;
-import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.Pair;
-import org.junit.Before;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -49,25 +46,24 @@
* separate the tests. See companion file for test of v2 snapshot.
* @see TestExportSnapshotV2NoCluster
*/
-@Category({ MapReduceTests.class, MediumTests.class })
+@Tag(MapReduceTests.TAG)
+@Tag(SmallTests.TAG)
public class TestExportSnapshotV1NoCluster {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestExportSnapshotV1NoCluster.class);
+
private static final Logger LOG = LoggerFactory.getLogger(TestExportSnapshotV1NoCluster.class);
private HBaseCommonTestingUtility testUtil = new HBaseCommonTestingUtility();
private Path testDir;
private FileSystem fs;
- @Before
+ @BeforeEach
public void setUpBefore() throws Exception {
// Make sure testDir is on LocalFileSystem
this.fs = FileSystem.getLocal(this.testUtil.getConfiguration());
this.testDir = setup(fs, this.testUtil);
LOG.info("fs={}, fsuri={}, fswd={}, testDir={}", this.fs, this.fs.getUri(),
this.fs.getWorkingDirectory(), this.testDir);
- assertTrue("FileSystem '" + fs + "' is not local", fs instanceof LocalFileSystem);
+ assertTrue(fs instanceof LocalFileSystem, "FileSystem '" + fs + "' is not local");
}
/**
@@ -122,9 +118,9 @@ static void testSnapshotWithRefsExportFileSystemState(FileSystem fs,
}
}
int snapshotFilesCount = dataFiles.size();
- byte[] snapshotName = Bytes.toBytes(builder.getSnapshotDescription().getName());
+ String snapshotName = builder.getSnapshotDescription().getName();
TableName tableName = builder.getTableDescriptor().getTableName();
- TestExportSnapshot.testExportFileSystemState(testUtil.getConfiguration(), tableName,
+ ExportSnapshotTestBase.testExportFileSystemState(testUtil.getConfiguration(), tableName,
snapshotName, snapshotName, snapshotFilesCount, testDir,
getDestinationDir(fs, testUtil, testDir), false, false, null, true, false);
}
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotV2NoCluster.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotV2NoCluster.java
index 70c598d520f6..98ec2263949e 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotV2NoCluster.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotV2NoCluster.java
@@ -17,20 +17,18 @@
*/
package org.apache.hadoop.hbase.snapshot;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.snapshot.SnapshotTestingUtils.SnapshotMock;
import org.apache.hadoop.hbase.testclassification.MapReduceTests;
-import org.apache.hadoop.hbase.testclassification.MediumTests;
-import org.junit.Before;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -38,25 +36,22 @@
* Test Export Snapshot Tool; tests v2 snapshots.
* @see TestExportSnapshotV1NoCluster
*/
-@Category({ MapReduceTests.class, MediumTests.class })
+@Tag(MapReduceTests.TAG)
+@Tag(SmallTests.TAG)
public class TestExportSnapshotV2NoCluster {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestExportSnapshotV2NoCluster.class);
-
private static final Logger LOG = LoggerFactory.getLogger(TestExportSnapshotV2NoCluster.class);
private HBaseCommonTestingUtility testUtil = new HBaseCommonTestingUtility();
private Path testDir;
private FileSystem fs;
- @Before
+ @BeforeEach
public void before() throws Exception {
// Make sure testDir is on LocalFileSystem
this.fs = FileSystem.getLocal(this.testUtil.getConfiguration());
this.testDir = TestExportSnapshotV1NoCluster.setup(this.fs, this.testUtil);
LOG.info("fs={}, testDir={}", this.fs, this.testDir);
- assertTrue("FileSystem '" + fs + "' is not local", fs instanceof LocalFileSystem);
+ assertTrue(fs instanceof LocalFileSystem, "FileSystem '" + fs + "' is not local");
}
@Test
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotWithTemporaryDirectory.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotWithTemporaryDirectory.java
deleted file mode 100644
index fe380e683db0..000000000000
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotWithTemporaryDirectory.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.snapshot;
-
-import java.io.IOException;
-import java.util.UUID;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
-import org.apache.hadoop.hbase.testclassification.MediumTests;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Ignore;
-import org.junit.experimental.categories.Category;
-
-@Ignore // HBASE-24493
-@Category({ MediumTests.class })
-public class TestExportSnapshotWithTemporaryDirectory extends TestExportSnapshot {
-
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestExportSnapshotWithTemporaryDirectory.class);
-
- @BeforeClass
- public static void setUpBeforeClass() throws Exception {
- setUpBaseConf(TEST_UTIL.getConfiguration());
- TEST_UTIL.startMiniCluster(3);
- TEST_UTIL.startMiniMapReduceCluster();
- }
-
- @AfterClass
- public static void tearDownAfterClass() throws Exception {
- TestExportSnapshot.tearDownAfterClass();
- }
-
- public static void setUpBaseConf(Configuration conf) {
- Path tmpDir = null;
- try {
- FileSystem localFs = FileSystem.getLocal(conf);
- tmpDir = TEST_UTIL.getDataTestDir(UUID.randomUUID().toString())
- .makeQualified(localFs.getUri(), localFs.getWorkingDirectory());
- } catch (IOException ioe) {
- throw new RuntimeException(ioe);
- }
- TestExportSnapshot.setUpBaseConf(conf);
- conf.set(SnapshotDescriptionUtils.SNAPSHOT_WORKING_DIR, tmpDir.toUri().toString());
- }
-}
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportWithChecksum.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportWithChecksum.java
new file mode 100644
index 000000000000..710673727400
--- /dev/null
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportWithChecksum.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.snapshot;
+
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.testclassification.MapReduceTests;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+
+@Tag(MapReduceTests.TAG)
+@Tag(LargeTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: mob = {0}")
+public class TestExportWithChecksum extends ExportWithChecksumTestBase {
+
+ public TestExportWithChecksum(boolean mob) {
+ super(mob);
+ }
+
+ @BeforeAll
+ public static void setUpBeforeClass() throws Exception {
+ ExportSnapshotTestHelpers.startCluster(TEST_UTIL, false);
+ }
+}
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportWithChecksumWithTmpDir.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportWithChecksumWithTmpDir.java
new file mode 100644
index 000000000000..bed72735ddea
--- /dev/null
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportWithChecksumWithTmpDir.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.snapshot;
+
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.testclassification.MapReduceTests;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+
+@Tag(MapReduceTests.TAG)
+@Tag(LargeTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: mob = {0}")
+public class TestExportWithChecksumWithTmpDir extends ExportWithChecksumTestBase {
+
+ public TestExportWithChecksumWithTmpDir(boolean mob) {
+ super(mob);
+ }
+
+ @BeforeAll
+ public static void setUpBeforeClass() throws Exception {
+ ExportSnapshotTestHelpers.startCluster(TEST_UTIL, true);
+ }
+}
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestMobExportSnapshot.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestMobExportSnapshot.java
deleted file mode 100644
index 4943b40d6a71..000000000000
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestMobExportSnapshot.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.snapshot;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.RegionInfo;
-import org.apache.hadoop.hbase.mob.MobConstants;
-import org.apache.hadoop.hbase.mob.MobUtils;
-import org.apache.hadoop.hbase.testclassification.LargeTests;
-import org.apache.hadoop.hbase.testclassification.VerySlowRegionServerTests;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Ignore;
-import org.junit.experimental.categories.Category;
-
-/**
- * Test Export Snapshot Tool
- */
-@Ignore // HBASE-24493
-@Category({ VerySlowRegionServerTests.class, LargeTests.class })
-public class TestMobExportSnapshot extends TestExportSnapshot {
-
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestMobExportSnapshot.class);
-
- public static void setUpBaseConf(Configuration conf) {
- TestExportSnapshot.setUpBaseConf(conf);
- conf.setInt(MobConstants.MOB_FILE_CACHE_SIZE_KEY, 0);
- }
-
- @BeforeClass
- public static void setUpBeforeClass() throws Exception {
- setUpBaseConf(TEST_UTIL.getConfiguration());
- TEST_UTIL.startMiniCluster(3);
- TEST_UTIL.startMiniMapReduceCluster();
- }
-
- @Override
- protected void createTable(TableName tableName) throws Exception {
- MobSnapshotTestingUtils.createPreSplitMobTable(TEST_UTIL, tableName, 2, FAMILY);
- }
-
- @Override
- protected RegionPredicate getBypassRegionPredicate() {
- return new RegionPredicate() {
- @Override
- public boolean evaluate(final RegionInfo regionInfo) {
- return MobUtils.isMobRegionInfo(regionInfo);
- }
- };
- }
-}
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestMobSecureExportSnapshot.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestMobSecureExportSnapshot.java
deleted file mode 100644
index 2fa686f768f1..000000000000
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestMobSecureExportSnapshot.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.snapshot;
-
-import org.apache.hadoop.hbase.HBaseClassTestRule;
-import org.apache.hadoop.hbase.security.HadoopSecurityEnabledUserProviderForTesting;
-import org.apache.hadoop.hbase.security.UserProvider;
-import org.apache.hadoop.hbase.security.access.PermissionStorage;
-import org.apache.hadoop.hbase.security.access.SecureTestUtil;
-import org.apache.hadoop.hbase.testclassification.LargeTests;
-import org.apache.hadoop.hbase.testclassification.VerySlowRegionServerTests;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.experimental.categories.Category;
-
-/**
- * Reruns TestMobExportSnapshot using MobExportSnapshot in secure mode.
- */
-@Category({ VerySlowRegionServerTests.class, LargeTests.class })
-public class TestMobSecureExportSnapshot extends TestMobExportSnapshot {
-
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestMobSecureExportSnapshot.class);
-
- @BeforeClass
- public static void setUpBeforeClass() throws Exception {
- setUpBaseConf(TEST_UTIL.getConfiguration());
- // Setup separate test-data directory for MR cluster and set corresponding configurations.
- // Otherwise, different test classes running MR cluster can step on each other.
- TEST_UTIL.getDataTestDir();
-
- // set the always on security provider
- UserProvider.setUserProviderForTesting(TEST_UTIL.getConfiguration(),
- HadoopSecurityEnabledUserProviderForTesting.class);
-
- // setup configuration
- SecureTestUtil.enableSecurity(TEST_UTIL.getConfiguration());
-
- TEST_UTIL.startMiniCluster(3);
- TEST_UTIL.startMiniMapReduceCluster();
-
- // Wait for the ACL table to become available
- TEST_UTIL.waitTableEnabled(PermissionStorage.ACL_TABLE_NAME);
- }
-}
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestSecureConsecutiveExports.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestSecureConsecutiveExports.java
new file mode 100644
index 000000000000..993cc0370eb6
--- /dev/null
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestSecureConsecutiveExports.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.snapshot;
+
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.testclassification.MapReduceTests;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+
+@Tag(MapReduceTests.TAG)
+@Tag(LargeTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: mob = {0}")
+public class TestSecureConsecutiveExports extends ConsecutiveExportsTestBase {
+
+ public TestSecureConsecutiveExports(boolean mob) {
+ super(mob);
+ }
+
+ @BeforeAll
+ public static void setUpBeforeClass() throws Exception {
+ ExportSnapshotTestHelpers.startSecureCluster(TEST_UTIL);
+ }
+}
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestSecureExportFileSystemState.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestSecureExportFileSystemState.java
new file mode 100644
index 000000000000..ab4d2659f2e8
--- /dev/null
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestSecureExportFileSystemState.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.snapshot;
+
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.testclassification.MapReduceTests;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+
+@Tag(MapReduceTests.TAG)
+@Tag(LargeTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: mob = {0}")
+public class TestSecureExportFileSystemState extends ExportFileSystemStateTestBase {
+
+ public TestSecureExportFileSystemState(boolean mob) {
+ super(mob);
+ }
+
+ @BeforeAll
+ public static void setUpBeforeClass() throws Exception {
+ ExportSnapshotTestHelpers.startSecureCluster(TEST_UTIL);
+ }
+}
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestSecureExportFileSystemStateWithMergeOrSplitRegion.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestSecureExportFileSystemStateWithMergeOrSplitRegion.java
new file mode 100644
index 000000000000..45fcb743b3ed
--- /dev/null
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestSecureExportFileSystemStateWithMergeOrSplitRegion.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.snapshot;
+
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.testclassification.MapReduceTests;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+
+@Tag(MapReduceTests.TAG)
+@Tag(LargeTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: mob = {0}")
+public class TestSecureExportFileSystemStateWithMergeOrSplitRegion
+ extends ExportFileSystemStateWithMergeOrSplitRegionTestBase {
+
+ public TestSecureExportFileSystemStateWithMergeOrSplitRegion(boolean mob) {
+ super(mob);
+ }
+
+ @BeforeAll
+ public static void setUpBeforeClass() throws Exception {
+ ExportSnapshotTestHelpers.startSecureCluster(TEST_UTIL);
+ }
+}
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestSecureExportSnapshot.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestSecureExportSnapshot.java
deleted file mode 100644
index a2a588ac5724..000000000000
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestSecureExportSnapshot.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.snapshot;
-
-import org.apache.hadoop.hbase.HBaseClassTestRule;
-import org.apache.hadoop.hbase.security.HadoopSecurityEnabledUserProviderForTesting;
-import org.apache.hadoop.hbase.security.UserProvider;
-import org.apache.hadoop.hbase.security.access.PermissionStorage;
-import org.apache.hadoop.hbase.security.access.SecureTestUtil;
-import org.apache.hadoop.hbase.testclassification.LargeTests;
-import org.apache.hadoop.hbase.testclassification.VerySlowRegionServerTests;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.experimental.categories.Category;
-
-/**
- * Reruns TestExportSnapshot using ExportSnapshot in secure mode.
- */
-@Category({ VerySlowRegionServerTests.class, LargeTests.class })
-public class TestSecureExportSnapshot extends TestExportSnapshot {
-
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestSecureExportSnapshot.class);
-
- @BeforeClass
- public static void setUpBeforeClass() throws Exception {
- setUpBaseConf(TEST_UTIL.getConfiguration());
- // Setup separate test-data directory for MR cluster and set corresponding configurations.
- // Otherwise, different test classes running MR cluster can step on each other.
- TEST_UTIL.getDataTestDir();
-
- // set the always on security provider
- UserProvider.setUserProviderForTesting(TEST_UTIL.getConfiguration(),
- HadoopSecurityEnabledUserProviderForTesting.class);
-
- // setup configuration
- SecureTestUtil.enableSecurity(TEST_UTIL.getConfiguration());
-
- TEST_UTIL.startMiniCluster(3);
- TEST_UTIL.startMiniMapReduceCluster();
-
- // Wait for the ACL table to become available
- TEST_UTIL.waitTableEnabled(PermissionStorage.ACL_TABLE_NAME);
- }
-}
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestSecureExportSnapshotMisc.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestSecureExportSnapshotMisc.java
new file mode 100644
index 000000000000..088499e5f7d6
--- /dev/null
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestSecureExportSnapshotMisc.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.snapshot;
+
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.testclassification.MapReduceTests;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+
+@Tag(MapReduceTests.TAG)
+@Tag(LargeTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: mob = {0}")
+public class TestSecureExportSnapshotMisc extends ExportSnapshotMiscTestBase {
+
+ public TestSecureExportSnapshotMisc(boolean mob) {
+ super(mob);
+ }
+
+ @BeforeAll
+ public static void setUpBeforeClass() throws Exception {
+ ExportSnapshotTestHelpers.startSecureCluster(TEST_UTIL);
+ }
+}
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestSecureExportSnapshotRetry.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestSecureExportSnapshotRetry.java
new file mode 100644
index 000000000000..ad32fee09e26
--- /dev/null
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestSecureExportSnapshotRetry.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.snapshot;
+
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.testclassification.MapReduceTests;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+
+@Tag(MapReduceTests.TAG)
+@Tag(LargeTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: mob = {0}")
+public class TestSecureExportSnapshotRetry extends ExportSnapshotRetryTestBase {
+
+ public TestSecureExportSnapshotRetry(boolean mob) {
+ super(mob);
+ }
+
+ @BeforeAll
+ public static void setUpBeforeClass() throws Exception {
+ ExportSnapshotTestHelpers.startSecureCluster(TEST_UTIL);
+ }
+}
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestSecureExportWithChecksum.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestSecureExportWithChecksum.java
new file mode 100644
index 000000000000..0f6d5a125015
--- /dev/null
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestSecureExportWithChecksum.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.snapshot;
+
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.testclassification.MapReduceTests;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+
+@Tag(MapReduceTests.TAG)
+@Tag(LargeTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: mob = {0}")
+public class TestSecureExportWithChecksum extends ExportWithChecksumTestBase {
+
+ public TestSecureExportWithChecksum(boolean mob) {
+ super(mob);
+ }
+
+ @BeforeAll
+ public static void setUpBeforeClass() throws Exception {
+ ExportSnapshotTestHelpers.startSecureCluster(TEST_UTIL);
+ }
+}