diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/FilterTestingCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/FilterTestingCluster.java index ff09253c062a..c5ed097ea7c3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/FilterTestingCluster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/FilterTestingCluster.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.ArrayList; @@ -36,40 +36,35 @@ import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.client.TableDescriptor; import org.apache.hadoop.hbase.client.TableDescriptorBuilder; -import org.apache.hadoop.hbase.testclassification.FilterTests; -import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; /** * By using this class as the super class of a set of tests you will have a HBase testing cluster * available that is very suitable for writing tests for scanning and filtering against. 
*/ -@Category({ FilterTests.class, MediumTests.class }) public class FilterTestingCluster { private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); private static Admin admin = null; private static List createdTables = new ArrayList<>(); protected static void createTable(TableName tableName, String columnFamilyName) { - assertNotNull("HBaseAdmin is not initialized successfully.", admin); + assertNotNull(admin, "HBaseAdmin is not initialized successfully."); TableDescriptor tableDescriptor = TableDescriptorBuilder.newBuilder(tableName) .setColumnFamily(ColumnFamilyDescriptorBuilder.of(Bytes.toBytes(columnFamilyName))).build(); try { admin.createTable(tableDescriptor); createdTables.add(tableName); - assertTrue("Fail to create the table", admin.tableExists(tableName)); + assertTrue(admin.tableExists(tableName), "Fail to create the table"); } catch (IOException e) { - assertNull("Exception found while creating table", e); + assertNull(e, "Exception found while creating table"); } } protected static Table openTable(TableName tableName) throws IOException { Table table = TEST_UTIL.getConnection().getTable(tableName); - assertTrue("Fail to create the table", admin.tableExists(tableName)); + assertTrue(admin.tableExists(tableName), "Fail to create the table"); return table; } @@ -82,7 +77,7 @@ private static void deleteTables() { admin.deleteTable(tableName); } } catch (IOException e) { - assertNull("Exception found deleting the table", e); + assertNull(e, "Exception found deleting the table"); } } } @@ -94,21 +89,20 @@ private static void initialize(Configuration conf) { try { admin = TEST_UTIL.getAdmin(); } catch (MasterNotRunningException e) { - assertNull("Master is not running", e); + assertNull(e, "Master is not running"); } catch (ZooKeeperConnectionException e) { - assertNull("Cannot connect to ZooKeeper", e); + assertNull(e, "Cannot connect to ZooKeeper"); } catch (IOException e) { - assertNull("IOException", e); + assertNull(e, "IOException"); 
} } - @BeforeClass public static void setUp() throws Exception { TEST_UTIL.startMiniCluster(1); initialize(TEST_UTIL.getConfiguration()); } - @AfterClass + @AfterAll public static void tearDown() throws Exception { deleteTables(); TEST_UTIL.shutdownMiniCluster(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestBigDecimalComparator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestBigDecimalComparator.java index 8319884890a8..61d67632d72b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestBigDecimalComparator.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestBigDecimalComparator.java @@ -17,46 +17,45 @@ */ package org.apache.hadoop.hbase.filter; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + import java.math.BigDecimal; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.Assert; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ FilterTests.class, SmallTests.class }) +@Tag(FilterTests.TAG) +@Tag(SmallTests.TAG) public class TestBigDecimalComparator { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestBigDecimalComparator.class); - @Test public void testObjectEquals() { BigDecimal bd = new BigDecimal(Double.MIN_VALUE); // Check that equals returns true for identical objects final BigDecimalComparator bdc = new BigDecimalComparator(bd); - Assert.assertTrue(bdc.equals(bdc)); - Assert.assertEquals(bdc.hashCode(), 
bdc.hashCode()); + assertTrue(bdc.equals(bdc)); + assertEquals(bdc.hashCode(), bdc.hashCode()); // Check that equals returns true for the same object final BigDecimalComparator bdc1 = new BigDecimalComparator(bd); final BigDecimalComparator bdc2 = new BigDecimalComparator(bd); - Assert.assertTrue(bdc1.equals(bdc2)); - Assert.assertEquals(bdc1.hashCode(), bdc2.hashCode()); + assertTrue(bdc1.equals(bdc2)); + assertEquals(bdc1.hashCode(), bdc2.hashCode()); // Check that equals returns false for different objects final BigDecimalComparator bdc3 = new BigDecimalComparator(bd); final BigDecimalComparator bdc4 = new BigDecimalComparator(new BigDecimal(Long.MIN_VALUE)); - Assert.assertFalse(bdc3.equals(bdc4)); - Assert.assertNotEquals(bdc3.hashCode(), bdc4.hashCode()); + assertFalse(bdc3.equals(bdc4)); + assertNotEquals(bdc3.hashCode(), bdc4.hashCode()); // Check that equals returns false for a different type final BigDecimalComparator bdc5 = new BigDecimalComparator(bd); - Assert.assertFalse(bdc5.equals(0)); + assertFalse(bdc5.equals(0)); } @Test @@ -74,8 +73,8 @@ public void testEqualsValue() { int comp2 = comparator2.compareTo(value2); // then - Assert.assertEquals(0, comp1); - Assert.assertEquals(0, comp2); + assertEquals(0, comp1); + assertEquals(0, comp2); } @Test @@ -93,9 +92,9 @@ public void testGreaterThanValue() { int comp3 = comparator.compareTo(val3); // then - Assert.assertEquals(1, comp1); - Assert.assertEquals(1, comp2); - Assert.assertEquals(1, comp3); + assertEquals(1, comp1); + assertEquals(1, comp2); + assertEquals(1, comp3); } @Test @@ -113,9 +112,9 @@ public void testLessThanValue() { int comp3 = comparator.compareTo(val3); // then - Assert.assertEquals(-1, comp1); - Assert.assertEquals(-1, comp2); - Assert.assertEquals(-1, comp3); + assertEquals(-1, comp1); + assertEquals(-1, comp2); + assertEquals(-1, comp3); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestBitComparator.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestBitComparator.java index 2f629de4ea37..3bbfe7e8611b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestBitComparator.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestBitComparator.java @@ -17,26 +17,21 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.nio.ByteBuffer; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * Tests for the bit comparator */ -@Category({ FilterTests.class, SmallTests.class }) +@Tag(FilterTests.TAG) +@Tag(SmallTests.TAG) public class TestBitComparator { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestBitComparator.class); - private static byte[] zeros = new byte[] { 0, 0, 0, 0, 0, 0 }; private static ByteBuffer zeros_bb = ByteBuffer.wrap(zeros); private static byte[] ones = new byte[] { 1, 1, 1, 1, 1, 1 }; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnPaginationFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnPaginationFilter.java index 90d8f187c98c..9a5cc0e690a3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnPaginationFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnPaginationFilter.java @@ -17,17 +17,15 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertTrue; -import org.apache.hadoop.hbase.HBaseClassTestRule; import 
org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; @@ -37,13 +35,10 @@ * filter. More test functionality can be found within * {@link org.apache.hadoop.hbase.filter.TestFilter#testColumnPaginationFilter()} */ -@Category({ FilterTests.class, SmallTests.class }) +@Tag(FilterTests.TAG) +@Tag(SmallTests.TAG) public class TestColumnPaginationFilter { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestColumnPaginationFilter.class); - private static final byte[] ROW = Bytes.toBytes("row_1_test"); private static final byte[] COLUMN_FAMILY = Bytes.toBytes("test"); private static final byte[] VAL_1 = Bytes.toBytes("a"); @@ -52,7 +47,7 @@ public class TestColumnPaginationFilter { private Filter columnPaginationFilterOffset; private Filter columnPaginationFilter; - @Before + @BeforeEach public void setUp() throws Exception { columnPaginationFilter = getColumnPaginationFilter(); columnPaginationFilterOffset = getColumnPaginationFilterOffset(); @@ -79,7 +74,7 @@ private Filter serializationTest(Filter filter) throws Exception { */ private void basicFilterTests(ColumnPaginationFilter filter) throws Exception { KeyValue c = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER, VAL_1); - assertTrue("basicFilter1", filter.filterCell(c) == Filter.ReturnCode.INCLUDE_AND_NEXT_COL); + assertTrue(filter.filterCell(c) == Filter.ReturnCode.INCLUDE_AND_NEXT_COL, "basicFilter1"); } /** diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnPrefixFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnPrefixFilter.java index 5a5cdb72f3f8..211d76f969c2 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnPrefixFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnPrefixFilter.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.IOException; import java.util.ArrayList; @@ -27,7 +27,6 @@ import java.util.Map; import java.util.Set; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueTestUtil; @@ -46,29 +45,21 @@ import org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; -@Category({ FilterTests.class, SmallTests.class }) +@Tag(FilterTests.TAG) +@Tag(SmallTests.TAG) public class TestColumnPrefixFilter { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestColumnPrefixFilter.class); - private final static HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); - @Rule - public TestName name = new TestName(); - @Test - public void testColumnPrefixFilter() throws IOException { + public void testColumnPrefixFilter(TestInfo testInfo) throws IOException { String family = "Family"; - TableDescriptorBuilder tableDescriptorBuilder = - 
TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())); + TableDescriptorBuilder tableDescriptorBuilder = TableDescriptorBuilder + .newBuilder(TableName.valueOf(testInfo.getTestMethod().get().getName())); ColumnFamilyDescriptor columnFamilyDescriptor = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(family)).setMaxVersions(3).build(); tableDescriptorBuilder.setColumnFamily(columnFamilyDescriptor); @@ -130,10 +121,10 @@ public void testColumnPrefixFilter() throws IOException { } @Test - public void testColumnPrefixFilterWithFilterList() throws IOException { + public void testColumnPrefixFilterWithFilterList(TestInfo testInfo) throws IOException { String family = "Family"; - TableDescriptorBuilder tableDescriptorBuilder = - TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())); + TableDescriptorBuilder tableDescriptorBuilder = TableDescriptorBuilder + .newBuilder(TableName.valueOf(testInfo.getTestMethod().get().getName())); ColumnFamilyDescriptor columnFamilyDescriptor = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(family)).setMaxVersions(3).build(); tableDescriptorBuilder.setColumnFamily(columnFamilyDescriptor); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnRangeFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnRangeFilter.java index 3756d1a153da..41fe95419231 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnRangeFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnRangeFilter.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.IOException; import java.util.ArrayList; @@ -28,7 +28,6 @@ import java.util.Objects; import java.util.Set; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.HBaseClassTestRule; import 
org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueTestUtil; @@ -43,140 +42,49 @@ import org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -class StringRange { - private String start = null; - private String end = null; - private boolean startInclusive = true; - private boolean endInclusive = false; - - public StringRange(String start, boolean startInclusive, String end, boolean endInclusive) { - this.start = start; - this.startInclusive = startInclusive; - this.end = end; - this.endInclusive = endInclusive; - } - - public String getStart() { - return this.start; - } - - public String getEnd() { - return this.end; - } - - public boolean isStartInclusive() { - return this.startInclusive; - } - - public boolean isEndInclusive() { - return this.endInclusive; - } - - @Override - public int hashCode() { - int hashCode = 0; - if (this.start != null) { - hashCode ^= this.start.hashCode(); - } - - if (this.end != null) { - hashCode ^= this.end.hashCode(); - } - return hashCode; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null) { - return false; - } - if (!(obj instanceof StringRange)) { - return false; - } - StringRange oth = 
(StringRange) obj; - return this.startInclusive == oth.startInclusive && this.endInclusive == oth.endInclusive - && Objects.equals(this.start, oth.start) && Objects.equals(this.end, oth.end); - } - - @Override - public String toString() { - String result = (this.startInclusive ? "[" : "(") + (this.start == null ? null : this.start) - + ", " + (this.end == null ? null : this.end) + (this.endInclusive ? "]" : ")"); - return result; - } - - public boolean inRange(String value) { - boolean afterStart = true; - if (this.start != null) { - int startCmp = value.compareTo(this.start); - afterStart = this.startInclusive ? startCmp >= 0 : startCmp > 0; - } - - boolean beforeEnd = true; - if (this.end != null) { - int endCmp = value.compareTo(this.end); - beforeEnd = this.endInclusive ? endCmp <= 0 : endCmp < 0; - } - - return afterStart && beforeEnd; - } - -} - -@Category({ FilterTests.class, MediumTests.class }) +@Tag(FilterTests.TAG) +@Tag(MediumTests.TAG) public class TestColumnRangeFilter { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestColumnRangeFilter.class); - private final static HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); private static final Logger LOG = LoggerFactory.getLogger(TestColumnRangeFilter.class); - @Rule - public TestName name = new TestName(); - - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { TEST_UTIL.startMiniCluster(); } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } - @Before + @BeforeEach public void setUp() throws Exception { // Nothing to do. } - @After + @AfterEach public void tearDown() throws Exception { // Nothing to do. 
} @Test - public void TestColumnRangeFilterClient() throws Exception { + public void TestColumnRangeFilterClient(TestInfo testInfo) throws Exception { String family = "Family"; - Table ht = TEST_UTIL.createTable(TableName.valueOf(name.getMethodName()), Bytes.toBytes(family), - Integer.MAX_VALUE); + Table ht = TEST_UTIL.createTable(TableName.valueOf(testInfo.getTestMethod().get().getName()), + Bytes.toBytes(family), Integer.MAX_VALUE); List rows = generateRandomWords(10, 8); long maxTimestamp = 2; @@ -296,3 +204,85 @@ List generateRandomWords(int numberOfWords, int maxLengthOfWords) { } } + +class StringRange { + private String start = null; + private String end = null; + private boolean startInclusive = true; + private boolean endInclusive = false; + + public StringRange(String start, boolean startInclusive, String end, boolean endInclusive) { + this.start = start; + this.startInclusive = startInclusive; + this.end = end; + this.endInclusive = endInclusive; + } + + public String getStart() { + return this.start; + } + + public String getEnd() { + return this.end; + } + + public boolean isStartInclusive() { + return this.startInclusive; + } + + public boolean isEndInclusive() { + return this.endInclusive; + } + + @Override + public int hashCode() { + int hashCode = 0; + if (this.start != null) { + hashCode ^= this.start.hashCode(); + } + + if (this.end != null) { + hashCode ^= this.end.hashCode(); + } + return hashCode; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (!(obj instanceof StringRange)) { + return false; + } + StringRange oth = (StringRange) obj; + return this.startInclusive == oth.startInclusive && this.endInclusive == oth.endInclusive + && Objects.equals(this.start, oth.start) && Objects.equals(this.end, oth.end); + } + + @Override + public String toString() { + String result = (this.startInclusive ? "[" : "(") + (this.start == null ? 
null : this.start) + + ", " + (this.end == null ? null : this.end) + (this.endInclusive ? "]" : ")"); + return result; + } + + public boolean inRange(String value) { + boolean afterStart = true; + if (this.start != null) { + int startCmp = value.compareTo(this.start); + afterStart = this.startInclusive ? startCmp >= 0 : startCmp > 0; + } + + boolean beforeEnd = true; + if (this.end != null) { + int endCmp = value.compareTo(this.end); + beforeEnd = this.endInclusive ? endCmp <= 0 : endCmp < 0; + } + + return afterStart && beforeEnd; + } +} diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestComparatorSerialization.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestComparatorSerialization.java index b99538e33cbe..b7e5f7408bfd 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestComparatorSerialization.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestComparatorSerialization.java @@ -17,66 +17,63 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.IOException; import java.math.BigDecimal; import java.nio.charset.Charset; import java.util.Collections; import java.util.regex.Pattern; +import java.util.stream.Stream; import org.apache.commons.io.IOUtils; import org.apache.commons.text.StringSubstitutor; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; -import org.apache.hadoop.hbase.HBaseCommonTestingUtil; import org.apache.hadoop.hbase.HBaseConfiguration; +import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.testclassification.FilterTests; import 
org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ClassLoaderTestHelper; -import org.junit.AfterClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.TestTemplate; +import org.junit.jupiter.params.provider.Arguments; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos; -@RunWith(Parameterized.class) -@Category({ FilterTests.class, SmallTests.class }) +@Tag(FilterTests.TAG) +@Tag(SmallTests.TAG) +@HBaseParameterizedTestTemplate(name = "{index}: allowFastReflectionFallthrough={0}") public class TestComparatorSerialization { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestComparatorSerialization.class); - - @Parameterized.Parameter(0) public boolean allowFastReflectionFallthrough; - @Parameterized.Parameters(name = "{index}: allowFastReflectionFallthrough={0}") - public static Iterable data() { - return HBaseCommonTestingUtil.BOOLEAN_PARAMETERIZED; + public TestComparatorSerialization(boolean allowFastReflectionFallthrough) { + this.allowFastReflectionFallthrough = allowFastReflectionFallthrough; + } + + public static Stream parameters() { + return Stream.of(Arguments.of(true), Arguments.of(false)); } - @AfterClass + @AfterAll public static void afterClass() throws Exception { // set back to true so that it doesn't affect any other tests ProtobufUtil.setAllowFastReflectionFallthrough(true); } - @Test + @TestTemplate public void testBinaryComparator() throws Exception { BinaryComparator binaryComparator = new BinaryComparator(Bytes.toBytes("binaryComparator")); assertTrue(binaryComparator.areSerializedFieldsEqual( 
ProtobufUtil.toComparator(ProtobufUtil.toComparator(binaryComparator)))); } - @Test + @TestTemplate public void testBinaryPrefixComparator() throws Exception { BinaryPrefixComparator binaryPrefixComparator = new BinaryPrefixComparator(Bytes.toBytes("binaryPrefixComparator")); @@ -84,7 +81,7 @@ public void testBinaryPrefixComparator() throws Exception { ProtobufUtil.toComparator(ProtobufUtil.toComparator(binaryPrefixComparator)))); } - @Test + @TestTemplate public void testBitComparator() throws Exception { BitComparator bitComparator = new BitComparator(Bytes.toBytes("bitComparator"), BitComparator.BitwiseOp.XOR); @@ -92,14 +89,14 @@ public void testBitComparator() throws Exception { ProtobufUtil.toComparator(ProtobufUtil.toComparator(bitComparator)))); } - @Test + @TestTemplate public void testNullComparator() throws Exception { NullComparator nullComparator = new NullComparator(); assertTrue(nullComparator.areSerializedFieldsEqual( ProtobufUtil.toComparator(ProtobufUtil.toComparator(nullComparator)))); } - @Test + @TestTemplate public void testRegexStringComparator() throws Exception { // test without specifying flags RegexStringComparator regexStringComparator = new RegexStringComparator(".+-2"); @@ -110,18 +107,18 @@ public void testRegexStringComparator() throws Exception { try { new RegexStringComparator("regex", Pattern.CASE_INSENSITIVE | Pattern.DOTALL); } catch (Throwable t) { - assertNull("Exception occurred while created the RegexStringComparator object", t); + assertNull(t, "Exception occurred while created the RegexStringComparator object"); } } - @Test + @TestTemplate public void testSubstringComparator() throws Exception { SubstringComparator substringComparator = new SubstringComparator("substr"); assertTrue(substringComparator.areSerializedFieldsEqual( ProtobufUtil.toComparator(ProtobufUtil.toComparator(substringComparator)))); } - @Test + @TestTemplate public void testBigDecimalComparator() throws Exception { BigDecimal bigDecimal = new 
BigDecimal(Double.MIN_VALUE); BigDecimalComparator bigDecimalComparator = new BigDecimalComparator(bigDecimal); @@ -134,7 +131,7 @@ public void testBigDecimalComparator() throws Exception { * proves that this still works after HBASE-27276 despite not going through our fast function * caches. */ - @Test + @TestTemplate public void testCustomComparator() throws Exception { ByteArrayComparable baseFilter = new BinaryComparator("foo".getBytes()); ComparatorProtos.Comparator proto = ProtobufUtil.toComparator(baseFilter); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java index 72eccff47cb0..768a57c20449 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.ArrayList; @@ -28,7 +28,6 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CompareOperator; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.TableName; @@ -46,21 +45,17 @@ import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import 
org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@Category({ FilterTests.class, SmallTests.class }) +@Tag(FilterTests.TAG) +@Tag(SmallTests.TAG) public class TestDependentColumnFilter { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestDependentColumnFilter.class); - private static final Logger LOG = LoggerFactory.getLogger(TestDependentColumnFilter.class); private static final byte[][] ROWS = { Bytes.toBytes("test1"), Bytes.toBytes("test2") }; private static final byte[][] FAMILIES = @@ -76,7 +71,7 @@ public class TestDependentColumnFilter { List testVals; private HRegion region; - @Before + @BeforeEach public void setUp() throws Exception { testVals = makeTestVals(); @@ -93,7 +88,7 @@ public void setUp() throws Exception { addData(); } - @After + @AfterEach public void tearDown() throws Exception { HBaseTestingUtil.closeRegionAndWAL(this.region); } @@ -151,15 +146,15 @@ private void verifyScan(Scan s, long expectedRows, long expectedCells) throws IO LOG.info("counter=" + i + ", " + results); if (results.isEmpty()) break; cells += results.size(); - assertTrue("Scanned too many rows! Only expected " + expectedRows - + " total but already scanned " + (i + 1), expectedRows > i); - assertTrue("Expected " + expectedCells + " cells total but " + "already scanned " + cells, - expectedCells >= cells); + assertTrue(expectedRows > i, "Scanned too many rows! 
Only expected " + expectedRows + + " total but already scanned " + (i + 1)); + assertTrue(expectedCells >= cells, + "Expected " + expectedCells + " cells total but " + "already scanned " + cells); results.clear(); } - assertEquals("Expected " + expectedRows + " rows but scanned " + i + " rows", expectedRows, i); - assertEquals("Expected " + expectedCells + " cells but scanned " + cells + " cells", - expectedCells, cells); + assertEquals(expectedRows, i, "Expected " + expectedRows + " rows but scanned " + i + " rows"); + assertEquals(expectedCells, cells, + "Expected " + expectedCells + " cells but scanned " + cells + " cells"); } /** @@ -224,11 +219,11 @@ public void testFilterDropping() throws Exception { accepted.add(val); } } - assertEquals("check all values accepted from filterCell", 5, accepted.size()); + assertEquals(5, accepted.size(), "check all values accepted from filterCell"); filter.filterRowCells(accepted); - assertEquals("check filterRow(List) dropped cell without corresponding column entry", - 4, accepted.size()); + assertEquals(4, accepted.size(), + "check filterRow(List) dropped cell without corresponding column entry"); // start do it again with dependent column dropping on filter = new DependentColumnFilter(FAMILIES[1], QUALIFIER, true); @@ -238,10 +233,10 @@ public void testFilterDropping() throws Exception { accepted.add(val); } } - assertEquals("check the filtering column cells got dropped", 2, accepted.size()); + assertEquals(2, accepted.size(), "check the filtering column cells got dropped"); filter.filterRowCells(accepted); - assertEquals("check cell retention", 2, accepted.size()); + assertEquals(2, accepted.size(), "check cell retention"); } /** @@ -252,14 +247,14 @@ public void testToStringWithNullComparator() { // Test constructor that implicitly sets a null comparator Filter filter = new DependentColumnFilter(FAMILIES[0], QUALIFIER); assertNotNull(filter.toString()); - assertTrue("check string contains 'null' as compatator is null", - 
filter.toString().contains("null")); + assertTrue(filter.toString().contains("null"), + "check string contains 'null' as comparator is null"); // Test constructor with explicit null comparator filter = new DependentColumnFilter(FAMILIES[0], QUALIFIER, true, CompareOperator.EQUAL, null); assertNotNull(filter.toString()); - assertTrue("check string contains 'null' as compatator is null", - filter.toString().contains("null")); + assertTrue(filter.toString().contains("null"), + "check string contains 'null' as comparator is null"); } @Test @@ -267,7 +262,7 @@ public void testToStringWithNonNullComparator() { Filter filter = new DependentColumnFilter(FAMILIES[0], QUALIFIER, true, CompareOperator.EQUAL, new BinaryComparator(MATCH_VAL)); assertNotNull(filter.toString()); - assertTrue("check string contains comparator value", filter.toString().contains("match")); + assertTrue(filter.toString().contains("match"), "check string contains comparator value"); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java index e0b382a5ea95..fd5e130da067 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.ArrayList; @@ -29,7 +29,6 @@ import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.CompareOperator; -import org.apache.hadoop.hbase.HBaseClassTestRule; import 
org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; @@ -51,15 +50,12 @@ import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.wal.WAL; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Ignore; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -68,19 +64,14 @@ /** * Test filters at the HRegion doorstep. */ -@Category({ FilterTests.class, MediumTests.class }) +@Tag(FilterTests.TAG) +@Tag(MediumTests.TAG) public class TestFilter { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestFilter.class); - private final static Logger LOG = LoggerFactory.getLogger(TestFilter.class); private HRegion region; private final static HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); - @Rule - public TestName name = new TestName(); - // // Rows, Qualifiers, and Values are in two groups, One and Two. 
// @@ -132,7 +123,7 @@ public class TestFilter { private long numRows = (long) ROWS_ONE.length + ROWS_TWO.length; private long colsPerRow = (long) FAMILIES.length * QUALIFIERS_ONE.length; - @Before + @BeforeEach public void setUp() throws Exception { TableDescriptor tableDescriptor = TableDescriptorBuilder .newBuilder(TableName.valueOf("TestFilter")) @@ -217,7 +208,7 @@ public void setUp() throws Exception { numRows -= 2; } - @After + @AfterEach public void tearDown() throws Exception { HBaseTestingUtil.closeRegionAndWAL(region); } @@ -275,8 +266,8 @@ public void testRegionScannerReseek() throws Exception { // the results should belong to ROWS_THREE[1] scanner.next(results); for (Cell keyValue : results) { - assertTrue("The rows with ROWS_TWO as row key should be appearing.", - CellUtil.matchingRows(keyValue, ROWS_THREE[1])); + assertTrue(CellUtil.matchingRows(keyValue, ROWS_THREE[1]), + "The rows with ROWS_TWO as row key should be appearing."); } // again try to reseek to a value before ROWS_THREE[1] scanner.reseek(ROWS_ONE[1]); @@ -284,8 +275,8 @@ public void testRegionScannerReseek() throws Exception { // This time no seek would have been done to ROWS_ONE[1] scanner.next(results); for (Cell keyValue : results) { - assertFalse("Cannot rewind back to a value less than previous reseek.", - Bytes.toString(CellUtil.cloneRow(keyValue)).contains("testRowOne")); + assertFalse(Bytes.toString(CellUtil.cloneRow(keyValue)).contains("testRowOne"), + "Cannot rewind back to a value less than previous reseek."); } } @@ -508,16 +499,15 @@ public void testWhileMatchFilterWithFilterRowWithReverseScan() throws Exception scannerCounter++; if (scannerCounter >= pageSize) { - Assert.assertTrue("The WhileMatchFilter should now filter all remaining", - filter.filterAllRemaining()); + assertTrue(filter.filterAllRemaining(), + "The WhileMatchFilter should now filter all remaining"); } if (!isMoreResults) { break; } } scanner.close(); - Assert.assertEquals("The page filter returned more 
rows than expected", pageSize, - scannerCounter); + assertEquals(pageSize, scannerCounter, "The page filter returned more rows than expected"); } @Test @@ -533,8 +523,8 @@ public void testWhileMatchFilterWithFilterRowKeyWithReverseScan() throws Excepti ArrayList values = new ArrayList<>(); boolean isMoreResults = scanner.next(values); if (!isMoreResults || !Bytes.toString(CellUtil.cloneRow(values.get(0))).startsWith(prefix)) { - Assert.assertTrue("The WhileMatchFilter should now filter all remaining", - filter.filterAllRemaining()); + assertTrue(filter.filterAllRemaining(), + "The WhileMatchFilter should now filter all remaining"); } if (!isMoreResults) { break; @@ -562,14 +552,14 @@ public void testWhileMatchFilterWithFilterRow() throws Exception { scannerCounter++; if (scannerCounter >= pageSize) { - assertTrue("The WhileMatchFilter should now filter all remaining", - filter.filterAllRemaining()); + assertTrue(filter.filterAllRemaining(), + "The WhileMatchFilter should now filter all remaining"); } if (!isMoreResults) { break; } } - assertEquals("The page filter returned more rows than expected", pageSize, scannerCounter); + assertEquals(pageSize, scannerCounter, "The page filter returned more rows than expected"); } /** @@ -612,7 +602,7 @@ public void test94FilterRowCompatibility() throws Exception { InternalScanner scanner = this.region.getScanner(s); ArrayList values = new ArrayList<>(); scanner.next(values); - assertTrue("All rows should be filtered out", values.isEmpty()); + assertTrue(values.isEmpty(), "All rows should be filtered out"); } /** @@ -631,8 +621,8 @@ public void testWhileMatchFilterWithFilterRowKey() throws Exception { ArrayList values = new ArrayList<>(); boolean isMoreResults = scanner.next(values); if (!isMoreResults || !Bytes.toString(CellUtil.cloneRow(values.get(0))).startsWith(prefix)) { - assertTrue("The WhileMatchFilter should now filter all remaining", - filter.filterAllRemaining()); + assertTrue(filter.filterAllRemaining(), + "The 
WhileMatchFilter should now filter all remaining"); } if (!isMoreResults) { break; @@ -655,8 +645,8 @@ public void testWhileMatchFilterWithFilterCell() throws Exception { while (true) { ArrayList values = new ArrayList<>(); boolean isMoreResults = scanner.next(values); - assertTrue("The WhileMatchFilter should now filter all remaining", - filter.filterAllRemaining()); + assertTrue(filter.filterAllRemaining(), + "The WhileMatchFilter should now filter all remaining"); if (!isMoreResults) { break; } @@ -1436,11 +1426,11 @@ public void testFilterListWithSingleColumnValueFilter() throws IOException { // HBASE-9747 @Test - public void testFilterListWithPrefixFilter() throws IOException { + public void testFilterListWithPrefixFilter(TestInfo testInfo) throws IOException { byte[] family = Bytes.toBytes("f1"); byte[] qualifier = Bytes.toBytes("q1"); TableDescriptor tableDescriptor = - TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())) + TableDescriptorBuilder.newBuilder(TableName.valueOf(testInfo.getTestMethod().get().getName())) .setColumnFamily(ColumnFamilyDescriptorBuilder.of(family)).build(); RegionInfo info = RegionInfoBuilder.newBuilder(tableDescriptor.getTableName()).build(); HRegion testRegion = HBaseTestingUtil.createRegionAndWAL(info, TEST_UTIL.getDataTestDir(), @@ -1758,13 +1748,13 @@ private void verifyScan(Scan s, long expectedRows, long expectedKeys) throws IOE Arrays.sort(results.toArray(new Cell[results.size()]), CellComparator.getInstance()); LOG.info("counter=" + i + ", " + results); if (results.isEmpty()) break; - assertTrue("Scanned too many rows! Only expected " + expectedRows - + " total but already scanned " + (i + 1), expectedRows > i); - assertEquals("Expected " + expectedKeys + " keys per row but " + "returned " + results.size(), - expectedKeys, results.size()); + assertTrue(expectedRows > i, "Scanned too many rows! 
Only expected " + expectedRows + + " total but already scanned " + (i + 1)); + assertEquals(expectedKeys, results.size(), + "Expected " + expectedKeys + " keys per row but " + "returned " + results.size()); results.clear(); } - assertEquals("Expected " + expectedRows + " rows but scanned " + i + " rows", expectedRows, i); + assertEquals(expectedRows, i, "Expected " + expectedRows + " rows but scanned " + i + " rows"); } private void verifyScanNoEarlyOut(Scan s, long expectedRows, long expectedKeys) @@ -1777,13 +1767,13 @@ private void verifyScanNoEarlyOut(Scan s, long expectedRows, long expectedKeys) Arrays.sort(results.toArray(new Cell[results.size()]), CellComparator.getInstance()); LOG.info("counter=" + i + ", " + results); if (results.isEmpty()) break; - assertTrue("Scanned too many rows! Only expected " + expectedRows - + " total but already scanned " + (i + 1), expectedRows > i); - assertEquals("Expected " + expectedKeys + " keys per row but " + "returned " + results.size(), - expectedKeys, results.size()); + assertTrue(expectedRows > i, "Scanned too many rows! Only expected " + expectedRows + + " total but already scanned " + (i + 1)); + assertEquals(expectedKeys, results.size(), + "Expected " + expectedKeys + " keys per row but " + "returned " + results.size()); results.clear(); } - assertEquals("Expected " + expectedRows + " rows but scanned " + i + " rows", expectedRows, i); + assertEquals(expectedRows, i, "Expected " + expectedRows + " rows but scanned " + i + " rows"); } private void verifyScanFull(Scan s, KeyValue[] kvs) throws IOException { @@ -1795,23 +1785,22 @@ private void verifyScanFull(Scan s, KeyValue[] kvs) throws IOException { done = scanner.next(results); Arrays.sort(results.toArray(new Cell[results.size()]), CellComparator.getInstance()); if (results.isEmpty()) break; - assertTrue( + assertTrue(kvs.length >= idx + results.size(), "Scanned too many keys! 
Only expected " + kvs.length + " total but already scanned " + (results.size() + idx) - + (results.isEmpty() ? "" : "(" + results.get(0).toString() + ")"), - kvs.length >= idx + results.size()); + + (results.isEmpty() ? "" : "(" + results.get(0).toString() + ")")); for (Cell kv : results) { LOG.info("row=" + row + ", result=" + kv.toString() + ", match=" + kvs[idx].toString()); - assertTrue("Row mismatch", CellUtil.matchingRows(kv, kvs[idx])); - assertTrue("Family mismatch", CellUtil.matchingFamily(kv, kvs[idx])); - assertTrue("Qualifier mismatch", CellUtil.matchingQualifier(kv, kvs[idx])); - assertTrue("Value mismatch", CellUtil.matchingValue(kv, kvs[idx])); + assertTrue(CellUtil.matchingRows(kv, kvs[idx]), "Row mismatch"); + assertTrue(CellUtil.matchingFamily(kv, kvs[idx]), "Family mismatch"); + assertTrue(CellUtil.matchingQualifier(kv, kvs[idx]), "Qualifier mismatch"); + assertTrue(CellUtil.matchingValue(kv, kvs[idx]), "Value mismatch"); idx++; } results.clear(); } LOG.info("Looked at " + row + " rows with " + idx + " keys"); - assertEquals("Expected " + kvs.length + " total keys but scanned " + idx, kvs.length, idx); + assertEquals(kvs.length, idx, "Expected " + kvs.length + " total keys but scanned " + idx); } private void verifyScanFullNoValues(Scan s, KeyValue[] kvs, boolean useLen) throws IOException { @@ -1823,34 +1812,33 @@ private void verifyScanFullNoValues(Scan s, KeyValue[] kvs, boolean useLen) thro more = scanner.next(results); Arrays.sort(results.toArray(new Cell[results.size()]), CellComparator.getInstance()); if (results.isEmpty()) break; - assertTrue( + assertTrue(kvs.length >= idx + results.size(), "Scanned too many keys! Only expected " + kvs.length + " total but already scanned " + (results.size() + idx) - + (results.isEmpty() ? "" : "(" + results.get(0).toString() + ")"), - kvs.length >= idx + results.size()); + + (results.isEmpty() ? 
"" : "(" + results.get(0).toString() + ")")); for (Cell kv : results) { LOG.info("row=" + row + ", result=" + kv.toString() + ", match=" + kvs[idx].toString()); - assertTrue("Row mismatch", CellUtil.matchingRows(kv, kvs[idx])); - assertTrue("Family mismatch", CellUtil.matchingFamily(kv, kvs[idx])); - assertTrue("Qualifier mismatch", CellUtil.matchingQualifier(kv, kvs[idx])); - assertFalse("Should not have returned whole value", CellUtil.matchingValue(kv, kvs[idx])); + assertTrue(CellUtil.matchingRows(kv, kvs[idx]), "Row mismatch"); + assertTrue(CellUtil.matchingFamily(kv, kvs[idx]), "Family mismatch"); + assertTrue(CellUtil.matchingQualifier(kv, kvs[idx]), "Qualifier mismatch"); + assertFalse(CellUtil.matchingValue(kv, kvs[idx]), "Should not have returned whole value"); if (useLen) { - assertEquals("Value in result is not SIZEOF_INT", Bytes.SIZEOF_INT, kv.getValueLength()); + assertEquals(Bytes.SIZEOF_INT, kv.getValueLength(), "Value in result is not SIZEOF_INT"); LOG.info("idx = " + idx + ", len=" + kvs[idx].getValueLength() + ", actual=" + Bytes.toInt(CellUtil.cloneValue(kv))); - assertEquals("Scan value should be the length of the actual value. ", - kvs[idx].getValueLength(), Bytes.toInt(CellUtil.cloneValue(kv))); + assertEquals(kvs[idx].getValueLength(), Bytes.toInt(CellUtil.cloneValue(kv)), + "Scan value should be the length of the actual value. 
"); LOG.info("good"); } else { - assertEquals("Value in result is not empty", 0, kv.getValueLength()); + assertEquals(0, kv.getValueLength(), "Value in result is not empty"); } idx++; } results.clear(); } LOG.info("Looked at " + row + " rows with " + idx + " keys"); - assertEquals("Expected " + kvs.length + " total keys but scanned " + idx, kvs.length, idx); + assertEquals(kvs.length, idx, "Expected " + kvs.length + " total keys but scanned " + idx); } @Test @@ -2182,11 +2170,11 @@ public boolean filterRow() throws IOException { } @Test - @Ignore("TODO: intentionally disabled?") - public void testNestedFilterListWithSCVF() throws IOException { + @Disabled("TODO: intentionally disabled?") + public void testNestedFilterListWithSCVF(TestInfo testInfo) throws IOException { byte[] columnStatus = Bytes.toBytes("S"); TableDescriptor tableDescriptor = - TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())) + TableDescriptorBuilder.newBuilder(TableName.valueOf(testInfo.getTestMethod().get().getName())) .setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILIES[0])).build(); RegionInfo info = RegionInfoBuilder.newBuilder(tableDescriptor.getTableName()).build(); HRegion testRegion = HBaseTestingUtil.createRegionAndWAL(info, TEST_UTIL.getDataTestDir(), diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterFromRegionSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterFromRegionSide.java index d1655373b77b..544e23c0648e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterFromRegionSide.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterFromRegionSide.java @@ -17,15 +17,14 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static 
org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; import java.io.IOException; import java.util.ArrayList; import java.util.List; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HTestConst; import org.apache.hadoop.hbase.KeyValue; @@ -41,22 +40,17 @@ import org.apache.hadoop.hbase.regionserver.InternalScanner; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * To test behavior of filters at server from region side. */ -@Category(SmallTests.class) +@Tag(SmallTests.TAG) public class TestFilterFromRegionSide { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestFilterFromRegionSide.class); - private final static HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); private static HRegion REGION; @@ -82,7 +76,7 @@ public class TestFilterFromRegionSide { private static int NUM_COLS = NUM_FAMILIES * NUM_QUALIFIERS; - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(TABLE_NAME); @@ -117,7 +111,7 @@ private static ArrayList createPuts(byte[][] rows, byte[][] families, byte[ return puts; } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { REGION.close(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java index 
ede7b2d2ed28..1242dee3598b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java @@ -17,11 +17,11 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.IOException; import java.util.ArrayList; @@ -32,7 +32,6 @@ import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CompareOperator; import org.apache.hadoop.hbase.ExtendedCell; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.exceptions.DeserializationException; @@ -41,23 +40,18 @@ import org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.Assert; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.mockito.Mockito; import org.apache.hbase.thirdparty.com.google.common.collect.Lists; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; -@Category({ FilterTests.class, MediumTests.class }) +@Tag(FilterTests.TAG) +@Tag(MediumTests.TAG) public class TestFilterList { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestFilterList.class); - 
static final int MAX_PAGES = 2; @Test @@ -865,8 +859,8 @@ public void testReversedFilterListWithMockSeekHintFilter() throws IOException { filterList.addFilter(filter2); filterList.addFilter(filter3); - Assert.assertEquals(ReturnCode.SEEK_NEXT_USING_HINT, filterList.filterCell(kv1)); - Assert.assertEquals(kv3, filterList.getNextCellHint(kv1)); + assertEquals(ReturnCode.SEEK_NEXT_USING_HINT, filterList.filterCell(kv1)); + assertEquals(kv3, filterList.getNextCellHint(kv1)); filterList = new FilterList(Operator.MUST_PASS_ALL); filterList.setReversed(true); @@ -874,8 +868,8 @@ public void testReversedFilterListWithMockSeekHintFilter() throws IOException { filterList.addFilter(filter2); filterList.addFilter(filter3); - Assert.assertEquals(ReturnCode.SEEK_NEXT_USING_HINT, filterList.filterCell(kv1)); - Assert.assertEquals(kv1, filterList.getNextCellHint(kv1)); + assertEquals(ReturnCode.SEEK_NEXT_USING_HINT, filterList.filterCell(kv1)); + assertEquals(kv1, filterList.getNextCellHint(kv1)); } @Test @@ -1040,11 +1034,11 @@ public void testTransformCell() throws IOException { TransformFilter filter2 = new TransformFilter(ReturnCode.NEXT_ROW); TransformFilter filter3 = new TransformFilter(ReturnCode.SEEK_NEXT_USING_HINT); FilterList filterList = new FilterList(Operator.MUST_PASS_ONE, filter1, filter2, filter3); - Assert.assertEquals(ReturnCode.INCLUDE, filterList.filterCell(kv)); - Assert.assertEquals(kv, filterList.transformCell(kv)); - Assert.assertEquals(true, filter1.getTransformed()); - Assert.assertEquals(false, filter2.getTransformed()); - Assert.assertEquals(false, filter3.getTransformed()); + assertEquals(ReturnCode.INCLUDE, filterList.filterCell(kv)); + assertEquals(kv, filterList.transformCell(kv)); + assertEquals(true, filter1.getTransformed()); + assertEquals(false, filter2.getTransformed()); + assertEquals(false, filter3.getTransformed()); // case MUST_PASS_ALL filter1 = new TransformFilter(ReturnCode.INCLUDE); @@ -1052,11 +1046,11 @@ public void 
testTransformCell() throws IOException { filter3 = new TransformFilter(ReturnCode.INCLUDE_AND_NEXT_COL); filterList = new FilterList(Operator.MUST_PASS_ALL, filter1, filter2, filter3); - Assert.assertEquals(ReturnCode.INCLUDE_AND_SEEK_NEXT_ROW, filterList.filterCell(kv)); - Assert.assertEquals(kv, filterList.transformCell(kv)); - Assert.assertEquals(true, filter1.getTransformed()); - Assert.assertEquals(true, filter2.getTransformed()); - Assert.assertEquals(true, filter3.getTransformed()); + assertEquals(ReturnCode.INCLUDE_AND_SEEK_NEXT_ROW, filterList.filterCell(kv)); + assertEquals(kv, filterList.transformCell(kv)); + assertEquals(true, filter1.getTransformed()); + assertEquals(true, filter2.getTransformed()); + assertEquals(true, filter3.getTransformed()); } @Test @@ -1083,10 +1077,10 @@ public void testFilterListWithORWhenPassingCellMismatchPreviousRC() throws IOExc Mockito.when(subFilter2.filterCell(kv4)).thenReturn(ReturnCode.INCLUDE_AND_SEEK_NEXT_ROW); Filter filterList = new FilterList(Operator.MUST_PASS_ONE, subFilter1, subFilter2); - Assert.assertEquals(ReturnCode.INCLUDE, filterList.filterCell(kv1)); - Assert.assertEquals(ReturnCode.NEXT_COL, filterList.filterCell(kv2)); - Assert.assertEquals(ReturnCode.INCLUDE_AND_NEXT_COL, filterList.filterCell(kv3)); - Assert.assertEquals(ReturnCode.INCLUDE_AND_NEXT_COL, filterList.filterCell(kv4)); + assertEquals(ReturnCode.INCLUDE, filterList.filterCell(kv1)); + assertEquals(ReturnCode.NEXT_COL, filterList.filterCell(kv2)); + assertEquals(ReturnCode.INCLUDE_AND_NEXT_COL, filterList.filterCell(kv3)); + assertEquals(ReturnCode.INCLUDE_AND_NEXT_COL, filterList.filterCell(kv4)); // One sub-filter will filterAllRemaining but other sub-filter will return SEEK_HINT subFilter1 = Mockito.mock(FilterBase.class); @@ -1096,7 +1090,7 @@ public void testFilterListWithORWhenPassingCellMismatchPreviousRC() throws IOExc subFilter2 = Mockito.mock(FilterBase.class); 
Mockito.when(subFilter2.filterCell(kv1)).thenReturn(ReturnCode.SEEK_NEXT_USING_HINT); filterList = new FilterList(Operator.MUST_PASS_ONE, subFilter1, subFilter2); - Assert.assertEquals(ReturnCode.SEEK_NEXT_USING_HINT, filterList.filterCell(kv1)); + assertEquals(ReturnCode.SEEK_NEXT_USING_HINT, filterList.filterCell(kv1)); // Two sub-filter returns SEEK_NEXT_USING_HINT, then we should return SEEK_NEXT_USING_HINT. subFilter1 = Mockito.mock(FilterBase.class); @@ -1105,6 +1099,6 @@ public void testFilterListWithORWhenPassingCellMismatchPreviousRC() throws IOExc subFilter2 = Mockito.mock(FilterBase.class); Mockito.when(subFilter2.filterCell(kv1)).thenReturn(ReturnCode.SEEK_NEXT_USING_HINT); filterList = new FilterList(Operator.MUST_PASS_ONE, subFilter1, subFilter2); - Assert.assertEquals(ReturnCode.SEEK_NEXT_USING_HINT, filterList.filterCell(kv1)); + assertEquals(ReturnCode.SEEK_NEXT_USING_HINT, filterList.filterCell(kv1)); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java index c01614cb30a8..6f15883c08f7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java @@ -17,8 +17,9 @@ */ package org.apache.hadoop.hbase.filter; +import static org.junit.jupiter.api.Assertions.assertEquals; + import org.apache.hadoop.hbase.CompareOperator; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Put; @@ -30,14 +31,11 @@ import org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.AfterClass; -import org.junit.Assert; -import org.junit.BeforeClass; -import 
org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -45,32 +43,26 @@ * Tests filter Lists in ways that rely on a MiniCluster. Where possible, favor tests in * TestFilterList and TestFilterFromRegionSide instead. */ -@Category({ MediumTests.class, FilterTests.class }) +@Tag(MediumTests.TAG) +@Tag(FilterTests.TAG) public class TestFilterListOnMini { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestFilterListOnMini.class); - private static final Logger LOG = LoggerFactory.getLogger(TestFilterListOnMini.class); private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); - @Rule - public TestName name = new TestName(); - - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { TEST_UTIL.startMiniCluster(1); } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } @Test - public void testFiltersWithOR() throws Exception { - TableName tn = TableName.valueOf(name.getMethodName()); + public void testFiltersWithOR(TestInfo testInfo) throws Exception { + TableName tn = TableName.valueOf(testInfo.getTestMethod().get().getName()); Table table = TEST_UTIL.createTable(tn, new String[] { "cf1", "cf2" }); byte[] CF1 = Bytes.toBytes("cf1"); byte[] CF2 = Bytes.toBytes("cf2"); @@ -90,7 +82,7 @@ public void testFiltersWithOR() throws Exception { ResultScanner scanner = table.getScanner(scan); LOG.info("Filter list: " + filterList); for (Result rr = scanner.next(); rr != null; rr = scanner.next()) { - Assert.assertEquals(2, rr.size()); + assertEquals(2, rr.size()); } } @@ -98,8 
+90,8 @@ public void testFiltersWithOR() throws Exception { * Test case for HBASE-21620 */ @Test - public void testColumnPrefixFilterConcatWithOR() throws Exception { - TableName tn = TableName.valueOf(name.getMethodName()); + public void testColumnPrefixFilterConcatWithOR(TestInfo testInfo) throws Exception { + TableName tn = TableName.valueOf(testInfo.getTestMethod().get().getName()); byte[] cf1 = Bytes.toBytes("f1"); byte[] row = Bytes.toBytes("row"); byte[] value = Bytes.toBytes("value"); @@ -127,7 +119,7 @@ public void testColumnPrefixFilterConcatWithOR() throws Exception { cellCount += result.listCells().size(); resultCount++; } - Assert.assertEquals(resultCount, 1); - Assert.assertEquals(cellCount, 4); + assertEquals(resultCount, 1); + assertEquals(cellCount, 4); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOrOperatorWithBlkCnt.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOrOperatorWithBlkCnt.java index caa9998e3bb1..1524e50ed28c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOrOperatorWithBlkCnt.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOrOperatorWithBlkCnt.java @@ -17,13 +17,12 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.IOException; import java.util.ArrayList; import java.util.List; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Durability; @@ -36,13 +35,11 @@ import org.apache.hadoop.hbase.io.hfile.HFile; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import 
org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -50,13 +47,9 @@ * This test is for the optimization added in HBASE-15243. * FilterList with two MultiRowRangeFilter's is constructed using Operator.MUST_PASS_ONE. */ -@Category(MediumTests.class) +@Tag(MediumTests.TAG) public class TestFilterListOrOperatorWithBlkCnt { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestFilterListOrOperatorWithBlkCnt.class); - private final static HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); private static final Logger LOG = LoggerFactory.getLogger(TestFilterListOrOperatorWithBlkCnt.class); @@ -66,12 +59,7 @@ public class TestFilterListOrOperatorWithBlkCnt { private TableName tableName; private int numRows = 10000; - @Rule - public TestName name = new TestName(); - - /** - * */ - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { long blkSize = 4096; /* @@ -83,9 +71,7 @@ public static void setUpBeforeClass() throws Exception { TEST_UTIL.startMiniCluster(); } - /** - * */ - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } @@ -95,8 +81,9 @@ private static long getBlkAccessCount() { } @Test - public void testMultiRowRangeWithFilterListOrOperatorWithBlkCnt() throws IOException { - tableName = TableName.valueOf(name.getMethodName()); + public void testMultiRowRangeWithFilterListOrOperatorWithBlkCnt(TestInfo testInfo) + throws IOException { + tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); Table ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE); generateRows(numRows, ht, 
family, qf, value); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterSerialization.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterSerialization.java index e3a13e5ec7ac..b3f33579c28f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterSerialization.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterSerialization.java @@ -17,22 +17,22 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.Collections; import java.util.LinkedList; import java.util.List; +import java.util.stream.Stream; import org.apache.commons.io.IOUtils; import org.apache.commons.text.StringSubstitutor; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CompareOperator; import org.apache.hadoop.hbase.DoNotRetryIOException; -import org.apache.hadoop.hbase.HBaseClassTestRule; -import org.apache.hadoop.hbase.HBaseCommonTestingUtil; import org.apache.hadoop.hbase.HBaseConfiguration; +import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.filter.MultiRowRangeFilter.RowRange; import org.apache.hadoop.hbase.testclassification.FilterTests; @@ -41,53 +41,50 @@ import org.apache.hadoop.hbase.util.ClassLoaderTestHelper; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.Pair; -import org.junit.AfterClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.Tag; +import 
org.junit.jupiter.api.TestTemplate; +import org.junit.jupiter.params.provider.Arguments; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; -@RunWith(Parameterized.class) -@Category({ FilterTests.class, MediumTests.class }) +@Tag(FilterTests.TAG) +@Tag(MediumTests.TAG) +@HBaseParameterizedTestTemplate(name = "{index}: allowFastReflectionFallthrough={0}") public class TestFilterSerialization { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestFilterSerialization.class); - - @Parameterized.Parameter(0) public boolean allowFastReflectionFallthrough; - @Parameterized.Parameters(name = "{index}: allowFastReflectionFallthrough={0}") - public static Iterable data() { - return HBaseCommonTestingUtil.BOOLEAN_PARAMETERIZED; + public TestFilterSerialization(boolean allowFastReflectionFallthrough) { + this.allowFastReflectionFallthrough = allowFastReflectionFallthrough; + } + + public static Stream parameters() { + return Stream.of(Arguments.of(true), Arguments.of(false)); } - @AfterClass + @AfterAll public static void afterClass() throws Exception { // set back to true so that it doesn't affect any other tests ProtobufUtil.setAllowFastReflectionFallthrough(true); } - @Test + @TestTemplate public void testColumnCountGetFilter() throws Exception { ColumnCountGetFilter columnCountGetFilter = new ColumnCountGetFilter(1); assertTrue(columnCountGetFilter.areSerializedFieldsEqual( ProtobufUtil.toFilter(ProtobufUtil.toFilter(columnCountGetFilter)))); } - @Test + @TestTemplate public void testColumnPaginationFilter() throws Exception { ColumnPaginationFilter columnPaginationFilter = new ColumnPaginationFilter(1, 7); assertTrue(columnPaginationFilter.areSerializedFieldsEqual( ProtobufUtil.toFilter(ProtobufUtil.toFilter(columnPaginationFilter)))); } - @Test + @TestTemplate public void testColumnPrefixFilter() throws Exception { // empty string 
ColumnPrefixFilter columnPrefixFilter = new ColumnPrefixFilter(Bytes.toBytes("")); @@ -100,7 +97,7 @@ public void testColumnPrefixFilter() throws Exception { .areSerializedFieldsEqual(ProtobufUtil.toFilter(ProtobufUtil.toFilter(columnPrefixFilter)))); } - @Test + @TestTemplate public void testColumnRangeFilter() throws Exception { // null columns ColumnRangeFilter columnRangeFilter = new ColumnRangeFilter(null, true, null, false); @@ -113,7 +110,7 @@ public void testColumnRangeFilter() throws Exception { .areSerializedFieldsEqual(ProtobufUtil.toFilter(ProtobufUtil.toFilter(columnRangeFilter)))); } - @Test + @TestTemplate public void testDependentColumnFilter() throws Exception { // null column qualifier/family DependentColumnFilter dependentColumnFilter = new DependentColumnFilter(null, null); @@ -128,7 +125,7 @@ public void testDependentColumnFilter() throws Exception { ProtobufUtil.toFilter(ProtobufUtil.toFilter(dependentColumnFilter)))); } - @Test + @TestTemplate public void testFamilyFilter() throws Exception { FamilyFilter familyFilter = new FamilyFilter(CompareOperator.EQUAL, new BinaryPrefixComparator(Bytes.toBytes("testValueOne"))); @@ -136,7 +133,7 @@ public void testFamilyFilter() throws Exception { .areSerializedFieldsEqual(ProtobufUtil.toFilter(ProtobufUtil.toFilter(familyFilter)))); } - @Test + @TestTemplate public void testFilterList() throws Exception { // empty filter list FilterList filterList = new FilterList(new LinkedList<>()); @@ -151,7 +148,7 @@ public void testFilterList() throws Exception { .areSerializedFieldsEqual(ProtobufUtil.toFilter(ProtobufUtil.toFilter(filterList)))); } - @Test + @TestTemplate public void testFilterWrapper() throws Exception { FilterWrapper filterWrapper = new FilterWrapper(new ColumnRangeFilter(Bytes.toBytes("e"), false, Bytes.toBytes("f"), true)); @@ -159,14 +156,14 @@ public void testFilterWrapper() throws Exception { .areSerializedFieldsEqual(ProtobufUtil.toFilter(ProtobufUtil.toFilter(filterWrapper)))); } - @Test 
+ @TestTemplate public void testFirstKeyOnlyFilter() throws Exception { FirstKeyOnlyFilter firstKeyOnlyFilter = new FirstKeyOnlyFilter(); assertTrue(firstKeyOnlyFilter .areSerializedFieldsEqual(ProtobufUtil.toFilter(ProtobufUtil.toFilter(firstKeyOnlyFilter)))); } - @Test + @TestTemplate public void testFuzzyRowFilter() throws Exception { LinkedList> fuzzyList = new LinkedList<>(); fuzzyList.add(new Pair<>(Bytes.toBytes("999"), new byte[] { 0, 0, 1 })); @@ -176,7 +173,7 @@ public void testFuzzyRowFilter() throws Exception { .areSerializedFieldsEqual(ProtobufUtil.toFilter(ProtobufUtil.toFilter(fuzzyRowFilter)))); } - @Test + @TestTemplate public void testInclusiveStopFilter() throws Exception { // InclusveStopFilter with null stopRowKey InclusiveStopFilter inclusiveStopFilter = new InclusiveStopFilter(null); @@ -189,7 +186,7 @@ public void testInclusiveStopFilter() throws Exception { .areSerializedFieldsEqual(ProtobufUtil.toFilter(ProtobufUtil.toFilter(inclusiveStopFilter)))); } - @Test + @TestTemplate public void testKeyOnlyFilter() throws Exception { // KeyOnlyFilter with lenAsVal KeyOnlyFilter keyOnlyFilter = new KeyOnlyFilter(true); @@ -202,7 +199,7 @@ public void testKeyOnlyFilter() throws Exception { .areSerializedFieldsEqual(ProtobufUtil.toFilter(ProtobufUtil.toFilter(keyOnlyFilter)))); } - @Test + @TestTemplate public void testMultipleColumnPrefixFilter() throws Exception { // empty array byte[][] prefixes = null; @@ -220,14 +217,14 @@ public void testMultipleColumnPrefixFilter() throws Exception { ProtobufUtil.toFilter(ProtobufUtil.toFilter(multipleColumnPrefixFilter)))); } - @Test + @TestTemplate public void testPageFilter() throws Exception { PageFilter pageFilter = new PageFilter(6); assertTrue(pageFilter .areSerializedFieldsEqual(ProtobufUtil.toFilter(ProtobufUtil.toFilter(pageFilter)))); } - @Test + @TestTemplate public void testPrefixFilter() throws Exception { // null prefix PrefixFilter prefixFilter = new PrefixFilter(null); @@ -240,7 +237,7 @@ 
public void testPrefixFilter() throws Exception { .areSerializedFieldsEqual(ProtobufUtil.toFilter(ProtobufUtil.toFilter(prefixFilter)))); } - @Test + @TestTemplate public void testQualifierFilter() throws Exception { QualifierFilter qualifierFilter = new QualifierFilter(CompareOperator.EQUAL, new NullComparator()); @@ -248,14 +245,14 @@ public void testQualifierFilter() throws Exception { .areSerializedFieldsEqual(ProtobufUtil.toFilter(ProtobufUtil.toFilter(qualifierFilter)))); } - @Test + @TestTemplate public void testRandomRowFilter() throws Exception { RandomRowFilter randomRowFilter = new RandomRowFilter((float) 0.1); assertTrue(randomRowFilter .areSerializedFieldsEqual(ProtobufUtil.toFilter(ProtobufUtil.toFilter(randomRowFilter)))); } - @Test + @TestTemplate public void testRowFilter() throws Exception { RowFilter rowFilter = new RowFilter(CompareOperator.EQUAL, new SubstringComparator("testRowFilter")); @@ -263,7 +260,7 @@ public void testRowFilter() throws Exception { rowFilter.areSerializedFieldsEqual(ProtobufUtil.toFilter(ProtobufUtil.toFilter(rowFilter)))); } - @Test + @TestTemplate public void testSingleColumnValueExcludeFilter() throws Exception { // null family/column SingleColumnValueExcludeFilter SingleColumnValueExcludeFilter singleColumnValueExcludeFilter = @@ -279,7 +276,7 @@ public void testSingleColumnValueExcludeFilter() throws Exception { ProtobufUtil.toFilter(ProtobufUtil.toFilter(singleColumnValueExcludeFilter)))); } - @Test + @TestTemplate public void testSingleColumnValueFilter() throws Exception { // null family/column SingleColumnValueFilter SingleColumnValueFilter singleColumnValueFilter = @@ -294,14 +291,14 @@ public void testSingleColumnValueFilter() throws Exception { ProtobufUtil.toFilter(ProtobufUtil.toFilter(singleColumnValueFilter)))); } - @Test + @TestTemplate public void testSkipFilter() throws Exception { SkipFilter skipFilter = new SkipFilter(new PageFilter(6)); assertTrue(skipFilter 
.areSerializedFieldsEqual(ProtobufUtil.toFilter(ProtobufUtil.toFilter(skipFilter)))); } - @Test + @TestTemplate public void testTimestampsFilter() throws Exception { // Empty timestamp list TimestampsFilter timestampsFilter = new TimestampsFilter(new LinkedList<>()); @@ -317,7 +314,7 @@ public void testTimestampsFilter() throws Exception { .areSerializedFieldsEqual(ProtobufUtil.toFilter(ProtobufUtil.toFilter(timestampsFilter)))); } - @Test + @TestTemplate public void testValueFilter() throws Exception { ValueFilter valueFilter = new ValueFilter(CompareOperator.NO_OP, new BinaryComparator(Bytes.toBytes("testValueOne"))); @@ -325,7 +322,7 @@ public void testValueFilter() throws Exception { .areSerializedFieldsEqual(ProtobufUtil.toFilter(ProtobufUtil.toFilter(valueFilter)))); } - @Test + @TestTemplate public void testWhileMatchFilter() throws Exception { WhileMatchFilter whileMatchFilter = new WhileMatchFilter( new ColumnRangeFilter(Bytes.toBytes("c"), false, Bytes.toBytes("d"), true)); @@ -333,7 +330,7 @@ public void testWhileMatchFilter() throws Exception { .areSerializedFieldsEqual(ProtobufUtil.toFilter(ProtobufUtil.toFilter(whileMatchFilter)))); } - @Test + @TestTemplate public void testMultiRowRangeFilter() throws Exception { List ranges = new ArrayList<>(); ranges.add(new RowRange(Bytes.toBytes(30), true, Bytes.toBytes(40), false)); @@ -345,7 +342,7 @@ public void testMultiRowRangeFilter() throws Exception { .areSerializedFieldsEqual(ProtobufUtil.toFilter(ProtobufUtil.toFilter(multiRowRangeFilter)))); } - @Test + @TestTemplate public void testColumnValueFilter() throws Exception { ColumnValueFilter columnValueFilter = new ColumnValueFilter(Bytes.toBytes("family"), Bytes.toBytes("qualifier"), CompareOperator.EQUAL, Bytes.toBytes("value")); @@ -357,7 +354,7 @@ public void testColumnValueFilter() throws Exception { * Test that we can load and deserialize custom filters. 
Good to have generally, but also proves * that this still works after HBASE-27276 despite not going through our fast function caches. */ - @Test + @TestTemplate public void testCustomFilter() throws Exception { Filter baseFilter = new PrefixFilter("foo".getBytes()); FilterProtos.Filter filterProto = ProtobufUtil.toFilter(baseFilter); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java index e3b4cc32222c..d0baee34e436 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java @@ -17,16 +17,15 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; import java.io.IOException; import java.util.ArrayList; import java.util.List; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CompareOperator; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; @@ -36,23 +35,19 @@ import org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Test if 
Filter is incompatible with scan-limits */ -@Category({ FilterTests.class, LargeTests.class }) +@Tag(FilterTests.TAG) +@Tag(LargeTests.TAG) public class TestFilterWithScanLimits extends FilterTestingCluster { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestFilterWithScanLimits.class); - private static final Logger LOG = LoggerFactory.getLogger(TestFilterWithScanLimits.class); private static final TableName tableName = TableName.valueOf("scanWithLimit"); @@ -88,14 +83,15 @@ public void testScanWithLimit() { table.close(); } catch (Exception e) { // no correct result is expected - assertNotNull("No IncompatibleFilterException catched", e); + assertNotNull(e, "No IncompatibleFilterException catched"); } LOG.debug("check the fetched kv number"); - assertEquals("We should not get result(s) returned.", 0, kv_number); + assertEquals(0, kv_number, "We should not get result(s) returned."); } - @BeforeClass - public static void prepareData() { + @BeforeAll + public static void prepareData() throws Exception { + FilterTestingCluster.setUp(); try { createTable(tableName, columnFamily); Table table = openTable(tableName); @@ -116,7 +112,7 @@ public static void prepareData() { table.put(puts); table.close(); } catch (IOException e) { - assertNull("Exception found while putting data into table", e); + assertNull(e, "Exception found while putting data into table"); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java index dcc31dceb637..ba04ab923f0d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java @@ -17,10 +17,10 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static 
org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.ArrayList; @@ -29,7 +29,6 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.CompareOperator; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; @@ -50,11 +49,10 @@ import org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -62,13 +60,10 @@ * Test if the FilterWrapper retains the same semantics defined in the * {@link org.apache.hadoop.hbase.filter.Filter} */ -@Category({ FilterTests.class, MediumTests.class }) +@Tag(FilterTests.TAG) +@Tag(MediumTests.TAG) public class TestFilterWrapper { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestFilterWrapper.class); - private static final Logger LOG = LoggerFactory.getLogger(TestFilterWrapper.class); private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); @@ -102,8 +97,8 @@ public void testFilterWrapper() { for (Cell kv : result.listCells()) { LOG.debug(kv_number + ". 
kv: " + kv); kv_number++; - assertEquals("Returned row is not correct", Bytes.toString(CellUtil.cloneRow(kv)), - "row" + (row_number + 1)); + assertEquals("row" + (row_number + 1), Bytes.toString(CellUtil.cloneRow(kv)), + "Returned row is not correct"); } } @@ -111,17 +106,17 @@ public void testFilterWrapper() { table.close(); } catch (Exception e) { // no correct result is expected - assertNull("Exception happens in scan", e); + assertNull(e, "Exception happens in scan"); } LOG.debug("check the fetched kv number"); - assertEquals("We should get 8 results returned.", 8, kv_number); - assertEquals("We should get 2 rows returned", 2, row_number); + assertEquals(8, kv_number, "We should get 8 results returned."); + assertEquals(2, row_number, "We should get 2 rows returned"); } private static void prepareData() { try { Table table = connection.getTable(name); - assertTrue("Fail to create the table", admin.tableExists(name)); + assertTrue(admin.tableExists(name), "Fail to create the table"); List puts = new ArrayList<>(); // row1 => , , , @@ -144,21 +139,21 @@ private static void prepareData() { table.put(puts); table.close(); } catch (IOException e) { - assertNull("Exception found while putting data into table", e); + assertNull(e, "Exception found while putting data into table"); } } private static void createTable() { - assertNotNull("HBaseAdmin is not initialized successfully.", admin); + assertNotNull(admin, "HBaseAdmin is not initialized successfully."); if (admin != null) { TableDescriptor tableDescriptor = TableDescriptorBuilder.newBuilder(name) .setColumnFamily(ColumnFamilyDescriptorBuilder.of(Bytes.toBytes("f1"))).build(); try { admin.createTable(tableDescriptor); - assertTrue("Fail to create the table", admin.tableExists(name)); + assertTrue(admin.tableExists(name), "Fail to create the table"); } catch (IOException e) { - assertNull("Exception found while creating table", e); + assertNull(e, "Exception found while creating table"); } } } @@ -169,7 +164,7 @@ 
private static void deleteTable() { admin.disableTable(name); admin.deleteTable(name); } catch (IOException e) { - assertNull("Exception found deleting the table", e); + assertNull(e, "Exception found deleting the table"); } } } @@ -181,23 +176,23 @@ private static void initialize(Configuration conf) { connection = ConnectionFactory.createConnection(TestFilterWrapper.conf); admin = TEST_UTIL.getAdmin(); } catch (MasterNotRunningException e) { - assertNull("Master is not running", e); + assertNull(e, "Master is not running"); } catch (ZooKeeperConnectionException e) { - assertNull("Cannot connect to ZooKeeper", e); + assertNull(e, "Cannot connect to ZooKeeper"); } catch (IOException e) { - assertNull("Caught IOException", e); + assertNull(e, "Caught IOException"); } createTable(); prepareData(); } - @BeforeClass + @BeforeAll public static void setUp() throws Exception { TEST_UTIL.startMiniCluster(1); initialize(TEST_UTIL.getConfiguration()); } - @AfterClass + @AfterAll public static void tearDown() throws Exception { deleteTable(); connection.close(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFiltersWithBinaryComponentComparator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFiltersWithBinaryComponentComparator.java index 2e148e41c55c..8554d0199b03 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFiltersWithBinaryComponentComparator.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFiltersWithBinaryComponentComparator.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.ArrayList; @@ -27,7 +27,6 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.CompareOperator; -import org.apache.hadoop.hbase.HBaseClassTestRule; import 
org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Put; @@ -37,17 +36,15 @@ import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@Category(MediumTests.class) +@Tag(MediumTests.TAG) public class TestFiltersWithBinaryComponentComparator { /** @@ -55,10 +52,6 @@ public class TestFiltersWithBinaryComponentComparator { * The descrption on jira should also help you in understanding tests implemented in this class */ - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestFiltersWithBinaryComponentComparator.class); - private final static HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); private static final Logger LOG = LoggerFactory.getLogger(TestFiltersWithBinaryComponentComparator.class); @@ -70,23 +63,20 @@ public class TestFiltersWithBinaryComponentComparator { private int cOffset = 8; private int dOffset = 12; - @Rule - public TestName name = new TestName(); - - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { TEST_UTIL.startMiniCluster(); } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } @Test - public void testRowFilterWithBinaryComponentComparator() throws IOException { + public void testRowFilterWithBinaryComponentComparator(TestInfo testInfo) throws IOException { // SELECT * 
from table where a=1 and b > 10 and b < 20 and c > 90 and c < 100 and d=1 - tableName = TableName.valueOf(name.getMethodName()); + tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); Table ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE); generateRows(ht, family, qf); FilterList filterList = new FilterList(FilterList.Operator.MUST_PASS_ALL); @@ -105,9 +95,9 @@ public void testRowFilterWithBinaryComponentComparator() throws IOException { } @Test - public void testValueFilterWithBinaryComponentComparator() throws IOException { + public void testValueFilterWithBinaryComponentComparator(TestInfo testInfo) throws IOException { // SELECT * from table where value has 'y' at position 1 - tableName = TableName.valueOf(name.getMethodName()); + tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); Table ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE); generateRows(ht, family, qf); FilterList filterList = new FilterList(FilterList.Operator.MUST_PASS_ALL); @@ -123,10 +113,11 @@ public void testValueFilterWithBinaryComponentComparator() throws IOException { } @Test - public void testRowAndValueFilterWithBinaryComponentComparator() throws IOException { + public void testRowAndValueFilterWithBinaryComponentComparator(TestInfo testInfo) + throws IOException { // SELECT * from table where a=1 and b > 10 and b < 20 and c > 90 and c < 100 and d=1 // and value has 'y' at position 1" - tableName = TableName.valueOf(name.getMethodName()); + tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); Table ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE); generateRows(ht, family, qf); FilterList filterList = new FilterList(FilterList.Operator.MUST_PASS_ALL); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java index 
d9239f4e0c02..731991c7ab62 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.IOException; import java.nio.ByteBuffer; @@ -25,7 +25,6 @@ import java.util.List; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Durability; @@ -39,39 +38,27 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.Pair; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hbase.thirdparty.com.google.common.collect.Lists; -/** - */ -@Category({ FilterTests.class, MediumTests.class }) +@Tag(FilterTests.TAG) +@Tag(MediumTests.TAG) public class TestFuzzyRowAndColumnRangeFilter { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestFuzzyRowAndColumnRangeFilter.class); - private final static HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); private static final Logger LOG = 
LoggerFactory.getLogger(TestFuzzyRowAndColumnRangeFilter.class); - @Rule - public TestName name = new TestName(); - /** * @throws java.lang.Exception */ - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { TEST_UTIL.startMiniCluster(); } @@ -79,32 +66,16 @@ public static void setUpBeforeClass() throws Exception { /** * @throws java.lang.Exception */ - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } - /** - * @throws java.lang.Exception - */ - @Before - public void setUp() throws Exception { - // Nothing to do. - } - - /** - * @throws java.lang.Exception - */ - @After - public void tearDown() throws Exception { - // Nothing to do. - } - @Test - public void Test() throws Exception { + public void Test(TestInfo testInfo) throws Exception { String cf = "f"; - Table ht = TEST_UTIL.createTable(TableName.valueOf(name.getMethodName()), Bytes.toBytes(cf), - Integer.MAX_VALUE); + Table ht = TEST_UTIL.createTable(TableName.valueOf(testInfo.getTestMethod().get().getName()), + Bytes.toBytes(cf), Integer.MAX_VALUE); // 10 byte row key - (2 bytes 4 bytes 4 bytes) // 4 byte qualifier diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilter.java index d184bf2a3183..3a280d843046 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilter.java @@ -17,134 +17,129 @@ */ package org.apache.hadoop.hbase.filter; -import org.apache.hadoop.hbase.HBaseClassTestRule; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; + import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.testclassification.FilterTests; import 
org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.Assert; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ FilterTests.class, SmallTests.class }) +@Tag(FilterTests.TAG) +@Tag(SmallTests.TAG) public class TestFuzzyRowFilter { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestFuzzyRowFilter.class); - @Test public void testSatisfiesNoUnsafeForward() { - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.YES, + assertEquals(FuzzyRowFilter.SatisfiesCode.YES, FuzzyRowFilter.satisfiesNoUnsafe(false, new byte[] { 1, (byte) -128, 1, 0, 1 }, 0, 5, new byte[] { 1, 0, 1 }, new byte[] { 0, 1, 0 })); - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, + assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfiesNoUnsafe(false, new byte[] { 1, (byte) -128, 2, 0, 1 }, 0, 5, new byte[] { 1, 0, 1 }, new byte[] { 0, 1, 0 })); - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.YES, FuzzyRowFilter.satisfiesNoUnsafe(false, + assertEquals(FuzzyRowFilter.SatisfiesCode.YES, FuzzyRowFilter.satisfiesNoUnsafe(false, new byte[] { 1, 2, 1, 3, 3 }, 0, 5, new byte[] { 1, 2, 0, 3 }, new byte[] { 0, 0, 1, 0 })); - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, + assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfiesNoUnsafe(false, new byte[] { 1, 1, 1, 3, 0 }, // row to check 0, 5, new byte[] { 1, 2, 0, 3 }, // fuzzy row new byte[] { 0, 0, 1, 0 })); // mask - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, + assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfiesNoUnsafe(false, new byte[] { 1, 1, 1, 3, 0 }, 0, 5, new byte[] { 1, (byte) 245, 0, 3 }, new byte[] { 0, 0, 1, 0 })); - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, 
FuzzyRowFilter.satisfiesNoUnsafe( - false, new byte[] { 1, 2, 1, 0, 1 }, 0, 5, new byte[] { 0, 1, 2 }, new byte[] { 1, 0, 0 })); + assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfiesNoUnsafe(false, + new byte[] { 1, 2, 1, 0, 1 }, 0, 5, new byte[] { 0, 1, 2 }, new byte[] { 1, 0, 0 })); } @Test public void testSatisfiesForward() { - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.YES, FuzzyRowFilter.satisfies(false, + assertEquals(FuzzyRowFilter.SatisfiesCode.YES, FuzzyRowFilter.satisfies(false, new byte[] { 1, (byte) -128, 1, 0, 1 }, new byte[] { 1, 0, 1 }, new byte[] { -1, 0, -1 })); - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfies(false, + assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfies(false, new byte[] { 1, (byte) -128, 2, 0, 1 }, new byte[] { 1, 0, 1 }, new byte[] { -1, 0, -1 })); - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.YES, FuzzyRowFilter.satisfies(false, + assertEquals(FuzzyRowFilter.SatisfiesCode.YES, FuzzyRowFilter.satisfies(false, new byte[] { 1, 2, 1, 3, 3 }, new byte[] { 1, 2, 0, 3 }, new byte[] { -1, -1, 0, -1 })); - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, + assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfies(false, new byte[] { 1, 1, 1, 3, 0 }, // row to check new byte[] { 1, 2, 0, 3 }, // fuzzy row new byte[] { -1, -1, 0, -1 })); // mask - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, + assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfies(false, new byte[] { 1, 1, 1, 3, 0 }, new byte[] { 1, (byte) 245, 0, 3 }, new byte[] { -1, -1, 0, -1 })); - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfies(false, + assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfies(false, new byte[] { 1, 2, 1, 0, 1 }, new byte[] { 0, 1, 2 }, new byte[] { 0, -1, -1 })); } @Test public void testSatisfiesReverse() { - 
Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.YES, FuzzyRowFilter.satisfies(true, + assertEquals(FuzzyRowFilter.SatisfiesCode.YES, FuzzyRowFilter.satisfies(true, new byte[] { 1, (byte) -128, 1, 0, 1 }, new byte[] { 1, 0, 1 }, new byte[] { -1, 0, -1 })); - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfies(true, + assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfies(true, new byte[] { 1, (byte) -128, 2, 0, 1 }, new byte[] { 1, 0, 1 }, new byte[] { -1, 0, -1 })); - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfies(true, + assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfies(true, new byte[] { 2, 3, 1, 1, 1 }, new byte[] { 1, 0, 1 }, new byte[] { -1, 0, -1 })); - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.YES, FuzzyRowFilter.satisfies(true, + assertEquals(FuzzyRowFilter.SatisfiesCode.YES, FuzzyRowFilter.satisfies(true, new byte[] { 1, 2, 1, 3, 3 }, new byte[] { 1, 2, 0, 3 }, new byte[] { -1, -1, 0, -1 })); - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, + assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfies(true, new byte[] { 1, (byte) 245, 1, 3, 0 }, new byte[] { 1, 1, 0, 3 }, new byte[] { -1, -1, 0, -1 })); - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfies(true, + assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfies(true, new byte[] { 1, 3, 1, 3, 0 }, new byte[] { 1, 2, 0, 3 }, new byte[] { -1, -1, 0, -1 })); - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfies(true, + assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfies(true, new byte[] { 2, 1, 1, 1, 0 }, new byte[] { 1, 2, 0, 3 }, new byte[] { -1, -1, 0, -1 })); - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfies(true, + assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, 
FuzzyRowFilter.satisfies(true, new byte[] { 1, 2, 1, 0, 1 }, new byte[] { 0, 1, 2 }, new byte[] { 0, -1, -1 })); } @Test public void testSatisfiesNoUnsafeReverse() { - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.YES, + assertEquals(FuzzyRowFilter.SatisfiesCode.YES, FuzzyRowFilter.satisfiesNoUnsafe(true, new byte[] { 1, (byte) -128, 1, 0, 1 }, 0, 5, new byte[] { 1, 0, 1 }, new byte[] { 0, 1, 0 })); - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, + assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfiesNoUnsafe(true, new byte[] { 1, (byte) -128, 2, 0, 1 }, 0, 5, new byte[] { 1, 0, 1 }, new byte[] { 0, 1, 0 })); - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfiesNoUnsafe( - true, new byte[] { 2, 3, 1, 1, 1 }, 0, 5, new byte[] { 1, 0, 1 }, new byte[] { 0, 1, 0 })); + assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfiesNoUnsafe(true, + new byte[] { 2, 3, 1, 1, 1 }, 0, 5, new byte[] { 1, 0, 1 }, new byte[] { 0, 1, 0 })); - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.YES, FuzzyRowFilter.satisfiesNoUnsafe(true, + assertEquals(FuzzyRowFilter.SatisfiesCode.YES, FuzzyRowFilter.satisfiesNoUnsafe(true, new byte[] { 1, 2, 1, 3, 3 }, 0, 5, new byte[] { 1, 2, 0, 3 }, new byte[] { 0, 0, 1, 0 })); - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, + assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfiesNoUnsafe(true, new byte[] { 1, (byte) 245, 1, 3, 0 }, 0, 5, new byte[] { 1, 1, 0, 3 }, new byte[] { 0, 0, 1, 0 })); - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, - FuzzyRowFilter.satisfiesNoUnsafe(true, new byte[] { 1, 3, 1, 3, 0 }, 0, 5, - new byte[] { 1, 2, 0, 3 }, new byte[] { 0, 0, 1, 0 })); + assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfiesNoUnsafe(true, + new byte[] { 1, 3, 1, 3, 0 }, 0, 5, new byte[] { 1, 2, 0, 3 }, new byte[] { 0, 0, 1, 0 })); - 
Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, - FuzzyRowFilter.satisfiesNoUnsafe(true, new byte[] { 2, 1, 1, 1, 0 }, 0, 5, - new byte[] { 1, 2, 0, 3 }, new byte[] { 0, 0, 1, 0 })); + assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfiesNoUnsafe(true, + new byte[] { 2, 1, 1, 1, 0 }, 0, 5, new byte[] { 1, 2, 0, 3 }, new byte[] { 0, 0, 1, 0 })); - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfiesNoUnsafe( - true, new byte[] { 1, 2, 1, 0, 1 }, 0, 5, new byte[] { 0, 1, 2 }, new byte[] { 1, 0, 0 })); + assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfiesNoUnsafe(true, + new byte[] { 1, 2, 1, 0, 1 }, 0, 5, new byte[] { 0, 1, 2 }, new byte[] { 1, 0, 0 })); } @Test @@ -225,15 +220,15 @@ public void testGetNextForFuzzyRuleForward() { new byte[] { 1, 1 }); // expected next // No next for this one - Assert.assertNull(FuzzyRowFilter.getNextForFuzzyRule(new byte[] { 2, 3, 1, 1, 1 }, // row to - // check + assertNull(FuzzyRowFilter.getNextForFuzzyRule(new byte[] { 2, 3, 1, 1, 1 }, // row to + // check new byte[] { 1, 0, 1 }, // fuzzy row new byte[] { -1, 0, -1 })); // mask - Assert.assertNull(FuzzyRowFilter.getNextForFuzzyRule(new byte[] { 1, (byte) 245, 1, 3, 0 }, + assertNull(FuzzyRowFilter.getNextForFuzzyRule(new byte[] { 1, (byte) 245, 1, 3, 0 }, new byte[] { 1, 1, 0, 3 }, new byte[] { -1, -1, 0, -1 })); - Assert.assertNull(FuzzyRowFilter.getNextForFuzzyRule(new byte[] { 1, 3, 1, 3, 0 }, + assertNull(FuzzyRowFilter.getNextForFuzzyRule(new byte[] { 1, 3, 1, 3, 0 }, new byte[] { 1, 2, 0, 3 }, new byte[] { -1, -1, 0, -1 })); - Assert.assertNull(FuzzyRowFilter.getNextForFuzzyRule(new byte[] { 2, 1, 1, 1, 0 }, + assertNull(FuzzyRowFilter.getNextForFuzzyRule(new byte[] { 2, 1, 1, 1, 0 }, new byte[] { 1, 2, 0, 3 }, new byte[] { -1, -1, 0, -1 })); } @@ -333,7 +328,7 @@ public void testGetNextForFuzzyRuleReverse() { new byte[] { 1, 1, 2, 3 }); // expected next // no before cell 
than current which satisfies the fuzzy row -> null - Assert.assertNull(FuzzyRowFilter.getNextForFuzzyRule(true, new byte[] { 1, 1, 1, 3, 0 }, + assertNull(FuzzyRowFilter.getNextForFuzzyRule(true, new byte[] { 1, 1, 1, 3, 0 }, new byte[] { 1, 2, 0, 3 }, new byte[] { -1, -1, 0, -1 })); } @@ -342,6 +337,6 @@ private static void assertNext(boolean reverse, byte[] fuzzyRow, byte[] mask, by KeyValue kv = KeyValueUtil.createFirstOnRow(current); byte[] nextForFuzzyRule = FuzzyRowFilter.getNextForFuzzyRule(reverse, kv.getRowArray(), kv.getRowOffset(), kv.getRowLength(), fuzzyRow, mask); - Assert.assertEquals(Bytes.toStringBinary(expected), Bytes.toStringBinary(nextForFuzzyRule)); + assertEquals(Bytes.toStringBinary(expected), Bytes.toStringBinary(nextForFuzzyRule)); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.java index 8337009fbadc..a3b99a6db65f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; import java.io.IOException; import java.nio.ByteBuffer; @@ -30,7 +30,6 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.CompareOperator; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; @@ -47,35 +46,27 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import 
org.apache.hadoop.hbase.util.Pair; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hbase.thirdparty.com.google.common.collect.Lists; -@Category({ FilterTests.class, MediumTests.class }) +@Tag(FilterTests.TAG) +@Tag(MediumTests.TAG) public class TestFuzzyRowFilterEndToEnd { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestFuzzyRowFilterEndToEnd.class); - private static final Logger LOG = LoggerFactory.getLogger(TestFuzzyRowFilterEndToEnd.class); private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); private static final byte fuzzyValue = (byte) 63; - @Rule - public TestName name = new TestName(); - - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { Configuration conf = TEST_UTIL.getConfiguration(); conf.setInt("hbase.client.scanner.caching", 1000); @@ -87,19 +78,19 @@ public static void setUpBeforeClass() throws Exception { TEST_UTIL.startMiniCluster(); } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } // HBASE-15676 Test that fuzzy info of all fixed bits (0s) finds matching row. 
@Test - public void testAllFixedBits() throws IOException { + public void testAllFixedBits(TestInfo testInfo) throws IOException { String cf = "f"; String cq = "q"; - Table ht = TEST_UTIL.createTable(TableName.valueOf(name.getMethodName()), Bytes.toBytes(cf), - Integer.MAX_VALUE); + String name = testInfo.getTestMethod().get().getName(); + Table ht = TEST_UTIL.createTable(TableName.valueOf(name), Bytes.toBytes(cf), Integer.MAX_VALUE); // Load data String[] rows = new String[] { "\\x9C\\x00\\x044\\x00\\x00\\x00\\x00", "\\x9C\\x00\\x044\\x01\\x00\\x00\\x00", "\\x9C\\x00\\x044\\x00\\x01\\x00\\x00", @@ -138,16 +129,16 @@ public void testAllFixedBits() throws IOException { assertEquals(true, Arrays.equals(copyFuzzyKey, fuzzyKey)); assertEquals(true, Arrays.equals(copyMask, mask)); - TEST_UTIL.deleteTable(TableName.valueOf(name.getMethodName())); + TEST_UTIL.deleteTable(TableName.valueOf(name)); } @Test - public void testHBASE14782() throws IOException { + public void testHBASE14782(TestInfo testInfo) throws IOException { String cf = "f"; String cq = "q"; - Table ht = TEST_UTIL.createTable(TableName.valueOf(name.getMethodName()), Bytes.toBytes(cf), - Integer.MAX_VALUE); + String name = testInfo.getTestMethod().get().getName(); + Table ht = TEST_UTIL.createTable(TableName.valueOf(name), Bytes.toBytes(cf), Integer.MAX_VALUE); // Load data String[] rows = new String[] { "\\x9C\\x00\\x044\\x00\\x00\\x00\\x00", "\\x9C\\x00\\x044\\x01\\x00\\x00\\x00", @@ -183,14 +174,14 @@ public void testHBASE14782() throws IOException { total++; } assertEquals(rows.length, total); - TEST_UTIL.deleteTable(TableName.valueOf(name.getMethodName())); + TEST_UTIL.deleteTable(TableName.valueOf(name)); } @Test - public void testFilterList() throws Exception { + public void testFilterList(TestInfo testInfo) throws Exception { String cf = "f"; - Table ht = TEST_UTIL.createTable(TableName.valueOf(name.getMethodName()), Bytes.toBytes(cf), - Integer.MAX_VALUE); + Table ht = 
TEST_UTIL.createTable(TableName.valueOf(testInfo.getTestMethod().get().getName()), + Bytes.toBytes(cf), Integer.MAX_VALUE); // 10 byte row key - (2 bytes 4 bytes 4 bytes) // 4 byte qualifier @@ -287,7 +278,7 @@ private void runScanner(Table hTable, int expectedSize, Filter filter1, Filter f } @Test - public void testHBASE26967() throws IOException { + public void testHBASE26967(TestInfo testInfo) throws IOException { byte[] row1 = Bytes.toBytes("1"); byte[] row2 = Bytes.toBytes("2"); String cf1 = "f1"; @@ -295,8 +286,8 @@ public void testHBASE26967() throws IOException { String cq1 = "col1"; String cq2 = "col2"; - Table ht = - TEST_UTIL.createTable(TableName.valueOf(name.getMethodName()), new String[] { cf1, cf2 }); + String name = testInfo.getTestMethod().get().getName(); + Table ht = TEST_UTIL.createTable(TableName.valueOf(name), new String[] { cf1, cf2 }); // Put data List puts = Lists.newArrayList(); @@ -336,15 +327,16 @@ public void testHBASE26967() throws IOException { // Only one row who's rowKey=1 assertNull(scanner.next()); - TEST_UTIL.deleteTable(TableName.valueOf(name.getMethodName())); + TEST_UTIL.deleteTable(TableName.valueOf(name)); } @Test - public void testHBASE28634() throws IOException { + public void testHBASE28634(TestInfo testInfo) throws IOException { final String CF = "f"; final String CQ = "name"; - Table ht = TEST_UTIL.createTable(TableName.valueOf(name.getMethodName()), Bytes.toBytes(CF)); + String name = testInfo.getTestMethod().get().getName(); + Table ht = TEST_UTIL.createTable(TableName.valueOf(name), Bytes.toBytes(CF)); // Put data List puts = Lists.newArrayList(); @@ -399,6 +391,6 @@ public void testHBASE28634() throws IOException { assertEquals(2, actualRowsList.size()); - TEST_UTIL.deleteTable(TableName.valueOf(name.getMethodName())); + TEST_UTIL.deleteTable(TableName.valueOf(name)); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEndLarge.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEndLarge.java index f061c0ec01fb..f872cbe3b165 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEndLarge.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEndLarge.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.IOException; import java.nio.ByteBuffer; @@ -25,7 +25,6 @@ import java.util.List; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; @@ -41,21 +40,17 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.Pair; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@Category({ FilterTests.class, LargeTests.class }) +@Tag(FilterTests.TAG) +@Tag(LargeTests.TAG) public class TestFuzzyRowFilterEndToEndLarge { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestFuzzyRowFilterEndToEndLarge.class); - private static final Logger LOG = LoggerFactory.getLogger(TestFuzzyRowFilterEndToEndLarge.class); private final static HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); @@ -70,7 +65,7 @@ public class TestFuzzyRowFilterEndToEndLarge { private static String table = "TestFuzzyRowFilterEndToEndLarge"; - 
@BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { Configuration conf = TEST_UTIL.getConfiguration(); conf.setInt("hbase.client.scanner.caching", 1000); @@ -82,7 +77,7 @@ public static void setUpBeforeClass() throws Exception { TEST_UTIL.startMiniCluster(); } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInclusiveStopFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInclusiveStopFilter.java index 06fed4dc7fff..2ca0284d2147 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInclusiveStopFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInclusiveStopFilter.java @@ -17,36 +17,31 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * Tests the inclusive stop row filter */ -@Category({ FilterTests.class, SmallTests.class }) +@Tag(FilterTests.TAG) +@Tag(SmallTests.TAG) public class TestInclusiveStopFilter { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestInclusiveStopFilter.class); - private final byte[] STOP_ROW = Bytes.toBytes("stop_row"); private final 
byte[] GOOD_ROW = Bytes.toBytes("good_row"); private final byte[] PAST_STOP_ROW = Bytes.toBytes("zzzzzz"); Filter mainFilter; - @Before + @BeforeEach public void setUp() throws Exception { mainFilter = new InclusiveStopFilter(STOP_ROW); } @@ -75,15 +70,15 @@ public void testSerialization() throws Exception { } private void stopRowTests(Filter filter) throws Exception { - assertFalse("Filtering on " + Bytes.toString(GOOD_ROW), - filter.filterRowKey(KeyValueUtil.createFirstOnRow(GOOD_ROW))); - assertFalse("Filtering on " + Bytes.toString(STOP_ROW), - filter.filterRowKey(KeyValueUtil.createFirstOnRow(STOP_ROW))); - assertTrue("Filtering on " + Bytes.toString(PAST_STOP_ROW), - filter.filterRowKey(KeyValueUtil.createFirstOnRow(PAST_STOP_ROW))); + assertFalse(filter.filterRowKey(KeyValueUtil.createFirstOnRow(GOOD_ROW)), + "Filtering on " + Bytes.toString(GOOD_ROW)); + assertFalse(filter.filterRowKey(KeyValueUtil.createFirstOnRow(STOP_ROW)), + "Filtering on " + Bytes.toString(STOP_ROW)); + assertTrue(filter.filterRowKey(KeyValueUtil.createFirstOnRow(PAST_STOP_ROW)), + "Filtering on " + Bytes.toString(PAST_STOP_ROW)); - assertTrue("FilterAllRemaining", filter.filterAllRemaining()); - assertFalse("FilterNotNull", filter.filterRow()); + assertTrue(filter.filterAllRemaining(), "FilterAllRemaining"); + assertFalse(filter.filterRow(), "FilterNotNull"); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInvocationRecordFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInvocationRecordFilter.java index fbaba370d176..683c291a116e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInvocationRecordFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInvocationRecordFilter.java @@ -17,10 +17,11 @@ */ package org.apache.hadoop.hbase.filter; +import static org.junit.jupiter.api.Assertions.assertTrue; + import java.util.ArrayList; import java.util.List; import 
org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.TableName; @@ -38,24 +39,19 @@ import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.wal.WAL; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * Test the invocation logic of the filters. A filter must be invoked only for the columns that are * requested for. */ -@Category({ FilterTests.class, SmallTests.class }) +@Tag(FilterTests.TAG) +@Tag(SmallTests.TAG) public class TestInvocationRecordFilter { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestInvocationRecordFilter.class); - private static final byte[] TABLE_NAME_BYTES = Bytes.toBytes("invocationrecord"); private static final byte[] FAMILY_NAME_BYTES = Bytes.toBytes("mycf"); @@ -66,7 +62,7 @@ public class TestInvocationRecordFilter { private final static HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); private HRegion region; - @Before + @BeforeEach public void setUp() throws Exception { TableDescriptor htd = TableDescriptorBuilder.newBuilder(TableName.valueOf(TABLE_NAME_BYTES)) .setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY_NAME_BYTES)).build(); @@ -144,12 +140,11 @@ public void verifyInvocationResults(Integer[] selectQualifiers, Integer[] expect temp.clear(); } actualValues.addAll(temp); - Assert.assertTrue( - "Actual values " + actualValues + " differ from the expected values:" + expectedValues, - expectedValues.equals(actualValues)); + 
assertTrue(expectedValues.equals(actualValues), + "Actual values " + actualValues + " differ from the expected values:" + expectedValues); } - @After + @AfterEach public void tearDown() throws Exception { WAL wal = ((HRegion) region).getWAL(); ((HRegion) region).close(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultiRowRangeFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultiRowRangeFilter.java index a2388736d4a7..9d1de84108dd 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultiRowRangeFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultiRowRangeFilter.java @@ -17,14 +17,15 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValueUtil; @@ -37,24 +38,17 @@ import org.apache.hadoop.hbase.filter.MultiRowRangeFilter.RowRange; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.AfterClass; -import org.junit.Assert; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.slf4j.Logger; import 
org.slf4j.LoggerFactory; -@Category(LargeTests.class) +@Tag(LargeTests.TAG) public class TestMultiRowRangeFilter { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestMultiRowRangeFilter.class); - private final static HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); private static final Logger LOG = LoggerFactory.getLogger(TestMultiRowRangeFilter.class); private byte[] family = Bytes.toBytes("family"); @@ -63,19 +57,12 @@ public class TestMultiRowRangeFilter { private TableName tableName; private int numRows = 100; - @Rule - public TestName name = new TestName(); - - /** - * */ - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { TEST_UTIL.startMiniCluster(); } - /** - * */ - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } @@ -206,20 +193,24 @@ public void testMergeAndSortWithEmptyStartRowAndStopRow() throws IOException { assertRangesEqual(expectedRanges, actualRanges); } - @Test(expected = IllegalArgumentException.class) + @Test public void testMultiRowRangeWithoutRange() throws IOException { List ranges = new ArrayList<>(); - new MultiRowRangeFilter(ranges); + assertThrows(IllegalArgumentException.class, () -> { + new MultiRowRangeFilter(ranges); + }); } - @Test(expected = IllegalArgumentException.class) + @Test public void testMultiRowRangeWithInvalidRange() throws IOException { List ranges = new ArrayList<>(); ranges.add(new RowRange(Bytes.toBytes(10), true, Bytes.toBytes(20), false)); // the start row larger than the stop row ranges.add(new RowRange(Bytes.toBytes(80), true, Bytes.toBytes(20), false)); ranges.add(new RowRange(Bytes.toBytes(30), true, Bytes.toBytes(70), false)); - new MultiRowRangeFilter(ranges); + assertThrows(IllegalArgumentException.class, () -> { + new MultiRowRangeFilter(ranges); + }); } @Test @@ -290,17 +281,16 @@ public void testMergeAndSortWithRowInclusive() throws IOException { public 
void assertRangesEqual(List expected, List actual) { assertEquals(expected.size(), actual.size()); for (int i = 0; i < expected.size(); i++) { - Assert.assertTrue(Bytes.equals(expected.get(i).getStartRow(), actual.get(i).getStartRow())); - Assert - .assertTrue(expected.get(i).isStartRowInclusive() == actual.get(i).isStartRowInclusive()); - Assert.assertTrue(Bytes.equals(expected.get(i).getStopRow(), actual.get(i).getStopRow())); - Assert.assertTrue(expected.get(i).isStopRowInclusive() == actual.get(i).isStopRowInclusive()); + assertTrue(Bytes.equals(expected.get(i).getStartRow(), actual.get(i).getStartRow())); + assertTrue(expected.get(i).isStartRowInclusive() == actual.get(i).isStartRowInclusive()); + assertTrue(Bytes.equals(expected.get(i).getStopRow(), actual.get(i).getStopRow())); + assertTrue(expected.get(i).isStopRowInclusive() == actual.get(i).isStopRowInclusive()); } } @Test - public void testMultiRowRangeFilterWithRangeOverlap() throws IOException { - tableName = TableName.valueOf(name.getMethodName()); + public void testMultiRowRangeFilterWithRangeOverlap(TestInfo testInfo) throws IOException { + tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); Table ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE); generateRows(numRows, ht, family, qf, value); @@ -327,8 +317,8 @@ public void testMultiRowRangeFilterWithRangeOverlap() throws IOException { } @Test - public void testMultiRowRangeFilterWithoutRangeOverlap() throws IOException { - tableName = TableName.valueOf(name.getMethodName()); + public void testMultiRowRangeFilterWithoutRangeOverlap(TestInfo testInfo) throws IOException { + tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); Table ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE); generateRows(numRows, ht, family, qf, value); @@ -354,8 +344,8 @@ public void testMultiRowRangeFilterWithoutRangeOverlap() throws IOException { } @Test - public void 
testMultiRowRangeFilterWithEmptyStartRow() throws IOException { - tableName = TableName.valueOf(name.getMethodName()); + public void testMultiRowRangeFilterWithEmptyStartRow(TestInfo testInfo) throws IOException { + tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); Table ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE); generateRows(numRows, ht, family, qf, value); Scan scan = new Scan(); @@ -376,8 +366,8 @@ public void testMultiRowRangeFilterWithEmptyStartRow() throws IOException { } @Test - public void testMultiRowRangeFilterWithEmptyStopRow() throws IOException { - tableName = TableName.valueOf(name.getMethodName()); + public void testMultiRowRangeFilterWithEmptyStopRow(TestInfo testInfo) throws IOException { + tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); Table ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE); generateRows(numRows, ht, family, qf, value); Scan scan = new Scan(); @@ -397,8 +387,8 @@ public void testMultiRowRangeFilterWithEmptyStopRow() throws IOException { } @Test - public void testMultiRowRangeFilterWithInclusive() throws IOException { - tableName = TableName.valueOf(name.getMethodName()); + public void testMultiRowRangeFilterWithInclusive(TestInfo testInfo) throws IOException { + tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); Table ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE); generateRows(numRows, ht, family, qf, value); @@ -425,8 +415,8 @@ public void testMultiRowRangeFilterWithInclusive() throws IOException { } @Test - public void testMultiRowRangeFilterWithExclusive() throws IOException { - tableName = TableName.valueOf(name.getMethodName()); + public void testMultiRowRangeFilterWithExclusive(TestInfo testInfo) throws IOException { + tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); TEST_UTIL.getConfiguration().setInt(HConstants.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD, 6000000); 
TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE); try (Table ht = TEST_UTIL.getConnection().getTableBuilder(tableName, null) @@ -453,8 +443,8 @@ public void testMultiRowRangeFilterWithExclusive() throws IOException { } @Test - public void testMultiRowRangeWithFilterListAndOperator() throws IOException { - tableName = TableName.valueOf(name.getMethodName()); + public void testMultiRowRangeWithFilterListAndOperator(TestInfo testInfo) throws IOException { + tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); Table ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE); generateRows(numRows, ht, family, qf, value); @@ -488,8 +478,8 @@ public void testMultiRowRangeWithFilterListAndOperator() throws IOException { } @Test - public void testMultiRowRangeWithFilterListOrOperator() throws IOException { - tableName = TableName.valueOf(name.getMethodName()); + public void testMultiRowRangeWithFilterListOrOperator(TestInfo testInfo) throws IOException { + tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); Table ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE); generateRows(numRows, ht, family, qf, value); @@ -525,8 +515,8 @@ public void testMultiRowRangeWithFilterListOrOperator() throws IOException { } @Test - public void testOneRowRange() throws IOException { - tableName = TableName.valueOf(name.getMethodName()); + public void testOneRowRange(TestInfo testInfo) throws IOException { + tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); Table ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE); generateRows(numRows, ht, family, qf, value); ArrayList rowRangesList = new ArrayList<>(); @@ -554,8 +544,8 @@ public void testOneRowRange() throws IOException { } @Test - public void testReverseMultiRowRangeFilterWithinTable() throws IOException { - tableName = TableName.valueOf(name.getMethodName()); + public void testReverseMultiRowRangeFilterWithinTable(TestInfo testInfo) 
throws IOException { + tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); Table ht = TEST_UTIL.createTable(tableName, family); generateRows(numRows, ht, family, qf, value); @@ -587,12 +577,12 @@ public void testReverseMultiRowRangeFilterWithinTable() throws IOException { } sb.append(observedValue); } - assertEquals("Saw results: " + sb.toString(), 22, results.size()); + assertEquals(22, results.size(), "Saw results: " + sb); } @Test - public void testReverseMultiRowRangeFilterIncludingMaxRow() throws IOException { - tableName = TableName.valueOf(name.getMethodName()); + public void testReverseMultiRowRangeFilterIncludingMaxRow(TestInfo testInfo) throws IOException { + tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); Table ht = TEST_UTIL.createTable(tableName, family); for (String rowkey : Arrays.asList("a", "b", "c", "d", "e", "f", "g", "h")) { byte[] row = Bytes.toBytes(rowkey); @@ -620,8 +610,8 @@ public void testReverseMultiRowRangeFilterIncludingMaxRow() throws IOException { } @Test - public void testReverseMultiRowRangeFilterIncludingMinRow() throws IOException { - tableName = TableName.valueOf(name.getMethodName()); + public void testReverseMultiRowRangeFilterIncludingMinRow(TestInfo testInfo) throws IOException { + tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); Table ht = TEST_UTIL.createTable(tableName, family); for (String rowkey : Arrays.asList("a", "b", "c", "d", "e", "f", "g", "h")) { byte[] row = Bytes.toBytes(rowkey); @@ -649,8 +639,9 @@ public void testReverseMultiRowRangeFilterIncludingMinRow() throws IOException { } @Test - public void testReverseMultiRowRangeFilterIncludingMinAndMaxRow() throws IOException { - tableName = TableName.valueOf(name.getMethodName()); + public void testReverseMultiRowRangeFilterIncludingMinAndMaxRow(TestInfo testInfo) + throws IOException { + tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); Table ht = 
TEST_UTIL.createTable(tableName, family); for (String rowkey : Arrays.asList("a", "b", "c", "d", "e", "f", "g", "h")) { byte[] row = Bytes.toBytes(rowkey); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultipleColumnPrefixFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultipleColumnPrefixFilter.java index 0367e5df56fe..94ec64a61652 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultipleColumnPrefixFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultipleColumnPrefixFilter.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.IOException; import java.util.ArrayList; @@ -27,7 +27,6 @@ import java.util.Map; import java.util.Set; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueTestUtil; @@ -46,29 +45,21 @@ import org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; -@Category({ FilterTests.class, MediumTests.class }) +@Tag(FilterTests.TAG) +@Tag(MediumTests.TAG) public class TestMultipleColumnPrefixFilter { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestMultipleColumnPrefixFilter.class); - private final static HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); - @Rule - public TestName name = new TestName(); - @Test - public 
void testMultipleColumnPrefixFilter() throws IOException { + public void testMultipleColumnPrefixFilter(TestInfo testInfo) throws IOException { String family = "Family"; - TableDescriptorBuilder tableDescriptorBuilder = - TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())); + TableDescriptorBuilder tableDescriptorBuilder = TableDescriptorBuilder + .newBuilder(TableName.valueOf(testInfo.getTestMethod().get().getName())); ColumnFamilyDescriptor columnFamilyDescriptor = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(family)).setMaxVersions(3).build(); tableDescriptorBuilder.setColumnFamily(columnFamilyDescriptor); @@ -129,11 +120,11 @@ public void testMultipleColumnPrefixFilter() throws IOException { } @Test - public void testMultipleColumnPrefixFilterWithManyFamilies() throws IOException { + public void testMultipleColumnPrefixFilterWithManyFamilies(TestInfo testInfo) throws IOException { String family1 = "Family1"; String family2 = "Family2"; - TableDescriptorBuilder tableDescriptorBuilder = - TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())); + TableDescriptorBuilder tableDescriptorBuilder = TableDescriptorBuilder + .newBuilder(TableName.valueOf(testInfo.getTestMethod().get().getName())); ColumnFamilyDescriptor columnFamilyDescriptor = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(family1)).setMaxVersions(3).build(); tableDescriptorBuilder.setColumnFamily(columnFamilyDescriptor); @@ -202,10 +193,11 @@ public void testMultipleColumnPrefixFilterWithManyFamilies() throws IOException } @Test - public void testMultipleColumnPrefixFilterWithColumnPrefixFilter() throws IOException { + public void testMultipleColumnPrefixFilterWithColumnPrefixFilter(TestInfo testInfo) + throws IOException { String family = "Family"; - TableDescriptorBuilder tableDescriptorBuilder = - TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())); + TableDescriptorBuilder tableDescriptorBuilder = 
TableDescriptorBuilder + .newBuilder(TableName.valueOf(testInfo.getTestMethod().get().getName())); ColumnFamilyDescriptor columnFamilyDescriptor = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(family)).build(); tableDescriptorBuilder.setColumnFamily(columnFamilyDescriptor); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestNullComparator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestNullComparator.java index 5b48fa59771c..cbbb8725cebd 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestNullComparator.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestNullComparator.java @@ -17,21 +17,17 @@ */ package org.apache.hadoop.hbase.filter; -import org.apache.hadoop.hbase.HBaseClassTestRule; +import static org.junit.jupiter.api.Assertions.assertEquals; + import org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.Assert; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ FilterTests.class, SmallTests.class }) +@Tag(FilterTests.TAG) +@Tag(SmallTests.TAG) public class TestNullComparator { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestNullComparator.class); - @Test public void testNullValue() { // given @@ -43,8 +39,8 @@ public void testNullValue() { int comp2 = comparator.compareTo(value, 5, 15); // then - Assert.assertEquals(0, comp1); - Assert.assertEquals(0, comp2); + assertEquals(0, comp1); + assertEquals(0, comp2); } @Test @@ -58,8 +54,8 @@ public void testNonNullValue() { int comp2 = comparator.compareTo(value, 1, 3); // then - Assert.assertEquals(1, comp1); - Assert.assertEquals(1, comp2); + assertEquals(1, comp1); + assertEquals(1, comp2); } @Test @@ -73,8 +69,8 @@ public void testEmptyValue() { 
int comp2 = comparator.compareTo(value, 1, 3); // then - Assert.assertEquals(1, comp1); - Assert.assertEquals(1, comp2); + assertEquals(1, comp1); + assertEquals(1, comp2); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestPageFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestPageFilter.java index 80591422a018..4caacba56e7e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestPageFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestPageFilter.java @@ -17,28 +17,23 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * Tests for the page filter */ -@Category({ FilterTests.class, SmallTests.class }) +@Tag(FilterTests.TAG) +@Tag(SmallTests.TAG) public class TestPageFilter { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestPageFilter.class); - static final int ROW_LIMIT = 3; /** diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestParseFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestParseFilter.java index 06edcd9a208a..d5a585dd6afc 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestParseFilter.java +++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestParseFilter.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.nio.charset.StandardCharsets; @@ -27,36 +27,31 @@ import java.util.List; import java.util.regex.Pattern; import org.apache.hadoop.hbase.CompareOperator; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * This class tests ParseFilter.java It tests the entire work flow from when a string is given by * the user and how it is parsed to construct the corresponding Filter object */ -@Category({ RegionServerTests.class, MediumTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(MediumTests.TAG) public class TestParseFilter { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestParseFilter.class); - ParseFilter f; Filter filter; - @Before + @BeforeEach public void setUp() throws Exception { f = new ParseFilter(); } - @After + @AfterEach public void tearDown() throws Exception { // Nothing to do. 
} diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestPrefixFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestPrefixFilter.java index 34e15cf920bd..d34d16c495bc 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestPrefixFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestPrefixFilter.java @@ -17,37 +17,32 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ FilterTests.class, SmallTests.class }) +@Tag(FilterTests.TAG) +@Tag(SmallTests.TAG) public class TestPrefixFilter { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestPrefixFilter.class); - Filter mainFilter; static final char FIRST_CHAR = 'a'; static final char LAST_CHAR = 'e'; static final String HOST_PREFIX = "org.apache.site-"; - @Before + @BeforeEach public 
void setUp() throws Exception { this.mainFilter = new PrefixFilter(Bytes.toBytes(HOST_PREFIX)); } @@ -81,14 +76,14 @@ private void prefixRowTests(Filter filter) throws Exception { private void prefixRowTests(Filter filter, boolean lastFilterAllRemaining) throws Exception { for (char c = FIRST_CHAR; c <= LAST_CHAR; c++) { byte[] t = createRow(c); - assertFalse("Failed with character " + c, - filter.filterRowKey(KeyValueUtil.createFirstOnRow(t))); + assertFalse(filter.filterRowKey(KeyValueUtil.createFirstOnRow(t)), + "Failed with character " + c); assertFalse(filter.filterAllRemaining()); } String yahooSite = "com.yahoo.www"; byte[] yahooSiteBytes = Bytes.toBytes(yahooSite); KeyValue yahooSiteCell = KeyValueUtil.createFirstOnRow(yahooSiteBytes); - assertFalse("Failed with character " + yahooSite, filter.filterRowKey(yahooSiteCell)); + assertFalse(filter.filterRowKey(yahooSiteCell), "Failed with character " + yahooSite); assertEquals(Filter.ReturnCode.SEEK_NEXT_USING_HINT, filter.filterCell(yahooSiteCell)); assertEquals(lastFilterAllRemaining, filter.filterAllRemaining()); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestQualifierFilterWithEmptyQualifier.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestQualifierFilterWithEmptyQualifier.java index 673c542a99a8..321c9cfa8408 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestQualifierFilterWithEmptyQualifier.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestQualifierFilterWithEmptyQualifier.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.ArrayList; @@ -27,7 +27,6 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import 
org.apache.hadoop.hbase.CompareOperator; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; @@ -44,33 +43,25 @@ import org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Test qualifierFilter with empty qualifier column */ -@Category({ FilterTests.class, SmallTests.class }) +@Tag(FilterTests.TAG) +@Tag(SmallTests.TAG) public class TestQualifierFilterWithEmptyQualifier { private final static Logger LOG = LoggerFactory.getLogger(TestQualifierFilterWithEmptyQualifier.class); - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestQualifierFilterWithEmptyQualifier.class); private final static HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); private HRegion region; - @Rule - public TestName name = new TestName(); - private static final byte[][] ROWS = { Bytes.toBytes("testRowOne-0"), Bytes.toBytes("testRowOne-1"), Bytes.toBytes("testRowOne-2"), Bytes.toBytes("testRowOne-3") }; private static final byte[] FAMILY = Bytes.toBytes("testFamily"); @@ -79,7 +70,7 @@ public class TestQualifierFilterWithEmptyQualifier { private static final byte[] VALUE = Bytes.toBytes("testValueOne"); private long numRows = (long) ROWS.length; - @Before + @BeforeEach public void setUp() throws Exception { TableDescriptor htd = 
TableDescriptorBuilder.newBuilder(TableName.valueOf("TestQualifierFilter")) @@ -102,7 +93,7 @@ public void setUp() throws Exception { this.region.flush(true); } - @After + @AfterEach public void tearDown() throws Exception { HBaseTestingUtil.closeRegionAndWAL(region); } @@ -147,12 +138,12 @@ private void verifyScanNoEarlyOut(Scan s, long expectedRows, long expectedKeys) if (results.isEmpty()) { break; } - assertTrue("Scanned too many rows! Only expected " + expectedRows - + " total but already scanned " + (i + 1), expectedRows > i); - assertEquals("Expected " + expectedKeys + " keys per row but " + "returned " + results.size(), - expectedKeys, results.size()); + assertTrue(expectedRows > i, "Scanned too many rows! Only expected " + expectedRows + + " total but already scanned " + (i + 1)); + assertEquals(expectedKeys, results.size(), + "Expected " + expectedKeys + " keys per row but " + "returned " + results.size()); results.clear(); } - assertEquals("Expected " + expectedRows + " rows but scanned " + i + " rows", expectedRows, i); + assertEquals(i, expectedRows, "Expected " + expectedRows + " rows but scanned " + i + " rows"); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestRandomRowFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestRandomRowFilter.java index 8454ab357aec..1758c888e971 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestRandomRowFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestRandomRowFilter.java @@ -17,28 +17,23 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.assertTrue; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.Before; 
-import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ FilterTests.class, SmallTests.class }) +@Tag(FilterTests.TAG) +@Tag(SmallTests.TAG) public class TestRandomRowFilter { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestRandomRowFilter.class); - protected RandomRowFilter quarterChanceFilter; - @Before + @BeforeEach public void setUp() throws Exception { quarterChanceFilter = new RandomRowFilter(0.25f); } @@ -59,7 +54,7 @@ public void testBasics() throws Exception { // since we're dealing with randomness, we must have a include an epsilon // tolerance. int epsilon = max / 100; - assertTrue("Roughly 25% should pass the filter", Math.abs(included - max / 4) < epsilon); + assertTrue(Math.abs(included - max / 4) < epsilon, "Roughly 25% should pass filter"); } /** @@ -69,8 +64,8 @@ public void testBasics() throws Exception { public void testSerialization() throws Exception { RandomRowFilter newFilter = serializationTest(quarterChanceFilter); // use epsilon float comparison - assertTrue("float should be equal", - Math.abs(newFilter.getChance() - quarterChanceFilter.getChance()) < 0.000001f); + assertTrue(Math.abs(newFilter.getChance() - quarterChanceFilter.getChance()) < 0.000001f, + "float should be equal"); } private RandomRowFilter serializationTest(RandomRowFilter filter) throws Exception { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestRegexComparator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestRegexComparator.java index f965153b05bf..734ef3d88b99 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestRegexComparator.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestRegexComparator.java @@ -17,25 +17,21 @@ */ package 
org.apache.hadoop.hbase.filter; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.regex.Pattern; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.filter.RegexStringComparator.EngineType; import org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ FilterTests.class, SmallTests.class }) +@Tag(FilterTests.TAG) +@Tag(SmallTests.TAG) public class TestRegexComparator { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestRegexComparator.class); - @Test public void testSerialization() throws Exception { // Default engine is the Java engine @@ -56,7 +52,7 @@ public void testJavaEngine() throws Exception { for (TestCase t : TEST_CASES) { boolean result = new RegexStringComparator(t.regex, t.flags, EngineType.JAVA) .compareTo(Bytes.toBytes(t.haystack)) == 0; - assertEquals("Regex '" + t.regex + "' failed test '" + t.haystack + "'", result, t.expected); + assertEquals(t.expected, result, "Regex '" + t.regex + "' failed test '" + t.haystack + "'"); } } @@ -65,7 +61,7 @@ public void testJoniEngine() throws Exception { for (TestCase t : TEST_CASES) { boolean result = new RegexStringComparator(t.regex, t.flags, EngineType.JONI) .compareTo(Bytes.toBytes(t.haystack)) == 0; - assertEquals("Regex '" + t.regex + "' failed test '" + t.haystack + "'", result, t.expected); + assertEquals(t.expected, result, "Regex '" + t.regex + "' failed test '" + t.haystack + "'"); } } @@ -146,5 +142,5 @@ public TestCase(String regex, int flags, String haystack, boolean expected) { new TestCase("[\\n-#]", 
"-", false), new TestCase("[\\043]+", "blahblah#blech", true), new TestCase("[\\042-\\044]+", "blahblah#blech", true), new TestCase("[\\u1234-\\u1236]", "blahblah\u1235blech", true), - new TestCase("[^\043]*", "blahblah#blech", true), new TestCase("(|f)?+", "foo", true), }; + new TestCase("[^\\043]*", "blahblah#blech", true), new TestCase("(|f)?+", "foo", true), }; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestScanRowPrefix.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestScanRowPrefix.java index a47b9ef3d1e5..d002b23b6e10 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestScanRowPrefix.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestScanRowPrefix.java @@ -17,12 +17,14 @@ */ package org.apache.hadoop.hbase.filter; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.fail; + import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.apache.commons.codec.binary.Hex; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; @@ -32,33 +34,30 @@ import org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.Assert; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Test if Scan.setStartStopRowForPrefixScan works as intended. 
*/ -@Category({ FilterTests.class, MediumTests.class }) +@Tag(FilterTests.TAG) +@Tag(MediumTests.TAG) public class TestScanRowPrefix extends FilterTestingCluster { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestScanRowPrefix.class); - private static final Logger LOG = LoggerFactory.getLogger(TestScanRowPrefix.class); - @Rule - public TestName name = new TestName(); + @BeforeAll + public static void setUpBeforeClass() throws Exception { + FilterTestingCluster.setUp(); + } @Test - public void testPrefixScanning() throws IOException { - final TableName tableName = TableName.valueOf(name.getMethodName()); + public void testPrefixScanning(TestInfo testInfo) throws IOException { + final TableName tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); createTable(tableName, "F"); Table table = openTable(tableName); @@ -218,10 +217,10 @@ private void verifyScanResult(Table table, Scan scan, List expectedKeys, + tableOfTwoListsOfByteArrays("Expected", expectedKeys, "Actual ", actualKeys); } - Assert.assertArrayEquals(fullMessage, expectedKeys.toArray(), actualKeys.toArray()); + assertArrayEquals(expectedKeys.toArray(), actualKeys.toArray(), fullMessage); } catch (IOException e) { e.printStackTrace(); - Assert.fail(); + fail(); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSeekHints.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSeekHints.java index 8aba60d6ac21..7fb74bb13b09 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSeekHints.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSeekHints.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; import java.io.IOException; import java.util.ArrayList; @@ -25,7 +25,6 @@ import org.apache.hadoop.conf.Configuration; import 
org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.PrivateCellUtil; @@ -40,15 +39,13 @@ import org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; - -@Category({ FilterTests.class, MediumTests.class }) +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; + +@Tag(FilterTests.TAG) +@Tag(MediumTests.TAG) public class TestSeekHints { private final static HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); @@ -57,14 +54,7 @@ public class TestSeekHints { private static String table = "t"; private static Table ht; - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestSeekHints.class); - - @Rule - public TestName name = new TestName(); - - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { Configuration conf = TEST_UTIL.getConfiguration(); conf.setInt("hbase.client.scanner.caching", 1000); @@ -85,7 +75,7 @@ public static void setUpBeforeClass() throws Exception { TEST_UTIL.flush(); } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueExcludeFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueExcludeFilter.java index 38f7ed465387..05f25bd73fe4 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueExcludeFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueExcludeFilter.java @@ -17,36 +17,31 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.ArrayList; import java.util.List; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CompareOperator; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * Tests for {@link SingleColumnValueExcludeFilter}. Because this filter extends * {@link SingleColumnValueFilter}, only the added functionality is tested. That is, method * filterCell(Cell). 
*/ -@Category({ FilterTests.class, SmallTests.class }) +@Tag(FilterTests.TAG) +@Tag(SmallTests.TAG) public class TestSingleColumnValueExcludeFilter { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestSingleColumnValueExcludeFilter.class); - private static final byte[] ROW = Bytes.toBytes("test"); private static final byte[] COLUMN_FAMILY = Bytes.toBytes("test"); private static final byte[] COLUMN_QUALIFIER = Bytes.toBytes("foo"); @@ -72,22 +67,22 @@ public void testFilterCell() throws Exception { filter.filterRowCells(kvs); - assertEquals("resultSize", 2, kvs.size()); - assertTrue("leftKV1", CellComparatorImpl.COMPARATOR.compare(kvs.get(0), c) == 0); - assertTrue("leftKV2", CellComparatorImpl.COMPARATOR.compare(kvs.get(1), c) == 0); - assertFalse("allRemainingWhenMatch", filter.filterAllRemaining()); + assertEquals(2, kvs.size(), "resultSize"); + assertTrue(CellComparatorImpl.COMPARATOR.compare(kvs.get(0), c) == 0, "leftKV1"); + assertTrue(CellComparatorImpl.COMPARATOR.compare(kvs.get(1), c) == 0, "leftKV2"); + assertFalse(filter.filterAllRemaining(), "allRemainingWhenMatch"); // A 'mismatch' situation filter.reset(); // INCLUDE expected because test column has not yet passed c = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER_2, VAL_1); - assertTrue("otherColumn", filter.filterCell(c) == Filter.ReturnCode.INCLUDE); + assertTrue(filter.filterCell(c) == Filter.ReturnCode.INCLUDE, "otherColumn"); // Test column will pass (wont match), expect NEXT_ROW c = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER, VAL_2); - assertTrue("testedMismatch", filter.filterCell(c) == Filter.ReturnCode.NEXT_ROW); + assertTrue(filter.filterCell(c) == Filter.ReturnCode.NEXT_ROW, "testedMismatch"); // After a mismatch (at least with LatestVersionOnly), subsequent columns are EXCLUDE c = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER_2, VAL_1); - assertTrue("otherColumn", filter.filterCell(c) == Filter.ReturnCode.NEXT_ROW); + 
assertTrue(filter.filterCell(c) == Filter.ReturnCode.NEXT_ROW, "otherColumn"); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueFilter.java index fa43f10c1feb..968b026d3e65 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueFilter.java @@ -17,8 +17,9 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.nio.ByteBuffer; @@ -26,26 +27,21 @@ import org.apache.hadoop.hbase.ByteBufferKeyValue; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CompareOperator; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * Tests the value filter */ -@Category({ FilterTests.class, SmallTests.class }) +@Tag(FilterTests.TAG) +@Tag(SmallTests.TAG) public class TestSingleColumnValueFilter { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestSingleColumnValueFilter.class); - private static final byte[] ROW = Bytes.toBytes("test"); private static final byte[] COLUMN_FAMILY = Bytes.toBytes("test"); 
private static final byte[] COLUMN_QUALIFIER = Bytes.toBytes("foo"); @@ -68,7 +64,7 @@ public class TestSingleColumnValueFilter { Filter regexFilter; Filter regexPatternFilter; - @Before + @BeforeEach public void setUp() throws Exception { basicFilter = basicFilterNew(); nullFilter = nullFilterNew(); @@ -107,129 +103,129 @@ public void testLongComparator() throws IOException { Filter filter = new SingleColumnValueFilter(COLUMN_FAMILY, COLUMN_QUALIFIER, CompareOperator.GREATER, new LongComparator(100L)); KeyValue cell = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER, Bytes.toBytes(1L)); - assertTrue("less than", filter.filterCell(cell) == Filter.ReturnCode.NEXT_ROW); + assertEquals(Filter.ReturnCode.NEXT_ROW, filter.filterCell(cell), "less than"); filter.reset(); byte[] buffer = cell.getBuffer(); Cell c = new ByteBufferKeyValue(ByteBuffer.wrap(buffer), 0, buffer.length); - assertTrue("less than", filter.filterCell(c) == Filter.ReturnCode.NEXT_ROW); + assertEquals(Filter.ReturnCode.NEXT_ROW, filter.filterCell(c), "less than"); filter.reset(); cell = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER, Bytes.toBytes(100L)); - assertTrue("Equals 100", filter.filterCell(cell) == Filter.ReturnCode.NEXT_ROW); + assertEquals(Filter.ReturnCode.NEXT_ROW, filter.filterCell(cell), "Equals 100"); filter.reset(); buffer = cell.getBuffer(); c = new ByteBufferKeyValue(ByteBuffer.wrap(buffer), 0, buffer.length); - assertTrue("Equals 100", filter.filterCell(c) == Filter.ReturnCode.NEXT_ROW); + assertEquals(Filter.ReturnCode.NEXT_ROW, filter.filterCell(c), "Equals 100"); filter.reset(); cell = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER, Bytes.toBytes(120L)); - assertTrue("include 120", filter.filterCell(cell) == Filter.ReturnCode.INCLUDE); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(cell), "include 120"); filter.reset(); buffer = cell.getBuffer(); c = new ByteBufferKeyValue(ByteBuffer.wrap(buffer), 0, buffer.length); - assertTrue("include 120", 
filter.filterCell(c) == Filter.ReturnCode.INCLUDE); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(c), "include 120"); } private void basicFilterTests(SingleColumnValueFilter filter) throws Exception { KeyValue cell = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER, VAL_2); - assertTrue("basicFilter1", filter.filterCell(cell) == Filter.ReturnCode.INCLUDE); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(cell), "basicFilter1"); byte[] buffer = cell.getBuffer(); Cell c = new ByteBufferKeyValue(ByteBuffer.wrap(buffer), 0, buffer.length); - assertTrue("basicFilter1", filter.filterCell(c) == Filter.ReturnCode.INCLUDE); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(c), "basicFilter1"); cell = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER, VAL_3); - assertTrue("basicFilter2", filter.filterCell(cell) == Filter.ReturnCode.INCLUDE); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(cell), "basicFilter2"); buffer = cell.getBuffer(); c = new ByteBufferKeyValue(ByteBuffer.wrap(buffer), 0, buffer.length); - assertTrue("basicFilter2", filter.filterCell(c) == Filter.ReturnCode.INCLUDE); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(c), "basicFilter2"); cell = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER, VAL_4); - assertTrue("basicFilter3", filter.filterCell(cell) == Filter.ReturnCode.INCLUDE); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(cell), "basicFilter3"); buffer = cell.getBuffer(); c = new ByteBufferKeyValue(ByteBuffer.wrap(buffer), 0, buffer.length); - assertTrue("basicFilter3", filter.filterCell(c) == Filter.ReturnCode.INCLUDE); - assertFalse("basicFilterNotNull", filter.filterRow()); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(c), "basicFilter3"); + assertFalse(filter.filterRow(), "basicFilterNotNull"); filter.reset(); cell = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER, VAL_1); - assertTrue("basicFilter4", filter.filterCell(cell) == Filter.ReturnCode.NEXT_ROW); 
+ assertEquals(Filter.ReturnCode.NEXT_ROW, filter.filterCell(cell), "basicFilter4"); buffer = cell.getBuffer(); c = new ByteBufferKeyValue(ByteBuffer.wrap(buffer), 0, buffer.length); - assertTrue("basicFilter4", filter.filterCell(c) == Filter.ReturnCode.NEXT_ROW); + assertEquals(Filter.ReturnCode.NEXT_ROW, filter.filterCell(c), "basicFilter4"); cell = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER, VAL_2); - assertTrue("basicFilter4", filter.filterCell(cell) == Filter.ReturnCode.NEXT_ROW); + assertEquals(Filter.ReturnCode.NEXT_ROW, filter.filterCell(cell), "basicFilter4"); buffer = cell.getBuffer(); c = new ByteBufferKeyValue(ByteBuffer.wrap(buffer), 0, buffer.length); - assertTrue("basicFilter4", filter.filterCell(c) == Filter.ReturnCode.NEXT_ROW); - assertFalse("basicFilterAllRemaining", filter.filterAllRemaining()); - assertTrue("basicFilterNotNull", filter.filterRow()); + assertEquals(Filter.ReturnCode.NEXT_ROW, filter.filterCell(c), "basicFilter4"); + assertFalse(filter.filterAllRemaining(), "basicFilterAllRemaining"); + assertTrue(filter.filterRow(), "basicFilterNotNull"); filter.reset(); filter.setLatestVersionOnly(false); cell = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER, VAL_1); - assertTrue("basicFilter5", filter.filterCell(cell) == Filter.ReturnCode.INCLUDE); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(cell), "basicFilter5"); buffer = cell.getBuffer(); c = new ByteBufferKeyValue(ByteBuffer.wrap(buffer), 0, buffer.length); - assertTrue("basicFilter5", filter.filterCell(c) == Filter.ReturnCode.INCLUDE); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(c), "basicFilter5"); cell = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER, VAL_2); - assertTrue("basicFilter5", filter.filterCell(cell) == Filter.ReturnCode.INCLUDE); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(cell), "basicFilter5"); buffer = cell.getBuffer(); c = new ByteBufferKeyValue(ByteBuffer.wrap(buffer), 0, buffer.length); - 
assertTrue("basicFilter5", filter.filterCell(c) == Filter.ReturnCode.INCLUDE); - assertFalse("basicFilterNotNull", filter.filterRow()); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(c), "basicFilter5"); + assertFalse(filter.filterRow(), "basicFilterNotNull"); } private void nullFilterTests(Filter filter) throws Exception { ((SingleColumnValueFilter) filter).setFilterIfMissing(true); KeyValue cell = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER, FULLSTRING_1); - assertTrue("null1", filter.filterCell(cell) == Filter.ReturnCode.INCLUDE); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(cell), "null1"); byte[] buffer = cell.getBuffer(); Cell c = new ByteBufferKeyValue(ByteBuffer.wrap(buffer), 0, buffer.length); - assertTrue("null1", filter.filterCell(c) == Filter.ReturnCode.INCLUDE); - assertFalse("null1FilterRow", filter.filterRow()); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(c), "null1"); + assertFalse(filter.filterRow(), "null1FilterRow"); filter.reset(); cell = new KeyValue(ROW, COLUMN_FAMILY, Bytes.toBytes("qual2"), FULLSTRING_2); - assertTrue("null2", filter.filterCell(cell) == Filter.ReturnCode.INCLUDE); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(cell), "null2"); buffer = cell.getBuffer(); c = new ByteBufferKeyValue(ByteBuffer.wrap(buffer), 0, buffer.length); - assertTrue("null2", filter.filterCell(c) == Filter.ReturnCode.INCLUDE); - assertTrue("null2FilterRow", filter.filterRow()); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(c), "null2"); + assertTrue(filter.filterRow(), "null2FilterRow"); } private void substrFilterTests(Filter filter) throws Exception { KeyValue cell = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER, FULLSTRING_1); - assertTrue("substrTrue", filter.filterCell(cell) == Filter.ReturnCode.INCLUDE); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(cell), "substrTrue"); byte[] buffer = cell.getBuffer(); Cell c = new 
ByteBufferKeyValue(ByteBuffer.wrap(buffer), 0, buffer.length); - assertTrue("substrTrue", filter.filterCell(c) == Filter.ReturnCode.INCLUDE); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(c), "substrTrue"); cell = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER, FULLSTRING_2); - assertTrue("substrFalse", filter.filterCell(cell) == Filter.ReturnCode.INCLUDE); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(cell), "substrFalse"); buffer = cell.getBuffer(); c = new ByteBufferKeyValue(ByteBuffer.wrap(buffer), 0, buffer.length); - assertTrue("substrFalse", filter.filterCell(c) == Filter.ReturnCode.INCLUDE); - assertFalse("substrFilterAllRemaining", filter.filterAllRemaining()); - assertFalse("substrFilterNotNull", filter.filterRow()); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(c), "substrFalse"); + assertFalse(filter.filterAllRemaining(), "substrFilterAllRemaining"); + assertFalse(filter.filterRow(), "substrFilterNotNull"); } private void regexFilterTests(Filter filter) throws Exception { KeyValue cell = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER, FULLSTRING_1); - assertTrue("regexTrue", filter.filterCell(cell) == Filter.ReturnCode.INCLUDE); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(cell), "regexTrue"); byte[] buffer = cell.getBuffer(); Cell c = new ByteBufferKeyValue(ByteBuffer.wrap(buffer), 0, buffer.length); - assertTrue("regexTrue", filter.filterCell(c) == Filter.ReturnCode.INCLUDE); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(c), "regexTrue"); cell = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER, FULLSTRING_2); - assertTrue("regexFalse", filter.filterCell(cell) == Filter.ReturnCode.INCLUDE); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(cell), "regexFalse"); buffer = cell.getBuffer(); c = new ByteBufferKeyValue(ByteBuffer.wrap(buffer), 0, buffer.length); - assertTrue("regexFalse", filter.filterCell(c) == Filter.ReturnCode.INCLUDE); - 
assertFalse("regexFilterAllRemaining", filter.filterAllRemaining()); - assertFalse("regexFilterNotNull", filter.filterRow()); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(c), "regexFalse"); + assertFalse(filter.filterAllRemaining(), "regexFilterAllRemaining"); + assertFalse(filter.filterRow(), "regexFilterNotNull"); } private void regexPatternFilterTests(Filter filter) throws Exception { KeyValue cell = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER, FULLSTRING_1); - assertTrue("regexTrue", filter.filterCell(cell) == Filter.ReturnCode.INCLUDE); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(cell), "regexTrue"); byte[] buffer = cell.getBuffer(); Cell c = new ByteBufferKeyValue(ByteBuffer.wrap(buffer), 0, buffer.length); - assertTrue("regexTrue", filter.filterCell(c) == Filter.ReturnCode.INCLUDE); - assertFalse("regexFilterAllRemaining", filter.filterAllRemaining()); - assertFalse("regexFilterNotNull", filter.filterRow()); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(c), "regexTrue"); + assertFalse(filter.filterAllRemaining(), "regexFilterAllRemaining"); + assertFalse(filter.filterRow(), "regexFilterNotNull"); } private Filter serializationTest(Filter filter) throws Exception { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/namequeues/TestNamedQueueRecorder.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/namequeues/TestNamedQueueRecorder.java index b0a2dfc1bab1..1f27d5fbe8ec 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/namequeues/TestNamedQueueRecorder.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/namequeues/TestNamedQueueRecorder.java @@ -17,6 +17,10 @@ */ package org.apache.hadoop.hbase.namequeues; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + import java.io.IOException; import java.lang.reflect.Constructor; import 
java.net.InetAddress; @@ -32,7 +36,6 @@ import java.util.stream.Collectors; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ExtendedCellScanner; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.ipc.RpcCall; @@ -42,10 +45,8 @@ import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.MediumTests; -import org.junit.Assert; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -65,13 +66,10 @@ /** * Tests for Online SlowLog Provider Service */ -@Category({ MasterTests.class, MediumTests.class }) +@Tag(MediumTests.TAG) +@Tag(MasterTests.TAG) public class TestNamedQueueRecorder { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestNamedQueueRecorder.class); - private static final Logger LOG = LoggerFactory.getLogger(TestNamedQueueRecorder.class); private static final HBaseTestingUtil HBASE_TESTING_UTILITY = new HBaseTestingUtil(); @@ -128,7 +126,7 @@ public void testOnlieSlowLogConsumption() throws Exception { AdminProtos.SlowLogResponseRequest.newBuilder().setLimit(15).build(); namedQueueRecorder.clearNamedQueue(NamedQueuePayload.NamedQueueEvent.SLOW_LOG); - Assert.assertEquals(getSlowLogPayloads(request).size(), 0); + assertEquals(getSlowLogPayloads(request).size(), 0); LOG.debug("Initially ringbuffer of Slow Log records is empty"); int i = 0; @@ -140,14 +138,14 @@ public void testOnlieSlowLogConsumption() throws Exception { namedQueueRecorder.addRecord(rpcLogDetails); } - Assert.assertNotEquals(-1, + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> 
getSlowLogPayloads(request).size() == 5)); List slowLogPayloads = getSlowLogPayloads(request); - Assert.assertTrue(confirmPayloadParams(0, 5, slowLogPayloads)); - Assert.assertTrue(confirmPayloadParams(1, 4, slowLogPayloads)); - Assert.assertTrue(confirmPayloadParams(2, 3, slowLogPayloads)); - Assert.assertTrue(confirmPayloadParams(3, 2, slowLogPayloads)); - Assert.assertTrue(confirmPayloadParams(4, 1, slowLogPayloads)); + assertTrue(confirmPayloadParams(0, 5, slowLogPayloads)); + assertTrue(confirmPayloadParams(1, 4, slowLogPayloads)); + assertTrue(confirmPayloadParams(2, 3, slowLogPayloads)); + assertTrue(confirmPayloadParams(3, 2, slowLogPayloads)); + assertTrue(confirmPayloadParams(4, 1, slowLogPayloads)); // add 2 more records for (; i < 7; i++) { @@ -156,10 +154,10 @@ public void testOnlieSlowLogConsumption() throws Exception { namedQueueRecorder.addRecord(rpcLogDetails); } - Assert.assertNotEquals(-1, + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> getSlowLogPayloads(request).size() == 7)); - Assert.assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { List slowLogPayloadsList = getSlowLogPayloads(request); return slowLogPayloadsList.size() == 7 && confirmPayloadParams(0, 7, slowLogPayloadsList) && confirmPayloadParams(5, 2, slowLogPayloadsList) @@ -173,10 +171,10 @@ && confirmPayloadParams(5, 2, slowLogPayloadsList) namedQueueRecorder.addRecord(rpcLogDetails); } - Assert.assertNotEquals(-1, + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> getSlowLogPayloads(request).size() == 8)); - Assert.assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { List slowLogPayloadsList = getSlowLogPayloads(request); // confirm ringbuffer is full return slowLogPayloadsList.size() == 8 && confirmPayloadParams(7, 3, slowLogPayloadsList) @@ -191,10 +189,10 @@ && confirmPayloadParams(0, 10, 
slowLogPayloadsList) namedQueueRecorder.addRecord(rpcLogDetails); } - Assert.assertNotEquals(-1, + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> getSlowLogPayloads(request).size() == 8)); - Assert.assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { List slowLogPayloadsList = getSlowLogPayloads(request); // confirm ringbuffer is full // and ordered events @@ -207,7 +205,7 @@ && confirmPayloadParams(2, 12, slowLogPayloadsList) AdminProtos.SlowLogResponseRequest largeLogRequest = AdminProtos.SlowLogResponseRequest.newBuilder().setLimit(15) .setLogType(AdminProtos.SlowLogResponseRequest.LogType.LARGE_LOG).build(); - Assert.assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { List slowLogPayloadsList = getSlowLogPayloads(largeLogRequest); // confirm ringbuffer is full // and ordered events @@ -217,7 +215,7 @@ && confirmPayloadParams(2, 12, slowLogPayloadsList) && confirmPayloadParams(3, 11, slowLogPayloadsList); })); - Assert.assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { boolean isRingBufferCleaned = namedQueueRecorder.clearNamedQueue(NamedQueuePayload.NamedQueueEvent.SLOW_LOG); @@ -252,7 +250,7 @@ public void testOnlineSlowLogWithHighRecords() throws Exception { AdminProtos.SlowLogResponseRequest request = AdminProtos.SlowLogResponseRequest.newBuilder().setLimit(14 * 11).build(); - Assert.assertEquals(getSlowLogPayloads(request).size(), 0); + assertEquals(getSlowLogPayloads(request).size(), 0); LOG.debug("Initially ringbuffer of Slow Log records is empty"); for (int i = 0; i < 14 * 11; i++) { @@ -262,10 +260,10 @@ public void testOnlineSlowLogWithHighRecords() throws Exception { } LOG.debug("Added 14 * 11 records, ringbuffer should only provide latest 14 records"); - Assert.assertNotEquals(-1, + assertNotEquals(-1, 
HBASE_TESTING_UTILITY.waitFor(3000, () -> getSlowLogPayloads(request).size() == 14)); - Assert.assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { List slowLogPayloads = getSlowLogPayloads(request); // confirm strict order of slow log payloads @@ -287,12 +285,12 @@ && confirmPayloadParams(12, 142, slowLogPayloads) boolean isRingBufferCleaned = namedQueueRecorder.clearNamedQueue(NamedQueuePayload.NamedQueueEvent.SLOW_LOG); - Assert.assertTrue(isRingBufferCleaned); + assertTrue(isRingBufferCleaned); LOG.debug("cleared the ringbuffer of Online Slow Log records"); List slowLogPayloads = getSlowLogPayloads(request); // confirm ringbuffer is empty - Assert.assertEquals(slowLogPayloads.size(), 0); + assertEquals(slowLogPayloads.size(), 0); } @Test @@ -306,14 +304,14 @@ public void testOnlineSlowLogWithDefaultDisableConfig() throws Exception { namedQueueRecorder = constructor.newInstance(conf); AdminProtos.SlowLogResponseRequest request = AdminProtos.SlowLogResponseRequest.newBuilder().build(); - Assert.assertEquals(getSlowLogPayloads(request).size(), 0); + assertEquals(getSlowLogPayloads(request).size(), 0); LOG.debug("Initially ringbuffer of Slow Log records is empty"); for (int i = 0; i < 300; i++) { RpcLogDetails rpcLogDetails = getRpcLogDetails("userName_" + (i + 1), "client_" + (i + 1), "class_" + (i + 1)); namedQueueRecorder.addRecord(rpcLogDetails); } - Assert.assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { List slowLogPayloads = getSlowLogPayloads(request); return slowLogPayloads.size() == 0; })); @@ -331,14 +329,14 @@ public void testOnlineSlowLogWithDisableConfig() throws Exception { AdminProtos.SlowLogResponseRequest request = AdminProtos.SlowLogResponseRequest.newBuilder().build(); - Assert.assertEquals(getSlowLogPayloads(request).size(), 0); + assertEquals(getSlowLogPayloads(request).size(), 0); 
LOG.debug("Initially ringbuffer of Slow Log records is empty"); for (int i = 0; i < 300; i++) { RpcLogDetails rpcLogDetails = getRpcLogDetails("userName_" + (i + 1), "client_" + (i + 1), "class_" + (i + 1)); namedQueueRecorder.addRecord(rpcLogDetails); } - Assert.assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { List slowLogPayloads = getSlowLogPayloads(request); return slowLogPayloads.size() == 0; })); @@ -356,7 +354,7 @@ public void testSlowLogFilters() throws Exception { AdminProtos.SlowLogResponseRequest request = AdminProtos.SlowLogResponseRequest.newBuilder() .setLimit(15).setUserName("userName_87").build(); - Assert.assertEquals(getSlowLogPayloads(request).size(), 0); + assertEquals(getSlowLogPayloads(request).size(), 0); LOG.debug("Initially ringbuffer of Slow Log records is empty"); @@ -367,17 +365,17 @@ public void testSlowLogFilters() throws Exception { } LOG.debug("Added 100 records, ringbuffer should only 1 record with matching filter"); - Assert.assertNotEquals(-1, + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> getSlowLogPayloads(request).size() == 1)); AdminProtos.SlowLogResponseRequest requestClient = AdminProtos.SlowLogResponseRequest .newBuilder().setLimit(15).setClientAddress("client_85").build(); - Assert.assertNotEquals(-1, + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> getSlowLogPayloads(requestClient).size() == 1)); AdminProtos.SlowLogResponseRequest requestSlowLog = AdminProtos.SlowLogResponseRequest.newBuilder().setLimit(15).build(); - Assert.assertNotEquals(-1, + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> getSlowLogPayloads(requestSlowLog).size() == 15)); } @@ -390,7 +388,7 @@ public void testSlowLogFilterWithClientAddress() throws Exception { namedQueueRecorder = constructor.newInstance(conf); AdminProtos.SlowLogResponseRequest request = AdminProtos.SlowLogResponseRequest.newBuilder().build(); - 
Assert.assertEquals(getSlowLogPayloads(request).size(), 0); + assertEquals(getSlowLogPayloads(request).size(), 0); String[] clientAddressArray = new String[] { "[127:1:1:1:1:1:1:1]:1", "[127:1:1:1:1:1:1:1]:2", "[127:1:1:1:1:1:1:1]:3", "127.0.0.1:1", "127.0.0.1:2" }; @@ -413,25 +411,25 @@ public void testSlowLogFilterWithClientAddress() throws Exception { AdminProtos.SlowLogResponseRequest.newBuilder() .setLogType(AdminProtos.SlowLogResponseRequest.LogType.LARGE_LOG) .setClientAddress("[127:1:1:1:1:1:1:1]:2").build(); - Assert.assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> getSlowLogPayloads(largeLogRequestIPv6WithPort).size() == 1)); AdminProtos.SlowLogResponseRequest largeLogRequestIPv6WithoutPort = AdminProtos.SlowLogResponseRequest.newBuilder() .setLogType(AdminProtos.SlowLogResponseRequest.LogType.LARGE_LOG) .setClientAddress("[127:1:1:1:1:1:1:1]").build(); - Assert.assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> getSlowLogPayloads(largeLogRequestIPv6WithoutPort).size() == 3)); AdminProtos.SlowLogResponseRequest largeLogRequestIPv4WithPort = AdminProtos.SlowLogResponseRequest.newBuilder() .setLogType(AdminProtos.SlowLogResponseRequest.LogType.LARGE_LOG) .setClientAddress("127.0.0.1:1").build(); - Assert.assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> getSlowLogPayloads(largeLogRequestIPv4WithPort).size() == 1)); AdminProtos.SlowLogResponseRequest largeLogRequestIPv4WithoutPort = AdminProtos.SlowLogResponseRequest.newBuilder() .setLogType(AdminProtos.SlowLogResponseRequest.LogType.LARGE_LOG) .setClientAddress("127.0.0.1").build(); - Assert.assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> getSlowLogPayloads(largeLogRequestIPv4WithoutPort).size() == 2)); } @@ -448,7 +446,7 @@ 
public void testConcurrentSlowLogEvents() throws Exception { AdminProtos.SlowLogResponseRequest largeLogRequest = AdminProtos.SlowLogResponseRequest.newBuilder().setLimit(500000) .setLogType(AdminProtos.SlowLogResponseRequest.LogType.LARGE_LOG).build(); - Assert.assertEquals(getSlowLogPayloads(request).size(), 0); + assertEquals(getSlowLogPayloads(request).size(), 0); LOG.debug("Initially ringbuffer of Slow Log records is empty"); for (int j = 0; j < 1000; j++) { @@ -465,9 +463,9 @@ public void testConcurrentSlowLogEvents() throws Exception { Uninterruptibles.sleepUninterruptibly(500, TimeUnit.MILLISECONDS); - Assert.assertNotEquals(-1, + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(5000, () -> getSlowLogPayloads(request).size() > 10000)); - Assert.assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(5000, + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(5000, () -> getSlowLogPayloads(largeLogRequest).size() > 10000)); } @@ -482,7 +480,7 @@ public void testSlowLargeLogEvents() throws Exception { AdminProtos.SlowLogResponseRequest request = AdminProtos.SlowLogResponseRequest.newBuilder().setLimit(14 * 11).build(); - Assert.assertEquals(getSlowLogPayloads(request).size(), 0); + assertEquals(getSlowLogPayloads(request).size(), 0); LOG.debug("Initially ringbuffer of Slow Log records is empty"); boolean isSlowLog; @@ -501,10 +499,10 @@ public void testSlowLargeLogEvents() throws Exception { } LOG.debug("Added 14 * 11 records, ringbuffer should only provide latest 14 records"); - Assert.assertNotEquals(-1, + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> getSlowLogPayloads(request).size() == 14)); - Assert.assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { List slowLogPayloads = getSlowLogPayloads(request); // confirm strict order of slow log payloads @@ -528,10 +526,10 @@ && confirmPayloadParams(12, 129, slowLogPayloads) 
AdminProtos.SlowLogResponseRequest.newBuilder().setLimit(14 * 11) .setLogType(AdminProtos.SlowLogResponseRequest.LogType.LARGE_LOG).build(); - Assert.assertNotEquals(-1, + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> getSlowLogPayloads(largeLogRequest).size() == 14)); - Assert.assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { List largeLogPayloads = getSlowLogPayloads(largeLogRequest); // confirm strict order of slow log payloads @@ -563,7 +561,7 @@ public void testSlowLogMixedFilters() throws Exception { AdminProtos.SlowLogResponseRequest request = AdminProtos.SlowLogResponseRequest.newBuilder() .setLimit(15).setUserName("userName_87").setClientAddress("client_88").build(); - Assert.assertEquals(getSlowLogPayloads(request).size(), 0); + assertEquals(getSlowLogPayloads(request).size(), 0); for (int i = 0; i < 100; i++) { RpcLogDetails rpcLogDetails = @@ -571,31 +569,31 @@ public void testSlowLogMixedFilters() throws Exception { namedQueueRecorder.addRecord(rpcLogDetails); } - Assert.assertNotEquals(-1, + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> getSlowLogPayloads(request).size() == 2)); AdminProtos.SlowLogResponseRequest request2 = AdminProtos.SlowLogResponseRequest.newBuilder() .setLimit(15).setUserName("userName_1").setClientAddress("client_2").build(); - Assert.assertEquals(0, getSlowLogPayloads(request2).size()); + assertEquals(0, getSlowLogPayloads(request2).size()); AdminProtos.SlowLogResponseRequest request3 = AdminProtos.SlowLogResponseRequest.newBuilder() .setLimit(15).setUserName("userName_87").setClientAddress("client_88") .setFilterByOperator(AdminProtos.SlowLogResponseRequest.FilterByOperator.AND).build(); - Assert.assertEquals(0, getSlowLogPayloads(request3).size()); + assertEquals(0, getSlowLogPayloads(request3).size()); AdminProtos.SlowLogResponseRequest request4 = AdminProtos.SlowLogResponseRequest.newBuilder() 
.setLimit(15).setUserName("userName_87").setClientAddress("client_87") .setFilterByOperator(AdminProtos.SlowLogResponseRequest.FilterByOperator.AND).build(); - Assert.assertEquals(1, getSlowLogPayloads(request4).size()); + assertEquals(1, getSlowLogPayloads(request4).size()); AdminProtos.SlowLogResponseRequest request5 = AdminProtos.SlowLogResponseRequest.newBuilder() .setLimit(15).setUserName("userName_88").setClientAddress("client_89") .setFilterByOperator(AdminProtos.SlowLogResponseRequest.FilterByOperator.OR).build(); - Assert.assertEquals(2, getSlowLogPayloads(request5).size()); + assertEquals(2, getSlowLogPayloads(request5).size()); AdminProtos.SlowLogResponseRequest requestSlowLog = AdminProtos.SlowLogResponseRequest.newBuilder().setLimit(15).build(); - Assert.assertNotEquals(-1, + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> getSlowLogPayloads(requestSlowLog).size() == 15)); } @@ -610,11 +608,11 @@ public void testOnlineSlowLogScanPayloadDefaultDisabled() throws Exception { AdminProtos.SlowLogResponseRequest request = AdminProtos.SlowLogResponseRequest.newBuilder().setLimit(1).build(); - Assert.assertEquals(getSlowLogPayloads(request).size(), 0); + assertEquals(0, getSlowLogPayloads(request).size()); LOG.debug("Initially ringbuffer of Slow Log records is empty"); RpcLogDetails rpcLogDetails = getRpcLogDetailsOfScan(); namedQueueRecorder.addRecord(rpcLogDetails); - Assert.assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { Optional slowLogPayload = getSlowLogPayloads(request).stream().findAny(); if (slowLogPayload.isPresent()) { return !slowLogPayload.get().hasScan(); @@ -634,11 +632,11 @@ public void testOnlineSlowLogScanPayloadExplicitlyDisabled() throws Exception { AdminProtos.SlowLogResponseRequest request = AdminProtos.SlowLogResponseRequest.newBuilder().setLimit(1).build(); - Assert.assertEquals(getSlowLogPayloads(request).size(), 0); + assertEquals(0, 
getSlowLogPayloads(request).size()); LOG.debug("Initially ringbuffer of Slow Log records is empty"); RpcLogDetails rpcLogDetails = getRpcLogDetailsOfScan(); namedQueueRecorder.addRecord(rpcLogDetails); - Assert.assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { Optional slowLogPayload = getSlowLogPayloads(request).stream().findAny(); if (slowLogPayload.isPresent()) { return !slowLogPayload.get().hasScan(); @@ -658,11 +656,11 @@ public void testOnlineSlowLogScanPayloadExplicitlyEnabled() throws Exception { AdminProtos.SlowLogResponseRequest request = AdminProtos.SlowLogResponseRequest.newBuilder().setLimit(1).build(); - Assert.assertEquals(getSlowLogPayloads(request).size(), 0); + assertEquals(0, getSlowLogPayloads(request).size()); LOG.debug("Initially ringbuffer of Slow Log records is empty"); RpcLogDetails rpcLogDetails = getRpcLogDetailsOfScan(); namedQueueRecorder.addRecord(rpcLogDetails); - Assert.assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { Optional slowLogPayload = getSlowLogPayloads(request).stream().findAny(); if (slowLogPayload.isPresent()) { return slowLogPayload.get().hasScan(); @@ -681,11 +679,11 @@ public void testOnlineSlowLogRequestAttributes() throws Exception { AdminProtos.SlowLogResponseRequest request = AdminProtos.SlowLogResponseRequest.newBuilder().setLimit(1).build(); - Assert.assertEquals(getSlowLogPayloads(request).size(), 0); + assertEquals(0, getSlowLogPayloads(request).size()); LOG.debug("Initially ringbuffer of Slow Log records is empty"); RpcLogDetails rpcLogDetails = getRpcLogDetailsOfScan(); namedQueueRecorder.addRecord(rpcLogDetails); - Assert.assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { Optional slowLogPayload = getSlowLogPayloads(request).stream().findAny(); if 
(slowLogPayload.isPresent() && !slowLogPayload.get().getRequestAttributeList().isEmpty()) { return slowLogPayload.get().getRequestAttributeList().containsAll(REQUEST_HEADERS); @@ -704,11 +702,11 @@ public void testOnlineSlowLogConnectionAttributes() throws Exception { AdminProtos.SlowLogResponseRequest request = AdminProtos.SlowLogResponseRequest.newBuilder().setLimit(1).build(); - Assert.assertEquals(getSlowLogPayloads(request).size(), 0); + assertEquals(0, getSlowLogPayloads(request).size()); LOG.debug("Initially ringbuffer of Slow Log records is empty"); RpcLogDetails rpcLogDetails = getRpcLogDetailsOfScan(); namedQueueRecorder.addRecord(rpcLogDetails); - Assert.assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { Optional slowLogPayload = getSlowLogPayloads(request).stream().findAny(); if ( slowLogPayload.isPresent() && !slowLogPayload.get().getConnectionAttributeList().isEmpty() diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/namequeues/TestRpcLogDetails.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/namequeues/TestRpcLogDetails.java index 39e942b04837..1e6312707d35 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/namequeues/TestRpcLogDetails.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/namequeues/TestRpcLogDetails.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.namequeues; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; import java.io.IOException; import java.net.InetAddress; @@ -30,16 +30,14 @@ import java.util.Map; import java.util.Optional; import org.apache.hadoop.hbase.ExtendedCellScanner; -import 
org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.ipc.RpcCall; import org.apache.hadoop.hbase.ipc.RpcCallback; import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.apache.hbase.thirdparty.com.google.protobuf.BlockingService; import org.apache.hbase.thirdparty.com.google.protobuf.ByteString; @@ -53,13 +51,10 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos; -@Category({ RegionServerTests.class, SmallTests.class }) +@Tag(SmallTests.TAG) +@Tag(RegionServerTests.TAG) public class TestRpcLogDetails { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestRpcLogDetails.class); - private final ClientProtos.Scan scan = ClientProtos.Scan.newBuilder().setStartRow(ByteString.copyFrom(Bytes.toBytes("abc"))) .setStopRow(ByteString.copyFrom(Bytes.toBytes("xyz"))).build(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/namequeues/TestSlowLogAccessor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/namequeues/TestSlowLogAccessor.java index 4b130acba6e5..a547b7e1ef7e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/namequeues/TestSlowLogAccessor.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/namequeues/TestSlowLogAccessor.java @@ -17,12 +17,14 @@ */ package org.apache.hadoop.hbase.namequeues; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; + import java.io.IOException; import java.util.List; import java.util.concurrent.CompletableFuture; 
import java.util.concurrent.TimeUnit; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.client.Connection; @@ -36,13 +38,11 @@ import org.apache.hadoop.hbase.slowlog.SlowLogTableAccessor; import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.MediumTests; -import org.junit.AfterClass; -import org.junit.Assert; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -54,20 +54,17 @@ /** * Tests for SlowLog System Table */ -@Category({ MasterTests.class, MediumTests.class }) +@Tag(MediumTests.TAG) +@Tag(MasterTests.TAG) public class TestSlowLogAccessor { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestSlowLogAccessor.class); - - private static final Logger LOG = LoggerFactory.getLogger(TestNamedQueueRecorder.class); + private static final Logger LOG = LoggerFactory.getLogger(TestSlowLogAccessor.class); private static final HBaseTestingUtil HBASE_TESTING_UTILITY = new HBaseTestingUtil(); private NamedQueueRecorder namedQueueRecorder; - @BeforeClass + @BeforeAll public static void setup() throws Exception { try { HBASE_TESTING_UTILITY.shutdownMiniHBaseCluster(); @@ -83,12 +80,12 @@ public static void setup() throws Exception { HBASE_TESTING_UTILITY.startMiniCluster(); } - @AfterClass + @AfterAll public static void teardown() throws Exception { HBASE_TESTING_UTILITY.shutdownMiniHBaseCluster(); } - @Before + @BeforeEach public void setUp() 
throws Exception { HRegionServer hRegionServer = HBASE_TESTING_UTILITY.getMiniHBaseCluster().getRegionServer(0); this.namedQueueRecorder = hRegionServer.getNamedQueueRecorder(); @@ -111,7 +108,7 @@ public void testSlowLogRecords() throws Exception { AdminProtos.SlowLogResponseRequest.newBuilder().setLimit(15).build(); namedQueueRecorder.clearNamedQueue(NamedQueuePayload.NamedQueueEvent.SLOW_LOG); - Assert.assertEquals(getSlowLogPayloads(request).size(), 0); + assertEquals(0, getSlowLogPayloads(request).size()); int i = 0; @@ -144,11 +141,10 @@ public void testSlowLogRecords() throws Exception { namedQueueRecorder.addRecord(rpcLogDetails); } - Assert.assertNotEquals(-1, + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> getSlowLogPayloads(request).size() == 14)); - Assert.assertNotEquals(-1, - HBASE_TESTING_UTILITY.waitFor(3000, () -> getTableCount(connection) == 14)); + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> getTableCount(connection) == 14)); } private int getTableCount(Connection connection) { @@ -165,7 +161,7 @@ private int getTableCount(Connection connection) { } private Connection waitForSlowLogTableCreation() throws IOException { - Assert.assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(2000, () -> { + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(2000, () -> { try { return HBASE_TESTING_UTILITY.getAdmin() .tableExists(SlowLogTableAccessor.SLOW_LOG_TABLE_NAME); @@ -183,7 +179,7 @@ public void testHigherSlowLogs() throws Exception { namedQueueRecorder.clearNamedQueue(NamedQueuePayload.NamedQueueEvent.SLOW_LOG); AdminProtos.SlowLogResponseRequest request = AdminProtos.SlowLogResponseRequest.newBuilder().setLimit(500000).build(); - Assert.assertEquals(getSlowLogPayloads(request).size(), 0); + assertEquals(0, getSlowLogPayloads(request).size()); for (int j = 0; j < 100; j++) { CompletableFuture.runAsync(() -> { @@ -198,13 +194,13 @@ public void testHigherSlowLogs() throws Exception { }); } - Assert.assertNotEquals(-1, 
HBASE_TESTING_UTILITY.waitFor(7000, () -> { + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(7000, () -> { int count = getSlowLogPayloads(request).size(); LOG.debug("RingBuffer records count: {}", count); return count > 2000; })); - Assert.assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(7000, () -> { + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(7000, () -> { int count = getTableCount(connection); LOG.debug("SlowLog Table records count: {}", count); return count > 2000; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/namequeues/TestTooLargeLog.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/namequeues/TestTooLargeLog.java index fdc3e288bfed..a684ae9815e3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/namequeues/TestTooLargeLog.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/namequeues/TestTooLargeLog.java @@ -17,14 +17,13 @@ */ package org.apache.hadoop.hbase.namequeues; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.Collections; import java.util.List; import java.util.Set; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.ServerName; @@ -40,28 +39,19 @@ import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; - -@Category({ RegionServerTests.class, MediumTests.class }) -public class TestTooLargeLog { +import 
org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestTooLargeLog.class); +@Tag(MediumTests.TAG) +@Tag(RegionServerTests.TAG) +public class TestTooLargeLog { protected final static HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); protected static Admin ADMIN; - @Rule - public TestName name = new TestName(); - - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { // Slow log needs to be enabled initially to spin up the SlowLogQueueService TEST_UTIL.getConfiguration().setBoolean(HConstants.SLOW_LOG_BUFFER_ENABLED_KEY, true); @@ -71,7 +61,7 @@ public static void setUpBeforeClass() throws Exception { ADMIN = TEST_UTIL.getAdmin(); } - @AfterClass + @AfterAll public static void afterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } @@ -114,10 +104,10 @@ public void testLogLargeBlockBytesScanned() throws IOException { OnlineLogRecord record = (OnlineLogRecord) entries.get(0); - assertTrue("expected " + record.getBlockBytesScanned() + " to be >= 100", - record.getBlockBytesScanned() >= 100); - assertTrue("expected " + record.getResponseSize() + " to be < 100", - record.getResponseSize() < 100); - assertTrue("expected " + record.getFsReadTime() + " to be > 0", record.getFsReadTime() > 0); + assertTrue(record.getBlockBytesScanned() >= 100, + "expected " + record.getBlockBytesScanned() + " to be >= 100"); + assertTrue(record.getResponseSize() < 100, + "expected " + record.getResponseSize() + " to be < 100"); + assertTrue(record.getFsReadTime() > 0, "expected " + record.getFsReadTime() + " to be > 0"); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/namequeues/TestWALEventTracker.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/namequeues/TestWALEventTracker.java index d0305fe7535e..6a97536563f2 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/namequeues/TestWALEventTracker.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/namequeues/TestWALEventTracker.java @@ -25,7 +25,7 @@ import static org.apache.hadoop.hbase.namequeues.WALEventTrackerTableAccessor.WAL_LENGTH_COLUMN; import static org.apache.hadoop.hbase.namequeues.WALEventTrackerTableAccessor.WAL_NAME_COLUMN; import static org.apache.hadoop.hbase.namequeues.WALEventTrackerTableAccessor.WAL_STATE_COLUMN; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.IOException; import java.util.ArrayList; @@ -36,7 +36,6 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.Waiter; @@ -51,27 +50,23 @@ import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.wal.WAL; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@Category({ RegionServerTests.class, MediumTests.class }) +@Tag(MediumTests.TAG) +@Tag(RegionServerTests.TAG) public class TestWALEventTracker { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestWALEventTracker.class); - private static final Logger LOG = LoggerFactory.getLogger(TestNamedQueueRecorder.class); private static HBaseTestingUtil TEST_UTIL; public 
static Configuration CONF; - @BeforeClass + @BeforeAll public static void setup() throws Exception { CONF = HBaseConfiguration.create(); CONF.setBoolean(WAL_EVENT_TRACKER_ENABLED_KEY, true); @@ -82,13 +77,13 @@ public static void setup() throws Exception { TEST_UTIL.startMiniCluster(); } - @AfterClass + @AfterAll public static void teardown() throws Exception { LOG.info("Calling teardown"); TEST_UTIL.shutdownMiniHBaseCluster(); } - @Before + @BeforeEach public void waitForWalEventTrackerTableCreation() { Waiter.waitFor(CONF, 10000, (Waiter.Predicate) () -> TEST_UTIL.getAdmin().tableExists(WAL_EVENT_TRACKER_TABLE_NAME)); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/namequeues/TestWALEventTrackerTableAccessor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/namequeues/TestWALEventTrackerTableAccessor.java index 47ca5a8252bf..2b09db406da4 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/namequeues/TestWALEventTrackerTableAccessor.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/namequeues/TestWALEventTrackerTableAccessor.java @@ -17,22 +17,17 @@ */ package org.apache.hadoop.hbase.namequeues; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.regionserver.wal.WALEventTrackerListener; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(SmallTests.class) +@Tag(SmallTests.TAG) public class TestWALEventTrackerTableAccessor { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestWALEventTrackerTableAccessor.class); /* * Tests that rowkey is 
getting constructed correctly. diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/namequeues/TestWalEventTrackerQueueService.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/namequeues/TestWalEventTrackerQueueService.java index 4fbb03b13eee..757960cb57b8 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/namequeues/TestWalEventTrackerQueueService.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/namequeues/TestWalEventTrackerQueueService.java @@ -19,7 +19,7 @@ import static org.apache.hadoop.hbase.master.waleventtracker.WALEventTrackerTableCreator.WAL_EVENT_TRACKER_ENABLED_KEY; import static org.apache.hadoop.hbase.namequeues.WALEventTrackerTableAccessor.WAL_EVENT_TRACKER_TABLE_NAME; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; @@ -28,33 +28,24 @@ import java.io.IOException; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.regionserver.wal.WALEventTrackerListener; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; -@Category(SmallTests.class) +@Tag(SmallTests.TAG) public class TestWalEventTrackerQueueService { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestWalEventTrackerQueueService.class); - - @Rule - public TestName name = new TestName(); - /* * Test whether wal 
event tracker metrics are being incremented. */ @Test - public void testMetrics() throws Exception { + public void testMetrics(TestInfo testInfo) throws Exception { + String methodName = testInfo.getTestMethod().get().getName(); String rsName = "test-region-server"; String walName = "test-wal-0"; long timeStamp = EnvironmentEdgeManager.currentTime(); @@ -65,8 +56,8 @@ public void testMetrics() throws Exception { Configuration conf = HBaseConfiguration.create(); conf.setBoolean(WAL_EVENT_TRACKER_ENABLED_KEY, true); conf.setLong(WALEventTrackerTableAccessor.SLEEP_INTERVAL_KEY, 100); - MetricsWALEventTrackerSourceImpl source = new MetricsWALEventTrackerSourceImpl( - name.getMethodName(), name.getMethodName(), name.getMethodName(), name.getMethodName()); + MetricsWALEventTrackerSourceImpl source = + new MetricsWALEventTrackerSourceImpl(methodName, methodName, methodName, methodName); WALEventTrackerQueueService service = new WALEventTrackerQueueService(conf, source); service.addToQueue(payload); Connection mockConnection = mock(Connection.class); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.java index 52a5cb8259eb..f768198a7ad2 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.java @@ -18,13 +18,12 @@ package org.apache.hadoop.hbase.procedure; import static org.apache.hadoop.hbase.coprocessor.CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.fail; import java.io.IOException; import java.util.List; import java.util.Optional; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.TableName; 
import org.apache.hadoop.hbase.client.RegionInfo; @@ -38,11 +37,10 @@ import org.apache.hadoop.hbase.security.AccessDeniedException; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.After; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -51,13 +49,9 @@ /** * Check if CompletedProcedureCleaner cleans up failed nonce procedures. */ -@Category(LargeTests.class) +@Tag(LargeTests.TAG) public class TestFailedProcCleanup { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestFailedProcCleanup.class); - private static final Logger LOG = LoggerFactory.getLogger(TestFailedProcCleanup.class); protected static HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); @@ -66,14 +60,14 @@ public class TestFailedProcCleanup { private static final byte[] FAMILY = Bytes.toBytesBinary("f"); private static final int evictionDelay = 10 * 1000; - @BeforeClass + @BeforeAll public static void setUpBeforeClass() { conf = TEST_UTIL.getConfiguration(); conf.setInt("hbase.procedure.cleaner.evict.ttl", evictionDelay); conf.setInt("hbase.procedure.cleaner.evict.batch.size", 1); } - @After + @AfterEach public void tearDown() throws Exception { TEST_UTIL.shutdownMiniCluster(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedure.java index 96906701b0d6..ad3778debd83 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedure.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedure.java @@ -28,29 +28,24 @@ import 
java.util.ArrayList; import java.util.List; import java.util.concurrent.CountDownLatch; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.errorhandling.ForeignException; import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher; import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * Demonstrate how Procedure handles single members, multiple members, and errors semantics */ -@Category({ MasterTests.class, SmallTests.class }) +@Tag(MasterTests.TAG) +@Tag(SmallTests.TAG) public class TestProcedure { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestProcedure.class); - ProcedureCoordinator coord; - @Before + @BeforeEach public void setup() { coord = mock(ProcedureCoordinator.class); final ProcedureCoordinatorRpcs comms = mock(ProcedureCoordinatorRpcs.class); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureCoordinator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureCoordinator.java index 06dae4a32c83..6fe3eb325308 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureCoordinator.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureCoordinator.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.procedure; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyList; import static 
org.mockito.ArgumentMatchers.anyString; @@ -40,14 +40,12 @@ import java.util.List; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher; import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.After; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.mockito.InOrder; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; @@ -60,13 +58,10 @@ * This only works correctly when we do class level parallelization of tests. If we do method * level serialization this class will likely throw all kinds of errors. */ -@Category({ MasterTests.class, SmallTests.class }) +@Tag(MasterTests.TAG) +@Tag(SmallTests.TAG) public class TestProcedureCoordinator { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestProcedureCoordinator.class); - // general test constants private static final long WAKE_FREQUENCY = 1000; private static final long TIMEOUT = 100000; @@ -84,7 +79,7 @@ public class TestProcedureCoordinator { // handle to the coordinator for each test private ProcedureCoordinator coordinator; - @After + @AfterEach public void resetTest() throws IOException { // reset all the mocks used for the tests reset(controller, task, monitor); @@ -116,8 +111,9 @@ public void testThreadPoolSize() throws Exception { coordinator.startProcedure(procSpy.getErrorMonitor(), procName, procData, expected); // null here means second procedure failed to start. 
- assertNull("Coordinator successfully ran two tasks at once with a single thread pool.", - coordinator.startProcedure(proc2.getErrorMonitor(), "another op", procData, expected)); + assertNull( + coordinator.startProcedure(proc2.getErrorMonitor(), "another op", procData, expected), + "Coordinator successfully ran two tasks at once with a single thread pool."); } /** @@ -296,7 +292,7 @@ private static abstract class OperationAnswer implements Answer { private boolean ran = false; public void ensureRan() { - assertTrue("Prepare mocking didn't actually run!", ran); + assertTrue(ran, "Prepare mocking didn't actually run!"); } @Override diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureDescriber.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureDescriber.java index ff068ba6b1e2..0310cdeeba49 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureDescriber.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureDescriber.java @@ -17,11 +17,10 @@ */ package org.apache.hadoop.hbase.procedure; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.IOException; import java.util.Date; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.master.procedure.ProcedureDescriber; import org.apache.hadoop.hbase.procedure2.Procedure; import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer; @@ -29,20 +28,16 @@ import org.apache.hadoop.hbase.procedure2.ProcedureYieldException; import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.apache.hbase.thirdparty.com.google.protobuf.ByteString; import 
org.apache.hbase.thirdparty.com.google.protobuf.BytesValue; -@Category({ MasterTests.class, SmallTests.class }) +@Tag(MasterTests.TAG) +@Tag(SmallTests.TAG) public class TestProcedureDescriber { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestProcedureDescriber.class); - public static class TestProcedure extends Procedure { @Override protected Procedure[] execute(Object env) diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureManager.java index 11172e28d8ef..67ce69bed303 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureManager.java @@ -17,34 +17,29 @@ */ package org.apache.hadoop.hbase.procedure; -import static org.junit.Assert.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; import java.io.IOException; import java.util.HashMap; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ MasterTests.class, MediumTests.class }) +@Tag(MasterTests.TAG) +@Tag(MediumTests.TAG) public class TestProcedureManager { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - 
HBaseClassTestRule.forClass(TestProcedureManager.class); - private static final int NUM_RS = 2; private static HBaseTestingUtil util = new HBaseTestingUtil(); - @BeforeClass + @BeforeAll public static void setupBeforeClass() throws Exception { // set configure to indicate which pm should be loaded Configuration conf = util.getConfiguration(); @@ -57,7 +52,7 @@ public static void setupBeforeClass() throws Exception { util.startMiniCluster(NUM_RS); } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { util.shutdownMiniCluster(); } @@ -68,7 +63,7 @@ public void testSimpleProcedureManager() throws IOException { byte[] result = admin.execProcedureWithReturn(SimpleMasterProcedureManager.SIMPLE_SIGNATURE, "mytest", new HashMap<>()); - assertArrayEquals("Incorrect return data from execProcedure", - Bytes.toBytes(SimpleMasterProcedureManager.SIMPLE_DATA), result); + assertArrayEquals(Bytes.toBytes(SimpleMasterProcedureManager.SIMPLE_DATA), result, + "Incorrect return data from execProcedure"); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureMember.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureMember.java index 2fc138778992..d15b9d96de0a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureMember.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureMember.java @@ -32,17 +32,15 @@ import java.io.IOException; import java.util.concurrent.ThreadPoolExecutor; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.errorhandling.ForeignException; import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher; import org.apache.hadoop.hbase.errorhandling.TimeoutException; import org.apache.hadoop.hbase.procedure.Subprocedure.SubprocedureImpl; import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import 
org.junit.After; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.mockito.InOrder; import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; @@ -53,13 +51,10 @@ /** * Test the procedure member, and it's error handling mechanisms. */ -@Category({ MasterTests.class, SmallTests.class }) +@Tag(MasterTests.TAG) +@Tag(SmallTests.TAG) public class TestProcedureMember { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestProcedureMember.class); - private static final long WAKE_FREQUENCY = 100; private static final long TIMEOUT = 100000; private static final long POOL_KEEP_ALIVE = 1; @@ -77,7 +72,7 @@ public class TestProcedureMember { /** * Reset all the mock objects */ - @After + @AfterEach public void resetTest() throws IOException { reset(mockListener, mockBuilder, mockMemberComms); Closeables.close(member, true); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedure.java index a9fac158398c..323fdf4fa740 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedure.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedure.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.procedure; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyList; import static org.mockito.ArgumentMatchers.eq; @@ -35,7 +35,6 @@ import java.util.concurrent.ThreadPoolExecutor; import 
java.util.concurrent.atomic.AtomicInteger; import org.apache.hadoop.hbase.Abortable; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.errorhandling.ForeignException; import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher; @@ -45,11 +44,10 @@ import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.zookeeper.ZKWatcher; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.mockito.Mockito; import org.mockito.internal.matchers.ArrayEquals; import org.mockito.invocation.InvocationOnMock; @@ -63,13 +61,10 @@ /** * Cluster-wide testing of a distributed three-phase commit using a 'real' zookeeper cluster */ -@Category({ MasterTests.class, MediumTests.class }) +@Tag(MasterTests.TAG) +@Tag(MediumTests.TAG) public class TestZKProcedure { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestZKProcedure.class); - private static final Logger LOG = LoggerFactory.getLogger(TestZKProcedure.class); private static HBaseTestingUtil UTIL = new HBaseTestingUtil(); private static final String COORDINATOR_NODE_NAME = "coordinator"; @@ -81,12 +76,12 @@ public class TestZKProcedure { private static final byte[] data = new byte[] { 1, 2 }; // TODO what is this used for? 
private static final VerificationMode once = Mockito.times(1); - @BeforeClass + @BeforeAll public static void setupTest() throws Exception { UTIL.startMiniZKCluster(); } - @AfterClass + @AfterAll public static void cleanupTest() throws Exception { UTIL.shutdownMiniZKCluster(); } @@ -311,7 +306,7 @@ public Void answer(InvocationOnMock invocation) throws Throwable { Procedure task = coordinator.startProcedure(coordinatorTaskErrorMonitor, opName, data, expected); - assertEquals("Didn't mock coordinator task", coordinatorTask, task); + assertEquals(coordinatorTask, task, "Didn't mock coordinator task"); // wait for the task to complete try { @@ -355,9 +350,9 @@ private void waitAndVerifyProc(Procedure proc, VerificationMode prepare, Verific Mockito.verify(proc, prepare).sendGlobalBarrierStart(); Mockito.verify(proc, commit).sendGlobalBarrierReached(); Mockito.verify(proc, finish).sendGlobalBarrierComplete(); - assertEquals("Operation error state was unexpected", opHasError, - proc.getErrorMonitor().hasException()); - assertEquals("Operation error state was unexpected", opHasError, caughtError); + assertEquals(opHasError, proc.getErrorMonitor().hasException(), + "Operation error state was unexpected"); + assertEquals(opHasError, caughtError, "Operation error state was unexpected"); } @@ -385,9 +380,9 @@ private void waitAndVerifySubproc(Subprocedure op, VerificationMode prepare, Mockito.verify(op, commit).insideBarrier(); // We cannot guarantee that cleanup has run so we don't check it. 
- assertEquals("Operation error state was unexpected", opHasError, - op.getErrorCheckable().hasException()); - assertEquals("Operation error state was unexpected", opHasError, caughtError); + assertEquals(opHasError, op.getErrorCheckable().hasException(), + "Operation error state was unexpected"); + assertEquals(opHasError, caughtError, "Operation error state was unexpected"); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedureControllers.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedureControllers.java index dcc580330459..02c90d0caf90 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedureControllers.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedureControllers.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.procedure; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import static org.mockito.Mockito.never; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; @@ -27,7 +27,6 @@ import java.util.ArrayList; import java.util.List; import java.util.concurrent.CountDownLatch; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher; import org.apache.hadoop.hbase.testclassification.MasterTests; @@ -36,11 +35,10 @@ import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.zookeeper.ZKUtil; import org.apache.hadoop.hbase.zookeeper.ZKWatcher; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import 
org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; @@ -55,13 +53,10 @@ /** * Test zookeeper-based, procedure controllers */ -@Category({ MasterTests.class, MediumTests.class }) +@Tag(MasterTests.TAG) +@Tag(MediumTests.TAG) public class TestZKProcedureControllers { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestZKProcedureControllers.class); - private static final Logger LOG = LoggerFactory.getLogger(TestZKProcedureControllers.class); private final static HBaseTestingUtil UTIL = new HBaseTestingUtil(); private static final String COHORT_NODE_NAME = "expected"; @@ -70,12 +65,12 @@ public class TestZKProcedureControllers { private final byte[] memberData = Bytes.toBytes("data from member"); - @BeforeClass + @BeforeAll public static void setupTest() throws Exception { UTIL.startMiniZKCluster(); } - @AfterClass + @AfterAll public static void cleanupTest() throws Exception { UTIL.shutdownMiniZKCluster(); } @@ -143,8 +138,8 @@ public Void answer(InvocationOnMock invocation) throws Throwable { // Mockito.any()); // cleanup after the test ZKUtil.deleteNodeRecursively(watcher, controller.getZkController().getBaseZnode()); - assertEquals("Didn't delete prepare node", -1, ZKUtil.checkExists(watcher, prepare)); - assertEquals("Didn't delete commit node", -1, ZKUtil.checkExists(watcher, commit)); + assertEquals(-1, ZKUtil.checkExists(watcher, prepare), "Didn't delete prepare node"); + assertEquals(-1, ZKUtil.checkExists(watcher, commit), "Didn't delete commit node"); } @Test @@ -229,10 +224,10 @@ private void runMockCommitWithOrchestratedControllers(StartControllers controlle Mockito.verify(coordinator, times(expected.size())).memberFinishedBarrier( Mockito.eq(operationName), Mockito.anyString(), Mockito.eq(memberData)); - assertEquals("Incorrect number of members returnd data", expected.size(), - 
dataFromMembers.size()); + assertEquals(expected.size(), dataFromMembers.size(), + "Incorrect number of members returned data"); for (byte[] result : dataFromMembers) { - assertArrayEquals("Incorrect data from member", memberData, result); + assertArrayEquals(memberData, result, "Incorrect data from member"); } controller.resetMembers(p); @@ -353,9 +348,9 @@ private void verifyZooKeeperClean(String operationName, ZKWatcher watcher, String prepare = ZKProcedureUtil.getAcquireBarrierNode(controller, operationName); String commit = ZKProcedureUtil.getReachedBarrierNode(controller, operationName); String abort = ZKProcedureUtil.getAbortNode(controller, operationName); - assertEquals("Didn't delete prepare node", -1, ZKUtil.checkExists(watcher, prepare)); - assertEquals("Didn't delete commit node", -1, ZKUtil.checkExists(watcher, commit)); - assertEquals("Didn't delete abort node", -1, ZKUtil.checkExists(watcher, abort)); + assertEquals(-1, ZKUtil.checkExists(watcher, prepare), "Didn't delete prepare node"); + assertEquals(-1, ZKUtil.checkExists(watcher, commit), "Didn't delete commit node"); + assertEquals(-1, ZKUtil.checkExists(watcher, abort), "Didn't delete abort node"); } /** diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStoreTestBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStoreTestBase.java index dac4cc1e0e73..73b3ac8147a1 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStoreTestBase.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStoreTestBase.java @@ -27,8 +27,8 @@ import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility.LoadCounter; import org.apache.hadoop.hbase.regionserver.MemStoreLAB; import org.apache.hadoop.hbase.util.CommonFSUtils; -import org.junit.After; -import org.junit.Before; +import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach; /** * This runs on local filesystem. hsync and hflush are not supported. May lose data! Only use where @@ -42,7 +42,7 @@ public class RegionProcedureStoreTestBase { protected RegionProcedureStore store; - @Before + @BeforeEach public void setUp() throws IOException { htu = new HBaseCommonTestingUtil(); Configuration conf = htu.getConfiguration(); @@ -56,7 +56,7 @@ public void setUp() throws IOException { store = RegionProcedureStoreTestHelper.createStore(server, region, new LoadCounter()); } - @After + @AfterEach public void tearDown() throws IOException { store.stop(true); region.close(true); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStoreTestProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStoreTestProcedure.java index fb1c001a2fe1..1b32f07328c3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStoreTestProcedure.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStoreTestProcedure.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.procedure2.store.region; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.IOException; import org.apache.hadoop.hbase.procedure2.Procedure; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/TestHFileProcedurePrettyPrinter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/TestHFileProcedurePrettyPrinter.java index b85897a1afa7..aa9648995a3d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/TestHFileProcedurePrettyPrinter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/TestHFileProcedurePrettyPrinter.java @@ -17,9 +17,9 @@ */ package 
org.apache.hadoop.hbase.procedure2.store.region; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.fail; import java.io.BufferedReader; import java.io.ByteArrayInputStream; @@ -33,7 +33,6 @@ import org.apache.commons.lang3.mutable.MutableLong; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.client.RegionInfo; import org.apache.hadoop.hbase.io.hfile.HFile; import org.apache.hadoop.hbase.master.region.MasterRegionFactory; @@ -42,19 +41,15 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.CommonFSUtils; import org.apache.hadoop.util.ToolRunner; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@Category({ MasterTests.class, SmallTests.class }) +@Tag(SmallTests.TAG) +@Tag(MasterTests.TAG) public class TestHFileProcedurePrettyPrinter extends RegionProcedureStoreTestBase { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestHFileProcedurePrettyPrinter.class); - private static final Logger LOG = LoggerFactory.getLogger(TestHFileProcedurePrettyPrinter.class); private List checkOutput(BufferedReader reader, MutableLong putCount, diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/TestRegionProcedureStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/TestRegionProcedureStore.java index 3ba858c5e036..0088a0d6da27 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/TestRegionProcedureStore.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/TestRegionProcedureStore.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.procedure2.store.region; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.net.InetAddress; @@ -29,7 +29,6 @@ import java.util.Optional; import java.util.Set; import org.apache.hadoop.hbase.ExtendedCellScanner; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.ipc.RpcCall; import org.apache.hadoop.hbase.ipc.RpcCallback; @@ -42,9 +41,8 @@ import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -55,13 +53,10 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos; -@Category({ MasterTests.class, SmallTests.class }) +@Tag(SmallTests.TAG) +@Tag(MasterTests.TAG) public class TestRegionProcedureStore extends RegionProcedureStoreTestBase { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestRegionProcedureStore.class); - private static final Logger LOG = LoggerFactory.getLogger(TestRegionProcedureStore.class); private void verifyProcIdsOnRestart(final Set 
procIds) throws Exception { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/TestRegionProcedureStoreMigration.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/TestRegionProcedureStoreMigration.java index 70b93487c12b..8197c614639d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/TestRegionProcedureStoreMigration.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/TestRegionProcedureStoreMigration.java @@ -19,9 +19,9 @@ import static org.hamcrest.CoreMatchers.startsWith; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.fail; import java.io.IOException; import java.util.ArrayList; @@ -32,7 +32,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseCommonTestingUtil; import org.apache.hadoop.hbase.HBaseIOException; import org.apache.hadoop.hbase.TableName; @@ -50,20 +49,16 @@ import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.CommonFSUtils; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; @SuppressWarnings("deprecation") -@Category({ MasterTests.class, SmallTests.class }) +@Tag(SmallTests.TAG) 
+@Tag(MasterTests.TAG) public class TestRegionProcedureStoreMigration { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestRegionProcedureStoreMigration.class); - private HBaseCommonTestingUtil htu; private MasterServices server; @@ -74,7 +69,7 @@ public class TestRegionProcedureStoreMigration { private WALProcedureStore walStore; - @Before + @BeforeEach public void setUp() throws IOException { htu = new HBaseCommonTestingUtil(); Configuration conf = htu.getConfiguration(); @@ -96,7 +91,7 @@ public void recoverFileLease(FileSystem fs, Path path) throws IOException { region = MasterRegionFactory.create(server); } - @After + @AfterEach public void tearDown() throws IOException { if (store != null) { store.stop(true); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/TestWALProcedurePrettyPrinter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/TestWALProcedurePrettyPrinter.java index d942c65b8c71..95f72f69b965 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/TestWALProcedurePrettyPrinter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/TestWALProcedurePrettyPrinter.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.procedure2.store.region; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.BufferedReader; import java.io.ByteArrayInputStream; @@ -29,25 +29,20 @@ import java.util.List; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.master.region.MasterRegionFactory; import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.util.ToolRunner; -import 
org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@Category({ MasterTests.class, SmallTests.class }) +@Tag(SmallTests.TAG) +@Tag(MasterTests.TAG) public class TestWALProcedurePrettyPrinter extends RegionProcedureStoreTestBase { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestWALProcedurePrettyPrinter.class); - private static final Logger LOG = LoggerFactory.getLogger(TestWALProcedurePrettyPrinter.class); @Test diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestReplicationProtobuf.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestReplicationProtobuf.java index bfbeed768554..1b0ca59361ab 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestReplicationProtobuf.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestReplicationProtobuf.java @@ -17,30 +17,25 @@ */ package org.apache.hadoop.hbase.protobuf; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.ArrayList; import java.util.List; import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.ExtendedCell; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ MiscTests.class, SmallTests.class }) 
+@Tag(MiscTests.TAG) +@Tag(SmallTests.TAG) public class TestReplicationProtobuf { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestReplicationProtobuf.class); - /** * Little test to check we can basically convert list of a list of KVs into a CellScanner */ diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/BulkLoadHFilesSplitRecoveryTest.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/BulkLoadHFilesSplitRecoveryTest.java new file mode 100644 index 000000000000..1efebe4b9ff4 --- /dev/null +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/BulkLoadHFilesSplitRecoveryTest.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.tool; + +import org.apache.hadoop.hbase.HBaseTestingUtil; +import org.apache.hadoop.hbase.coprocessor.CoprocessorHost; +import org.apache.hadoop.hbase.testclassification.LargeTests; +import org.apache.hadoop.hbase.testclassification.MiscTests; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; + +/** + * Test cases for the atomic load error handling of the bulk load functionality. 
+ */ +@Tag(MiscTests.TAG) +@Tag(LargeTests.TAG) +public class BulkLoadHFilesSplitRecoveryTest extends BulkLoadHFilesSplitRecoveryTestBase { + + @BeforeAll + public static void setupCluster() throws Exception { + util = new HBaseTestingUtil(); + util.getConfiguration().set(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, ""); + util.startMiniCluster(1); + } +} diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestBulkLoadHFilesSplitRecovery.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/BulkLoadHFilesSplitRecoveryTestBase.java similarity index 88% rename from hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestBulkLoadHFilesSplitRecovery.java rename to hbase-server/src/test/java/org/apache/hadoop/hbase/tool/BulkLoadHFilesSplitRecoveryTestBase.java index 24a28d18751f..2066193dc0ae 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestBulkLoadHFilesSplitRecovery.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/BulkLoadHFilesSplitRecoveryTestBase.java @@ -17,12 +17,13 @@ */ package org.apache.hadoop.hbase.tool; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyBoolean; import static org.mockito.ArgumentMatchers.anyList; @@ -40,7 +41,6 @@ import 
org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableExistsException; @@ -56,21 +56,14 @@ import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.client.TableDescriptor; import org.apache.hadoop.hbase.client.TableDescriptorBuilder; -import org.apache.hadoop.hbase.coprocessor.CoprocessorHost; import org.apache.hadoop.hbase.regionserver.HRegionServer; import org.apache.hadoop.hbase.regionserver.TestHRegionServerBulkLoad; -import org.apache.hadoop.hbase.testclassification.LargeTests; -import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.CommonFSUtils; import org.apache.hadoop.hbase.util.Pair; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -81,14 +74,10 @@ /** * Test cases for the atomic load error handling of the bulk load functionality. 
*/ -@Category({ MiscTests.class, LargeTests.class }) -public class TestBulkLoadHFilesSplitRecovery { +public class BulkLoadHFilesSplitRecoveryTestBase { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestBulkLoadHFilesSplitRecovery.class); - - private static final Logger LOG = LoggerFactory.getLogger(TestHRegionServerBulkLoad.class); + private static final Logger LOG = + LoggerFactory.getLogger(BulkLoadHFilesSplitRecoveryTestBase.class); static HBaseTestingUtil util; // used by secure subclass @@ -100,9 +89,6 @@ public class TestBulkLoadHFilesSplitRecovery { private final static byte[][] families = new byte[NUM_CFS][]; - @Rule - public TestName name = new TestName(); - static { for (int i = 0; i < NUM_CFS; i++) { families[i] = Bytes.toBytes(family(i)); @@ -222,14 +208,7 @@ private void forceSplit(TableName table) { } } - @BeforeClass - public static void setupCluster() throws Exception { - util = new HBaseTestingUtil(); - util.getConfiguration().set(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, ""); - util.startMiniCluster(1); - } - - @AfterClass + @AfterAll public static void teardownCluster() throws Exception { util.shutdownMiniCluster(); } @@ -271,9 +250,9 @@ private static AsyncClusterConnection mockAndInjectError(AsyncClusterConnection * Test that shows that exception thrown from the RS side will result in an exception on the * LIHFile client. 
*/ - @Test(expected = IOException.class) - public void testBulkLoadPhaseFailure() throws Exception { - final TableName table = TableName.valueOf(name.getMethodName()); + @Test + public void testBulkLoadPhaseFailure(TestInfo testInfo) throws Exception { + final TableName table = TableName.valueOf(testInfo.getTestMethod().get().getName()); final AtomicInteger attemptedCalls = new AtomicInteger(); Configuration conf = new Configuration(util.getConfiguration()); conf.setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 2); @@ -289,7 +268,7 @@ protected void bulkLoadPhase(AsyncClusterConnection conn, TableName tableName, } }; Path dir = buildBulkFiles(table, 1); - loader.bulkLoad(table, dir); + assertThrows(IOException.class, () -> loader.bulkLoad(table, dir)); } /** @@ -298,8 +277,8 @@ protected void bulkLoadPhase(AsyncClusterConnection conn, TableName tableName, * ${@link BulkLoadHFiles#RETRY_ON_IO_EXCEPTION} is set */ @Test - public void testRetryOnIOException() throws Exception { - TableName table = TableName.valueOf(name.getMethodName()); + public void testRetryOnIOException(TestInfo testInfo) throws Exception { + TableName table = TableName.valueOf(testInfo.getTestMethod().get().getName()); AtomicInteger calls = new AtomicInteger(0); setupTable(util.getConnection(), table, 10); Configuration conf = new Configuration(util.getConfiguration()); @@ -334,8 +313,8 @@ protected void bulkLoadPhase(AsyncClusterConnection conn, TableName tableName, * split just before the atomic region load. 
*/ @Test - public void testSplitWhileBulkLoadPhase() throws Exception { - final TableName table = TableName.valueOf(name.getMethodName()); + public void testSplitWhileBulkLoadPhase(TestInfo testInfo) throws Exception { + final TableName table = TableName.valueOf(testInfo.getTestMethod().get().getName()); setupTable(util.getConnection(), table, 10); populateTable(util.getConnection(), table, 1); assertExpectedTable(table, ROWCOUNT, 1); @@ -374,8 +353,8 @@ protected void bulkLoadPhase(AsyncClusterConnection conn, TableName tableName, * before atomically importing. */ @Test - public void testGroupOrSplitPresplit() throws Exception { - final TableName table = TableName.valueOf(name.getMethodName()); + public void testGroupOrSplitPresplit(TestInfo testInfo) throws Exception { + final TableName table = TableName.valueOf(testInfo.getTestMethod().get().getName()); setupTable(util.getConnection(), table, 10); populateTable(util.getConnection(), table, 1); assertExpectedTable(util.getConnection(), table, ROWCOUNT, 1); @@ -405,8 +384,8 @@ protected Pair, String> groupOrSplit(AsyncClusterConnection } @Test - public void testCorrectSplitPoint() throws Exception { - final TableName table = TableName.valueOf(name.getMethodName()); + public void testCorrectSplitPoint(TestInfo testInfo) throws Exception { + final TableName table = TableName.valueOf(testInfo.getTestMethod().get().getName()); byte[][] SPLIT_KEYS = new byte[][] { Bytes.toBytes("row_00000010"), Bytes.toBytes("row_00000020"), Bytes.toBytes("row_00000030"), Bytes.toBytes("row_00000040"), Bytes.toBytes("row_00000050"), Bytes.toBytes("row_00000060"), Bytes.toBytes("row_00000070") }; @@ -435,8 +414,8 @@ protected void bulkLoadPhase(AsyncClusterConnection conn, TableName tableName, * multiple times before all of them can be loaded successfully. 
*/ @Test - public void testSplitTmpFileCleanUp() throws Exception { - final TableName table = TableName.valueOf(name.getMethodName()); + public void testSplitTmpFileCleanUp(TestInfo testInfo) throws Exception { + final TableName table = TableName.valueOf(testInfo.getTestMethod().get().getName()); byte[][] SPLIT_KEYS = new byte[][] { Bytes.toBytes("row_00000010"), Bytes.toBytes("row_00000020"), Bytes.toBytes("row_00000030"), Bytes.toBytes("row_00000040"), Bytes.toBytes("row_00000050") }; @@ -455,17 +434,17 @@ public void testSplitTmpFileCleanUp() throws Exception { // HFiles have been splitted, there is TMP_DIR assertTrue(fs.exists(tmpPath)); // TMP_DIR should have been cleaned-up - assertNull(BulkLoadHFilesTool.TMP_DIR + " should be empty.", - CommonFSUtils.listStatus(fs, tmpPath)); + assertNull(CommonFSUtils.listStatus(fs, tmpPath), + BulkLoadHFilesTool.TMP_DIR + " should be empty."); assertExpectedTable(util.getConnection(), table, ROWCOUNT, 2); } /** * This simulates an remote exception which should cause LIHF to exit with an exception. */ - @Test(expected = IOException.class) - public void testGroupOrSplitFailure() throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName()); + @Test + public void testGroupOrSplitFailure(TestInfo testInfo) throws Exception { + final TableName tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); setupTable(util.getConnection(), tableName, 10); BulkLoadHFilesTool loader = new BulkLoadHFilesTool(util.getConfiguration()) { @@ -486,7 +465,7 @@ protected Pair, String> groupOrSplit(AsyncClusterConnection // create HFiles for different column families Path dir = buildBulkFiles(tableName, 1); - loader.bulkLoad(tableName, dir); + assertThrows(IOException.class, () -> loader.bulkLoad(tableName, dir)); } /** @@ -496,8 +475,8 @@ protected Pair, String> groupOrSplit(AsyncClusterConnection * behavior. 
*/ @Test - public void testSplitWhileBulkLoadPhaseWithoutItemMap() throws Exception { - final TableName table = TableName.valueOf(name.getMethodName()); + public void testSplitWhileBulkLoadPhaseWithoutItemMap(TestInfo testInfo) throws Exception { + final TableName table = TableName.valueOf(testInfo.getTestMethod().get().getName()); setupTable(util.getConnection(), table, 10); populateTable(util.getConnection(), table, 1); assertExpectedTable(table, ROWCOUNT, 1); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/BulkLoadHFilesTest.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/BulkLoadHFilesTest.java new file mode 100644 index 000000000000..b42cbcbc87ec --- /dev/null +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/BulkLoadHFilesTest.java @@ -0,0 +1,48 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.tool; + +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.codec.KeyValueCodecWithTags; +import org.apache.hadoop.hbase.coprocessor.CoprocessorHost; +import org.apache.hadoop.hbase.testclassification.LargeTests; +import org.apache.hadoop.hbase.testclassification.MiscTests; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; + +/** + * Test cases for the "load" half of the HFileOutputFormat bulk load functionality. These tests run + * faster than the full MR cluster tests in TestHFileOutputFormat + */ +@Tag(MiscTests.TAG) +@Tag(LargeTests.TAG) +public class BulkLoadHFilesTest extends BulkLoadHFilesTestBase { + + @BeforeAll + public static void setUpBeforeClass() throws Exception { + util.getConfiguration().set(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, ""); + util.getConfiguration().setInt(BulkLoadHFiles.MAX_FILES_PER_REGION_PER_FAMILY, + MAX_FILES_PER_REGION_PER_FAMILY); + // change default behavior so that tag values are returned with normal rpcs + util.getConfiguration().set(HConstants.RPC_CODEC_CONF_KEY, + KeyValueCodecWithTags.class.getCanonicalName()); + util.startMiniCluster(); + + setupNamespace(); + } +} diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestBulkLoadHFiles.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/BulkLoadHFilesTestBase.java similarity index 88% rename from hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestBulkLoadHFiles.java rename to hbase-server/src/test/java/org/apache/hadoop/hbase/tool/BulkLoadHFilesTestBase.java index 40b5ef440b2b..91ad15853044 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestBulkLoadHFiles.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/BulkLoadHFilesTestBase.java @@ -20,11 +20,11 @@ import static org.apache.hadoop.hbase.HBaseTestingUtil.countRows; import static org.apache.hadoop.hbase.util.LocatedBlockHelper.getLocatedBlockLocations; 
import static org.hamcrest.Matchers.greaterThan; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertThrows; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.IOException; import java.net.InetAddress; @@ -42,7 +42,6 @@ import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionLocation; @@ -56,15 +55,11 @@ import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.client.TableDescriptor; import org.apache.hadoop.hbase.client.TableDescriptorBuilder; -import org.apache.hadoop.hbase.codec.KeyValueCodecWithTags; -import org.apache.hadoop.hbase.coprocessor.CoprocessorHost; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.io.hfile.CacheConfig; import org.apache.hadoop.hbase.io.hfile.HFile; import org.apache.hadoop.hbase.io.hfile.HFileScanner; import org.apache.hadoop.hbase.regionserver.BloomType; -import org.apache.hadoop.hbase.testclassification.LargeTests; -import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.CommonFSUtils; import org.apache.hadoop.hbase.util.FutureUtils; @@ -74,13 +69,9 @@ import org.apache.hadoop.hdfs.protocol.LocatedBlock; import org.apache.hadoop.hdfs.protocol.LocatedBlocks; import org.hamcrest.MatcherAssert; -import org.junit.AfterClass; 
-import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.apache.hbase.thirdparty.com.google.common.collect.Lists; @@ -88,15 +79,7 @@ * Test cases for the "load" half of the HFileOutputFormat bulk load functionality. These tests run * faster than the full MR cluster tests in TestHFileOutputFormat */ -@Category({ MiscTests.class, LargeTests.class }) -public class TestBulkLoadHFiles { - - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestBulkLoadHFiles.class); - - @Rule - public TestName tn = new TestName(); +public class BulkLoadHFilesTestBase { private static final byte[] QUALIFIER = Bytes.toBytes("myqual"); private static final byte[] FAMILY = Bytes.toBytes("myfam"); @@ -110,24 +93,11 @@ public class TestBulkLoadHFiles { static HBaseTestingUtil util = new HBaseTestingUtil(); - @BeforeClass - public static void setUpBeforeClass() throws Exception { - util.getConfiguration().set(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, ""); - util.getConfiguration().setInt(BulkLoadHFiles.MAX_FILES_PER_REGION_PER_FAMILY, - MAX_FILES_PER_REGION_PER_FAMILY); - // change default behavior so that tag values are returned with normal rpcs - util.getConfiguration().set(HConstants.RPC_CODEC_CONF_KEY, - KeyValueCodecWithTags.class.getCanonicalName()); - util.startMiniCluster(); - - setupNamespace(); - } - protected static void setupNamespace() throws Exception { util.getAdmin().createNamespace(NamespaceDescriptor.create(NAMESPACE).build()); } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { util.shutdownMiniCluster(); } @@ -151,8 +121,8 @@ public void testSimpleLoad() throws Exception { } @Test - public void testSimpleLoadWithFileCopy() throws 
Exception { - String testName = tn.getMethodName(); + public void testSimpleLoadWithFileCopy(TestInfo testInfo) throws Exception { + String testName = testInfo.getTestMethod().get().getName(); final byte[] TABLE_NAME = Bytes.toBytes("mytable_" + testName); runTest(testName, buildHTD(TableName.valueOf(TABLE_NAME), BloomType.NONE), false, null, new byte[][][] { new byte[][] { Bytes.toBytes("aaaa"), Bytes.toBytes("cccc") }, @@ -379,7 +349,7 @@ public static int loadHFiles(String testName, TableDescriptor htd, HBaseTestingU if (copyFiles) { for (Path p : list) { - assertTrue(p + " should exist", fs.exists(p)); + assertTrue(fs.exists(p), p + " should exist"); } } @@ -404,8 +374,8 @@ private void runTest(String testName, TableDescriptor htd, boolean preCreateTabl if (fs.exists(stagingBasePath)) { FileStatus[] files = fs.listStatus(stagingBasePath); for (FileStatus file : files) { - assertTrue("Folder=" + file.getPath() + " is not cleaned up.", - file.getPath().getName() != "DONOTERASE"); + assertTrue(!file.getPath().getName().equals("DONOTERASE"), + "Folder=" + file.getPath() + " is not cleaned up."); } } @@ -418,8 +388,9 @@ private void runTest(String testName, TableDescriptor htd, boolean preCreateTabl * responses.
*/ @Test - public void testTagsSurviveBulkLoadSplit() throws Exception { - Path dir = util.getDataTestDirOnTestFS(tn.getMethodName()); + public void testTagsSurviveBulkLoadSplit(TestInfo testInfo) throws Exception { + String name = testInfo.getTestMethod().get().getName(); + Path dir = util.getDataTestDirOnTestFS(name); FileSystem fs = util.getTestFileSystem(); dir = dir.makeQualified(fs.getUri(), fs.getWorkingDirectory()); Path familyDir = new Path(dir, Bytes.toString(FAMILY)); @@ -431,10 +402,10 @@ public void testTagsSurviveBulkLoadSplit() throws Exception { byte[] from = Bytes.toBytes("ddd"); byte[] to = Bytes.toBytes("ooo"); HFileTestUtil.createHFileWithTags(util.getConfiguration(), fs, - new Path(familyDir, tn.getMethodName() + "_hfile"), FAMILY, QUALIFIER, from, to, 1000); + new Path(familyDir, name + "_hfile"), FAMILY, QUALIFIER, from, to, 1000); int expectedRows = 1000; - TableName tableName = TableName.valueOf(tn.getMethodName()); + TableName tableName = TableName.valueOf(name); TableDescriptor htd = buildHTD(tableName, BloomType.NONE); util.getAdmin().createTable(htd, tableSplitKeys); @@ -455,8 +426,8 @@ public void testTagsSurviveBulkLoadSplit() throws Exception { * Test loading into a column family that does not exist. 
*/ @Test - public void testNonexistentColumnFamilyLoad() throws Exception { - String testName = tn.getMethodName(); + public void testNonexistentColumnFamilyLoad(TestInfo testInfo) throws Exception { + String testName = testInfo.getTestMethod().get().getName(); byte[][][] hFileRanges = new byte[][][] { new byte[][] { Bytes.toBytes("aaa"), Bytes.toBytes("ccc") }, new byte[][] { Bytes.toBytes("ddd"), Bytes.toBytes("ooo") }, }; @@ -471,15 +442,14 @@ try { runTest(testName, htd, true, SPLIT_KEYS, hFileRanges, false, false, 2); - assertTrue("Loading into table with non-existent family should have failed", false); + fail("Loading into table with non-existent family should have failed"); } catch (Exception e) { - assertTrue("IOException expected", e instanceof IOException); - // further check whether the exception message is correct + assertTrue(e instanceof IOException, "IOException expected"); + // further check whether exception message is correct String errMsg = e.getMessage(); - assertTrue( + assertTrue(errMsg.contains(EXPECTED_MSG_FOR_NON_EXISTING_FAMILY), "Incorrect exception message, expected message: [" + EXPECTED_MSG_FOR_NON_EXISTING_FAMILY - + "], current message: [" + errMsg + "]", - errMsg.contains(EXPECTED_MSG_FOR_NON_EXISTING_FAMILY)); + + "], current message: [" + errMsg + "]"); } } @@ -551,12 +521,12 @@ private static void createRandomDataFile(FileSystem fs, Path path, int size) thr } @Test - public void testSplitStoreFile() throws IOException { + public void testSplitStoreFile(TestInfo testInfo) throws IOException { Path dir = util.getDataTestDirOnTestFS("testSplitHFile"); FileSystem fs = util.getTestFileSystem(); Path testIn = new Path(dir, "testhfile"); ColumnFamilyDescriptor familyDesc = ColumnFamilyDescriptorBuilder.of(FAMILY); - String tableName = tn.getMethodName(); + String tableName = testInfo.getTestMethod().get().getName();
util.createTable(TableName.valueOf(tableName), familyDesc.getNameAsString()); HFileTestUtil.createHFile(util.getConfiguration(), fs, testIn, FAMILY, QUALIFIER, Bytes.toBytes("aaa"), Bytes.toBytes("zzz"), 1000); @@ -577,15 +547,15 @@ public void testSplitStoreFile() throws IOException { * Test hfile splits with the favored nodes */ @Test - public void testSplitStoreFileWithFavoriteNodes() throws IOException { + public void testSplitStoreFileWithFavoriteNodes(TestInfo testInfo) throws IOException { Path dir = new Path(util.getDefaultRootDirPath(), "testhfile"); FileSystem fs = util.getDFSCluster().getFileSystem(); Path testIn = new Path(dir, "testSplitStoreFileWithFavoriteNodes"); ColumnFamilyDescriptor familyDesc = ColumnFamilyDescriptorBuilder.of(FAMILY); - String tableName = tn.getMethodName(); - Table table = util.createTable(TableName.valueOf(tableName), familyDesc.getNameAsString()); + String tableName = testInfo.getTestMethod().get().getName(); + util.createTable(TableName.valueOf(tableName), familyDesc.getNameAsString()); HFileTestUtil.createHFile(util.getConfiguration(), fs, testIn, FAMILY, QUALIFIER, Bytes.toBytes("aaa"), Bytes.toBytes("zzz"), 1000); @@ -604,12 +574,12 @@ public void testSplitStoreFileWithFavoriteNodes() throws IOException { } @Test - public void testSplitStoreFileWithCreateTimeTS() throws IOException { + public void testSplitStoreFileWithCreateTimeTS(TestInfo testInfo) throws IOException { Path dir = util.getDataTestDirOnTestFS("testSplitStoreFileWithCreateTimeTS"); FileSystem fs = util.getTestFileSystem(); Path testIn = new Path(dir, "testhfile"); ColumnFamilyDescriptor familyDesc = ColumnFamilyDescriptorBuilder.of(FAMILY); - String tableName = tn.getMethodName(); + String tableName = testInfo.getTestMethod().get().getName(); util.createTable(TableName.valueOf(tableName), familyDesc.getNameAsString()); HFileTestUtil.createHFile(util.getConfiguration(), fs, testIn, FAMILY, QUALIFIER, Bytes.toBytes("aaa"), Bytes.toBytes("zzz"), 1000); @@ 
-626,33 +596,37 @@ public void testSplitStoreFileWithCreateTimeTS() throws IOException { } @Test - public void testSplitStoreFileWithNoneToNone() throws IOException { - testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.NONE, DataBlockEncoding.NONE); + public void testSplitStoreFileWithNoneToNone(TestInfo testInfo) throws IOException { + testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.NONE, DataBlockEncoding.NONE, + testInfo); } @Test - public void testSplitStoreFileWithEncodedToEncoded() throws IOException { - testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.DIFF, DataBlockEncoding.DIFF); + public void testSplitStoreFileWithEncodedToEncoded(TestInfo testInfo) throws IOException { + testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.DIFF, DataBlockEncoding.DIFF, + testInfo); } @Test - public void testSplitStoreFileWithEncodedToNone() throws IOException { - testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.DIFF, DataBlockEncoding.NONE); + public void testSplitStoreFileWithEncodedToNone(TestInfo testInfo) throws IOException { + testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.DIFF, DataBlockEncoding.NONE, + testInfo); } @Test - public void testSplitStoreFileWithNoneToEncoded() throws IOException { - testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.NONE, DataBlockEncoding.DIFF); + public void testSplitStoreFileWithNoneToEncoded(TestInfo testInfo) throws IOException { + testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.NONE, DataBlockEncoding.DIFF, + testInfo); } private void testSplitStoreFileWithDifferentEncoding(DataBlockEncoding bulkloadEncoding, - DataBlockEncoding cfEncoding) throws IOException { + DataBlockEncoding cfEncoding, TestInfo testInfo) throws IOException { Path dir = util.getDataTestDirOnTestFS("testSplitHFileWithDifferentEncoding"); FileSystem fs = util.getTestFileSystem(); Path testIn = new Path(dir, "testhfile"); ColumnFamilyDescriptor familyDesc = 
ColumnFamilyDescriptorBuilder.newBuilder(FAMILY).setDataBlockEncoding(cfEncoding).build(); - String tableName = tn.getMethodName(); + String tableName = testInfo.getTestMethod().get().getName(); util.createTable(TableName.valueOf(tableName), familyDesc.getNameAsString()); HFileTestUtil.createHFileWithDataBlockEncoding(util.getConfiguration(), fs, testIn, bulkloadEncoding, FAMILY, QUALIFIER, Bytes.toBytes("aaa"), Bytes.toBytes("zzz"), 1000); @@ -719,7 +693,7 @@ private void verifyHFileFavoriteNode(Path p, AsyncTableRegionLocator regionLocat int index = 0; do { if (index > 0) { - assertTrue("failed use favored nodes", isFavoriteNode); + assertTrue(isFavoriteNode, "failed use favored nodes"); } isFavoriteNode = false; final LocatedBlock block = locatedBlocks.get(index); @@ -741,7 +715,7 @@ private void verifyHFileFavoriteNode(Path p, AsyncTableRegionLocator regionLocat index++; } while (index < locatedBlocks.size()); if (index > 0) { - assertTrue("failed use favored nodes", isFavoriteNode); + assertTrue(isFavoriteNode, "failed use favored nodes"); } } @@ -843,17 +817,17 @@ public void testLoadTooMayHFiles() throws Exception { } } - @Test(expected = TableNotFoundException.class) + @Test public void testWithoutAnExistingTableAndCreateTableSetToNo() throws Exception { Configuration conf = util.getConfiguration(); conf.set(BulkLoadHFiles.CREATE_TABLE_CONF_KEY, "no"); BulkLoadHFilesTool loader = new BulkLoadHFilesTool(conf); String[] args = { "directory", "nonExistingTable" }; - loader.run(args); + assertThrows(TableNotFoundException.class, () -> loader.run(args)); } @Test - public void testTableWithCFNameStartWithUnderScore() throws Exception { + public void testTableWithCFNameStartWithUnderScore(TestInfo testInfo) throws Exception { Path dir = util.getDataTestDirOnTestFS("cfNameStartWithUnderScore"); FileSystem fs = util.getTestFileSystem(); dir = dir.makeQualified(fs.getUri(), fs.getWorkingDirectory()); @@ -863,7 +837,7 @@ public void 
testTableWithCFNameStartWithUnderScore() throws Exception { byte[] from = Bytes.toBytes("begin"); byte[] to = Bytes.toBytes("end"); Configuration conf = util.getConfiguration(); - String tableName = tn.getMethodName(); + String tableName = testInfo.getTestMethod().get().getName(); try (Table table = util.createTable(TableName.valueOf(tableName), family)) { HFileTestUtil.createHFile(conf, fs, new Path(familyDir, "hfile"), Bytes.toBytes(family), QUALIFIER, from, to, 1000); @@ -873,11 +847,11 @@ public void testTableWithCFNameStartWithUnderScore() throws Exception { } @Test - public void testBulkLoadByFamily() throws Exception { + public void testBulkLoadByFamily(TestInfo testInfo) throws Exception { Path dir = util.getDataTestDirOnTestFS("testBulkLoadByFamily"); FileSystem fs = util.getTestFileSystem(); dir = dir.makeQualified(fs.getUri(), fs.getWorkingDirectory()); - String tableName = tn.getMethodName(); + String tableName = testInfo.getTestMethod().get().getName(); String[] families = { "cf1", "cf2", "cf3" }; for (int i = 0; i < families.length; i++) { byte[] from = Bytes.toBytes(i + "begin"); @@ -887,20 +861,20 @@ public void testBulkLoadByFamily() throws Exception { Bytes.toBytes(families[i]), QUALIFIER, from, to, 1000); } Table table = util.createTable(TableName.valueOf(tableName), families); - final AtomicInteger attmptedCalls = new AtomicInteger(); + final AtomicInteger attemptedCalls = new AtomicInteger(); util.getConfiguration().setBoolean(BulkLoadHFilesTool.BULK_LOAD_HFILES_BY_FAMILY, true); BulkLoadHFiles loader = new BulkLoadHFilesTool(util.getConfiguration()) { @Override protected CompletableFuture> tryAtomicRegionLoad( final AsyncClusterConnection conn, final TableName tableName, boolean copyFiles, final byte[] first, Collection lqis) { - attmptedCalls.incrementAndGet(); + attemptedCalls.incrementAndGet(); return super.tryAtomicRegionLoad(conn, tableName, copyFiles, first, lqis); } }; try { loader.bulkLoad(table.getName(), dir); - 
assertEquals(families.length, attmptedCalls.get()); + assertEquals(families.length, attemptedCalls.get()); assertEquals(1000 * families.length, HBaseTestingUtil.countRows(table)); } finally { if (null != table) { @@ -911,8 +885,8 @@ protected CompletableFuture> tryAtomicRegionLoad( } @Test - public void testFailIfNeedSplitHFile() throws IOException { - TableName tableName = TableName.valueOf(tn.getMethodName()); + public void testFailIfNeedSplitHFile(TestInfo testInfo) throws IOException { + TableName tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); Table table = util.createTable(tableName, FAMILY); util.loadTable(table, FAMILY); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestBulkLoadHFilesSFT.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/BulkLoadHFilesTestSFT.java similarity index 82% rename from hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestBulkLoadHFilesSFT.java rename to hbase-server/src/test/java/org/apache/hadoop/hbase/tool/BulkLoadHFilesTestSFT.java index e5403ee8d377..cc4476990fae 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestBulkLoadHFilesSFT.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/BulkLoadHFilesTestSFT.java @@ -19,28 +19,23 @@ import static org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory.TRACKER_IMPL; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.codec.KeyValueCodecWithTags; import org.apache.hadoop.hbase.coprocessor.CoprocessorHost; import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.MiscTests; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeAll; +import 
org.junit.jupiter.api.Tag; /** * Test cases for LoadIncrementalHFiles when SFT is enabled. */ -@Category({ MiscTests.class, LargeTests.class }) -public class TestBulkLoadHFilesSFT extends TestBulkLoadHFiles { +@Tag(MiscTests.TAG) +@Tag(LargeTests.TAG) +public class BulkLoadHFilesTestSFT extends BulkLoadHFilesTestBase { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestBulkLoadHFilesSFT.class); - - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { util.getConfiguration().set(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, ""); util.getConfiguration().setInt(BulkLoadHFiles.MAX_FILES_PER_REGION_PER_FAMILY, diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestSecureBulkLoadHFilesSplitRecovery.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/SecureBulkLoadHFilesSplitRecoveryTest.java similarity index 83% rename from hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestSecureBulkLoadHFilesSplitRecovery.java rename to hbase-server/src/test/java/org/apache/hadoop/hbase/tool/SecureBulkLoadHFilesSplitRecoveryTest.java index 0176d7c05cd2..2eb3937decc8 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestSecureBulkLoadHFilesSplitRecovery.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/SecureBulkLoadHFilesSplitRecoveryTest.java @@ -17,7 +17,6 @@ */ package org.apache.hadoop.hbase.tool; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.security.HadoopSecurityEnabledUserProviderForTesting; import org.apache.hadoop.hbase.security.UserProvider; @@ -25,10 +24,10 @@ import org.apache.hadoop.hbase.security.access.SecureTestUtil; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.MiscTests; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import 
org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * Reruns TestBulkLoadHFilesSplitRecovery using BulkLoadHFiles in secure mode. This suite is unable @@ -39,16 +38,13 @@ * cluster. This suite is still invaluable as it verifies the other mechanisms that need to be * supported as part of a LoadIncrementalFiles call. */ -@Category({ MiscTests.class, LargeTests.class }) -public class TestSecureBulkLoadHFilesSplitRecovery extends TestBulkLoadHFilesSplitRecovery { - - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestSecureBulkLoadHFilesSplitRecovery.class); +@Tag(MiscTests.TAG) +@Tag(LargeTests.TAG) +public class SecureBulkLoadHFilesSplitRecoveryTest extends BulkLoadHFilesSplitRecoveryTestBase { // This "overrides" the parent static method // make sure they are in sync - @BeforeClass + @BeforeAll public static void setupCluster() throws Exception { util = new HBaseTestingUtil(); // set the always on security provider @@ -65,7 +61,7 @@ public static void setupCluster() throws Exception { // Disabling this test as it does not work in secure mode @Test - @Override + @Disabled public void testBulkLoadPhaseFailure() { } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestSecureBulkLoadHFiles.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/SecureBulkLoadHFilesTest.java similarity index 86% rename from hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestSecureBulkLoadHFiles.java rename to hbase-server/src/test/java/org/apache/hadoop/hbase/tool/SecureBulkLoadHFilesTest.java index ced69e375f87..68e9940c938a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestSecureBulkLoadHFiles.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/SecureBulkLoadHFilesTest.java @@ -17,7 +17,6 @@ */ package 
org.apache.hadoop.hbase.tool; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.codec.KeyValueCodecWithTags; import org.apache.hadoop.hbase.security.HadoopSecurityEnabledUserProviderForTesting; @@ -26,9 +25,8 @@ import org.apache.hadoop.hbase.security.access.SecureTestUtil; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.MiscTests; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; /** * Reruns TestBulkLoadHFiles using BulkLoadHFiles in secure mode. This suite is unable to verify the @@ -39,14 +37,11 @@ * cluster. This suite is still invaluable as it verifies the other mechanisms that need to be * supported as part of a LoadIncrementalFiles call. */ -@Category({ MiscTests.class, LargeTests.class }) -public class TestSecureBulkLoadHFiles extends TestBulkLoadHFiles { +@Tag(MiscTests.TAG) +@Tag(LargeTests.TAG) +public class SecureBulkLoadHFilesTest extends BulkLoadHFilesTestBase { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestSecureBulkLoadHFiles.class); - - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { // set the always on security provider UserProvider.setUserProviderForTesting(util.getConfiguration(), diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java index 799247e4b6a8..6b886afed581 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java @@ -19,11 +19,11 @@ import static org.apache.hadoop.hbase.regionserver.TestRegionServerNoMaster.closeRegion; import static 
org.apache.hadoop.hbase.tool.CanaryTool.HBASE_CANARY_INFO_PORT; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.anyLong; import static org.mockito.ArgumentMatchers.argThat; import static org.mockito.ArgumentMatchers.eq; @@ -51,7 +51,6 @@ import java.util.concurrent.atomic.LongAdder; import org.apache.commons.io.IOUtils; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; @@ -67,32 +66,24 @@ import org.apache.hadoop.hbase.util.JvmVersion; import org.apache.hadoop.hbase.util.VersionInfo; import org.apache.hadoop.util.ToolRunner; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.mockito.ArgumentMatcher; -@Category({ LargeTests.class }) +@Tag(LargeTests.TAG) public class TestCanaryTool { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestCanaryTool.class); - private HBaseTestingUtil testingUtility; private 
static final byte[] FAMILY = Bytes.toBytes("f"); private static final byte[] COLUMN = Bytes.toBytes("col"); - @Rule - public TestName name = new TestName(); - private org.apache.logging.log4j.core.Appender mockAppender; - @Before + @BeforeEach public void setUp() throws Exception { testingUtility = new HBaseTestingUtil(); testingUtility.startMiniCluster(); @@ -103,7 +94,7 @@ public void setUp() throws Exception { .getLogger("org.apache.hadoop.hbase")).addAppender(mockAppender); } - @After + @AfterEach public void tearDown() throws Exception { testingUtility.shutdownMiniCluster(); ((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager @@ -124,8 +115,8 @@ public void testZookeeperCanaryPermittedFailuresArgumentWorks() throws Exception } @Test - public void testBasicCanaryWorks() throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName()); + public void testBasicCanaryWorks(TestInfo testInfo) throws Exception { + final TableName tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); Table table = testingUtility.createTable(tableName, new byte[][] { FAMILY }); // insert some test rows for (int i = 0; i < 1000; i++) { @@ -139,8 +130,8 @@ public void testBasicCanaryWorks() throws Exception { CanaryTool canary = new CanaryTool(executor, sink); String[] args = { "-writeSniffing", "-t", "10000", tableName.getNameAsString() }; assertEquals(0, ToolRunner.run(testingUtility.getConfiguration(), canary, args)); - assertEquals("verify no read error count", 0, canary.getReadFailures().size()); - assertEquals("verify no write error count", 0, canary.getWriteFailures().size()); + assertEquals(0, canary.getReadFailures().size(), "verify no read error count"); + assertEquals(0, canary.getWriteFailures().size(), "verify no write error count"); verify(sink, atLeastOnce()).publishReadTiming(isA(ServerName.class), isA(RegionInfo.class), isA(ColumnFamilyDescriptor.class), anyLong()); } @@ -152,14 +143,14 @@ public void 
testBasicCanaryWorks() throws Exception { * @throws Exception if it can't create a table, communicate with minicluster, or run the canary. */ @Test - public void testCanaryStopsScanningAfterTimeout() throws Exception { + public void testCanaryStopsScanningAfterTimeout(TestInfo testInfo) throws Exception { // Prepare a table with multiple regions, and close those regions on the regionserver. // Do not notify HMaster or META. CanaryTool will scan and receive NotServingRegionExceptions. - final TableName tableName = TableName.valueOf(name.getMethodName()); + final TableName tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); // Close the unused Table reference returned by createMultiRegionTable. testingUtility.createMultiRegionTable(tableName, new byte[][] { FAMILY }).close(); List regions = testingUtility.getAdmin().getRegions(tableName); - assertTrue("verify table has multiple regions", regions.size() > 1); + assertTrue(regions.size() > 1, "verify table has multiple regions"); HRegionServer regionserver = testingUtility.getMiniHBaseCluster().getRegionServer(0); for (RegionInfo region : regions) { closeRegion(testingUtility, regionserver, region); @@ -183,16 +174,16 @@ public void testCanaryStopsScanningAfterTimeout() throws Exception { } CanaryTool.Sink sink = canary.getActiveSink(); - assertEquals("verify canary timed out with TIMEOUT_ERROR_EXIT_CODE", 3, retCode); - assertEquals("verify only the first region failed", 1, sink.getReadFailureCount()); - assertEquals("verify no successful reads", 0, sink.getReadSuccessCount()); - assertEquals("verify we were attempting to scan all regions", regions.size(), - ((CanaryTool.RegionStdOutSink) sink).getTotalExpectedRegions()); + assertEquals(3, retCode, "verify canary timed out with TIMEOUT_ERROR_EXIT_CODE"); + assertEquals(1, sink.getReadFailureCount(), "verify only the first region failed"); + assertEquals(0, sink.getReadSuccessCount(), "verify no successful reads"); + assertEquals(regions.size(), 
((CanaryTool.RegionStdOutSink) sink).getTotalExpectedRegions(), + "verify we were attempting to scan all regions"); } @Test - public void testCanaryRegionTaskReadAllCF() throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName()); + public void testCanaryRegionTaskReadAllCF(TestInfo testInfo) throws Exception { + final TableName tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); Table table = testingUtility.createTable(tableName, new byte[][] { Bytes.toBytes("f1"), Bytes.toBytes("f2") }); // insert some test rows @@ -215,23 +206,23 @@ public void testCanaryRegionTaskReadAllCF() throws Exception { // we expect read count is double of region count int expectedReadCount = readAllCF ? 2 * sink.getTotalExpectedRegions() : sink.getTotalExpectedRegions(); - assertEquals("canary region success count should equal total expected read count", - expectedReadCount, sink.getReadSuccessCount()); + assertEquals(expectedReadCount, sink.getReadSuccessCount(), + "canary region success count should equal total expected read count"); Map> regionMap = sink.getRegionMap(); - assertFalse("verify region map has size > 0", regionMap.isEmpty()); + assertFalse(regionMap.isEmpty(), "verify region map has size > 0"); for (String regionName : regionMap.keySet()) { for (CanaryTool.RegionTaskResult res : regionMap.get(regionName)) { - assertNotNull("verify getRegionNameAsString()", regionName); - assertNotNull("verify getRegionInfo()", res.getRegionInfo()); - assertNotNull("verify getTableName()", res.getTableName()); - assertNotNull("verify getTableNameAsString()", res.getTableNameAsString()); - assertNotNull("verify getServerName()", res.getServerName()); - assertNotNull("verify getServerNameAsString()", res.getServerNameAsString()); - assertNotNull("verify getColumnFamily()", res.getColumnFamily()); - assertNotNull("verify getColumnFamilyNameAsString()", res.getColumnFamilyNameAsString()); - assertTrue("read from region " + regionName + " 
succeeded", res.isReadSuccess()); - assertTrue("read took some time", res.getReadLatency() > -1); + assertNotNull(regionName, "verify getRegionNameAsString()"); + assertNotNull(res.getRegionInfo(), "verify getRegionInfo()"); + assertNotNull(res.getTableName(), "verify getTableName()"); + assertNotNull(res.getTableNameAsString(), "verify getTableNameAsString()"); + assertNotNull(res.getServerName(), "verify getServerName()"); + assertNotNull(res.getServerNameAsString(), "verify getServerNameAsString()"); + assertNotNull(res.getColumnFamily(), "verify getColumnFamily()"); + assertNotNull(res.getColumnFamilyNameAsString(), "verify getColumnFamilyNameAsString()"); + assertTrue(res.isReadSuccess(), "read from region " + regionName + " succeeded"); + assertTrue(res.getReadLatency() > -1, "read took some time"); } } } @@ -254,39 +245,40 @@ public void testCanaryRegionTaskResult() throws Exception { String[] args = { "-writeSniffing", "-t", "10000", "testCanaryRegionTaskResult" }; assertEquals(0, ToolRunner.run(testingUtility.getConfiguration(), canary, args)); - assertTrue("canary should expect to scan at least 1 region", - sink.getTotalExpectedRegions() > 0); - assertTrue("there should be no read failures", sink.getReadFailureCount() == 0); - assertTrue("there should be no write failures", sink.getWriteFailureCount() == 0); - assertTrue("verify read success count > 0", sink.getReadSuccessCount() > 0); - assertTrue("verify write success count > 0", sink.getWriteSuccessCount() > 0); + assertTrue(sink.getTotalExpectedRegions() > 0, + "canary should expect to scan at least 1 region"); + assertTrue(sink.getReadFailureCount() == 0, "there should be no read failures"); + assertTrue(sink.getWriteFailureCount() == 0, "there should be no write failures"); + assertTrue(sink.getReadSuccessCount() > 0, "verify read success count > 0"); + assertTrue(sink.getWriteSuccessCount() > 0, "verify write success count > 0"); verify(sink, atLeastOnce()).publishReadTiming(isA(ServerName.class), 
isA(RegionInfo.class), isA(ColumnFamilyDescriptor.class), anyLong()); verify(sink, atLeastOnce()).publishWriteTiming(isA(ServerName.class), isA(RegionInfo.class), isA(ColumnFamilyDescriptor.class), anyLong()); - assertEquals("canary region success count should equal total expected regions", - sink.getReadSuccessCount() + sink.getWriteSuccessCount(), sink.getTotalExpectedRegions()); + assertEquals(sink.getReadSuccessCount() + sink.getWriteSuccessCount(), + sink.getTotalExpectedRegions(), + "canary region success count should equal total expected regions"); Map> regionMap = sink.getRegionMap(); - assertFalse("verify region map has size > 0", regionMap.isEmpty()); + assertFalse(regionMap.isEmpty(), "verify region map has size > 0"); for (String regionName : regionMap.keySet()) { for (CanaryTool.RegionTaskResult res : regionMap.get(regionName)) { - assertNotNull("verify getRegionNameAsString()", regionName); - assertNotNull("verify getRegionInfo()", res.getRegionInfo()); - assertNotNull("verify getTableName()", res.getTableName()); - assertNotNull("verify getTableNameAsString()", res.getTableNameAsString()); - assertNotNull("verify getServerName()", res.getServerName()); - assertNotNull("verify getServerNameAsString()", res.getServerNameAsString()); - assertNotNull("verify getColumnFamily()", res.getColumnFamily()); - assertNotNull("verify getColumnFamilyNameAsString()", res.getColumnFamilyNameAsString()); + assertNotNull(regionName, "verify getRegionNameAsString()"); + assertNotNull(res.getRegionInfo(), "verify getRegionInfo()"); + assertNotNull(res.getTableName(), "verify getTableName()"); + assertNotNull(res.getTableNameAsString(), "verify getTableNameAsString()"); + assertNotNull(res.getServerName(), "verify getServerName()"); + assertNotNull(res.getServerNameAsString(), "verify getServerNameAsString()"); + assertNotNull(res.getColumnFamily(), "verify getColumnFamily()"); + assertNotNull(res.getColumnFamilyNameAsString(), "verify getColumnFamilyNameAsString()"); if 
(regionName.contains(CanaryTool.DEFAULT_WRITE_TABLE_NAME.getNameAsString())) { - assertTrue("write to region " + regionName + " succeeded", res.isWriteSuccess()); - assertTrue("write took some time", res.getWriteLatency() > -1); + assertTrue(res.isWriteSuccess(), "write to region " + regionName + " succeeded"); + assertTrue(res.getWriteLatency() > -1, "write took some time"); } else { - assertTrue("read from region " + regionName + " succeeded", res.isReadSuccess()); - assertTrue("read took some time", res.getReadLatency() > -1); + assertTrue(res.isReadSuccess(), "read from region " + regionName + " succeeded"); + assertTrue(res.getReadLatency() > -1, "read took some time"); } } } @@ -308,11 +300,12 @@ public void testCanaryRegionTaskResult() throws Exception { // ) // ) // - @org.junit.Ignore + @Disabled @Test - public void testReadTableTimeouts() throws Exception { - final TableName[] tableNames = new TableName[] { TableName.valueOf(name.getMethodName() + "1"), - TableName.valueOf(name.getMethodName() + "2") }; + public void testReadTableTimeouts(TestInfo testInfo) throws Exception { + final TableName[] tableNames = + new TableName[] { TableName.valueOf(testInfo.getTestMethod().get().getName() + "1"), + TableName.valueOf(testInfo.getTestMethod().get().getName() + "2") }; // Create 2 test tables. 
for (int j = 0; j < 2; j++) { Table table = testingUtility.createTable(tableNames[j], new byte[][] { FAMILY }); @@ -329,15 +322,16 @@ public void testReadTableTimeouts() throws Exception { CanaryTool canary = new CanaryTool(executor, sink); String configuredTimeoutStr = tableNames[0].getNameAsString() + "=" + Long.MAX_VALUE + "," + tableNames[1].getNameAsString() + "=0"; - String[] args = { "-readTableTimeouts", configuredTimeoutStr, name.getMethodName() + "1", - name.getMethodName() + "2" }; + String[] args = + { "-readTableTimeouts", configuredTimeoutStr, testInfo.getTestMethod().get().getName() + "1", + testInfo.getTestMethod().get().getName() + "2" }; assertEquals(0, ToolRunner.run(testingUtility.getConfiguration(), canary, args)); verify(sink, times(tableNames.length)).initializeAndGetReadLatencyForTable(isA(String.class)); for (int i = 0; i < 2; i++) { - assertNotEquals("verify non-null read latency", null, - sink.getReadLatencyMap().get(tableNames[i].getNameAsString())); - assertNotEquals("verify non-zero read latency", 0L, - sink.getReadLatencyMap().get(tableNames[i].getNameAsString())); + assertNotEquals(null, sink.getReadLatencyMap().get(tableNames[i].getNameAsString()), + "verify non-null read latency"); + assertNotEquals(0L, sink.getReadLatencyMap().get(tableNames[i].getNameAsString()), + "verify non-zero read latency"); } // One table's timeout is set for 0 ms and thus, should lead to an error. 
verify(mockAppender, times(1)) @@ -364,8 +358,8 @@ public void testWriteTableTimeout() throws Exception { CanaryTool canary = new CanaryTool(executor, sink); String[] args = { "-writeSniffing", "-writeTableTimeout", String.valueOf(Long.MAX_VALUE) }; assertEquals(0, ToolRunner.run(testingUtility.getConfiguration(), canary, args)); - assertNotEquals("verify non-null write latency", null, sink.getWriteLatency()); - assertNotEquals("verify non-zero write latency", 0L, sink.getWriteLatency()); + assertNotEquals(null, sink.getWriteLatency(), "verify non-null write latency"); + assertNotEquals(0L, sink.getWriteLatency(), "verify non-zero write latency"); verify(mockAppender, times(1)) .append(argThat(new ArgumentMatcher() { @Override @@ -391,8 +385,8 @@ public boolean matches(org.apache.logging.log4j.core.LogEvent argument) { // by creating a table, there shouldn't be any region servers not serving any regions @Test - public void testRegionserverWithRegions() throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName()); + public void testRegionserverWithRegions(TestInfo testInfo) throws Exception { + final TableName tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); testingUtility.createTable(tableName, new byte[][] { FAMILY }); runRegionserverCanary(); verify(mockAppender, never()) @@ -406,8 +400,8 @@ public boolean matches(org.apache.logging.log4j.core.LogEvent argument) { } @Test - public void testRawScanConfig() throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName()); + public void testRawScanConfig(TestInfo testInfo) throws Exception { + final TableName tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); Table table = testingUtility.createTable(tableName, new byte[][] { FAMILY }); // insert some test rows for (int i = 0; i < 1000; i++) { @@ -419,22 +413,23 @@ public void testRawScanConfig() throws Exception { ExecutorService executor = new 
ScheduledThreadPoolExecutor(1); CanaryTool.RegionStdOutSink sink = spy(new CanaryTool.RegionStdOutSink()); CanaryTool canary = new CanaryTool(executor, sink); - String[] args = { "-t", "10000", name.getMethodName() }; + String[] args = { "-t", "10000", testInfo.getTestMethod().get().getName() }; org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration(testingUtility.getConfiguration()); conf.setBoolean(HConstants.HBASE_CANARY_READ_RAW_SCAN_KEY, true); assertEquals(0, ToolRunner.run(conf, canary, args)); verify(sink, atLeastOnce()).publishReadTiming(isA(ServerName.class), isA(RegionInfo.class), isA(ColumnFamilyDescriptor.class), anyLong()); - assertEquals("verify no read error count", 0, canary.getReadFailures().size()); + assertEquals(0, canary.getReadFailures().size(), "verify no read error count"); } private void runRegionserverCanary() throws Exception { ExecutorService executor = new ScheduledThreadPoolExecutor(1); CanaryTool canary = new CanaryTool(executor, new CanaryTool.RegionServerStdOutSink()); String[] args = { "-t", "10000", "-regionserver" }; - assertEquals(0, ToolRunner.run(testingUtility.getConfiguration(), canary, args)); - assertEquals("verify no read error count", 0, canary.getReadFailures().size()); + assertEquals(0, ToolRunner.run(testingUtility.getConfiguration(), canary, args), + "verify no read error count"); + assertEquals(0, canary.getReadFailures().size(), "verify no read error count"); } private void testZookeeperCanaryWithArgs(String[] args) throws Exception { @@ -463,31 +458,33 @@ public void testWebUI() throws Exception { // Test that old canary status page URL redirects to JSP URL oldPageUrl = new URL("http://localhost:" + infoPort + "/canary-status"); String oldPageContent = getPageContent(oldPageUrl); - assertTrue("expected=canary.jsp, content=" + oldPageContent, - oldPageContent.contains("canary.jsp")); + assertTrue(oldPageContent.contains("canary.jsp"), + "expected=canary.jsp, content=" + 
oldPageContent); // Test web UI page content URL url = new URL("http://localhost:" + infoPort + "/canary.jsp"); String page = getPageContent(url); - assertTrue("Page should contain page title.", page.contains("Canary")); + assertTrue(page.contains("Canary"), "Page should contain page title."); - assertTrue("Page should contain Failed Servers header.", - page.contains("

Failed Servers

")); - assertTrue("Page should have zero Failed Servers.", - page.contains("Total Failed Servers: 0")); + assertTrue(page.contains("

Failed Servers

"), + "Page should contain Failed Servers header."); + assertTrue(page.contains("Total Failed Servers: 0"), + "Page should have zero Failed Servers."); - assertTrue("Page should contain Failed Tables header.", - page.contains("

Failed Tables

")); - assertTrue("Page should have zero Failed Tables.", - page.contains("Total Failed Tables: 0")); + assertTrue(page.contains("

Failed Tables

"), + "Page should contain Failed Tables header."); + assertTrue(page.contains("Total Failed Tables: 0"), + "Page should have zero Failed Tables."); - assertTrue("Page should contain Software Attributes header.", - page.contains("

Software Attributes

")); - assertTrue("Page should contain JVM version.", - page.contains("" + JvmVersion.getVersion() + "")); - assertTrue("Page should contain HBase version.", page - .contains("" + VersionInfo.getVersion() + ", r" + VersionInfo.getRevision() + "")); + assertTrue(page.contains("

Software Attributes

"), + "Page should contain Software Attributes header."); + assertTrue(page.contains("" + JvmVersion.getVersion() + ""), + "Page should contain JVM version."); + assertTrue( + page + .contains("" + VersionInfo.getVersion() + ", r" + VersionInfo.getRevision() + ""), + "Page should contain HBase version."); // Stop Canary tool daemon executorService.shutdown(); @@ -518,23 +515,24 @@ public void testWebUIWithFailures() throws Exception { URL url = new URL("http://localhost:" + infoPort + "/canary.jsp"); String page = getPageContent(url); - assertTrue("Page should contain page title.", page.contains("Canary")); - - assertTrue("Page should contain Failed Servers header.", - page.contains("

Failed Servers

")); - assertTrue("Page should contain the failed server link.", page.contains( - "asf903.gq1.ygridcore.net,52690,1517835491385")); - assertTrue("Page should summarize 1 failed server.", - page.contains("Total Failed Servers: 1")); - - assertTrue("Page should contain Failed Tables header.", - page.contains("

Failed Tables

")); - assertTrue("Page should contain awesome-table as failed table link.", - page.contains("awesome-table")); - assertTrue("Page should contain awesome-table-two as failed table link.", - page.contains("awesome-table-two")); - assertTrue("Page should summarize 2 failed tables.", - page.contains("Total Failed Tables: 2")); + assertTrue(page.contains("Canary"), "Page should contain page title."); + + assertTrue(page.contains("

Failed Servers

"), + "Page should contain Failed Servers header."); + assertTrue(page.contains( + "asf903.gq1.ygridcore.net,52690,1517835491385"), + "Page should contain the failed server link."); + assertTrue(page.contains("Total Failed Servers: 1"), + "Page should summarize 1 failed server."); + + assertTrue(page.contains("

Failed Tables

"), + "Page should contain Failed Tables header."); + assertTrue(page.contains("awesome-table"), + "Page should contain awesome-table as failed table link."); + assertTrue(page.contains("awesome-table-two"), + "Page should contain awesome-table-two as failed table link."); + assertTrue(page.contains("Total Failed Tables: 2"), + "Page should summarize 2 failed tables."); // Stop Canary tool daemon executorService.shutdown(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/coprocessor/CoprocessorValidatorTest.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/coprocessor/CoprocessorValidatorTest.java index 807f4fb7a62b..a7e53aec4a42 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/coprocessor/CoprocessorValidatorTest.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/coprocessor/CoprocessorValidatorTest.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.tool.coprocessor; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; @@ -34,7 +34,6 @@ import java.util.jar.JarOutputStream; import java.util.regex.Pattern; import java.util.zip.ZipEntry; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Admin; @@ -44,20 +43,15 @@ import org.apache.hadoop.hbase.coprocessor.ObserverContext; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.tool.coprocessor.CoprocessorViolation.Severity; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import 
org.apache.hbase.thirdparty.com.google.common.base.Throwables; import org.apache.hbase.thirdparty.com.google.common.collect.Lists; import org.apache.hbase.thirdparty.com.google.common.io.ByteStreams; -@Category({ SmallTests.class }) +@Tag(SmallTests.TAG) public class CoprocessorValidatorTest { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(CoprocessorValidatorTest.class); - private CoprocessorValidator validator; public CoprocessorValidatorTest() { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZooKeeperACL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZooKeeperACL.java index 9240d5cce2c5..bf3c1333ce20 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZooKeeperACL.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZooKeeperACL.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.zookeeper; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.File; import java.io.FileOutputStream; @@ -30,7 +30,6 @@ import java.util.List; import javax.security.auth.login.AppConfigurationEntry; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; @@ -41,29 +40,25 @@ import org.apache.zookeeper.ZooDefs; import org.apache.zookeeper.data.ACL; import org.apache.zookeeper.data.Stat; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import 
org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@Category({ ZKTests.class, MediumTests.class }) +@Tag(ZKTests.TAG) +@Tag(MediumTests.TAG) public class TestZooKeeperACL { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestZooKeeperACL.class); - private final static Logger LOG = LoggerFactory.getLogger(TestZooKeeperACL.class); private final static HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); private static ZKWatcher zkw; private static boolean secureZKAvailable; - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { File saslConfFile = File.createTempFile("tmp", "jaas.conf"); try (OutputStreamWriter fwriter = @@ -92,7 +87,7 @@ public static void setUpBeforeClass() throws Exception { TestZooKeeper.class.getName(), null); } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { if (!secureZKAvailable) { return; @@ -100,7 +95,7 @@ public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } - @Before + @BeforeEach public void setUp() throws Exception { if (!secureZKAvailable) { return;