diff --git a/hbase-mapreduce/pom.xml b/hbase-mapreduce/pom.xml index 9854ccf98330..8b2063e64b8d 100644 --- a/hbase-mapreduce/pom.xml +++ b/hbase-mapreduce/pom.xml @@ -161,7 +161,7 @@ org.mockito - mockito-core + mockito-junit-jupiter test @@ -191,11 +191,6 @@ junit-jupiter-params test - - org.junit.vintage - junit-vintage-engine - test - org.slf4j jcl-over-slf4j diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestDriver.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestDriver.java index 6c49a43bf463..8b8d240088da 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestDriver.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestDriver.java @@ -20,21 +20,17 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.MapReduceTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.util.ProgramDriver; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.mockito.Mockito; -@Category({ MapReduceTests.class, SmallTests.class }) +@Tag(MapReduceTests.TAG) +@Tag(SmallTests.TAG) public class TestDriver { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestDriver.class); - @Test public void testDriverMainMethod() throws Throwable { ProgramDriver programDriverMock = mock(ProgramDriver.class); diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestGroupingTableMap.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestGroupingTableMap.java index 2912fd4d025c..5f1a05684456 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestGroupingTableMap.java +++ 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestGroupingTableMap.java @@ -17,8 +17,9 @@ */ package org.apache.hadoop.hbase.mapred; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; @@ -32,7 +33,6 @@ import java.util.concurrent.atomic.AtomicBoolean; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; @@ -42,20 +42,15 @@ import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.OutputCollector; import org.apache.hadoop.mapred.Reporter; -import org.junit.Assert; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList; -@Category({ MapReduceTests.class, SmallTests.class }) +@Tag(MapReduceTests.TAG) +@Tag(SmallTests.TAG) public class TestGroupingTableMap { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestGroupingTableMap.class); - @Test @SuppressWarnings("unchecked") public void shouldNotCallCollectonSinceFindUniqueKeyValueMoreThanOnes() throws Exception { @@ -156,7 +151,7 @@ public void collect(ImmutableBytesWritable arg, Result result) throws IOExceptio gTableMap.map(null, result, outputCollector, reporter); verify(result).listCells(); - Assert.assertTrue("Output not received", outputCollected.get()); + 
assertTrue(outputCollected.get(), "Output not received"); final byte[] firstPartValue = Bytes.toBytes("238947928"); final byte[] secondPartValue = Bytes.toBytes("4678456942345"); diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestIdentityTableMap.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestIdentityTableMap.java index 96e25b51f659..14df668d79a6 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestIdentityTableMap.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestIdentityTableMap.java @@ -22,25 +22,20 @@ import static org.mockito.Mockito.verify; import java.io.IOException; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.testclassification.MapReduceTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.mapred.OutputCollector; import org.apache.hadoop.mapred.Reporter; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.mockito.Mockito; -@Category({ MapReduceTests.class, SmallTests.class }) +@Tag(MapReduceTests.TAG) +@Tag(SmallTests.TAG) public class TestIdentityTableMap { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestIdentityTableMap.class); - @Test @SuppressWarnings({ "deprecation", "unchecked" }) public void shouldCollectPredefinedTimes() throws IOException { diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestMultiTableSnapshotInputFormat.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestMultiTableSnapshotInputFormat.java index c042bd35a56d..065acfafc0dc 100644 --- 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestMultiTableSnapshotInputFormat.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestMultiTableSnapshotInputFormat.java @@ -17,14 +17,13 @@ */ package org.apache.hadoop.hbase.mapred; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.Iterator; import java.util.List; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; @@ -37,21 +36,17 @@ import org.apache.hadoop.mapred.OutputCollector; import org.apache.hadoop.mapred.Reporter; import org.apache.hadoop.mapred.RunningJob; -import org.junit.ClassRule; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hbase.thirdparty.com.google.common.collect.Lists; -@Category({ VerySlowMapReduceTests.class, LargeTests.class }) +@Tag(VerySlowMapReduceTests.TAG) +@Tag(LargeTests.TAG) public class TestMultiTableSnapshotInputFormat extends org.apache.hadoop.hbase.mapreduce.TestMultiTableSnapshotInputFormat { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestMultiTableSnapshotInputFormat.class); - private static final Logger LOG = LoggerFactory.getLogger(TestMultiTableSnapshotInputFormat.class); diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestRowCounter.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestRowCounter.java index 0f71055c6a72..51bae4b52139 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestRowCounter.java +++ 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestRowCounter.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.mapred; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyLong; import static org.mockito.Mockito.mock; @@ -28,7 +28,6 @@ import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.PrintStream; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; @@ -38,20 +37,16 @@ import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.OutputCollector; import org.apache.hadoop.mapred.Reporter; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.mockito.Mockito; import org.apache.hbase.thirdparty.com.google.common.base.Joiner; -@Category({ MapReduceTests.class, MediumTests.class }) +@Tag(MapReduceTests.TAG) +@Tag(MediumTests.TAG) public class TestRowCounter { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestRowCounter.class); - @Test @SuppressWarnings("deprecation") public void shouldPrintUsage() throws Exception { diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestSplitTable.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestSplitTable.java index 3e093430a92e..60ba178eea58 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestSplitTable.java 
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestSplitTable.java @@ -17,31 +17,21 @@ */ package org.apache.hadoop.hbase.mapred; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.testclassification.MapReduceTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.Assert; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; - -@Category({ MapReduceTests.class, SmallTests.class }) -public class TestSplitTable { - - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestSplitTable.class); +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; - @Rule - public TestName name = new TestName(); +@Tag(MapReduceTests.TAG) +@Tag(SmallTests.TAG) +public class TestSplitTable { @Test @SuppressWarnings({ "deprecation", "SelfComparison" }) @@ -104,16 +94,16 @@ public void testSplitTableEquals() { @Test @SuppressWarnings("deprecation") - public void testToString() { - TableSplit split = new TableSplit(TableName.valueOf(name.getMethodName()), + public void testToString(TestInfo testInfo) { + TableSplit split = new TableSplit(TableName.valueOf(testInfo.getTestMethod().get().getName()), Bytes.toBytes("row-start"), Bytes.toBytes("row-end"), "location"); - String str = "HBase table split(table name: " + name.getMethodName() + String str = "HBase table split(table name: " + 
testInfo.getTestMethod().get().getName() + ", start row: row-start, " + "end row: row-end, region location: location)"; - Assert.assertEquals(str, split.toString()); + assertEquals(str, split.toString()); split = new TableSplit((TableName) null, null, null, null); str = "HBase table split(table name: null, start row: null, " + "end row: null, region location: null)"; - Assert.assertEquals(str, split.toString()); + assertEquals(str, split.toString()); } } diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java index d15d3a574640..4b633325c5b8 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java @@ -17,9 +17,10 @@ */ package org.apache.hadoop.hbase.mapred; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.doReturn; @@ -33,7 +34,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CompareOperator; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.NotServingRegionException; @@ -61,12 +61,11 @@ import org.apache.hadoop.mapred.Reporter; import org.apache.hadoop.mapred.RunningJob; import org.apache.hadoop.mapred.lib.NullOutputFormat; -import org.junit.AfterClass; 
-import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; import org.slf4j.Logger; @@ -75,13 +74,10 @@ /** * This tests the TableInputFormat and its recovery semantics */ -@Category({ MapReduceTests.class, LargeTests.class }) +@Tag(MapReduceTests.TAG) +@Tag(LargeTests.TAG) public class TestTableInputFormat { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestTableInputFormat.class); - private static final Logger LOG = LoggerFactory.getLogger(TestTableInputFormat.class); private final static HBaseTestingUtil UTIL = new HBaseTestingUtil(); @@ -90,17 +86,17 @@ public class TestTableInputFormat { private static final byte[][] columns = new byte[][] { FAMILY }; - @BeforeClass + @BeforeAll public static void beforeClass() throws Exception { UTIL.startMiniCluster(); } - @AfterClass + @AfterAll public static void afterClass() throws Exception { UTIL.shutdownMiniCluster(); } - @Before + @BeforeEach public void before() throws IOException { LOG.info("before"); UTIL.ensureSomeRegionServersAvailable(1); @@ -265,10 +261,10 @@ public void testTableRecordReaderScannerFail() throws IOException { /** * Run test assuming Scanner IOException failure using mapred api, */ - @Test(expected = IOException.class) + @Test public void testTableRecordReaderScannerFailTwice() throws IOException { Table htable = createIOEScannerTable(Bytes.toBytes("table3"), 2); - runTestMapred(htable); + assertThrows(IOException.class, () -> runTestMapred(htable)); } /** @@ -285,10 +281,11 @@ public void testTableRecordReaderScannerTimeout() throws IOException { * Run test assuming 
NotServingRegionException using mapred api. * @throws org.apache.hadoop.hbase.DoNotRetryIOException */ - @Test(expected = org.apache.hadoop.hbase.NotServingRegionException.class) + @Test public void testTableRecordReaderScannerTimeoutTwice() throws IOException { Table htable = createDNRIOEScannerTable(Bytes.toBytes("table5"), 2); - runTestMapred(htable); + assertThrows(org.apache.hadoop.hbase.NotServingRegionException.class, + () -> runTestMapred(htable)); } /** @@ -329,19 +326,31 @@ void testInputFormat(Class clazz) throws IOException { job.setNumReduceTasks(0); LOG.debug("submitting job."); final RunningJob run = JobClient.runJob(job); - assertTrue("job failed!", run.isSuccessful()); - assertEquals("Saw the wrong number of instances of the filtered-for row.", 2, run.getCounters() - .findCounter(TestTableInputFormat.class.getName() + ":row", "aaa").getCounter()); - assertEquals("Saw any instances of the filtered out row.", 0, run.getCounters() - .findCounter(TestTableInputFormat.class.getName() + ":row", "bbb").getCounter()); - assertEquals("Saw the wrong number of instances of columnA.", 1, run.getCounters() - .findCounter(TestTableInputFormat.class.getName() + ":family", "columnA").getCounter()); - assertEquals("Saw the wrong number of instances of columnB.", 1, run.getCounters() - .findCounter(TestTableInputFormat.class.getName() + ":family", "columnB").getCounter()); - assertEquals("Saw the wrong count of values for the filtered-for row.", 2, run.getCounters() - .findCounter(TestTableInputFormat.class.getName() + ":value", "value aaa").getCounter()); - assertEquals("Saw the wrong count of values for the filtered-out row.", 0, run.getCounters() - .findCounter(TestTableInputFormat.class.getName() + ":value", "value bbb").getCounter()); + assertTrue(run.isSuccessful(), "job failed!"); + assertEquals(2, + run.getCounters().findCounter(TestTableInputFormat.class.getName() + ":row", "aaa") + .getCounter(), + "Saw the wrong number of instances of the filtered-for 
row."); + assertEquals(0, + run.getCounters().findCounter(TestTableInputFormat.class.getName() + ":row", "bbb") + .getCounter(), + "Saw any instances of the filtered out row."); + assertEquals(1, + run.getCounters().findCounter(TestTableInputFormat.class.getName() + ":family", "columnA") + .getCounter(), + "Saw the wrong number of instances of columnA."); + assertEquals(1, + run.getCounters().findCounter(TestTableInputFormat.class.getName() + ":family", "columnB") + .getCounter(), + "Saw the wrong number of instances of columnB."); + assertEquals(2, + run.getCounters().findCounter(TestTableInputFormat.class.getName() + ":value", "value aaa") + .getCounter(), + "Saw the wrong count of values for the filtered-for row."); + assertEquals(0, + run.getCounters().findCounter(TestTableInputFormat.class.getName() + ":value", "value bbb") + .getCounter(), + "Saw the wrong count of values for the filtered-out row."); } public static class ExampleVerifier implements TableMap { diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java index 2820d9111277..92b6301d0b4d 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java @@ -17,12 +17,11 @@ */ package org.apache.hadoop.hbase.mapred; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.File; import java.io.IOException; import org.apache.hadoop.fs.FileUtil; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Table; @@ -37,8 +36,7 @@ import org.apache.hadoop.mapred.OutputCollector; import org.apache.hadoop.mapred.Reporter; import org.apache.hadoop.mapred.RunningJob; -import 
org.junit.ClassRule; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -47,14 +45,11 @@ * simple - take every row in the table, reverse the value of a particular cell, and write it back * to the table. */ -@Category({ MapReduceTests.class, LargeTests.class }) +@Tag(MapReduceTests.TAG) +@Tag(LargeTests.TAG) @SuppressWarnings("deprecation") public class TestTableMapReduce extends TestTableMapReduceBase { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestTableMapReduce.class); - private static final Logger LOG = LoggerFactory.getLogger(TestTableMapReduce.class.getName()); protected Logger getLog() { diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java index 77ac55a1b6d0..1ec3df6152cd 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java @@ -17,8 +17,9 @@ */ package org.apache.hadoop.hbase.mapred; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.File; import java.io.IOException; @@ -28,7 +29,6 @@ import java.util.Set; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileUtil; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Put; @@ -44,26 +44,21 @@ import org.apache.hadoop.mapred.OutputCollector; import org.apache.hadoop.mapred.Reporter; import 
org.apache.hadoop.mapred.RunningJob; -import org.junit.AfterClass; -import org.junit.Assert; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableMap; import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableSet; -@Category({ MapReduceTests.class, LargeTests.class }) +@Tag(MapReduceTests.TAG) +@Tag(LargeTests.TAG) public class TestTableMapReduceUtil { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestTableMapReduceUtil.class); - private static final Logger LOG = LoggerFactory.getLogger(TestTableMapReduceUtil.class); private static Table presidentsTable; @@ -88,18 +83,18 @@ public class TestTableMapReduceUtil { private static final HBaseTestingUtil UTIL = new HBaseTestingUtil(); - @BeforeClass + @BeforeAll public static void beforeClass() throws Exception { UTIL.startMiniCluster(); presidentsTable = createAndFillTable(TableName.valueOf(TABLE_NAME)); } - @AfterClass + @AfterAll public static void afterClass() throws Exception { UTIL.shutdownMiniCluster(); } - @Before + @BeforeEach public void before() throws IOException { LOG.info("before"); UTIL.ensureSomeRegionServersAvailable(1); @@ -136,7 +131,7 @@ private static void createPutCommand(Table table) throws IOException { */ @Test public void shouldNumberOfReduceTaskNotExceedNumberOfRegionsForGivenTable() throws IOException { - Assert.assertNotNull(presidentsTable); + assertNotNull(presidentsTable); Configuration cfg = UTIL.getConfiguration(); JobConf jobConf = new JobConf(cfg); TableMapReduceUtil.setNumReduceTasks(TABLE_NAME, 
jobConf); diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableOutputFormatConnectionExhaust.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableOutputFormatConnectionExhaust.java index fec2c8cf0204..056058251499 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableOutputFormatConnectionExhaust.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableOutputFormatConnectionExhaust.java @@ -17,22 +17,20 @@ */ package org.apache.hadoop.hbase.mapred; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.fail; import java.io.IOException; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.RecordWriter; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -41,13 +39,9 @@ * we can have many instances and not leak connections. This test creates a few TableOutputFormats * and shouldn't fail due to ZK connection exhaustion. 
*/ -@Category(MediumTests.class) +@Tag(MediumTests.TAG) public class TestTableOutputFormatConnectionExhaust { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestTableOutputFormatConnectionExhaust.class); - private static final Logger LOG = LoggerFactory.getLogger(TestTableOutputFormatConnectionExhaust.class); @@ -55,7 +49,7 @@ public class TestTableOutputFormatConnectionExhaust { static final String TABLE = "TestTableOutputFormatConnectionExhaust"; static final String FAMILY = "family"; - @BeforeClass + @BeforeAll public static void beforeClass() throws Exception { // Default in ZookeeperMiniCluster is 1000, setting artificially low to trigger exhaustion. // need min of 7 to properly start the default mini HBase cluster @@ -63,12 +57,12 @@ public static void beforeClass() throws Exception { UTIL.startMiniCluster(); } - @AfterClass + @AfterAll public static void afterClass() throws Exception { UTIL.shutdownMiniCluster(); } - @Before + @BeforeEach public void before() throws IOException { LOG.info("before"); UTIL.ensureSomeRegionServersAvailable(1); diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableSnapshotInputFormat.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableSnapshotInputFormat.java index 3c1b717d5abf..9f008b82857f 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableSnapshotInputFormat.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableSnapshotInputFormat.java @@ -18,12 +18,13 @@ package org.apache.hadoop.hbase.mapred; import static org.apache.hadoop.hbase.mapreduce.TableSnapshotInputFormatImpl.SNAPSHOT_INPUTFORMAT_LOCALITY_ENABLED_DEFAULT; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.mock; import java.io.IOException; import java.util.Iterator; import org.apache.hadoop.fs.Path; 
-import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; @@ -45,28 +46,27 @@ import org.apache.hadoop.mapred.Reporter; import org.apache.hadoop.mapred.RunningJob; import org.apache.hadoop.mapred.lib.NullOutputFormat; -import org.junit.Assert; -import org.junit.ClassRule; -import org.junit.Ignore; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; - -@Category({ VerySlowMapReduceTests.class, LargeTests.class }) +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; + +@Tag(VerySlowMapReduceTests.TAG) +@Tag(LargeTests.TAG) public class TestTableSnapshotInputFormat extends TableSnapshotInputFormatTestBase { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestTableSnapshotInputFormat.class); - private static final byte[] aaa = Bytes.toBytes("aaa"); private static final byte[] after_zzz = Bytes.toBytes("zz{"); // 'z' + 1 => '{' private static final String COLUMNS = Bytes.toString(FAMILIES[0]) + " " + Bytes.toString(FAMILIES[1]); - @Rule - public TestName name = new TestName(); + private String methodName; + + @BeforeEach + public void beforeEach(TestInfo testInfo) { + methodName = testInfo.getTestMethod().get().getName(); + } @Override protected byte[] getStartRow() { @@ -108,7 +108,7 @@ public void close() { @Test public void testInitTableSnapshotMapperJobConfig() throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName()); + final TableName tableName = TableName.valueOf(methodName); String snapshotName = "foo"; try { @@ -122,11 +122,11 @@ public void testInitTableSnapshotMapperJobConfig() throws Exception { // TODO: would be better to examine 
directly the cache instance that results from this // config. Currently this is not possible because BlockCache initialization is static. - Assert.assertEquals("Snapshot job should be configured for default LruBlockCache.", - HConstants.HFILE_BLOCK_CACHE_SIZE_DEFAULT, - job.getFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, -1), 0.01); - Assert.assertEquals("Snapshot job should not use BucketCache.", 0, - job.getFloat("hbase.bucketcache.size", -1), 0.01); + assertEquals(HConstants.HFILE_BLOCK_CACHE_SIZE_DEFAULT, + job.getFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, -1), 0.01, + "Snapshot job should be configured for default LruBlockCache."); + assertEquals(0, job.getFloat("hbase.bucketcache.size", -1), 0.01, + "Snapshot job should not use BucketCache."); } finally { UTIL.getAdmin().deleteSnapshot(snapshotName); UTIL.deleteTable(tableName); @@ -168,7 +168,7 @@ public void testRestoreSnapshotDoesNotCreateBackRefLinksInit(TableName tableName @Override protected void testWithMockedMapReduce(HBaseTestingUtil util, String snapshotName, int numRegions, int numSplitsPerRegion, int expectedNumSplits, boolean setLocalityEnabledTo) throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName()); + final TableName tableName = TableName.valueOf(methodName); try { createTableAndSnapshot(util, tableName, snapshotName, getStartRow(), getEndRow(), numRegions); @@ -202,7 +202,7 @@ private void verifyWithMockedMapReduce(JobConf job, int numRegions, int expected TableSnapshotInputFormat tsif = new TableSnapshotInputFormat(); InputSplit[] splits = tsif.getSplits(job, 0); - Assert.assertEquals(expectedNumSplits, splits.length); + assertEquals(expectedNumSplits, splits.length); HBaseTestingUtil.SeenRowTracker rowTracker = new HBaseTestingUtil.SeenRowTracker(startRow, stopRow); @@ -214,7 +214,7 @@ private void verifyWithMockedMapReduce(JobConf job, int numRegions, int expected for (int i = 0; i < splits.length; i++) { // validate input split InputSplit split = 
splits[i]; - Assert.assertTrue(split instanceof TableSnapshotInputFormat.TableSnapshotRegionSplit); + assertTrue(split instanceof TableSnapshotInputFormat.TableSnapshotRegionSplit); if (localityEnabled) { // When localityEnabled is true, meant to verify split.getLocations() // by the following statement: @@ -222,9 +222,9 @@ private void verifyWithMockedMapReduce(JobConf job, int numRegions, int expected // However, getLocations() of some splits could return an empty array (length is 0), // so drop the verification on length. // TODO: investigate how to verify split.getLocations() when localityEnabled is true - Assert.assertTrue(split.getLocations() != null); + assertTrue(split.getLocations() != null); } else { - Assert.assertTrue(split.getLocations() != null && split.getLocations().length == 0); + assertTrue(split.getLocations() != null && split.getLocations().length == 0); } // validate record reader @@ -290,7 +290,7 @@ public static void doTestWithMapReduce(HBaseTestingUtil util, TableName tableNam jobConf.setOutputFormat(NullOutputFormat.class); RunningJob job = JobClient.runJob(jobConf); - Assert.assertTrue(job.isSuccessful()); + assertTrue(job.isSuccessful()); } finally { if (!shutdownCluster) { util.getAdmin().deleteSnapshot(snapshotName); @@ -299,7 +299,7 @@ public static void doTestWithMapReduce(HBaseTestingUtil util, TableName tableNam } } - @Ignore // Ignored in mapred package because it keeps failing but allowed in mapreduce package. + @Disabled // Ignored in mapred package because it keeps failing but allowed in mapreduce package. 
@Test public void testWithMapReduceMultipleMappersPerRegion() throws Exception { testWithMapReduce(UTIL, "testWithMapReduceMultiRegion", 10, 5, 50, false); diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/CopyTableTestBase.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/CopyTableTestBase.java index d7648c26406d..da07981320d7 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/CopyTableTestBase.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/CopyTableTestBase.java @@ -17,10 +17,10 @@ */ package org.apache.hadoop.hbase.mapreduce; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import org.apache.commons.lang3.ArrayUtils; @@ -39,8 +39,7 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.util.ToolRunner; -import org.junit.Rule; -import org.junit.rules.TestName; +import org.junit.jupiter.api.TestInfo; /** * Base class for testing CopyTable MR tool. 
@@ -55,9 +54,6 @@ public abstract class CopyTableTestBase { protected static final byte[] FAMILY_B = Bytes.toBytes(FAMILY_B_STRING); protected static final byte[] QUALIFIER = Bytes.toBytes("q"); - @Rule - public TestName name = new TestName(); - protected abstract Table createSourceTable(TableDescriptor desc) throws Exception; protected abstract Table createTargetTable(TableDescriptor desc) throws Exception; @@ -91,9 +87,10 @@ protected final void verifyRows(Table t, byte[] family, byte[] column) throws IO } } - protected final void doCopyTableTest(Configuration conf, boolean bulkload) throws Exception { - TableName tableName1 = TableName.valueOf(name.getMethodName() + "1"); - TableName tableName2 = TableName.valueOf(name.getMethodName() + "2"); + protected final void doCopyTableTest(Configuration conf, boolean bulkload, TestInfo testInfo) + throws Exception { + TableName tableName1 = TableName.valueOf(testInfo.getTestMethod().get().getName() + "1"); + TableName tableName2 = TableName.valueOf(testInfo.getTestMethod().get().getName() + "2"); byte[] family = Bytes.toBytes("family"); byte[] column = Bytes.toBytes("c1"); TableDescriptor desc1 = TableDescriptorBuilder.newBuilder(tableName1) @@ -123,10 +120,10 @@ protected final void doCopyTableTest(Configuration conf, boolean bulkload) throw } } - protected final void doCopyTableTestWithMob(Configuration conf, boolean bulkload) - throws Exception { - TableName tableName1 = TableName.valueOf(name.getMethodName() + "1"); - TableName tableName2 = TableName.valueOf(name.getMethodName() + "2"); + protected final void doCopyTableTestWithMob(Configuration conf, boolean bulkload, + TestInfo testInfo) throws Exception { + TableName tableName1 = TableName.valueOf(testInfo.getTestMethod().get().getName() + "1"); + TableName tableName2 = TableName.valueOf(testInfo.getTestMethod().get().getName() + "2"); byte[] family = Bytes.toBytes("mob"); byte[] column = Bytes.toBytes("c1"); @@ -163,15 +160,15 @@ protected final void 
doCopyTableTestWithMob(Configuration conf, boolean bulkload Result r = t2.get(g); assertEquals(1, r.size()); assertTrue(CellUtil.matchingQualifier(r.rawCells()[0], column)); - assertEquals("compare row values between two tables", - t1.getDescriptor().getValue("row" + i), t2.getDescriptor().getValue("row" + i)); + assertEquals(t1.getDescriptor().getValue("row" + i), t2.getDescriptor().getValue("row" + i), + "compare row values between two tables"); } - assertEquals("compare count of mob rows after table copy", MobTestUtil.countMobRows(t1), - MobTestUtil.countMobRows(t2)); - assertEquals("compare count of mob row values between two tables", - t1.getDescriptor().getValues().size(), t2.getDescriptor().getValues().size()); - assertTrue("The mob row count is 0 but should be > 0", MobTestUtil.countMobRows(t2) > 0); + assertEquals(MobTestUtil.countMobRows(t1), MobTestUtil.countMobRows(t2), + "compare count of mob rows after table copy"); + assertEquals(t1.getDescriptor().getValues().size(), t2.getDescriptor().getValues().size(), + "compare count of mob row values between two tables"); + assertTrue(MobTestUtil.countMobRows(t2) > 0, "The mob row count is 0 but should be > 0"); } finally { dropSourceTable(tableName1); dropTargetTable(tableName2); @@ -183,9 +180,9 @@ protected final boolean runCopy(Configuration conf, String[] args) throws Except return status == 0; } - protected final void testStartStopRow(Configuration conf) throws Exception { - final TableName tableName1 = TableName.valueOf(name.getMethodName() + "1"); - final TableName tableName2 = TableName.valueOf(name.getMethodName() + "2"); + protected final void testStartStopRow(Configuration conf, TestInfo testInfo) throws Exception { + final TableName tableName1 = TableName.valueOf(testInfo.getTestMethod().get().getName() + "1"); + final TableName tableName2 = TableName.valueOf(testInfo.getTestMethod().get().getName() + "2"); final byte[] family = Bytes.toBytes("family"); final byte[] column = Bytes.toBytes("c1"); 
final byte[] row0 = Bytes.toBytesBinary("\\x01row0"); @@ -231,9 +228,11 @@ protected final void testStartStopRow(Configuration conf) throws Exception { } } - protected final void testRenameFamily(Configuration conf) throws Exception { - TableName sourceTable = TableName.valueOf(name.getMethodName() + "-source"); - TableName targetTable = TableName.valueOf(name.getMethodName() + "-target"); + protected final void testRenameFamily(Configuration conf, TestInfo testInfo) throws Exception { + TableName sourceTable = + TableName.valueOf(testInfo.getTestMethod().get().getName() + "-source"); + TableName targetTable = + TableName.valueOf(testInfo.getTestMethod().get().getName() + "-target"); TableDescriptor desc1 = TableDescriptorBuilder.newBuilder(sourceTable) .setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY_A)) diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/CopyTableToPeerClusterTestBase.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/CopyTableToPeerClusterTestBase.java index d9219c9420f4..e089b3037d81 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/CopyTableToPeerClusterTestBase.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/CopyTableToPeerClusterTestBase.java @@ -17,16 +17,17 @@ */ package org.apache.hadoop.hbase.mapreduce; -import static org.junit.Assert.assertFalse; +import static org.junit.jupiter.api.Assertions.assertFalse; import org.apache.commons.lang3.ArrayUtils; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.client.TableDescriptor; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; /** * Test CopyTable between clusters @@ -37,13 +38,13 @@ 
public abstract class CopyTableToPeerClusterTestBase extends CopyTableTestBase { protected static final HBaseTestingUtil UTIL2 = new HBaseTestingUtil(); - @BeforeClass + @BeforeAll public static void beforeClass() throws Exception { UTIL1.startMiniCluster(3); UTIL2.startMiniCluster(3); } - @AfterClass + @AfterAll public static void afterClass() throws Exception { UTIL1.shutdownMiniCluster(); UTIL2.shutdownMiniCluster(); @@ -78,35 +79,35 @@ protected String[] getPeerClusterOptions() throws Exception { * Simple end-to-end test */ @Test - public void testCopyTable() throws Exception { - doCopyTableTest(UTIL1.getConfiguration(), false); + public void testCopyTable(TestInfo testInfo) throws Exception { + doCopyTableTest(UTIL1.getConfiguration(), false, testInfo); } /** * Simple end-to-end test on table with MOB */ @Test - public void testCopyTableWithMob() throws Exception { - doCopyTableTestWithMob(UTIL1.getConfiguration(), false); + public void testCopyTableWithMob(TestInfo testInfo) throws Exception { + doCopyTableTestWithMob(UTIL1.getConfiguration(), false, testInfo); } @Test - public void testStartStopRow() throws Exception { - testStartStopRow(UTIL1.getConfiguration()); + public void testStartStopRow(TestInfo testInfo) throws Exception { + testStartStopRow(UTIL1.getConfiguration(), testInfo); } /** * Test copy of table from sourceTable to targetTable all rows from family a */ @Test - public void testRenameFamily() throws Exception { - testRenameFamily(UTIL1.getConfiguration()); + public void testRenameFamily(TestInfo testInfo) throws Exception { + testRenameFamily(UTIL1.getConfiguration(), testInfo); } @Test - public void testBulkLoadNotSupported() throws Exception { - TableName tableName1 = TableName.valueOf(name.getMethodName() + "1"); - TableName tableName2 = TableName.valueOf(name.getMethodName() + "2"); + public void testBulkLoadNotSupported(TestInfo testInfo) throws Exception { + TableName tableName1 = 
TableName.valueOf(testInfo.getTestMethod().get().getName() + "1"); + TableName tableName2 = TableName.valueOf(testInfo.getTestMethod().get().getName() + "2"); try (Table t1 = UTIL1.createTable(tableName1, FAMILY_A); Table t2 = UTIL2.createTable(tableName2, FAMILY_A)) { String[] args = ArrayUtils.addAll(getPeerClusterOptions(), @@ -119,9 +120,9 @@ public void testBulkLoadNotSupported() throws Exception { } @Test - public void testSnapshotNotSupported() throws Exception { - TableName tableName1 = TableName.valueOf(name.getMethodName() + "1"); - TableName tableName2 = TableName.valueOf(name.getMethodName() + "2"); + public void testSnapshotNotSupported(TestInfo testInfo) throws Exception { + TableName tableName1 = TableName.valueOf(testInfo.getTestMethod().get().getName() + "1"); + TableName tableName2 = TableName.valueOf(testInfo.getTestMethod().get().getName() + "2"); String snapshot = tableName1.getNameAsString() + "_snapshot"; try (Table t1 = UTIL1.createTable(tableName1, FAMILY_A); Table t2 = UTIL2.createTable(tableName2, FAMILY_A)) { diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2TestBase.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2TestBase.java index ac9810a8825a..2e459402e94a 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2TestBase.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2TestBase.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.mapreduce; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.List; diff --git 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/MRIncrementalLoadTestBase.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/MRIncrementalLoadTestBase.java index ad2f841c19df..6e44fec8480d 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/MRIncrementalLoadTestBase.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/MRIncrementalLoadTestBase.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.mapreduce; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.ArrayList; @@ -44,15 +44,11 @@ import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.tool.BulkLoadHFiles; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.Test; -import org.junit.runners.Parameterized.Parameter; +import org.junit.jupiter.api.AfterAll; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -public class MRIncrementalLoadTestBase extends HFileOutputFormat2TestBase { +public abstract class MRIncrementalLoadTestBase extends HFileOutputFormat2TestBase { private static final Logger LOG = LoggerFactory.getLogger(MRIncrementalLoadTestBase.class); @@ -60,13 +56,10 @@ public class MRIncrementalLoadTestBase extends HFileOutputFormat2TestBase { private static String[] HOSTNAMES; - @Parameter(0) public boolean shouldChangeRegions; - @Parameter(1) public boolean putSortReducer; - @Parameter(2) public List tableStr; private Map allTables; @@ -94,12 +87,11 @@ protected static void setupCluster(boolean shouldKeepLocality) throws Exception } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws IOException { UTIL.shutdownMiniCluster(); } - @Before public void setUp() throws 
IOException { int regionNum = SHOULD_KEEP_LOCALITY ? 20 : 5; allTables = new HashMap<>(tableStr.size()); @@ -110,9 +102,9 @@ public void setUp() throws IOException { Table table = UTIL.createTable(tableName, FAMILIES, splitKeys); RegionLocator r = UTIL.getConnection().getRegionLocator(tableName); - assertEquals("Should start with empty table", 0, HBaseTestingUtil.countRows(table)); + assertEquals(0, HBaseTestingUtil.countRows(table), "Should start with empty table"); int numRegions = r.getStartKeys().length; - assertEquals("Should make " + regionNum + " regions", numRegions, regionNum); + assertEquals(numRegions, regionNum, "Should make " + regionNum + " regions"); allTables.put(tableStrSingle, table); tableInfo.add(new HFileOutputFormat2.TableInfo(table.getDescriptor(), r)); @@ -120,7 +112,6 @@ public void setUp() throws IOException { testDir = UTIL.getDataTestDirOnTestFS(tableStr.get(0)); } - @After public void tearDown() throws IOException { for (HFileOutputFormat2.TableInfo tableInfoSingle : tableInfo) { tableInfoSingle.getRegionLocator().close(); @@ -132,7 +123,19 @@ public void tearDown() throws IOException { } } - @Test + protected void runTest(boolean shouldChangeRegions, boolean putSortReducer, + List tableStr) throws Exception { + this.shouldChangeRegions = shouldChangeRegions; + this.putSortReducer = putSortReducer; + this.tableStr = tableStr; + setUp(); + try { + doIncrementalLoadTest(); + } finally { + tearDown(); + } + } + public void doIncrementalLoadTest() throws Exception { boolean writeMultipleTables = tableStr.size() > 1; // Generate the bulk load files @@ -143,8 +146,8 @@ public void doIncrementalLoadTest() throws Exception { for (Table tableSingle : allTables.values()) { // This doesn't write into the table, just makes files - assertEquals("HFOF should not touch actual table", 0, - HBaseTestingUtil.countRows(tableSingle)); + assertEquals(0, HBaseTestingUtil.countRows(tableSingle), + "HFOF should not touch actual table"); } int numTableDirs = 0; 
FileStatus[] fss = testDir.getFileSystem(UTIL.getConfiguration()).listStatus(testDir); @@ -169,10 +172,10 @@ public void doIncrementalLoadTest() throws Exception { } } } - assertEquals("Column family not found in FS.", FAMILIES.length, dir); + assertEquals(FAMILIES.length, dir, "Column family not found in FS."); } if (writeMultipleTables) { - assertEquals("Dir for all input tables not created", numTableDirs, allTables.size()); + assertEquals(numTableDirs, allTables.size(), "Dir for all input tables not created"); } Admin admin = UTIL.getAdmin(); @@ -207,12 +210,12 @@ public void doIncrementalLoadTest() throws Exception { int expectedRows = 0; if (putSortReducer) { // no rows should be extracted - assertEquals("BulkLoadHFiles should put expected data in table", expectedRows, - HBaseTestingUtil.countRows(currentTable)); + assertEquals(expectedRows, HBaseTestingUtil.countRows(currentTable), + "BulkLoadHFiles should put expected data in table"); } else { expectedRows = NMapInputFormat.getNumMapTasks(UTIL.getConfiguration()) * ROWSPERSPLIT; - assertEquals("BulkLoadHFiles should put expected data in table", expectedRows, - HBaseTestingUtil.countRows(currentTable)); + assertEquals(expectedRows, HBaseTestingUtil.countRows(currentTable), + "BulkLoadHFiles should put expected data in table"); Scan scan = new Scan(); ResultScanner results = currentTable.getScanner(scan); for (Result res : results) { @@ -245,8 +248,8 @@ public void doIncrementalLoadTest() throws Exception { } admin.enableTable(currentTableName); UTIL.waitTableAvailable(currentTableName); - assertEquals("Data should remain after reopening of regions", tableDigestBefore, - UTIL.checksumRows(currentTable)); + assertEquals(tableDigestBefore, UTIL.checksumRows(currentTable), + "Data should remain after reopening of regions"); } } } diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatTestBase.java 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatTestBase.java index 0e7ff24a1dab..c18a5c307663 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatTestBase.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatTestBase.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.mapreduce; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.File; import java.io.IOException; @@ -42,10 +42,10 @@ import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.Reducer; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -70,7 +70,7 @@ public abstract class MultiTableInputFormatTestBase { } } - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { // switch TIF to log at DEBUG level Log4jUtils.enableDebug(MultiTableInputFormatBase.class); @@ -85,12 +85,12 @@ public static void setUpBeforeClass() throws Exception { } } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } - @After + @AfterEach public void tearDown() throws Exception { Configuration c = TEST_UTIL.getConfiguration(); FileUtil.fullyDelete(new File(c.get("hadoop.tmp.dir"))); diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatTestBase.java 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatTestBase.java index 7a0615a5ff8e..c2002069d93f 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatTestBase.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatTestBase.java @@ -17,7 +17,9 @@ */ package org.apache.hadoop.hbase.mapreduce; -import static org.junit.Assert.assertFalse; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; import java.io.IOException; import java.util.Arrays; @@ -42,10 +44,9 @@ import org.apache.hadoop.hbase.util.CommonFSUtils; import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hbase.util.HFileArchiveUtil; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -58,7 +59,7 @@ public abstract class TableSnapshotInputFormatTestBase { protected FileSystem fs; protected Path rootDir; - @Before + @BeforeEach public void setupCluster() throws Exception { setupConf(UTIL.getConfiguration()); StartTestingClusterOption option = @@ -69,7 +70,7 @@ public void setupCluster() throws Exception { fs = rootDir.getFileSystem(UTIL.getConfiguration()); } - @After + @AfterEach public void tearDownCluster() throws Exception { UTIL.shutdownMiniCluster(); } @@ -142,11 +143,11 @@ public void testRestoreSnapshotDoesNotCreateBackRefLinks() throws Exception { Path path = HFileLink.getBackReferencesDir(storeDir, status.getPath().getName()); // assert back references directory is empty - assertFalse("There is a back reference in " + path, fs.exists(path)); + assertFalse(fs.exists(path), "There is a back reference 
in " + path); path = HFileLink.getBackReferencesDir(archiveStoreDir, status.getPath().getName()); // assert back references directory is empty - assertFalse("There is a back reference in " + path, fs.exists(path)); + assertFalse(fs.exists(path), "There is a back reference in " + path); } } } @@ -176,14 +177,14 @@ protected static void verifyRowFromMap(ImmutableBytesWritable key, Result result Cell cell = scanner.current(); // assert that all Cells in the Result have the same key - Assert.assertEquals(0, Bytes.compareTo(row, 0, row.length, cell.getRowArray(), - cell.getRowOffset(), cell.getRowLength())); + assertEquals(0, Bytes.compareTo(row, 0, row.length, cell.getRowArray(), cell.getRowOffset(), + cell.getRowLength())); } for (byte[] family : FAMILIES) { byte[] actual = result.getValue(family, family); - Assert.assertArrayEquals("Row in snapshot does not match, expected:" + Bytes.toString(row) - + " ,actual:" + Bytes.toString(actual), row, actual); + assertArrayEquals(row, actual, "Row in snapshot does not match, expected:" + + Bytes.toString(row) + " ,actual:" + Bytes.toString(actual)); } } diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellCounter.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellCounter.java index 7fbb5bc16255..d8133b1ec98a 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellCounter.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellCounter.java @@ -17,10 +17,6 @@ */ package org.apache.hadoop.hbase.mapreduce; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; - import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileInputStream; @@ -30,7 +26,6 @@ import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.LocalFileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import 
org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.TableName; @@ -42,19 +37,19 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.LauncherSecurityManager; import org.apache.hadoop.util.ToolRunner; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; -@Category({ MapReduceTests.class, LargeTests.class }) +@Tag(MapReduceTests.TAG) +@Tag(LargeTests.TAG) public class TestCellCounter { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestCellCounter.class); private static final HBaseTestingUtil UTIL = new HBaseTestingUtil(); private static final byte[] ROW1 = Bytes.toBytesBinary("\\x01row1"); @@ -70,17 +65,14 @@ public class TestCellCounter { "target" + File.separator + "test-data" + File.separator + "output"; private static long now = EnvironmentEdgeManager.currentTime(); - @Rule - public TestName name = new TestName(); - - @BeforeClass + @BeforeAll public static void beforeClass() throws Exception { UTIL.startMiniCluster(); FQ_OUTPUT_DIR = new Path(OUTPUT_DIR).makeQualified(new LocalFileSystem()); FileUtil.fullyDelete(new File(OUTPUT_DIR)); } - @AfterClass + @AfterAll public static void afterClass() throws Exception { UTIL.shutdownMiniCluster(); } @@ -89,8 +81,8 @@ public static void afterClass() throws Exception { * Test 
CellCounter all data should print to output */ @Test - public void testCellCounter() throws Exception { - final TableName sourceTable = TableName.valueOf(name.getMethodName()); + public void testCellCounter(TestInfo testInfo) throws Exception { + final TableName sourceTable = TableName.valueOf(testInfo.getTestMethod().get().getName()); byte[][] families = { FAMILY_A, FAMILY_B }; try (Table t = UTIL.createTable(sourceTable, families)) { Put p = new Put(ROW1); @@ -125,8 +117,8 @@ public void testCellCounter() throws Exception { * Test CellCounter all data should print to output */ @Test - public void testCellCounterPrefix() throws Exception { - final TableName sourceTable = TableName.valueOf(name.getMethodName()); + public void testCellCounterPrefix(TestInfo testInfo) throws Exception { + final TableName sourceTable = TableName.valueOf(testInfo.getTestMethod().get().getName()); byte[][] families = { FAMILY_A, FAMILY_B }; try (Table t = UTIL.createTable(sourceTable, families)) { Put p = new Put(ROW1); @@ -161,8 +153,8 @@ public void testCellCounterPrefix() throws Exception { * Test CellCounter with time range all data should print to output */ @Test - public void testCellCounterStartTimeRange() throws Exception { - final TableName sourceTable = TableName.valueOf(name.getMethodName()); + public void testCellCounterStartTimeRange(TestInfo testInfo) throws Exception { + final TableName sourceTable = TableName.valueOf(testInfo.getTestMethod().get().getName()); byte[][] families = { FAMILY_A, FAMILY_B }; try (Table t = UTIL.createTable(sourceTable, families)) { Put p = new Put(ROW1); @@ -198,8 +190,8 @@ public void testCellCounterStartTimeRange() throws Exception { * Test CellCounter with time range all data should print to output */ @Test - public void testCellCounteEndTimeRange() throws Exception { - final TableName sourceTable = TableName.valueOf(name.getMethodName()); + public void testCellCounteEndTimeRange(TestInfo testInfo) throws Exception { + final TableName 
sourceTable = TableName.valueOf(testInfo.getTestMethod().get().getName()); byte[][] families = { FAMILY_A, FAMILY_B }; try (Table t = UTIL.createTable(sourceTable, families)) { Put p = new Put(ROW1); @@ -235,8 +227,8 @@ public void testCellCounteEndTimeRange() throws Exception { * Test CellCounter with time range all data should print to output */ @Test - public void testCellCounteOutOfTimeRange() throws Exception { - final TableName sourceTable = TableName.valueOf(name.getMethodName()); + public void testCellCounteOutOfTimeRange(TestInfo testInfo) throws Exception { + final TableName sourceTable = TableName.valueOf(testInfo.getTestMethod().get().getName()); byte[][] families = { FAMILY_A, FAMILY_B }; try (Table t = UTIL.createTable(sourceTable, families)) { Put p = new Put(ROW1); @@ -307,8 +299,8 @@ public void testCellCounterMain() throws Exception { * Test CellCounter for complete table all data should print to output */ @Test - public void testCellCounterForCompleteTable() throws Exception { - final TableName sourceTable = TableName.valueOf(name.getMethodName()); + public void testCellCounterForCompleteTable(TestInfo testInfo) throws Exception { + final TableName sourceTable = TableName.valueOf(testInfo.getTestMethod().get().getName()); String outputPath = OUTPUT_DIR + sourceTable; LocalFileSystem localFileSystem = new LocalFileSystem(); Path outputDir = new Path(outputPath).makeQualified(localFileSystem.getUri(), @@ -360,7 +352,7 @@ public void testCellCounterForCompleteTable() throws Exception { @Test public void TestCellCounterWithoutOutputDir() throws Exception { String[] args = new String[] { "tableName" }; - assertEquals("CellCounter should exit with -1 as output directory is not specified.", -1, - ToolRunner.run(HBaseConfiguration.create(), new CellCounter(), args)); + assertEquals(-1, + ToolRunner.run(HBaseConfiguration.create(), new CellCounter(), args), "CellCounter should exit with -1 as output directory is not specified."); } } diff --git 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestConfigurePartitioner.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestConfigurePartitioner.java index 49c08a463abe..f2e51a8a73bb 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestConfigurePartitioner.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestConfigurePartitioner.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.mapreduce; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.verify; import java.io.IOException; @@ -28,7 +28,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.testclassification.MapReduceTests; @@ -36,32 +35,28 @@ import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.lib.partition.TotalOrderPartitioner; import org.apache.hadoop.security.UserGroupInformation; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.mockito.Mockito; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@Category({ MapReduceTests.class, MediumTests.class }) +@Tag(MapReduceTests.TAG) +@Tag(MediumTests.TAG) public class TestConfigurePartitioner { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestConfigurePartitioner.class); 
- private static final Logger LOG = LoggerFactory.getLogger(TestConfigurePartitioner.class); private static final HBaseTestingUtil UTIL = new HBaseTestingUtil(); - @Before + @BeforeEach public void setUp() throws Exception { UTIL.startMiniDFSCluster(1); } - @After + @AfterEach public void tearDown() throws IOException { UTIL.shutdownMiniDFSCluster(); } diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java index 5c3e9b65079d..b07fd6c0e95d 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java @@ -17,18 +17,17 @@ */ package org.apache.hadoop.hbase.mapreduce; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertThrows; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.PrintStream; import java.util.HashMap; import java.util.Map; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; @@ -45,35 +44,27 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.LauncherSecurityManager; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import 
org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; /** * Basic test for the CopyTable M/R tool */ -@Category({ MapReduceTests.class, LargeTests.class }) +@Tag(MapReduceTests.TAG) +@Tag(LargeTests.TAG) public class TestCopyTable extends CopyTableTestBase { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestCopyTable.class); - private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); - @Rule - public TestName name = new TestName(); - - @BeforeClass + @BeforeAll public static void beforeClass() throws Exception { TEST_UTIL.startMiniCluster(3); } - @AfterClass + @AfterAll public static void afterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } @@ -107,45 +98,45 @@ protected String[] getPeerClusterOptions() throws Exception { * Simple end-to-end test */ @Test - public void testCopyTable() throws Exception { - doCopyTableTest(TEST_UTIL.getConfiguration(), false); + public void testCopyTable(TestInfo testInfo) throws Exception { + doCopyTableTest(TEST_UTIL.getConfiguration(), false, testInfo); } /** * Simple end-to-end test with bulkload. 
*/ @Test - public void testCopyTableWithBulkload() throws Exception { - doCopyTableTest(TEST_UTIL.getConfiguration(), true); + public void testCopyTableWithBulkload(TestInfo testInfo) throws Exception { + doCopyTableTest(TEST_UTIL.getConfiguration(), true, testInfo); } /** * Simple end-to-end test on table with MOB */ @Test - public void testCopyTableWithMob() throws Exception { - doCopyTableTestWithMob(TEST_UTIL.getConfiguration(), false); + public void testCopyTableWithMob(TestInfo testInfo) throws Exception { + doCopyTableTestWithMob(TEST_UTIL.getConfiguration(), false, testInfo); } /** * Simple end-to-end test with bulkload on table with MOB. */ @Test - public void testCopyTableWithBulkloadWithMob() throws Exception { - doCopyTableTestWithMob(TEST_UTIL.getConfiguration(), true); + public void testCopyTableWithBulkloadWithMob(TestInfo testInfo) throws Exception { + doCopyTableTestWithMob(TEST_UTIL.getConfiguration(), true, testInfo); } @Test - public void testStartStopRow() throws Exception { - testStartStopRow(TEST_UTIL.getConfiguration()); + public void testStartStopRow(TestInfo testInfo) throws Exception { + testStartStopRow(TEST_UTIL.getConfiguration(), testInfo); } /** * Test copy of table from sourceTable to targetTable all rows from family a */ @Test - public void testRenameFamily() throws Exception { - testRenameFamily(TEST_UTIL.getConfiguration()); + public void testRenameFamily(TestInfo testInfo) throws Exception { + testRenameFamily(TEST_UTIL.getConfiguration(), testInfo); } /** diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTableToPeerClusterWithClusterKey.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTableToPeerClusterWithClusterKey.java index 6ff9afda5357..eb5d4549831b 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTableToPeerClusterWithClusterKey.java +++ 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTableToPeerClusterWithClusterKey.java @@ -20,16 +20,12 @@ import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.MapReduceTests; -import org.junit.ClassRule; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; -@Category({ MapReduceTests.class, LargeTests.class }) +@Tag(MapReduceTests.TAG) +@Tag(LargeTests.TAG) public class TestCopyTableToPeerClusterWithClusterKey extends CopyTableToPeerClusterTestBase { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestCopyTableToPeerClusterWithClusterKey.class); - @Override protected String[] getPeerClusterOptions() throws Exception { return new String[] { "--peer.adr=" + UTIL2.getClusterKey() }; diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTableToPeerClusterWithRpcUri.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTableToPeerClusterWithRpcUri.java index 4e6293712ec2..11e8755077a8 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTableToPeerClusterWithRpcUri.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTableToPeerClusterWithRpcUri.java @@ -17,22 +17,16 @@ */ package org.apache.hadoop.hbase.mapreduce; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.MapReduceTests; -import org.junit.ClassRule; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; -@Category({ MapReduceTests.class, LargeTests.class }) +@Tag(MapReduceTests.TAG) +@Tag(LargeTests.TAG) public class TestCopyTableToPeerClusterWithRpcUri extends CopyTableToPeerClusterTestBase { - @ClassRule - public static final 
HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestCopyTableToPeerClusterWithRpcUri.class); - @Override protected String[] getPeerClusterOptions() throws Exception { return new String[] { "--peer.uri=" + UTIL2.getZkConnectionURI() }; } - } diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTableToPeerClusterWithZkUri.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTableToPeerClusterWithZkUri.java index 720c367eb739..7a7968601137 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTableToPeerClusterWithZkUri.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTableToPeerClusterWithZkUri.java @@ -22,14 +22,10 @@ import org.apache.hadoop.hbase.testclassification.MapReduceTests; -import org.junit.ClassRule; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; -@Category({ MapReduceTests.class, LargeTests.class }) +@Tag(MapReduceTests.TAG) +@Tag(LargeTests.TAG) public class TestCopyTableToPeerClusterWithZkUri extends CopyTableToPeerClusterTestBase { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestCopyTableToPeerClusterWithZkUri.class); - @Override protected String[] getPeerClusterOptions() throws Exception { return new String[] { "--peer.uri=" + UTIL2.getRpcConnnectionURI() }; diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestGroupingTableMapper.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestGroupingTableMapper.java index 34d197be02fa..9a59a7ada484 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestGroupingTableMapper.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestGroupingTableMapper.java @@ -31,17 +31,13 @@ import org.apache.hadoop.hbase.testclassification.SmallTests; import
org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.mapreduce.Mapper; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ MapReduceTests.class, SmallTests.class }) +@Tag(MapReduceTests.TAG) +@Tag(SmallTests.TAG) public class TestGroupingTableMapper { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestGroupingTableMapper.class); - /** * Test GroupingTableMapper class */ diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHBaseMRTestingUtility.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHBaseMRTestingUtility.java index 37dd817f94a3..5086d2badabc 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHBaseMRTestingUtility.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHBaseMRTestingUtility.java @@ -17,25 +17,21 @@ */ package org.apache.hadoop.hbase.mapreduce; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.HashMap; import java.util.Map; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.MapReduceTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ MapReduceTests.class, LargeTests.class }) +@Tag(MapReduceTests.TAG) +@Tag(LargeTests.TAG) public class TestHBaseMRTestingUtility { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - 
HBaseClassTestRule.forClass(TestHBaseMRTestingUtility.class); @Test public void testMRYarnConfigsPopulation() throws IOException { @@ -55,20 +51,18 @@ public void testMRYarnConfigsPopulation() throws IOException { } for (Map.Entry entry : dummyProps.entrySet()) { - assertTrue( + assertTrue(hbt.getConfiguration().get(entry.getKey()).equals(entry.getValue()), "The Configuration for key " + entry.getKey() + " and value: " + entry.getValue() - + " is not populated correctly", - hbt.getConfiguration().get(entry.getKey()).equals(entry.getValue())); + + " is not populated correctly"); } hbt.startMiniMapReduceCluster(); // Confirm that MiniMapReduceCluster overwrites the mr properties and updates the Configuration for (Map.Entry entry : dummyProps.entrySet()) { - assertFalse( + assertFalse(hbt.getConfiguration().get(entry.getKey()).equals(entry.getValue()), "The MR prop: " + entry.getValue() + " is not overwritten when map reduce mini" - + "cluster is started", - hbt.getConfiguration().get(entry.getKey()).equals(entry.getValue())); + + "cluster is started"); } hbt.shutdownMiniMapReduceCluster(); diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java index 37096e408a74..6cc2a48817ce 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java @@ -18,12 +18,12 @@ package org.apache.hadoop.hbase.mapreduce; import static org.apache.hadoop.hbase.regionserver.HStoreFile.BLOOM_FILTER_TYPE_KEY; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNotSame; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static 
org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNotSame; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.IOException; import java.lang.reflect.Field; @@ -108,10 +108,8 @@ import org.apache.hadoop.mapreduce.RecordWriter; import org.apache.hadoop.mapreduce.TaskAttemptContext; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; -import org.junit.ClassRule; -import org.junit.Ignore; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; import org.mockito.Mockito; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -121,13 +119,10 @@ * output. Creates a few inner classes to implement splits and an inputformat that emits keys and * values. */ -@Category({ VerySlowMapReduceTests.class, LargeTests.class }) +@org.junit.jupiter.api.Tag(VerySlowMapReduceTests.TAG) +@org.junit.jupiter.api.Tag(LargeTests.TAG) public class TestHFileOutputFormat2 extends HFileOutputFormat2TestBase { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestHFileOutputFormat2.class); - private static final Logger LOG = LoggerFactory.getLogger(TestHFileOutputFormat2.class); /** @@ -135,7 +130,7 @@ public class TestHFileOutputFormat2 extends HFileOutputFormat2TestBase { * timestamp is {@link HConstants#LATEST_TIMESTAMP}. * @see HBASE-2615 */ - @Ignore("Goes zombie too frequently; needs work. See HBASE-14563") + @Disabled("Goes zombie too frequently; needs work. 
See HBASE-14563") @Test public void test_LATEST_TIMESTAMP_isReplaced() throws Exception { Configuration conf = new Configuration(this.UTIL.getConfiguration()); @@ -185,7 +180,7 @@ private TaskAttemptContext createTestTaskAttemptContext(final Job job) throws Ex * Test that {@link HFileOutputFormat2} creates an HFile with TIMERANGE metadata used by * time-restricted scans. */ - @Ignore("Goes zombie too frequently; needs work. See HBASE-14563") + @Disabled("Goes zombie too frequently; needs work. See HBASE-14563") @Test public void test_TIMERANGE() throws Exception { Configuration conf = new Configuration(this.UTIL.getConfiguration()); @@ -249,7 +244,7 @@ public void test_TIMERANGE() throws Exception { /** * Run small MR job. */ - @Ignore("Goes zombie too frequently; needs work. See HBASE-14563") + @Disabled("Goes zombie too frequently; needs work. See HBASE-14563") @Test public void testWritingPEData() throws Exception { Configuration conf = UTIL.getConfiguration(); @@ -302,10 +297,10 @@ public void testWritingPEData() throws Exception { kvCount += reader.getEntries(); scanner.seekTo(); long perKVSize = scanner.getCell().getSerializedSize(); - assertTrue("Data size of each file should not be too large.", - perKVSize * reader.getEntries() <= hregionMaxFilesize); + assertTrue(perKVSize * reader.getEntries() <= hregionMaxFilesize, + "Data size of each file should not be too large."); } - assertEquals("Should write expected data in output file.", ROWSPERSPLIT, kvCount); + assertEquals(ROWSPERSPLIT, kvCount, "Should write expected data in output file."); } } @@ -358,7 +353,7 @@ public void test_WritingTagData() throws Exception { } } - @Ignore("Goes zombie too frequently; needs work. See HBASE-14563") + @Disabled("Goes zombie too frequently; needs work. 
See HBASE-14563") @Test public void testJobConfiguration() throws Exception { Configuration conf = new Configuration(this.UTIL.getConfiguration()); @@ -371,14 +366,14 @@ public void testJobConfiguration() throws Exception { setupMockStartKeys(regionLocator); setupMockTableName(regionLocator); HFileOutputFormat2.configureIncrementalLoad(job, table.getDescriptor(), regionLocator); - assertEquals(job.getNumReduceTasks(), 4); + assertEquals(4, job.getNumReduceTasks()); } /** * Test for {@link HFileOutputFormat2#createFamilyCompressionMap(Configuration)}. Tests that the * family compression map is correctly serialized into and deserialized from configuration */ - @Ignore("Goes zombie too frequently; needs work. See HBASE-14563") + @Disabled("Goes zombie too frequently; needs work. See HBASE-14563") @Test public void testSerializeDeserializeFamilyCompressionMap() throws IOException { for (int numCfs = 0; numCfs <= 3; numCfs++) { @@ -398,8 +393,9 @@ public void testSerializeDeserializeFamilyCompressionMap() throws IOException { // test that we have a value for all column families that matches with the // used mock values for (Entry entry : familyToCompression.entrySet()) { - assertEquals("Compression configuration incorrect for column family:" + entry.getKey(), - entry.getValue(), retrievedFamilyToCompressionMap.get(Bytes.toBytes(entry.getKey()))); + assertEquals(entry.getValue(), + retrievedFamilyToCompressionMap.get(Bytes.toBytes(entry.getKey())), + "Compression configuration incorrect for column family:" + entry.getKey()); } } } @@ -444,7 +440,7 @@ private Map getMockColumnFamiliesForCompression(i * Test for {@link HFileOutputFormat2#createFamilyBloomTypeMap(Configuration)}. Tests that the * family bloom type map is correctly serialized into and deserialized from configuration */ - @Ignore("Goes zombie too frequently; needs work. See HBASE-14563") + @Disabled("Goes zombie too frequently; needs work. 
See HBASE-14563") @Test public void testSerializeDeserializeFamilyBloomTypeMap() throws IOException { for (int numCfs = 0; numCfs <= 2; numCfs++) { @@ -464,8 +460,9 @@ public void testSerializeDeserializeFamilyBloomTypeMap() throws IOException { // test that we have a value for all column families that matches with the // used mock values for (Entry entry : familyToBloomType.entrySet()) { - assertEquals("BloomType configuration incorrect for column family:" + entry.getKey(), - entry.getValue(), retrievedFamilyToBloomTypeMap.get(Bytes.toBytes(entry.getKey()))); + assertEquals(entry.getValue(), + retrievedFamilyToBloomTypeMap.get(Bytes.toBytes(entry.getKey())), + "BloomType configuration incorrect for column family:" + entry.getKey()); } } } @@ -505,7 +502,7 @@ private Map getMockColumnFamiliesForBloomType(int numCfs) { * Test for {@link HFileOutputFormat2#createFamilyBlockSizeMap(Configuration)}. Tests that the * family block size map is correctly serialized into and deserialized from configuration */ - @Ignore("Goes zombie too frequently; needs work. See HBASE-14563") + @Disabled("Goes zombie too frequently; needs work. 
See HBASE-14563") @Test public void testSerializeDeserializeFamilyBlockSizeMap() throws IOException { for (int numCfs = 0; numCfs <= 3; numCfs++) { @@ -525,8 +522,9 @@ public void testSerializeDeserializeFamilyBlockSizeMap() throws IOException { // test that we have a value for all column families that matches with the // used mock values for (Entry entry : familyToBlockSize.entrySet()) { - assertEquals("BlockSize configuration incorrect for column family:" + entry.getKey(), - entry.getValue(), retrievedFamilyToBlockSizeMap.get(Bytes.toBytes(entry.getKey()))); + assertEquals(entry.getValue(), + retrievedFamilyToBlockSizeMap.get(Bytes.toBytes(entry.getKey())), + "BlockSize configuration incorrect for column family:" + entry.getKey()); } } } @@ -570,7 +568,7 @@ private Map getMockColumnFamiliesForBlockSize(int numCfs) { * the family data block encoding map is correctly serialized into and deserialized from * configuration */ - @Ignore("Goes zombie too frequently; needs work. See HBASE-14563") + @Disabled("Goes zombie too frequently; needs work. 
See HBASE-14563") @Test public void testSerializeDeserializeFamilyDataBlockEncodingMap() throws IOException { for (int numCfs = 0; numCfs <= 3; numCfs++) { @@ -592,10 +590,9 @@ public void testSerializeDeserializeFamilyDataBlockEncodingMap() throws IOExcept // test that we have a value for all column families that matches with the // used mock values for (Entry entry : familyToDataBlockEncoding.entrySet()) { - assertEquals( - "DataBlockEncoding configuration incorrect for column family:" + entry.getKey(), - entry.getValue(), - retrievedFamilyToDataBlockEncodingMap.get(Bytes.toBytes(entry.getKey()))); + assertEquals(entry.getValue(), + retrievedFamilyToDataBlockEncodingMap.get(Bytes.toBytes(entry.getKey())), + "DataBlockEncoding configuration incorrect for column family:" + entry.getKey()); } } } @@ -650,7 +647,7 @@ private void setupMockTableName(RegionLocator table) throws IOException { * Test that {@link HFileOutputFormat2} RecordWriter uses compression and bloom filter settings * from the column family descriptor */ - @Ignore("Goes zombie too frequently; needs work. See HBASE-14563") + @Disabled("Goes zombie too frequently; needs work. 
See HBASE-14563") @Test public void testColumnFamilySettings() throws Exception { Configuration conf = new Configuration(this.UTIL.getConfiguration()); @@ -715,12 +712,10 @@ public void testColumnFamilySettings() throws Exception { byte[] bloomFilter = fileInfo.get(BLOOM_FILTER_TYPE_KEY); if (bloomFilter == null) bloomFilter = Bytes.toBytes("NONE"); - assertEquals( - "Incorrect bloom filter used for column family " + familyStr + "(reader: " + reader + ")", - hcd.getBloomFilterType(), BloomType.valueOf(Bytes.toString(bloomFilter))); - assertEquals( - "Incorrect compression used for column family " + familyStr + "(reader: " + reader + ")", - hcd.getCompressionType(), reader.getFileContext().getCompression()); + assertEquals(hcd.getBloomFilterType(), BloomType.valueOf(Bytes.toString(bloomFilter)), + "Incorrect bloom filter used for column family " + familyStr + "(reader: " + reader + ")"); + assertEquals(hcd.getCompressionType(), reader.getFileContext().getCompression(), + "Incorrect compression used for column family " + familyStr + "(reader: " + reader + ")"); } } finally { dir.getFileSystem(conf).delete(dir, true); @@ -757,7 +752,7 @@ private void writeRandomKeyValues(RecordWriter wri * excluded from minor compaction. Without the fix of HBASE-6901, an * ArrayIndexOutOfBoundsException will be thrown. 
*/ - @Ignore("Flakey: See HBASE-9051") + @Disabled("Flakey: See HBASE-9051") @Test public void testExcludeAllFromMinorCompaction() throws Exception { Configuration conf = UTIL.getConfiguration(); @@ -769,7 +764,7 @@ public void testExcludeAllFromMinorCompaction() throws Exception { Table table = UTIL.createTable(TABLE_NAMES[0], FAMILIES); RegionLocator locator = conn.getRegionLocator(TABLE_NAMES[0])) { final FileSystem fs = UTIL.getDFSCluster().getFileSystem(); - assertEquals("Should start with empty table", 0, UTIL.countRows(table)); + assertEquals(0, UTIL.countRows(table), "Should start with empty table"); // deep inspection: get the StoreFile dir final Path storePath = @@ -793,8 +788,8 @@ public void testExcludeAllFromMinorCompaction() throws Exception { // Ensure data shows up int expectedRows = 2 * NMapInputFormat.getNumMapTasks(conf) * ROWSPERSPLIT; - assertEquals("BulkLoadHFiles should put expected data in table", expectedRows, - UTIL.countRows(table)); + assertEquals(expectedRows, UTIL.countRows(table), + "BulkLoadHFiles should put expected data in table"); // should have a second StoreFile now assertEquals(2, fs.listStatus(storePath).length); @@ -839,7 +834,7 @@ public Boolean call() throws Exception { } } - @Ignore("Goes zombie too frequently; needs work. See HBASE-14563") + @Disabled("Goes zombie too frequently; needs work. 
See HBASE-14563") @Test public void testExcludeMinorCompaction() throws Exception { Configuration conf = UTIL.getConfiguration(); @@ -852,7 +847,7 @@ public void testExcludeMinorCompaction() throws Exception { Path testDir = UTIL.getDataTestDirOnTestFS("testExcludeMinorCompaction"); final FileSystem fs = UTIL.getDFSCluster().getFileSystem(); Table table = UTIL.createTable(TABLE_NAMES[0], FAMILIES); - assertEquals("Should start with empty table", 0, UTIL.countRows(table)); + assertEquals(0, UTIL.countRows(table), "Should start with empty table"); // deep inspection: get the StoreFile dir final Path storePath = @@ -887,8 +882,8 @@ public Boolean call() throws Exception { // Ensure data shows up int expectedRows = NMapInputFormat.getNumMapTasks(conf) * ROWSPERSPLIT; - assertEquals("BulkLoadHFiles should put expected data in table", expectedRows + 1, - UTIL.countRows(table)); + assertEquals(expectedRows + 1, UTIL.countRows(table), + "BulkLoadHFiles should put expected data in table"); // should have a second StoreFile now assertEquals(2, fs.listStatus(storePath).length); @@ -1075,8 +1070,8 @@ public void TestConfigureCompression() throws Exception { LocatedFileStatus keyFileStatus = iterator.next(); HFile.Reader reader = HFile.createReader(fs, keyFileStatus.getPath(), new CacheConfig(conf), true, conf); - assertEquals(reader.getTrailer().getCompressionCodec().getName(), - hfileoutputformatCompression); + assertEquals(hfileoutputformatCompression, + reader.getTrailer().getCompressionCodec().getName()); } } finally { if (writer != null && context != null) { diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2WithSecurity.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2WithSecurity.java index ac767f23775c..7dd42c522e68 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2WithSecurity.java +++ 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2WithSecurity.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hbase.mapreduce; import static org.apache.hadoop.security.UserGroupInformation.loginUserFromKeytab; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.Closeable; import java.io.File; @@ -42,20 +42,17 @@ import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenIdentifier; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * Tests for {@link HFileOutputFormat2} with secure mode. 
*/ -@Category({ VerySlowMapReduceTests.class, LargeTests.class }) +@Tag(VerySlowMapReduceTests.TAG) +@Tag(LargeTests.TAG) public class TestHFileOutputFormat2WithSecurity extends HFileOutputFormat2TestBase { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestHFileOutputFormat2WithSecurity.class); private static final byte[] FAMILIES = Bytes.toBytes("test_cf"); @@ -71,7 +68,7 @@ public class TestHFileOutputFormat2WithSecurity extends HFileOutputFormat2TestBa private List clusters = new ArrayList<>(); - @Before + @BeforeEach public void setupSecurityClusters() throws Exception { utilA = new HBaseTestingUtil(); confA = utilA.getConfiguration(); @@ -93,7 +90,7 @@ public void setupSecurityClusters() throws Exception { clusters.add(utilB.startSecureMiniCluster(kdc, userPrincipal, HTTP_PRINCIPAL)); } - @After + @AfterEach public void teardownSecurityClusters() { IOUtils.closeQuietly(clusters); clusters.clear(); diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHRegionPartitioner.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHRegionPartitioner.java index 9cffb4089bd7..813758bdca92 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHRegionPartitioner.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHRegionPartitioner.java @@ -17,42 +17,33 @@ */ package org.apache.hadoop.hbase.mapreduce; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.testclassification.MapReduceTests; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; 
-import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; - -@Category({ MapReduceTests.class, MediumTests.class }) +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; + +@Tag(MapReduceTests.TAG) +@Tag(MediumTests.TAG) public class TestHRegionPartitioner { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestHRegionPartitioner.class); - private static final HBaseTestingUtil UTIL = new HBaseTestingUtil(); - @Rule - public TestName name = new TestName(); - - @BeforeClass + @BeforeAll public static void beforeClass() throws Exception { UTIL.startMiniCluster(); } - @AfterClass + @AfterAll public static void afterClass() throws Exception { UTIL.shutdownMiniCluster(); } @@ -61,16 +52,17 @@ public static void afterClass() throws Exception { * Test HRegionPartitioner */ @Test - public void testHRegionPartitioner() throws Exception { + public void testHRegionPartitioner(TestInfo testInfo) throws Exception { byte[][] families = { Bytes.toBytes("familyA"), Bytes.toBytes("familyB") }; - UTIL.createTable(TableName.valueOf(name.getMethodName()), families, 1, Bytes.toBytes("aa"), + String tableName = testInfo.getTestMethod().get().getName(); + UTIL.createTable(TableName.valueOf(tableName), families, 1, Bytes.toBytes("aa"), Bytes.toBytes("cc"), 3); HRegionPartitioner partitioner = new HRegionPartitioner<>(); Configuration configuration = UTIL.getConfiguration(); - configuration.set(TableOutputFormat.OUTPUT_TABLE, name.getMethodName()); + configuration.set(TableOutputFormat.OUTPUT_TABLE, tableName); partitioner.setConf(configuration); ImmutableBytesWritable writable = new ImmutableBytesWritable(Bytes.toBytes("bb")); @@ -79,10 +71,11 
@@ public void testHRegionPartitioner() throws Exception { } @Test - public void testHRegionPartitionerMoreRegions() throws Exception { + public void testHRegionPartitionerMoreRegions(TestInfo testInfo) throws Exception { byte[][] families = { Bytes.toBytes("familyA"), Bytes.toBytes("familyB") }; - TableName tableName = TableName.valueOf(name.getMethodName()); + String tableNameStr = testInfo.getTestMethod().get().getName(); + TableName tableName = TableName.valueOf(tableNameStr); UTIL.createTable(tableName, families, 1, Bytes.toBytes("aa"), Bytes.toBytes("cc"), 5); Configuration configuration = UTIL.getConfiguration(); @@ -90,7 +83,7 @@ public void testHRegionPartitionerMoreRegions() throws Exception { assertEquals(5, numberOfRegions); HRegionPartitioner partitioner = new HRegionPartitioner<>(); - configuration.set(TableOutputFormat.OUTPUT_TABLE, name.getMethodName()); + configuration.set(TableOutputFormat.OUTPUT_TABLE, tableNameStr); partitioner.setConf(configuration); // Get some rowKey for the lastRegion diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHashTable.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHashTable.java index 05736f939e13..ec4d3ce3f02a 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHashTable.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHashTable.java @@ -17,14 +17,14 @@ */ package org.apache.hadoop.hbase.mapreduce; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; import java.util.HashMap; import java.util.Map; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Put; 
@@ -33,14 +33,11 @@ import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.io.MapFile; -import org.junit.AfterClass; -import org.junit.Assert; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -50,32 +47,26 @@ /** * Basic test for the HashTable M/R tool */ -@Category(LargeTests.class) +@Tag(LargeTests.TAG) public class TestHashTable { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestHashTable.class); private static final Logger LOG = LoggerFactory.getLogger(TestHashTable.class); private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); - @Rule - public TestName name = new TestName(); - - @BeforeClass + @BeforeAll public static void beforeClass() throws Exception { TEST_UTIL.startMiniCluster(3); } - @AfterClass + @AfterAll public static void afterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } @Test - public void testHashTable() throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName()); + public void testHashTable(TestInfo testInfo) throws Exception { + final TableName tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); final byte[] family = Bytes.toBytes("family"); final byte[] column1 = Bytes.toBytes("c1"); final byte[] column2 = Bytes.toBytes("c2"); @@ -110,7 +101,7 @@ public void testHashTable() throws Exception { int code = hashTable.run(new String[] { "--batchsize=" + batchSize, "--numhashfiles=" + numHashFiles, "--scanbatch=2", tableName.getNameAsString(), 
testDir.toString() }); - assertEquals("test job failed", 0, code); + assertEquals(0, code, "test job failed"); FileSystem fs = TEST_UTIL.getTestFileSystem(); @@ -165,7 +156,7 @@ ImmutableMap. builder() intKey = Bytes.toInt(key.get(), key.getOffset(), key.getLength()); } if (actualHashes.containsKey(intKey)) { - Assert.fail("duplicate key in data files: " + intKey); + fail("duplicate key in data files: " + intKey); } actualHashes.put(intKey, new ImmutableBytesWritable(hash.copyBytes())); } @@ -185,7 +176,7 @@ ImmutableMap. builder() if (!expectedHashes.equals(actualHashes)) { LOG.error("Diff: " + Maps.difference(expectedHashes, actualHashes)); } - Assert.assertEquals(expectedHashes, actualHashes); + assertEquals(expectedHashes, actualHashes); TEST_UTIL.deleteTable(tableName); TEST_UTIL.cleanupDataTestDirOnTestFS(); diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java index af0749dafc6d..e005e6750040 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java @@ -19,10 +19,10 @@ import static org.apache.hadoop.hbase.HConstants.RPC_CODEC_CONF_KEY; import static org.apache.hadoop.hbase.ipc.RpcClient.DEFAULT_CODEC_CLASS; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; @@ -45,7 +45,6 @@ import org.apache.hadoop.hbase.CellUtil; 
import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.ExtendedCellScanner; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeepDeletedCells; @@ -94,16 +93,12 @@ import org.apache.hadoop.mapreduce.Mapper.Context; import org.apache.hadoop.util.GenericOptionsParser; import org.apache.hadoop.util.ToolRunner; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Assert; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; import org.slf4j.Logger; @@ -112,13 +107,10 @@ /** * Tests the table import and table export MR job functionality */ -@Category({ VerySlowMapReduceTests.class, LargeTests.class }) +@org.junit.jupiter.api.Tag(VerySlowMapReduceTests.TAG) +@org.junit.jupiter.api.Tag(LargeTests.TAG) public class TestImportExport { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestImportExport.class); - private static final Logger LOG = LoggerFactory.getLogger(TestImportExport.class); protected static final HBaseTestingUtil UTIL = new HBaseTestingUtil(); private static final byte[] ROW1 = Bytes.toBytesBinary("\\x32row1"); @@ -140,7 +132,7 @@ public class TestImportExport { public static final String TEST_ATTR = "source_op"; public static final String TEST_TAG = "test_tag"; - @BeforeClass + @BeforeAll public static void beforeClass() throws Throwable { // Up the handlers; this 
test needs more than usual. UTIL.getConfiguration().setInt(HConstants.REGION_SERVER_HIGH_PRIORITY_HANDLER_COUNT, 10); @@ -149,20 +141,17 @@ public static void beforeClass() throws Throwable { new Path(OUTPUT_DIR).makeQualified(FileSystem.get(UTIL.getConfiguration())).toString(); } - @AfterClass + @AfterAll public static void afterClass() throws Throwable { UTIL.shutdownMiniCluster(); } - @Rule - public final TestName name = new TestName(); - - @Before - public void announce() { - LOG.info("Running " + name.getMethodName()); + @BeforeEach + public void announce(TestInfo testInfo) { + LOG.info("Running " + testInfo.getTestMethod().get().getName()); } - @After + @AfterEach public void cleanup() throws Throwable { FileSystem fs = FileSystem.get(UTIL.getConfiguration()); fs.delete(new Path(OUTPUT_DIR), true); @@ -202,8 +191,9 @@ boolean runImport(String[] args) throws Throwable { * Test simple replication case with column mapping */ @Test - public void testSimpleCase() throws Throwable { - try (Table t = UTIL.createTable(TableName.valueOf(name.getMethodName()), FAMILYA, 3)) { + public void testSimpleCase(TestInfo testInfo) throws Throwable { + String tableName = testInfo.getTestMethod().get().getName(); + try (Table t = UTIL.createTable(TableName.valueOf(tableName), FAMILYA, 3)) { Put p = new Put(ROW1); p.addColumn(FAMILYA, QUAL, now, QUAL); p.addColumn(FAMILYA, QUAL, now + 1, QUAL); @@ -224,12 +214,12 @@ public void testSimpleCase() throws Throwable { String[] args = new String[] { // Only export row1 & row2. 
"-D" + TableInputFormat.SCAN_ROW_START + "=\\x32row1", - "-D" + TableInputFormat.SCAN_ROW_STOP + "=\\x32row3", name.getMethodName(), FQ_OUTPUT_DIR, + "-D" + TableInputFormat.SCAN_ROW_STOP + "=\\x32row3", tableName, FQ_OUTPUT_DIR, "1000", // max number of key versions per key to export }; assertTrue(runExport(args)); - final String IMPORT_TABLE = name.getMethodName() + "import"; + final String IMPORT_TABLE = tableName + "import"; try (Table t = UTIL.createTable(TableName.valueOf(IMPORT_TABLE), FAMILYB, 3)) { args = new String[] { "-D" + Import.CF_RENAME_PROP + "=" + FAMILYA_STRING + ":" + FAMILYB_STRING, @@ -298,9 +288,10 @@ public void testImport94Table() throws Throwable { * Test export scanner batching */ @Test - public void testExportScannerBatching() throws Throwable { + public void testExportScannerBatching(TestInfo testInfo) throws Throwable { + String tableName = testInfo.getTestMethod().get().getName(); TableDescriptor desc = TableDescriptorBuilder - .newBuilder(TableName.valueOf(name.getMethodName())) + .newBuilder(TableName.valueOf(tableName)) .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA).setMaxVersions(1).build()) .build(); UTIL.getAdmin().createTable(desc); @@ -314,7 +305,7 @@ public void testExportScannerBatching() throws Throwable { t.put(p); // added scanner batching arg. 
String[] args = new String[] { "-D" + ExportUtils.EXPORT_BATCHING + "=" + EXPORT_BATCH_SIZE, - name.getMethodName(), FQ_OUTPUT_DIR }; + tableName, FQ_OUTPUT_DIR }; assertTrue(runExport(args)); FileSystem fs = FileSystem.get(UTIL.getConfiguration()); @@ -323,9 +314,10 @@ public void testExportScannerBatching() throws Throwable { } @Test - public void testWithDeletes() throws Throwable { + public void testWithDeletes(TestInfo testInfo) throws Throwable { + String tableName = testInfo.getTestMethod().get().getName(); TableDescriptor desc = - TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())) + TableDescriptorBuilder.newBuilder(TableName.valueOf(tableName)) .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA).setMaxVersions(5) .setKeepDeletedCells(KeepDeletedCells.TRUE).build()) .build(); @@ -346,12 +338,12 @@ public void testWithDeletes() throws Throwable { t.delete(d); } - String[] args = new String[] { "-D" + ExportUtils.RAW_SCAN + "=true", name.getMethodName(), + String[] args = new String[] { "-D" + ExportUtils.RAW_SCAN + "=true", tableName, FQ_OUTPUT_DIR, "1000", // max number of key versions per key to export }; assertTrue(runExport(args)); - final String IMPORT_TABLE = name.getMethodName() + "import"; + final String IMPORT_TABLE = tableName + "import"; desc = TableDescriptorBuilder .newBuilder(TableName.valueOf(IMPORT_TABLE)).setColumnFamily(ColumnFamilyDescriptorBuilder .newBuilder(FAMILYA).setMaxVersions(5).setKeepDeletedCells(KeepDeletedCells.TRUE).build()) @@ -378,10 +370,11 @@ public void testWithDeletes() throws Throwable { } @Test - public void testWithMultipleDeleteFamilyMarkersOfSameRowSameFamily() throws Throwable { - final TableName exportTable = TableName.valueOf(name.getMethodName()); + public void testWithMultipleDeleteFamilyMarkersOfSameRowSameFamily(TestInfo testInfo) throws Throwable { + String tableName = testInfo.getTestMethod().get().getName(); + final TableName exportTable = 
TableName.valueOf(tableName); TableDescriptor desc = - TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())) + TableDescriptorBuilder.newBuilder(TableName.valueOf(tableName)) .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA).setMaxVersions(5) .setKeepDeletedCells(KeepDeletedCells.TRUE).build()) .build(); @@ -413,7 +406,7 @@ public void testWithMultipleDeleteFamilyMarkersOfSameRowSameFamily() throws Thro }; assertTrue(runExport(args)); - final String importTable = name.getMethodName() + "import"; + final String importTable = tableName + "import"; desc = TableDescriptorBuilder .newBuilder(TableName.valueOf(importTable)).setColumnFamily(ColumnFamilyDescriptorBuilder .newBuilder(FAMILYA).setMaxVersions(5).setKeepDeletedCells(KeepDeletedCells.TRUE).build()) @@ -448,10 +441,11 @@ public void testWithMultipleDeleteFamilyMarkersOfSameRowSameFamily() throws Thro * attempt with invalid values. */ @Test - public void testWithFilter() throws Throwable { + public void testWithFilter(TestInfo testInfo) throws Throwable { // Create simple table to export + String tableName = testInfo.getTestMethod().get().getName(); TableDescriptor desc = TableDescriptorBuilder - .newBuilder(TableName.valueOf(name.getMethodName())) + .newBuilder(TableName.valueOf(tableName)) .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA).setMaxVersions(5).build()) .build(); UTIL.getAdmin().createTable(desc); @@ -471,11 +465,11 @@ public void testWithFilter() throws Throwable { exportTable.put(Arrays.asList(p1, p2)); // Export the simple table - String[] args = new String[] { name.getMethodName(), FQ_OUTPUT_DIR, "1000" }; + String[] args = new String[] { tableName, FQ_OUTPUT_DIR, "1000" }; assertTrue(runExport(args)); // Import to a new table - final String IMPORT_TABLE = name.getMethodName() + "import"; + final String IMPORT_TABLE = tableName + "import"; desc = TableDescriptorBuilder.newBuilder(TableName.valueOf(IMPORT_TABLE)) 
.setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA).setMaxVersions(5).build()) .build(); @@ -491,14 +485,14 @@ public void testWithFilter() throws Throwable { PrefixFilter filter = new PrefixFilter(ROW1); int count = getCount(exportTable, filter); - Assert.assertEquals("Unexpected row count between export and import tables", count, - getCount(importTable, null)); + assertEquals(count, getCount(importTable, null), + "Unexpected row count between export and import tables"); // and then test that a broken command doesn't bork everything - easier here because we don't // need to re-run the export job args = new String[] { "-D" + Import.FILTER_CLASS_CONF_KEY + "=" + Filter.class.getName(), - "-D" + Import.FILTER_ARGS_CONF_KEY + "=" + Bytes.toString(ROW1) + "", name.getMethodName(), + "-D" + Import.FILTER_ARGS_CONF_KEY + "=" + Bytes.toString(ROW1) + "", tableName, FQ_OUTPUT_DIR, "1000" }; assertFalse(runImport(args)); @@ -511,10 +505,11 @@ public void testWithFilter() throws Throwable { * Create a simple table, run an Export Job on it, Import with bulk output and enable largeResult */ @Test - public void testBulkImportAndLargeResult() throws Throwable { + public void testBulkImportAndLargeResult(TestInfo testInfo) throws Throwable { // Create simple table to export + String tableName = testInfo.getTestMethod().get().getName(); TableDescriptor desc = TableDescriptorBuilder - .newBuilder(TableName.valueOf(name.getMethodName())) + .newBuilder(TableName.valueOf(tableName)) .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA).setMaxVersions(5).build()) .build(); UTIL.getAdmin().createTable(desc); @@ -530,11 +525,11 @@ public void testBulkImportAndLargeResult() throws Throwable { exportTable.put(Arrays.asList(p1, p2)); // Export the simple table - String[] args = new String[] { name.getMethodName(), FQ_OUTPUT_DIR, "1000" }; + String[] args = new String[] { tableName, FQ_OUTPUT_DIR, "1000" }; assertTrue(runExport(args)); // Import to a new table - 
final String IMPORT_TABLE = name.getMethodName() + "import"; + final String IMPORT_TABLE = tableName + "import"; desc = TableDescriptorBuilder.newBuilder(TableName.valueOf(IMPORT_TABLE)) .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA).setMaxVersions(5).build()) .build(); @@ -717,9 +712,10 @@ public void testAddFilterAndArguments() throws IOException { } @Test - public void testDurability() throws Throwable { + public void testDurability(TestInfo testInfo) throws Throwable { // Create an export table. - String exportTableName = name.getMethodName() + "export"; + String methodName = testInfo.getTestMethod().get().getName(); + String exportTableName = methodName + "export"; try (Table exportTable = UTIL.createTable(TableName.valueOf(exportTableName), FAMILYA, 3)) { // Insert some data Put put = new Put(ROW1); @@ -739,7 +735,7 @@ public void testDurability() throws Throwable { assertTrue(runExport(args)); // Create the table for import - String importTableName = name.getMethodName() + "import1"; + String importTableName = methodName + "import1"; Table importTable = UTIL.createTable(TableName.valueOf(importTableName), FAMILYA, 3); // Register the wal listener for the import table @@ -759,7 +755,7 @@ public void testDurability() throws Throwable { assertTrue(getCount(importTable, null) == 2); // Run the import with the default durability option - importTableName = name.getMethodName() + "import2"; + importTableName = methodName + "import2"; importTable = UTIL.createTable(TableName.valueOf(importTableName), FAMILYA, 3); region = UTIL.getHBaseCluster().getRegionServerThreads().get(0).getRegionServer() .getRegions(importTable.getName()).get(0).getRegionInfo(); @@ -809,8 +805,8 @@ public boolean isWALVisited() { * @throws Throwable throws Throwable. 
*/ @Test - public void testTagsAddition() throws Throwable { - final TableName exportTable = TableName.valueOf(name.getMethodName()); + public void testTagsAddition(TestInfo testInfo) throws Throwable { + final TableName exportTable = TableName.valueOf(testInfo.getTestMethod().get().getName()); TableDescriptor desc = TableDescriptorBuilder.newBuilder(exportTable) .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA).setMaxVersions(5) .setKeepDeletedCells(KeepDeletedCells.TRUE).build()) @@ -883,9 +879,9 @@ private void checkWhetherTagExists(TableName table, boolean tagExists) throws IO List tags = PrivateCellUtil.getTags(cell); // If tagExists flag is true then validate whether tag contents are as expected. if (tagExists) { - Assert.assertEquals(1, tags.size()); + assertEquals(1, tags.size()); for (Tag tag : tags) { - Assert.assertEquals(TEST_TAG, Tag.getValueAsString(tag)); + assertEquals(TEST_TAG, Tag.getValueAsString(tag)); } } else { // If tagExists flag is disabled then check for 0 size tags. 
@@ -893,7 +889,7 @@ private void checkWhetherTagExists(TableName table, boolean tagExists) throws IO } } } - Assert.assertTrue(deleteFound); + assertTrue(deleteFound); } /* @@ -945,8 +941,8 @@ public void preBatchMutate(ObserverContext 0); + assertTrue( + hfile.getLen() > 0, String.format("HFile %s appears to contain no data.", hfile.getPath())); if (expectedKVCount > -1) { actualKVCount += getKVCountFromHfile(fs, hfile.getPath()); } @@ -404,9 +397,8 @@ private static void validateHFiles(FileSystem fs, String outputPath, String fami } if (expectedKVCount > -1) { assertTrue( - String.format("KV count in output hfile=<%d> doesn't match with expected KV count=<%d>", - actualKVCount, expectedKVCount), - actualKVCount == expectedKVCount); + actualKVCount == expectedKVCount, String.format("KV count in output hfile=<%d> doesn't match with expected KV count=<%d>", + actualKVCount, expectedKVCount)); } } diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java index 04fc2c8d3b8f..760e11021c38 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java @@ -17,9 +17,10 @@ */ package org.apache.hadoop.hbase.mapreduce; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.Arrays; @@ -61,24 +62,18 @@ import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; -import org.junit.AfterClass; 
-import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.ExpectedException; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@Category({ VerySlowMapReduceTests.class, LargeTests.class }) +@Tag(VerySlowMapReduceTests.TAG) +@Tag(LargeTests.TAG) public class TestImportTsv implements Configurable { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestImportTsv.class); - private static final Logger LOG = LoggerFactory.getLogger(TestImportTsv.class); protected static final String NAME = TestImportTsv.class.getSimpleName(); protected static HBaseTestingUtil util = new HBaseTestingUtil(); @@ -95,9 +90,6 @@ public class TestImportTsv implements Configurable { private TableName tn; private Map args; - @Rule - public ExpectedException exception = ExpectedException.none(); - public Configuration getConf() { return util.getConfiguration(); } @@ -106,17 +98,17 @@ public void setConf(Configuration conf) { throw new IllegalArgumentException("setConf not supported"); } - @BeforeClass + @BeforeAll public static void provisionCluster() throws Exception { util.startMiniCluster(); } - @AfterClass + @AfterAll public static void releaseCluster() throws Exception { util.shutdownMiniCluster(); } - @Before + @BeforeEach public void setup() throws Exception { tn = TableName.valueOf("test-" + util.getRandomUUID()); args = new HashMap<>(); @@ -198,7 +190,7 @@ public void testJobConfigurationsWithTsvImporterTextMapper() throws Exception { "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=,", "-D" + ImportTsv.BULK_OUTPUT_CONF_KEY + "=" + bulkOutputPath.toString(), tn.getNameAsString(), INPUT_FILE }; - 
assertEquals("running test job configuration failed.", 0, + assertEquals(0, ToolRunner.run(new Configuration(util.getConfiguration()), new ImportTsv() { @Override public int run(String[] args) throws Exception { @@ -208,7 +200,7 @@ public int run(String[] args) throws Exception { assertTrue(job.getMapOutputValueClass().equals(Text.class)); return 0; } - }, args)); + }, args), "running test job configuration failed."); // Delete table created by createSubmittableJob. util.deleteTable(tn); } @@ -231,15 +223,15 @@ public void testWithoutAnExistingTableAndCreateTableSetToNo() throws Exception { conf.set(ImportTsv.COLUMNS_CONF_KEY, "HBASE_ROW_KEY,FAM:A"); conf.set(ImportTsv.BULK_OUTPUT_CONF_KEY, "/output"); conf.set(ImportTsv.CREATE_TABLE_CONF_KEY, "no"); - exception.expect(TableNotFoundException.class); - assertEquals("running test job configuration failed.", 0, + assertThrows(TableNotFoundException.class, () -> { ToolRunner.run(new Configuration(util.getConfiguration()), new ImportTsv() { @Override public int run(String[] args) throws Exception { createSubmittableJob(getConf(), args); return 0; } - }, args)); + }, args); + }); } @Test @@ -250,15 +242,15 @@ public void testMRNoMatchedColumnFamily() throws Exception { "-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM01_ERROR:A,FAM01_ERROR:B,FAM02_ERROR:C", tn.getNameAsString(), "/inputFile" }; - exception.expect(NoSuchColumnFamilyException.class); - assertEquals("running test job configuration failed.", 0, + assertThrows(NoSuchColumnFamilyException.class, () -> { ToolRunner.run(new Configuration(util.getConfiguration()), new ImportTsv() { @Override public int run(String[] args) throws Exception { createSubmittableJob(getConf(), args); return 0; } - }, args)); + }, args); + }); util.deleteTable(tn); } @@ -267,15 +259,15 @@ public int run(String[] args) throws Exception { public void testMRWithoutAnExistingTable() throws Exception { String[] args = new String[] { tn.getNameAsString(), "/inputFile" }; - 
exception.expect(TableNotFoundException.class); - assertEquals("running test job configuration failed.", 0, + assertThrows(TableNotFoundException.class, () -> { ToolRunner.run(new Configuration(util.getConfiguration()), new ImportTsv() { @Override public int run(String[] args) throws Exception { createSubmittableJob(getConf(), args); return 0; } - }, args)); + }, args); + }); } @Test @@ -288,7 +280,7 @@ public void testJobConfigurationsWithDryMode() throws Exception { "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=,", "-D" + ImportTsv.BULK_OUTPUT_CONF_KEY + "=" + bulkOutputPath.toString(), "-D" + ImportTsv.DRY_RUN_CONF_KEY + "=true", tn.getNameAsString(), INPUT_FILE }; - assertEquals("running test job configuration failed.", 0, + assertEquals(0, ToolRunner.run(new Configuration(util.getConfiguration()), new ImportTsv() { @Override public int run(String[] args) throws Exception { @@ -296,7 +288,7 @@ public int run(String[] args) throws Exception { assertTrue(job.getOutputFormatClass().equals(NullOutputFormat.class)); return 0; } - }, argsArray)); + }, argsArray), "running test job configuration failed."); // Delete table created by createSubmittableJob. 
util.deleteTable(tn); } @@ -317,8 +309,7 @@ public void testDryModeWithoutBulkOutputAndTableExists() throws Exception { @Test public void testDryModeWithoutBulkOutputAndTableDoesNotExists() throws Exception { args.put(ImportTsv.DRY_RUN_CONF_KEY, "true"); - exception.expect(TableNotFoundException.class); - doMROnTableTest(null, 1); + assertThrows(TableNotFoundException.class, () -> doMROnTableTest(null, 1)); } @Test @@ -345,8 +336,7 @@ public void testDryModeWithBulkOutputAndTableDoesNotExistsCreateTableSetToNo() t args.put(ImportTsv.BULK_OUTPUT_CONF_KEY, hfiles.toString()); args.put(ImportTsv.DRY_RUN_CONF_KEY, "true"); args.put(ImportTsv.CREATE_TABLE_CONF_KEY, "no"); - exception.expect(TableNotFoundException.class); - doMROnTableTest(null, 1); + assertThrows(TableNotFoundException.class, () -> doMROnTableTest(null, 1)); } @Test @@ -358,8 +348,7 @@ public void testDryModeWithBulkModeAndTableDoesNotExistsCreateTableSetToYes() th args.put(ImportTsv.CREATE_TABLE_CONF_KEY, "yes"); doMROnTableTest(null, 1); // Verify temporary table was deleted. 
- exception.expect(TableNotFoundException.class); - util.deleteTable(tn); + assertThrows(TableNotFoundException.class, () -> util.deleteTable(tn)); } /** @@ -453,8 +442,8 @@ protected static Tool doMROnTableTest(HBaseTestingUtil util, TableName table, St && "true".equalsIgnoreCase(args.get(ImportTsv.DRY_RUN_CONF_KEY)); if (args.containsKey(ImportTsv.BULK_OUTPUT_CONF_KEY)) { if (isDryRun) { - assertFalse(String.format("Dry run mode, %s should not have been created.", - ImportTsv.BULK_OUTPUT_CONF_KEY), fs.exists(new Path(ImportTsv.BULK_OUTPUT_CONF_KEY))); + assertFalse(fs.exists(new Path(ImportTsv.BULK_OUTPUT_CONF_KEY)), String.format( + "Dry run mode, %s should not have been created.", ImportTsv.BULK_OUTPUT_CONF_KEY)); } else { validateHFiles(fs, args.get(ImportTsv.BULK_OUTPUT_CONF_KEY), family, expectedKVCount); } @@ -536,25 +525,24 @@ private static void validateHFiles(FileSystem fs, String outputPath, String fami String[] elements = cfStatus.getPath().toString().split(Path.SEPARATOR); String cf = elements[elements.length - 1]; foundFamilies.add(cf); - assertTrue(String.format( + assertTrue(configFamilies.contains(cf), String.format( "HFile output contains a column family (%s) not present in input families (%s)", cf, - configFamilies), configFamilies.contains(cf)); + configFamilies)); for (FileStatus hfile : fs.listStatus(cfStatus.getPath())) { - assertTrue(String.format("HFile %s appears to contain no data.", hfile.getPath()), - hfile.getLen() > 0); + assertTrue(hfile.getLen() > 0, + String.format("HFile %s appears to contain no data.", hfile.getPath())); // count the number of KVs from all the hfiles if (expectedKVCount > -1) { actualKVCount += getKVCountFromHfile(fs, hfile.getPath()); } } } - assertTrue(String.format("HFile output does not contain the input family '%s'.", family), - foundFamilies.contains(family)); + assertTrue(foundFamilies.contains(family), + String.format("HFile output does not contain the input family '%s'.", family)); if (expectedKVCount > 
-1) { - assertTrue( + assertTrue(actualKVCount == expectedKVCount, String.format("KV count in ouput hfile=<%d> doesn't match with expected KV count=<%d>", - actualKVCount, expectedKVCount), - actualKVCount == expectedKVCount); + actualKVCount, expectedKVCount)); } } diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsvParser.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsvParser.java index adb0589c9805..aa9fd1b10941 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsvParser.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsvParser.java @@ -17,11 +17,12 @@ */ package org.apache.hadoop.hbase.mapreduce; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.util.ArrayList; import org.apache.hadoop.hbase.HBaseClassTestRule; @@ -33,9 +34,8 @@ import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.apache.hbase.thirdparty.com.google.common.base.Joiner; import org.apache.hbase.thirdparty.com.google.common.base.Splitter; @@ -44,11 +44,9 @@ /** * Tests for {@link TsvParser}. 
*/ -@Category({ MapReduceTests.class, SmallTests.class }) +@Tag(MapReduceTests.TAG) +@Tag(SmallTests.TAG) public class TestImportTsvParser { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestImportTsvParser.class); private void assertBytesEquals(byte[] a, byte[] b) { assertEquals(Bytes.toStringBinary(a), Bytes.toStringBinary(b)); @@ -171,50 +169,50 @@ public void testTsvParserWithTimestamp() throws BadTsvLineException { /** * Test cases that throw BadTsvLineException */ - @Test(expected = BadTsvLineException.class) + @Test public void testTsvParserBadTsvLineExcessiveColumns() throws BadTsvLineException { TsvParser parser = new TsvParser("HBASE_ROW_KEY,col_a", "\t"); byte[] line = Bytes.toBytes("val_a\tval_b\tval_c"); - parser.parse(line, line.length); + assertThrows(BadTsvLineException.class, () -> parser.parse(line, line.length)); } - @Test(expected = BadTsvLineException.class) + @Test public void testTsvParserBadTsvLineZeroColumn() throws BadTsvLineException { TsvParser parser = new TsvParser("HBASE_ROW_KEY,col_a", "\t"); byte[] line = Bytes.toBytes(""); - parser.parse(line, line.length); + assertThrows(BadTsvLineException.class, () -> parser.parse(line, line.length)); } - @Test(expected = BadTsvLineException.class) + @Test public void testTsvParserBadTsvLineOnlyKey() throws BadTsvLineException { TsvParser parser = new TsvParser("HBASE_ROW_KEY,col_a", "\t"); byte[] line = Bytes.toBytes("key_only"); - parser.parse(line, line.length); + assertThrows(BadTsvLineException.class, () -> parser.parse(line, line.length)); } - @Test(expected = BadTsvLineException.class) + @Test public void testTsvParserBadTsvLineNoRowKey() throws BadTsvLineException { TsvParser parser = new TsvParser("col_a,HBASE_ROW_KEY", "\t"); byte[] line = Bytes.toBytes("only_cola_data_and_no_row_key"); - parser.parse(line, line.length); + assertThrows(BadTsvLineException.class, () -> parser.parse(line, line.length)); } - @Test(expected = 
BadTsvLineException.class) + @Test public void testTsvParserInvalidTimestamp() throws BadTsvLineException { TsvParser parser = new TsvParser("HBASE_ROW_KEY,HBASE_TS_KEY,col_a,", "\t"); assertEquals(1, parser.getTimestampKeyColumnIndex()); byte[] line = Bytes.toBytes("rowkey\ttimestamp\tval_a"); ParsedLine parsed = parser.parse(line, line.length); - assertEquals(-1, parsed.getTimestamp(-1)); + assertThrows(BadTsvLineException.class, () -> parsed.getTimestamp(-1)); checkParsing(parsed, Splitter.on("\t").split(Bytes.toString(line))); } - @Test(expected = BadTsvLineException.class) + @Test public void testTsvParserNoTimestampValue() throws BadTsvLineException { TsvParser parser = new TsvParser("HBASE_ROW_KEY,col_a,HBASE_TS_KEY", "\t"); assertEquals(2, parser.getTimestampKeyColumnIndex()); byte[] line = Bytes.toBytes("rowkey\tval_a"); - parser.parse(line, line.length); + assertThrows(BadTsvLineException.class, () -> parser.parse(line, line.length)); } @Test @@ -225,30 +223,24 @@ public void testTsvParserParseRowKey() throws BadTsvLineException { Pair rowKeyOffsets = parser.parseRowKey(line, line.length); assertEquals(0, rowKeyOffsets.getFirst().intValue()); assertEquals(6, rowKeyOffsets.getSecond().intValue()); - try { - line = Bytes.toBytes("\t\tval_a\t1234"); - parser.parseRowKey(line, line.length); - fail("Should get BadTsvLineException on empty rowkey."); - } catch (BadTsvLineException ignored) { - } - parser = new TsvParser("col_a,HBASE_ROW_KEY,HBASE_TS_KEY", "\t"); - assertEquals(1, parser.getRowKeyColumnIndex()); + byte[] line2 = Bytes.toBytes("\t\tval_a\t1234"); + assertThrows(BadTsvLineException.class, () -> parser.parseRowKey(line2, line2.length)); + + TsvParser parser2 = new TsvParser("col_a,HBASE_ROW_KEY,HBASE_TS_KEY", "\t"); + assertEquals(1, parser2.getRowKeyColumnIndex()); line = Bytes.toBytes("val_a\trowkey\t1234"); - rowKeyOffsets = parser.parseRowKey(line, line.length); + rowKeyOffsets = parser2.parseRowKey(line, line.length); assertEquals(6, 
rowKeyOffsets.getFirst().intValue()); assertEquals(6, rowKeyOffsets.getSecond().intValue()); - try { - line = Bytes.toBytes("val_a"); - rowKeyOffsets = parser.parseRowKey(line, line.length); - fail("Should get BadTsvLineException when number of columns less than rowkey position."); - } catch (BadTsvLineException ignored) { - } - parser = new TsvParser("col_a,HBASE_TS_KEY,HBASE_ROW_KEY", "\t"); - assertEquals(2, parser.getRowKeyColumnIndex()); + byte[] line3 = Bytes.toBytes("val_a"); + assertThrows(BadTsvLineException.class, () -> parser2.parseRowKey(line3, line3.length)); + + TsvParser parser3 = new TsvParser("col_a,HBASE_TS_KEY,HBASE_ROW_KEY", "\t"); + assertEquals(2, parser3.getRowKeyColumnIndex()); line = Bytes.toBytes("val_a\t1234\trowkey"); - rowKeyOffsets = parser.parseRowKey(line, line.length); + rowKeyOffsets = parser3.parseRowKey(line, line.length); assertEquals(11, rowKeyOffsets.getFirst().intValue()); assertEquals(6, rowKeyOffsets.getSecond().intValue()); } @@ -263,27 +255,20 @@ public void testTsvParseAttributesKey() throws BadTsvLineException { assertEquals(3, parser.getAttributesKeyColumnIndex()); String[] attributes = parse.getIndividualAttributes(); assertEquals("key=>value", attributes[0]); - try { - line = Bytes.toBytes("rowkey\tval_a\t1234"); - parser.parse(line, line.length); - fail("Should get BadTsvLineException on empty rowkey."); - } catch (BadTsvLineException ignored) { - } + byte[] line2 = Bytes.toBytes("rowkey\tval_a\t1234"); + TsvParser finalParser = parser; + assertThrows(BadTsvLineException.class, () -> finalParser.parse(line2, line2.length)); - parser = new TsvParser("HBASE_ATTRIBUTES_KEY,col_a,HBASE_ROW_KEY,HBASE_TS_KEY", "\t"); - assertEquals(2, parser.getRowKeyColumnIndex()); + TsvParser parser2 = new TsvParser("HBASE_ATTRIBUTES_KEY,col_a,HBASE_ROW_KEY,HBASE_TS_KEY", "\t"); + assertEquals(2, parser2.getRowKeyColumnIndex()); line = Bytes.toBytes("key=>value\tval_a\trowkey\t1234"); - parse = parser.parse(line, line.length); + parse = 
parser2.parse(line, line.length); assertEquals(0, parse.getAttributeKeyOffset()); - assertEquals(0, parser.getAttributesKeyColumnIndex()); + assertEquals(0, parser2.getAttributesKeyColumnIndex()); attributes = parse.getIndividualAttributes(); assertEquals("key=>value", attributes[0]); - try { - line = Bytes.toBytes("val_a"); - ParsedLine parse2 = parser.parse(line, line.length); - fail("Should get BadTsvLineException when number of columns less than rowkey position."); - } catch (BadTsvLineException ignored) { - } + byte[] line3 = Bytes.toBytes("val_a"); + assertThrows(BadTsvLineException.class, () -> parser2.parse(line3, line3.length)); parser = new TsvParser("col_a,HBASE_ATTRIBUTES_KEY,HBASE_TS_KEY,HBASE_ROW_KEY", "\t"); assertEquals(3, parser.getRowKeyColumnIndex()); diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestJarFinder.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestJarFinder.java index 87461c2735f0..40fe6abf83d8 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestJarFinder.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestJarFinder.java @@ -36,18 +36,15 @@ import org.junit.ClassRule; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; import org.slf4j.LoggerFactory; /** * This file was forked from hadoop/common/branches/branch-2@1350012. 
*/ -@Category(SmallTests.class) +@Tag(SmallTests.TAG) public class TestJarFinder { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestJarFinder.class); - @Test public void testJar() throws Exception { diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMRIncrementalLoad.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMRIncrementalLoad.java index 1e7cb0e41037..5542f982d00f 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMRIncrementalLoad.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMRIncrementalLoad.java @@ -20,31 +20,23 @@ import java.util.Arrays; import java.util.List; import java.util.stream.Collectors; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.MapReduceTests; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; -@RunWith(Parameterized.class) -@Category({ MapReduceTests.class, LargeTests.class }) +@Tag(MapReduceTests.TAG) +@Tag(LargeTests.TAG) public class TestMRIncrementalLoad extends MRIncrementalLoadTestBase { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestMRIncrementalLoad.class); - - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { setupCluster(false); } - @Parameters(name = "{index}: shouldChangeRegions={0}, putSortReducer={1}," + " tableStr={2}") public static List params() { 
return Arrays.asList(new Object[] { false, false, Arrays.asList("testMRIncrementalLoad") }, new Object[] { true, false, Arrays.asList("testMRIncrementalLoadWithSplit") }, @@ -52,4 +44,11 @@ public static List params() { new Object[] { false, true, Arrays.stream(TABLE_NAMES).map(TableName::getNameAsString).collect(Collectors.toList()) }); } + + @ParameterizedTest + @MethodSource("params") + public void testMRIncrementalLoad(boolean shouldChangeRegions, boolean putSortReducer, + List tableStr) throws Exception { + runTest(shouldChangeRegions, putSortReducer, tableStr); + } } diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMRIncrementalLoadWithLocality.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMRIncrementalLoadWithLocality.java index e27273b15101..27edae971e61 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMRIncrementalLoadWithLocality.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMRIncrementalLoadWithLocality.java @@ -19,33 +19,32 @@ import java.util.Arrays; import java.util.List; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.MapReduceTests; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; -@RunWith(Parameterized.class) -@Category({ MapReduceTests.class, LargeTests.class }) +@Tag(MapReduceTests.TAG) +@Tag(LargeTests.TAG) public class TestMRIncrementalLoadWithLocality extends MRIncrementalLoadTestBase { - @ClassRule - public static final 
HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestMRIncrementalLoadWithLocality.class); - - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { setupCluster(true); } - @Parameters(name = "{index}: shouldChangeRegions={0}, putSortReducer={1}," + " tableStr={2}") public static List params() { return Arrays.asList( new Object[] { false, false, Arrays.asList("testMRIncrementalLoadWithLocality1") }, new Object[] { true, false, Arrays.asList("testMRIncrementalLoadWithLocality2") }); } + + @ParameterizedTest + @MethodSource("params") + public void testMRIncrementalLoadWithLocality(boolean shouldChangeRegions, boolean putSortReducer, + List tableStr) throws Exception { + runTest(shouldChangeRegions, putSortReducer, tableStr); + } } diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableInputFormat.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableInputFormat.java index 5aa14c3561af..6a2ad22f43c6 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableInputFormat.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableInputFormat.java @@ -26,22 +26,19 @@ import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.VerySlowMapReduceTests; import org.apache.hadoop.mapreduce.Job; -import org.junit.BeforeClass; -import org.junit.ClassRule; +import org.junit.jupiter.api.BeforeAll; import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; /** * Tests various scan start and stop row scenarios. This is set in a scan and tested in a MapReduce * job to see if that is handed over and done properly too. 
*/ -@Category({ VerySlowMapReduceTests.class, LargeTests.class }) +@Tag(VerySlowMapReduceTests.TAG) +@Tag(LargeTests.TAG) public class TestMultiTableInputFormat extends MultiTableInputFormatTestBase { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestMultiTableInputFormat.class); - - @BeforeClass + @BeforeAll public static void setupLogging() { Log4jUtils.enableDebug(MultiTableInputFormat.class); } diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableInputFormatBase.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableInputFormatBase.java index 7c136fa2a19f..45d97cb27479 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableInputFormatBase.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableInputFormatBase.java @@ -55,36 +55,29 @@ import org.apache.hadoop.mapreduce.JobContext; import org.apache.hadoop.mapreduce.RecordReader; import org.apache.hadoop.mapreduce.TaskAttemptContext; -import org.junit.Assert; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + /** * Tests of MultiTableInputFormatBase. 
*/ -@Category({ SmallTests.class }) +@Tag(SmallTests.TAG) public class TestMultiTableInputFormatBase { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestMultiTableInputFormatBase.class); - - @Rule - public final TestName name = new TestName(); - /** * Test getSplits only puts up one Connection. In past it has put up many Connections. Each * Connection setup comes with a fresh new cache so we have to do fresh hit on hbase:meta. Should * only do one Connection when doing getSplits even if a MultiTableInputFormat. */ @Test - public void testMRSplitsConnectionCount() throws IOException { + public void testMRSplitsConnectionCount(TestInfo testInfo) throws IOException { // Make instance of MTIFB. MultiTableInputFormatBase mtif = new MultiTableInputFormatBase() { @Override @@ -104,17 +97,17 @@ public RecordReader createRecordReader(InputSpli List scans = new ArrayList<>(); for (int i = 0; i < 10; i++) { Scan scan = new Scan(); - String tableName = this.name.getMethodName() + i; + String tableName = testInfo.getTestMethod().get().getName() + i; scan.setAttribute(SCAN_ATTRIBUTES_TABLE_NAME, Bytes.toBytes(tableName)); scans.add(scan); } mtif.setScans(scans); // Get splits. Assert that that more than one. List splits = mtif.getSplits(mockedJobContext); - Assert.assertTrue(splits.size() > 0); + assertTrue(splits.size() > 0); // Assert only one Connection was made (see the static counter we have in the mocked // Connection MRSplitsConnection Constructor. 
- Assert.assertEquals(1, MRSplitsConnection.creations.get()); + assertEquals(1, MRSplitsConnection.creations.get()); } /** diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableSnapshotInputFormat.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableSnapshotInputFormat.java index fbf9e7ef64c8..9cd0c8870a2d 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableSnapshotInputFormat.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableSnapshotInputFormat.java @@ -34,25 +34,21 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.CommonFSUtils; import org.apache.hadoop.mapreduce.Job; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; import org.apache.hbase.thirdparty.com.google.common.base.Function; import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList; import org.apache.hbase.thirdparty.com.google.common.collect.Multimaps; -@Category({ VerySlowMapReduceTests.class, LargeTests.class }) +@Tag(VerySlowMapReduceTests.TAG) +@Tag(LargeTests.TAG) public class TestMultiTableSnapshotInputFormat extends MultiTableInputFormatTestBase { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestMultiTableSnapshotInputFormat.class); - protected Path restoreDir; - @BeforeClass + @BeforeAll public static void setUpSnapshots() throws Exception { Log4jUtils.enableDebug(MultiTableSnapshotInputFormat.class); Log4jUtils.enableDebug(MultiTableSnapshotInputFormatImpl.class); @@ -66,7 +62,7 @@ public static void setUpSnapshots() throws Exception { } } - @Before + @BeforeEach public void setUp() throws Exception { this.restoreDir = TEST_UTIL.getRandomDir(); } 
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableSnapshotInputFormatImpl.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableSnapshotInputFormatImpl.java index 409c8d7f195d..be48196c0a6d 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableSnapshotInputFormatImpl.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableSnapshotInputFormatImpl.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.mapreduce; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doNothing; @@ -35,10 +35,9 @@ import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.CommonFSUtils; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.mockito.Mockito; import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList; @@ -46,20 +45,16 @@ import org.apache.hbase.thirdparty.com.google.common.collect.Lists; import org.apache.hbase.thirdparty.com.google.common.collect.Maps; -@Category({ SmallTests.class }) +@Tag(SmallTests.TAG) public class TestMultiTableSnapshotInputFormatImpl { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestMultiTableSnapshotInputFormatImpl.class); - private MultiTableSnapshotInputFormatImpl subject; private Map> snapshotScans; private Path restoreDir; private Configuration conf; private Path rootDir; - @Before + @BeforeEach public void setUp() throws Exception { this.subject = Mockito.spy(new 
MultiTableSnapshotInputFormatImpl()); @@ -173,8 +168,8 @@ public void testSetInputCreatesRestoreDirectoriesUnderRootRestoreDir() throws Ex Map restoreDirs = subject.getSnapshotDirs(conf); for (Path snapshotDir : restoreDirs.values()) { - assertEquals("Expected " + snapshotDir + " to be a child of " + restoreDir, restoreDir, - snapshotDir.getParent()); + assertEquals(restoreDir, + snapshotDir.getParent(), "Expected " + snapshotDir + " to be a child of " + restoreDir); } } diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java index f5f0fdf169a9..05f645ac6280 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.mapreduce; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.File; import java.io.IOException; @@ -44,11 +44,10 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -57,13 +56,10 @@ * simple - take every row in the table, reverse the value of a particular cell, and write it back * to the table. 
*/ -@Category({ MapReduceTests.class, LargeTests.class }) +@Tag(MapReduceTests.TAG) +@Tag(LargeTests.TAG) public class TestMultithreadedTableMapper { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestMultithreadedTableMapper.class); - private static final Logger LOG = LoggerFactory.getLogger(TestMultithreadedTableMapper.class); private static final HBaseTestingUtil UTIL = new HBaseTestingUtil(); static final TableName MULTI_REGION_TABLE_NAME = TableName.valueOf("mrtest"); @@ -71,7 +67,7 @@ public class TestMultithreadedTableMapper { static final byte[] OUTPUT_FAMILY = Bytes.toBytes("text"); static final int NUMBER_OF_THREADS = 10; - @BeforeClass + @BeforeAll public static void beforeClass() throws Exception { // Up the handlers; this test needs more than usual. UTIL.getConfiguration().setInt(HConstants.REGION_SERVER_HIGH_PRIORITY_HANDLER_COUNT, 10); @@ -82,7 +78,7 @@ public static void beforeClass() throws Exception { UTIL.waitUntilAllRegionsAssigned(MULTI_REGION_TABLE_NAME); } - @AfterClass + @AfterAll public static void afterClass() throws Exception { UTIL.shutdownMiniCluster(); } diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRegionSizeCalculator.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRegionSizeCalculator.java index 583223691da8..3dc003c09cbd 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRegionSizeCalculator.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRegionSizeCalculator.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hbase.mapreduce; import static org.apache.hadoop.hbase.HConstants.DEFAULT_REGIONSERVER_PORT; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import static org.mockito.Mockito.when; import java.io.IOException; @@ -39,17 +39,15 @@ import org.apache.hadoop.hbase.testclassification.MiscTests; import 
org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.ClassRule; -import org.junit.Test; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.junit.experimental.categories.Category; import org.mockito.Mockito; -@Category({ MiscTests.class, SmallTests.class }) +@Tag(MiscTests.TAG) +@Tag(SmallTests.TAG) public class TestRegionSizeCalculator { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestRegionSizeCalculator.class); private Configuration configuration = new Configuration(); private final long megabyte = 1024L * 1024L; diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRoundRobinTableInputFormat.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRoundRobinTableInputFormat.java index 34bd76937d33..41f42a005838 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRoundRobinTableInputFormat.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRoundRobinTableInputFormat.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.mapreduce; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.ArrayList; @@ -36,19 +36,16 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.mapreduce.InputSplit; import org.apache.hadoop.mapreduce.JobContext; -import org.junit.ClassRule; -import org.junit.Test; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.junit.experimental.categories.Category; import org.mockito.Mockito; /** * Basic test of {@link RoundRobinTableInputFormat}; i.e. RRTIF. 
*/ -@Category({ SmallTests.class }) +@Tag(SmallTests.TAG) public class TestRoundRobinTableInputFormat { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestRoundRobinTableInputFormat.class); private static final int SERVERS_COUNT = 5; private static final String[] KEYS = { "aa", "ab", "ac", "ad", "ae", "ba", "bb", "bc", "bd", "be", @@ -127,7 +124,7 @@ private void assertLengthDescending(List list) long previousLength = Long.MAX_VALUE; for (InputSplit is : list) { long length = is.getLength(); - assertTrue(previousLength + " " + length, previousLength > length); + assertTrue(previousLength > length, previousLength + " " + length); previousLength = length; } } diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java index 1922b89bc2c8..90e9493d5015 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.mapreduce; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.ByteArrayOutputStream; import java.io.IOException; @@ -41,10 +41,10 @@ import org.apache.hadoop.mapreduce.Counter; import org.apache.hadoop.mapreduce.Counters; import org.apache.hadoop.mapreduce.Job; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import 
org.junit.experimental.categories.Category; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -52,13 +52,10 @@ /** * Test the rowcounter map reduce job. */ -@Category({ MapReduceTests.class, LargeTests.class }) +@Tag(MapReduceTests.TAG) +@Tag(LargeTests.TAG) public class TestRowCounter { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestRowCounter.class); - private static final Logger LOG = LoggerFactory.getLogger(TestRowCounter.class); private final static HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); private final static String TABLE_NAME = "testRowCounter"; @@ -73,7 +70,7 @@ public class TestRowCounter { /** * @throws java.lang.Exception */ - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { TEST_UTIL.startMiniCluster(); Table table = TEST_UTIL.createTable(TableName.valueOf(TABLE_NAME), Bytes.toBytes(COL_FAM)); @@ -84,7 +81,7 @@ public static void setUpBeforeClass() throws Exception { /** * @throws java.lang.Exception */ - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSimpleTotalOrderPartitioner.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSimpleTotalOrderPartitioner.java index 9bb10d9dbf46..7a6555b95cf5 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSimpleTotalOrderPartitioner.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSimpleTotalOrderPartitioner.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.mapreduce; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.nio.charset.StandardCharsets; import java.util.Base64; @@ -29,17 +29,18 @@ import org.apache.hadoop.hbase.testclassification.MapReduceTests; import 
org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.ClassRule; -import org.junit.Test; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Tags; +import org.junit.jupiter.api.Test; import org.junit.experimental.categories.Category; /** * Test of simple partitioner. */ -@Category({ MapReduceTests.class, SmallTests.class }) +@Tag(MapReduceTests.TAG) +@Tag(SmallTests.TAG) public class TestSimpleTotalOrderPartitioner { - @ClassRule public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestSimpleTotalOrderPartitioner.class); diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSyncTable.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSyncTable.java index 2434df6adf51..95d32eaffea2 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSyncTable.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSyncTable.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.mapreduce; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.util.Arrays; import java.util.function.BooleanSupplier; @@ -43,24 +43,21 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.mapreduce.Counters; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import 
org.junit.experimental.categories.Category; -import org.junit.rules.TestName; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Basic test for the SyncTable M/R tool */ -@Category({ MapReduceTests.class, LargeTests.class }) +@Tag(MapReduceTests.TAG) +@Tag(LargeTests.TAG) public class TestSyncTable { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestSyncTable.class); private static final Logger LOG = LoggerFactory.getLogger(TestSyncTable.class); @@ -68,16 +65,13 @@ public class TestSyncTable { private static final HBaseTestingUtil UTIL2 = new HBaseTestingUtil(); - @Rule - public TestName name = new TestName(); - - @BeforeClass + @BeforeAll public static void beforeClass() throws Exception { UTIL1.startMiniCluster(3); UTIL2.startMiniCluster(3); } - @AfterClass + @AfterAll public static void afterClass() throws Exception { UTIL2.shutdownMiniCluster(); UTIL1.shutdownMiniCluster(); @@ -91,11 +85,13 @@ private static byte[][] generateSplits(int numRows, int numRegions) { return splitRows; } - private void testSyncTable(HBaseTestingUtil source, HBaseTestingUtil target, String... options) - throws Exception { - final TableName sourceTableName = TableName.valueOf(name.getMethodName() + "_source"); - final TableName targetTableName = TableName.valueOf(name.getMethodName() + "_target"); - Path testDir = source.getDataTestDirOnTestFS(name.getMethodName()); + private void testSyncTable(TestInfo testInfo, HBaseTestingUtil source, HBaseTestingUtil target, + String... 
options) throws Exception { + final TableName sourceTableName = + TableName.valueOf(testInfo.getTestMethod().get().getName() + "_source"); + final TableName targetTableName = + TableName.valueOf(testInfo.getTestMethod().get().getName() + "_target"); + Path testDir = source.getDataTestDirOnTestFS(testInfo.getTestMethod().get().getName()); writeTestData(source, sourceTableName, target, targetTableName); hashSourceTable(source, sourceTableName, testDir); @@ -115,32 +111,35 @@ private void testSyncTable(HBaseTestingUtil source, HBaseTestingUtil target, Str } @Test - public void testSyncTable() throws Exception { - testSyncTable(UTIL1, UTIL1); + public void testSyncTable(TestInfo testInfo) throws Exception { + testSyncTable(testInfo, UTIL1, UTIL1); } @Test - public void testSyncTableToPeerCluster() throws Exception { - testSyncTable(UTIL1, UTIL2, "--sourceuri=" + UTIL1.getRpcConnnectionURI()); + public void testSyncTableToPeerCluster(TestInfo testInfo) throws Exception { + testSyncTable(testInfo, UTIL1, UTIL2, "--sourceuri=" + UTIL1.getRpcConnnectionURI()); } @Test - public void testSyncTableFromSourceToPeerCluster() throws Exception { - testSyncTable(UTIL2, UTIL1, "--sourceuri=" + UTIL2.getRpcConnnectionURI(), + public void testSyncTableFromSourceToPeerCluster(TestInfo testInfo) throws Exception { + testSyncTable(testInfo, UTIL2, UTIL1, "--sourceuri=" + UTIL2.getRpcConnnectionURI(), "--targeturi=" + UTIL1.getZkConnectionURI()); } @Test - public void testSyncTableFromSourceToPeerClusterWithClusterKey() throws Exception { - testSyncTable(UTIL2, UTIL1, "--sourcezkcluster=" + UTIL2.getClusterKey(), + public void testSyncTableFromSourceToPeerClusterWithClusterKey(TestInfo testInfo) + throws Exception { + testSyncTable(testInfo, UTIL2, UTIL1, "--sourcezkcluster=" + UTIL2.getClusterKey(), "--targetzkcluster=" + UTIL1.getClusterKey()); } @Test - public void testSyncTableDoDeletesFalse() throws Exception { - final TableName sourceTableName = 
TableName.valueOf(name.getMethodName() + "_source"); - final TableName targetTableName = TableName.valueOf(name.getMethodName() + "_target"); - Path testDir = UTIL1.getDataTestDirOnTestFS(name.getMethodName()); + public void testSyncTableDoDeletesFalse(TestInfo testInfo) throws Exception { + final TableName sourceTableName = + TableName.valueOf(testInfo.getTestMethod().get().getName() + "_source"); + final TableName targetTableName = + TableName.valueOf(testInfo.getTestMethod().get().getName() + "_target"); + Path testDir = UTIL1.getDataTestDirOnTestFS(testInfo.getTestMethod().get().getName()); writeTestData(UTIL1, sourceTableName, UTIL1, targetTableName); hashSourceTable(UTIL1, sourceTableName, testDir); @@ -160,10 +159,12 @@ public void testSyncTableDoDeletesFalse() throws Exception { } @Test - public void testSyncTableDoPutsFalse() throws Exception { - final TableName sourceTableName = TableName.valueOf(name.getMethodName() + "_source"); - final TableName targetTableName = TableName.valueOf(name.getMethodName() + "_target"); - Path testDir = UTIL2.getDataTestDirOnTestFS(name.getMethodName()); + public void testSyncTableDoPutsFalse(TestInfo testInfo) throws Exception { + final TableName sourceTableName = + TableName.valueOf(testInfo.getTestMethod().get().getName() + "_source"); + final TableName targetTableName = + TableName.valueOf(testInfo.getTestMethod().get().getName() + "_target"); + Path testDir = UTIL2.getDataTestDirOnTestFS(testInfo.getTestMethod().get().getName()); writeTestData(UTIL2, sourceTableName, UTIL2, targetTableName); hashSourceTable(UTIL2, sourceTableName, testDir); @@ -183,10 +184,12 @@ public void testSyncTableDoPutsFalse() throws Exception { } @Test - public void testSyncTableIgnoreTimestampsTrue() throws Exception { - final TableName sourceTableName = TableName.valueOf(name.getMethodName() + "_source"); - final TableName targetTableName = TableName.valueOf(name.getMethodName() + "_target"); - Path testDir = 
UTIL1.getDataTestDirOnTestFS(name.getMethodName()); + public void testSyncTableIgnoreTimestampsTrue(TestInfo testInfo) throws Exception { + final TableName sourceTableName = + TableName.valueOf(testInfo.getTestMethod().get().getName() + "_source"); + final TableName targetTableName = + TableName.valueOf(testInfo.getTestMethod().get().getName() + "_target"); + Path testDir = UTIL1.getDataTestDirOnTestFS(testInfo.getTestMethod().get().getName()); long current = EnvironmentEdgeManager.currentTime(); writeTestData(UTIL1, sourceTableName, UTIL2, targetTableName, current - 1000, current); hashSourceTable(UTIL1, sourceTableName, testDir, "--ignoreTimestamps=true"); @@ -206,18 +209,18 @@ public void testSyncTableIgnoreTimestampsTrue() throws Exception { } private void assertCellEquals(Cell sourceCell, Cell targetCell, BooleanSupplier checkTimestamp) { - assertTrue("Rows don't match, source: " + sourceCell + ", target: " + targetCell, - CellUtil.matchingRows(sourceCell, targetCell)); - assertTrue("Families don't match, source: " + sourceCell + ", target: " + targetCell, - CellUtil.matchingFamily(sourceCell, targetCell)); - assertTrue("Qualifiers don't match, source: " + sourceCell + ", target: " + targetCell, - CellUtil.matchingQualifier(sourceCell, targetCell)); + assertTrue(CellUtil.matchingRows(sourceCell, targetCell), + "Rows don't match, source: " + sourceCell + ", target: " + targetCell); + assertTrue(CellUtil.matchingFamily(sourceCell, targetCell), + "Families don't match, source: " + sourceCell + ", target: " + targetCell); + assertTrue(CellUtil.matchingQualifier(sourceCell, targetCell), + "Qualifiers don't match, source: " + sourceCell + ", target: " + targetCell); if (checkTimestamp.getAsBoolean()) { - assertTrue("Timestamps don't match, source: " + sourceCell + ", target: " + targetCell, - CellUtil.matchingTimestamp(sourceCell, targetCell)); + assertTrue(CellUtil.matchingTimestamp(sourceCell, targetCell), + "Timestamps don't match, source: " + sourceCell + ", 
target: " + targetCell); } - assertTrue("Values don't match, source: " + sourceCell + ", target: " + targetCell, - CellUtil.matchingValue(sourceCell, targetCell)); + assertTrue(CellUtil.matchingValue(sourceCell, targetCell), + "Values don't match, source: " + sourceCell + ", target: " + targetCell); } private void assertEqualTables(int expectedRows, HBaseTestingUtil sourceCluster, @@ -320,7 +323,7 @@ private void assertTargetDoDeletesFalse(int expectedRows, HBaseTestingUtil sourc targetRow = targetScanner.next(); sourceRow = sourceScanner.next(); } - assertEquals("Target expected rows does not match.", expectedRows, rowsCount); + assertEquals(expectedRows, rowsCount, "Target expected rows does not match."); } } @@ -390,7 +393,7 @@ private void assertTargetDoPutsFalse(int expectedRows, HBaseTestingUtil sourceCl targetRow = targetScanner.next(); sourceRow = sourceScanner.next(); } - assertEquals("Target expected rows does not match.", expectedRows, rowsCount); + assertEquals(expectedRows, rowsCount, "Target expected rows does not match."); } } @@ -402,7 +405,7 @@ private Counters syncTables(Configuration conf, TableName sourceTableName, args[options.length + 1] = sourceTableName.getNameAsString(); args[options.length + 2] = targetTableName.getNameAsString(); int code = syncTable.run(args); - assertEquals("sync table job failed", 0, code); + assertEquals(0, code, "sync table job failed"); LOG.info("Sync tables completed"); return syncTable.counters; @@ -421,7 +424,7 @@ private void hashSourceTable(HBaseTestingUtil sourceCluster, TableName sourceTab args[options.length + 3] = sourceTableName.getNameAsString(); args[options.length + 4] = testDir.toString(); int code = hashTable.run(args); - assertEquals("hash table job failed", 0, code); + assertEquals(0, code, "hash table job failed"); FileSystem fs = sourceCluster.getTestFileSystem(); diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormat.java 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormat.java index c12a7e817bb7..994015fa795c 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormat.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormat.java @@ -17,9 +17,10 @@ */ package org.apache.hadoop.hbase.mapreduce; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.*; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.doThrow; @@ -57,11 +58,11 @@ import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.JobContext; import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.junit.experimental.categories.Category; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; @@ -71,10 +72,9 @@ /** * This tests the TableInputFormat and its recovery semantics */ -@Category(LargeTests.class) +@Tag(LargeTests.TAG) public class TestTableInputFormat { - @ClassRule public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestTableInputFormat.class); @@ -86,17 +86,17 @@ public class TestTableInputFormat { private static final byte[][] columns = new byte[][] { FAMILY }; - @BeforeClass + @BeforeAll public static void 
beforeClass() throws Exception { UTIL.startMiniCluster(); } - @AfterClass + @AfterAll public static void afterClass() throws Exception { UTIL.shutdownMiniCluster(); } - @Before + @BeforeEach public void before() throws IOException { LOG.info("before"); UTIL.ensureSomeRegionServersAvailable(1); @@ -266,11 +266,11 @@ public void testTableRecordReaderScannerFailMapreduce() throws IOException, Inte /** * Run test assuming Scanner IOException failure using newer mapreduce api */ - @Test(expected = IOException.class) + @Test public void testTableRecordReaderScannerFailMapreduceTwice() throws IOException, InterruptedException { Table htable = createIOEScannerTable(Bytes.toBytes("table3-mr"), 2); - runTestMapreduce(htable); + assertThrows(IOException.class, () -> runTestMapreduce(htable)); } /** @@ -286,11 +286,12 @@ public void testTableRecordReaderScannerTimeoutMapreduce() /** * Run test assuming NotServingRegionException using newer mapreduce api */ - @Test(expected = org.apache.hadoop.hbase.NotServingRegionException.class) + @Test public void testTableRecordReaderScannerTimeoutMapreduceTwice() throws IOException, InterruptedException { Table htable = createDNRIOEScannerTable(Bytes.toBytes("table5-mr"), 2); - runTestMapreduce(htable); + assertThrows(org.apache.hadoop.hbase.NotServingRegionException.class, + () -> runTestMapreduce(htable)); } /** @@ -334,19 +335,19 @@ void testInputFormat(Class clazz) job.setNumReduceTasks(0); LOG.debug("submitting job."); - assertTrue("job failed!", job.waitForCompletion(true)); - assertEquals("Saw the wrong number of instances of the filtered-for row.", 2, job.getCounters() - .findCounter(TestTableInputFormat.class.getName() + ":row", "aaa").getValue()); - assertEquals("Saw any instances of the filtered out row.", 0, job.getCounters() - .findCounter(TestTableInputFormat.class.getName() + ":row", "bbb").getValue()); - assertEquals("Saw the wrong number of instances of columnA.", 1, job.getCounters() - 
.findCounter(TestTableInputFormat.class.getName() + ":family", "columnA").getValue()); - assertEquals("Saw the wrong number of instances of columnB.", 1, job.getCounters() - .findCounter(TestTableInputFormat.class.getName() + ":family", "columnB").getValue()); - assertEquals("Saw the wrong count of values for the filtered-for row.", 2, job.getCounters() - .findCounter(TestTableInputFormat.class.getName() + ":value", "value aaa").getValue()); - assertEquals("Saw the wrong count of values for the filtered-out row.", 0, job.getCounters() - .findCounter(TestTableInputFormat.class.getName() + ":value", "value bbb").getValue()); + assertTrue(job.waitForCompletion(true), "job failed!"); + assertEquals(2, job.getCounters() + .findCounter(TestTableInputFormat.class.getName() + ":row", "aaa").getValue(), "Saw the wrong number of instances of the filtered-for row."); + assertEquals(0, job.getCounters() + .findCounter(TestTableInputFormat.class.getName() + ":row", "bbb").getValue(), "Saw any instances of the filtered out row."); + assertEquals(1, job.getCounters() + .findCounter(TestTableInputFormat.class.getName() + ":family", "columnA").getValue(), "Saw the wrong number of instances of columnA."); + assertEquals(1, job.getCounters() + .findCounter(TestTableInputFormat.class.getName() + ":family", "columnB").getValue(), "Saw the wrong number of instances of columnB."); + assertEquals(2, job.getCounters() + .findCounter(TestTableInputFormat.class.getName() + ":value", "value aaa").getValue(), "Saw the wrong count of values for the filtered-for row."); + assertEquals(0, job.getCounters() + .findCounter(TestTableInputFormat.class.getName() + ":value", "value bbb").getValue(), "Saw the wrong count of values for the filtered-out row."); } public static class ExampleVerifier extends TableMapper { diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatBase.java 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatBase.java index 7b2170d19520..bddf5eda16f7 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatBase.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatBase.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.mapreduce; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyBoolean; import static org.mockito.Mockito.mock; @@ -54,20 +54,15 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.mapreduce.JobContext; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; -@Category({ SmallTests.class }) +@Tag(SmallTests.TAG) public class TestTableInputFormatBase { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestTableInputFormatBase.class); - @Test public void testReuseRegionSizeCalculator() throws IOException { JobContext context = mock(JobContext.class); diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScan.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScan.java index aeea1dffbf51..3cf0004018e9 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScan.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScan.java @@ -18,20 +18,15 @@ package org.apache.hadoop.hbase.mapreduce; import java.io.IOException; -import 
org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.MapReduceTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ MapReduceTests.class, LargeTests.class }) +@Tag(MapReduceTests.TAG) +@Tag(LargeTests.TAG) public class TestTableInputFormatScan extends TestTableInputFormatScanBase { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestTableInputFormatScan.class); - /** * Tests a MR scan using specific number of mappers. The test table has 26 regions, */ diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanBase.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanBase.java index ced6e156e87b..5cecc20cec58 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanBase.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanBase.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.mapreduce; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.ArrayList; @@ -44,9 +44,9 @@ import org.apache.hadoop.mapreduce.TaskCounter; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat; -import org.junit.AfterClass; -import org.junit.Assert; -import org.junit.BeforeClass; +import org.junit.jupiter.api.AfterAll; +import 
org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeAll; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -66,7 +66,7 @@ public abstract class TestTableInputFormatScanBase { private static Table table = null; - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { // start mini hbase cluster TEST_UTIL.startMiniCluster(3); @@ -75,7 +75,7 @@ public static void setUpBeforeClass() throws Exception { TEST_UTIL.loadTable(table, INPUT_FAMILYS, null, false); } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } @@ -238,15 +238,15 @@ protected void testNumOfSplits(int splitsPerRegion, int expectedNumOfSplits) ImmutableBytesWritable.class, ImmutableBytesWritable.class, job); TableInputFormat tif = new TableInputFormat(); tif.setConf(job.getConfiguration()); - Assert.assertEquals(TABLE_NAME, table.getName()); + Assertions.assertEquals(TABLE_NAME, table.getName()); List splits = tif.getSplits(job); for (InputSplit split : splits) { TableSplit tableSplit = (TableSplit) split; // In table input format, we do no store the scanner at the split level // because we use the scan object from the map-reduce job conf itself. 
- Assert.assertTrue(tableSplit.getScanAsString().isEmpty()); + Assertions.assertTrue(tableSplit.getScanAsString().isEmpty()); } - Assert.assertEquals(expectedNumOfSplits, splits.size()); + Assertions.assertEquals(expectedNumOfSplits, splits.size()); } /** @@ -269,11 +269,11 @@ protected void testNumOfSplitsMR(int splitsPerRegion, int expectedNumOfSplits) job.setReducerClass(ScanReducer.class); job.setNumReduceTasks(1); job.setOutputFormatClass(NullOutputFormat.class); - assertTrue("job failed!", job.waitForCompletion(true)); + assertTrue(job.waitForCompletion(true), "job failed!"); // for some reason, hbase does not expose JobCounter.TOTAL_LAUNCHED_MAPS, // we use TaskCounter.SHUFFLED_MAPS to get total launched maps - assertEquals("Saw the wrong count of mappers per region", expectedNumOfSplits, - job.getCounters().findCounter(TaskCounter.SHUFFLED_MAPS).getValue()); + assertEquals(expectedNumOfSplits, + job.getCounters().findCounter(TaskCounter.SHUFFLED_MAPS).getValue(), "Saw the wrong count of mappers per region"); } /** @@ -292,14 +292,14 @@ protected void testAutobalanceNumOfSplit() throws IOException { TableInputFormat tif = new TableInputFormat(); List res = tif.calculateAutoBalancedSplits(splits, 1073741824); - assertEquals("Saw the wrong number of splits", 5, res.size()); + assertEquals(5, res.size(), "Saw the wrong number of splits"); TableSplit ts1 = (TableSplit) res.get(0); - assertEquals("The first split end key should be", 2, Bytes.toInt(ts1.getEndRow())); + assertEquals(2, Bytes.toInt(ts1.getEndRow()), "The first split end key should be"); TableSplit ts2 = (TableSplit) res.get(1); - assertEquals("The second split regionsize should be", 20 * 1048576, ts2.getLength()); + assertEquals(20 * 1048576, ts2.getLength(), "The second split regionsize should be"); TableSplit ts3 = (TableSplit) res.get(2); - assertEquals("The third split start key should be", 3, Bytes.toInt(ts3.getStartRow())); + assertEquals(3, Bytes.toInt(ts3.getStartRow()), "The third split 
start key should be"); TableSplit ts4 = (TableSplit) res.get(4); - assertNotEquals("The seventh split start key should not be", 4, Bytes.toInt(ts4.getStartRow())); + assertNotEquals(4, Bytes.toInt(ts4.getStartRow()), "The seventh split start key should not be"); } } diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanEmptyToAPP.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanEmptyToAPP.java index addcdc898c8e..6e4fc4e90c58 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanEmptyToAPP.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanEmptyToAPP.java @@ -21,17 +21,14 @@ import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.MapReduceTests; import org.apache.hadoop.hbase.testclassification.MediumTests; -import org.junit.ClassRule; -import org.junit.Test; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.junit.experimental.categories.Category; -@Category({ MapReduceTests.class, MediumTests.class }) +@Tag(MapReduceTests.TAG) +@Tag(MediumTests.TAG) public class TestTableInputFormatScanEmptyToAPP extends TestTableInputFormatScanBase { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestTableInputFormatScanEmptyToAPP.class); - /** * Tests a MR scan using specific start and stop rows. 
*/ diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanEmptyToBBA.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanEmptyToBBA.java index e395b36e2a70..2a0357e5a46d 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanEmptyToBBA.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanEmptyToBBA.java @@ -18,20 +18,15 @@ package org.apache.hadoop.hbase.mapreduce; import java.io.IOException; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.MapReduceTests; import org.apache.hadoop.hbase.testclassification.MediumTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ MapReduceTests.class, MediumTests.class }) +@Tag(MapReduceTests.TAG) +@Tag(MediumTests.TAG) public class TestTableInputFormatScanEmptyToBBA extends TestTableInputFormatScanBase { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestTableInputFormatScanEmptyToBBA.class); - /** * Tests a MR scan using specific start and stop rows. 
*/ diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanEmptyToBBB.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanEmptyToBBB.java index f86578712ae8..9107c0efb6e6 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanEmptyToBBB.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanEmptyToBBB.java @@ -18,20 +18,15 @@ package org.apache.hadoop.hbase.mapreduce; import java.io.IOException; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.MapReduceTests; import org.apache.hadoop.hbase.testclassification.MediumTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ MapReduceTests.class, MediumTests.class }) +@Tag(MapReduceTests.TAG) +@Tag(MediumTests.TAG) public class TestTableInputFormatScanEmptyToBBB extends TestTableInputFormatScanBase { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestTableInputFormatScanEmptyToBBB.class); - /** * Tests a MR scan using specific start and stop rows. 
*/ diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanEmptyToEmpty.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanEmptyToEmpty.java index ef7b38b21be1..c4e73a945bb3 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanEmptyToEmpty.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanEmptyToEmpty.java @@ -18,20 +18,15 @@ package org.apache.hadoop.hbase.mapreduce; import java.io.IOException; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.MapReduceTests; import org.apache.hadoop.hbase.testclassification.MediumTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ MapReduceTests.class, MediumTests.class }) +@Tag(MapReduceTests.TAG) +@Tag(MediumTests.TAG) public class TestTableInputFormatScanEmptyToEmpty extends TestTableInputFormatScanBase { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestTableInputFormatScanEmptyToEmpty.class); - /** * Tests a MR scan using specific start and stop rows. 
*/ diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanEmptyToOPP.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanEmptyToOPP.java index f20d8113f780..d88e9c57205a 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanEmptyToOPP.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanEmptyToOPP.java @@ -18,20 +18,15 @@ package org.apache.hadoop.hbase.mapreduce; import java.io.IOException; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.MapReduceTests; import org.apache.hadoop.hbase.testclassification.MediumTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ MapReduceTests.class, MediumTests.class }) +@Tag(MapReduceTests.TAG) +@Tag(MediumTests.TAG) public class TestTableInputFormatScanEmptyToOPP extends TestTableInputFormatScanBase { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestTableInputFormatScanEmptyToOPP.class); - /** * Tests a MR scan using specific start and stop rows. 
*/ diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanOBBToOPP.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanOBBToOPP.java index fe3d703a289b..0eb16e3ff236 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanOBBToOPP.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanOBBToOPP.java @@ -18,20 +18,15 @@ package org.apache.hadoop.hbase.mapreduce; import java.io.IOException; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.MapReduceTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ MapReduceTests.class, LargeTests.class }) +@Tag(MapReduceTests.TAG) +@Tag(LargeTests.TAG) public class TestTableInputFormatScanOBBToOPP extends TestTableInputFormatScanBase { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestTableInputFormatScanOBBToOPP.class); - /** * Tests a MR scan using specific start and stop rows. 
*/ diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanOBBToQPP.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanOBBToQPP.java index f6985a3fd773..1c32ce6e2227 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanOBBToQPP.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanOBBToQPP.java @@ -18,20 +18,15 @@ package org.apache.hadoop.hbase.mapreduce; import java.io.IOException; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.MapReduceTests; import org.apache.hadoop.hbase.testclassification.MediumTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ MapReduceTests.class, MediumTests.class }) +@Tag(MapReduceTests.TAG) +@Tag(MediumTests.TAG) public class TestTableInputFormatScanOBBToQPP extends TestTableInputFormatScanBase { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestTableInputFormatScanOBBToQPP.class); - /** * Tests a MR scan using specific start and stop rows. 
*/ diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanOPPToEmpty.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanOPPToEmpty.java index e57051dfd192..7543cc39537f 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanOPPToEmpty.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanOPPToEmpty.java @@ -18,20 +18,15 @@ package org.apache.hadoop.hbase.mapreduce; import java.io.IOException; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.MapReduceTests; import org.apache.hadoop.hbase.testclassification.MediumTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ MapReduceTests.class, MediumTests.class }) +@Tag(MapReduceTests.TAG) +@Tag(MediumTests.TAG) public class TestTableInputFormatScanOPPToEmpty extends TestTableInputFormatScanBase { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestTableInputFormatScanOPPToEmpty.class); - /** * Tests a MR scan using specific start and stop rows. 
*/ diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanYYXToEmpty.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanYYXToEmpty.java index c8b3394e54b4..a01fc83caa79 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanYYXToEmpty.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanYYXToEmpty.java @@ -18,20 +18,15 @@ package org.apache.hadoop.hbase.mapreduce; import java.io.IOException; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.MapReduceTests; import org.apache.hadoop.hbase.testclassification.MediumTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ MapReduceTests.class, MediumTests.class }) +@Tag(MapReduceTests.TAG) +@Tag(MediumTests.TAG) public class TestTableInputFormatScanYYXToEmpty extends TestTableInputFormatScanBase { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestTableInputFormatScanYYXToEmpty.class); - /** * Tests a MR scan using specific start and stop rows. 
*/ diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanYYYToEmpty.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanYYYToEmpty.java index 175d10e1f755..c01477cffae3 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanYYYToEmpty.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanYYYToEmpty.java @@ -18,20 +18,15 @@ package org.apache.hadoop.hbase.mapreduce; import java.io.IOException; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.MapReduceTests; import org.apache.hadoop.hbase.testclassification.MediumTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ MapReduceTests.class, MediumTests.class }) +@Tag(MapReduceTests.TAG) +@Tag(MediumTests.TAG) public class TestTableInputFormatScanYYYToEmpty extends TestTableInputFormatScanBase { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestTableInputFormatScanYYYToEmpty.class); - /** * Tests a MR scan using specific start and stop rows. 
*/ diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanYZYToEmpty.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanYZYToEmpty.java index 9ce2f0782b2f..68d1ecc8c439 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanYZYToEmpty.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanYZYToEmpty.java @@ -18,20 +18,15 @@ package org.apache.hadoop.hbase.mapreduce; import java.io.IOException; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.MapReduceTests; import org.apache.hadoop.hbase.testclassification.MediumTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ MapReduceTests.class, MediumTests.class }) +@Tag(MapReduceTests.TAG) +@Tag(MediumTests.TAG) public class TestTableInputFormatScanYZYToEmpty extends TestTableInputFormatScanBase { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestTableInputFormatScanYZYToEmpty.class); - /** * Tests a MR scan using specific start and stop rows. 
*/ diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java index 99606050667a..6e9bd908b71b 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java @@ -17,9 +17,10 @@ */ package org.apache.hadoop.hbase.mapreduce; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.File; import java.io.IOException; @@ -27,7 +28,6 @@ import java.util.NavigableMap; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableNotEnabledException; import org.apache.hadoop.hbase.TableNotFoundException; @@ -44,9 +44,8 @@ import org.apache.hadoop.mapreduce.Counters; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -56,13 +55,10 @@ * to the table. 
*/ -@Category({ VerySlowMapReduceTests.class, LargeTests.class }) +@Tag(VerySlowMapReduceTests.TAG) +@Tag(LargeTests.TAG) public class TestTableMapReduce extends TestTableMapReduceBase { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestTableMapReduce.class); - private static final Logger LOG = LoggerFactory.getLogger(TestTableMapReduce.class); @Override @@ -142,27 +138,27 @@ private void verifyJobCountersAreEmitted(Job job) throws IOException { Counters counters = job.getCounters(); Counter counter = counters.findCounter(TableRecordReaderImpl.HBASE_COUNTER_GROUP_NAME, "RPC_CALLS"); - assertNotNull("Unable to find Job counter for HBase scan metrics, RPC_CALLS", counter); - assertTrue("Counter value for RPC_CALLS should be larger than 0", counter.getValue() > 0); + assertNotNull(counter, "Unable to find Job counter for HBase scan metrics, RPC_CALLS"); + assertTrue(counter.getValue() > 0, "Counter value for RPC_CALLS should be larger than 0"); } - @Test(expected = TableNotEnabledException.class) + @Test public void testWritingToDisabledTable() throws IOException { - - try (Admin admin = UTIL.getConnection().getAdmin(); - Table table = UTIL.getConnection().getTable(TABLE_FOR_NEGATIVE_TESTS)) { - admin.disableTable(table.getName()); - runTestOnTable(table); - fail("Should not have reached here, should have thrown an exception"); - } + assertThrows(TableNotEnabledException.class, () -> { + try (Admin admin = UTIL.getConnection().getAdmin(); + Table table = UTIL.getConnection().getTable(TABLE_FOR_NEGATIVE_TESTS)) { + admin.disableTable(table.getName()); + runTestOnTable(table); + } + }); } - @Test(expected = TableNotFoundException.class) + @Test public void testWritingToNonExistentTable() throws IOException { - - try (Table table = UTIL.getConnection().getTable(TableName.valueOf("table-does-not-exist"))) { - runTestOnTable(table); - fail("Should not have reached here, should have thrown an exception"); - } + 
assertThrows(TableNotFoundException.class, () -> { + try (Table table = UTIL.getConnection().getTable(TableName.valueOf("table-does-not-exist"))) { + runTestOnTable(table); + } + }); } } diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java index 477ea5d7f6dd..7f00914abb31 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.mapreduce; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.IOException; import java.util.Iterator; @@ -37,9 +37,9 @@ import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; /** @@ -67,7 +67,7 @@ public abstract class TestTableMapReduceBase { */ protected abstract void runTestOnTable(Table table) throws IOException; - @BeforeClass + @BeforeAll public static void beforeClass() throws Exception { UTIL.startMiniCluster(); Table table = UTIL.createMultiRegionTable(MULTI_REGION_TABLE_NAME, @@ -76,7 +76,7 @@ public static void beforeClass() throws Exception { UTIL.createTable(TABLE_FOR_NEGATIVE_TESTS, new byte[][] { INPUT_FAMILY, OUTPUT_FAMILY }); } - @AfterClass + @AfterAll public static void afterClass() throws Exception { UTIL.deleteTable(TABLE_FOR_NEGATIVE_TESTS); UTIL.shutdownMiniCluster(); diff --git 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceUtil.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceUtil.java index 22688485c971..5cd386c1f9d1 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceUtil.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceUtil.java @@ -18,16 +18,15 @@ package org.apache.hadoop.hbase.mapreduce; import static org.apache.hadoop.security.UserGroupInformation.loginUserFromKeytab; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.Closeable; import java.io.File; import java.net.URI; import java.util.Collection; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.security.token.AuthenticationTokenIdentifier; @@ -43,21 +42,17 @@ import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenIdentifier; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * Test different variants of initTableMapperJob method */ -@Category({ MapReduceTests.class, LargeTests.class }) +@Tag(MapReduceTests.TAG) +@Tag(LargeTests.TAG) public class TestTableMapReduceUtil { private static final String HTTP_PRINCIPAL = "HTTP/localhost"; - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestTableMapReduceUtil.class); 
- /* * initTableSnapshotMapperJob is tested in {@link TestTableSnapshotInputFormat} because the method * depends on an online cluster. diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableOutputFormat.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableOutputFormat.java index 52c7321617a4..404f945d594a 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableOutputFormat.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableOutputFormat.java @@ -20,7 +20,6 @@ import java.io.IOException; import javax.validation.constraints.Null; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Delete; @@ -33,24 +32,20 @@ import org.apache.hadoop.mapreduce.OutputCommitter; import org.apache.hadoop.mapreduce.RecordWriter; import org.apache.hadoop.mapreduce.TaskAttemptContext; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Assert; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.mockito.Mockito; /** * Simple Tests to check whether the durability of the Mutation is changed or not, for * {@link TableOutputFormat} if {@link TableOutputFormat#WAL_PROPERTY} is set to false. 
*/ -@Category(MediumTests.class) +@Tag(MediumTests.TAG) public class TestTableOutputFormat { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestTableOutputFormat.class); private static final HBaseTestingUtil util = new HBaseTestingUtil(); private static final TableName TABLE_NAME = TableName.valueOf("TEST_TABLE"); @@ -60,7 +55,7 @@ public class TestTableOutputFormat { private static TaskAttemptContext context; private static TableOutputFormat tableOutputFormat; - @BeforeClass + @BeforeAll public static void setUp() throws Exception { util.startMiniCluster(); util.createTable(TABLE_NAME, columnFamily); @@ -71,12 +66,12 @@ public static void setUp() throws Exception { conf.set(TableOutputFormat.OUTPUT_TABLE, "TEST_TABLE"); } - @AfterClass + @AfterAll public static void tearDown() throws Exception { util.shutdownMiniCluster(); } - @After + @AfterEach public void close() throws IOException, InterruptedException { if (writer != null && context != null) { writer.close(context); @@ -96,14 +91,14 @@ public void testTableOutputFormatWhenWalIsOFFForPut() throws IOException, Interr put.addColumn(columnFamily, Bytes.toBytes("aa"), Bytes.toBytes("value")); // verifying whether durability of mutation is USE_DEFAULT or not, before commiting write. - Assert.assertEquals("Durability of the mutation should be USE_DEFAULT", Durability.USE_DEFAULT, - put.getDurability()); + Assertions.assertEquals(Durability.USE_DEFAULT, put.getDurability(), + "Durability of the mutation should be USE_DEFAULT"); writer.write(null, put); // verifying whether durability of mutation got changed to the SKIP_WAL or not. 
- Assert.assertEquals("Durability of the mutation should be SKIP_WAL", Durability.SKIP_WAL, - put.getDurability()); + Assertions.assertEquals(Durability.SKIP_WAL, put.getDurability(), + "Durability of the mutation should be SKIP_WAL"); } @Test @@ -120,14 +115,14 @@ public void testTableOutputFormatWhenWalIsOFFForDelete() delete.addColumn(columnFamily, Bytes.toBytes("aa")); // verifying whether durability of mutation is USE_DEFAULT or not, before commiting write. - Assert.assertEquals("Durability of the mutation should be USE_DEFAULT", Durability.USE_DEFAULT, - delete.getDurability()); + Assertions.assertEquals(Durability.USE_DEFAULT, delete.getDurability(), + "Durability of the mutation should be USE_DEFAULT"); writer.write(null, delete); // verifying whether durability of mutation got changed from USE_DEFAULT to the SKIP_WAL or not. - Assert.assertEquals("Durability of the mutation should be SKIP_WAL", Durability.SKIP_WAL, - delete.getDurability()); + Assertions.assertEquals(Durability.SKIP_WAL, delete.getDurability(), + "Durability of the mutation should be SKIP_WAL"); } @Test @@ -135,14 +130,14 @@ public void testOutputCommitterConfiguration() throws IOException, InterruptedEx // 1. Verify it returns the default committer when the property is not set. conf.unset(TableOutputFormat.OUTPUT_COMMITTER_CLASS); tableOutputFormat.setConf(conf); - Assert.assertEquals("Should use default committer", TableOutputCommitter.class, - tableOutputFormat.getOutputCommitter(context).getClass()); + Assertions.assertEquals(TableOutputCommitter.class, + tableOutputFormat.getOutputCommitter(context).getClass(), "Should use default committer"); // 2. Verify it returns the custom committer when the property is set. 
conf.set(TableOutputFormat.OUTPUT_COMMITTER_CLASS, DummyCommitter.class.getName()); tableOutputFormat.setConf(conf); - Assert.assertEquals("Should use custom committer", DummyCommitter.class, - tableOutputFormat.getOutputCommitter(context).getClass()); + Assertions.assertEquals(DummyCommitter.class, + tableOutputFormat.getOutputCommitter(context).getClass(), "Should use custom committer"); } // Simple dummy committer for testing diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableRecordReader.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableRecordReader.java index 232083ea7e78..7184c020713f 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableRecordReader.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableRecordReader.java @@ -17,13 +17,12 @@ */ package org.apache.hadoop.hbase.mapreduce; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.IOException; import java.util.ArrayList; import java.util.List; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HTestConst; @@ -37,20 +36,15 @@ import org.apache.hadoop.hbase.regionserver.StoreScanner; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(MediumTests.class) +@Tag(MediumTests.TAG) public class TestTableRecordReader { private final static HBaseTestingUtil TEST_UTIL = new 
HBaseTestingUtil(); - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestTableRecordReader.class); - private static TableName TABLE_NAME = TableName.valueOf("TestTableRecordReader"); private static int NUM_ROWS = 5; @@ -70,7 +64,7 @@ public class TestTableRecordReader { private static final int TIMEOUT = 4000; - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { Configuration conf = TEST_UTIL.getConfiguration(); @@ -105,7 +99,7 @@ private static List createPuts(byte[][] rows, byte[][] families, byte[][] q return puts; } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSnapshotInputFormat.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSnapshotInputFormat.java index c24f8e62c816..66259e7b5680 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSnapshotInputFormat.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSnapshotInputFormat.java @@ -23,7 +23,9 @@ import static org.apache.hadoop.hbase.mapreduce.TableSnapshotInputFormatImpl.SNAPSHOT_INPUTFORMAT_LOCALITY_ENABLED_KEY; import static org.apache.hadoop.hbase.mapreduce.TableSnapshotInputFormatImpl.SNAPSHOT_INPUTFORMAT_ROW_LIMIT_PER_INPUTSPLIT; import static org.apache.hadoop.hbase.mapreduce.TableSnapshotInputFormatImpl.SNAPSHOT_INPUTFORMAT_SCANNER_READTYPE; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -33,7 +35,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import 
org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HDFSBlocksDistribution; @@ -60,24 +61,18 @@ import org.apache.hadoop.mapreduce.Reducer; import org.apache.hadoop.mapreduce.TaskAttemptContext; import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat; -import org.junit.Assert; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hbase.thirdparty.com.google.common.collect.Lists; -@Category({ VerySlowMapReduceTests.class, LargeTests.class }) +@Tag(VerySlowMapReduceTests.TAG) +@Tag(LargeTests.TAG) public class TestTableSnapshotInputFormat extends TableSnapshotInputFormatTestBase { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestTableSnapshotInputFormat.class); - private static final Logger LOG = LoggerFactory.getLogger(TestTableSnapshotInputFormat.class); private static final byte[] bbb = Bytes.toBytes("bbb"); @@ -85,9 +80,6 @@ public class TestTableSnapshotInputFormat extends TableSnapshotInputFormatTestBa private static final byte[] bbc = Bytes.toBytes("bbc"); private static final byte[] yya = Bytes.toBytes("yya"); - @Rule - public TestName name = new TestName(); - @Override protected byte[] getStartRow() { return bbb; @@ -104,19 +96,18 @@ public void testGetBestLocations() throws IOException { Configuration conf = UTIL.getConfiguration(); HDFSBlocksDistribution blockDistribution = new HDFSBlocksDistribution(); - Assert.assertEquals(null, - TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution)); + assertEquals(null, 
TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution)); blockDistribution.addHostsAndBlockWeight(new String[] { "h1" }, 1); - Assert.assertEquals(Lists.newArrayList("h1"), + assertEquals(Lists.newArrayList("h1"), TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution)); blockDistribution.addHostsAndBlockWeight(new String[] { "h1" }, 1); - Assert.assertEquals(Lists.newArrayList("h1"), + assertEquals(Lists.newArrayList("h1"), TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution)); blockDistribution.addHostsAndBlockWeight(new String[] { "h2" }, 1); - Assert.assertEquals(Lists.newArrayList("h1"), + assertEquals(Lists.newArrayList("h1"), TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution)); blockDistribution = new HDFSBlocksDistribution(); @@ -124,21 +115,21 @@ public void testGetBestLocations() throws IOException { blockDistribution.addHostsAndBlockWeight(new String[] { "h2" }, 7); blockDistribution.addHostsAndBlockWeight(new String[] { "h3" }, 5); blockDistribution.addHostsAndBlockWeight(new String[] { "h4" }, 1); - Assert.assertEquals(Lists.newArrayList("h1"), + assertEquals(Lists.newArrayList("h1"), TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution)); blockDistribution.addHostsAndBlockWeight(new String[] { "h2" }, 2); - Assert.assertEquals(Lists.newArrayList("h1", "h2"), + assertEquals(Lists.newArrayList("h1", "h2"), TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution)); blockDistribution.addHostsAndBlockWeight(new String[] { "h2" }, 3); - Assert.assertEquals(Lists.newArrayList("h2", "h1"), + assertEquals(Lists.newArrayList("h2", "h1"), TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution)); blockDistribution.addHostsAndBlockWeight(new String[] { "h3" }, 6); blockDistribution.addHostsAndBlockWeight(new String[] { "h4" }, 9); - Assert.assertEquals(Lists.newArrayList("h2", "h3", "h4"), + assertEquals(Lists.newArrayList("h2", "h3", "h4"), 
TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution)); } @@ -174,8 +165,8 @@ protected void cleanup(Context context) throws IOException, InterruptedException } @Test - public void testInitTableSnapshotMapperJobConfig() throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName()); + public void testInitTableSnapshotMapperJobConfig(TestInfo testInfo) throws Exception { + final TableName tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); String snapshotName = "foo"; try { @@ -189,11 +180,11 @@ public void testInitTableSnapshotMapperJobConfig() throws Exception { // TODO: would be better to examine directly the cache instance that results from this // config. Currently this is not possible because BlockCache initialization is static. - Assert.assertEquals("Snapshot job should be configured for default LruBlockCache.", - HConstants.HFILE_BLOCK_CACHE_SIZE_DEFAULT, - job.getConfiguration().getFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, -1), 0.01); - Assert.assertEquals("Snapshot job should not use BucketCache.", 0, - job.getConfiguration().getFloat("hbase.bucketcache.size", -1), 0.01); + assertEquals(HConstants.HFILE_BLOCK_CACHE_SIZE_DEFAULT, + job.getConfiguration().getFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, -1), 0.01, + "Snapshot job should be configured for default LruBlockCache."); + assertEquals(0, job.getConfiguration().getFloat("hbase.bucketcache.size", -1), 0.01, + "Snapshot job should not use BucketCache."); } finally { UTIL.getAdmin().deleteSnapshot(snapshotName); UTIL.deleteTable(tableName); @@ -201,11 +192,13 @@ public void testInitTableSnapshotMapperJobConfig() throws Exception { } @Test - public void testWithMockedMapReduceSingleRegionByRegionLocation() throws Exception { + public void testWithMockedMapReduceSingleRegionByRegionLocation(TestInfo testInfo) + throws Exception { Configuration conf = UTIL.getConfiguration(); conf.setBoolean(SNAPSHOT_INPUTFORMAT_LOCALITY_BY_REGION_LOCATION, 
true); try { - testWithMockedMapReduce(UTIL, name.getMethodName() + "Snapshot", 1, 1, 1, true); + testWithMockedMapReduce(UTIL, testInfo.getTestMethod().get().getName() + "Snapshot", 1, 1, 1, + true); } finally { conf.unset(SNAPSHOT_INPUTFORMAT_LOCALITY_BY_REGION_LOCATION); } @@ -223,7 +216,7 @@ public void testRestoreSnapshotDoesNotCreateBackRefLinksInit(TableName tableName @Override public void testWithMockedMapReduce(HBaseTestingUtil util, String snapshotName, int numRegions, int numSplitsPerRegion, int expectedNumSplits, boolean setLocalityEnabledTo) throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName()); + final TableName tableName = TableName.valueOf(snapshotName + "_table"); try { createTableAndSnapshot(util, tableName, snapshotName, getStartRow(), getEndRow(), numRegions); @@ -254,9 +247,9 @@ public void testWithMockedMapReduce(HBaseTestingUtil util, String snapshotName, } @Test - public void testWithMockedMapReduceWithSplitsPerRegion() throws Exception { + public void testWithMockedMapReduceWithSplitsPerRegion(TestInfo testInfo) throws Exception { String snapshotName = "testWithMockedMapReduceMultiRegion"; - final TableName tableName = TableName.valueOf(name.getMethodName()); + final TableName tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); try { createTableAndSnapshot(UTIL, tableName, snapshotName, getStartRow(), getEndRow(), 10); @@ -279,9 +272,9 @@ public void testWithMockedMapReduceWithSplitsPerRegion() throws Exception { } @Test - public void testWithMockedMapReduceWithNoStartRowStopRow() throws Exception { + public void testWithMockedMapReduceWithNoStartRowStopRow(TestInfo testInfo) throws Exception { String snapshotName = "testWithMockedMapReduceMultiRegion"; - final TableName tableName = TableName.valueOf(name.getMethodName()); + final TableName tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); try { createTableAndSnapshot(UTIL, tableName, snapshotName, getStartRow(), 
getEndRow(), 10); @@ -306,8 +299,8 @@ public void testWithMockedMapReduceWithNoStartRowStopRow() throws Exception { } @Test - public void testScanLimit() throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName()); + public void testScanLimit(TestInfo testInfo) throws Exception { + final TableName tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); final String snapshotName = tableName + "Snapshot"; Table table = null; try { @@ -340,8 +333,8 @@ public void testScanLimit() throws Exception { TableMapReduceUtil.initTableSnapshotMapperJob(snapshotName, scan, RowCounter.RowCounterMapper.class, NullWritable.class, NullWritable.class, job, true, tmpTableDir); - Assert.assertTrue(job.waitForCompletion(true)); - Assert.assertEquals(10 * regionNum, + assertTrue(job.waitForCompletion(true)); + assertEquals(10 * regionNum, job.getCounters().findCounter(RowCounter.RowCounterMapper.Counters.ROWS).getValue()); } finally { if (table != null) { @@ -438,7 +431,7 @@ private void verifyWithMockedMapReduce(Job job, int numRegions, int expectedNumS TableSnapshotInputFormat tsif = new TableSnapshotInputFormat(); List splits = tsif.getSplits(job); - Assert.assertEquals(expectedNumSplits, splits.size()); + assertEquals(expectedNumSplits, splits.size()); HBaseTestingUtil.SeenRowTracker rowTracker = new HBaseTestingUtil.SeenRowTracker(startRow, stopRow.length > 0 ? 
stopRow : Bytes.toBytes("\uffff")); @@ -452,38 +445,34 @@ private void verifyWithMockedMapReduce(Job job, int numRegions, int expectedNumS for (int i = 0; i < splits.size(); i++) { // validate input split InputSplit split = splits.get(i); - Assert.assertTrue(split instanceof TableSnapshotRegionSplit); + assertTrue(split instanceof TableSnapshotRegionSplit); TableSnapshotRegionSplit snapshotRegionSplit = (TableSnapshotRegionSplit) split; if (localityEnabled) { - Assert.assertTrue(split.getLocations() != null && split.getLocations().length != 0); + assertTrue(split.getLocations() != null && split.getLocations().length != 0); if (byRegionLoc) { // When it uses region location from meta, the hostname will be "localhost", // the location from hdfs block location is "127.0.0.1". - Assert.assertEquals(1, split.getLocations().length); - Assert.assertTrue("Not using region location!", - split.getLocations()[0].equals("localhost")); + assertEquals(1, split.getLocations().length); + assertTrue(split.getLocations()[0].equals("localhost"), "Not using region location!"); } else { - Assert.assertTrue("Not using region location!", - split.getLocations()[0].equals("127.0.0.1")); + assertTrue(split.getLocations()[0].equals("127.0.0.1"), "Not using region location!"); } } else { - Assert.assertTrue(split.getLocations() != null && split.getLocations().length == 0); + assertTrue(split.getLocations() != null && split.getLocations().length == 0); } Scan scan = TableMapReduceUtil.convertStringToScan(snapshotRegionSplit.getDelegate().getScan()); if (startRow.length > 0) { - Assert.assertTrue( - Bytes.toStringBinary(startRow) + " should <= " + Bytes.toStringBinary(scan.getStartRow()), - Bytes.compareTo(startRow, scan.getStartRow()) <= 0); + assertTrue(Bytes.compareTo(startRow, scan.getStartRow()) <= 0, + Bytes.toStringBinary(startRow) + " should <= " + Bytes.toStringBinary(scan.getStartRow())); } if (stopRow.length > 0) { - Assert.assertTrue( - Bytes.toStringBinary(stopRow) + " should >= " 
+ Bytes.toStringBinary(scan.getStopRow()), - Bytes.compareTo(stopRow, scan.getStopRow()) >= 0); + assertTrue(Bytes.compareTo(stopRow, scan.getStopRow()) >= 0, + Bytes.toStringBinary(stopRow) + " should >= " + Bytes.toStringBinary(scan.getStopRow())); } - Assert.assertTrue("startRow should < stopRow", - Bytes.compareTo(scan.getStartRow(), scan.getStopRow()) < 0); + assertTrue(Bytes.compareTo(scan.getStartRow(), scan.getStopRow()) < 0, + "startRow should < stopRow"); // validate record reader TaskAttemptContext taskAttemptContext = mock(TaskAttemptContext.class); @@ -552,7 +541,7 @@ public static void doTestWithMapReduce(HBaseTestingUtil util, TableName tableNam job.setNumReduceTasks(1); job.setOutputFormatClass(NullOutputFormat.class); - Assert.assertTrue(job.waitForCompletion(true)); + assertTrue(job.waitForCompletion(true)); } finally { if (!shutdownCluster) { util.getAdmin().deleteSnapshot(snapshotName); @@ -579,9 +568,9 @@ public void testCleanRestoreDir() throws Exception { FileSystem fs = workingDir.getFileSystem(job.getConfiguration()); Path restorePath = new Path(job.getConfiguration().get("hbase.TableSnapshotInputFormat.restore.dir")); - Assert.assertTrue(fs.exists(restorePath)); + assertTrue(fs.exists(restorePath)); TableSnapshotInputFormat.cleanRestoreDir(job, snapshotName); - Assert.assertFalse(fs.exists(restorePath)); + assertFalse(fs.exists(restorePath)); } /** @@ -594,8 +583,8 @@ public void testCleanRestoreDir() throws Exception { * 4. Delete restored temporary directory 5. 
Configure a new job and verify that it fails */ @Test - public void testReadFromRestoredSnapshotViaMR() throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName()); + public void testReadFromRestoredSnapshotViaMR(TestInfo testInfo) throws Exception { + final TableName tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); final String snapshotName = tableName + "_snapshot"; try { if (UTIL.getAdmin().tableExists(tableName)) { @@ -618,7 +607,7 @@ public void testReadFromRestoredSnapshotViaMR() throws Exception { Path tempRestoreDir = UTIL.getDataTestDirOnTestFS("restore_" + snapshotName); RestoreSnapshotHelper.copySnapshotForScanner(UTIL.getConfiguration(), fs, rootDir, tempRestoreDir, snapshotName); - Assert.assertTrue("Restore directory should exist", fs.exists(tempRestoreDir)); + assertTrue(fs.exists(tempRestoreDir), "Restore directory should exist"); Job job = Job.getInstance(UTIL.getConfiguration()); job.setJarByClass(TestTableSnapshotInputFormat.class); @@ -636,13 +625,12 @@ public void testReadFromRestoredSnapshotViaMR() throws Exception { scan, TestTableSnapshotMapper.class, ImmutableBytesWritable.class, NullWritable.class, job, false, false, TableSnapshotInputFormat.class); TableMapReduceUtil.resetCacheConfig(conf); - Assert.assertTrue(job.waitForCompletion(true)); - Assert.assertTrue(job.isSuccessful()); + assertTrue(job.waitForCompletion(true)); + assertTrue(job.isSuccessful()); // Now verify that job fails when restore directory is deleted - Assert.assertTrue(fs.delete(tempRestoreDir, true)); - Assert.assertFalse("Restore directory should not exist after deletion", - fs.exists(tempRestoreDir)); + assertTrue(fs.delete(tempRestoreDir, true)); + assertFalse(fs.exists(tempRestoreDir), "Restore directory should not exist after deletion"); Job failureJob = Job.getInstance(UTIL.getConfiguration()); failureJob.setJarByClass(TestTableSnapshotInputFormat.class); 
TableMapReduceUtil.addDependencyJarsForClasses(failureJob.getConfiguration(), @@ -661,12 +649,12 @@ public void testReadFromRestoredSnapshotViaMR() throws Exception { TableSnapshotInputFormat.class); TableMapReduceUtil.resetCacheConfig(failureConf); - Assert.assertFalse("Restore directory should not exist before job execution", - fs.exists(tempRestoreDir)); + assertFalse(fs.exists(tempRestoreDir), + "Restore directory should not exist before job execution"); failureJob.waitForCompletion(true); - Assert.assertFalse("Job should fail since the restored snapshot directory is deleted", - failureJob.isSuccessful()); + assertFalse(failureJob.isSuccessful(), + "Job should fail since the restored snapshot directory is deleted"); } finally { try { diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSplit.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSplit.java index e61cb6c6de7b..e23e074bd5e7 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSplit.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSplit.java @@ -17,38 +17,29 @@ */ package org.apache.hadoop.hbase.mapreduce; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.HashSet; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.testclassification.MapReduceTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.util.ReflectionUtils; -import org.junit.Assert; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import 
org.junit.rules.TestName; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; -@Category({ MapReduceTests.class, SmallTests.class }) +@Tag(MapReduceTests.TAG) +@Tag(SmallTests.TAG) public class TestTableSplit { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestTableSplit.class); - - @Rule - public TestName name = new TestName(); @Test - public void testHashCode() { - TableSplit split1 = new TableSplit(TableName.valueOf(name.getMethodName()), + public void testHashCode(TestInfo testInfo) { + TableSplit split1 = new TableSplit(TableName.valueOf(testInfo.getTestMethod().get().getName()), Bytes.toBytes("row-start"), Bytes.toBytes("row-end"), "location"); - TableSplit split2 = new TableSplit(TableName.valueOf(name.getMethodName()), + TableSplit split2 = new TableSplit(TableName.valueOf(testInfo.getTestMethod().get().getName()), Bytes.toBytes("row-start"), Bytes.toBytes("row-end"), "location"); assertEquals(split1, split2); assertTrue(split1.hashCode() == split2.hashCode()); @@ -62,10 +53,10 @@ public void testHashCode() { * length of region should not influence hashcode */ @Test - public void testHashCode_length() { - TableSplit split1 = new TableSplit(TableName.valueOf(name.getMethodName()), + public void testHashCode_length(TestInfo testInfo) { + TableSplit split1 = new TableSplit(TableName.valueOf(testInfo.getTestMethod().get().getName()), Bytes.toBytes("row-start"), Bytes.toBytes("row-end"), "location", 1984); - TableSplit split2 = new TableSplit(TableName.valueOf(name.getMethodName()), + TableSplit split2 = new TableSplit(TableName.valueOf(testInfo.getTestMethod().get().getName()), Bytes.toBytes("row-start"), Bytes.toBytes("row-end"), "location", 1982); assertEquals(split1, split2); @@ -80,40 +71,40 @@ public void testHashCode_length() { * Length of region need to be properly serialized. 
*/ @Test - public void testLengthIsSerialized() throws Exception { - TableSplit split1 = new TableSplit(TableName.valueOf(name.getMethodName()), + public void testLengthIsSerialized(TestInfo testInfo) throws Exception { + TableSplit split1 = new TableSplit(TableName.valueOf(testInfo.getTestMethod().get().getName()), Bytes.toBytes("row-start"), Bytes.toBytes("row-end"), "location", 666); - TableSplit deserialized = new TableSplit(TableName.valueOf(name.getMethodName()), + TableSplit deserialized = new TableSplit(TableName.valueOf(testInfo.getTestMethod().get().getName()), Bytes.toBytes("row-start2"), Bytes.toBytes("row-end2"), "location1"); ReflectionUtils.copy(new Configuration(), split1, deserialized); - Assert.assertEquals(666, deserialized.getLength()); + assertEquals(666, deserialized.getLength()); } @Test - public void testToString() { - TableSplit split = new TableSplit(TableName.valueOf(name.getMethodName()), + public void testToString(TestInfo testInfo) { + TableSplit split = new TableSplit(TableName.valueOf(testInfo.getTestMethod().get().getName()), Bytes.toBytes("row-start"), Bytes.toBytes("row-end"), "location"); - String str = "Split(tablename=" + name.getMethodName() + ", startrow=row-start, " + String str = "Split(tablename=" + testInfo.getTestMethod().get().getName() + ", startrow=row-start, " + "endrow=row-end, regionLocation=location, " + "regionname=)"; - Assert.assertEquals(str, split.toString()); + assertEquals(str, split.toString()); split = - new TableSplit(TableName.valueOf(name.getMethodName()), null, Bytes.toBytes("row-start"), + new TableSplit(TableName.valueOf(testInfo.getTestMethod().get().getName()), null, Bytes.toBytes("row-start"), Bytes.toBytes("row-end"), "location", "encoded-region-name", 1000L); - str = "Split(tablename=" + name.getMethodName() + ", startrow=row-start, " + str = "Split(tablename=" + testInfo.getTestMethod().get().getName() + ", startrow=row-start, " + "endrow=row-end, regionLocation=location, " + 
"regionname=encoded-region-name)"; - Assert.assertEquals(str, split.toString()); + assertEquals(str, split.toString()); split = new TableSplit(null, null, null, null); str = "Split(tablename=null, startrow=null, " + "endrow=null, regionLocation=null, " + "regionname=)"; - Assert.assertEquals(str, split.toString()); + assertEquals(str, split.toString()); split = new TableSplit(null, null, null, null, null, null, 1000L); str = "Split(tablename=null, startrow=null, " + "endrow=null, regionLocation=null, " + "regionname=null)"; - Assert.assertEquals(str, split.toString()); + assertEquals(str, split.toString()); } } diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java index 596932edf24f..487a40745064 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java @@ -52,22 +52,19 @@ import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@Category({ MapReduceTests.class, LargeTests.class }) +@Tag(MapReduceTests.TAG) +@Tag(LargeTests.TAG) public class TestTimeRangeMapRed { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestTimeRangeMapRed.class); - private final static Logger log = LoggerFactory.getLogger(TestTimeRangeMapRed.class); 
private static final HBaseTestingUtil UTIL = new HBaseTestingUtil(); private Admin admin; @@ -90,17 +87,17 @@ public class TestTimeRangeMapRed { static final byte[] FAMILY_NAME = Bytes.toBytes("text"); static final byte[] COLUMN_NAME = Bytes.toBytes("input"); - @BeforeClass + @BeforeAll public static void beforeClass() throws Exception { UTIL.startMiniCluster(); } - @AfterClass + @AfterAll public static void afterClass() throws Exception { UTIL.shutdownMiniCluster(); } - @Before + @BeforeEach public void before() throws Exception { this.admin = UTIL.getAdmin(); } @@ -199,7 +196,7 @@ private void verify(final Table table) throws IOException { log.debug(Bytes.toString(r.getRow()) + "\t" + Bytes.toString(CellUtil.cloneFamily(kv)) + "\t" + Bytes.toString(CellUtil.cloneQualifier(kv)) + "\t" + kv.getTimestamp() + "\t" + Bytes.toBoolean(CellUtil.cloneValue(kv))); - org.junit.Assert.assertEquals(TIMESTAMP.get(kv.getTimestamp()), + org.junit.jupiter.api.Assertions.assertEquals(TIMESTAMP.get(kv.getTimestamp()), Bytes.toBoolean(CellUtil.cloneValue(kv))); } } diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALInputFormat.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALInputFormat.java index 930c8d11375f..03fe089fb7cf 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALInputFormat.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALInputFormat.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.mapreduce; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.util.ArrayList; import java.util.List; @@ -38,21 +38,17 @@ import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.JobContext; import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import 
org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.mockito.Mockito; -@Category({ MapReduceTests.class, MediumTests.class }) +@Tag(MapReduceTests.TAG) +@Tag(MediumTests.TAG) public class TestWALInputFormat { private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestWALInputFormat.class); - - @BeforeClass + @BeforeAll public static void setupClass() throws Exception { TEST_UTIL.startMiniCluster(); TEST_UTIL.createWALRootDir(); diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java index 220e9a3793cd..11b3d55ee0d5 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java @@ -21,9 +21,9 @@ import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; @@ -68,22 +68,22 @@ import org.apache.hadoop.mapreduce.Mapper; import org.apache.hadoop.mapreduce.Mapper.Context; import org.apache.hadoop.util.ToolRunner; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; import 
org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; /** * Basic test for the WALPlayer M/R tool */ -@Category({ MapReduceTests.class, LargeTests.class }) +@Tag(MapReduceTests.TAG) +@Tag(LargeTests.TAG) public class TestWALPlayer { - @ClassRule + public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestWALPlayer.class); @@ -95,10 +95,7 @@ public class TestWALPlayer { private static FileSystem logFs; private static Configuration conf; - @Rule - public TestName name = new TestName(); - - @BeforeClass + @BeforeAll public static void beforeClass() throws Exception { conf = TEST_UTIL.getConfiguration(); rootDir = TEST_UTIL.createRootDir(); @@ -108,7 +105,7 @@ public static void beforeClass() throws Exception { cluster = TEST_UTIL.startMiniCluster(); } - @AfterClass + @AfterAll public static void afterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); fs.delete(rootDir, true); @@ -148,8 +145,9 @@ public void testPlayingRecoveredEdit() throws Exception { * the resulting bulkloaded HFiles. 
See HBASE-27649 */ @Test - public void testWALPlayerBulkLoadWithOverriddenTimestamps() throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName() + "1"); + public void testWALPlayerBulkLoadWithOverriddenTimestamps(TestInfo testInfo) throws Exception { + final TableName tableName = + TableName.valueOf(testInfo.getTestMethod().get().getName() + "1"); final byte[] family = Bytes.toBytes("family"); final byte[] column1 = Bytes.toBytes("c1"); final byte[] column2 = Bytes.toBytes("c2"); @@ -187,7 +185,7 @@ public void testWALPlayerBulkLoadWithOverriddenTimestamps() throws Exception { HConstants.HREGION_LOGDIR_NAME).toString(); Configuration configuration = new Configuration(TEST_UTIL.getConfiguration()); - String outPath = "/tmp/" + name.getMethodName(); + String outPath = "/tmp/" + testInfo.getTestMethod().get().getName(); configuration.set(WALPlayer.BULK_OUTPUT_CONF_KEY, outPath); configuration.setBoolean(WALPlayer.MULTI_TABLES_SUPPORT, true); @@ -229,9 +227,11 @@ public void testWALPlayerBulkLoadWithOverriddenTimestamps() throws Exception { * Simple end-to-end test */ @Test - public void testWALPlayer() throws Exception { - final TableName tableName1 = TableName.valueOf(name.getMethodName() + "1"); - final TableName tableName2 = TableName.valueOf(name.getMethodName() + "2"); + public void testWALPlayer(TestInfo testInfo) throws Exception { + final TableName tableName1 = + TableName.valueOf(testInfo.getTestMethod().get().getName() + "1"); + final TableName tableName2 = + TableName.valueOf(testInfo.getTestMethod().get().getName() + "2"); final byte[] FAMILY = Bytes.toBytes("family"); final byte[] COLUMN1 = Bytes.toBytes("c1"); final byte[] COLUMN2 = Bytes.toBytes("c2"); diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALRecordReader.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALRecordReader.java index 3a457ee4d9c1..51c4f8e1c180 100644 --- 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALRecordReader.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALRecordReader.java @@ -17,10 +17,11 @@ */ package org.apache.hadoop.hbase.mapreduce; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.IOException; import java.util.List; @@ -56,25 +57,21 @@ import org.apache.hadoop.hbase.wal.WALKeyImpl; import org.apache.hadoop.mapreduce.InputSplit; import org.apache.hadoop.mapreduce.MapReduceTestUtil; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * JUnit tests for the WALRecordReader */ -@Category({ MapReduceTests.class, MediumTests.class }) +@Tag(MapReduceTests.TAG) +@Tag(MediumTests.TAG) public class TestWALRecordReader { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestWALRecordReader.class); - private static final Logger LOG = LoggerFactory.getLogger(TestWALRecordReader.class); private final static HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); private static Configuration conf; @@ -102,14 +99,14 @@ private static String getServerName() { return serverName.toString(); } 
- @Before + @BeforeEach public void setUp() throws Exception { fs.delete(hbaseDir, true); walFs.delete(walRootDir, true); mvcc = new MultiVersionConcurrencyControl(); } - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { // Make block sizes small. conf = TEST_UTIL.getConfiguration(); @@ -126,7 +123,7 @@ public static void setUpBeforeClass() throws Exception { logDir = new Path(walRootDir, HConstants.HREGION_LOGDIR_NAME); } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { fs.delete(hbaseDir, true); walFs.delete(walRootDir, true); @@ -316,10 +313,8 @@ private void testSplit(InputSplit split, byte[]... columns) throws Exception { !Bytes.equals(column, 0, column.length, cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength()) ) { - assertTrue( - "expected [" + Bytes.toString(column) + "], actual [" + Bytes.toString( - cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength()) + "]", - false); + fail("expected [" + Bytes.toString(column) + "], actual [" + Bytes.toString( + cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength()) + "]"); } } assertFalse(reader.nextKeyValue()); @@ -340,10 +335,8 @@ private void testSplitWithMovingWAL(InputSplit split, byte[] col1, byte[] col2) !Bytes.equals(col1, 0, col1.length, cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength()) ) { - assertTrue( - "expected [" + Bytes.toString(col1) + "], actual [" + Bytes.toString( - cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength()) + "]", - false); + fail("expected [" + Bytes.toString(col1) + "], actual [" + Bytes.toString( + cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength()) + "]"); } // Move log file to archive directory // While WAL record reader is open @@ -364,10 +357,8 @@ private void testSplitWithMovingWAL(InputSplit split, byte[] col1, byte[] col2) !Bytes.equals(col2, 0, 
col2.length, cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength()) ) { - assertTrue( - "expected [" + Bytes.toString(col2) + "], actual [" + Bytes.toString( - cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength()) + "]", - false); + fail("expected [" + Bytes.toString(col2) + "], actual [" + Bytes.toString( + cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength()) + "]"); } reader.close(); } diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionTool.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionTool.java index a14febd21e6d..91efdca88989 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionTool.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionTool.java @@ -17,13 +17,12 @@ */ package org.apache.hadoop.hbase.regionserver; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.TableName; @@ -33,19 +32,15 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.CommonFSUtils; import org.apache.hadoop.util.ToolRunner; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ MediumTests.class, RegionServerTests.class }) +@Tag(MediumTests.TAG) 
+@Tag(RegionServerTests.TAG) public class TestCompactionTool { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestCompactionTool.class); - private final HBaseTestingUtil testUtil = new HBaseTestingUtil(); private HRegion region; @@ -53,7 +48,7 @@ public class TestCompactionTool { private Path rootDir; private final TableName tableName = TableName.valueOf(getClass().getSimpleName()); - @Before + @BeforeEach public void setUp() throws Exception { this.testUtil.startMiniCluster(); testUtil.createTable(tableName, HBaseTestingUtil.fam1); @@ -61,7 +56,7 @@ public void setUp() throws Exception { this.region = testUtil.getMiniHBaseCluster().getRegions(tableName).get(0); } - @After + @AfterEach public void tearDown() throws Exception { this.testUtil.shutdownMiniCluster(); testUtil.cleanupTestDir(); diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionToolNpeFix.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionToolNpeFix.java index b230fd6c4d93..21142f91f96c 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionToolNpeFix.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionToolNpeFix.java @@ -17,14 +17,13 @@ */ package org.apache.hadoop.hbase.regionserver; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.TableName; @@ -34,21 +33,17 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.CommonFSUtils; import 
org.apache.hadoop.util.ToolRunner; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; - -@Category({ MediumTests.class, RegionServerTests.class }) +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; + +@Tag(MediumTests.TAG) +@Tag(RegionServerTests.TAG) public class TestCompactionToolNpeFix { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestCompactionToolNpeFix.class); - private static final HBaseTestingUtil TESTUTIL = new HBaseTestingUtil(); private HRegion region; @@ -56,7 +51,7 @@ public class TestCompactionToolNpeFix { private static Path rootDir; private final TableName tableName = TableName.valueOf(getClass().getSimpleName()); - @BeforeClass + @BeforeAll public static void setUpAfterClass() throws Exception { TESTUTIL.getConfiguration().setBoolean(MemStoreLAB.USEMSLAB_KEY, false); TESTUTIL.startMiniCluster(); @@ -65,7 +60,7 @@ public static void setUpAfterClass() throws Exception { } - @AfterClass + @AfterAll public static void tearDown() throws Exception { TESTUTIL.shutdownMiniMapReduceCluster(); TESTUTIL.shutdownMiniCluster(); @@ -73,13 +68,13 @@ public static void tearDown() throws Exception { } - @Before + @BeforeEach public void setUp() throws IOException { TESTUTIL.createTable(tableName, HBaseTestingUtil.fam1); this.region = TESTUTIL.getMiniHBaseCluster().getRegions(tableName).get(0); } - @After + @AfterEach public void after() throws IOException { TESTUTIL.deleteTable(tableName); } diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestVerifyReplicationAdjunct.java 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestVerifyReplicationAdjunct.java index db7cead8c5db..bbce83ccccfc 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestVerifyReplicationAdjunct.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestVerifyReplicationAdjunct.java @@ -17,10 +17,10 @@ */ package org.apache.hadoop.hbase.replication; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -48,14 +48,11 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.CommonFSUtils; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -65,22 +62,16 @@ * We moved some of {@link TestVerifyReplicationZkClusterKey}'s tests here because it could take too * long to complete. In here we have miscellaneous. 
*/ -@Category({ ReplicationTests.class, LargeTests.class }) +@Tag("ReplicationTests") +@Tag("LargeTests") public class TestVerifyReplicationAdjunct extends TestReplicationBase { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestVerifyReplicationAdjunct.class); - private static final Logger LOG = LoggerFactory.getLogger(TestVerifyReplicationAdjunct.class); private static final String PEER_ID = "2"; private static final TableName peerTableName = TableName.valueOf("peerTest"); private static Table htable3; - @Rule - public TestName name = new TestName(); - @Override protected String getClusterKey(HBaseTestingUtil util) throws Exception { // TODO: VerifyReplication does not support connection uri yet, so here we need to use cluster @@ -89,13 +80,13 @@ protected String getClusterKey(HBaseTestingUtil util) throws Exception { return util.getClusterKey(); } - @Before + @BeforeEach public void setUp() throws Exception { cleanUp(); UTIL2.deleteTableData(peerTableName); } - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { TestReplicationBase.setUpBeforeClass(); TableDescriptor peerTable = @@ -261,31 +252,31 @@ public void testVerifyReplicationPrefixFiltering() throws Exception { public void testVerifyReplicationSnapshotArguments() { String[] args = new String[] { "--sourceSnapshotName=snapshot1", "2", tableName.getNameAsString() }; - assertFalse(Lists.newArrayList(args).toString(), new VerifyReplication().doCommandLine(args)); + assertFalse(new VerifyReplication().doCommandLine(args), Lists.newArrayList(args).toString()); args = new String[] { "--sourceSnapshotTmpDir=tmp", "2", tableName.getNameAsString() }; - assertFalse(Lists.newArrayList(args).toString(), new VerifyReplication().doCommandLine(args)); + assertFalse(new VerifyReplication().doCommandLine(args), Lists.newArrayList(args).toString()); args = new String[] { "--sourceSnapshotName=snapshot1", "--sourceSnapshotTmpDir=tmp", "2", 
tableName.getNameAsString() }; - assertTrue(Lists.newArrayList(args).toString(), new VerifyReplication().doCommandLine(args)); + assertTrue(new VerifyReplication().doCommandLine(args), Lists.newArrayList(args).toString()); args = new String[] { "--peerSnapshotName=snapshot1", "2", tableName.getNameAsString() }; - assertFalse(Lists.newArrayList(args).toString(), new VerifyReplication().doCommandLine(args)); + assertFalse(new VerifyReplication().doCommandLine(args), Lists.newArrayList(args).toString()); args = new String[] { "--peerSnapshotTmpDir=/tmp/", "2", tableName.getNameAsString() }; - assertFalse(Lists.newArrayList(args).toString(), new VerifyReplication().doCommandLine(args)); + assertFalse(new VerifyReplication().doCommandLine(args), Lists.newArrayList(args).toString()); args = new String[] { "--peerSnapshotName=snapshot1", "--peerSnapshotTmpDir=/tmp/", "--peerFSAddress=tempfs", "--peerHBaseRootAddress=hdfs://tempfs:50070/hbase/", "2", tableName.getNameAsString() }; - assertTrue(Lists.newArrayList(args).toString(), new VerifyReplication().doCommandLine(args)); + assertTrue(new VerifyReplication().doCommandLine(args), Lists.newArrayList(args).toString()); args = new String[] { "--sourceSnapshotName=snapshot1", "--sourceSnapshotTmpDir=/tmp/", "--peerSnapshotName=snapshot2", "--peerSnapshotTmpDir=/tmp/", "--peerFSAddress=tempfs", "--peerHBaseRootAddress=hdfs://tempfs:50070/hbase/", "2", tableName.getNameAsString() }; - assertTrue(Lists.newArrayList(args).toString(), new VerifyReplication().doCommandLine(args)); + assertTrue(new VerifyReplication().doCommandLine(args), Lists.newArrayList(args).toString()); } @Test @@ -352,7 +343,7 @@ public void testVerifyReplicationWithSnapshotSupport() throws Exception { TestVerifyReplicationZkClusterKey.checkRestoreTmpDir(CONF2, temPath2, 2); } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { htable3.close(); TestReplicationBase.tearDownAfterClass(); diff --git 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestVerifyReplicationCrossDiffHdfs.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestVerifyReplicationCrossDiffHdfs.java index 9edc6245295c..b1ee77681552 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestVerifyReplicationCrossDiffHdfs.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestVerifyReplicationCrossDiffHdfs.java @@ -17,14 +17,13 @@ */ package org.apache.hadoop.hbase.replication; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; @@ -47,23 +46,20 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster; import org.apache.hadoop.mapreduce.Job; -import org.junit.AfterClass; -import org.junit.Assert; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList; import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableMap; -@Category({ ReplicationTests.class, LargeTests.class }) +@Tag(ReplicationTests.TAG) +@Tag(LargeTests.TAG) public class 
TestVerifyReplicationCrossDiffHdfs { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestVerifyReplicationCrossDiffHdfs.class); private static final Logger LOG = LoggerFactory.getLogger(TestVerifyReplicationCrossDiffHdfs.class); @@ -80,7 +76,7 @@ public class TestVerifyReplicationCrossDiffHdfs { private static final String PEER_ID = "1"; private static final TableName TABLE_NAME = TableName.valueOf("testVerifyRepCrossDiffHDFS"); - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { conf1.set(HConstants.ZOOKEEPER_ZNODE_PARENT, "/1"); util1 = new HBaseTestingUtil(conf1); @@ -144,11 +140,11 @@ private static void loadSomeData() throws IOException, InterruptedException { } } } - Assert.assertNotNull(results); - Assert.assertEquals(10, results.length); + Assertions.assertNotNull(results); + Assertions.assertEquals(10, results.length); } - @AfterClass + @AfterAll public static void tearDownClass() throws Exception { if (mapReduceUtil != null) { mapReduceUtil.shutdownMiniCluster(); diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestVerifyReplicationRecompareRunnable.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestVerifyReplicationRecompareRunnable.java index 49c52fbcc3b3..9488a2b2dc4c 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestVerifyReplicationRecompareRunnable.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestVerifyReplicationRecompareRunnable.java @@ -17,13 +17,12 @@ */ package org.apache.hadoop.hbase.replication; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.when; import java.io.IOException; import java.util.concurrent.ThreadLocalRandom; -import org.apache.hadoop.hbase.HBaseClassTestRule; import 
org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.Result; @@ -37,22 +36,18 @@ import org.apache.hadoop.mapreduce.Counter; import org.apache.hadoop.mapreduce.Mapper; import org.apache.hadoop.mapreduce.counters.GenericCounter; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; -@Category({ ReplicationTests.class, SmallTests.class }) -@RunWith(MockitoJUnitRunner.class) +@Tag("ReplicationTests") +@Tag("SmallTests") +@ExtendWith(MockitoExtension.class) public class TestVerifyReplicationRecompareRunnable { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestVerifyReplicationRecompareRunnable.class); - @Mock private Table sourceTable; @@ -77,7 +72,7 @@ static byte[] genBytes() { return Bytes.toBytes(ThreadLocalRandom.current().nextInt()); } - @Before + @BeforeEach public void setUp() { for (VerifyReplication.Verifier.Counters counter : VerifyReplication.Verifier.Counters .values()) { diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestVerifyReplicationRpcConnectionUri.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestVerifyReplicationRpcConnectionUri.java index 3e603ec41ac8..6df8a20cad01 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestVerifyReplicationRpcConnectionUri.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestVerifyReplicationRpcConnectionUri.java @@ -17,20 +17,15 @@ */ package org.apache.hadoop.hbase.replication; -import 
org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.ReplicationTests; -import org.junit.ClassRule; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; -@Category({ ReplicationTests.class, LargeTests.class }) +@Tag(ReplicationTests.TAG) +@Tag(LargeTests.TAG) public class TestVerifyReplicationRpcConnectionUri extends VerifyReplicationTestBase { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestVerifyReplicationRpcConnectionUri.class); - @Override protected String getClusterKey(HBaseTestingUtil util) throws Exception { return util.getRpcConnnectionURI(); diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestVerifyReplicationSecureClusterCredentials.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestVerifyReplicationSecureClusterCredentials.java index 6c1e77d609e5..a4b77a3293c0 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestVerifyReplicationSecureClusterCredentials.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestVerifyReplicationSecureClusterCredentials.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.replication; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.File; import java.io.IOException; @@ -25,7 +25,6 @@ import java.util.Collection; import java.util.function.Supplier; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.coprocessor.CoprocessorHost; @@ -47,22 +46,15 @@ import org.apache.hadoop.security.UserGroupInformation; import 
org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenIdentifier; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameter; -import org.junit.runners.Parameterized.Parameters; - -@Category({ ReplicationTests.class, LargeTests.class }) -@RunWith(Parameterized.class) +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; + +@Tag(ReplicationTests.TAG) +@Tag(LargeTests.TAG) public class TestVerifyReplicationSecureClusterCredentials { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestVerifyReplicationSecureClusterCredentials.class); private static MiniKdc KDC; private static final HBaseTestingUtil UTIL1 = new HBaseTestingUtil(); @@ -105,7 +97,7 @@ private static void setupCluster(HBaseTestingUtil util) throws Exception { /** * Sets the security firstly for getting the correct default realm. 
*/ - @BeforeClass + @BeforeAll public static void beforeClass() throws Exception { setUpKdcServer(); setupCluster(UTIL1); @@ -123,24 +115,21 @@ public static void beforeClass() throws Exception { } } - @AfterClass + @AfterAll public static void cleanup() throws IOException { UTIL1.shutdownMiniCluster(); UTIL2.shutdownMiniCluster(); } - @Parameters public static Collection> peer() { return Arrays.asList(() -> "1", () -> ZKConfig.getZooKeeperClusterKey(UTIL2.getConfiguration())); } - @Parameter - public Supplier peer; - - @Test + @ParameterizedTest + @MethodSource("peer") @SuppressWarnings("unchecked") - public void testJobCredentials() throws Exception { + public void testJobCredentials(Supplier peer) throws Exception { Job job = new VerifyReplication().createSubmittableJob( new Configuration(UTIL1.getConfiguration()), new String[] { peer.get(), "table" }); diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestVerifyReplicationZkClusterKey.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestVerifyReplicationZkClusterKey.java index 718cba231ff4..6d7307c60475 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestVerifyReplicationZkClusterKey.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestVerifyReplicationZkClusterKey.java @@ -17,20 +17,15 @@ */ package org.apache.hadoop.hbase.replication; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.ReplicationTests; -import org.junit.ClassRule; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; -@Category({ ReplicationTests.class, LargeTests.class }) +@Tag(ReplicationTests.TAG) +@Tag(LargeTests.TAG) public class TestVerifyReplicationZkClusterKey extends VerifyReplicationTestBase { - @ClassRule - public static final 
HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestVerifyReplicationZkClusterKey.class); - @Override protected String getClusterKey(HBaseTestingUtil util) throws Exception { return util.getClusterKey(); diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestVerifyReplicationZkConnectionUri.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestVerifyReplicationZkConnectionUri.java index 046d2d06664c..401e36d00b79 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestVerifyReplicationZkConnectionUri.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestVerifyReplicationZkConnectionUri.java @@ -17,20 +17,15 @@ */ package org.apache.hadoop.hbase.replication; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.ReplicationTests; -import org.junit.ClassRule; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; -@Category({ ReplicationTests.class, LargeTests.class }) +@Tag(ReplicationTests.TAG) +@Tag(LargeTests.TAG) public class TestVerifyReplicationZkConnectionUri extends VerifyReplicationTestBase { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestVerifyReplicationZkConnectionUri.class); - @Override protected String getClusterKey(HBaseTestingUtil util) throws Exception { return util.getZkConnectionURI(); diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/VerifyReplicationTestBase.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/VerifyReplicationTestBase.java index e263076677a5..e446f9f43240 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/VerifyReplicationTestBase.java +++ 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/VerifyReplicationTestBase.java @@ -17,11 +17,11 @@ */ package org.apache.hadoop.hbase.replication; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.IOException; import java.util.ArrayList; @@ -56,12 +56,11 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.mapreduce.Counters; import org.apache.hadoop.mapreduce.Job; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -74,16 +73,13 @@ public abstract class VerifyReplicationTestBase extends TestReplicationBase { private static final TableName peerTableName = TableName.valueOf("peerTest"); private static Table htable3; - @Rule - public TestName name = new TestName(); - - @Before + @BeforeEach public void setUp() throws Exception { cleanUp(); UTIL2.deleteTableData(peerTableName); } - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { TestReplicationBase.setUpBeforeClass(); @@ -150,10 +146,10 @@ public void testVerifyRepJob() throws Exception { * delete marker is replicated, run verify replication with and without raw to check 
the results. */ @Test - public void testVerifyRepJobWithRawOptions() throws Exception { - LOG.info(name.getMethodName()); + public void testVerifyRepJobWithRawOptions(TestInfo testInfo) throws Exception { + LOG.info(testInfo.getTestMethod().get().getName()); - final TableName tableName = TableName.valueOf(name.getMethodName()); + final TableName tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); byte[] familyname = Bytes.toBytes("fam_raw"); byte[] row = Bytes.toBytes("row_raw"); @@ -464,18 +460,15 @@ public void testVerifyReplicationThreadedRecompares() throws Exception { "--recompareSleep=1", "--peerTableName=" + peerTableName.getNameAsString(), getClusterKey(UTIL2), tableName.getNameAsString() }; Counters counters = runVerifyReplication(args, NB_ROWS_IN_BATCH - 1, 3); - assertEquals( - counters.findCounter(VerifyReplication.Verifier.Counters.FAILED_RECOMPARE).getValue(), 9); - assertEquals(counters.findCounter(VerifyReplication.Verifier.Counters.RECOMPARES).getValue(), - 9); - assertEquals( - counters.findCounter(VerifyReplication.Verifier.Counters.ONLY_IN_PEER_TABLE_ROWS).getValue(), - 1); - assertEquals( - counters.findCounter(VerifyReplication.Verifier.Counters.CONTENT_DIFFERENT_ROWS).getValue(), - 1); - assertEquals(counters.findCounter(VerifyReplication.Verifier.Counters.ONLY_IN_SOURCE_TABLE_ROWS) - .getValue(), 1); + assertEquals(9, + counters.findCounter(VerifyReplication.Verifier.Counters.FAILED_RECOMPARE).getValue()); + assertEquals(9, counters.findCounter(VerifyReplication.Verifier.Counters.RECOMPARES).getValue()); + assertEquals(1, + counters.findCounter(VerifyReplication.Verifier.Counters.ONLY_IN_PEER_TABLE_ROWS).getValue()); + assertEquals(1, + counters.findCounter(VerifyReplication.Verifier.Counters.CONTENT_DIFFERENT_ROWS).getValue()); + assertEquals(1, counters.findCounter(VerifyReplication.Verifier.Counters.ONLY_IN_SOURCE_TABLE_ROWS) + .getValue()); } @Test @@ -509,18 +502,15 @@ public void 
testFailsRemainingComparesAfterShutdown() throws Exception { getClusterKey(UTIL2), tableName.getNameAsString() }; Counters counters = runVerifyReplication(args, NB_ROWS_IN_BATCH - 1, 3); - assertEquals( - counters.findCounter(VerifyReplication.Verifier.Counters.FAILED_RECOMPARE).getValue(), 3); - assertEquals(counters.findCounter(VerifyReplication.Verifier.Counters.RECOMPARES).getValue(), - 3); - assertEquals( - counters.findCounter(VerifyReplication.Verifier.Counters.ONLY_IN_PEER_TABLE_ROWS).getValue(), - 1); - assertEquals( - counters.findCounter(VerifyReplication.Verifier.Counters.CONTENT_DIFFERENT_ROWS).getValue(), - 1); - assertEquals(counters.findCounter(VerifyReplication.Verifier.Counters.ONLY_IN_SOURCE_TABLE_ROWS) - .getValue(), 1); + assertEquals(3, + counters.findCounter(VerifyReplication.Verifier.Counters.FAILED_RECOMPARE).getValue()); + assertEquals(3, counters.findCounter(VerifyReplication.Verifier.Counters.RECOMPARES).getValue()); + assertEquals(1, + counters.findCounter(VerifyReplication.Verifier.Counters.ONLY_IN_PEER_TABLE_ROWS).getValue()); + assertEquals(1, + counters.findCounter(VerifyReplication.Verifier.Counters.CONTENT_DIFFERENT_ROWS).getValue()); + assertEquals(1, counters.findCounter(VerifyReplication.Verifier.Counters.ONLY_IN_SOURCE_TABLE_ROWS) + .getValue()); } @Test @@ -547,21 +537,18 @@ public void testVerifyReplicationSynchronousRecompares() throws Exception { "--peerTableName=" + peerTableName.getNameAsString(), getClusterKey(UTIL2), tableName.getNameAsString() }; Counters counters = runVerifyReplication(args, NB_ROWS_IN_BATCH - 1, 3); - assertEquals( - counters.findCounter(VerifyReplication.Verifier.Counters.FAILED_RECOMPARE).getValue(), 9); - assertEquals(counters.findCounter(VerifyReplication.Verifier.Counters.RECOMPARES).getValue(), - 9); - assertEquals( - counters.findCounter(VerifyReplication.Verifier.Counters.ONLY_IN_PEER_TABLE_ROWS).getValue(), - 1); - assertEquals( - 
counters.findCounter(VerifyReplication.Verifier.Counters.CONTENT_DIFFERENT_ROWS).getValue(), - 1); - assertEquals(counters.findCounter(VerifyReplication.Verifier.Counters.ONLY_IN_SOURCE_TABLE_ROWS) - .getValue(), 1); + assertEquals(9, + counters.findCounter(VerifyReplication.Verifier.Counters.FAILED_RECOMPARE).getValue()); + assertEquals(9, counters.findCounter(VerifyReplication.Verifier.Counters.RECOMPARES).getValue()); + assertEquals(1, + counters.findCounter(VerifyReplication.Verifier.Counters.ONLY_IN_PEER_TABLE_ROWS).getValue()); + assertEquals(1, + counters.findCounter(VerifyReplication.Verifier.Counters.CONTENT_DIFFERENT_ROWS).getValue()); + assertEquals(1, counters.findCounter(VerifyReplication.Verifier.Counters.ONLY_IN_SOURCE_TABLE_ROWS) + .getValue()); } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { htable3.close(); TestReplicationBase.tearDownAfterClass(); diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java index 33deeeba2a26..071f97aab449 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java @@ -18,9 +18,9 @@ package org.apache.hadoop.hbase.snapshot; import static org.apache.hadoop.util.ToolRunner.run; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.ArrayList; @@ -58,15 +58,13 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.HFileTestUtil; import org.apache.hadoop.hbase.util.Pair; 
-import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -78,22 +76,16 @@ /** * Test Export Snapshot Tool */ -@Category({ VerySlowMapReduceTests.class, LargeTests.class }) +@Tag(VerySlowMapReduceTests.TAG) +@Tag(LargeTests.TAG) public class TestExportSnapshot { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestExportSnapshot.class); - private static final Logger LOG = LoggerFactory.getLogger(TestExportSnapshot.class); protected final static HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); protected final static byte[] FAMILY = Bytes.toBytes("cf"); - @Rule - public final TestName testName = new TestName(); - protected TableName tableName; private String emptySnapshotName; private String snapshotName; @@ -108,14 +100,14 @@ public static void setUpBaseConf(Configuration conf) { conf.setInt("mapreduce.job.maxtaskfailures.per.tracker", 100); } - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { setUpBaseConf(TEST_UTIL.getConfiguration()); TEST_UTIL.startMiniCluster(1); TEST_UTIL.startMiniMapReduceCluster(); } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniMapReduceCluster(); TEST_UTIL.shutdownMiniCluster(); @@ -124,13 +116,13 @@ public static void tearDownAfterClass() throws Exception { /** * Create a table and take a snapshot of the table used by the export test. 
*/ - @Before - public void setUp() throws Exception { + @BeforeEach + public void setUp(TestInfo testInfo) throws Exception { this.admin = TEST_UTIL.getAdmin(); - tableName = TableName.valueOf("testtb-" + testName.getMethodName()); - snapshotName = "snaptb0-" + testName.getMethodName(); - emptySnapshotName = "emptySnaptb0-" + testName.getMethodName(); + tableName = TableName.valueOf("testtb-" + testInfo.getTestMethod().get().getName()); + snapshotName = "snaptb0-" + testInfo.getTestMethod().get().getName(); + emptySnapshotName = "emptySnaptb0-" + testInfo.getTestMethod().get().getName(); // create Table createTable(this.tableName); @@ -158,7 +150,7 @@ protected RegionPredicate getBypassRegionPredicate() { return null; } - @After + @AfterEach public void tearDown() throws Exception { TEST_UTIL.deleteTable(tableName); SnapshotTestingUtils.deleteAllSnapshots(TEST_UTIL.getAdmin()); @@ -174,13 +166,14 @@ public void testExportFileSystemState() throws Exception { } @Test - public void testExportFileSystemStateWithMergeRegion() throws Exception { + public void testExportFileSystemStateWithMergeRegion(TestInfo testInfo) throws Exception { // disable compaction admin.compactionSwitch(false, admin.getRegionServers().stream().map(a -> a.getServerName()).collect(Collectors.toList())); // create Table - TableName tableName0 = TableName.valueOf("testtb-" + testName.getMethodName() + "-1"); - String snapshotName0 = "snaptb0-" + testName.getMethodName() + "-1"; + TableName tableName0 = + TableName.valueOf("testtb-" + testInfo.getTestMethod().get().getName() + "-1"); + String snapshotName0 = "snaptb0-" + testInfo.getTestMethod().get().getName() + "-1"; admin.createTable( TableDescriptorBuilder.newBuilder(tableName0) .setColumnFamilies( @@ -207,13 +200,13 @@ public void testExportFileSystemStateWithMergeRegion() throws Exception { } @Test - public void testExportFileSystemStateWithSplitRegion() throws Exception { + public void testExportFileSystemStateWithSplitRegion(TestInfo 
testInfo) throws Exception { // disable compaction admin.compactionSwitch(false, admin.getRegionServers().stream().map(a -> a.getServerName()).collect(Collectors.toList())); // create Table - TableName splitTableName = TableName.valueOf(testName.getMethodName()); - String splitTableSnap = "snapshot-" + testName.getMethodName(); + TableName splitTableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); + String splitTableSnap = "snapshot-" + testInfo.getTestMethod().get().getName(); admin.createTable(TableDescriptorBuilder.newBuilder(splitTableName).setColumnFamilies( Lists.newArrayList(ColumnFamilyDescriptorBuilder.newBuilder(FAMILY).build())).build()); @@ -397,11 +390,11 @@ protected static void testExportFileSystemState(final Configuration conf, // Export Snapshot int res = runExportSnapshot(conf, snapshotName, targetName, srcDir, rawTgtDir, overwrite, resetTtl, checksumVerify, true, true); - assertEquals("success " + success + ", res=" + res, success ? 0 : 1, res); + assertEquals(success ? 0 : 1, res, "success " + success + ", res=" + res); if (!success) { final Path targetDir = new Path(HConstants.SNAPSHOT_DIR_NAME, targetName); - assertFalse(tgtDir.toString() + " " + targetDir.toString(), - tgtFs.exists(new Path(tgtDir, targetDir))); + assertFalse( + tgtFs.exists(new Path(tgtDir, targetDir)), tgtDir.toString() + " " + targetDir.toString()); return; } LOG.info("Exported snapshot"); @@ -411,9 +404,9 @@ protected static void testExportFileSystemState(final Configuration conf, assertEquals(filesExpected > 0 ? 
2 : 1, rootFiles.length); for (FileStatus fileStatus : rootFiles) { String name = fileStatus.getPath().getName(); - assertTrue(fileStatus.toString(), fileStatus.isDirectory()); - assertTrue(name.toString(), name.equals(HConstants.SNAPSHOT_DIR_NAME) - || name.equals(HConstants.HFILE_ARCHIVE_DIRECTORY)); + assertTrue(fileStatus.isDirectory(), fileStatus.toString()); + assertTrue(name.equals(HConstants.SNAPSHOT_DIR_NAME) + || name.equals(HConstants.HFILE_ARCHIVE_DIRECTORY), name.toString()); } LOG.info("Verified filesystem state"); @@ -472,8 +465,8 @@ public void storeFile(final RegionInfo regionInfo, final String family, } private void verifyNonEmptyFile(final Path path) throws IOException { - assertTrue(path + " should exists", fs.exists(path)); - assertTrue(path + " should not be empty", fs.getFileStatus(path).getLen() > 0); + assertTrue(fs.exists(path), path + " should exists"); + assertTrue(fs.getFileStatus(path).getLen() > 0, path + " should not be empty"); } }); diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotAdjunct.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotAdjunct.java index 9453b9fcaf46..ef288ad12beb 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotAdjunct.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotAdjunct.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.snapshot; -import static org.junit.Assert.assertFalse; +import static org.junit.jupiter.api.Assertions.assertFalse; import java.util.Iterator; import java.util.Map; @@ -30,16 +30,14 @@ import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.VerySlowMapReduceTests; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import
org.junit.Ignore; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -49,17 +49,12 @@ * TestExportSnapshot where possible. * @see TestExportSnapshot */ -@Ignore // HBASE-24493 -@Category({ VerySlowMapReduceTests.class, LargeTests.class }) +@Disabled // HBASE-24493 +@Tag(VerySlowMapReduceTests.TAG) +@Tag(LargeTests.TAG) public class TestExportSnapshotAdjunct { private static final Logger LOG = LoggerFactory.getLogger(TestExportSnapshotAdjunct.class); - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestExportSnapshotAdjunct.class); - @Rule - public final TestName testName = new TestName(); - protected final static HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); protected TableName tableName; @@ -68,7 +63,7 @@ public class TestExportSnapshotAdjunct { private int tableNumFiles; private Admin admin; - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { TestExportSnapshot.setUpBaseConf(TEST_UTIL.getConfiguration()); TEST_UTIL.startMiniCluster(3); @@ -98,11 +93,11 @@ private void checkForReferencesToTmpDir() { if (e.getValue().contains("hbase.tmp.dir")) { continue; } - assertFalse(e.getKey() + " " + e.getValue(), e.getValue().contains("tmp")); + assertFalse(e.getValue().contains("tmp"), e.getKey() + " " + e.getValue()); } } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniMapReduceCluster(); TEST_UTIL.shutdownMiniCluster(); @@ -111,13 +106,13 @@ public static void
tearDownAfterClass() throws Exception { /** * Create a table and take a snapshot of the table used by the export test. */ - @Before - public void setUp() throws Exception { + @BeforeEach + public void setUp(TestInfo testInfo) throws Exception { this.admin = TEST_UTIL.getAdmin(); - tableName = TableName.valueOf("testtb-" + testName.getMethodName()); - snapshotName = "snaptb0-" + testName.getMethodName(); - emptySnapshotName = "emptySnaptb0-" + testName.getMethodName(); + tableName = TableName.valueOf("testtb-" + testInfo.getTestMethod().get().getName()); + snapshotName = "snaptb0-" + testInfo.getTestMethod().get().getName(); + emptySnapshotName = "emptySnaptb0-" + testInfo.getTestMethod().get().getName(); // Create Table SnapshotTestingUtils.createPreSplitTable(TEST_UTIL, tableName, 2, TestExportSnapshot.FAMILY); @@ -133,7 +128,7 @@ public void setUp() throws Exception { admin.snapshot(snapshotName, tableName); } - @After + @AfterEach public void tearDown() throws Exception { TEST_UTIL.deleteTable(tableName); SnapshotTestingUtils.deleteAllSnapshots(TEST_UTIL.getAdmin()); diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotHelpers.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotHelpers.java index 72ca0c3f7c29..cdd06a427d33 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotHelpers.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotHelpers.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.snapshot; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.ArrayList; import java.util.Collection; @@ -26,26 +26,21 @@ import java.util.List; import java.util.Set; import org.apache.hadoop.conf.Configuration; -import 
org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Pair; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo; /** * Test Export Snapshot Tool helpers */ -@Category({ RegionServerTests.class, SmallTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(SmallTests.TAG) public class TestExportSnapshotHelpers { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestExportSnapshotHelpers.class); - /** * Verfy the result of getBalanceSplits() method. The result are groups of files, used as input * list for the "export" mappers. All the groups should have similar amount of data. The input @@ -112,7 +107,7 @@ public void testGroupFilesForSplitsWithoutCustomFileGrouper() { Collection>> groups = inputFormat.groupFilesForSplits(conf, files); - assertEquals("Should create 3 groups", 3, groups.size()); + assertEquals(3, groups.size(), "Should create 3 groups"); long totalSize = 0; int totalFiles = 0; @@ -123,8 +118,8 @@ public void testGroupFilesForSplitsWithoutCustomFileGrouper() { } } - assertEquals("All files should be included", 10, totalFiles); - assertEquals("Total size should be preserved", 450, totalSize); + assertEquals(10, totalFiles, "All files should be included"); + assertEquals(450, totalSize, "Total size should be preserved"); } @Test @@ -146,7 +141,7 @@ public void testGroupFilesForSplitsWithCustomFileGrouper() { Collection>> groups = inputFormat.groupFilesForSplits(conf, files); - assertEquals("Should create splits based on custom grouper output", 4, groups.size()); + assertEquals(4, groups.size(), "Should create splits based on custom grouper output"); 
long totalSize = 0; int totalFiles = 0; @@ -157,8 +152,8 @@ public void testGroupFilesForSplitsWithCustomFileGrouper() { } } - assertEquals("All files should be included", 8, totalFiles); - assertEquals("Total size should be preserved", 140, totalSize); + assertEquals(8, totalFiles, "All files should be included"); + assertEquals(140, totalSize, "Total size should be preserved"); } @Test @@ -174,7 +169,7 @@ public void testFileLocationResolverWithNoopResolver() { new ExportSnapshot.NoopFileLocationResolver(); Set locations = resolver.getLocationsForInputFiles(files); - assertTrue("NoopFileLocationResolver should return empty locations", locations.isEmpty()); + assertTrue(locations.isEmpty(), "NoopFileLocationResolver should return empty locations"); } @Test @@ -189,9 +184,9 @@ public void testFileLocationResolverWithCustomResolver() { TestFileLocationResolver resolver = new TestFileLocationResolver(); Set locations = resolver.getLocationsForInputFiles(files); - assertEquals("Should return expected locations", 2, locations.size()); - assertTrue("Should contain rack1", locations.contains("rack1")); - assertTrue("Should contain rack2", locations.contains("rack2")); + assertEquals(2, locations.size(), "Should return expected locations"); + assertTrue(locations.contains("rack1"), "Should contain rack1"); + assertTrue(locations.contains("rack2"), "Should contain rack2"); } @Test @@ -209,7 +204,7 @@ public void testInputSplitWithFileLocationResolver() { try { String[] locations = split.getLocations(); - assertEquals("Should return 2 locations", 2, locations.length); + assertEquals(2, locations.length, "Should return 2 locations"); boolean hasRack1 = false; boolean hasRack2 = false; @@ -222,8 +217,8 @@ public void testInputSplitWithFileLocationResolver() { } } - assertTrue("Should contain rack1", hasRack1); - assertTrue("Should contain rack2", hasRack2); + assertTrue(hasRack1, "Should contain rack1"); + assertTrue(hasRack2, "Should contain rack2"); } catch (Exception e) { 
throw new RuntimeException("Failed to get locations", e); } diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotV1NoCluster.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotV1NoCluster.java index 0215711070f4..e9b5bfda1e35 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotV1NoCluster.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotV1NoCluster.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.snapshot; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.HashSet; @@ -36,10 +36,9 @@ import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.Pair; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -48,25 +47,24 @@ * separate the tests. See companion file for test of v2 snapshot. 
* @see TestExportSnapshotV2NoCluster */ -@Category({ MapReduceTests.class, MediumTests.class }) +@Tag(MapReduceTests.TAG) +@Tag(MediumTests.TAG) public class TestExportSnapshotV1NoCluster { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestExportSnapshotV1NoCluster.class); + private static final Logger LOG = LoggerFactory.getLogger(TestExportSnapshotV1NoCluster.class); private HBaseCommonTestingUtil testUtil = new HBaseCommonTestingUtil(); private Path testDir; private FileSystem fs; - @Before + @BeforeEach public void setUpBefore() throws Exception { // Make sure testDir is on LocalFileSystem this.fs = FileSystem.getLocal(this.testUtil.getConfiguration()); this.testDir = setup(fs, this.testUtil); LOG.info("fs={}, fsuri={}, fswd={}, testDir={}", this.fs, this.fs.getUri(), this.fs.getWorkingDirectory(), this.testDir); - assertTrue("FileSystem '" + fs + "' is not local", fs instanceof LocalFileSystem); + assertTrue(fs instanceof LocalFileSystem, "FileSystem '" + fs + "' is not local"); } /** diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotV2NoCluster.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotV2NoCluster.java index c07a4400c190..796832fb5222 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotV2NoCluster.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotV2NoCluster.java @@ -17,20 +17,18 @@ */ package org.apache.hadoop.hbase.snapshot; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertTrue; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.LocalFileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseCommonTestingUtil; import org.apache.hadoop.hbase.snapshot.SnapshotTestingUtils.SnapshotMock; import 
org.apache.hadoop.hbase.testclassification.MapReduceTests; import org.apache.hadoop.hbase.testclassification.MediumTests; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -38,11 +36,9 @@ * Test Export Snapshot Tool; tests v2 snapshots. * @see TestExportSnapshotV1NoCluster */ -@Category({ MapReduceTests.class, MediumTests.class }) +@Tag(MapReduceTests.TAG) +@Tag(MediumTests.TAG) public class TestExportSnapshotV2NoCluster { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestExportSnapshotV2NoCluster.class); private static final Logger LOG = LoggerFactory.getLogger(TestExportSnapshotV2NoCluster.class); @@ -50,13 +46,13 @@ public class TestExportSnapshotV2NoCluster { private Path testDir; private FileSystem fs; - @Before + @BeforeEach public void before() throws Exception { // Make sure testDir is on LocalFileSystem this.fs = FileSystem.getLocal(this.testUtil.getConfiguration()); this.testDir = TestExportSnapshotV1NoCluster.setup(this.fs, this.testUtil); LOG.info("fs={}, testDir={}", this.fs, this.testDir); - assertTrue("FileSystem '" + fs + "' is not local", fs instanceof LocalFileSystem); + assertTrue(fs instanceof LocalFileSystem, "FileSystem '" + fs + "' is not local"); } @Test diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotWithTemporaryDirectory.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotWithTemporaryDirectory.java index fe380e683db0..72651a65a516 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotWithTemporaryDirectory.java +++ 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotWithTemporaryDirectory.java @@ -22,30 +22,24 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.MediumTests; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Ignore; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; -@Ignore // HBASE-24493 -@Category({ MediumTests.class }) +@Disabled // HBASE-24493 +@Tag(MediumTests.TAG) public class TestExportSnapshotWithTemporaryDirectory extends TestExportSnapshot { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestExportSnapshotWithTemporaryDirectory.class); - - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { setUpBaseConf(TEST_UTIL.getConfiguration()); TEST_UTIL.startMiniCluster(3); TEST_UTIL.startMiniMapReduceCluster(); } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { TestExportSnapshot.tearDownAfterClass(); } diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestMobExportSnapshot.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestMobExportSnapshot.java index 4943b40d6a71..f4c6e39a84d8 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestMobExportSnapshot.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestMobExportSnapshot.java @@ -18,35 +18,30 @@ package org.apache.hadoop.hbase.snapshot; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.TableName; import 
org.apache.hadoop.hbase.client.RegionInfo; import org.apache.hadoop.hbase.mob.MobConstants; import org.apache.hadoop.hbase.mob.MobUtils; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.VerySlowRegionServerTests; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Ignore; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; /** * Test Export Snapshot Tool */ -@Ignore // HBASE-24493 -@Category({ VerySlowRegionServerTests.class, LargeTests.class }) +@Disabled // HBASE-24493 +@Tag(VerySlowRegionServerTests.TAG) +@Tag(LargeTests.TAG) public class TestMobExportSnapshot extends TestExportSnapshot { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestMobExportSnapshot.class); - public static void setUpBaseConf(Configuration conf) { TestExportSnapshot.setUpBaseConf(conf); conf.setInt(MobConstants.MOB_FILE_CACHE_SIZE_KEY, 0); } - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { setUpBaseConf(TEST_UTIL.getConfiguration()); TEST_UTIL.startMiniCluster(3); diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestMobSecureExportSnapshot.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestMobSecureExportSnapshot.java index 2fa686f768f1..95c7d5c8a879 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestMobSecureExportSnapshot.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestMobSecureExportSnapshot.java @@ -17,28 +17,23 @@ */ package org.apache.hadoop.hbase.snapshot; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.security.HadoopSecurityEnabledUserProviderForTesting; import org.apache.hadoop.hbase.security.UserProvider; import 
org.apache.hadoop.hbase.security.access.PermissionStorage; import org.apache.hadoop.hbase.security.access.SecureTestUtil; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.VerySlowRegionServerTests; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; /** * Reruns TestMobExportSnapshot using MobExportSnapshot in secure mode. */ -@Category({ VerySlowRegionServerTests.class, LargeTests.class }) +@Tag(VerySlowRegionServerTests.TAG) +@Tag(LargeTests.TAG) public class TestMobSecureExportSnapshot extends TestMobExportSnapshot { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestMobSecureExportSnapshot.class); - - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { setUpBaseConf(TEST_UTIL.getConfiguration()); // Setup separate test-data directory for MR cluster and set corresponding configurations. diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestSecureExportSnapshot.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestSecureExportSnapshot.java index a2a588ac5724..87125bae7578 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestSecureExportSnapshot.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestSecureExportSnapshot.java @@ -24,21 +24,23 @@ import org.apache.hadoop.hbase.security.access.SecureTestUtil; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.VerySlowRegionServerTests; -import org.junit.BeforeClass; import org.junit.ClassRule; import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; /** * Reruns TestExportSnapshot using ExportSnapshot in secure mode. 
*/ -@Category({ VerySlowRegionServerTests.class, LargeTests.class }) +@Tag(VerySlowRegionServerTests.TAG) +@Tag(LargeTests.TAG) public class TestSecureExportSnapshot extends TestExportSnapshot { @ClassRule public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestSecureExportSnapshot.class); - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { setUpBaseConf(TEST_UTIL.getConfiguration()); // Setup separate test-data directory for MR cluster and set corresponding configurations.