diff --git a/hbase-mapreduce/pom.xml b/hbase-mapreduce/pom.xml
index 9854ccf98330..8b2063e64b8d 100644
--- a/hbase-mapreduce/pom.xml
+++ b/hbase-mapreduce/pom.xml
@@ -161,7 +161,7 @@
      <groupId>org.mockito</groupId>
-      <artifactId>mockito-core</artifactId>
+      <artifactId>mockito-junit-jupiter</artifactId>
      <scope>test</scope>
@@ -191,11 +191,6 @@
      <artifactId>junit-jupiter-params</artifactId>
      <scope>test</scope>
-    <dependency>
-      <groupId>org.junit.vintage</groupId>
-      <artifactId>junit-vintage-engine</artifactId>
-      <scope>test</scope>
-    </dependency>
      <groupId>org.slf4j</groupId>
      <artifactId>jcl-over-slf4j</artifactId>
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestDriver.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestDriver.java
index 6c49a43bf463..8b8d240088da 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestDriver.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestDriver.java
@@ -20,21 +20,17 @@
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.testclassification.MapReduceTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.util.ProgramDriver;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
-@Category({ MapReduceTests.class, SmallTests.class })
+@Tag(MapReduceTests.TAG)
+@Tag(SmallTests.TAG)
public class TestDriver {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestDriver.class);
-
@Test
public void testDriverMainMethod() throws Throwable {
ProgramDriver programDriverMock = mock(ProgramDriver.class);
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestGroupingTableMap.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestGroupingTableMap.java
index 2912fd4d025c..5f1a05684456 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestGroupingTableMap.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestGroupingTableMap.java
@@ -17,8 +17,9 @@
*/
package org.apache.hadoop.hbase.mapred;
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertNull;
+import static org.junit.jupiter.api.Assertions.assertArrayEquals;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
@@ -32,7 +33,6 @@
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
@@ -42,20 +42,15 @@
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
-import org.junit.Assert;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList;
-@Category({ MapReduceTests.class, SmallTests.class })
+@Tag(MapReduceTests.TAG)
+@Tag(SmallTests.TAG)
public class TestGroupingTableMap {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestGroupingTableMap.class);
-
@Test
@SuppressWarnings("unchecked")
public void shouldNotCallCollectonSinceFindUniqueKeyValueMoreThanOnes() throws Exception {
@@ -156,7 +151,7 @@ public void collect(ImmutableBytesWritable arg, Result result) throws IOExceptio
gTableMap.map(null, result, outputCollector, reporter);
verify(result).listCells();
- Assert.assertTrue("Output not received", outputCollected.get());
+ assertTrue(outputCollected.get(), "Output not received");
final byte[] firstPartValue = Bytes.toBytes("238947928");
final byte[] secondPartValue = Bytes.toBytes("4678456942345");
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestIdentityTableMap.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestIdentityTableMap.java
index 96e25b51f659..14df668d79a6 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestIdentityTableMap.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestIdentityTableMap.java
@@ -22,25 +22,20 @@
import static org.mockito.Mockito.verify;
import java.io.IOException;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.testclassification.MapReduceTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
-@Category({ MapReduceTests.class, SmallTests.class })
+@Tag(MapReduceTests.TAG)
+@Tag(SmallTests.TAG)
public class TestIdentityTableMap {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestIdentityTableMap.class);
-
@Test
@SuppressWarnings({ "deprecation", "unchecked" })
public void shouldCollectPredefinedTimes() throws IOException {
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestMultiTableSnapshotInputFormat.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestMultiTableSnapshotInputFormat.java
index c042bd35a56d..065acfafc0dc 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestMultiTableSnapshotInputFormat.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestMultiTableSnapshotInputFormat.java
@@ -17,14 +17,13 @@
*/
package org.apache.hadoop.hbase.mapred;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
@@ -37,21 +36,17 @@
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.RunningJob;
-import org.junit.ClassRule;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
-@Category({ VerySlowMapReduceTests.class, LargeTests.class })
+@Tag(VerySlowMapReduceTests.TAG)
+@Tag(LargeTests.TAG)
public class TestMultiTableSnapshotInputFormat
extends org.apache.hadoop.hbase.mapreduce.TestMultiTableSnapshotInputFormat {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestMultiTableSnapshotInputFormat.class);
-
private static final Logger LOG =
LoggerFactory.getLogger(TestMultiTableSnapshotInputFormat.class);
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestRowCounter.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestRowCounter.java
index 0f71055c6a72..51bae4b52139 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestRowCounter.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestRowCounter.java
@@ -17,9 +17,9 @@
*/
package org.apache.hadoop.hbase.mapred;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.Mockito.mock;
@@ -28,7 +28,6 @@
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintStream;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
@@ -38,20 +37,16 @@
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
import org.apache.hbase.thirdparty.com.google.common.base.Joiner;
-@Category({ MapReduceTests.class, MediumTests.class })
+@Tag(MapReduceTests.TAG)
+@Tag(MediumTests.TAG)
public class TestRowCounter {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestRowCounter.class);
-
@Test
@SuppressWarnings("deprecation")
public void shouldPrintUsage() throws Exception {
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestSplitTable.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestSplitTable.java
index 3e093430a92e..60ba178eea58 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestSplitTable.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestSplitTable.java
@@ -17,31 +17,21 @@
*/
package org.apache.hadoop.hbase.mapred;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.testclassification.MapReduceTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
-import org.junit.Assert;
-import org.junit.ClassRule;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.rules.TestName;
-
-@Category({ MapReduceTests.class, SmallTests.class })
-public class TestSplitTable {
-
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestSplitTable.class);
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestInfo;
- @Rule
- public TestName name = new TestName();
+@Tag(MapReduceTests.TAG)
+@Tag(SmallTests.TAG)
+public class TestSplitTable {
@Test
@SuppressWarnings({ "deprecation", "SelfComparison" })
@@ -104,16 +94,16 @@ public void testSplitTableEquals() {
@Test
@SuppressWarnings("deprecation")
- public void testToString() {
- TableSplit split = new TableSplit(TableName.valueOf(name.getMethodName()),
+ public void testToString(TestInfo testInfo) {
+ TableSplit split = new TableSplit(TableName.valueOf(testInfo.getTestMethod().get().getName()),
Bytes.toBytes("row-start"), Bytes.toBytes("row-end"), "location");
- String str = "HBase table split(table name: " + name.getMethodName()
+ String str = "HBase table split(table name: " + testInfo.getTestMethod().get().getName()
+ ", start row: row-start, " + "end row: row-end, region location: location)";
- Assert.assertEquals(str, split.toString());
+ assertEquals(str, split.toString());
split = new TableSplit((TableName) null, null, null, null);
str = "HBase table split(table name: null, start row: null, "
+ "end row: null, region location: null)";
- Assert.assertEquals(str, split.toString());
+ assertEquals(str, split.toString());
}
}
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java
index d15d3a574640..4b633325c5b8 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java
@@ -17,9 +17,10 @@
*/
package org.apache.hadoop.hbase.mapred;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.doReturn;
@@ -33,7 +34,6 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CompareOperator;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.NotServingRegionException;
@@ -61,12 +61,11 @@
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.RunningJob;
import org.apache.hadoop.mapred.lib.NullOutputFormat;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.slf4j.Logger;
@@ -75,13 +74,10 @@
/**
* This tests the TableInputFormat and its recovery semantics
*/
-@Category({ MapReduceTests.class, LargeTests.class })
+@Tag(MapReduceTests.TAG)
+@Tag(LargeTests.TAG)
public class TestTableInputFormat {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestTableInputFormat.class);
-
private static final Logger LOG = LoggerFactory.getLogger(TestTableInputFormat.class);
private final static HBaseTestingUtil UTIL = new HBaseTestingUtil();
@@ -90,17 +86,17 @@ public class TestTableInputFormat {
private static final byte[][] columns = new byte[][] { FAMILY };
- @BeforeClass
+ @BeforeAll
public static void beforeClass() throws Exception {
UTIL.startMiniCluster();
}
- @AfterClass
+ @AfterAll
public static void afterClass() throws Exception {
UTIL.shutdownMiniCluster();
}
- @Before
+ @BeforeEach
public void before() throws IOException {
LOG.info("before");
UTIL.ensureSomeRegionServersAvailable(1);
@@ -265,10 +261,10 @@ public void testTableRecordReaderScannerFail() throws IOException {
/**
* Run test assuming Scanner IOException failure using mapred api,
*/
- @Test(expected = IOException.class)
+ @Test
public void testTableRecordReaderScannerFailTwice() throws IOException {
Table htable = createIOEScannerTable(Bytes.toBytes("table3"), 2);
- runTestMapred(htable);
+ assertThrows(IOException.class, () -> runTestMapred(htable));
}
/**
@@ -285,10 +281,11 @@ public void testTableRecordReaderScannerTimeout() throws IOException {
* Run test assuming NotServingRegionException using mapred api.
* @throws org.apache.hadoop.hbase.DoNotRetryIOException
*/
- @Test(expected = org.apache.hadoop.hbase.NotServingRegionException.class)
+ @Test
public void testTableRecordReaderScannerTimeoutTwice() throws IOException {
Table htable = createDNRIOEScannerTable(Bytes.toBytes("table5"), 2);
- runTestMapred(htable);
+ assertThrows(org.apache.hadoop.hbase.NotServingRegionException.class,
+ () -> runTestMapred(htable));
}
/**
@@ -329,19 +326,31 @@ void testInputFormat(Class<? extends InputFormat> clazz) throws IOException {
job.setNumReduceTasks(0);
LOG.debug("submitting job.");
final RunningJob run = JobClient.runJob(job);
- assertTrue("job failed!", run.isSuccessful());
- assertEquals("Saw the wrong number of instances of the filtered-for row.", 2, run.getCounters()
- .findCounter(TestTableInputFormat.class.getName() + ":row", "aaa").getCounter());
- assertEquals("Saw any instances of the filtered out row.", 0, run.getCounters()
- .findCounter(TestTableInputFormat.class.getName() + ":row", "bbb").getCounter());
- assertEquals("Saw the wrong number of instances of columnA.", 1, run.getCounters()
- .findCounter(TestTableInputFormat.class.getName() + ":family", "columnA").getCounter());
- assertEquals("Saw the wrong number of instances of columnB.", 1, run.getCounters()
- .findCounter(TestTableInputFormat.class.getName() + ":family", "columnB").getCounter());
- assertEquals("Saw the wrong count of values for the filtered-for row.", 2, run.getCounters()
- .findCounter(TestTableInputFormat.class.getName() + ":value", "value aaa").getCounter());
- assertEquals("Saw the wrong count of values for the filtered-out row.", 0, run.getCounters()
- .findCounter(TestTableInputFormat.class.getName() + ":value", "value bbb").getCounter());
+ assertTrue(run.isSuccessful(), "job failed!");
+ assertEquals(2,
+ run.getCounters().findCounter(TestTableInputFormat.class.getName() + ":row", "aaa")
+ .getCounter(),
+ "Saw the wrong number of instances of the filtered-for row.");
+ assertEquals(0,
+ run.getCounters().findCounter(TestTableInputFormat.class.getName() + ":row", "bbb")
+ .getCounter(),
+ "Saw any instances of the filtered out row.");
+ assertEquals(1,
+ run.getCounters().findCounter(TestTableInputFormat.class.getName() + ":family", "columnA")
+ .getCounter(),
+ "Saw the wrong number of instances of columnA.");
+ assertEquals(1,
+ run.getCounters().findCounter(TestTableInputFormat.class.getName() + ":family", "columnB")
+ .getCounter(),
+ "Saw the wrong number of instances of columnB.");
+ assertEquals(2,
+ run.getCounters().findCounter(TestTableInputFormat.class.getName() + ":value", "value aaa")
+ .getCounter(),
+ "Saw the wrong count of values for the filtered-for row.");
+ assertEquals(0,
+ run.getCounters().findCounter(TestTableInputFormat.class.getName() + ":value", "value bbb")
+ .getCounter(),
+ "Saw the wrong count of values for the filtered-out row.");
}
public static class ExampleVerifier implements TableMap<ImmutableBytesWritable, NullWritable> {
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java
index 2820d9111277..92b6301d0b4d 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java
@@ -17,12 +17,11 @@
*/
package org.apache.hadoop.hbase.mapred;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.File;
import java.io.IOException;
import org.apache.hadoop.fs.FileUtil;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
@@ -37,8 +36,7 @@
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.RunningJob;
-import org.junit.ClassRule;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -47,14 +45,11 @@
* simple - take every row in the table, reverse the value of a particular cell, and write it back
* to the table.
*/
-@Category({ MapReduceTests.class, LargeTests.class })
+@Tag(MapReduceTests.TAG)
+@Tag(LargeTests.TAG)
@SuppressWarnings("deprecation")
public class TestTableMapReduce extends TestTableMapReduceBase {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestTableMapReduce.class);
-
private static final Logger LOG = LoggerFactory.getLogger(TestTableMapReduce.class.getName());
protected Logger getLog() {
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java
index 77ac55a1b6d0..1ec3df6152cd 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java
@@ -17,8 +17,9 @@
*/
package org.apache.hadoop.hbase.mapred;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.File;
import java.io.IOException;
@@ -28,7 +29,6 @@
import java.util.Set;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileUtil;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Put;
@@ -44,26 +44,21 @@
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.RunningJob;
-import org.junit.AfterClass;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableMap;
import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableSet;
-@Category({ MapReduceTests.class, LargeTests.class })
+@Tag(MapReduceTests.TAG)
+@Tag(LargeTests.TAG)
public class TestTableMapReduceUtil {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestTableMapReduceUtil.class);
-
private static final Logger LOG = LoggerFactory.getLogger(TestTableMapReduceUtil.class);
private static Table presidentsTable;
@@ -88,18 +83,18 @@ public class TestTableMapReduceUtil {
private static final HBaseTestingUtil UTIL = new HBaseTestingUtil();
- @BeforeClass
+ @BeforeAll
public static void beforeClass() throws Exception {
UTIL.startMiniCluster();
presidentsTable = createAndFillTable(TableName.valueOf(TABLE_NAME));
}
- @AfterClass
+ @AfterAll
public static void afterClass() throws Exception {
UTIL.shutdownMiniCluster();
}
- @Before
+ @BeforeEach
public void before() throws IOException {
LOG.info("before");
UTIL.ensureSomeRegionServersAvailable(1);
@@ -136,7 +131,7 @@ private static void createPutCommand(Table table) throws IOException {
*/
@Test
public void shouldNumberOfReduceTaskNotExceedNumberOfRegionsForGivenTable() throws IOException {
- Assert.assertNotNull(presidentsTable);
+ assertNotNull(presidentsTable);
Configuration cfg = UTIL.getConfiguration();
JobConf jobConf = new JobConf(cfg);
TableMapReduceUtil.setNumReduceTasks(TABLE_NAME, jobConf);
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableOutputFormatConnectionExhaust.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableOutputFormatConnectionExhaust.java
index fec2c8cf0204..056058251499 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableOutputFormatConnectionExhaust.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableOutputFormatConnectionExhaust.java
@@ -17,22 +17,20 @@
*/
package org.apache.hadoop.hbase.mapred;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.fail;
import java.io.IOException;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RecordWriter;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -41,13 +39,9 @@
* we can have many instances and not leak connections. This test creates a few TableOutputFormats
* and shouldn't fail due to ZK connection exhaustion.
*/
-@Category(MediumTests.class)
+@Tag(MediumTests.TAG)
public class TestTableOutputFormatConnectionExhaust {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestTableOutputFormatConnectionExhaust.class);
-
private static final Logger LOG =
LoggerFactory.getLogger(TestTableOutputFormatConnectionExhaust.class);
@@ -55,7 +49,7 @@ public class TestTableOutputFormatConnectionExhaust {
static final String TABLE = "TestTableOutputFormatConnectionExhaust";
static final String FAMILY = "family";
- @BeforeClass
+ @BeforeAll
public static void beforeClass() throws Exception {
// Default in ZookeeperMiniCluster is 1000, setting artificially low to trigger exhaustion.
// need min of 7 to properly start the default mini HBase cluster
@@ -63,12 +57,12 @@ public static void beforeClass() throws Exception {
UTIL.startMiniCluster();
}
- @AfterClass
+ @AfterAll
public static void afterClass() throws Exception {
UTIL.shutdownMiniCluster();
}
- @Before
+ @BeforeEach
public void before() throws IOException {
LOG.info("before");
UTIL.ensureSomeRegionServersAvailable(1);
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableSnapshotInputFormat.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableSnapshotInputFormat.java
index 3c1b717d5abf..9f008b82857f 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableSnapshotInputFormat.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableSnapshotInputFormat.java
@@ -18,12 +18,13 @@
package org.apache.hadoop.hbase.mapred;
import static org.apache.hadoop.hbase.mapreduce.TableSnapshotInputFormatImpl.SNAPSHOT_INPUTFORMAT_LOCALITY_ENABLED_DEFAULT;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.mock;
import java.io.IOException;
import java.util.Iterator;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
@@ -45,28 +46,27 @@
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.RunningJob;
import org.apache.hadoop.mapred.lib.NullOutputFormat;
-import org.junit.Assert;
-import org.junit.ClassRule;
-import org.junit.Ignore;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.rules.TestName;
-
-@Category({ VerySlowMapReduceTests.class, LargeTests.class })
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestInfo;
+
+@Tag(VerySlowMapReduceTests.TAG)
+@Tag(LargeTests.TAG)
public class TestTableSnapshotInputFormat extends TableSnapshotInputFormatTestBase {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestTableSnapshotInputFormat.class);
-
private static final byte[] aaa = Bytes.toBytes("aaa");
private static final byte[] after_zzz = Bytes.toBytes("zz{"); // 'z' + 1 => '{'
private static final String COLUMNS =
Bytes.toString(FAMILIES[0]) + " " + Bytes.toString(FAMILIES[1]);
- @Rule
- public TestName name = new TestName();
+ private String methodName;
+
+ @BeforeEach
+ public void beforeEach(TestInfo testInfo) {
+ methodName = testInfo.getTestMethod().get().getName();
+ }
@Override
protected byte[] getStartRow() {
@@ -108,7 +108,7 @@ public void close() {
@Test
public void testInitTableSnapshotMapperJobConfig() throws Exception {
- final TableName tableName = TableName.valueOf(name.getMethodName());
+ final TableName tableName = TableName.valueOf(methodName);
String snapshotName = "foo";
try {
@@ -122,11 +122,11 @@ public void testInitTableSnapshotMapperJobConfig() throws Exception {
// TODO: would be better to examine directly the cache instance that results from this
// config. Currently this is not possible because BlockCache initialization is static.
- Assert.assertEquals("Snapshot job should be configured for default LruBlockCache.",
- HConstants.HFILE_BLOCK_CACHE_SIZE_DEFAULT,
- job.getFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, -1), 0.01);
- Assert.assertEquals("Snapshot job should not use BucketCache.", 0,
- job.getFloat("hbase.bucketcache.size", -1), 0.01);
+ assertEquals(HConstants.HFILE_BLOCK_CACHE_SIZE_DEFAULT,
+ job.getFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, -1), 0.01,
+ "Snapshot job should be configured for default LruBlockCache.");
+ assertEquals(0, job.getFloat("hbase.bucketcache.size", -1), 0.01,
+ "Snapshot job should not use BucketCache.");
} finally {
UTIL.getAdmin().deleteSnapshot(snapshotName);
UTIL.deleteTable(tableName);
@@ -168,7 +168,7 @@ public void testRestoreSnapshotDoesNotCreateBackRefLinksInit(TableName tableName
@Override
protected void testWithMockedMapReduce(HBaseTestingUtil util, String snapshotName, int numRegions,
int numSplitsPerRegion, int expectedNumSplits, boolean setLocalityEnabledTo) throws Exception {
- final TableName tableName = TableName.valueOf(name.getMethodName());
+ final TableName tableName = TableName.valueOf(methodName);
try {
createTableAndSnapshot(util, tableName, snapshotName, getStartRow(), getEndRow(), numRegions);
@@ -202,7 +202,7 @@ private void verifyWithMockedMapReduce(JobConf job, int numRegions, int expected
TableSnapshotInputFormat tsif = new TableSnapshotInputFormat();
InputSplit[] splits = tsif.getSplits(job, 0);
- Assert.assertEquals(expectedNumSplits, splits.length);
+ assertEquals(expectedNumSplits, splits.length);
HBaseTestingUtil.SeenRowTracker rowTracker =
new HBaseTestingUtil.SeenRowTracker(startRow, stopRow);
@@ -214,7 +214,7 @@ private void verifyWithMockedMapReduce(JobConf job, int numRegions, int expected
for (int i = 0; i < splits.length; i++) {
// validate input split
InputSplit split = splits[i];
- Assert.assertTrue(split instanceof TableSnapshotInputFormat.TableSnapshotRegionSplit);
+ assertTrue(split instanceof TableSnapshotInputFormat.TableSnapshotRegionSplit);
if (localityEnabled) {
// When localityEnabled is true, meant to verify split.getLocations()
// by the following statement:
@@ -222,9 +222,9 @@ private void verifyWithMockedMapReduce(JobConf job, int numRegions, int expected
// However, getLocations() of some splits could return an empty array (length is 0),
// so drop the verification on length.
// TODO: investigate how to verify split.getLocations() when localityEnabled is true
- Assert.assertTrue(split.getLocations() != null);
+ assertTrue(split.getLocations() != null);
} else {
- Assert.assertTrue(split.getLocations() != null && split.getLocations().length == 0);
+ assertTrue(split.getLocations() != null && split.getLocations().length == 0);
}
// validate record reader
@@ -290,7 +290,7 @@ public static void doTestWithMapReduce(HBaseTestingUtil util, TableName tableNam
jobConf.setOutputFormat(NullOutputFormat.class);
RunningJob job = JobClient.runJob(jobConf);
- Assert.assertTrue(job.isSuccessful());
+ assertTrue(job.isSuccessful());
} finally {
if (!shutdownCluster) {
util.getAdmin().deleteSnapshot(snapshotName);
@@ -299,7 +299,7 @@ public static void doTestWithMapReduce(HBaseTestingUtil util, TableName tableNam
}
}
- @Ignore // Ignored in mapred package because it keeps failing but allowed in mapreduce package.
+ @Disabled // Ignored in mapred package because it keeps failing but allowed in mapreduce package.
@Test
public void testWithMapReduceMultipleMappersPerRegion() throws Exception {
testWithMapReduce(UTIL, "testWithMapReduceMultiRegion", 10, 5, 50, false);
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/CopyTableTestBase.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/CopyTableTestBase.java
index d7648c26406d..da07981320d7 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/CopyTableTestBase.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/CopyTableTestBase.java
@@ -17,10 +17,10 @@
*/
package org.apache.hadoop.hbase.mapreduce;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.IOException;
import org.apache.commons.lang3.ArrayUtils;
@@ -39,8 +39,7 @@
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.util.ToolRunner;
-import org.junit.Rule;
-import org.junit.rules.TestName;
+import org.junit.jupiter.api.TestInfo;
/**
* Base class for testing CopyTable MR tool.
@@ -55,9 +54,6 @@ public abstract class CopyTableTestBase {
protected static final byte[] FAMILY_B = Bytes.toBytes(FAMILY_B_STRING);
protected static final byte[] QUALIFIER = Bytes.toBytes("q");
- @Rule
- public TestName name = new TestName();
-
protected abstract Table createSourceTable(TableDescriptor desc) throws Exception;
protected abstract Table createTargetTable(TableDescriptor desc) throws Exception;
@@ -91,9 +87,10 @@ protected final void verifyRows(Table t, byte[] family, byte[] column) throws IO
}
}
- protected final void doCopyTableTest(Configuration conf, boolean bulkload) throws Exception {
- TableName tableName1 = TableName.valueOf(name.getMethodName() + "1");
- TableName tableName2 = TableName.valueOf(name.getMethodName() + "2");
+ protected final void doCopyTableTest(Configuration conf, boolean bulkload, TestInfo testInfo)
+ throws Exception {
+ TableName tableName1 = TableName.valueOf(testInfo.getTestMethod().get().getName() + "1");
+ TableName tableName2 = TableName.valueOf(testInfo.getTestMethod().get().getName() + "2");
byte[] family = Bytes.toBytes("family");
byte[] column = Bytes.toBytes("c1");
TableDescriptor desc1 = TableDescriptorBuilder.newBuilder(tableName1)
@@ -123,10 +120,10 @@ protected final void doCopyTableTest(Configuration conf, boolean bulkload) throw
}
}
- protected final void doCopyTableTestWithMob(Configuration conf, boolean bulkload)
- throws Exception {
- TableName tableName1 = TableName.valueOf(name.getMethodName() + "1");
- TableName tableName2 = TableName.valueOf(name.getMethodName() + "2");
+ protected final void doCopyTableTestWithMob(Configuration conf, boolean bulkload,
+ TestInfo testInfo) throws Exception {
+ TableName tableName1 = TableName.valueOf(testInfo.getTestMethod().get().getName() + "1");
+ TableName tableName2 = TableName.valueOf(testInfo.getTestMethod().get().getName() + "2");
byte[] family = Bytes.toBytes("mob");
byte[] column = Bytes.toBytes("c1");
@@ -163,15 +160,15 @@ protected final void doCopyTableTestWithMob(Configuration conf, boolean bulkload
Result r = t2.get(g);
assertEquals(1, r.size());
assertTrue(CellUtil.matchingQualifier(r.rawCells()[0], column));
- assertEquals("compare row values between two tables",
- t1.getDescriptor().getValue("row" + i), t2.getDescriptor().getValue("row" + i));
+ assertEquals(t1.getDescriptor().getValue("row" + i), t2.getDescriptor().getValue("row" + i),
+ "compare row values between two tables");
}
- assertEquals("compare count of mob rows after table copy", MobTestUtil.countMobRows(t1),
- MobTestUtil.countMobRows(t2));
- assertEquals("compare count of mob row values between two tables",
- t1.getDescriptor().getValues().size(), t2.getDescriptor().getValues().size());
- assertTrue("The mob row count is 0 but should be > 0", MobTestUtil.countMobRows(t2) > 0);
+ assertEquals(MobTestUtil.countMobRows(t1), MobTestUtil.countMobRows(t2),
+ "compare count of mob rows after table copy");
+ assertEquals(t1.getDescriptor().getValues().size(), t2.getDescriptor().getValues().size(),
+ "compare count of mob row values between two tables");
+ assertTrue(MobTestUtil.countMobRows(t2) > 0, "The mob row count is 0 but should be > 0");
} finally {
dropSourceTable(tableName1);
dropTargetTable(tableName2);
@@ -183,9 +180,9 @@ protected final boolean runCopy(Configuration conf, String[] args) throws Except
return status == 0;
}
- protected final void testStartStopRow(Configuration conf) throws Exception {
- final TableName tableName1 = TableName.valueOf(name.getMethodName() + "1");
- final TableName tableName2 = TableName.valueOf(name.getMethodName() + "2");
+ protected final void testStartStopRow(Configuration conf, TestInfo testInfo) throws Exception {
+ final TableName tableName1 = TableName.valueOf(testInfo.getTestMethod().get().getName() + "1");
+ final TableName tableName2 = TableName.valueOf(testInfo.getTestMethod().get().getName() + "2");
final byte[] family = Bytes.toBytes("family");
final byte[] column = Bytes.toBytes("c1");
final byte[] row0 = Bytes.toBytesBinary("\\x01row0");
@@ -231,9 +228,11 @@ protected final void testStartStopRow(Configuration conf) throws Exception {
}
}
- protected final void testRenameFamily(Configuration conf) throws Exception {
- TableName sourceTable = TableName.valueOf(name.getMethodName() + "-source");
- TableName targetTable = TableName.valueOf(name.getMethodName() + "-target");
+ protected final void testRenameFamily(Configuration conf, TestInfo testInfo) throws Exception {
+ TableName sourceTable =
+ TableName.valueOf(testInfo.getTestMethod().get().getName() + "-source");
+ TableName targetTable =
+ TableName.valueOf(testInfo.getTestMethod().get().getName() + "-target");
TableDescriptor desc1 = TableDescriptorBuilder.newBuilder(sourceTable)
.setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY_A))
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/CopyTableToPeerClusterTestBase.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/CopyTableToPeerClusterTestBase.java
index d9219c9420f4..e089b3037d81 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/CopyTableToPeerClusterTestBase.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/CopyTableToPeerClusterTestBase.java
@@ -17,16 +17,17 @@
*/
package org.apache.hadoop.hbase.mapreduce;
-import static org.junit.Assert.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertFalse;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptor;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestInfo;
/**
* Test CopyTable between clusters
@@ -37,13 +38,13 @@ public abstract class CopyTableToPeerClusterTestBase extends CopyTableTestBase {
protected static final HBaseTestingUtil UTIL2 = new HBaseTestingUtil();
- @BeforeClass
+ @BeforeAll
public static void beforeClass() throws Exception {
UTIL1.startMiniCluster(3);
UTIL2.startMiniCluster(3);
}
- @AfterClass
+ @AfterAll
public static void afterClass() throws Exception {
UTIL1.shutdownMiniCluster();
UTIL2.shutdownMiniCluster();
@@ -78,35 +79,35 @@ protected String[] getPeerClusterOptions() throws Exception {
* Simple end-to-end test
*/
@Test
- public void testCopyTable() throws Exception {
- doCopyTableTest(UTIL1.getConfiguration(), false);
+ public void testCopyTable(TestInfo testInfo) throws Exception {
+ doCopyTableTest(UTIL1.getConfiguration(), false, testInfo);
}
/**
* Simple end-to-end test on table with MOB
*/
@Test
- public void testCopyTableWithMob() throws Exception {
- doCopyTableTestWithMob(UTIL1.getConfiguration(), false);
+ public void testCopyTableWithMob(TestInfo testInfo) throws Exception {
+ doCopyTableTestWithMob(UTIL1.getConfiguration(), false, testInfo);
}
@Test
- public void testStartStopRow() throws Exception {
- testStartStopRow(UTIL1.getConfiguration());
+ public void testStartStopRow(TestInfo testInfo) throws Exception {
+ testStartStopRow(UTIL1.getConfiguration(), testInfo);
}
/**
* Test copy of table from sourceTable to targetTable all rows from family a
*/
@Test
- public void testRenameFamily() throws Exception {
- testRenameFamily(UTIL1.getConfiguration());
+ public void testRenameFamily(TestInfo testInfo) throws Exception {
+ testRenameFamily(UTIL1.getConfiguration(), testInfo);
}
@Test
- public void testBulkLoadNotSupported() throws Exception {
- TableName tableName1 = TableName.valueOf(name.getMethodName() + "1");
- TableName tableName2 = TableName.valueOf(name.getMethodName() + "2");
+ public void testBulkLoadNotSupported(TestInfo testInfo) throws Exception {
+ TableName tableName1 = TableName.valueOf(testInfo.getTestMethod().get().getName() + "1");
+ TableName tableName2 = TableName.valueOf(testInfo.getTestMethod().get().getName() + "2");
try (Table t1 = UTIL1.createTable(tableName1, FAMILY_A);
Table t2 = UTIL2.createTable(tableName2, FAMILY_A)) {
String[] args = ArrayUtils.addAll(getPeerClusterOptions(),
@@ -119,9 +120,9 @@ public void testBulkLoadNotSupported() throws Exception {
}
@Test
- public void testSnapshotNotSupported() throws Exception {
- TableName tableName1 = TableName.valueOf(name.getMethodName() + "1");
- TableName tableName2 = TableName.valueOf(name.getMethodName() + "2");
+ public void testSnapshotNotSupported(TestInfo testInfo) throws Exception {
+ TableName tableName1 = TableName.valueOf(testInfo.getTestMethod().get().getName() + "1");
+ TableName tableName2 = TableName.valueOf(testInfo.getTestMethod().get().getName() + "2");
String snapshot = tableName1.getNameAsString() + "_snapshot";
try (Table t1 = UTIL1.createTable(tableName1, FAMILY_A);
Table t2 = UTIL2.createTable(tableName2, FAMILY_A)) {
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2TestBase.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2TestBase.java
index ac9810a8825a..2e459402e94a 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2TestBase.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2TestBase.java
@@ -17,9 +17,9 @@
*/
package org.apache.hadoop.hbase.mapreduce;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.IOException;
import java.util.List;
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/MRIncrementalLoadTestBase.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/MRIncrementalLoadTestBase.java
index ad2f841c19df..6e44fec8480d 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/MRIncrementalLoadTestBase.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/MRIncrementalLoadTestBase.java
@@ -17,8 +17,8 @@
*/
package org.apache.hadoop.hbase.mapreduce;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.IOException;
import java.util.ArrayList;
@@ -44,15 +44,11 @@
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.tool.BulkLoadHFiles;
import org.apache.hadoop.hbase.util.Bytes;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runners.Parameterized.Parameter;
+import org.junit.jupiter.api.AfterAll;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-public class MRIncrementalLoadTestBase extends HFileOutputFormat2TestBase {
+public abstract class MRIncrementalLoadTestBase extends HFileOutputFormat2TestBase {
private static final Logger LOG = LoggerFactory.getLogger(MRIncrementalLoadTestBase.class);
@@ -60,13 +56,10 @@ public class MRIncrementalLoadTestBase extends HFileOutputFormat2TestBase {
private static String[] HOSTNAMES;
- @Parameter(0)
public boolean shouldChangeRegions;
- @Parameter(1)
public boolean putSortReducer;
- @Parameter(2)
public List tableStr;
private Map allTables;
@@ -94,12 +87,11 @@ protected static void setupCluster(boolean shouldKeepLocality) throws Exception
}
- @AfterClass
+ @AfterAll
public static void tearDownAfterClass() throws IOException {
UTIL.shutdownMiniCluster();
}
- @Before
public void setUp() throws IOException {
int regionNum = SHOULD_KEEP_LOCALITY ? 20 : 5;
allTables = new HashMap<>(tableStr.size());
@@ -110,9 +102,9 @@ public void setUp() throws IOException {
Table table = UTIL.createTable(tableName, FAMILIES, splitKeys);
RegionLocator r = UTIL.getConnection().getRegionLocator(tableName);
- assertEquals("Should start with empty table", 0, HBaseTestingUtil.countRows(table));
+ assertEquals(0, HBaseTestingUtil.countRows(table), "Should start with empty table");
int numRegions = r.getStartKeys().length;
- assertEquals("Should make " + regionNum + " regions", numRegions, regionNum);
+      assertEquals(regionNum, numRegions, "Should make " + regionNum + " regions");
allTables.put(tableStrSingle, table);
tableInfo.add(new HFileOutputFormat2.TableInfo(table.getDescriptor(), r));
@@ -120,7 +112,6 @@ public void setUp() throws IOException {
testDir = UTIL.getDataTestDirOnTestFS(tableStr.get(0));
}
- @After
public void tearDown() throws IOException {
for (HFileOutputFormat2.TableInfo tableInfoSingle : tableInfo) {
tableInfoSingle.getRegionLocator().close();
@@ -132,7 +123,19 @@ public void tearDown() throws IOException {
}
}
- @Test
+ protected void runTest(boolean shouldChangeRegions, boolean putSortReducer,
+ List tableStr) throws Exception {
+ this.shouldChangeRegions = shouldChangeRegions;
+ this.putSortReducer = putSortReducer;
+ this.tableStr = tableStr;
+ setUp();
+ try {
+ doIncrementalLoadTest();
+ } finally {
+ tearDown();
+ }
+ }
+
public void doIncrementalLoadTest() throws Exception {
boolean writeMultipleTables = tableStr.size() > 1;
// Generate the bulk load files
@@ -143,8 +146,8 @@ public void doIncrementalLoadTest() throws Exception {
for (Table tableSingle : allTables.values()) {
// This doesn't write into the table, just makes files
- assertEquals("HFOF should not touch actual table", 0,
- HBaseTestingUtil.countRows(tableSingle));
+ assertEquals(0, HBaseTestingUtil.countRows(tableSingle),
+ "HFOF should not touch actual table");
}
int numTableDirs = 0;
FileStatus[] fss = testDir.getFileSystem(UTIL.getConfiguration()).listStatus(testDir);
@@ -169,10 +172,10 @@ public void doIncrementalLoadTest() throws Exception {
}
}
}
- assertEquals("Column family not found in FS.", FAMILIES.length, dir);
+ assertEquals(FAMILIES.length, dir, "Column family not found in FS.");
}
if (writeMultipleTables) {
- assertEquals("Dir for all input tables not created", numTableDirs, allTables.size());
+ assertEquals(numTableDirs, allTables.size(), "Dir for all input tables not created");
}
Admin admin = UTIL.getAdmin();
@@ -207,12 +210,12 @@ public void doIncrementalLoadTest() throws Exception {
int expectedRows = 0;
if (putSortReducer) {
// no rows should be extracted
- assertEquals("BulkLoadHFiles should put expected data in table", expectedRows,
- HBaseTestingUtil.countRows(currentTable));
+ assertEquals(expectedRows, HBaseTestingUtil.countRows(currentTable),
+ "BulkLoadHFiles should put expected data in table");
} else {
expectedRows = NMapInputFormat.getNumMapTasks(UTIL.getConfiguration()) * ROWSPERSPLIT;
- assertEquals("BulkLoadHFiles should put expected data in table", expectedRows,
- HBaseTestingUtil.countRows(currentTable));
+ assertEquals(expectedRows, HBaseTestingUtil.countRows(currentTable),
+ "BulkLoadHFiles should put expected data in table");
Scan scan = new Scan();
ResultScanner results = currentTable.getScanner(scan);
for (Result res : results) {
@@ -245,8 +248,8 @@ public void doIncrementalLoadTest() throws Exception {
}
admin.enableTable(currentTableName);
UTIL.waitTableAvailable(currentTableName);
- assertEquals("Data should remain after reopening of regions", tableDigestBefore,
- UTIL.checksumRows(currentTable));
+ assertEquals(tableDigestBefore, UTIL.checksumRows(currentTable),
+ "Data should remain after reopening of regions");
}
}
}
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatTestBase.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatTestBase.java
index 0e7ff24a1dab..c18a5c307663 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatTestBase.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatTestBase.java
@@ -17,8 +17,8 @@
*/
package org.apache.hadoop.hbase.mapreduce;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.File;
import java.io.IOException;
@@ -42,10 +42,10 @@
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -70,7 +70,7 @@ public abstract class MultiTableInputFormatTestBase {
}
}
- @BeforeClass
+ @BeforeAll
public static void setUpBeforeClass() throws Exception {
// switch TIF to log at DEBUG level
Log4jUtils.enableDebug(MultiTableInputFormatBase.class);
@@ -85,12 +85,12 @@ public static void setUpBeforeClass() throws Exception {
}
}
- @AfterClass
+ @AfterAll
public static void tearDownAfterClass() throws Exception {
TEST_UTIL.shutdownMiniCluster();
}
- @After
+ @AfterEach
public void tearDown() throws Exception {
Configuration c = TEST_UTIL.getConfiguration();
FileUtil.fullyDelete(new File(c.get("hadoop.tmp.dir")));
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatTestBase.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatTestBase.java
index 7a0615a5ff8e..c2002069d93f 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatTestBase.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatTestBase.java
@@ -17,7 +17,9 @@
*/
package org.apache.hadoop.hbase.mapreduce;
-import static org.junit.Assert.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertArrayEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
import java.io.IOException;
import java.util.Arrays;
@@ -42,10 +44,9 @@
import org.apache.hadoop.hbase.util.CommonFSUtils;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.HFileArchiveUtil;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -58,7 +59,7 @@ public abstract class TableSnapshotInputFormatTestBase {
protected FileSystem fs;
protected Path rootDir;
- @Before
+ @BeforeEach
public void setupCluster() throws Exception {
setupConf(UTIL.getConfiguration());
StartTestingClusterOption option =
@@ -69,7 +70,7 @@ public void setupCluster() throws Exception {
fs = rootDir.getFileSystem(UTIL.getConfiguration());
}
- @After
+ @AfterEach
public void tearDownCluster() throws Exception {
UTIL.shutdownMiniCluster();
}
@@ -142,11 +143,11 @@ public void testRestoreSnapshotDoesNotCreateBackRefLinks() throws Exception {
Path path = HFileLink.getBackReferencesDir(storeDir, status.getPath().getName());
// assert back references directory is empty
- assertFalse("There is a back reference in " + path, fs.exists(path));
+ assertFalse(fs.exists(path), "There is a back reference in " + path);
path = HFileLink.getBackReferencesDir(archiveStoreDir, status.getPath().getName());
// assert back references directory is empty
- assertFalse("There is a back reference in " + path, fs.exists(path));
+ assertFalse(fs.exists(path), "There is a back reference in " + path);
}
}
}
@@ -176,14 +177,14 @@ protected static void verifyRowFromMap(ImmutableBytesWritable key, Result result
Cell cell = scanner.current();
// assert that all Cells in the Result have the same key
- Assert.assertEquals(0, Bytes.compareTo(row, 0, row.length, cell.getRowArray(),
- cell.getRowOffset(), cell.getRowLength()));
+ assertEquals(0, Bytes.compareTo(row, 0, row.length, cell.getRowArray(), cell.getRowOffset(),
+ cell.getRowLength()));
}
for (byte[] family : FAMILIES) {
byte[] actual = result.getValue(family, family);
- Assert.assertArrayEquals("Row in snapshot does not match, expected:" + Bytes.toString(row)
- + " ,actual:" + Bytes.toString(actual), row, actual);
+ assertArrayEquals(row, actual, "Row in snapshot does not match, expected:"
+ + Bytes.toString(row) + " ,actual:" + Bytes.toString(actual));
}
}
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellCounter.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellCounter.java
index 7fbb5bc16255..d8133b1ec98a 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellCounter.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellCounter.java
@@ -17,10 +17,6 @@
*/
package org.apache.hadoop.hbase.mapreduce;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
@@ -30,7 +26,6 @@
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
@@ -42,19 +37,19 @@
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.LauncherSecurityManager;
import org.apache.hadoop.util.ToolRunner;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.rules.TestName;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestInfo;
-@Category({ MapReduceTests.class, LargeTests.class })
+@Tag(MapReduceTests.TAG)
+@Tag(LargeTests.TAG)
public class TestCellCounter {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestCellCounter.class);
private static final HBaseTestingUtil UTIL = new HBaseTestingUtil();
private static final byte[] ROW1 = Bytes.toBytesBinary("\\x01row1");
@@ -70,17 +65,14 @@ public class TestCellCounter {
"target" + File.separator + "test-data" + File.separator + "output";
private static long now = EnvironmentEdgeManager.currentTime();
- @Rule
- public TestName name = new TestName();
-
- @BeforeClass
+ @BeforeAll
public static void beforeClass() throws Exception {
UTIL.startMiniCluster();
FQ_OUTPUT_DIR = new Path(OUTPUT_DIR).makeQualified(new LocalFileSystem());
FileUtil.fullyDelete(new File(OUTPUT_DIR));
}
- @AfterClass
+ @AfterAll
public static void afterClass() throws Exception {
UTIL.shutdownMiniCluster();
}
@@ -89,8 +81,8 @@ public static void afterClass() throws Exception {
* Test CellCounter all data should print to output
*/
@Test
- public void testCellCounter() throws Exception {
- final TableName sourceTable = TableName.valueOf(name.getMethodName());
+ public void testCellCounter(TestInfo testInfo) throws Exception {
+ final TableName sourceTable = TableName.valueOf(testInfo.getTestMethod().get().getName());
byte[][] families = { FAMILY_A, FAMILY_B };
try (Table t = UTIL.createTable(sourceTable, families)) {
Put p = new Put(ROW1);
@@ -125,8 +117,8 @@ public void testCellCounter() throws Exception {
* Test CellCounter all data should print to output
*/
@Test
- public void testCellCounterPrefix() throws Exception {
- final TableName sourceTable = TableName.valueOf(name.getMethodName());
+ public void testCellCounterPrefix(TestInfo testInfo) throws Exception {
+ final TableName sourceTable = TableName.valueOf(testInfo.getTestMethod().get().getName());
byte[][] families = { FAMILY_A, FAMILY_B };
try (Table t = UTIL.createTable(sourceTable, families)) {
Put p = new Put(ROW1);
@@ -161,8 +153,8 @@ public void testCellCounterPrefix() throws Exception {
* Test CellCounter with time range all data should print to output
*/
@Test
- public void testCellCounterStartTimeRange() throws Exception {
- final TableName sourceTable = TableName.valueOf(name.getMethodName());
+ public void testCellCounterStartTimeRange(TestInfo testInfo) throws Exception {
+ final TableName sourceTable = TableName.valueOf(testInfo.getTestMethod().get().getName());
byte[][] families = { FAMILY_A, FAMILY_B };
try (Table t = UTIL.createTable(sourceTable, families)) {
Put p = new Put(ROW1);
@@ -198,8 +190,8 @@ public void testCellCounterStartTimeRange() throws Exception {
* Test CellCounter with time range all data should print to output
*/
@Test
- public void testCellCounteEndTimeRange() throws Exception {
- final TableName sourceTable = TableName.valueOf(name.getMethodName());
+ public void testCellCounteEndTimeRange(TestInfo testInfo) throws Exception {
+ final TableName sourceTable = TableName.valueOf(testInfo.getTestMethod().get().getName());
byte[][] families = { FAMILY_A, FAMILY_B };
try (Table t = UTIL.createTable(sourceTable, families)) {
Put p = new Put(ROW1);
@@ -235,8 +227,8 @@ public void testCellCounteEndTimeRange() throws Exception {
* Test CellCounter with time range all data should print to output
*/
@Test
- public void testCellCounteOutOfTimeRange() throws Exception {
- final TableName sourceTable = TableName.valueOf(name.getMethodName());
+ public void testCellCounteOutOfTimeRange(TestInfo testInfo) throws Exception {
+ final TableName sourceTable = TableName.valueOf(testInfo.getTestMethod().get().getName());
byte[][] families = { FAMILY_A, FAMILY_B };
try (Table t = UTIL.createTable(sourceTable, families)) {
Put p = new Put(ROW1);
@@ -307,8 +299,8 @@ public void testCellCounterMain() throws Exception {
* Test CellCounter for complete table all data should print to output
*/
@Test
- public void testCellCounterForCompleteTable() throws Exception {
- final TableName sourceTable = TableName.valueOf(name.getMethodName());
+ public void testCellCounterForCompleteTable(TestInfo testInfo) throws Exception {
+ final TableName sourceTable = TableName.valueOf(testInfo.getTestMethod().get().getName());
String outputPath = OUTPUT_DIR + sourceTable;
LocalFileSystem localFileSystem = new LocalFileSystem();
Path outputDir = new Path(outputPath).makeQualified(localFileSystem.getUri(),
@@ -360,7 +352,7 @@ public void testCellCounterForCompleteTable() throws Exception {
@Test
public void TestCellCounterWithoutOutputDir() throws Exception {
String[] args = new String[] { "tableName" };
- assertEquals("CellCounter should exit with -1 as output directory is not specified.", -1,
- ToolRunner.run(HBaseConfiguration.create(), new CellCounter(), args));
+    assertEquals(-1, ToolRunner.run(HBaseConfiguration.create(), new CellCounter(), args),
+      "CellCounter should exit with -1 as output directory is not specified.");
}
}
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestConfigurePartitioner.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestConfigurePartitioner.java
index 49c08a463abe..f2e51a8a73bb 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestConfigurePartitioner.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestConfigurePartitioner.java
@@ -17,8 +17,8 @@
*/
package org.apache.hadoop.hbase.mapreduce;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.verify;
import java.io.IOException;
@@ -28,7 +28,6 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.testclassification.MapReduceTests;
@@ -36,32 +35,28 @@
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.partition.TotalOrderPartitioner;
import org.apache.hadoop.security.UserGroupInformation;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-@Category({ MapReduceTests.class, MediumTests.class })
+@Tag(MapReduceTests.TAG)
+@Tag(MediumTests.TAG)
public class TestConfigurePartitioner {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestConfigurePartitioner.class);
-
private static final Logger LOG = LoggerFactory.getLogger(TestConfigurePartitioner.class);
private static final HBaseTestingUtil UTIL = new HBaseTestingUtil();
- @Before
+ @BeforeEach
public void setUp() throws Exception {
UTIL.startMiniDFSCluster(1);
}
- @After
+ @AfterEach
public void tearDown() throws IOException {
UTIL.shutdownMiniDFSCluster();
}
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java
index 5c3e9b65079d..b07fd6c0e95d 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java
@@ -17,18 +17,17 @@
*/
package org.apache.hadoop.hbase.mapreduce;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertThrows;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.util.HashMap;
import java.util.Map;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
@@ -45,35 +44,27 @@
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.LauncherSecurityManager;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.rules.TestName;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestInfo;
/**
* Basic test for the CopyTable M/R tool
*/
-@Category({ MapReduceTests.class, LargeTests.class })
+@Tag(MapReduceTests.TAG)
+@Tag(LargeTests.TAG)
public class TestCopyTable extends CopyTableTestBase {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestCopyTable.class);
-
private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil();
- @Rule
- public TestName name = new TestName();
-
- @BeforeClass
+ @BeforeAll
public static void beforeClass() throws Exception {
TEST_UTIL.startMiniCluster(3);
}
- @AfterClass
+ @AfterAll
public static void afterClass() throws Exception {
TEST_UTIL.shutdownMiniCluster();
}
@@ -107,45 +98,45 @@ protected String[] getPeerClusterOptions() throws Exception {
* Simple end-to-end test
*/
@Test
- public void testCopyTable() throws Exception {
- doCopyTableTest(TEST_UTIL.getConfiguration(), false);
+ public void testCopyTable(TestInfo testInfo) throws Exception {
+ doCopyTableTest(TEST_UTIL.getConfiguration(), false, testInfo);
}
/**
* Simple end-to-end test with bulkload.
*/
@Test
- public void testCopyTableWithBulkload() throws Exception {
- doCopyTableTest(TEST_UTIL.getConfiguration(), true);
+ public void testCopyTableWithBulkload(TestInfo testInfo) throws Exception {
+ doCopyTableTest(TEST_UTIL.getConfiguration(), true, testInfo);
}
/**
* Simple end-to-end test on table with MOB
*/
@Test
- public void testCopyTableWithMob() throws Exception {
- doCopyTableTestWithMob(TEST_UTIL.getConfiguration(), false);
+ public void testCopyTableWithMob(TestInfo testInfo) throws Exception {
+ doCopyTableTestWithMob(TEST_UTIL.getConfiguration(), false, testInfo);
}
/**
* Simple end-to-end test with bulkload on table with MOB.
*/
@Test
- public void testCopyTableWithBulkloadWithMob() throws Exception {
- doCopyTableTestWithMob(TEST_UTIL.getConfiguration(), true);
+ public void testCopyTableWithBulkloadWithMob(TestInfo testInfo) throws Exception {
+ doCopyTableTestWithMob(TEST_UTIL.getConfiguration(), true, testInfo);
}
@Test
- public void testStartStopRow() throws Exception {
- testStartStopRow(TEST_UTIL.getConfiguration());
+ public void testStartStopRow(TestInfo testInfo) throws Exception {
+ testStartStopRow(TEST_UTIL.getConfiguration(), testInfo);
}
/**
* Test copy of table from sourceTable to targetTable all rows from family a
*/
@Test
- public void testRenameFamily() throws Exception {
- testRenameFamily(TEST_UTIL.getConfiguration());
+ public void testRenameFamily(TestInfo testInfo) throws Exception {
+ testRenameFamily(TEST_UTIL.getConfiguration(), testInfo);
}
/**
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTableToPeerClusterWithClusterKey.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTableToPeerClusterWithClusterKey.java
index 6ff9afda5357..eb5d4549831b 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTableToPeerClusterWithClusterKey.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTableToPeerClusterWithClusterKey.java
@@ -20,16 +20,11 @@
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.MapReduceTests;
-import org.junit.ClassRule;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
-@Category({ MapReduceTests.class, LargeTests.class })
+@Tag(MapReduceTests.TAG)
+@Tag(LargeTests.TAG)
public class TestCopyTableToPeerClusterWithClusterKey extends CopyTableToPeerClusterTestBase {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestCopyTableToPeerClusterWithClusterKey.class);
-
@Override
protected String[] getPeerClusterOptions() throws Exception {
return new String[] { "--peer.adr=" + UTIL2.getClusterKey() };
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTableToPeerClusterWithRpcUri.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTableToPeerClusterWithRpcUri.java
index 4e6293712ec2..11e8755077a8 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTableToPeerClusterWithRpcUri.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTableToPeerClusterWithRpcUri.java
@@ -17,22 +17,16 @@
*/
package org.apache.hadoop.hbase.mapreduce;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.MapReduceTests;
-import org.junit.ClassRule;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
-@Category({ MapReduceTests.class, LargeTests.class })
+@Tag(MapReduceTests.TAG)
+@Tag(LargeTests.TAG)
public class TestCopyTableToPeerClusterWithRpcUri extends CopyTableToPeerClusterTestBase {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestCopyTableToPeerClusterWithRpcUri.class);
-
@Override
protected String[] getPeerClusterOptions() throws Exception {
return new String[] { "--peer.uri=" + UTIL2.getZkConnectionURI() };
}
-
}
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTableToPeerClusterWithZkUri.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTableToPeerClusterWithZkUri.java
index 720c367eb739..7a7968601137 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTableToPeerClusterWithZkUri.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTableToPeerClusterWithZkUri.java
@@ -22,14 +22,10 @@
import org.apache.hadoop.hbase.testclassification.MapReduceTests;
-import org.junit.ClassRule;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
-@Category({ MapReduceTests.class, LargeTests.class })
+@Tag(MapReduceTests.TAG)
+@Tag(LargeTests.TAG)
public class TestCopyTableToPeerClusterWithZkUri extends CopyTableToPeerClusterTestBase {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestCopyTableToPeerClusterWithZkUri.class);
-
@Override
protected String[] getPeerClusterOptions() throws Exception {
return new String[] { "--peer.uri=" + UTIL2.getRpcConnnectionURI() };
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestGroupingTableMapper.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestGroupingTableMapper.java
index 34d197be02fa..9a59a7ada484 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestGroupingTableMapper.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestGroupingTableMapper.java
@@ -31,17 +31,13 @@
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.mapreduce.Mapper;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
-@Category({ MapReduceTests.class, SmallTests.class })
+@Tag(MapReduceTests.TAG)
+@Tag(SmallTests.TAG)
public class TestGroupingTableMapper {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestGroupingTableMapper.class);
-
/**
* Test GroupingTableMapper class
*/
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHBaseMRTestingUtility.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHBaseMRTestingUtility.java
index 37dd817f94a3..5086d2badabc 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHBaseMRTestingUtility.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHBaseMRTestingUtility.java
@@ -17,25 +17,21 @@
*/
package org.apache.hadoop.hbase.mapreduce;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.MapReduceTests;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
-@Category({ MapReduceTests.class, LargeTests.class })
+@Tag(MapReduceTests.TAG)
+@Tag(LargeTests.TAG)
public class TestHBaseMRTestingUtility {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestHBaseMRTestingUtility.class);
@Test
public void testMRYarnConfigsPopulation() throws IOException {
@@ -55,20 +51,18 @@ public void testMRYarnConfigsPopulation() throws IOException {
}
for (Map.Entry entry : dummyProps.entrySet()) {
- assertTrue(
+ assertTrue(hbt.getConfiguration().get(entry.getKey()).equals(entry.getValue()),
"The Configuration for key " + entry.getKey() + " and value: " + entry.getValue()
- + " is not populated correctly",
- hbt.getConfiguration().get(entry.getKey()).equals(entry.getValue()));
+ + " is not populated correctly");
}
hbt.startMiniMapReduceCluster();
// Confirm that MiniMapReduceCluster overwrites the mr properties and updates the Configuration
for (Map.Entry entry : dummyProps.entrySet()) {
- assertFalse(
+ assertFalse(hbt.getConfiguration().get(entry.getKey()).equals(entry.getValue()),
"The MR prop: " + entry.getValue() + " is not overwritten when map reduce mini"
- + "cluster is started",
- hbt.getConfiguration().get(entry.getKey()).equals(entry.getValue()));
+ + "cluster is started");
}
hbt.shutdownMiniMapReduceCluster();
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
index 37096e408a74..6cc2a48817ce 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
@@ -18,12 +18,12 @@
package org.apache.hadoop.hbase.mapreduce;
import static org.apache.hadoop.hbase.regionserver.HStoreFile.BLOOM_FILTER_TYPE_KEY;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNotSame;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNotSame;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
import java.io.IOException;
import java.lang.reflect.Field;
@@ -108,10 +108,9 @@
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
-import org.junit.ClassRule;
-import org.junit.Ignore;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -121,13 +120,10 @@
* output. Creates a few inner classes to implement splits and an inputformat that emits keys and
* values.
*/
-@Category({ VerySlowMapReduceTests.class, LargeTests.class })
+@Tag(VerySlowMapReduceTests.TAG)
+@Tag(LargeTests.TAG)
public class TestHFileOutputFormat2 extends HFileOutputFormat2TestBase {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestHFileOutputFormat2.class);
-
private static final Logger LOG = LoggerFactory.getLogger(TestHFileOutputFormat2.class);
/**
@@ -135,7 +130,7 @@ public class TestHFileOutputFormat2 extends HFileOutputFormat2TestBase {
* timestamp is {@link HConstants#LATEST_TIMESTAMP}.
* @see HBASE-2615
*/
- @Ignore("Goes zombie too frequently; needs work. See HBASE-14563")
+ @Disabled("Goes zombie too frequently; needs work. See HBASE-14563")
@Test
public void test_LATEST_TIMESTAMP_isReplaced() throws Exception {
Configuration conf = new Configuration(this.UTIL.getConfiguration());
@@ -185,7 +180,7 @@ private TaskAttemptContext createTestTaskAttemptContext(final Job job) throws Ex
* Test that {@link HFileOutputFormat2} creates an HFile with TIMERANGE metadata used by
* time-restricted scans.
*/
- @Ignore("Goes zombie too frequently; needs work. See HBASE-14563")
+ @Disabled("Goes zombie too frequently; needs work. See HBASE-14563")
@Test
public void test_TIMERANGE() throws Exception {
Configuration conf = new Configuration(this.UTIL.getConfiguration());
@@ -249,7 +244,7 @@ public void test_TIMERANGE() throws Exception {
/**
* Run small MR job.
*/
- @Ignore("Goes zombie too frequently; needs work. See HBASE-14563")
+ @Disabled("Goes zombie too frequently; needs work. See HBASE-14563")
@Test
public void testWritingPEData() throws Exception {
Configuration conf = UTIL.getConfiguration();
@@ -302,10 +297,10 @@ public void testWritingPEData() throws Exception {
kvCount += reader.getEntries();
scanner.seekTo();
long perKVSize = scanner.getCell().getSerializedSize();
- assertTrue("Data size of each file should not be too large.",
- perKVSize * reader.getEntries() <= hregionMaxFilesize);
+ assertTrue(perKVSize * reader.getEntries() <= hregionMaxFilesize,
+ "Data size of each file should not be too large.");
}
- assertEquals("Should write expected data in output file.", ROWSPERSPLIT, kvCount);
+ assertEquals(ROWSPERSPLIT, kvCount, "Should write expected data in output file.");
}
}
@@ -358,7 +353,7 @@ public void test_WritingTagData() throws Exception {
}
}
- @Ignore("Goes zombie too frequently; needs work. See HBASE-14563")
+ @Disabled("Goes zombie too frequently; needs work. See HBASE-14563")
@Test
public void testJobConfiguration() throws Exception {
Configuration conf = new Configuration(this.UTIL.getConfiguration());
@@ -371,14 +366,14 @@ public void testJobConfiguration() throws Exception {
setupMockStartKeys(regionLocator);
setupMockTableName(regionLocator);
HFileOutputFormat2.configureIncrementalLoad(job, table.getDescriptor(), regionLocator);
- assertEquals(job.getNumReduceTasks(), 4);
+ assertEquals(4, job.getNumReduceTasks());
}
/**
* Test for {@link HFileOutputFormat2#createFamilyCompressionMap(Configuration)}. Tests that the
* family compression map is correctly serialized into and deserialized from configuration
*/
- @Ignore("Goes zombie too frequently; needs work. See HBASE-14563")
+ @Disabled("Goes zombie too frequently; needs work. See HBASE-14563")
@Test
public void testSerializeDeserializeFamilyCompressionMap() throws IOException {
for (int numCfs = 0; numCfs <= 3; numCfs++) {
@@ -398,8 +393,9 @@ public void testSerializeDeserializeFamilyCompressionMap() throws IOException {
// test that we have a value for all column families that matches with the
// used mock values
for (Entry entry : familyToCompression.entrySet()) {
- assertEquals("Compression configuration incorrect for column family:" + entry.getKey(),
- entry.getValue(), retrievedFamilyToCompressionMap.get(Bytes.toBytes(entry.getKey())));
+ assertEquals(entry.getValue(),
+ retrievedFamilyToCompressionMap.get(Bytes.toBytes(entry.getKey())),
+ "Compression configuration incorrect for column family:" + entry.getKey());
}
}
}
@@ -444,7 +440,7 @@ private Map getMockColumnFamiliesForCompression(i
* Test for {@link HFileOutputFormat2#createFamilyBloomTypeMap(Configuration)}. Tests that the
* family bloom type map is correctly serialized into and deserialized from configuration
*/
- @Ignore("Goes zombie too frequently; needs work. See HBASE-14563")
+ @Disabled("Goes zombie too frequently; needs work. See HBASE-14563")
@Test
public void testSerializeDeserializeFamilyBloomTypeMap() throws IOException {
for (int numCfs = 0; numCfs <= 2; numCfs++) {
@@ -464,8 +460,9 @@ public void testSerializeDeserializeFamilyBloomTypeMap() throws IOException {
// test that we have a value for all column families that matches with the
// used mock values
for (Entry entry : familyToBloomType.entrySet()) {
- assertEquals("BloomType configuration incorrect for column family:" + entry.getKey(),
- entry.getValue(), retrievedFamilyToBloomTypeMap.get(Bytes.toBytes(entry.getKey())));
+ assertEquals(entry.getValue(),
+ retrievedFamilyToBloomTypeMap.get(Bytes.toBytes(entry.getKey())),
+ "BloomType configuration incorrect for column family:" + entry.getKey());
}
}
}
@@ -505,7 +502,7 @@ private Map getMockColumnFamiliesForBloomType(int numCfs) {
* Test for {@link HFileOutputFormat2#createFamilyBlockSizeMap(Configuration)}. Tests that the
* family block size map is correctly serialized into and deserialized from configuration
*/
- @Ignore("Goes zombie too frequently; needs work. See HBASE-14563")
+ @Disabled("Goes zombie too frequently; needs work. See HBASE-14563")
@Test
public void testSerializeDeserializeFamilyBlockSizeMap() throws IOException {
for (int numCfs = 0; numCfs <= 3; numCfs++) {
@@ -525,8 +522,9 @@ public void testSerializeDeserializeFamilyBlockSizeMap() throws IOException {
// test that we have a value for all column families that matches with the
// used mock values
for (Entry entry : familyToBlockSize.entrySet()) {
- assertEquals("BlockSize configuration incorrect for column family:" + entry.getKey(),
- entry.getValue(), retrievedFamilyToBlockSizeMap.get(Bytes.toBytes(entry.getKey())));
+ assertEquals(entry.getValue(),
+ retrievedFamilyToBlockSizeMap.get(Bytes.toBytes(entry.getKey())),
+ "BlockSize configuration incorrect for column family:" + entry.getKey());
}
}
}
@@ -570,7 +568,7 @@ private Map getMockColumnFamiliesForBlockSize(int numCfs) {
* the family data block encoding map is correctly serialized into and deserialized from
* configuration
*/
- @Ignore("Goes zombie too frequently; needs work. See HBASE-14563")
+ @Disabled("Goes zombie too frequently; needs work. See HBASE-14563")
@Test
public void testSerializeDeserializeFamilyDataBlockEncodingMap() throws IOException {
for (int numCfs = 0; numCfs <= 3; numCfs++) {
@@ -592,10 +590,9 @@ public void testSerializeDeserializeFamilyDataBlockEncodingMap() throws IOExcept
// test that we have a value for all column families that matches with the
// used mock values
for (Entry entry : familyToDataBlockEncoding.entrySet()) {
- assertEquals(
- "DataBlockEncoding configuration incorrect for column family:" + entry.getKey(),
- entry.getValue(),
- retrievedFamilyToDataBlockEncodingMap.get(Bytes.toBytes(entry.getKey())));
+ assertEquals(entry.getValue(),
+ retrievedFamilyToDataBlockEncodingMap.get(Bytes.toBytes(entry.getKey())),
+ "DataBlockEncoding configuration incorrect for column family:" + entry.getKey());
}
}
}
@@ -650,7 +647,7 @@ private void setupMockTableName(RegionLocator table) throws IOException {
* Test that {@link HFileOutputFormat2} RecordWriter uses compression and bloom filter settings
* from the column family descriptor
*/
- @Ignore("Goes zombie too frequently; needs work. See HBASE-14563")
+ @Disabled("Goes zombie too frequently; needs work. See HBASE-14563")
@Test
public void testColumnFamilySettings() throws Exception {
Configuration conf = new Configuration(this.UTIL.getConfiguration());
@@ -715,12 +712,10 @@ public void testColumnFamilySettings() throws Exception {
byte[] bloomFilter = fileInfo.get(BLOOM_FILTER_TYPE_KEY);
if (bloomFilter == null) bloomFilter = Bytes.toBytes("NONE");
- assertEquals(
- "Incorrect bloom filter used for column family " + familyStr + "(reader: " + reader + ")",
- hcd.getBloomFilterType(), BloomType.valueOf(Bytes.toString(bloomFilter)));
- assertEquals(
- "Incorrect compression used for column family " + familyStr + "(reader: " + reader + ")",
- hcd.getCompressionType(), reader.getFileContext().getCompression());
+ assertEquals(hcd.getBloomFilterType(), BloomType.valueOf(Bytes.toString(bloomFilter)),
+ "Incorrect bloom filter used for column family " + familyStr + "(reader: " + reader + ")");
+ assertEquals(hcd.getCompressionType(), reader.getFileContext().getCompression(),
+ "Incorrect compression used for column family " + familyStr + "(reader: " + reader + ")");
}
} finally {
dir.getFileSystem(conf).delete(dir, true);
@@ -757,7 +752,7 @@ private void writeRandomKeyValues(RecordWriter wri
* excluded from minor compaction. Without the fix of HBASE-6901, an
* ArrayIndexOutOfBoundsException will be thrown.
*/
- @Ignore("Flakey: See HBASE-9051")
+ @Disabled("Flakey: See HBASE-9051")
@Test
public void testExcludeAllFromMinorCompaction() throws Exception {
Configuration conf = UTIL.getConfiguration();
@@ -769,7 +764,7 @@ public void testExcludeAllFromMinorCompaction() throws Exception {
Table table = UTIL.createTable(TABLE_NAMES[0], FAMILIES);
RegionLocator locator = conn.getRegionLocator(TABLE_NAMES[0])) {
final FileSystem fs = UTIL.getDFSCluster().getFileSystem();
- assertEquals("Should start with empty table", 0, UTIL.countRows(table));
+ assertEquals(0, UTIL.countRows(table), "Should start with empty table");
// deep inspection: get the StoreFile dir
final Path storePath =
@@ -793,8 +788,8 @@ public void testExcludeAllFromMinorCompaction() throws Exception {
// Ensure data shows up
int expectedRows = 2 * NMapInputFormat.getNumMapTasks(conf) * ROWSPERSPLIT;
- assertEquals("BulkLoadHFiles should put expected data in table", expectedRows,
- UTIL.countRows(table));
+ assertEquals(expectedRows, UTIL.countRows(table),
+ "BulkLoadHFiles should put expected data in table");
// should have a second StoreFile now
assertEquals(2, fs.listStatus(storePath).length);
@@ -839,7 +834,7 @@ public Boolean call() throws Exception {
}
}
- @Ignore("Goes zombie too frequently; needs work. See HBASE-14563")
+ @Disabled("Goes zombie too frequently; needs work. See HBASE-14563")
@Test
public void testExcludeMinorCompaction() throws Exception {
Configuration conf = UTIL.getConfiguration();
@@ -852,7 +847,7 @@ public void testExcludeMinorCompaction() throws Exception {
Path testDir = UTIL.getDataTestDirOnTestFS("testExcludeMinorCompaction");
final FileSystem fs = UTIL.getDFSCluster().getFileSystem();
Table table = UTIL.createTable(TABLE_NAMES[0], FAMILIES);
- assertEquals("Should start with empty table", 0, UTIL.countRows(table));
+ assertEquals(0, UTIL.countRows(table), "Should start with empty table");
// deep inspection: get the StoreFile dir
final Path storePath =
@@ -887,8 +882,8 @@ public Boolean call() throws Exception {
// Ensure data shows up
int expectedRows = NMapInputFormat.getNumMapTasks(conf) * ROWSPERSPLIT;
- assertEquals("BulkLoadHFiles should put expected data in table", expectedRows + 1,
- UTIL.countRows(table));
+ assertEquals(expectedRows + 1, UTIL.countRows(table),
+ "BulkLoadHFiles should put expected data in table");
// should have a second StoreFile now
assertEquals(2, fs.listStatus(storePath).length);
@@ -1075,8 +1070,8 @@ public void TestConfigureCompression() throws Exception {
LocatedFileStatus keyFileStatus = iterator.next();
HFile.Reader reader =
HFile.createReader(fs, keyFileStatus.getPath(), new CacheConfig(conf), true, conf);
- assertEquals(reader.getTrailer().getCompressionCodec().getName(),
- hfileoutputformatCompression);
+ assertEquals(hfileoutputformatCompression,
+ reader.getTrailer().getCompressionCodec().getName());
}
} finally {
if (writer != null && context != null) {
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2WithSecurity.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2WithSecurity.java
index ac767f23775c..7dd42c522e68 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2WithSecurity.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2WithSecurity.java
@@ -18,8 +18,8 @@
package org.apache.hadoop.hbase.mapreduce;
import static org.apache.hadoop.security.UserGroupInformation.loginUserFromKeytab;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.Closeable;
import java.io.File;
@@ -42,20 +42,17 @@
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
/**
* Tests for {@link HFileOutputFormat2} with secure mode.
*/
-@Category({ VerySlowMapReduceTests.class, LargeTests.class })
+@Tag(VerySlowMapReduceTests.TAG)
+@Tag(LargeTests.TAG)
public class TestHFileOutputFormat2WithSecurity extends HFileOutputFormat2TestBase {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestHFileOutputFormat2WithSecurity.class);
private static final byte[] FAMILIES = Bytes.toBytes("test_cf");
@@ -71,7 +68,7 @@ public class TestHFileOutputFormat2WithSecurity extends HFileOutputFormat2TestBa
private List clusters = new ArrayList<>();
- @Before
+ @BeforeEach
public void setupSecurityClusters() throws Exception {
utilA = new HBaseTestingUtil();
confA = utilA.getConfiguration();
@@ -93,7 +90,7 @@ public void setupSecurityClusters() throws Exception {
clusters.add(utilB.startSecureMiniCluster(kdc, userPrincipal, HTTP_PRINCIPAL));
}
- @After
+ @AfterEach
public void teardownSecurityClusters() {
IOUtils.closeQuietly(clusters);
clusters.clear();
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHRegionPartitioner.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHRegionPartitioner.java
index 9cffb4089bd7..813758bdca92 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHRegionPartitioner.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHRegionPartitioner.java
@@ -17,42 +17,33 @@
*/
package org.apache.hadoop.hbase.mapreduce;
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.testclassification.MapReduceTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.rules.TestName;
-
-@Category({ MapReduceTests.class, MediumTests.class })
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestInfo;
+
+@Tag(MapReduceTests.TAG)
+@Tag(MediumTests.TAG)
public class TestHRegionPartitioner {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestHRegionPartitioner.class);
-
private static final HBaseTestingUtil UTIL = new HBaseTestingUtil();
- @Rule
- public TestName name = new TestName();
-
- @BeforeClass
+ @BeforeAll
public static void beforeClass() throws Exception {
UTIL.startMiniCluster();
}
- @AfterClass
+ @AfterAll
public static void afterClass() throws Exception {
UTIL.shutdownMiniCluster();
}
@@ -61,16 +52,17 @@ public static void afterClass() throws Exception {
* Test HRegionPartitioner
*/
@Test
- public void testHRegionPartitioner() throws Exception {
+ public void testHRegionPartitioner(TestInfo testInfo) throws Exception {
byte[][] families = { Bytes.toBytes("familyA"), Bytes.toBytes("familyB") };
- UTIL.createTable(TableName.valueOf(name.getMethodName()), families, 1, Bytes.toBytes("aa"),
+ String tableName = testInfo.getTestMethod().get().getName();
+ UTIL.createTable(TableName.valueOf(tableName), families, 1, Bytes.toBytes("aa"),
Bytes.toBytes("cc"), 3);
HRegionPartitioner partitioner = new HRegionPartitioner<>();
Configuration configuration = UTIL.getConfiguration();
- configuration.set(TableOutputFormat.OUTPUT_TABLE, name.getMethodName());
+ configuration.set(TableOutputFormat.OUTPUT_TABLE, tableName);
partitioner.setConf(configuration);
ImmutableBytesWritable writable = new ImmutableBytesWritable(Bytes.toBytes("bb"));
@@ -79,10 +71,11 @@ public void testHRegionPartitioner() throws Exception {
}
@Test
- public void testHRegionPartitionerMoreRegions() throws Exception {
+ public void testHRegionPartitionerMoreRegions(TestInfo testInfo) throws Exception {
byte[][] families = { Bytes.toBytes("familyA"), Bytes.toBytes("familyB") };
- TableName tableName = TableName.valueOf(name.getMethodName());
+ String tableNameStr = testInfo.getTestMethod().get().getName();
+ TableName tableName = TableName.valueOf(tableNameStr);
UTIL.createTable(tableName, families, 1, Bytes.toBytes("aa"), Bytes.toBytes("cc"), 5);
Configuration configuration = UTIL.getConfiguration();
@@ -90,7 +83,7 @@ public void testHRegionPartitionerMoreRegions() throws Exception {
assertEquals(5, numberOfRegions);
HRegionPartitioner partitioner = new HRegionPartitioner<>();
- configuration.set(TableOutputFormat.OUTPUT_TABLE, name.getMethodName());
+ configuration.set(TableOutputFormat.OUTPUT_TABLE, tableNameStr);
partitioner.setConf(configuration);
// Get some rowKey for the lastRegion
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHashTable.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHashTable.java
index 05736f939e13..ec4d3ce3f02a 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHashTable.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHashTable.java
@@ -17,14 +17,14 @@
*/
package org.apache.hadoop.hbase.mapreduce;
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.fail;
import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Put;
@@ -33,14 +33,11 @@
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.MapFile;
-import org.junit.AfterClass;
-import org.junit.Assert;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.rules.TestName;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -50,32 +47,26 @@
/**
* Basic test for the HashTable M/R tool
*/
-@Category(LargeTests.class)
+@Tag(LargeTests.TAG)
public class TestHashTable {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestHashTable.class);
private static final Logger LOG = LoggerFactory.getLogger(TestHashTable.class);
private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil();
- @Rule
- public TestName name = new TestName();
-
- @BeforeClass
+ @BeforeAll
public static void beforeClass() throws Exception {
TEST_UTIL.startMiniCluster(3);
}
- @AfterClass
+ @AfterAll
public static void afterClass() throws Exception {
TEST_UTIL.shutdownMiniCluster();
}
@Test
- public void testHashTable() throws Exception {
- final TableName tableName = TableName.valueOf(name.getMethodName());
+ public void testHashTable(TestInfo testInfo) throws Exception {
+ final TableName tableName = TableName.valueOf(testInfo.getTestMethod().get().getName());
final byte[] family = Bytes.toBytes("family");
final byte[] column1 = Bytes.toBytes("c1");
final byte[] column2 = Bytes.toBytes("c2");
@@ -110,7 +101,7 @@ public void testHashTable() throws Exception {
int code =
hashTable.run(new String[] { "--batchsize=" + batchSize, "--numhashfiles=" + numHashFiles,
"--scanbatch=2", tableName.getNameAsString(), testDir.toString() });
- assertEquals("test job failed", 0, code);
+ assertEquals(0, code, "test job failed");
FileSystem fs = TEST_UTIL.getTestFileSystem();
@@ -165,7 +156,7 @@ ImmutableMap. builder()
intKey = Bytes.toInt(key.get(), key.getOffset(), key.getLength());
}
if (actualHashes.containsKey(intKey)) {
- Assert.fail("duplicate key in data files: " + intKey);
+ fail("duplicate key in data files: " + intKey);
}
actualHashes.put(intKey, new ImmutableBytesWritable(hash.copyBytes()));
}
@@ -185,7 +176,7 @@ ImmutableMap. builder()
if (!expectedHashes.equals(actualHashes)) {
LOG.error("Diff: " + Maps.difference(expectedHashes, actualHashes));
}
- Assert.assertEquals(expectedHashes, actualHashes);
+ assertEquals(expectedHashes, actualHashes);
TEST_UTIL.deleteTable(tableName);
TEST_UTIL.cleanupDataTestDirOnTestFS();
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
index af0749dafc6d..e005e6750040 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
@@ -19,10 +19,10 @@
import static org.apache.hadoop.hbase.HConstants.RPC_CODEC_CONF_KEY;
import static org.apache.hadoop.hbase.ipc.RpcClient.DEFAULT_CODEC_CLASS;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
@@ -45,7 +45,6 @@
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.ExtendedCell;
import org.apache.hadoop.hbase.ExtendedCellScanner;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeepDeletedCells;
@@ -94,16 +93,12 @@
import org.apache.hadoop.mapreduce.Mapper.Context;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.util.ToolRunner;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.rules.TestName;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestInfo;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.slf4j.Logger;
@@ -112,13 +107,10 @@
/**
* Tests the table import and table export MR job functionality
*/
-@Category({ VerySlowMapReduceTests.class, LargeTests.class })
+@org.junit.jupiter.api.Tag(VerySlowMapReduceTests.TAG)
+@org.junit.jupiter.api.Tag(LargeTests.TAG)
public class TestImportExport {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestImportExport.class);
-
private static final Logger LOG = LoggerFactory.getLogger(TestImportExport.class);
protected static final HBaseTestingUtil UTIL = new HBaseTestingUtil();
private static final byte[] ROW1 = Bytes.toBytesBinary("\\x32row1");
@@ -140,7 +132,7 @@ public class TestImportExport {
public static final String TEST_ATTR = "source_op";
public static final String TEST_TAG = "test_tag";
- @BeforeClass
+ @BeforeAll
public static void beforeClass() throws Throwable {
// Up the handlers; this test needs more than usual.
UTIL.getConfiguration().setInt(HConstants.REGION_SERVER_HIGH_PRIORITY_HANDLER_COUNT, 10);
@@ -149,20 +141,17 @@ public static void beforeClass() throws Throwable {
new Path(OUTPUT_DIR).makeQualified(FileSystem.get(UTIL.getConfiguration())).toString();
}
- @AfterClass
+ @AfterAll
public static void afterClass() throws Throwable {
UTIL.shutdownMiniCluster();
}
- @Rule
- public final TestName name = new TestName();
-
- @Before
- public void announce() {
- LOG.info("Running " + name.getMethodName());
+ @BeforeEach
+ public void announce(TestInfo testInfo) {
+ LOG.info("Running " + testInfo.getTestMethod().get().getName());
}
- @After
+ @AfterEach
public void cleanup() throws Throwable {
FileSystem fs = FileSystem.get(UTIL.getConfiguration());
fs.delete(new Path(OUTPUT_DIR), true);
@@ -202,8 +191,9 @@ boolean runImport(String[] args) throws Throwable {
* Test simple replication case with column mapping
*/
@Test
- public void testSimpleCase() throws Throwable {
- try (Table t = UTIL.createTable(TableName.valueOf(name.getMethodName()), FAMILYA, 3)) {
+ public void testSimpleCase(TestInfo testInfo) throws Throwable {
+ String tableName = testInfo.getTestMethod().get().getName();
+ try (Table t = UTIL.createTable(TableName.valueOf(tableName), FAMILYA, 3)) {
Put p = new Put(ROW1);
p.addColumn(FAMILYA, QUAL, now, QUAL);
p.addColumn(FAMILYA, QUAL, now + 1, QUAL);
@@ -224,12 +214,12 @@ public void testSimpleCase() throws Throwable {
String[] args = new String[] {
// Only export row1 & row2.
"-D" + TableInputFormat.SCAN_ROW_START + "=\\x32row1",
- "-D" + TableInputFormat.SCAN_ROW_STOP + "=\\x32row3", name.getMethodName(), FQ_OUTPUT_DIR,
+ "-D" + TableInputFormat.SCAN_ROW_STOP + "=\\x32row3", tableName, FQ_OUTPUT_DIR,
"1000", // max number of key versions per key to export
};
assertTrue(runExport(args));
- final String IMPORT_TABLE = name.getMethodName() + "import";
+ final String IMPORT_TABLE = tableName + "import";
try (Table t = UTIL.createTable(TableName.valueOf(IMPORT_TABLE), FAMILYB, 3)) {
args =
new String[] { "-D" + Import.CF_RENAME_PROP + "=" + FAMILYA_STRING + ":" + FAMILYB_STRING,
@@ -298,9 +288,10 @@ public void testImport94Table() throws Throwable {
* Test export scanner batching
*/
@Test
- public void testExportScannerBatching() throws Throwable {
+ public void testExportScannerBatching(TestInfo testInfo) throws Throwable {
+ String tableName = testInfo.getTestMethod().get().getName();
TableDescriptor desc = TableDescriptorBuilder
- .newBuilder(TableName.valueOf(name.getMethodName()))
+ .newBuilder(TableName.valueOf(tableName))
.setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA).setMaxVersions(1).build())
.build();
UTIL.getAdmin().createTable(desc);
@@ -314,7 +305,7 @@ public void testExportScannerBatching() throws Throwable {
t.put(p);
// added scanner batching arg.
String[] args = new String[] { "-D" + ExportUtils.EXPORT_BATCHING + "=" + EXPORT_BATCH_SIZE,
- name.getMethodName(), FQ_OUTPUT_DIR };
+ tableName, FQ_OUTPUT_DIR };
assertTrue(runExport(args));
FileSystem fs = FileSystem.get(UTIL.getConfiguration());
@@ -323,9 +314,10 @@ public void testExportScannerBatching() throws Throwable {
}
@Test
- public void testWithDeletes() throws Throwable {
+ public void testWithDeletes(TestInfo testInfo) throws Throwable {
+ String tableName = testInfo.getTestMethod().get().getName();
TableDescriptor desc =
- TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName()))
+ TableDescriptorBuilder.newBuilder(TableName.valueOf(tableName))
.setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA).setMaxVersions(5)
.setKeepDeletedCells(KeepDeletedCells.TRUE).build())
.build();
@@ -346,12 +338,12 @@ public void testWithDeletes() throws Throwable {
t.delete(d);
}
- String[] args = new String[] { "-D" + ExportUtils.RAW_SCAN + "=true", name.getMethodName(),
+ String[] args = new String[] { "-D" + ExportUtils.RAW_SCAN + "=true", tableName,
FQ_OUTPUT_DIR, "1000", // max number of key versions per key to export
};
assertTrue(runExport(args));
- final String IMPORT_TABLE = name.getMethodName() + "import";
+ final String IMPORT_TABLE = tableName + "import";
desc = TableDescriptorBuilder
.newBuilder(TableName.valueOf(IMPORT_TABLE)).setColumnFamily(ColumnFamilyDescriptorBuilder
.newBuilder(FAMILYA).setMaxVersions(5).setKeepDeletedCells(KeepDeletedCells.TRUE).build())
@@ -378,10 +370,12 @@ public void testWithDeletes() throws Throwable {
  }
  @Test
-  public void testWithMultipleDeleteFamilyMarkersOfSameRowSameFamily() throws Throwable {
-    final TableName exportTable = TableName.valueOf(name.getMethodName());
+  public void testWithMultipleDeleteFamilyMarkersOfSameRowSameFamily(TestInfo testInfo)
+    throws Throwable {
+ String tableName = testInfo.getTestMethod().get().getName();
+ final TableName exportTable = TableName.valueOf(tableName);
TableDescriptor desc =
- TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName()))
+      TableDescriptorBuilder.newBuilder(exportTable)
.setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA).setMaxVersions(5)
.setKeepDeletedCells(KeepDeletedCells.TRUE).build())
.build();
@@ -413,7 +406,7 @@ public void testWithMultipleDeleteFamilyMarkersOfSameRowSameFamily() throws Thro
};
assertTrue(runExport(args));
- final String importTable = name.getMethodName() + "import";
+ final String importTable = tableName + "import";
desc = TableDescriptorBuilder
.newBuilder(TableName.valueOf(importTable)).setColumnFamily(ColumnFamilyDescriptorBuilder
.newBuilder(FAMILYA).setMaxVersions(5).setKeepDeletedCells(KeepDeletedCells.TRUE).build())
@@ -448,10 +441,11 @@ public void testWithMultipleDeleteFamilyMarkersOfSameRowSameFamily() throws Thro
* attempt with invalid values.
*/
@Test
- public void testWithFilter() throws Throwable {
+ public void testWithFilter(TestInfo testInfo) throws Throwable {
// Create simple table to export
+ String tableName = testInfo.getTestMethod().get().getName();
TableDescriptor desc = TableDescriptorBuilder
- .newBuilder(TableName.valueOf(name.getMethodName()))
+ .newBuilder(TableName.valueOf(tableName))
.setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA).setMaxVersions(5).build())
.build();
UTIL.getAdmin().createTable(desc);
@@ -471,11 +465,11 @@ public void testWithFilter() throws Throwable {
exportTable.put(Arrays.asList(p1, p2));
// Export the simple table
- String[] args = new String[] { name.getMethodName(), FQ_OUTPUT_DIR, "1000" };
+ String[] args = new String[] { tableName, FQ_OUTPUT_DIR, "1000" };
assertTrue(runExport(args));
// Import to a new table
- final String IMPORT_TABLE = name.getMethodName() + "import";
+ final String IMPORT_TABLE = tableName + "import";
desc = TableDescriptorBuilder.newBuilder(TableName.valueOf(IMPORT_TABLE))
.setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA).setMaxVersions(5).build())
.build();
@@ -491,14 +485,14 @@ public void testWithFilter() throws Throwable {
PrefixFilter filter = new PrefixFilter(ROW1);
int count = getCount(exportTable, filter);
- Assert.assertEquals("Unexpected row count between export and import tables", count,
- getCount(importTable, null));
+ assertEquals(count, getCount(importTable, null),
+ "Unexpected row count between export and import tables");
// and then test that a broken command doesn't bork everything - easier here because we don't
// need to re-run the export job
args = new String[] { "-D" + Import.FILTER_CLASS_CONF_KEY + "=" + Filter.class.getName(),
- "-D" + Import.FILTER_ARGS_CONF_KEY + "=" + Bytes.toString(ROW1) + "", name.getMethodName(),
+ "-D" + Import.FILTER_ARGS_CONF_KEY + "=" + Bytes.toString(ROW1) + "", tableName,
FQ_OUTPUT_DIR, "1000" };
assertFalse(runImport(args));
@@ -511,10 +505,11 @@ public void testWithFilter() throws Throwable {
* Create a simple table, run an Export Job on it, Import with bulk output and enable largeResult
*/
@Test
- public void testBulkImportAndLargeResult() throws Throwable {
+ public void testBulkImportAndLargeResult(TestInfo testInfo) throws Throwable {
// Create simple table to export
+ String tableName = testInfo.getTestMethod().get().getName();
TableDescriptor desc = TableDescriptorBuilder
- .newBuilder(TableName.valueOf(name.getMethodName()))
+ .newBuilder(TableName.valueOf(tableName))
.setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA).setMaxVersions(5).build())
.build();
UTIL.getAdmin().createTable(desc);
@@ -530,11 +525,11 @@ public void testBulkImportAndLargeResult() throws Throwable {
exportTable.put(Arrays.asList(p1, p2));
// Export the simple table
- String[] args = new String[] { name.getMethodName(), FQ_OUTPUT_DIR, "1000" };
+ String[] args = new String[] { tableName, FQ_OUTPUT_DIR, "1000" };
assertTrue(runExport(args));
// Import to a new table
- final String IMPORT_TABLE = name.getMethodName() + "import";
+ final String IMPORT_TABLE = tableName + "import";
desc = TableDescriptorBuilder.newBuilder(TableName.valueOf(IMPORT_TABLE))
.setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA).setMaxVersions(5).build())
.build();
@@ -717,9 +712,10 @@ public void testAddFilterAndArguments() throws IOException {
}
@Test
- public void testDurability() throws Throwable {
+ public void testDurability(TestInfo testInfo) throws Throwable {
// Create an export table.
- String exportTableName = name.getMethodName() + "export";
+ String methodName = testInfo.getTestMethod().get().getName();
+ String exportTableName = methodName + "export";
try (Table exportTable = UTIL.createTable(TableName.valueOf(exportTableName), FAMILYA, 3)) {
// Insert some data
Put put = new Put(ROW1);
@@ -739,7 +735,7 @@ public void testDurability() throws Throwable {
assertTrue(runExport(args));
// Create the table for import
- String importTableName = name.getMethodName() + "import1";
+ String importTableName = methodName + "import1";
Table importTable = UTIL.createTable(TableName.valueOf(importTableName), FAMILYA, 3);
// Register the wal listener for the import table
@@ -759,7 +755,7 @@ public void testDurability() throws Throwable {
assertTrue(getCount(importTable, null) == 2);
// Run the import with the default durability option
- importTableName = name.getMethodName() + "import2";
+ importTableName = methodName + "import2";
importTable = UTIL.createTable(TableName.valueOf(importTableName), FAMILYA, 3);
region = UTIL.getHBaseCluster().getRegionServerThreads().get(0).getRegionServer()
.getRegions(importTable.getName()).get(0).getRegionInfo();
@@ -809,8 +805,8 @@ public boolean isWALVisited() {
* @throws Throwable throws Throwable.
*/
@Test
- public void testTagsAddition() throws Throwable {
- final TableName exportTable = TableName.valueOf(name.getMethodName());
+ public void testTagsAddition(TestInfo testInfo) throws Throwable {
+ final TableName exportTable = TableName.valueOf(testInfo.getTestMethod().get().getName());
TableDescriptor desc = TableDescriptorBuilder.newBuilder(exportTable)
.setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA).setMaxVersions(5)
.setKeepDeletedCells(KeepDeletedCells.TRUE).build())
@@ -883,9 +879,9 @@ private void checkWhetherTagExists(TableName table, boolean tagExists) throws IO
List tags = PrivateCellUtil.getTags(cell);
// If tagExists flag is true then validate whether tag contents are as expected.
if (tagExists) {
- Assert.assertEquals(1, tags.size());
+ assertEquals(1, tags.size());
for (Tag tag : tags) {
- Assert.assertEquals(TEST_TAG, Tag.getValueAsString(tag));
+ assertEquals(TEST_TAG, Tag.getValueAsString(tag));
}
} else {
// If tagExists flag is disabled then check for 0 size tags.
@@ -893,7 +889,7 @@ private void checkWhetherTagExists(TableName table, boolean tagExists) throws IO
}
}
}
- Assert.assertTrue(deleteFound);
+ assertTrue(deleteFound);
}
/*
@@ -945,8 +941,8 @@ public void preBatchMutate(ObserverContext extends RegionCoprocessorEnvironmen
* @throws Exception Exception
*/
@Test
- public void testTagsWithEmptyCodec() throws Exception {
- TableName tableName = TableName.valueOf(name.getMethodName());
+ public void testTagsWithEmptyCodec(TestInfo testInfo) throws Exception {
+ TableName tableName = TableName.valueOf(testInfo.getTestMethod().get().getName());
TableDescriptor tableDesc = TableDescriptorBuilder.newBuilder(tableName)
.setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA).setMaxVersions(5)
.setKeepDeletedCells(KeepDeletedCells.TRUE).build())
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithOperationAttributes.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithOperationAttributes.java
index 7b005089732c..778d6fd10294 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithOperationAttributes.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithOperationAttributes.java
@@ -17,8 +17,8 @@
*/
package org.apache.hadoop.hbase.mapreduce;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.IOException;
import java.util.ArrayList;
@@ -32,7 +32,6 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
@@ -55,23 +54,18 @@
import org.apache.hadoop.hbase.wal.WALEdit;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.rules.TestName;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-@Category({ MapReduceTests.class, LargeTests.class })
+@Tag(MapReduceTests.TAG)
+@Tag(LargeTests.TAG)
public class TestImportTSVWithOperationAttributes implements Configurable {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestImportTSVWithOperationAttributes.class);
-
private static final Logger LOG =
LoggerFactory.getLogger(TestImportTSVWithOperationAttributes.class);
protected static final String NAME = TestImportTsv.class.getSimpleName();
@@ -93,9 +87,6 @@ public class TestImportTSVWithOperationAttributes implements Configurable {
private final String FAMILY = "FAM";
- @Rule
- public TestName name = new TestName();
-
@Override
public Configuration getConf() {
return util.getConfiguration();
@@ -106,7 +97,7 @@ public void setConf(Configuration conf) {
throw new IllegalArgumentException("setConf not supported");
}
- @BeforeClass
+ @BeforeAll
public static void provisionCluster() throws Exception {
conf = util.getConfiguration();
conf.set("hbase.coprocessor.master.classes", OperationAttributesTestController.class.getName());
@@ -114,14 +105,15 @@ public static void provisionCluster() throws Exception {
    util.startMiniCluster();
  }
-  @AfterClass
+  @AfterAll
  public static void releaseCluster() throws Exception {
    util.shutdownMiniCluster();
  }
  @Test
-  public void testMROnTable() throws Exception {
-    final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
+  public void testMROnTable(TestInfo testInfo) throws Exception {
+    final TableName tableName =
+      TableName.valueOf(testInfo.getTestMethod().get().getName() + util.getRandomUUID());
// Prepare the arguments required for the test.
String[] args = new String[] {
@@ -136,8 +127,9 @@ public void testMROnTable() throws Exception {
  }
  @Test
-  public void testMROnTableWithInvalidOperationAttr() throws Exception {
-    final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
+  public void testMROnTableWithInvalidOperationAttr(TestInfo testInfo) throws Exception {
+    final TableName tableName =
+      TableName.valueOf(testInfo.getTestMethod().get().getName() + util.getRandomUUID());
// Prepare the arguments required for the test.
String[] args = new String[] {
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithTTLs.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithTTLs.java
index 9ac8f35a91de..b8180c6f292a 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithTTLs.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithTTLs.java
@@ -17,7 +17,7 @@
*/
package org.apache.hadoop.hbase.mapreduce;
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
import java.io.IOException;
import java.util.ArrayList;
@@ -29,7 +29,6 @@
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Durability;
@@ -45,23 +44,18 @@
import org.apache.hadoop.hbase.wal.WALEdit;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.rules.TestName;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-@Category({ MapReduceTests.class, LargeTests.class })
+@Tag(MapReduceTests.TAG)
+@Tag(LargeTests.TAG)
public class TestImportTSVWithTTLs implements Configurable {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestImportTSVWithTTLs.class);
-
protected static final Logger LOG = LoggerFactory.getLogger(TestImportTSVWithTTLs.class);
protected static final String NAME = TestImportTsv.class.getSimpleName();
protected static HBaseTestingUtil util = new HBaseTestingUtil();
@@ -79,9 +73,6 @@ public class TestImportTSVWithTTLs implements Configurable {
private final String FAMILY = "FAM";
private static Configuration conf;
- @Rule
- public TestName name = new TestName();
-
@Override
public Configuration getConf() {
return util.getConfiguration();
@@ -92,7 +83,7 @@ public void setConf(Configuration conf) {
throw new IllegalArgumentException("setConf not supported");
}
- @BeforeClass
+ @BeforeAll
public static void provisionCluster() throws Exception {
conf = util.getConfiguration();
// We don't check persistence in HFiles in this test, but if we ever do we will
@@ -102,14 +93,15 @@ public static void provisionCluster() throws Exception {
    util.startMiniCluster();
  }
-  @AfterClass
+  @AfterAll
  public static void releaseCluster() throws Exception {
    util.shutdownMiniCluster();
  }
  @Test
-  public void testMROnTable() throws Exception {
-    final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
+  public void testMROnTable(TestInfo testInfo) throws Exception {
+    final TableName tableName =
+      TableName.valueOf(testInfo.getTestMethod().get().getName() + util.getRandomUUID());
// Prepare the arguments required for the test.
String[] args = new String[] {
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java
index e15181e9c94d..3aac9835177d 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java
@@ -17,8 +17,8 @@
*/
package org.apache.hadoop.hbase.mapreduce;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.IOException;
import java.security.PrivilegedExceptionAction;
@@ -64,25 +64,20 @@
import org.apache.hadoop.mapred.Utils.OutputFileUtils.OutputFilesFilter;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Rule;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestInfo;
-import org.junit.experimental.categories.Category;
-import org.junit.rules.TestName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse;
-@Category({ MapReduceTests.class, LargeTests.class })
+@Tag(MapReduceTests.TAG)
+@Tag(LargeTests.TAG)
public class TestImportTSVWithVisibilityLabels implements Configurable {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestImportTSVWithVisibilityLabels.class);
-
private static final Logger LOG =
LoggerFactory.getLogger(TestImportTSVWithVisibilityLabels.class);
protected static final String NAME = TestImportTsv.class.getSimpleName();
@@ -107,9 +103,6 @@ public class TestImportTSVWithVisibilityLabels implements Configurable {
private static User SUPERUSER;
private static Configuration conf;
- @Rule
- public TestName name = new TestName();
-
@Override
public Configuration getConf() {
return util.getConfiguration();
@@ -120,7 +113,7 @@ public void setConf(Configuration conf) {
throw new IllegalArgumentException("setConf not supported");
}
- @BeforeClass
+ @BeforeAll
public static void provisionCluster() throws Exception {
conf = util.getConfiguration();
SUPERUSER = User.createUserForTesting(conf, "admin", new String[] { "supergroup" });
@@ -153,14 +146,15 @@ public VisibilityLabelsResponse run() throws Exception {
    SUPERUSER.runAs(action);
  }
-  @AfterClass
+  @AfterAll
  public static void releaseCluster() throws Exception {
    util.shutdownMiniCluster();
  }
  @Test
-  public void testMROnTable() throws Exception {
-    final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
+  public void testMROnTable(TestInfo testInfo) throws Exception {
+    final TableName tableName =
+      TableName.valueOf(testInfo.getTestMethod().get().getName() + util.getRandomUUID());
// Prepare the arguments required for the test.
String[] args = new String[] {
@@ -174,8 +167,9 @@ public void testMROnTable() throws Exception {
  }
  @Test
-  public void testMROnTableWithDeletes() throws Exception {
-    final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
+  public void testMROnTableWithDeletes(TestInfo testInfo) throws Exception {
+    final TableName tableName =
+      TableName.valueOf(testInfo.getTestMethod().get().getName() + util.getRandomUUID());
// Prepare the arguments required for the test.
String[] args = new String[] {
@@ -226,8 +219,9 @@ private void issueDeleteAndVerifyData(TableName tableName) throws IOException {
  }
  @Test
-  public void testMROnTableWithBulkload() throws Exception {
-    final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
+  public void testMROnTableWithBulkload(TestInfo testInfo) throws Exception {
+    final TableName tableName =
+      TableName.valueOf(testInfo.getTestMethod().get().getName() + util.getRandomUUID());
Path hfiles = new Path(util.getDataTestDirOnTestFS(tableName.getNameAsString()), "hfiles");
// Prepare the arguments required for the test.
String[] args = new String[] { "-D" + ImportTsv.BULK_OUTPUT_CONF_KEY + "=" + hfiles.toString(),
@@ -240,8 +233,8 @@ public void testMROnTableWithBulkload() throws Exception {
}
@Test
- public void testBulkOutputWithTsvImporterTextMapper() throws Exception {
- final TableName table = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
+ public void testBulkOutputWithTsvImporterTextMapper(TestInfo testInfo) throws Exception {
+ final TableName table = TableName.valueOf(testInfo.getTestMethod().get().getName() + util.getRandomUUID());
String FAMILY = "FAM";
Path bulkOutputPath = new Path(util.getDataTestDirOnTestFS(table.getNameAsString()), "hfiles");
// Prepare the arguments required for the test.
@@ -257,8 +250,8 @@ public void testBulkOutputWithTsvImporterTextMapper() throws Exception {
}
@Test
- public void testMRWithOutputFormat() throws Exception {
- final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
+ public void testMRWithOutputFormat(TestInfo testInfo) throws Exception {
+ final TableName tableName = TableName.valueOf(testInfo.getTestMethod().get().getName() + util.getRandomUUID());
Path hfiles = new Path(util.getDataTestDirOnTestFS(tableName.getNameAsString()), "hfiles");
// Prepare the arguments required for the test.
String[] args = new String[] {
@@ -273,8 +266,8 @@ public void testMRWithOutputFormat() throws Exception {
}
@Test
- public void testBulkOutputWithInvalidLabels() throws Exception {
- final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
+ public void testBulkOutputWithInvalidLabels(TestInfo testInfo) throws Exception {
+ final TableName tableName = TableName.valueOf(testInfo.getTestMethod().get().getName() + util.getRandomUUID());
Path hfiles = new Path(util.getDataTestDirOnTestFS(tableName.getNameAsString()), "hfiles");
// Prepare the arguments required for the test.
String[] args = new String[] { "-D" + ImportTsv.BULK_OUTPUT_CONF_KEY + "=" + hfiles.toString(),
@@ -290,8 +283,8 @@ public void testBulkOutputWithInvalidLabels() throws Exception {
}
@Test
- public void testBulkOutputWithTsvImporterTextMapperWithInvalidLabels() throws Exception {
- final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
+ public void testBulkOutputWithTsvImporterTextMapperWithInvalidLabels(TestInfo testInfo) throws Exception {
+ final TableName tableName = TableName.valueOf(testInfo.getTestMethod().get().getName() + util.getRandomUUID());
Path hfiles = new Path(util.getDataTestDirOnTestFS(tableName.getNameAsString()), "hfiles");
// Prepare the arguments required for the test.
String[] args = new String[] {
@@ -391,12 +384,12 @@ private static void validateHFiles(FileSystem fs, String outputPath, String fami
String[] elements = cfStatus.getPath().toString().split(Path.SEPARATOR);
String cf = elements[elements.length - 1];
foundFamilies.add(cf);
- assertTrue(String.format(
+ assertTrue(configFamilies.contains(cf), String.format(
"HFile ouput contains a column family (%s) not present in input families (%s)", cf,
- configFamilies), configFamilies.contains(cf));
+ configFamilies));
for (FileStatus hfile : fs.listStatus(cfStatus.getPath())) {
- assertTrue(String.format("HFile %s appears to contain no data.", hfile.getPath()),
- hfile.getLen() > 0);
+        assertTrue(hfile.getLen() > 0,
+          String.format("HFile %s appears to contain no data.", hfile.getPath()));
if (expectedKVCount > -1) {
actualKVCount += getKVCountFromHfile(fs, hfile.getPath());
}
@@ -404,9 +397,8 @@ private static void validateHFiles(FileSystem fs, String outputPath, String fami
}
if (expectedKVCount > -1) {
assertTrue(
- String.format("KV count in output hfile=<%d> doesn't match with expected KV count=<%d>",
- actualKVCount, expectedKVCount),
- actualKVCount == expectedKVCount);
+ actualKVCount == expectedKVCount, String.format("KV count in output hfile=<%d> doesn't match with expected KV count=<%d>",
+ actualKVCount, expectedKVCount));
}
}
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java
index 04fc2c8d3b8f..760e11021c38 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java
@@ -17,9 +17,10 @@
*/
package org.apache.hadoop.hbase.mapreduce;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.IOException;
import java.util.Arrays;
@@ -61,24 +62,18 @@
import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.rules.ExpectedException;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-@Category({ VerySlowMapReduceTests.class, LargeTests.class })
+@Tag(VerySlowMapReduceTests.TAG)
+@Tag(LargeTests.TAG)
public class TestImportTsv implements Configurable {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestImportTsv.class);
-
private static final Logger LOG = LoggerFactory.getLogger(TestImportTsv.class);
protected static final String NAME = TestImportTsv.class.getSimpleName();
protected static HBaseTestingUtil util = new HBaseTestingUtil();
@@ -95,9 +90,6 @@ public class TestImportTsv implements Configurable {
private TableName tn;
private Map args;
- @Rule
- public ExpectedException exception = ExpectedException.none();
-
public Configuration getConf() {
return util.getConfiguration();
}
@@ -106,17 +98,17 @@ public void setConf(Configuration conf) {
throw new IllegalArgumentException("setConf not supported");
}
- @BeforeClass
+ @BeforeAll
public static void provisionCluster() throws Exception {
util.startMiniCluster();
}
- @AfterClass
+ @AfterAll
public static void releaseCluster() throws Exception {
util.shutdownMiniCluster();
}
- @Before
+ @BeforeEach
public void setup() throws Exception {
tn = TableName.valueOf("test-" + util.getRandomUUID());
args = new HashMap<>();
@@ -198,7 +190,7 @@ public void testJobConfigurationsWithTsvImporterTextMapper() throws Exception {
"-D" + ImportTsv.SEPARATOR_CONF_KEY + "=,",
"-D" + ImportTsv.BULK_OUTPUT_CONF_KEY + "=" + bulkOutputPath.toString(), tn.getNameAsString(),
INPUT_FILE };
- assertEquals("running test job configuration failed.", 0,
+ assertEquals(0,
ToolRunner.run(new Configuration(util.getConfiguration()), new ImportTsv() {
@Override
public int run(String[] args) throws Exception {
@@ -208,7 +200,7 @@ public int run(String[] args) throws Exception {
assertTrue(job.getMapOutputValueClass().equals(Text.class));
return 0;
}
- }, args));
+ }, args), "running test job configuration failed.");
// Delete table created by createSubmittableJob.
util.deleteTable(tn);
}
@@ -231,15 +223,15 @@ public void testWithoutAnExistingTableAndCreateTableSetToNo() throws Exception {
conf.set(ImportTsv.COLUMNS_CONF_KEY, "HBASE_ROW_KEY,FAM:A");
conf.set(ImportTsv.BULK_OUTPUT_CONF_KEY, "/output");
conf.set(ImportTsv.CREATE_TABLE_CONF_KEY, "no");
- exception.expect(TableNotFoundException.class);
- assertEquals("running test job configuration failed.", 0,
+ assertThrows(TableNotFoundException.class, () -> {
ToolRunner.run(new Configuration(util.getConfiguration()), new ImportTsv() {
@Override
public int run(String[] args) throws Exception {
createSubmittableJob(getConf(), args);
return 0;
}
- }, args));
+ }, args);
+ });
}
@Test
@@ -250,15 +242,15 @@ public void testMRNoMatchedColumnFamily() throws Exception {
"-D" + ImportTsv.COLUMNS_CONF_KEY
+ "=HBASE_ROW_KEY,FAM:A,FAM01_ERROR:A,FAM01_ERROR:B,FAM02_ERROR:C",
tn.getNameAsString(), "/inputFile" };
- exception.expect(NoSuchColumnFamilyException.class);
- assertEquals("running test job configuration failed.", 0,
+ assertThrows(NoSuchColumnFamilyException.class, () -> {
ToolRunner.run(new Configuration(util.getConfiguration()), new ImportTsv() {
@Override
public int run(String[] args) throws Exception {
createSubmittableJob(getConf(), args);
return 0;
}
- }, args));
+ }, args);
+ });
util.deleteTable(tn);
}
@@ -267,15 +259,15 @@ public int run(String[] args) throws Exception {
public void testMRWithoutAnExistingTable() throws Exception {
String[] args = new String[] { tn.getNameAsString(), "/inputFile" };
- exception.expect(TableNotFoundException.class);
- assertEquals("running test job configuration failed.", 0,
+ assertThrows(TableNotFoundException.class, () -> {
ToolRunner.run(new Configuration(util.getConfiguration()), new ImportTsv() {
@Override
public int run(String[] args) throws Exception {
createSubmittableJob(getConf(), args);
return 0;
}
- }, args));
+ }, args);
+ });
}
@Test
@@ -288,7 +280,7 @@ public void testJobConfigurationsWithDryMode() throws Exception {
"-D" + ImportTsv.SEPARATOR_CONF_KEY + "=,",
"-D" + ImportTsv.BULK_OUTPUT_CONF_KEY + "=" + bulkOutputPath.toString(),
"-D" + ImportTsv.DRY_RUN_CONF_KEY + "=true", tn.getNameAsString(), INPUT_FILE };
- assertEquals("running test job configuration failed.", 0,
+ assertEquals(0,
ToolRunner.run(new Configuration(util.getConfiguration()), new ImportTsv() {
@Override
public int run(String[] args) throws Exception {
@@ -296,7 +288,7 @@ public int run(String[] args) throws Exception {
assertTrue(job.getOutputFormatClass().equals(NullOutputFormat.class));
return 0;
}
- }, argsArray));
+ }, argsArray), "running test job configuration failed.");
// Delete table created by createSubmittableJob.
util.deleteTable(tn);
}
@@ -317,8 +309,7 @@ public void testDryModeWithoutBulkOutputAndTableExists() throws Exception {
@Test
public void testDryModeWithoutBulkOutputAndTableDoesNotExists() throws Exception {
args.put(ImportTsv.DRY_RUN_CONF_KEY, "true");
- exception.expect(TableNotFoundException.class);
- doMROnTableTest(null, 1);
+ assertThrows(TableNotFoundException.class, () -> doMROnTableTest(null, 1));
}
@Test
@@ -345,8 +336,7 @@ public void testDryModeWithBulkOutputAndTableDoesNotExistsCreateTableSetToNo() t
args.put(ImportTsv.BULK_OUTPUT_CONF_KEY, hfiles.toString());
args.put(ImportTsv.DRY_RUN_CONF_KEY, "true");
args.put(ImportTsv.CREATE_TABLE_CONF_KEY, "no");
- exception.expect(TableNotFoundException.class);
- doMROnTableTest(null, 1);
+ assertThrows(TableNotFoundException.class, () -> doMROnTableTest(null, 1));
}
@Test
@@ -358,8 +348,7 @@ public void testDryModeWithBulkModeAndTableDoesNotExistsCreateTableSetToYes() th
args.put(ImportTsv.CREATE_TABLE_CONF_KEY, "yes");
doMROnTableTest(null, 1);
// Verify temporary table was deleted.
- exception.expect(TableNotFoundException.class);
- util.deleteTable(tn);
+ assertThrows(TableNotFoundException.class, () -> util.deleteTable(tn));
}
/**
@@ -453,8 +442,8 @@ protected static Tool doMROnTableTest(HBaseTestingUtil util, TableName table, St
&& "true".equalsIgnoreCase(args.get(ImportTsv.DRY_RUN_CONF_KEY));
if (args.containsKey(ImportTsv.BULK_OUTPUT_CONF_KEY)) {
if (isDryRun) {
- assertFalse(String.format("Dry run mode, %s should not have been created.",
- ImportTsv.BULK_OUTPUT_CONF_KEY), fs.exists(new Path(ImportTsv.BULK_OUTPUT_CONF_KEY)));
+ assertFalse(fs.exists(new Path(ImportTsv.BULK_OUTPUT_CONF_KEY)), String.format(
+ "Dry run mode, %s should not have been created.", ImportTsv.BULK_OUTPUT_CONF_KEY));
} else {
validateHFiles(fs, args.get(ImportTsv.BULK_OUTPUT_CONF_KEY), family, expectedKVCount);
}
@@ -536,25 +525,24 @@ private static void validateHFiles(FileSystem fs, String outputPath, String fami
String[] elements = cfStatus.getPath().toString().split(Path.SEPARATOR);
String cf = elements[elements.length - 1];
foundFamilies.add(cf);
- assertTrue(String.format(
+ assertTrue(configFamilies.contains(cf), String.format(
"HFile output contains a column family (%s) not present in input families (%s)", cf,
- configFamilies), configFamilies.contains(cf));
+ configFamilies));
for (FileStatus hfile : fs.listStatus(cfStatus.getPath())) {
- assertTrue(String.format("HFile %s appears to contain no data.", hfile.getPath()),
- hfile.getLen() > 0);
+ assertTrue(hfile.getLen() > 0,
+ String.format("HFile %s appears to contain no data.", hfile.getPath()));
// count the number of KVs from all the hfiles
if (expectedKVCount > -1) {
actualKVCount += getKVCountFromHfile(fs, hfile.getPath());
}
}
}
- assertTrue(String.format("HFile output does not contain the input family '%s'.", family),
- foundFamilies.contains(family));
+ assertTrue(foundFamilies.contains(family),
+ String.format("HFile output does not contain the input family '%s'.", family));
if (expectedKVCount > -1) {
- assertTrue(
+      assertEquals(expectedKVCount, actualKVCount,
String.format("KV count in ouput hfile=<%d> doesn't match with expected KV count=<%d>",
- actualKVCount, expectedKVCount),
- actualKVCount == expectedKVCount);
+ actualKVCount, expectedKVCount));
}
}
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsvParser.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsvParser.java
index adb0589c9805..aa9fd1b10941 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsvParser.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsvParser.java
@@ -17,11 +17,11 @@
  */
 package org.apache.hadoop.hbase.mapreduce;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
 
 import java.util.ArrayList;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
@@ -33,9 +34,8 @@
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
import org.apache.hbase.thirdparty.com.google.common.base.Joiner;
import org.apache.hbase.thirdparty.com.google.common.base.Splitter;
@@ -44,11 +44,9 @@
/**
* Tests for {@link TsvParser}.
*/
-@Category({ MapReduceTests.class, SmallTests.class })
+@Tag(MapReduceTests.TAG)
+@Tag(SmallTests.TAG)
public class TestImportTsvParser {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestImportTsvParser.class);
private void assertBytesEquals(byte[] a, byte[] b) {
assertEquals(Bytes.toStringBinary(a), Bytes.toStringBinary(b));
@@ -171,50 +169,50 @@ public void testTsvParserWithTimestamp() throws BadTsvLineException {
/**
* Test cases that throw BadTsvLineException
*/
- @Test(expected = BadTsvLineException.class)
+ @Test
public void testTsvParserBadTsvLineExcessiveColumns() throws BadTsvLineException {
TsvParser parser = new TsvParser("HBASE_ROW_KEY,col_a", "\t");
byte[] line = Bytes.toBytes("val_a\tval_b\tval_c");
- parser.parse(line, line.length);
+ assertThrows(BadTsvLineException.class, () -> parser.parse(line, line.length));
}
- @Test(expected = BadTsvLineException.class)
+ @Test
public void testTsvParserBadTsvLineZeroColumn() throws BadTsvLineException {
TsvParser parser = new TsvParser("HBASE_ROW_KEY,col_a", "\t");
byte[] line = Bytes.toBytes("");
- parser.parse(line, line.length);
+ assertThrows(BadTsvLineException.class, () -> parser.parse(line, line.length));
}
- @Test(expected = BadTsvLineException.class)
+ @Test
public void testTsvParserBadTsvLineOnlyKey() throws BadTsvLineException {
TsvParser parser = new TsvParser("HBASE_ROW_KEY,col_a", "\t");
byte[] line = Bytes.toBytes("key_only");
- parser.parse(line, line.length);
+ assertThrows(BadTsvLineException.class, () -> parser.parse(line, line.length));
}
- @Test(expected = BadTsvLineException.class)
+ @Test
public void testTsvParserBadTsvLineNoRowKey() throws BadTsvLineException {
TsvParser parser = new TsvParser("col_a,HBASE_ROW_KEY", "\t");
byte[] line = Bytes.toBytes("only_cola_data_and_no_row_key");
- parser.parse(line, line.length);
+ assertThrows(BadTsvLineException.class, () -> parser.parse(line, line.length));
}
- @Test(expected = BadTsvLineException.class)
+ @Test
public void testTsvParserInvalidTimestamp() throws BadTsvLineException {
TsvParser parser = new TsvParser("HBASE_ROW_KEY,HBASE_TS_KEY,col_a,", "\t");
assertEquals(1, parser.getTimestampKeyColumnIndex());
byte[] line = Bytes.toBytes("rowkey\ttimestamp\tval_a");
ParsedLine parsed = parser.parse(line, line.length);
- assertEquals(-1, parsed.getTimestamp(-1));
+ assertThrows(BadTsvLineException.class, () -> parsed.getTimestamp(-1));
checkParsing(parsed, Splitter.on("\t").split(Bytes.toString(line)));
}
- @Test(expected = BadTsvLineException.class)
+ @Test
public void testTsvParserNoTimestampValue() throws BadTsvLineException {
TsvParser parser = new TsvParser("HBASE_ROW_KEY,col_a,HBASE_TS_KEY", "\t");
assertEquals(2, parser.getTimestampKeyColumnIndex());
byte[] line = Bytes.toBytes("rowkey\tval_a");
- parser.parse(line, line.length);
+ assertThrows(BadTsvLineException.class, () -> parser.parse(line, line.length));
}
@Test
@@ -225,30 +223,24 @@ public void testTsvParserParseRowKey() throws BadTsvLineException {
Pair rowKeyOffsets = parser.parseRowKey(line, line.length);
assertEquals(0, rowKeyOffsets.getFirst().intValue());
assertEquals(6, rowKeyOffsets.getSecond().intValue());
- try {
- line = Bytes.toBytes("\t\tval_a\t1234");
- parser.parseRowKey(line, line.length);
- fail("Should get BadTsvLineException on empty rowkey.");
- } catch (BadTsvLineException ignored) {
- }
- parser = new TsvParser("col_a,HBASE_ROW_KEY,HBASE_TS_KEY", "\t");
- assertEquals(1, parser.getRowKeyColumnIndex());
+ byte[] line2 = Bytes.toBytes("\t\tval_a\t1234");
+ assertThrows(BadTsvLineException.class, () -> parser.parseRowKey(line2, line2.length));
+
+ TsvParser parser2 = new TsvParser("col_a,HBASE_ROW_KEY,HBASE_TS_KEY", "\t");
+ assertEquals(1, parser2.getRowKeyColumnIndex());
line = Bytes.toBytes("val_a\trowkey\t1234");
- rowKeyOffsets = parser.parseRowKey(line, line.length);
+ rowKeyOffsets = parser2.parseRowKey(line, line.length);
assertEquals(6, rowKeyOffsets.getFirst().intValue());
assertEquals(6, rowKeyOffsets.getSecond().intValue());
- try {
- line = Bytes.toBytes("val_a");
- rowKeyOffsets = parser.parseRowKey(line, line.length);
- fail("Should get BadTsvLineException when number of columns less than rowkey position.");
- } catch (BadTsvLineException ignored) {
- }
- parser = new TsvParser("col_a,HBASE_TS_KEY,HBASE_ROW_KEY", "\t");
- assertEquals(2, parser.getRowKeyColumnIndex());
+ byte[] line3 = Bytes.toBytes("val_a");
+ assertThrows(BadTsvLineException.class, () -> parser2.parseRowKey(line3, line3.length));
+
+ TsvParser parser3 = new TsvParser("col_a,HBASE_TS_KEY,HBASE_ROW_KEY", "\t");
+ assertEquals(2, parser3.getRowKeyColumnIndex());
line = Bytes.toBytes("val_a\t1234\trowkey");
- rowKeyOffsets = parser.parseRowKey(line, line.length);
+ rowKeyOffsets = parser3.parseRowKey(line, line.length);
assertEquals(11, rowKeyOffsets.getFirst().intValue());
assertEquals(6, rowKeyOffsets.getSecond().intValue());
}
@@ -263,27 +255,20 @@ public void testTsvParseAttributesKey() throws BadTsvLineException {
assertEquals(3, parser.getAttributesKeyColumnIndex());
String[] attributes = parse.getIndividualAttributes();
assertEquals("key=>value", attributes[0]);
- try {
- line = Bytes.toBytes("rowkey\tval_a\t1234");
- parser.parse(line, line.length);
- fail("Should get BadTsvLineException on empty rowkey.");
- } catch (BadTsvLineException ignored) {
- }
+ byte[] line2 = Bytes.toBytes("rowkey\tval_a\t1234");
+ TsvParser finalParser = parser;
+ assertThrows(BadTsvLineException.class, () -> finalParser.parse(line2, line2.length));
- parser = new TsvParser("HBASE_ATTRIBUTES_KEY,col_a,HBASE_ROW_KEY,HBASE_TS_KEY", "\t");
- assertEquals(2, parser.getRowKeyColumnIndex());
+ TsvParser parser2 = new TsvParser("HBASE_ATTRIBUTES_KEY,col_a,HBASE_ROW_KEY,HBASE_TS_KEY", "\t");
+ assertEquals(2, parser2.getRowKeyColumnIndex());
line = Bytes.toBytes("key=>value\tval_a\trowkey\t1234");
- parse = parser.parse(line, line.length);
+ parse = parser2.parse(line, line.length);
assertEquals(0, parse.getAttributeKeyOffset());
- assertEquals(0, parser.getAttributesKeyColumnIndex());
+ assertEquals(0, parser2.getAttributesKeyColumnIndex());
attributes = parse.getIndividualAttributes();
assertEquals("key=>value", attributes[0]);
- try {
- line = Bytes.toBytes("val_a");
- ParsedLine parse2 = parser.parse(line, line.length);
- fail("Should get BadTsvLineException when number of columns less than rowkey position.");
- } catch (BadTsvLineException ignored) {
- }
+ byte[] line3 = Bytes.toBytes("val_a");
+ assertThrows(BadTsvLineException.class, () -> parser2.parse(line3, line3.length));
parser = new TsvParser("col_a,HBASE_ATTRIBUTES_KEY,HBASE_TS_KEY,HBASE_ROW_KEY", "\t");
assertEquals(3, parser.getRowKeyColumnIndex());
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestJarFinder.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestJarFinder.java
index 87461c2735f0..40fe6abf83d8 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestJarFinder.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestJarFinder.java
@@ -36,18 +36,13 @@
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
import org.slf4j.LoggerFactory;
/**
* This file was forked from hadoop/common/branches/branch-2@1350012.
*/
-@Category(SmallTests.class)
+@Tag(SmallTests.TAG)
public class TestJarFinder {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestJarFinder.class);
-
@Test
public void testJar() throws Exception {
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMRIncrementalLoad.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMRIncrementalLoad.java
index 1e7cb0e41037..5542f982d00f 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMRIncrementalLoad.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMRIncrementalLoad.java
@@ -20,31 +20,23 @@
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.MapReduceTests;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameters;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.MethodSource;
-@RunWith(Parameterized.class)
-@Category({ MapReduceTests.class, LargeTests.class })
+@Tag(MapReduceTests.TAG)
+@Tag(LargeTests.TAG)
public class TestMRIncrementalLoad extends MRIncrementalLoadTestBase {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestMRIncrementalLoad.class);
-
- @BeforeClass
+ @BeforeAll
public static void setUpBeforeClass() throws Exception {
setupCluster(false);
}
- @Parameters(name = "{index}: shouldChangeRegions={0}, putSortReducer={1}," + " tableStr={2}")
public static List