Loading JUnit tests into different JVMs

Date: 2014-05-15 19:23:00

Tags: java unit-testing maven junit

I have a JUnit test case named TestColumnPrefixFilter from the Apache HBase project. There are two tests in this test case. When I run it with Maven from the command line:

mvn test -Dtest=org.apache.hadoop.hbase.filter.TestColumnPrefixFilter

I get this result:

Running org.apache.hadoop.hbase.filter.TestColumnPrefixFilter
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 3.585 seconds

My question is: is there a way to run these two tests from this test case separately, without modifying the code? Is there a Maven or Java option for this? Or is it possible to run the two tests in different JVMs? When I invoke the command line above, the test case is loaded into a single JVM, and that one JVM is used for both tests, which is not what I want.
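(I know that Surefire can select a single test method with the Class#method syntax, supported for JUnit 4 tests since Surefire 2.7.3, for example:

mvn test -Dtest=org.apache.hadoop.hbase.filter.TestColumnPrefixFilter#testColumnPrefixFilter
mvn test -Dtest=org.apache.hadoop.hbase.filter.TestColumnPrefixFilter#testColumnPrefixFilterWithFilterList

but that means two separate Maven invocations; I would like both tests to run in a single build, each in its own JVM.)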

I tried the perTest option for Maven, but it does not work properly (a sketch of what I tried is below).
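Roughly what I tried in my pom.xml (a sketch from memory, not an exact copy of my build file):

<plugin>
  <groupId>org.apache.maven.plugins</groupId>
  <artifactId>maven-surefire-plugin</artifactId>
  <configuration>
    <forkMode>pertest</forkMode>
  </configuration>
</plugin>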

The Java file is below:

/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.filter;

import static org.junit.Assert.*;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
import org.junit.experimental.categories.Category;

@Category(SmallTests.class)
public class TestColumnPrefixFilter {

  private final static HBaseTestingUtility TEST_UTIL = new
      HBaseTestingUtility();

  @Test
  public void testColumnPrefixFilter() throws IOException {
    String family = "Family";
    HTableDescriptor htd = new HTableDescriptor("TestColumnPrefixFilter");
    htd.addFamily(new HColumnDescriptor(family));
    HRegionInfo info = new HRegionInfo(htd.getName(), null, null, false);
    HRegion region = HRegion.createHRegion(info, TEST_UTIL.
      getDataTestDir(), TEST_UTIL.getConfiguration(), htd);
    try {
      List<String> rows = generateRandomWords(100, "row");
      List<String> columns = generateRandomWords(10000, "column");
      long maxTimestamp = 2;

      List<KeyValue> kvList = new ArrayList<KeyValue>();

      Map<String, List<KeyValue>> prefixMap = new HashMap<String,
          List<KeyValue>>();

      prefixMap.put("p", new ArrayList<KeyValue>());
      prefixMap.put("s", new ArrayList<KeyValue>());

      String valueString = "ValueString";

      for (String row: rows) {
        Put p = new Put(Bytes.toBytes(row));
        p.setWriteToWAL(false);
        for (String column: columns) {
          for (long timestamp = 1; timestamp <= maxTimestamp; timestamp++) {
            KeyValue kv = KeyValueTestUtil.create(row, family, column, timestamp,
                valueString);
            p.add(kv);
            kvList.add(kv);
            for (String s: prefixMap.keySet()) {
              if (column.startsWith(s)) {
                prefixMap.get(s).add(kv);
              }
            }
          }
        }
        region.put(p);
      }

      ColumnPrefixFilter filter;
      Scan scan = new Scan();
      scan.setMaxVersions();
      for (String s: prefixMap.keySet()) {
        filter = new ColumnPrefixFilter(Bytes.toBytes(s));

        scan.setFilter(filter);

        InternalScanner scanner = region.getScanner(scan);
        List<KeyValue> results = new ArrayList<KeyValue>();
        while(scanner.next(results));
        assertEquals(prefixMap.get(s).size(), results.size());
      }
    } finally {
      region.close();
      region.getLog().closeAndDelete();
    }

    region.close();
    region.getLog().closeAndDelete();
  }

  @Test
  public void testColumnPrefixFilterWithFilterList() throws IOException {
    String family = "Family";
    HTableDescriptor htd = new HTableDescriptor("TestColumnPrefixFilter");
    htd.addFamily(new HColumnDescriptor(family));
    HRegionInfo info = new HRegionInfo(htd.getName(), null, null, false);
    HRegion region = HRegion.createHRegion(info, TEST_UTIL.
      getDataTestDir(), TEST_UTIL.getConfiguration(), htd);
    try {
      List<String> rows = generateRandomWords(100, "row");
      List<String> columns = generateRandomWords(10000, "column");
      long maxTimestamp = 2;

      List<KeyValue> kvList = new ArrayList<KeyValue>();

      Map<String, List<KeyValue>> prefixMap = new HashMap<String,
          List<KeyValue>>();

      prefixMap.put("p", new ArrayList<KeyValue>());
      prefixMap.put("s", new ArrayList<KeyValue>());

      String valueString = "ValueString";

      for (String row: rows) {
        Put p = new Put(Bytes.toBytes(row));
        p.setWriteToWAL(false);
        for (String column: columns) {
          for (long timestamp = 1; timestamp <= maxTimestamp; timestamp++) {
            KeyValue kv = KeyValueTestUtil.create(row, family, column, timestamp,
                valueString);
            p.add(kv);
            kvList.add(kv);
            for (String s: prefixMap.keySet()) {
              if (column.startsWith(s)) {
                prefixMap.get(s).add(kv);
              }
            }
          }
        }
        region.put(p);
      }

      ColumnPrefixFilter filter;
      Scan scan = new Scan();
      scan.setMaxVersions();
      for (String s: prefixMap.keySet()) {
        filter = new ColumnPrefixFilter(Bytes.toBytes(s));

        //this is how this test differs from the one above
        FilterList filterList = new FilterList(FilterList.Operator.MUST_PASS_ALL);
        filterList.addFilter(filter);
        scan.setFilter(filterList);

        InternalScanner scanner = region.getScanner(scan);
        List<KeyValue> results = new ArrayList<KeyValue>();
        while(scanner.next(results));
        assertEquals(prefixMap.get(s).size(), results.size());
      }
    } finally {
      region.close();
      region.getLog().closeAndDelete();
    }

    region.close();
    region.getLog().closeAndDelete();
  }

  List<String> generateRandomWords(int numberOfWords, String suffix) {
    Set<String> wordSet = new HashSet<String>();
    for (int i = 0; i < numberOfWords; i++) {
      int lengthOfWords = (int) (Math.random()*2) + 1;
      char[] wordChar = new char[lengthOfWords];
      for (int j = 0; j < wordChar.length; j++) {
        wordChar[j] = (char) (Math.random() * 26 + 97);
      }
      String word;
      if (suffix == null) {
        word = new String(wordChar);
      } else {
        word = new String(wordChar) + suffix;
      }
      wordSet.add(word);
    }
    List<String> wordList = new ArrayList<String>(wordSet);
    return wordList;
  }

  @org.junit.Rule
  public org.apache.hadoop.hbase.ResourceCheckerJUnitRule cu =
    new org.apache.hadoop.hbase.ResourceCheckerJUnitRule();
}
EDIT: I found something useful. In Ant, there is an option called "perBatch" for forkmode, and it looks like exactly what I am looking for, but it is not available in Maven. How can I get the equivalent setting in Maven? (A sketch of the Ant setting is below.)
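For reference, the Ant setting looks roughly like this (a sketch only; the classpath reference and the directory properties are placeholders, not taken from a real build file):

<junit fork="yes" forkmode="perBatch" printsummary="on">
  <classpath refid="test.classpath"/> <!-- placeholder classpath reference -->
  <batchtest todir="${test.reports}"> <!-- placeholder report directory -->
    <fileset dir="${test.classes}" includes="**/Test*.class"/>
  </batchtest>
</junit>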

1 Answer:

Answer 0 (score: 1):

"Or is it possible to run the two tests in different JVMs?"

Yes, Maven has the reuseForks option for this. From the Surefire documentation:

  Indicates if forked VMs can be reused. If set to "false", a new VM is
  forked for each test class to be executed. If set to "true", up to
  forkCount VMs will be forked and then reused to execute all tests.

  Default value is: true.

  User property is: reuseForks.

You can set it in your pom.xml like this:

<plugin>
    <groupId>org.apache.maven.plugins</groupId>
    <artifactId>maven-surefire-plugin</artifactId>
    <version>2.16</version>
    <configuration>
        <reuseForks>false</reuseForks> <!-- fork a fresh JVM for each test class -->
    </configuration>
</plugin>
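Since reuseForks is a user property, the same can also be done from the command line without touching the POM (assuming Surefire 2.14 or newer, where forkCount and reuseForks exist):

mvn test -Dtest=org.apache.hadoop.hbase.filter.TestColumnPrefixFilter -DforkCount=1 -DreuseForks=false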

EDIT:

Try the following configuration:

<plugin>
  <groupId>org.apache.maven.plugins</groupId>
  <artifactId>maven-surefire-plugin</artifactId>
  <version>2.9</version>
  <configuration>
    <forkMode>always</forkMode> <!-- try with 'pertest' option as well -->
  </configuration>
</plugin>
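Note that forkMode is the older mechanism and was deprecated in Surefire 2.14 in favor of forkCount/reuseForks; forkMode=always corresponds to forkCount=1 plus reuseForks=false, and pertest is an older alias kept for backward compatibility. One caveat: Surefire forks at test class granularity, so both methods of TestColumnPrefixFilter will still share one JVM either way; to truly isolate the two tests from each other you would have to split them into separate classes or run two separate invocations.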