1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18 package org.apache.hadoop.hbase.util;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicReference;

import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.junit.Assert;
import org.junit.Test;
import org.junit.experimental.categories.Category;
25
26 @Category(MediumTests.class)
27 public class TestCounter {
28 private static final int[] THREAD_COUNTS = {1, 10, 100};
29 private static final int DATA_COUNT = 1000000;
30
31 private interface Operation {
32 void execute();
33 }
34
35 @Test
36 public void testIncrement() throws Exception {
37 for(int threadCount : THREAD_COUNTS) {
38 final Counter counter = new Counter();
39
40 execute(new Operation() {
41 @Override
42 public void execute() {
43 counter.increment();
44 }
45 }, threadCount);
46
47 Assert.assertEquals(threadCount * (long)DATA_COUNT, counter.get());
48 }
49 }
50
51 @Test
52 public void testIncrementAndGet() throws Exception {
53 for(int threadCount: THREAD_COUNTS) {
54 final Counter counter = new Counter();
55
56 execute(new Operation() {
57 @Override
58 public void execute() {
59 counter.increment();
60 counter.get();
61 }
62 }, threadCount);
63
64 Assert.assertEquals(threadCount * (long)DATA_COUNT, counter.get());
65 }
66 }
67
68 private static void execute(final Operation op, int threadCount)
69 throws InterruptedException {
70
71 final CountDownLatch prepareLatch = new CountDownLatch(threadCount);
72 final CountDownLatch startLatch = new CountDownLatch(1);
73 final CountDownLatch endLatch = new CountDownLatch(threadCount);
74
75 class OperationThread extends Thread {
76 @Override
77 public void run() {
78 try {
79 prepareLatch.countDown();
80 startLatch.await();
81
82 for(int i=0; i<DATA_COUNT; i++) {
83 op.execute();
84 }
85
86 endLatch.countDown();
87
88 } catch(Exception e) {}
89 }
90 }
91
92 for(int j=0; j<threadCount; j++) {
93 new OperationThread().start();
94 }
95
96 prepareLatch.await();
97 startLatch.countDown();
98 endLatch.await();
99 }
100 }