Comparing a ReadWriteLock-based concurrent map with ConcurrentHashMap


This post benchmarks a locked map built on readLock/writeLock against ConcurrentHashMap; the results do not differ by much.


Implementation:

import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;

import org.junit.Test;

public class ReadWriteHashMap<K, V> {

    // Plain HashMap guarded by a ReentrantReadWriteLock.
    private Map<K, V> mapContainer;

    private ReadWriteLock lock = new ReentrantReadWriteLock();

    private Lock readLock = lock.readLock();

    private Lock writeLock = lock.writeLock();

    // Number of entries used by each benchmark.
    public static final int count = 1000000;

    public ReadWriteHashMap() {
        mapContainer = new HashMap<K, V>();
    }

    // Writes take the exclusive write lock.
    public V put(K key, V value) {
        writeLock.lock();
        try {
            return mapContainer.put(key, value);
        } finally {
            writeLock.unlock();
        }
    }

    // Reads take the shared read lock, so concurrent readers do not block each other.
    public V get(K key) {
        readLock.lock();
        try {
            return mapContainer.get(key);
        } finally {
            readLock.unlock();
        }
    }
    
    
    // Benchmark: time `count` puts of random UUID key/value pairs into the lock-guarded map.
    @Test
    public void testWriteOpOfReadWriteMap() throws Exception {
        long oldTime = System.currentTimeMillis();
        ReadWriteHashMap<String, String> map = new ReadWriteHashMap<String, String>();

        for (int i = 0; i < count; i++) {
            map.put(UUID.randomUUID().toString(), UUID.randomUUID().toString());
        }

        long cost = System.currentTimeMillis() - oldTime;
        System.out.println("readWriteHashMap write cost:" + cost);
    }
    
    
    // Benchmark: fill the map with random UUID keys, then time `count` lookups.
    // Note that the lookups use String.valueOf(i), so almost all of them miss.
    @Test
    public void testReadOpOfReadWriteMap() throws Exception {
        ReadWriteHashMap<String, String> map = new ReadWriteHashMap<String, String>();
        String k = "d";
        for (int i = 0; i < count; i++) {
            if (i == 0) {
                k = UUID.randomUUID().toString();
                map.put(k, UUID.randomUUID().toString());
            } else {
                map.put(UUID.randomUUID().toString(), UUID.randomUUID().toString());
            }
        }

        long oldTime = System.currentTimeMillis();

        map.get(k);
        for (int i = 0; i < count; i++) {
            map.get(String.valueOf(i));
        }

        long cost = System.currentTimeMillis() - oldTime;
        System.out.println("readWriteHashMap read cost:" + cost);
    }
    
    
    // Same write benchmark against ConcurrentHashMap.
    @Test
    public void testWriteOpOfConcurrentHashMap() throws Exception {
        long oldTime = System.currentTimeMillis();
        Map<String, String> map = new ConcurrentHashMap<String, String>();

        for (int i = 0; i < count; i++) {
            map.put(UUID.randomUUID().toString(), UUID.randomUUID().toString());
        }

        long cost = System.currentTimeMillis() - oldTime;
        System.out.println("concurrentHashmap write cost:" + cost);
    }
    
    // Same read benchmark against ConcurrentHashMap (lookups are again mostly misses).
    @Test
    public void testReadOpOfConcurrentHashMap() throws Exception {
        Map<String, String> map = new ConcurrentHashMap<String, String>();
        String k = "d";
        for (int i = 0; i < count; i++) {
            if (i == 0) {
                k = UUID.randomUUID().toString();
                map.put(k, UUID.randomUUID().toString());
            } else {
                map.put(UUID.randomUUID().toString(), UUID.randomUUID().toString());
            }
        }

        long oldTime = System.currentTimeMillis();

        map.get(k);
        for (int i = 0; i < count; i++) {
            map.get(String.valueOf(i));
        }

        long cost = System.currentTimeMillis() - oldTime;
        System.out.println("concurrentHashmap read cost:" + cost);
    }

}
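
Note that all four tests run on a single thread, so neither the read/write lock nor ConcurrentHashMap's internal synchronization is ever actually contended; the timings mostly reflect uncontended lock overhead plus UUID generation. Below is a rough sketch of a contended variant, assuming a fixed thread pool running a mixed read/write workload. The class name, thread count, and write ratio are illustrative choices and not part of the original tests; the lock-based map above would need to implement java.util.Map (or be wrapped) before it could be passed in.

// Sketch only: a contended benchmark driver, not part of the original post.
// Thread count, write ratio, and key space below are illustrative assumptions.
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadLocalRandom;

public class ContendedMapBenchmark {

    // Runs `threads` workers, each doing `opsPerThread` operations (about 10% puts, 90% gets),
    // and returns the elapsed wall-clock time in milliseconds.
    static long benchmark(Map<String, String> map, int threads, int opsPerThread) throws InterruptedException {
        ExecutorService pool = Executors.newFixedThreadPool(threads);
        CountDownLatch done = new CountDownLatch(threads);
        long start = System.currentTimeMillis();
        for (int t = 0; t < threads; t++) {
            pool.execute(() -> {
                ThreadLocalRandom rnd = ThreadLocalRandom.current();
                for (int i = 0; i < opsPerThread; i++) {
                    if (rnd.nextInt(10) == 0) {
                        map.put(UUID.randomUUID().toString(), "v");              // ~10% writes
                    } else {
                        map.get(Integer.toString(rnd.nextInt(opsPerThread)));    // ~90% reads, mostly misses
                    }
                }
                done.countDown();
            });
        }
        done.await();
        pool.shutdown();
        return System.currentTimeMillis() - start;
    }

    public static void main(String[] args) throws InterruptedException {
        // ConcurrentHashMap can be passed in directly; an adapter would be needed
        // for the lock-based ReadWriteHashMap from the post.
        long cost = benchmark(new ConcurrentHashMap<String, String>(), 8, 100000);
        System.out.println("concurrentHashmap contended cost:" + cost);
    }
}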



Results for the map implemented with read/write locks (times in ms):

Run 1, 100,000 entries:

readWriteHashMap write cost:2305
readWriteHashMap read cost:36

Run 1, 1,000,000 entries:

readWriteHashMap write cost:18605
readWriteHashMap read cost:293

Run 2, 100,000 entries:

readWriteHashMap read cost:35
readWriteHashMap write cost:1646

Run 2, 1,000,000 entries:

readWriteHashMap write cost:20522
readWriteHashMap read cost:308

Run 3, 100,000 entries:

readWriteHashMap read cost:72
readWriteHashMap write cost:1884

Run 3, 1,000,000 entries:

readWriteHashMap write cost:20522
readWriteHashMap read cost:308

Run 4, 100,000 entries:

readWriteHashMap write cost:2512
readWriteHashMap read cost:34

Results for ConcurrentHashMap (times in ms):

Run 1, 100,000 entries:

concurrentHashmap write cost:2492
concurrentHashmap read cost:21

Run 1, 1,000,000 entries:

concurrentHashmap write cost:21998
concurrentHashmap read cost:373

Run 2, 100,000 entries:

concurrentHashmap read cost:19
concurrentHashmap write cost:1646

Run 2, 1,000,000 entries:

concurrentHashmap write cost:22475
concurrentHashmap read cost:423

Run 3, 100,000 entries:

concurrentHashmap write cost:2408
concurrentHashmap read cost:17

Run 3, 1,000,000 entries:

concurrentHashmap write cost:22239
concurrentHashmap read cost:408

Run 4, 100,000 entries:

concurrentHashmap write cost:2385
concurrentHashmap read cost:36

Run 4, 1,000,000 entries:

concurrentHashmap write cost:20524
concurrentHashmap read cost:319









