path: root/java/openjdk6/files/icedtea/security/20130416/8009063.patch
blob: a15331af4bbc3c37d2cc74c21874bb8ada36fb92
# HG changeset patch
# User chegar
# Date 1362305505 0
# Node ID 98ad2f1e25d13aca196ad77b2f227f85072c9b16
# Parent  17ac71e7b72087f0f7b7ac793ae93a816ef22d96
8009063: Improve reliability of ConcurrentHashMap
Reviewed-by: alanb, ahgross

diff --git a/src/share/classes/java/util/concurrent/ConcurrentHashMap.java b/src/share/classes/java/util/concurrent/ConcurrentHashMap.java
--- jdk/src/share/classes/java/util/concurrent/ConcurrentHashMap.java
+++ jdk/src/share/classes/java/util/concurrent/ConcurrentHashMap.java
@@ -40,6 +40,7 @@ import java.io.IOException;
 import java.io.IOException;
 import java.io.ObjectInputStream;
 import java.io.ObjectOutputStream;
+import java.io.ObjectStreamField;
 
 /**
  * A hash table supporting full concurrency of retrievals and
@@ -1535,7 +1536,23 @@ public class ConcurrentHashMap<K, V> ext
     @SuppressWarnings("unchecked")
     private void readObject(java.io.ObjectInputStream s)
         throws IOException, ClassNotFoundException  {
-        s.defaultReadObject();
+        // Don't call defaultReadObject()
+        ObjectInputStream.GetField oisFields = s.readFields();
+        final Segment<K,V>[] oisSegments = (Segment<K,V>[])oisFields.get("segments", null);
+
+        final int ssize = oisSegments.length;
+        if (ssize < 1 || ssize > MAX_SEGMENTS
+            || (ssize & (ssize-1)) != 0 )  // ssize not power of two
+            throw new java.io.InvalidObjectException("Bad number of segments:"
+                                                     + ssize);
+        int sshift = 0, ssizeTmp = ssize;
+        while (ssizeTmp > 1) {
+            ++sshift;
+            ssizeTmp >>>= 1;
+        }
+        UNSAFE.putIntVolatile(this, SEGSHIFT_OFFSET, 32 - sshift);
+        UNSAFE.putIntVolatile(this, SEGMASK_OFFSET, ssize - 1);
+        UNSAFE.putObjectVolatile(this, SEGMENTS_OFFSET, oisSegments);
 
         // set hashMask
         UNSAFE.putIntVolatile(this, HASHSEED_OFFSET, randomHashSeed(this));
@@ -1568,6 +1585,9 @@ public class ConcurrentHashMap<K, V> ext
     private static final int SSHIFT;
     private static final long TBASE;
     private static final int TSHIFT;
+    private static final long SEGSHIFT_OFFSET;
+    private static final long SEGMASK_OFFSET;
+    private static final long SEGMENTS_OFFSET;
 
     static {
         int ss, ts;
@@ -1581,6 +1601,12 @@ public class ConcurrentHashMap<K, V> ext
             SBASE = UNSAFE.arrayBaseOffset(sc);
             ts = UNSAFE.arrayIndexScale(tc);
             ss = UNSAFE.arrayIndexScale(sc);
+            SEGSHIFT_OFFSET = UNSAFE.objectFieldOffset(
+                ConcurrentHashMap.class.getDeclaredField("segmentShift"));
+            SEGMASK_OFFSET = UNSAFE.objectFieldOffset(
+                ConcurrentHashMap.class.getDeclaredField("segmentMask"));
+            SEGMENTS_OFFSET = UNSAFE.objectFieldOffset(
+                ConcurrentHashMap.class.getDeclaredField("segments"));
         } catch (Exception e) {
             throw new Error(e);
         }
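
For context (not part of the patch itself): the change replaces defaultReadObject() with a readFields()-based path so the deserialized segments array can be validated (present, length a power of two, no larger than MAX_SEGMENTS) before it is installed, and segmentShift/segmentMask are recomputed from the validated length instead of being trusted from the stream. Because those fields are final in ConcurrentHashMap, the patch installs them with UNSAFE.putIntVolatile/putObjectVolatile. Below is a minimal, self-contained sketch of the same defensive-deserialization pattern; the class and field names (SegmentedTable, buckets, shift, mask) are hypothetical, and the fields are deliberately non-final so no Unsafe is needed.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InvalidObjectException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;

// Hypothetical example class; not the JDK implementation.
class SegmentedTable implements Serializable {
    private static final long serialVersionUID = 1L;
    private static final int MAX_SEGMENTS = 1 << 16;

    private Object[] buckets;   // analogous to ConcurrentHashMap.segments
    private int shift;          // analogous to segmentShift
    private int mask;           // analogous to segmentMask

    SegmentedTable(int size) {
        this.buckets = new Object[size];
        int sshift = 0;
        for (int s = size; s > 1; s >>>= 1) ++sshift;
        this.shift = 32 - sshift;
        this.mask = size - 1;
    }

    private void readObject(ObjectInputStream s)
            throws IOException, ClassNotFoundException {
        // Don't call defaultReadObject(): pull the raw serialized fields first...
        ObjectInputStream.GetField fields = s.readFields();
        Object[] b = (Object[]) fields.get("buckets", null);

        // ...then validate them before installing anything into this object.
        if (b == null) {
            throw new InvalidObjectException("missing buckets");
        }
        int size = b.length;
        if (size < 1 || size > MAX_SEGMENTS || (size & (size - 1)) != 0) {
            throw new InvalidObjectException("Bad number of segments: " + size);
        }

        // Derive shift/mask from the validated length; never trust serialized
        // values for fields that must stay mutually consistent.
        int sshift = 0;
        for (int sz = size; sz > 1; sz >>>= 1) ++sshift;
        this.buckets = b;
        this.shift = 32 - sshift;
        this.mask = size - 1;
    }

    // Round-trip demo: serialize, deserialize, and show the recomputed fields.
    public static void main(String[] args) throws Exception {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        try (ObjectOutputStream oos = new ObjectOutputStream(bos)) {
            oos.writeObject(new SegmentedTable(16));
        }
        try (ObjectInputStream ois = new ObjectInputStream(
                new ByteArrayInputStream(bos.toByteArray()))) {
            SegmentedTable t = (SegmentedTable) ois.readObject();
            System.out.println("segments=" + t.buckets.length
                    + " shift=" + t.shift + " mask=" + t.mask);
        }
    }
}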