author     Ross Burton <ross.burton@intel.com>                  2013-04-29 15:41:43 +0100
committer  Richard Purdie <richard.purdie@linuxfoundation.org>  2013-04-29 18:00:00 +0100
commit     995de756e344ab7965160a633c2a408bede7ed97
tree       9d9a31431165c02a4755c79b9e1fff13922b6765
parent     23f366384277cb2cdd5ce952cb0af44c54179a1d
download   poky-995de756e344ab7965160a633c2a408bede7ed97.tar.gz
perl: fix CVE-2013-1667
From http://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2013-1667: "The rehash
mechanism in Perl 5.8.2 through 5.16.x allows context-dependent attackers to
cause a denial of service (memory consumption and crash) via a crafted hash
key."

Patch taken from upstream git.

(From OE-Core rev: ccc272a3f7bf240733dc3af1d8d7b870c55e1f5b)

Signed-off-by: Ross Burton <ross.burton@intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
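
For context only (this note and the sketch below are not part of the commit): the
tests added by this patch detect a bucket split by reading a hash in scalar
context, which on the affected perls (5.8.2 through 5.16.x) yields a
"used/allocated" bucket string. A minimal standalone sketch of that technique,
assuming such a perl, might look like:

    #!/usr/bin/env perl
    # Illustrative sketch only: count the buckets perl has allocated for a
    # hash, the same way the buckets() helper added to the tests below does.
    # Assumes a pre-5.18 perl, where a non-empty hash in scalar context
    # returns a string such as "122/256" (used/allocated buckets).
    use strict;
    use warnings;

    sub buckets {
        my $hr    = shift;
        my $usage = scalar(%$hr);                     # e.g. "122/256"
        return $usage =~ m{/([0-9]+)\z} ? 0 + $1 : 8; # 8 is perl's default
    }

    my %h;
    $h{$_}++ for 'a' .. 'zz';                         # ordinary, benign keys
    printf "keys: %d, buckets: %d\n", scalar(keys %h), buckets(\%h);
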
-rw-r--r--  meta/recipes-devtools/perl/perl-5.14.2/cve-2013-1667.patch | 173
-rw-r--r--  meta/recipes-devtools/perl/perl_5.14.2.bb                  |   1
2 files changed, 174 insertions(+), 0 deletions(-)
diff --git a/meta/recipes-devtools/perl/perl-5.14.2/cve-2013-1667.patch b/meta/recipes-devtools/perl/perl-5.14.2/cve-2013-1667.patch
new file mode 100644
index 0000000000..a37cedbb8e
--- /dev/null
+++ b/meta/recipes-devtools/perl/perl-5.14.2/cve-2013-1667.patch
@@ -0,0 +1,173 @@
Upstream-Status: Backport
Signed-off-by: Ross Burton <ross.burton@intel.com>

From d59e31fc729d8a39a774f03bc6bc457029a7aef2 Mon Sep 17 00:00:00 2001
From: Yves Orton <demerphq@gmail.com>
Date: Tue, 12 Feb 2013 10:53:05 +0100
Subject: [PATCH] Prevent premature hsplit() calls, and only trigger REHASH after hsplit()

Triggering a hsplit due to long chain length allows an attacker
to create a carefully chosen set of keys which can cause the hash
to use 2 * (2**32) * sizeof(void *) bytes ram. AKA a DOS via memory
exhaustion. Doing so also takes non trivial time.

Eliminating this check, and only inspecting chain length after a
normal hsplit() (triggered when keys>buckets) prevents the attack
entirely, and makes such attacks relatively benign.

(cherry picked from commit f1220d61455253b170e81427c9d0357831ca0fac)
---
 ext/Hash-Util-FieldHash/t/10_hash.t |   18 ++++++++++++++++--
 hv.c                                |   35 ++++++++---------------------------
 t/op/hash.t                         |   20 +++++++++++++++++---
 3 files changed, 41 insertions(+), 32 deletions(-)

diff --git a/ext/Hash-Util-FieldHash/t/10_hash.t b/ext/Hash-Util-FieldHash/t/10_hash.t
index 2cfb4e8..d58f053 100644
--- a/ext/Hash-Util-FieldHash/t/10_hash.t
+++ b/ext/Hash-Util-FieldHash/t/10_hash.t
@@ -38,15 +38,29 @@ use constant START => "a";
 
 # some initial hash data
 fieldhash my %h2;
-%h2 = map {$_ => 1} 'a'..'cc';
+my $counter= "a";
+$h2{$counter++}++ while $counter ne 'cd';
 
 ok (!Internals::HvREHASH(%h2),
     "starting with pre-populated non-pathological hash (rehash flag if off)");
 
 my @keys = get_keys(\%h2);
+my $buckets= buckets(\%h2);
 $h2{$_}++ for @keys;
+$h2{$counter++}++ while buckets(\%h2) == $buckets; # force a split
 ok (Internals::HvREHASH(%h2),
-    scalar(@keys) . " colliding into the same bucket keys are triggering rehash");
+    scalar(@keys) . " colliding into the same bucket keys are triggering rehash after split");
+
+# returns the number of buckets in a hash
+sub buckets {
+    my $hr = shift;
+    my $keys_buckets= scalar(%$hr);
+    if ($keys_buckets=~m!/([0-9]+)\z!) {
+        return 0+$1;
+    } else {
+        return 8;
+    }
+}
 
 sub get_keys {
     my $hr = shift;
diff --git a/hv.c b/hv.c
index 2be1feb..abb9d76 100644
--- a/hv.c
+++ b/hv.c
@@ -35,7 +35,8 @@ holds the key and hash value.
 #define PERL_HASH_INTERNAL_ACCESS
 #include "perl.h"
 
-#define HV_MAX_LENGTH_BEFORE_SPLIT 14
+#define HV_MAX_LENGTH_BEFORE_REHASH 14
+#define SHOULD_DO_HSPLIT(xhv) ((xhv)->xhv_keys > (xhv)->xhv_max) /* HvTOTALKEYS(hv) > HvMAX(hv) */
 
 static const char S_strtab_error[]
     = "Cannot modify shared string table in hv_%s";
@@ -794,29 +795,9 @@ Perl_hv_common(pTHX_ HV *hv, SV *keysv, const char *key, STRLEN klen,
     if (masked_flags & HVhek_ENABLEHVKFLAGS)
         HvHASKFLAGS_on(hv);
 
-    {
-        const HE *counter = HeNEXT(entry);
-
-        xhv->xhv_keys++; /* HvTOTALKEYS(hv)++ */
-        if (!counter) { /* initial entry? */
-        } else if (xhv->xhv_keys > xhv->xhv_max) {
-            /* Use only the old HvKEYS(hv) > HvMAX(hv) condition to limit
-               bucket splits on a rehashed hash, as we're not going to
-               split it again, and if someone is lucky (evil) enough to
-               get all the keys in one list they could exhaust our memory
-               as we repeatedly double the number of buckets on every
-               entry. Linear search feels a less worse thing to do. */
-            hsplit(hv);
-        } else if(!HvREHASH(hv)) {
-            U32 n_links = 1;
-
-            while ((counter = HeNEXT(counter)))
-                n_links++;
-
-            if (n_links > HV_MAX_LENGTH_BEFORE_SPLIT) {
-                hsplit(hv);
-            }
-        }
+    xhv->xhv_keys++; /* HvTOTALKEYS(hv)++ */
+    if ( SHOULD_DO_HSPLIT(xhv) ) {
+        hsplit(hv);
     }
 
     if (return_svp) {
@@ -1192,7 +1173,7 @@ S_hsplit(pTHX_ HV *hv)
 
 
     /* Pick your policy for "hashing isn't working" here: */
-    if (longest_chain <= HV_MAX_LENGTH_BEFORE_SPLIT /* split worked? */
+    if (longest_chain <= HV_MAX_LENGTH_BEFORE_REHASH /* split worked? */
         || HvREHASH(hv)) {
         return;
     }
@@ -2831,8 +2812,8 @@ S_share_hek_flags(pTHX_ const char *str, I32 len, register U32 hash, int flags)
 
         xhv->xhv_keys++; /* HvTOTALKEYS(hv)++ */
         if (!next) { /* initial entry? */
-        } else if (xhv->xhv_keys > xhv->xhv_max /* HvKEYS(hv) > HvMAX(hv) */) {
-            hsplit(PL_strtab);
+        } else if ( SHOULD_DO_HSPLIT(xhv) ) {
+            hsplit(PL_strtab);
         }
     }
 
diff --git a/t/op/hash.t b/t/op/hash.t
index 278bea7..201260a 100644
--- a/t/op/hash.t
+++ b/t/op/hash.t
@@ -39,22 +39,36 @@ use constant THRESHOLD => 14;
 use constant START => "a";
 
 # some initial hash data
-my %h2 = map {$_ => 1} 'a'..'cc';
+my %h2;
+my $counter= "a";
+$h2{$counter++}++ while $counter ne 'cd';
 
 ok (!Internals::HvREHASH(%h2),
     "starting with pre-populated non-pathological hash (rehash flag if off)");
 
 my @keys = get_keys(\%h2);
+my $buckets= buckets(\%h2);
 $h2{$_}++ for @keys;
+$h2{$counter++}++ while buckets(\%h2) == $buckets; # force a split
 ok (Internals::HvREHASH(%h2),
-    scalar(@keys) . " colliding into the same bucket keys are triggering rehash");
+    scalar(@keys) . " colliding into the same bucket keys are triggering rehash after split");
+
+# returns the number of buckets in a hash
+sub buckets {
+    my $hr = shift;
+    my $keys_buckets= scalar(%$hr);
+    if ($keys_buckets=~m!/([0-9]+)\z!) {
+        return 0+$1;
+    } else {
+        return 8;
+    }
+}
 
 sub get_keys {
     my $hr = shift;
 
     # the minimum of bits required to mount the attack on a hash
     my $min_bits = log(THRESHOLD)/log(2);
-
     # if the hash has already been populated with a significant amount
     # of entries the number of mask bits can be higher
     my $keys = scalar keys %$hr;
--
1.7.4.1
diff --git a/meta/recipes-devtools/perl/perl_5.14.2.bb b/meta/recipes-devtools/perl/perl_5.14.2.bb
index 5e17661507..dbfcf4cf1f 100644
--- a/meta/recipes-devtools/perl/perl_5.14.2.bb
+++ b/meta/recipes-devtools/perl/perl_5.14.2.bb
@@ -69,6 +69,7 @@ SRC_URI = "http://www.cpan.org/src/5.0/perl-${PV}.tar.gz \
            file://perl-archlib-exp.patch \
            file://perl-fix-CVE-2012-5195.patch \
            file://cve-2012-6329.patch \
+           file://cve-2013-1667.patch \
            \
            file://config.sh \
            file://config.sh-32 \
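
The upstream commit message embedded above explains the fix: perl now only
considers setting the rehash flag after a normal hsplit(), which is triggered
when the number of keys exceeds the number of buckets, instead of splitting
whenever a single chain grows past 14 entries. The added tests exercise this by
adding ordinary keys until the bucket count grows and then checking the rehash
flag. A hedged, self-contained sketch of that same check, assuming a perl from
the affected 5.8.2-5.16 range (Internals::HvREHASH and the "used/allocated"
scalar form are gone in later perls), might look like:

    #!/usr/bin/env perl
    # Sketch only, not part of the commit: force one hsplit() by adding
    # ordinary keys until the allocated bucket count grows, then confirm
    # the rehash flag stays off for non-pathological keys.
    use strict;
    use warnings;

    sub buckets {
        my $hr = shift;
        return scalar(%$hr) =~ m{/([0-9]+)\z} ? 0 + $1 : 8;
    }

    my %h;
    my $counter = 'a';
    $h{$counter++}++ while $counter ne 'cd';          # same seed keys as the tests
    my $before = buckets(\%h);
    $h{$counter++}++ while buckets(\%h) == $before;   # keep adding until a split
    printf "buckets grew from %d to %d; rehash flag is %s\n",
           $before, buckets(\%h),
           Internals::HvREHASH(%h) ? "on" : "off";    # expected: off

With the patch applied, genuinely colliding keys would only flip HvREHASH on
after such a split, which is what the reworded test descriptions above assert.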