!139 golang: fix CVE-2022-41715,CVE-2022-2880,CVE-2022-2879
From: @hcnbxx Reviewed-by: @jing-rui Signed-off-by: @jing-rui
This commit is contained in:
commit c6bfc29504
386
0020-release-branch.go1.18-regexp-limit-size-of-parsed-re.patch
Normal file
@@ -0,0 +1,386 @@
From 8b3a5d153b7b255bafd1a82d61505088356d0458 Mon Sep 17 00:00:00 2001
From: Russ Cox <rsc@golang.org>
Date: Wed, 28 Sep 2022 11:18:51 -0400
Subject: [PATCH] regexp: limit size of parsed regexps

Set a 128 MB limit on the amount of space used by []syntax.Inst
in the compiled form corresponding to a given regexp.

Also set a 128 MB limit on the rune storage in the *syntax.Regexp
tree itself.

Thanks to Adam Korczynski (ADA Logics) and OSS-Fuzz for reporting this issue.

Fixes CVE-2022-41715.
Updates #55949.
Fixes #55950.

Change-Id: Ia656baed81564436368cf950e1c5409752f28e1b
Reviewed-on: https://team-review.git.corp.google.com/c/golang/go-private/+/1592136
TryBot-Result: Security TryBots <security-trybots@go-security-trybots.iam.gserviceaccount.com>
Reviewed-by: Damien Neil <dneil@google.com>
Run-TryBot: Roland Shoemaker <bracewell@google.com>
Reviewed-by: Julie Qiu <julieqiu@google.com>
Reviewed-on: https://go-review.googlesource.com/c/go/+/438501
Run-TryBot: Carlos Amedee <carlos@golang.org>
Reviewed-by: Carlos Amedee <carlos@golang.org>
Reviewed-by: Dmitri Shuralyov <dmitshur@google.com>
TryBot-Result: Gopher Robot <gobot@golang.org>
Reviewed-by: Dmitri Shuralyov <dmitshur@golang.org>
---
src/regexp/syntax/parse.go | 222 +++++++++++++++++++++++++++++++-
src/regexp/syntax/parse_test.go | 11 +-
2 files changed, 224 insertions(+), 9 deletions(-)

diff --git a/src/regexp/syntax/parse.go b/src/regexp/syntax/parse.go
index 7b40309..67254d6 100644
--- a/src/regexp/syntax/parse.go
+++ b/src/regexp/syntax/parse.go
@@ -43,6 +43,7 @@ const (
ErrMissingRepeatArgument ErrorCode = "missing argument to repetition operator"
ErrTrailingBackslash ErrorCode = "trailing backslash at end of expression"
ErrUnexpectedParen ErrorCode = "unexpected )"
+ ErrNestingDepth ErrorCode = "expression nests too deeply"
)

func (e ErrorCode) String() string {
@@ -76,13 +77,63 @@ const (
opVerticalBar
)

+// maxHeight is the maximum height of a regexp parse tree.
+// It is somewhat arbitrarily chosen, but the idea is to be large enough
+// that no one will actually hit in real use but at the same time small enough
+// that recursion on the Regexp tree will not hit the 1GB Go stack limit.
+// The maximum amount of stack for a single recursive frame is probably
+// closer to 1kB, so this could potentially be raised, but it seems unlikely
+// that people have regexps nested even this deeply.
+// We ran a test on Google's C++ code base and turned up only
+// a single use case with depth > 100; it had depth 128.
+// Using depth 1000 should be plenty of margin.
+// As an optimization, we don't even bother calculating heights
+// until we've allocated at least maxHeight Regexp structures.
+const maxHeight = 1000
+
+// maxSize is the maximum size of a compiled regexp in Insts.
+// It too is somewhat arbitrarily chosen, but the idea is to be large enough
+// to allow significant regexps while at the same time small enough that
+// the compiled form will not take up too much memory.
+// 128 MB is enough for a 3.3 million Inst structures, which roughly
+// corresponds to a 3.3 MB regexp.
+const (
+ maxSize = 128 << 20 / instSize
+ instSize = 5 * 8 // byte, 2 uint32, slice is 5 64-bit words
+)
+
+// maxRunes is the maximum number of runes allowed in a regexp tree
+// counting the runes in all the nodes.
+// Ignoring character classes p.numRunes is always less than the length of the regexp.
+// Character classes can make it much larger: each \pL adds 1292 runes.
+// 128 MB is enough for 32M runes, which is over 26k \pL instances.
+// Note that repetitions do not make copies of the rune slices,
+// so \pL{1000} is only one rune slice, not 1000.
+// We could keep a cache of character classes we've seen,
+// so that all the \pL we see use the same rune list,
+// but that doesn't remove the problem entirely:
+// consider something like [\pL01234][\pL01235][\pL01236]...[\pL^&*()].
+// And because the Rune slice is exposed directly in the Regexp,
+// there is not an opportunity to change the representation to allow
+// partial sharing between different character classes.
+// So the limit is the best we can do.
+const (
+ maxRunes = 128 << 20 / runeSize
+ runeSize = 4 // rune is int32
+)
+
type parser struct {
flags Flags // parse mode flags
stack []*Regexp // stack of parsed expressions
free *Regexp
numCap int // number of capturing groups seen
wholeRegexp string
- tmpClass []rune // temporary char class work space
+ tmpClass []rune // temporary char class work space
+ numRegexp int // number of regexps allocated
+ numRunes int // number of runes in char classes
+ repeats int64 // product of all repetitions seen
+ height map[*Regexp]int // regexp height, for height limit check
+ size map[*Regexp]int64 // regexp compiled size, for size limit check
}

func (p *parser) newRegexp(op Op) *Regexp {
@@ -92,20 +143,155 @@ func (p *parser) newRegexp(op Op) *Regexp {
*re = Regexp{}
} else {
re = new(Regexp)
+ p.numRegexp++
}
re.Op = op
return re
}

func (p *parser) reuse(re *Regexp) {
+ if p.height != nil {
+ delete(p.height, re)
+ }
re.Sub0[0] = p.free
p.free = re
}

+func (p *parser) checkLimits(re *Regexp) {
+ if p.numRunes > maxRunes {
+ panic(ErrInternalError)
+ }
+ p.checkSize(re)
+ p.checkHeight(re)
+}
+
+func (p *parser) checkSize(re *Regexp) {
+ if p.size == nil {
+ // We haven't started tracking size yet.
+ // Do a relatively cheap check to see if we need to start.
+ // Maintain the product of all the repeats we've seen
+ // and don't track if the total number of regexp nodes
+ // we've seen times the repeat product is in budget.
+ if p.repeats == 0 {
+ p.repeats = 1
+ }
+ if re.Op == OpRepeat {
+ n := re.Max
+ if n == -1 {
+ n = re.Min
+ }
+ if n <= 0 {
+ n = 1
+ }
+ if int64(n) > maxSize/p.repeats {
+ p.repeats = maxSize
+ } else {
+ p.repeats *= int64(n)
+ }
+ }
+ if int64(p.numRegexp) < maxSize/p.repeats {
+ return
+ }
+
+ // We need to start tracking size.
+ // Make the map and belatedly populate it
+ // with info about everything we've constructed so far.
+ p.size = make(map[*Regexp]int64)
+ for _, re := range p.stack {
+ p.checkSize(re)
+ }
+ }
+
+ if p.calcSize(re, true) > maxSize {
+ panic(ErrInternalError)
+ }
+}
+
+func (p *parser) calcSize(re *Regexp, force bool) int64 {
+ if !force {
+ if size, ok := p.size[re]; ok {
+ return size
+ }
+ }
+
+ var size int64
+ switch re.Op {
+ case OpLiteral:
+ size = int64(len(re.Rune))
+ case OpCapture, OpStar:
+ // star can be 1+ or 2+; assume 2 pessimistically
+ size = 2 + p.calcSize(re.Sub[0], false)
+ case OpPlus, OpQuest:
+ size = 1 + p.calcSize(re.Sub[0], false)
+ case OpConcat:
+ for _, sub := range re.Sub {
+ size += p.calcSize(sub, false)
+ }
+ case OpAlternate:
+ for _, sub := range re.Sub {
+ size += p.calcSize(sub, false)
+ }
+ if len(re.Sub) > 1 {
+ size += int64(len(re.Sub)) - 1
+ }
+ case OpRepeat:
+ sub := p.calcSize(re.Sub[0], false)
+ if re.Max == -1 {
+ if re.Min == 0 {
+ size = 2 + sub // x*
+ } else {
+ size = 1 + int64(re.Min)*sub // xxx+
+ }
+ break
+ }
+ // x{2,5} = xx(x(x(x)?)?)?
+ size = int64(re.Max)*sub + int64(re.Max-re.Min)
+ }
+
+ if size < 1 {
+ size = 1
+ }
+ p.size[re] = size
+ return size
+}
+
+func (p *parser) checkHeight(re *Regexp) {
+ if p.numRegexp < maxHeight {
+ return
+ }
+ if p.height == nil {
+ p.height = make(map[*Regexp]int)
+ for _, re := range p.stack {
+ p.checkHeight(re)
+ }
+ }
+ if p.calcHeight(re, true) > maxHeight {
+ panic(ErrNestingDepth)
+ }
+}
+
+func (p *parser) calcHeight(re *Regexp, force bool) int {
+ if !force {
+ if h, ok := p.height[re]; ok {
+ return h
+ }
+ }
+ h := 1
+ for _, sub := range re.Sub {
+ hsub := p.calcHeight(sub, false)
+ if h < 1+hsub {
+ h = 1 + hsub
+ }
+ }
+ p.height[re] = h
+ return h
+}
+
// Parse stack manipulation.

// push pushes the regexp re onto the parse stack and returns the regexp.
func (p *parser) push(re *Regexp) *Regexp {
+ p.numRunes += len(re.Rune)
if re.Op == OpCharClass && len(re.Rune) == 2 && re.Rune[0] == re.Rune[1] {
// Single rune.
if p.maybeConcat(re.Rune[0], p.flags&^FoldCase) {
@@ -137,6 +323,7 @@ func (p *parser) push(re *Regexp) *Regexp {
}

p.stack = append(p.stack, re)
+ p.checkLimits(re)
return re
}

@@ -246,6 +433,7 @@ func (p *parser) repeat(op Op, min, max int, before, after, lastRepeat string) (
re.Sub = re.Sub0[:1]
re.Sub[0] = sub
p.stack[n-1] = re
+ p.checkLimits(re)

if op == OpRepeat && (min >= 2 || max >= 2) && !repeatIsValid(re, 1000) {
return "", &Error{ErrInvalidRepeatSize, before[:len(before)-len(after)]}
@@ -390,12 +578,16 @@ func (p *parser) collapse(subs []*Regexp, op Op) *Regexp {
// frees (passes to p.reuse) any removed *Regexps.
//
// For example,
-// ABC|ABD|AEF|BCX|BCY
+//
+// ABC|ABD|AEF|BCX|BCY
+//
// simplifies by literal prefix extraction to
-// A(B(C|D)|EF)|BC(X|Y)
+//
+// A(B(C|D)|EF)|BC(X|Y)
+//
// which simplifies by character class introduction to
-// A(B[CD]|EF)|BC[XY]
//
+// A(B[CD]|EF)|BC[XY]
func (p *parser) factor(sub []*Regexp) []*Regexp {
if len(sub) < 2 {
return sub
@@ -449,6 +641,7 @@ func (p *parser) factor(sub []*Regexp) []*Regexp {

for j := start; j < i; j++ {
sub[j] = p.removeLeadingString(sub[j], len(str))
+ p.checkLimits(sub[j])
}
suffix := p.collapse(sub[start:i], OpAlternate) // recurse

@@ -506,6 +699,7 @@ func (p *parser) factor(sub []*Regexp) []*Regexp {
for j := start; j < i; j++ {
reuse := j != start // prefix came from sub[start]
sub[j] = p.removeLeadingRegexp(sub[j], reuse)
+ p.checkLimits(sub[j])
}
suffix := p.collapse(sub[start:i], OpAlternate) // recurse

@@ -693,6 +887,23 @@ func literalRegexp(s string, flags Flags) *Regexp {
// Flags, and returns a regular expression parse tree. The syntax is
// described in the top-level comment.
func Parse(s string, flags Flags) (*Regexp, error) {
+ return parse(s, flags)
+}
+
+func parse(s string, flags Flags) (_ *Regexp, err error) {
+ defer func() {
+ switch r := recover(); r {
+ default:
+ panic(r)
+ case nil:
+ // ok
+ case ErrInternalError: // too big
+ err = &Error{Code: ErrInternalError, Expr: s}
+ case ErrNestingDepth:
+ err = &Error{Code: ErrNestingDepth, Expr: s}
+ }
+ }()
+
if flags&Literal != 0 {
// Trivial parser for literal string.
if err := checkUTF8(s); err != nil {
@@ -704,7 +915,6 @@ func Parse(s string, flags Flags) (*Regexp, error) {
// Otherwise, must do real work.
var (
p parser
- err error
c rune
op Op
lastRepeat string
@@ -1733,7 +1943,7 @@ func appendClass(r []rune, x []rune) []rune {
return r
}

-// appendFolded returns the result of appending the case folding of the class x to the class r.
+// appendFoldedClass returns the result of appending the case folding of the class x to the class r.
func appendFoldedClass(r []rune, x []rune) []rune {
for i := 0; i < len(x); i += 2 {
r = appendFoldedRange(r, x[i], x[i+1])
diff --git a/src/regexp/syntax/parse_test.go b/src/regexp/syntax/parse_test.go
index 5581ba1..6044da6 100644
--- a/src/regexp/syntax/parse_test.go
+++ b/src/regexp/syntax/parse_test.go
@@ -479,10 +479,15 @@ var invalidRegexps = []string{
`(?P<>a)`,
`[a-Z]`,
`(?i)[a-Z]`,
- `a{100000}`,
- `a{100000,}`,
- "((((((((((x{2}){2}){2}){2}){2}){2}){2}){2}){2}){2})",
`\Q\E*`,
+ `a{100000}`, // too much repetition
+ `a{100000,}`, // too much repetition
+ "((((((((((x{2}){2}){2}){2}){2}){2}){2}){2}){2}){2})", // too much repetition
+ strings.Repeat("(", 1000) + strings.Repeat(")", 1000), // too deep
+ strings.Repeat("(?:", 1000) + strings.Repeat(")*", 1000), // too deep
+ "(" + strings.Repeat("(xx?)", 1000) + "){1000}", // too long
+ strings.Repeat("(xx?){1000}", 1000), // too long
+ strings.Repeat(`\pL`, 27000), // too many runes
}

var onlyPerl = []string{
--
2.33.0
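Seen from the public regexp API, the patch rejects patterns whose parse tree or compiled form would exceed the new budgets instead of letting them consume memory. A minimal sketch, not part of the patch; the pattern sizes are chosen only to exceed the limits, and the exact error text varies by Go release (this backport maps the size overflow to an internal-error code, later releases use a dedicated "expression too large" error):

package main

import (
	"fmt"
	"regexp"
	"strings"
)

func main() {
	// Each \pL expands to roughly 1292 runes, so ~30000 of them exceed
	// the 128 MB rune budget and Compile returns an error on patched Go.
	tooManyRunes := strings.Repeat(`\pL`, 30000)
	if _, err := regexp.Compile(tooManyRunes); err != nil {
		fmt.Println("rejected:", err)
	}

	// Nesting deeper than maxHeight (1000) is reported as
	// "expression nests too deeply" on patched Go.
	tooDeep := strings.Repeat("(", 2000) + strings.Repeat(")", 2000)
	if _, err := regexp.Compile(tooDeep); err != nil {
		fmt.Println("rejected:", err)
	}
}

On an unpatched Go both calls succeed, which is exactly the memory-consumption behavior the limits are meant to cut off.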
174
0021-release-branch.go1.18-net-http-httputil-avoid-query-.patch
Normal file
@@ -0,0 +1,174 @@
From 51a477dc4f1130d53e66cd2003de0bac40e5e2be Mon Sep 17 00:00:00 2001
From: Damien Neil <dneil@google.com>
Date: Thu, 22 Sep 2022 13:32:00 -0700
Subject: [PATCH 2/3] [release-branch.go1.18] net/http/httputil: avoid query
 parameter smuggling

Query parameter smuggling occurs when a proxy's interpretation
of query parameters differs from that of a downstream server.
Change ReverseProxy to avoid forwarding ignored query parameters.

Remove unparsable query parameters from the outbound request

* if req.Form != nil after calling ReverseProxy.Director; and
* before calling ReverseProxy.Rewrite.

This change preserves the existing behavior of forwarding the
raw query untouched if a Director hook does not parse the query
by calling Request.ParseForm (possibly indirectly).

Fixes #55842
For #54663
For CVE-2022-2880

Change-Id: If1621f6b0e73a49d79059dae9e6b256e0ff18ca9
Reviewed-on: https://go-review.googlesource.com/c/go/+/432976
Reviewed-by: Roland Shoemaker <roland@golang.org>
Reviewed-by: Brad Fitzpatrick <bradfitz@golang.org>
TryBot-Result: Gopher Robot <gobot@golang.org>
Run-TryBot: Damien Neil <dneil@google.com>
(cherry picked from commit 7c84234142149bd24a4096c6cab691d3593f3431)
Reviewed-on: https://go-review.googlesource.com/c/go/+/433695
Reviewed-by: Dmitri Shuralyov <dmitshur@golang.org>
Reviewed-by: Dmitri Shuralyov <dmitshur@google.com>
---
src/net/http/httputil/reverseproxy.go | 36 +++++++++++
src/net/http/httputil/reverseproxy_test.go | 74 ++++++++++++++++++++++
2 files changed, 110 insertions(+)

diff --git a/src/net/http/httputil/reverseproxy.go b/src/net/http/httputil/reverseproxy.go
index 8b63368386..c76eec6987 100644
--- a/src/net/http/httputil/reverseproxy.go
+++ b/src/net/http/httputil/reverseproxy.go
@@ -249,6 +249,9 @@ func (p *ReverseProxy) ServeHTTP(rw http.ResponseWriter, req *http.Request) {
}

p.Director(outreq)
+ if outreq.Form != nil {
+ outreq.URL.RawQuery = cleanQueryParams(outreq.URL.RawQuery)
+ }
outreq.Close = false

reqUpType := upgradeType(outreq.Header)
@@ -628,3 +631,36 @@ func (c switchProtocolCopier) copyToBackend(errc chan<- error) {
_, err := io.Copy(c.backend, c.user)
errc <- err
}
+
+func cleanQueryParams(s string) string {
+ reencode := func(s string) string {
+ v, _ := url.ParseQuery(s)
+ return v.Encode()
+ }
+ for i := 0; i < len(s); {
+ switch s[i] {
+ case ';':
+ return reencode(s)
+ case '%':
+ if i+2 >= len(s) || !ishex(s[i+1]) || !ishex(s[i+2]) {
+ return reencode(s)
+ }
+ i += 3
+ default:
+ i++
+ }
+ }
+ return s
+}
+
+func ishex(c byte) bool {
+ switch {
+ case '0' <= c && c <= '9':
+ return true
+ case 'a' <= c && c <= 'f':
+ return true
+ case 'A' <= c && c <= 'F':
+ return true
+ }
+ return false
+}
diff --git a/src/net/http/httputil/reverseproxy_test.go b/src/net/http/httputil/reverseproxy_test.go
index 4b6ad77a29..8c0a4f136b 100644
--- a/src/net/http/httputil/reverseproxy_test.go
+++ b/src/net/http/httputil/reverseproxy_test.go
@@ -1517,3 +1517,77 @@ func TestJoinURLPath(t *testing.T) {
}
}
}
+
+const (
+ testWantsCleanQuery = true
+ testWantsRawQuery = false
+)
+
+func TestReverseProxyQueryParameterSmugglingDirectorDoesNotParseForm(t *testing.T) {
+ testReverseProxyQueryParameterSmuggling(t, testWantsRawQuery, func(u *url.URL) *ReverseProxy {
+ proxyHandler := NewSingleHostReverseProxy(u)
+ oldDirector := proxyHandler.Director
+ proxyHandler.Director = func(r *http.Request) {
+ oldDirector(r)
+ }
+ return proxyHandler
+ })
+}
+
+func TestReverseProxyQueryParameterSmugglingDirectorParsesForm(t *testing.T) {
+ testReverseProxyQueryParameterSmuggling(t, testWantsCleanQuery, func(u *url.URL) *ReverseProxy {
+ proxyHandler := NewSingleHostReverseProxy(u)
+ oldDirector := proxyHandler.Director
+ proxyHandler.Director = func(r *http.Request) {
+ // Parsing the form causes ReverseProxy to remove unparsable
+ // query parameters before forwarding.
+ r.FormValue("a")
+ oldDirector(r)
+ }
+ return proxyHandler
+ })
+}
+
+func testReverseProxyQueryParameterSmuggling(t *testing.T, wantCleanQuery bool, newProxy func(*url.URL) *ReverseProxy) {
+ const content = "response_content"
+ backend := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ w.Write([]byte(r.URL.RawQuery))
+ }))
+ defer backend.Close()
+ backendURL, err := url.Parse(backend.URL)
+ if err != nil {
+ t.Fatal(err)
+ }
+ proxyHandler := newProxy(backendURL)
+ frontend := httptest.NewServer(proxyHandler)
+ defer frontend.Close()
+
+ // Don't spam output with logs of queries containing semicolons.
+ backend.Config.ErrorLog = log.New(io.Discard, "", 0)
+ frontend.Config.ErrorLog = log.New(io.Discard, "", 0)
+
+ for _, test := range []struct {
+ rawQuery string
+ cleanQuery string
+ }{{
+ rawQuery: "a=1&a=2;b=3",
+ cleanQuery: "a=1",
+ }, {
+ rawQuery: "a=1&a=%zz&b=3",
+ cleanQuery: "a=1&b=3",
+ }} {
+ res, err := frontend.Client().Get(frontend.URL + "?" + test.rawQuery)
+ if err != nil {
+ t.Fatalf("Get: %v", err)
+ }
+ defer res.Body.Close()
+ body, _ := io.ReadAll(res.Body)
+ wantQuery := test.rawQuery
+ if wantCleanQuery {
+ wantQuery = test.cleanQuery
+ }
+ if got, want := string(body), wantQuery; got != want {
+ t.Errorf("proxy forwarded raw query %q as %q, want %q", test.rawQuery, got, want)
+ }
+ }
+}
--
2.33.0
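For illustration only, not part of the patch: a small sketch of the behavior the new tests exercise. Once a Director hook parses the form, the proxy re-encodes the query and drops parameters it could not parse, so a backend that would treat ";" as a separator no longer sees the ambiguous query:

package main

import (
	"fmt"
	"io"
	"log"
	"net/http"
	"net/http/httptest"
	"net/http/httputil"
	"net/url"
)

func main() {
	// Backend that echoes the raw query it receives.
	backend := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprint(w, r.URL.RawQuery)
	}))
	defer backend.Close()

	target, _ := url.Parse(backend.URL)
	proxy := httputil.NewSingleHostReverseProxy(target)
	oldDirector := proxy.Director
	proxy.Director = func(r *http.Request) {
		r.FormValue("a") // parsing the form opts the request in to query cleaning
		oldDirector(r)
	}

	frontend := httptest.NewServer(proxy)
	defer frontend.Close()
	// Silence the server's warning about semicolons in the query.
	frontend.Config.ErrorLog = log.New(io.Discard, "", 0)

	resp, err := frontend.Client().Get(frontend.URL + "/?a=1&a=2;b=3")
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(resp.Body)
	// With the patch the backend sees "a=1"; without it, the raw
	// "a=1&a=2;b=3" is forwarded untouched.
	fmt.Println("backend saw:", string(body))
}

A Director that never parses the form keeps the old pass-through behavior, which is why the change is safe for proxies that deliberately forward the query verbatim.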
186
0022-release-branch.go1.18-archive-tar-limit-size-of-head.patch
Normal file
@@ -0,0 +1,186 @@
From 7dd44b287830fbb2256aceac4a36756b955c0279 Mon Sep 17 00:00:00 2001
From: Damien Neil <dneil@google.com>
Date: Fri, 2 Sep 2022 20:45:18 -0700
Subject: [PATCH] archive/tar: limit size of headers

Set a 1MiB limit on special file blocks (PAX headers, GNU long names,
GNU link names), to avoid reading arbitrarily large amounts of data
into memory.

Thanks to Adam Korczynski (ADA Logics) and OSS-Fuzz for reporting
this issue.

Fixes CVE-2022-2879
Updates #54853
Fixes #55925

Change-Id: I85136d6ff1e0af101a112190e027987ab4335680
Reviewed-on: https://team-review.git.corp.google.com/c/golang/go-private/+/1565555
Reviewed-by: Tatiana Bradley <tatianabradley@google.com>
Run-TryBot: Roland Shoemaker <bracewell@google.com>
Reviewed-by: Roland Shoemaker <bracewell@google.com>
(cherry picked from commit 6ee768cef6b82adf7a90dcf367a1699ef694f3b2)
Reviewed-on: https://team-review.git.corp.google.com/c/golang/go-private/+/1590622
Reviewed-by: Damien Neil <dneil@google.com>
Reviewed-by: Julie Qiu <julieqiu@google.com>
Reviewed-on: https://go-review.googlesource.com/c/go/+/438500
Reviewed-by: Dmitri Shuralyov <dmitshur@golang.org>
Reviewed-by: Carlos Amedee <carlos@golang.org>
Reviewed-by: Dmitri Shuralyov <dmitshur@google.com>
Run-TryBot: Carlos Amedee <carlos@golang.org>
TryBot-Result: Gopher Robot <gobot@golang.org>
---
src/archive/tar/format.go | 4 ++++
src/archive/tar/reader.go | 14 ++++++++++++--
src/archive/tar/reader_test.go | 11 ++++++++++-
src/archive/tar/writer.go | 3 +++
src/archive/tar/writer_test.go | 27 +++++++++++++++++++++++++++
5 files changed, 56 insertions(+), 3 deletions(-)

diff --git a/src/archive/tar/format.go b/src/archive/tar/format.go
index cfe24a5..6642364 100644
--- a/src/archive/tar/format.go
+++ b/src/archive/tar/format.go
@@ -143,6 +143,10 @@ const (
blockSize = 512 // Size of each block in a tar stream
nameSize = 100 // Max length of the name field in USTAR format
prefixSize = 155 // Max length of the prefix field in USTAR format
+
+ // Max length of a special file (PAX header, GNU long name or link).
+ // This matches the limit used by libarchive.
+ maxSpecialFileSize = 1 << 20
)

// blockPadding computes the number of bytes needed to pad offset up to the
diff --git a/src/archive/tar/reader.go b/src/archive/tar/reader.go
index 1b1d5b4..f645af8 100644
--- a/src/archive/tar/reader.go
+++ b/src/archive/tar/reader.go
@@ -103,7 +103,7 @@ func (tr *Reader) next() (*Header, error) {
continue // This is a meta header affecting the next header
case TypeGNULongName, TypeGNULongLink:
format.mayOnlyBe(FormatGNU)
- realname, err := io.ReadAll(tr)
+ realname, err := readSpecialFile(tr)
if err != nil {
return nil, err
}
@@ -293,7 +293,7 @@ func mergePAX(hdr *Header, paxHdrs map[string]string) (err error) {
// parsePAX parses PAX headers.
// If an extended header (type 'x') is invalid, ErrHeader is returned
func parsePAX(r io.Reader) (map[string]string, error) {
- buf, err := io.ReadAll(r)
+ buf, err := readSpecialFile(r)
if err != nil {
return nil, err
}
@@ -826,6 +826,16 @@ func tryReadFull(r io.Reader, b []byte) (n int, err error) {
return n, err
}

+// readSpecialFile is like io.ReadAll except it returns
+// ErrFieldTooLong if more than maxSpecialFileSize is read.
+func readSpecialFile(r io.Reader) ([]byte, error) {
+ buf, err := io.ReadAll(io.LimitReader(r, maxSpecialFileSize+1))
+ if len(buf) > maxSpecialFileSize {
+ return nil, ErrFieldTooLong
+ }
+ return buf, err
+}
+
// discard skips n bytes in r, reporting an error if unable to do so.
func discard(r io.Reader, n int64) error {
// If possible, Seek to the last byte before the end of the data section.
diff --git a/src/archive/tar/reader_test.go b/src/archive/tar/reader_test.go
index 789ddc1..5a644a4 100644
--- a/src/archive/tar/reader_test.go
+++ b/src/archive/tar/reader_test.go
@@ -6,6 +6,7 @@ package tar

import (
"bytes"
+ "compress/bzip2"
"crypto/md5"
"errors"
"fmt"
@@ -243,6 +244,9 @@ func TestReader(t *testing.T) {
}, {
file: "testdata/pax-bad-hdr-file.tar",
err: ErrHeader,
+ }, {
+ file: "testdata/pax-bad-hdr-large.tar.bz2",
+ err: ErrFieldTooLong,
}, {
file: "testdata/pax-bad-mtime-file.tar",
err: ErrHeader,
@@ -625,9 +629,14 @@ func TestReader(t *testing.T) {
}
defer f.Close()

+ var fr io.Reader = f
+ if strings.HasSuffix(v.file, ".bz2") {
+ fr = bzip2.NewReader(fr)
+ }
+
// Capture all headers and checksums.
var (
- tr = NewReader(f)
+ tr = NewReader(fr)
hdrs []*Header
chksums []string
rdbuf = make([]byte, 8)
diff --git a/src/archive/tar/writer.go b/src/archive/tar/writer.go
index e80498d..893eac0 100644
--- a/src/archive/tar/writer.go
+++ b/src/archive/tar/writer.go
@@ -199,6 +199,9 @@ func (tw *Writer) writePAXHeader(hdr *Header, paxHdrs map[string]string) error {
flag = TypeXHeader
}
data := buf.String()
+ if len(data) > maxSpecialFileSize {
+ return ErrFieldTooLong
+ }
if err := tw.writeRawFile(name, data, flag, FormatPAX); err != nil || isGlobal {
return err // Global headers return here
}
diff --git a/src/archive/tar/writer_test.go b/src/archive/tar/writer_test.go
index a00f02d..4e709e5 100644
--- a/src/archive/tar/writer_test.go
+++ b/src/archive/tar/writer_test.go
@@ -1006,6 +1006,33 @@ func TestIssue12594(t *testing.T) {
}
}

+func TestWriteLongHeader(t *testing.T) {
+ for _, test := range []struct {
+ name string
+ h *Header
+ }{{
+ name: "name too long",
+ h: &Header{Name: strings.Repeat("a", maxSpecialFileSize)},
+ }, {
+ name: "linkname too long",
+ h: &Header{Linkname: strings.Repeat("a", maxSpecialFileSize)},
+ }, {
+ name: "uname too long",
+ h: &Header{Uname: strings.Repeat("a", maxSpecialFileSize)},
+ }, {
+ name: "gname too long",
+ h: &Header{Gname: strings.Repeat("a", maxSpecialFileSize)},
+ }, {
+ name: "PAX header too long",
+ h: &Header{PAXRecords: map[string]string{"GOLANG.x": strings.Repeat("a", maxSpecialFileSize)}},
+ }} {
+ w := NewWriter(io.Discard)
+ if err := w.WriteHeader(test.h); err != ErrFieldTooLong {
+ t.Errorf("%v: w.WriteHeader() = %v, want ErrFieldTooLong", test.name, err)
+ }
+ }
+}
+
// testNonEmptyWriter wraps an io.Writer and ensures that
// Write is never called with an empty buffer.
type testNonEmptyWriter struct{ io.Writer }
--
2.33.0
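A brief sketch, not part of the patch, of how the 1 MiB cap surfaces through the public archive/tar API: a header field that only fits in a PAX (or GNU long-name) record larger than maxSpecialFileSize is rejected with ErrFieldTooLong on write, and the reader applies the same cap when it consumes PAX and GNU long-name blocks instead of buffering them whole:

package main

import (
	"archive/tar"
	"errors"
	"fmt"
	"io"
	"strings"
)

func main() {
	// A 2 MiB file name cannot fit in the fixed USTAR name field, so the
	// writer falls back to a PAX record, which the patch caps at 1 MiB.
	w := tar.NewWriter(io.Discard)
	hdr := &tar.Header{Name: strings.Repeat("a", 2<<20), Mode: 0600}
	err := w.WriteHeader(hdr)
	fmt.Println(errors.Is(err, tar.ErrFieldTooLong)) // true with the patch applied
}

On an unpatched Go the same call succeeds and emits an arbitrarily large header block, which is the memory-exhaustion vector on the reading side.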
13
golang.spec
@@ -62,7 +62,7 @@

Name: golang
Version: 1.17.3
-Release: 9
+Release: 10
Summary: The Go Programming Language
License: BSD and Public Domain
URL: https://golang.org/
@@ -168,6 +168,9 @@ Patch6016: 0016-release-branch.go1.17-math-big-check-buffer-lengths-.patch
Patch6017: 0017-path-filepath-do-not-remove-prefix-.-when-following-.patch
Patch6018: 0018-release-branch.go1.17-syscall-check-correct-group-in.patch
Patch6019: 0019-release-branch.go1.18-net-http-update-bundled-golang.patch
+Patch6020: 0020-release-branch.go1.18-regexp-limit-size-of-parsed-re.patch
+Patch6021: 0021-release-branch.go1.18-net-http-httputil-avoid-query-.patch
+Patch6022: 0022-release-branch.go1.18-archive-tar-limit-size-of-head.patch

ExclusiveArch: %{golang_arches}

@@ -390,7 +393,7 @@ fi
%exclude %{goroot}/doc/
%exclude %{goroot}/misc/
%exclude %{goroot}/test/
-%exclude %goroot}/lib/
+%exclude %{goroot}/lib/
%{goroot}/*
%dir %{gopath}
%dir %{gopath}/src
@@ -406,6 +409,12 @@ fi
%files devel -f go-tests.list -f go-misc.list -f go-src.list

%changelog
+* Mon Oct 10 2022 hanchao <hanchao47@huawei.com> - 1.17.3-10
+- Type:CVE
+- CVE:CVE-2022-41715,CVE-2022-2880,CVE-2022-2879
+- SUG:NA
+- DESC: fix CVE-2022-41715,CVE-2022-2880,CVE-2022-2879
+
* Thu Sep 15 2022 hanchao <hanchao47@huawei.com> - 1.17.3-9
- Type:CVE
- CVE:CVE-2022-27664