Skip to content

remote: header reuse may lead to panic in http calls #4795

@tonistiigi

Description

@tonistiigi

Got this report of a panic in containerd codepath in Buildkit moby/buildkit#1867

fatal error: concurrent map read and map write
goroutine 12136 [running]:
runtime.throw(0x55935e5d25e1, 0x21)
        /usr/local/go/src/runtime/panic.go:774 +0x74 fp=0xc0014ea538 sp=0xc0014ea508 pc=0x55935cc68e74
runtime.mapaccess1_faststr(0x55935f506cc0, 0xc002ea81b0, 0x55935e5999c7, 0xa, 0x0)
        /usr/local/go/src/runtime/map_faststr.go:21 +0x451 fp=0xc0014ea5a8 sp=0xc0014ea538 pc=0x55935cc4c2e1
net/textproto.MIMEHeader.Get(0xc002ea81b0, 0x55935e5999c7, 0xa, 0x55936094bb30, 0xc0000f2840)
        /usr/local/go/src/net/textproto/header.go:35 +0x5f fp=0xc0014ea5e0 sp=0xc0014ea5a8 pc=0x55935cf3543f
net/http.Header.Get(...)
        /usr/local/go/src/net/http/header.go:47
net/http.(*Request).requiresHTTP1(0xc004414e00, 0x50)
        /usr/local/go/src/net/http/request.go:1440 +0x4a fp=0xc0014ea618 sp=0xc0014ea5e0 pc=0x55935cfaf9ba
net/http.(*Transport).useRegisteredProtocol(...)
        /usr/local/go/src/net/http/transport.go:443
net/http.(*Transport).roundTrip(0x55936094c140, 0xc004414e00, 0x55935e9f07f0, 0x0, 0x0)
        /usr/local/go/src/net/http/transport.go:482 +0xe95 fp=0xc0014ea870 sp=0xc0014ea618 pc=0x55935cfcb215
net/http.(*Transport).RoundTrip(0x55936094c140, 0xc004414e00, 0x55935e9f07f0, 0x0, 0x0)
        /usr/local/go/src/net/http/roundtrip.go:17 +0x37 fp=0xc0014ea8a8 sp=0xc0014ea870 pc=0x55935cfb1197
github.com/docker/docker/vendor/github.com/opentracing-contrib/go-stdlib/nethttp.(*Transport).RoundTrip(0x5593609cd4c0, 0xc004414e00, 0x0, 0x0, 0x150)
        /go/src/github.com/docker/docker/vendor/github.com/opentracing-contrib/go-stdlib/nethttp/client.go:124 +0x530 fp=0xc0014ea9c8 sp=0xc0014ea8a8 pc=0x55935ddb2e00
github.com/docker/docker/vendor/github.com/moby/buildkit/util/tracing.(*Transport).RoundTrip(0x559360936e90, 0xc004414e00, 0x559360936e90, 0x0, 0x0)
        /go/src/github.com/docker/docker/vendor/github.com/moby/buildkit/util/tracing/tracing.go:80 +0x2a4 fp=0xc0014eaa50 sp=0xc0014ea9c8 pc=0x55935ddb7234
net/http.send(0xc004414e00, 0x55935f7cde80, 0x559360936e90, 0x0, 0x0, 0x0, 0xc000011228, 0x8, 0x1, 0x0)
        /usr/local/go/src/net/http/client.go:250 +0x445 fp=0xc0014eabb8 sp=0xc0014eaa50 pc=0x55935cf741b5
net/http.(*Client).send(0x5593609d3dc0, 0xc004414e00, 0x0, 0x0, 0x0, 0xc000011228, 0x0, 0x1, 0x120)
        /usr/local/go/src/net/http/client.go:174 +0xfc fp=0xc0014eac38 sp=0xc0014eabb8 pc=0x55935cf73bcc
net/http.(*Client).do(0x5593609d3dc0, 0xc004414e00, 0x0, 0x0, 0x0)
        /usr/local/go/src/net/http/client.go:641 +0x3d0 fp=0xc0014eae38 sp=0xc0014eac38 pc=0x55935cf752c0
net/http.(*Client).Do(...)
        /usr/local/go/src/net/http/client.go:509
github.com/docker/docker/vendor/golang.org/x/net/context/ctxhttp.Do(0x55935f82ad00, 0xc002f933b0, 0x5593609d3dc0, 0xc004414d00, 0x0, 0x0, 0x55935f82ad00)
        /go/src/github.com/docker/docker/vendor/golang.org/x/net/context/ctxhttp/ctxhttp.go:27 +0xf1 fp=0xc0014eaea8 sp=0xc0014eae38 pc=0x55935dbe4281
github.com/docker/docker/vendor/github.com/containerd/containerd/remotes/docker.(*dockerBase).doRequest(0xc0010e4a80, 0x55935f82ad00, 0xc002ea82d0, 0xc004414d00, 0x0, 0x0, 0x0)
        /go/src/github.com/docker/docker/vendor/github.com/containerd/containerd/remotes/docker/resolver.go:413 +0x483 fp=0xc0014eb460 sp=0xc0014eaea8 pc=0x55935dbf8f23
github.com/docker/docker/vendor/github.com/containerd/containerd/remotes/docker.(*dockerBase).doRequestWithRetries(0xc0010e4a80, 0x55935f82ad00, 0xc002ea82d0, 0xc004414d00, 0xc0000e7540, 0x1, 0x1, 0xc004414d00, 0x0, 0x0)
        /go/src/github.com/docker/docker/vendor/github.com/containerd/containerd/remotes/docker/resolver.go:425 +0x58 fp=0xc0014eb4d8 sp=0xc0014eb460 pc=0x55935dbf9328
github.com/docker/docker/vendor/github.com/containerd/containerd/remotes/docker.(*dockerBase).doRequestWithRetries(0xc0010e4a80, 0x55935f82ad00, 0xc002ea82d0, 0xc004414d00, 0xc0000e7540, 0x1, 0x1, 0x0, 0xc001605300, 0x0)
        /go/src/github.com/docker/docker/vendor/github.com/containerd/containerd/remotes/docker/resolver.go:438 +0x1a7 fp=0xc0014eb550 sp=0xc0014eb4d8 pc=0x55935dbf9477
github.com/docker/docker/vendor/github.com/containerd/containerd/remotes/docker.(*dockerResolver).Resolve(0xc002500a40, 0x55935f82ad00, 0xc002ea82d0, 0xc002fd32d0, 0x61, 0x0, 0x0, 0x0, 0x0, 0x0, ...)
        /go/src/github.com/docker/docker/vendor/github.com/containerd/containerd/remotes/docker/resolver.go:235 +0x556 fp=0xc0014eb8c0 sp=0xc0014eb550 pc=0x55935dbf7116
github.com/docker/docker/builder/builder-next/adapters/containerimage.(*cachedResolver).Resolve(0xc002ea82a0, 0x55935f82ac40, 0xc002500980, 0xc002fd32d0, 0x61, 0x61, 0x0, 0x0, 0x0, 0x0, ...)
        /go/src/github.com/docker/docker/builder/builder-next/adapters/containerimage/pull.go:838 +0xab fp=0xc0014eb9b8 sp=0xc0014eb8c0 pc=0x55935e2e1d8b
github.com/docker/docker/vendor/github.com/moby/buildkit/util/imageutil.Config(0x55935f82ac40, 0xc002500980, 0xc00160a070, 0x61, 0x55935f80cc40, 0xc002ea82a0, 0x7f0240856738, 0xc0004770a0, 0x0, 0x0, ...)
        /go/src/github.com/docker/docker/vendor/github.com/moby/buildkit/util/imageutil/config.go:81 +0xa21 fp=0xc0014ebd20 sp=0xc0014eb9b8 pc=0x55935e2a7e61
github.com/docker/docker/builder/builder-next/adapters/containerimage.(*imageSource).resolveRemote.func1(0x55935f82ac40, 0xc002500980, 0x55935f82ac40, 0xc002500980, 0xc00323a710, 0x2)
        /go/src/github.com/docker/docker/builder/builder-next/adapters/containerimage/pull.go:138 +0x15e fp=0xc0014ebe18 sp=0xc0014ebd20 pc=0x55935e2e2e9e
github.com/docker/docker/vendor/github.com/moby/buildkit/util/flightcontrol.(*call).run(0xc002c4f0e0)
        /go/src/github.com/docker/docker/vendor/github.com/moby/buildkit/util/flightcontrol/flightcontrol.go:121 +0xf0 fp=0xc0014ebef8 sp=0xc0014ebe18 pc=0x55935e23dd40
github.com/docker/docker/vendor/github.com/moby/buildkit/util/flightcontrol.(*call).run-fm()
        /go/src/github.com/docker/docker/vendor/github.com/moby/buildkit/util/flightcontrol/flightcontrol.go:117 +0x2c fp=0xc0014ebf10 sp=0xc0014ebef8 pc=0x55935e23f8dc
sync.(*Once).doSlow(0xc002c4f140, 0xc00323a700)
        /usr/local/go/src/sync/once.go:66 +0xe5 fp=0xc0014ebfb0 sp=0xc0014ebf10 pc=0x55935ccb68f5
sync.(*Once).Do(0xc002c4f140, 0xc00323a700)
        /usr/local/go/src/sync/once.go:57 +0x47 fp=0xc0014ebfd0 sp=0xc0014ebfb0 pc=0x55935ccb6807
runtime.goexit()
        /usr/local/go/src/runtime/asm_amd64.s:1357 +0x1 fp=0xc0014ebfd8 sp=0xc0014ebfd0 pc=0x55935cc998c1
created by github.com/docker/docker/vendor/github.com/moby/buildkit/util/flightcontrol.(*call).wait
        /go/src/github.com/docker/docker/vendor/github.com/moby/buildkit/util/flightcontrol/flightcontrol.go:148 +0x216

As far as I can see, the likely cause is header reuse in the containerd retry loop.

req.Header = r.header
This causes a request that is still in flight in the round tripper to read from the header map while it is concurrently being modified.

A similar report I found via Google, caddyserver/caddy#1045, also hints at a request-reuse issue.

Metadata

Metadata

Assignees

No one assigned

    Labels

    Type

    No type

    Projects

    No projects

    Milestone

    Relationships

    None yet

    Development

    No branches or pull requests

    Issue actions