diff --git a/.github/workflows/go.yml b/.github/workflows/go.yml
index 433366052ea..965d70bd965 100644
--- a/.github/workflows/go.yml
+++ b/.github/workflows/go.yml
@@ -19,9 +19,6 @@ jobs:
         with:
           go-version: 1.16
 
-      - name: Install llvm
-        run: sudo apt-get install llvm
-
       - name: Build UI
         run: make ui
 
@@ -49,9 +46,6 @@ jobs:
         with:
           go-version: 1.16
 
-      - name: Install llvm
-        run: sudo apt-get install llvm
-
       - name: Build UI
        run: make ui
 
diff --git a/.golangci.yml b/.golangci.yml
index 411010af726..f5edd089edc 100644
--- a/.golangci.yml
+++ b/.golangci.yml
@@ -2,6 +2,7 @@ run:
   deadline: 5m
   skip-dirs:
     - internal/pprof
+    - internal/go
 linters-settings:
   errcheck:
     exclude: ./.errcheck_excludes.txt
diff --git a/Dockerfile b/Dockerfile
index b1f5707e7f7..34bd120d2ee 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -38,7 +38,6 @@ COPY --chown=nobody:nogroup go.mod go.sum ./
 COPY --chown=nobody:nogroup ./cmd/parca ./cmd/parca
 COPY --chown=nobody:nogroup ./pkg ./pkg
 COPY --chown=nobody:nogroup ./gen ./gen
-COPY --chown=nobody:nogroup ./internal ./internal
 COPY --chown=nobody:nogroup ./proto ./proto
 COPY --chown=nobody:nogroup ./ui/ui.go ./ui/ui.go
 COPY --chown=nobody:nogroup --from=ui-builder /app/packages/app/web/dist ./ui/packages/app/web/dist
@@ -48,8 +47,6 @@ RUN go install github.com/grpc-ecosystem/grpc-health-probe@latest
 
 # this image is what docker.io/alpine:3.14.1 on August 13 2021
 FROM docker.io/alpine@sha256:be9bdc0ef8e96dbc428dc189b31e2e3b05523d96d12ed627c37aa2936653258c
-RUN apk add --no-cache llvm
-
 USER nobody
 COPY --chown=0:0 --from=builder /app/parca /parca
 
diff --git a/Dockerfile.dev b/Dockerfile.dev
index 3e868b1b7a9..1791e990996 100644
--- a/Dockerfile.dev
+++ b/Dockerfile.dev
@@ -32,14 +32,11 @@ COPY ./cmd /app/cmd
 COPY ./pkg /app/pkg
 COPY ./proto /app/proto
 COPY ./gen /app/gen
-COPY ./internal /app/internal
 
 RUN CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -trimpath -gcflags="all=-N -l" -o parca ./cmd/parca
 
 FROM golang:1.16-alpine
 
-RUN apk add --no-cache llvm
-
 COPY --from=builder /go/bin/dlv /
 COPY --from=builder /go/bin/grpc-health-probe /
 COPY --from=builder /app/parca /parca
diff --git a/Dockerfile.go.dev b/Dockerfile.go.dev
index ee1325a0b30..210262d8363 100644
--- a/Dockerfile.go.dev
+++ b/Dockerfile.go.dev
@@ -22,14 +22,11 @@ COPY ./cmd /app/cmd
 COPY ./pkg /app/pkg
 COPY ./proto /app/proto
 COPY ./gen /app/gen
-COPY ./internal /app/internal
 
 RUN CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -gcflags="all=-N -l" -o parca ./cmd/parca
 
 FROM golang:1.16-alpine
 
-RUN apk add --no-cache llvm
-
 COPY --from=builder /go/bin/dlv /
 COPY --from=builder /go/bin/grpc-health-probe /
 COPY --from=builder /app/parca /parca
diff --git a/Dockerfile.release b/Dockerfile.release
index 92073f2ec73..e027b8cd28f 100644
--- a/Dockerfile.release
+++ b/Dockerfile.release
@@ -1,6 +1,5 @@
 # this image is what docker.io/alpine:3.14.1 on August 13 2021
 FROM docker.io/alpine@sha256:be9bdc0ef8e96dbc428dc189b31e2e3b05523d96d12ed627c37aa2936653258c
-RUN apk add --no-cache llvm
 
 USER nobody
 COPY --chown=0:0 parca.yaml /
diff --git a/Makefile b/Makefile
index a7aafb4d8ab..f21432bb9b9 100644
--- a/Makefile
+++ b/Makefile
@@ -23,7 +23,7 @@ clean:
 	rm -rf ui/packages/app/web/.next
 
 .PHONY: go/deps
-go/deps: internal/pprof
+go/deps:
 	go mod tidy
 
 .PHONY: go/bin
@@ -36,7 +36,7 @@ format: go/fmt check-license
 
 .PHONY: go/fmt
 go/fmt:
-	go fmt `go list ./... | grep -v ./internal/pprof`
+	go fmt `go list ./...`
 
 .PHONY: check-license
 check-license:
@@ -44,7 +44,7 @@ check-license:
 
 .PHONY: go/test
 go/test:
-	go test -v `go list ./... | grep -v ./internal/pprof`
+	go test -v `go list ./...`
 
 UI_FILES ?= $(shell find ./ui -name "*" -not -path "./ui/lib/node_modules/*" -not -path "./ui/node_modules/*" -not -path "./ui/packages/app/web/node_modules/*" -not -path "./ui/packages/app/web/dist/*" -not -path "./ui/packages/app/web/.next/*")
 ui/packages/app/web/dist: $(UI_FILES)
@@ -68,7 +68,7 @@ proto/generate:
 proto/vendor:
 	buf mod update
 	mkdir -p proto/google/pprof
-	curl https://raw.githubusercontent.com/google/pprof/master/proto/profile.proto > proto/google/pprof/profile.proto
+	curl https://raw.githubusercontent.com/google/pprof/master/proto/profile.proto > proto/google/pprof/profile.proto
 
 .PHONY: container
 container:
@@ -98,11 +98,3 @@ dev/up: deploy/manifests
 .PHONY: dev/down
 dev/down:
 	source ./scripts/local-dev.sh && down
-
-internal/pprof:
-	rm -rf internal/pprof
-	rm -rf tmp/pprof
-	git clone https://github.com/google/pprof tmp/pprof
-	cp -r tmp/pprof/internal internal/pprof
-	find internal/pprof -type f -exec sed -i 's/github.com\/google\/pprof\/internal/github.com\/parca-dev\/parca\/internal\/pprof/g' {} +
-	rm -rf tmp/pprof
diff --git a/Tiltfile b/Tiltfile
index 3ff92d03133..1050ca07cdf 100644
--- a/Tiltfile
+++ b/Tiltfile
@@ -5,13 +5,13 @@ docker_prune_settings(num_builds=5)
 ## API Only
 docker_build('parca.io/parca/parca:dev', '.',
     dockerfile='Dockerfile.go.dev',
-    only=['./cmd', './pkg', './internal', './proto', './gen', './go.mod', './go.sum', 'parca.yaml'],
+    only=['./cmd', './pkg', './proto', './gen', './go.mod', './go.sum', 'parca.yaml'],
 )
 
 ## All-in-one
 # docker_build('parca.io/parca/parca:dev', '.',
 #     dockerfile='Dockerfile.dev',
-#     only=['./cmd', './pkg', './internal', './proto', './gen', './ui', './go.mod', './go.sum', 'parca.yaml'],
+#     only=['./cmd', './pkg', './proto', './gen', './ui', './go.mod', './go.sum', 'parca.yaml'],
 # )
 
 k8s_yaml('deploy/tilt/parca-server-deployment.yaml')
diff --git a/deploy/tilt/parca-server-deployment.yaml b/deploy/tilt/parca-server-deployment.yaml
index f9e4e10416d..46adae826e7 100644
--- a/deploy/tilt/parca-server-deployment.yaml
+++ b/deploy/tilt/parca-server-deployment.yaml
@@ -54,6 +54,7 @@ spec:
         - mountPath: /var/parca
           name: parca-config
       nodeSelector:
+        kubernetes.io/arch: amd64
         kubernetes.io/os: linux
      securityContext:
         fsGroup: 65534
diff --git a/go.mod b/go.mod
index 9710e596e52..9b8d88d4d01 100644
--- a/go.mod
+++ b/go.mod
@@ -10,6 +10,7 @@ require (
 	github.com/dgraph-io/sroar v0.0.0-20210915181338-8dc690a08d84
 	github.com/gin-gonic/gin v1.7.0 // indirect
 	github.com/go-chi/cors v1.2.0
+	github.com/go-delve/delve v1.7.2
 	github.com/go-kit/log v0.1.0
 	github.com/go-ozzo/ozzo-validation/v4 v4.3.0
 	github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1
diff --git a/go.sum b/go.sum
index e7b5856fea5..942e5d01a8d 100644
--- a/go.sum
+++ b/go.sum
@@ -155,6 +155,7 @@ github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kd
 github.com/apache/arrow/go/arrow v0.0.0-20191024131854-af6fa24be0db/go.mod h1:VTxUBvSJ3s3eHAg65PNgrsn5BtqCRPdmyXh6rAfdxN0=
 github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ=
 github.com/apache/thrift v0.13.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ=
+github.com/aquasecurity/libbpfgo v0.1.2-0.20210708203834-4928d36fafac/go.mod h1:/+clceXE103FaXvVTIY2HAkQjxNtkra4DRWvZYr2SKw=
 github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o=
 github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod
h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= github.com/armon/go-metrics v0.3.0/go.mod h1:zXjbSimjXTd7vOpY8B0/2LpvNvDoXBuplAD+gJD3GYs= @@ -284,6 +285,7 @@ github.com/cortexproject/cortex v1.7.1-0.20210316085356-3fedc1108a49/go.mod h1:/ github.com/cortexproject/cortex v1.8.1-0.20210422151339-cf1c444e0905/go.mod h1:xxm4/CLvTmDxwE7yXwtClR4dIvkG4S09o5DygPOgc1U= github.com/cortexproject/cortex v1.10.1-0.20210820081236-70dddb6b70b8 h1:3wtJ9PaFNIpBeSTjjhF7l4qTbvZf0BEX47TEAqqn6G0= github.com/cortexproject/cortex v1.10.1-0.20210820081236-70dddb6b70b8/go.mod h1:F8PX2IHaeFvqCci46Y+fhskJkCtLvh0OqCKFtWyjP7w= +github.com/cosiner/argv v0.1.0/go.mod h1:EusR6TucWKX+zFgtdUsKT2Cvg45K5rtpCcWz4hK06d8= github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY= @@ -306,6 +308,7 @@ github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSs github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/denisenkom/go-mssqldb v0.0.0-20190515213511-eb9f6a1743f3/go.mod h1:zAg7JM8CkOJ43xKXIj7eRO9kmWm/TW578qo+oDO6tuM= +github.com/derekparker/trie v0.0.0-20200317170641-1fdf38b7b0e9/go.mod h1:D6ICZm05D9VN1n/8iOtBxLpXtoGp6HDFUJ1RNVieOSE= github.com/desertbit/timer v0.0.0-20180107155436-c41aec40b27f h1:U5y3Y5UE0w7amNe7Z5G/twsBW0KEalRQXZzf8ufSh9I= github.com/desertbit/timer v0.0.0-20180107155436-c41aec40b27f/go.mod h1:xH/i4TFMt8koVQZ6WFms69WAsDWr2XsYL3Hkl7jkoLE= github.com/dgraph-io/sroar v0.0.0-20210915181338-8dc690a08d84 h1:vtPkk8vX4OzWs698+GIJ86c1MSCOPX08Uh3CpywWmt0= @@ -403,6 +406,8 @@ github.com/glycerine/go-unsnap-stream v0.0.0-20181221182339-f9677308dec2/go.mod github.com/glycerine/goconvey v0.0.0-20190410193231-58a59202ab31/go.mod h1:Ogl1Tioa0aV7gstGFO7KhffUsb9M4ydbEbbxpcEDc24= github.com/go-chi/cors v1.2.0 h1:tV1g1XENQ8ku4Bq3K9ub2AtgG+p16SmzeMSGTwrOKdE= github.com/go-chi/cors v1.2.0/go.mod h1:sSbTewc+6wYHBBCW7ytsFSn836hqM7JxpglAy2Vzc58= +github.com/go-delve/delve v1.7.2 h1:QTDJlgx9OwUVYVm7xthyf2XHKrZcTQu3wkRbovktidM= +github.com/go-delve/delve v1.7.2/go.mod h1:CHdOd8kuHlQxtBJr1HmJX5h+KmmWd/7Lk5d+D1zHn4E= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= @@ -677,6 +682,7 @@ github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.6 h1:BKbKCqvP6I+rmFHt06ZmyQtvB8xAkWdhFyr0ZUNZcxQ= github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-dap v0.5.1-0.20210713061233-c91b005e3987/go.mod h1:5q8aYQFnHOAZEMP+6vmq25HKYAEwE+LF5yh7JKrrhSQ= github.com/google/go-github v17.0.0+incompatible/go.mod h1:zLgOLi98H3fifZn+44m+umXrS52loVEgC2AApnigrVQ= github.com/google/go-querystring v1.0.0 h1:Xkwi/a1rcvNg1PPYe5vI8GbeBY/jrVuDX5ASuANWTrk= github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck= @@ -1128,6 +1134,7 @@ 
github.com/pelletier/go-toml v1.4.0/go.mod h1:PN7xzY2wHTK0K9p34ErDQMlFxa51Fk0OUr github.com/pelletier/go-toml v1.7.0/go.mod h1:vwGMzjaWMwyfHwgIBhI2YUM4fB6nL6lVAvS1LBMMhTE= github.com/performancecopilot/speed v3.0.0+incompatible/go.mod h1:/CLtqpZ5gBg1M9iaPbIdPPGyKcA8hKdoy6hAWba7Yac= github.com/peterbourgon/diskv v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU= +github.com/peterh/liner v0.0.0-20170317030525-88609521dc4b/go.mod h1:xIteQHvHuaLYG9IFj6mSxM0fCKrs34IrEQUhOYuGPHc= github.com/peterh/liner v1.0.1-0.20180619022028-8c1271fcf47f/go.mod h1:xIteQHvHuaLYG9IFj6mSxM0fCKrs34IrEQUhOYuGPHc= github.com/philhofer/fwd v1.0.0/go.mod h1:gk3iGcWd9+svBvR0sR+KPcfE+RNWozjowpeBVG3ZVNU= github.com/pierrec/lz4 v1.0.2-0.20190131084431-473cd7ce01a1/go.mod h1:3/3N9NVKO0jef7pBehbT1qWhCMrIgbYNnFAZCqQ5LRc= @@ -1483,6 +1490,7 @@ go.opentelemetry.io/otel/trace v1.0.1/go.mod h1:5g4i4fKLaX2BQpSBsxw8YYcgKpMMSW3x go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= go.opentelemetry.io/proto/otlp v0.9.0 h1:C0g6TWmQYvjKRnljRULLWUVJGy8Uvu0NEL/5frY2/t4= go.opentelemetry.io/proto/otlp v0.9.0/go.mod h1:1vKfU9rv61e9EVGthD1zNvUbiwPcimSsOPU9brfSHJg= +go.starlark.net v0.0.0-20200821142938-949cc6f4b097/go.mod h1:f0znQkUKRrkk36XxWbGjMqQM8wGv/xHBVE2qc3B5oFU= go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= @@ -1507,6 +1515,7 @@ go.uber.org/zap v1.13.0/go.mod h1:zwrFLgMcdUuIBviXEYEH1YKNaOBnKXsx2IPda5bBwHM= go.uber.org/zap v1.14.1/go.mod h1:Mb2vm2krFEG5DV0W9qcHBYFtp/Wku1cvYaqPsS/WYfc= go.uber.org/zap v1.16.0/go.mod h1:MA8QOfq0BHJwdXa996Y4dYkAqRKB8/1K1QMMZVaNZjQ= go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo= +golang.org/x/arch v0.0.0-20190927153633-4e8777c89be4/go.mod h1:flIaEI6LNU6xOCD5PaJvn9wGP0agmIOqjrtsKGRguv4= golang.org/x/crypto v0.0.0-20180608092829-8ac0e0d97ce4/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= @@ -1859,6 +1868,7 @@ golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtn golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191127201027-ecd32218bd7f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191203134012-c197fd4bf371/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191216052735-49a3e744a425/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= diff --git a/internal/pprof/binutils/addr2liner.go b/internal/pprof/binutils/addr2liner.go deleted file mode 100644 index c25669dfd91..00000000000 --- a/internal/pprof/binutils/addr2liner.go +++ /dev/null @@ -1,238 +0,0 @@ -// Copyright 2014 Google Inc. All Rights Reserved. 
-// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package binutils - -import ( - "bufio" - "fmt" - "io" - "os/exec" - "strconv" - "strings" - "sync" - - "github.com/parca-dev/parca/internal/pprof/plugin" -) - -const ( - defaultAddr2line = "addr2line" - - // addr2line may produce multiple lines of output. We - // use this sentinel to identify the end of the output. - sentinel = ^uint64(0) -) - -// addr2Liner is a connection to an addr2line command for obtaining -// address and line number information from a binary. -type addr2Liner struct { - mu sync.Mutex - rw lineReaderWriter - base uint64 - - // nm holds an addr2Liner using nm tool. Certain versions of addr2line - // produce incomplete names due to - // https://sourceware.org/bugzilla/show_bug.cgi?id=17541. As a workaround, - // the names from nm are used when they look more complete. See addrInfo() - // code below for the exact heuristic. - nm *addr2LinerNM -} - -// lineReaderWriter is an interface to abstract the I/O to an addr2line -// process. It writes a line of input to the job, and reads its output -// one line at a time. -type lineReaderWriter interface { - write(string) error - readLine() (string, error) - close() -} - -type addr2LinerJob struct { - cmd *exec.Cmd - in io.WriteCloser - out *bufio.Reader -} - -func (a *addr2LinerJob) write(s string) error { - _, err := fmt.Fprint(a.in, s+"\n") - return err -} - -func (a *addr2LinerJob) readLine() (string, error) { - s, err := a.out.ReadString('\n') - if err != nil { - return "", err - } - return strings.TrimSpace(s), nil -} - -// close releases any resources used by the addr2liner object. -func (a *addr2LinerJob) close() { - a.in.Close() - a.cmd.Wait() -} - -// newAddr2liner starts the given addr2liner command reporting -// information about the given executable file. If file is a shared -// library, base should be the address at which it was mapped in the -// program under consideration. -func newAddr2Liner(cmd, file string, base uint64) (*addr2Liner, error) { - if cmd == "" { - cmd = defaultAddr2line - } - - j := &addr2LinerJob{ - cmd: exec.Command(cmd, "-aif", "-e", file), - } - - var err error - if j.in, err = j.cmd.StdinPipe(); err != nil { - return nil, err - } - - outPipe, err := j.cmd.StdoutPipe() - if err != nil { - return nil, err - } - - j.out = bufio.NewReader(outPipe) - if err := j.cmd.Start(); err != nil { - return nil, err - } - - a := &addr2Liner{ - rw: j, - base: base, - } - - return a, nil -} - -// readFrame parses the addr2line output for a single address. It -// returns a populated plugin.Frame and whether it has reached the end of the -// data. -func (d *addr2Liner) readFrame() (plugin.Frame, bool) { - funcname, err := d.rw.readLine() - if err != nil { - return plugin.Frame{}, true - } - if strings.HasPrefix(funcname, "0x") { - // If addr2line returns a hex address we can assume it is the - // sentinel. 
Read and ignore next two lines of output from - // addr2line - d.rw.readLine() - d.rw.readLine() - return plugin.Frame{}, true - } - - fileline, err := d.rw.readLine() - if err != nil { - return plugin.Frame{}, true - } - - linenumber := 0 - - if funcname == "??" { - funcname = "" - } - - if fileline == "??:0" { - fileline = "" - } else { - if i := strings.LastIndex(fileline, ":"); i >= 0 { - // Remove discriminator, if present - if disc := strings.Index(fileline, " (discriminator"); disc > 0 { - fileline = fileline[:disc] - } - // If we cannot parse a number after the last ":", keep it as - // part of the filename. - if line, err := strconv.Atoi(fileline[i+1:]); err == nil { - linenumber = line - fileline = fileline[:i] - } - } - } - - return plugin.Frame{ - Func: funcname, - File: fileline, - Line: linenumber}, false -} - -func (d *addr2Liner) rawAddrInfo(addr uint64) ([]plugin.Frame, error) { - d.mu.Lock() - defer d.mu.Unlock() - - if err := d.rw.write(fmt.Sprintf("%x", addr-d.base)); err != nil { - return nil, err - } - - if err := d.rw.write(fmt.Sprintf("%x", sentinel)); err != nil { - return nil, err - } - - resp, err := d.rw.readLine() - if err != nil { - return nil, err - } - - if !strings.HasPrefix(resp, "0x") { - return nil, fmt.Errorf("unexpected addr2line output: %s", resp) - } - - var stack []plugin.Frame - for { - frame, end := d.readFrame() - if end { - break - } - - if frame != (plugin.Frame{}) { - stack = append(stack, frame) - } - } - return stack, err -} - -// addrInfo returns the stack frame information for a specific program -// address. It returns nil if the address could not be identified. -func (d *addr2Liner) addrInfo(addr uint64) ([]plugin.Frame, error) { - stack, err := d.rawAddrInfo(addr) - if err != nil { - return nil, err - } - - // Certain versions of addr2line produce incomplete names due to - // https://sourceware.org/bugzilla/show_bug.cgi?id=17541. Attempt to replace - // the name with a better one from nm. - if len(stack) > 0 && d.nm != nil { - nm, err := d.nm.addrInfo(addr) - if err == nil && len(nm) > 0 { - // Last entry in frame list should match since it is non-inlined. As a - // simple heuristic, we only switch to the nm-based name if it is longer - // by 2 or more characters. We consider nm names that are longer by 1 - // character insignificant to avoid replacing foo with _foo on MacOS (for - // unknown reasons read2line produces the former and nm produces the - // latter on MacOS even though both tools are asked to produce mangled - // names). - nmName := nm[len(nm)-1].Func - a2lName := stack[len(stack)-1].Func - if len(nmName) > len(a2lName)+1 { - stack[len(stack)-1].Func = nmName - } - } - } - - return stack, nil -} diff --git a/internal/pprof/binutils/addr2liner_llvm.go b/internal/pprof/binutils/addr2liner_llvm.go deleted file mode 100644 index 3ca65cea924..00000000000 --- a/internal/pprof/binutils/addr2liner_llvm.go +++ /dev/null @@ -1,181 +0,0 @@ -// Copyright 2014 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. - -package binutils - -import ( - "bufio" - "fmt" - "io" - "os/exec" - "strconv" - "strings" - "sync" - - "github.com/parca-dev/parca/internal/pprof/plugin" -) - -const ( - defaultLLVMSymbolizer = "llvm-symbolizer" -) - -// llvmSymbolizer is a connection to an llvm-symbolizer command for -// obtaining address and line number information from a binary. -type llvmSymbolizer struct { - sync.Mutex - filename string - rw lineReaderWriter - base uint64 -} - -type llvmSymbolizerJob struct { - cmd *exec.Cmd - in io.WriteCloser - out *bufio.Reader - // llvm-symbolizer requires the symbol type, CODE or DATA, for symbolization. - symType string -} - -func (a *llvmSymbolizerJob) write(s string) error { - _, err := fmt.Fprintln(a.in, a.symType, s) - return err -} - -func (a *llvmSymbolizerJob) readLine() (string, error) { - s, err := a.out.ReadString('\n') - if err != nil { - return "", err - } - return strings.TrimSpace(s), nil -} - -// close releases any resources used by the llvmSymbolizer object. -func (a *llvmSymbolizerJob) close() { - a.in.Close() - a.cmd.Wait() -} - -// newLlvmSymbolizer starts the given llvmSymbolizer command reporting -// information about the given executable file. If file is a shared -// library, base should be the address at which it was mapped in the -// program under consideration. -func newLLVMSymbolizer(cmd, file string, base uint64, isData bool) (*llvmSymbolizer, error) { - if cmd == "" { - cmd = defaultLLVMSymbolizer - } - - j := &llvmSymbolizerJob{ - cmd: exec.Command(cmd, "--inlining", "-demangle=false"), - symType: "CODE", - } - if isData { - j.symType = "DATA" - } - - var err error - if j.in, err = j.cmd.StdinPipe(); err != nil { - return nil, err - } - - outPipe, err := j.cmd.StdoutPipe() - if err != nil { - return nil, err - } - - j.out = bufio.NewReader(outPipe) - if err := j.cmd.Start(); err != nil { - return nil, err - } - - a := &llvmSymbolizer{ - filename: file, - rw: j, - base: base, - } - - return a, nil -} - -// readFrame parses the llvm-symbolizer output for a single address. It -// returns a populated plugin.Frame and whether it has reached the end of the -// data. -func (d *llvmSymbolizer) readFrame() (plugin.Frame, bool) { - funcname, err := d.rw.readLine() - if err != nil { - return plugin.Frame{}, true - } - - switch funcname { - case "": - return plugin.Frame{}, true - case "??": - funcname = "" - } - - fileline, err := d.rw.readLine() - if err != nil { - return plugin.Frame{Func: funcname}, true - } - - linenumber := 0 - // The llvm-symbolizer outputs the ::. - // When it cannot identify the source code location, it outputs "??:0:0". - // Older versions output just the filename and line number, so we check for - // both conditions here. - if fileline == "??:0" || fileline == "??:0:0" { - fileline = "" - } else { - switch split := strings.Split(fileline, ":"); len(split) { - case 1: - // filename - fileline = split[0] - case 2, 3: - // filename:line , or - // filename:line:disc , or - fileline = split[0] - if line, err := strconv.Atoi(split[1]); err == nil { - linenumber = line - } - default: - // Unrecognized, ignore - } - } - - return plugin.Frame{Func: funcname, File: fileline, Line: linenumber}, false -} - -// addrInfo returns the stack frame information for a specific program -// address. It returns nil if the address could not be identified. 
-func (d *llvmSymbolizer) addrInfo(addr uint64) ([]plugin.Frame, error) { - d.Lock() - defer d.Unlock() - - if err := d.rw.write(fmt.Sprintf("%s 0x%x", d.filename, addr-d.base)); err != nil { - return nil, err - } - - var stack []plugin.Frame - for { - frame, end := d.readFrame() - if end { - break - } - - if frame != (plugin.Frame{}) { - stack = append(stack, frame) - } - } - - return stack, nil -} diff --git a/internal/pprof/binutils/addr2liner_nm.go b/internal/pprof/binutils/addr2liner_nm.go deleted file mode 100644 index aba9aea03a1..00000000000 --- a/internal/pprof/binutils/addr2liner_nm.go +++ /dev/null @@ -1,144 +0,0 @@ -// Copyright 2014 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package binutils - -import ( - "bufio" - "bytes" - "io" - "os/exec" - "strconv" - "strings" - - "github.com/parca-dev/parca/internal/pprof/plugin" -) - -const ( - defaultNM = "nm" -) - -// addr2LinerNM is a connection to an nm command for obtaining symbol -// information from a binary. -type addr2LinerNM struct { - m []symbolInfo // Sorted list of symbol addresses from binary. -} - -type symbolInfo struct { - address uint64 - size uint64 - name string - symType string -} - -// isData returns if the symbol has a known data object symbol type. -func (s *symbolInfo) isData() bool { - // The following symbol types are taken from https://linux.die.net/man/1/nm: - // Lowercase letter means local symbol, uppercase denotes a global symbol. - // - b or B: the symbol is in the uninitialized data section, e.g. .bss; - // - d or D: the symbol is in the initialized data section; - // - r or R: the symbol is in a read only data section; - // - v or V: the symbol is a weak object; - // - W: the symbol is a weak symbol that has not been specifically tagged as a - // weak object symbol. Experiments with some binaries, showed these to be - // mostly data objects. - return strings.ContainsAny(s.symType, "bBdDrRvVW") -} - -// newAddr2LinerNM starts the given nm command reporting information about the -// given executable file. If file is a shared library, base should be the -// address at which it was mapped in the program under consideration. -func newAddr2LinerNM(cmd, file string, base uint64) (*addr2LinerNM, error) { - if cmd == "" { - cmd = defaultNM - } - var b bytes.Buffer - c := exec.Command(cmd, "--numeric-sort", "--print-size", "--format=posix", file) - c.Stdout = &b - if err := c.Run(); err != nil { - return nil, err - } - return parseAddr2LinerNM(base, &b) -} - -func parseAddr2LinerNM(base uint64, nm io.Reader) (*addr2LinerNM, error) { - a := &addr2LinerNM{ - m: []symbolInfo{}, - } - - // Parse nm output and populate symbol map. - // Skip lines we fail to parse. 
- buf := bufio.NewReader(nm) - for { - line, err := buf.ReadString('\n') - if line == "" && err != nil { - if err == io.EOF { - break - } - return nil, err - } - line = strings.TrimSpace(line) - fields := strings.Split(line, " ") - if len(fields) != 4 { - continue - } - address, err := strconv.ParseUint(fields[2], 16, 64) - if err != nil { - continue - } - size, err := strconv.ParseUint(fields[3], 16, 64) - if err != nil { - continue - } - a.m = append(a.m, symbolInfo{ - address: address + base, - size: size, - name: fields[0], - symType: fields[1], - }) - } - - return a, nil -} - -// addrInfo returns the stack frame information for a specific program -// address. It returns nil if the address could not be identified. -func (a *addr2LinerNM) addrInfo(addr uint64) ([]plugin.Frame, error) { - if len(a.m) == 0 || addr < a.m[0].address || addr >= (a.m[len(a.m)-1].address+a.m[len(a.m)-1].size) { - return nil, nil - } - - // Binary search. Search until low, high are separated by 1. - low, high := 0, len(a.m) - for low+1 < high { - mid := (low + high) / 2 - v := a.m[mid].address - if addr == v { - low = mid - break - } else if addr > v { - low = mid - } else { - high = mid - } - } - - // Address is between a.m[low] and a.m[high]. Pick low, as it represents - // [low, high). For data symbols, we use a strict check that the address is in - // the [start, start + size) range of a.m[low]. - if a.m[low].isData() && addr >= (a.m[low].address+a.m[low].size) { - return nil, nil - } - return []plugin.Frame{{Func: a.m[low].name}}, nil -} diff --git a/internal/pprof/binutils/binutils.go b/internal/pprof/binutils/binutils.go deleted file mode 100644 index 49e737abb4c..00000000000 --- a/internal/pprof/binutils/binutils.go +++ /dev/null @@ -1,730 +0,0 @@ -// Copyright 2014 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Package binutils provides access to the GNU binutils. -package binutils - -import ( - "debug/elf" - "debug/macho" - "debug/pe" - "encoding/binary" - "errors" - "fmt" - "io" - "os" - "os/exec" - "path/filepath" - "regexp" - "runtime" - "strconv" - "strings" - "sync" - - "github.com/parca-dev/parca/internal/pprof/elfexec" - "github.com/parca-dev/parca/internal/pprof/plugin" -) - -// A Binutils implements plugin.ObjTool by invoking the GNU binutils. -type Binutils struct { - mu sync.Mutex - rep *binrep -} - -var ( - objdumpLLVMVerRE = regexp.MustCompile(`LLVM version (?:(\d*)\.(\d*)\.(\d*)|.*(trunk).*)`) - - // Defined for testing - elfOpen = elf.Open -) - -// binrep is an immutable representation for Binutils. It is atomically -// replaced on every mutation to provide thread-safe access. -type binrep struct { - // Commands to invoke. - llvmSymbolizer string - llvmSymbolizerFound bool - addr2line string - addr2lineFound bool - nm string - nmFound bool - objdump string - objdumpFound bool - isLLVMObjdump bool - - // if fast, perform symbolization using nm (symbol names only), - // instead of file-line detail from the slower addr2line. 
- fast bool -} - -// get returns the current representation for bu, initializing it if necessary. -func (bu *Binutils) get() *binrep { - bu.mu.Lock() - r := bu.rep - if r == nil { - r = &binrep{} - initTools(r, "") - bu.rep = r - } - bu.mu.Unlock() - return r -} - -// update modifies the rep for bu via the supplied function. -func (bu *Binutils) update(fn func(r *binrep)) { - r := &binrep{} - bu.mu.Lock() - defer bu.mu.Unlock() - if bu.rep == nil { - initTools(r, "") - } else { - *r = *bu.rep - } - fn(r) - bu.rep = r -} - -// String returns string representation of the binutils state for debug logging. -func (bu *Binutils) String() string { - r := bu.get() - var llvmSymbolizer, addr2line, nm, objdump string - if r.llvmSymbolizerFound { - llvmSymbolizer = r.llvmSymbolizer - } - if r.addr2lineFound { - addr2line = r.addr2line - } - if r.nmFound { - nm = r.nm - } - if r.objdumpFound { - objdump = r.objdump - } - return fmt.Sprintf("llvm-symbolizer=%q addr2line=%q nm=%q objdump=%q fast=%t", - llvmSymbolizer, addr2line, nm, objdump, r.fast) -} - -// SetFastSymbolization sets a toggle that makes binutils use fast -// symbolization (using nm), which is much faster than addr2line but -// provides only symbol name information (no file/line). -func (bu *Binutils) SetFastSymbolization(fast bool) { - bu.update(func(r *binrep) { r.fast = fast }) -} - -// SetTools processes the contents of the tools option. It -// expects a set of entries separated by commas; each entry is a pair -// of the form t:path, where cmd will be used to look only for the -// tool named t. If t is not specified, the path is searched for all -// tools. -func (bu *Binutils) SetTools(config string) { - bu.update(func(r *binrep) { initTools(r, config) }) -} - -func initTools(b *binrep, config string) { - // paths collect paths per tool; Key "" contains the default. - paths := make(map[string][]string) - for _, t := range strings.Split(config, ",") { - name, path := "", t - if ct := strings.SplitN(t, ":", 2); len(ct) == 2 { - name, path = ct[0], ct[1] - } - paths[name] = append(paths[name], path) - } - - defaultPath := paths[""] - b.llvmSymbolizer, b.llvmSymbolizerFound = chooseExe([]string{"llvm-symbolizer"}, []string{}, append(paths["llvm-symbolizer"], defaultPath...)) - b.addr2line, b.addr2lineFound = chooseExe([]string{"addr2line"}, []string{"gaddr2line"}, append(paths["addr2line"], defaultPath...)) - // The "-n" option is supported by LLVM since 2011. The output of llvm-nm - // and GNU nm with "-n" option is interchangeable for our purposes, so we do - // not need to differrentiate them. - b.nm, b.nmFound = chooseExe([]string{"llvm-nm", "nm"}, []string{"gnm"}, append(paths["nm"], defaultPath...)) - b.objdump, b.objdumpFound, b.isLLVMObjdump = findObjdump(append(paths["objdump"], defaultPath...)) -} - -// findObjdump finds and returns path to preferred objdump binary. -// Order of preference is: llvm-objdump, objdump. -// On MacOS only, also looks for gobjdump with least preference. -// Accepts a list of paths and returns: -// a string with path to the preferred objdump binary if found, -// or an empty string if not found; -// a boolean if any acceptable objdump was found; -// a boolean indicating if it is an LLVM objdump. 
-func findObjdump(paths []string) (string, bool, bool) { - objdumpNames := []string{"llvm-objdump", "objdump"} - if runtime.GOOS == "darwin" { - objdumpNames = append(objdumpNames, "gobjdump") - } - - for _, objdumpName := range objdumpNames { - if objdump, objdumpFound := findExe(objdumpName, paths); objdumpFound { - cmdOut, err := exec.Command(objdump, "--version").Output() - if err != nil { - continue - } - if isLLVMObjdump(string(cmdOut)) { - return objdump, true, true - } - if isBuObjdump(string(cmdOut)) { - return objdump, true, false - } - } - } - return "", false, false -} - -// chooseExe finds and returns path to preferred binary. names is a list of -// names to search on both Linux and OSX. osxNames is a list of names specific -// to OSX. names always has a higher priority than osxNames. The order of -// the name within each list decides its priority (e.g. the first name has a -// higher priority than the second name in the list). -// -// It returns a string with path to the binary and a boolean indicating if any -// acceptable binary was found. -func chooseExe(names, osxNames []string, paths []string) (string, bool) { - if runtime.GOOS == "darwin" { - names = append(names, osxNames...) - } - for _, name := range names { - if binary, found := findExe(name, paths); found { - return binary, true - } - } - return "", false -} - -// isLLVMObjdump accepts a string with path to an objdump binary, -// and returns a boolean indicating if the given binary is an LLVM -// objdump binary of an acceptable version. -func isLLVMObjdump(output string) bool { - fields := objdumpLLVMVerRE.FindStringSubmatch(output) - if len(fields) != 5 { - return false - } - if fields[4] == "trunk" { - return true - } - verMajor, err := strconv.Atoi(fields[1]) - if err != nil { - return false - } - verPatch, err := strconv.Atoi(fields[3]) - if err != nil { - return false - } - if runtime.GOOS == "linux" && verMajor >= 8 { - // Ensure LLVM objdump is at least version 8.0 on Linux. - // Some flags, like --demangle, and double dashes for options are - // not supported by previous versions. - return true - } - if runtime.GOOS == "darwin" { - // Ensure LLVM objdump is at least version 10.0.1 on MacOS. - return verMajor > 10 || (verMajor == 10 && verPatch >= 1) - } - return false -} - -// isBuObjdump accepts a string with path to an objdump binary, -// and returns a boolean indicating if the given binary is a GNU -// binutils objdump binary. No version check is performed. -func isBuObjdump(output string) bool { - return strings.Contains(output, "GNU objdump") -} - -// findExe looks for an executable command on a set of paths. -// If it cannot find it, returns cmd. -func findExe(cmd string, paths []string) (string, bool) { - for _, p := range paths { - cp := filepath.Join(p, cmd) - if c, err := exec.LookPath(cp); err == nil { - return c, true - } - } - return cmd, false -} - -// Disasm returns the assembly instructions for the specified address range -// of a binary. 
-func (bu *Binutils) Disasm(file string, start, end uint64, intelSyntax bool) ([]plugin.Inst, error) { - b := bu.get() - if !b.objdumpFound { - return nil, errors.New("cannot disasm: no objdump tool available") - } - args := []string{"--disassemble", "--demangle", "--no-show-raw-insn", - "--line-numbers", fmt.Sprintf("--start-address=%#x", start), - fmt.Sprintf("--stop-address=%#x", end)} - - if intelSyntax { - if b.isLLVMObjdump { - args = append(args, "--x86-asm-syntax=intel") - } else { - args = append(args, "-M", "intel") - } - } - - args = append(args, file) - cmd := exec.Command(b.objdump, args...) - out, err := cmd.Output() - if err != nil { - return nil, fmt.Errorf("%v: %v", cmd.Args, err) - } - - return disassemble(out) -} - -// Open satisfies the plugin.ObjTool interface. -func (bu *Binutils) Open(name string, start, limit, offset uint64) (plugin.ObjFile, error) { - b := bu.get() - - // Make sure file is a supported executable. - // This uses magic numbers, mainly to provide better error messages but - // it should also help speed. - - if _, err := os.Stat(name); err != nil { - // For testing, do not require file name to exist. - if strings.Contains(b.addr2line, "testdata/") { - return &fileAddr2Line{file: file{b: b, name: name}}, nil - } - return nil, err - } - - // Read the first 4 bytes of the file. - - f, err := os.Open(name) - if err != nil { - return nil, fmt.Errorf("error opening %s: %v", name, err) - } - defer f.Close() - - var header [4]byte - if _, err = io.ReadFull(f, header[:]); err != nil { - return nil, fmt.Errorf("error reading magic number from %s: %v", name, err) - } - - elfMagic := string(header[:]) - - // Match against supported file types. - if elfMagic == elf.ELFMAG { - f, err := b.openELF(name, start, limit, offset) - if err != nil { - return nil, fmt.Errorf("error reading ELF file %s: %v", name, err) - } - return f, nil - } - - // Mach-O magic numbers can be big or little endian. - machoMagicLittle := binary.LittleEndian.Uint32(header[:]) - machoMagicBig := binary.BigEndian.Uint32(header[:]) - - if machoMagicLittle == macho.Magic32 || machoMagicLittle == macho.Magic64 || - machoMagicBig == macho.Magic32 || machoMagicBig == macho.Magic64 { - f, err := b.openMachO(name, start, limit, offset) - if err != nil { - return nil, fmt.Errorf("error reading Mach-O file %s: %v", name, err) - } - return f, nil - } - if machoMagicLittle == macho.MagicFat || machoMagicBig == macho.MagicFat { - f, err := b.openFatMachO(name, start, limit, offset) - if err != nil { - return nil, fmt.Errorf("error reading fat Mach-O file %s: %v", name, err) - } - return f, nil - } - - peMagic := string(header[:2]) - if peMagic == "MZ" { - f, err := b.openPE(name, start, limit, offset) - if err != nil { - return nil, fmt.Errorf("error reading PE file %s: %v", name, err) - } - return f, nil - } - - return nil, fmt.Errorf("unrecognized binary format: %s", name) -} - -func (b *binrep) openMachOCommon(name string, of *macho.File, start, limit, offset uint64) (plugin.ObjFile, error) { - - // Subtract the load address of the __TEXT section. Usually 0 for shared - // libraries or 0x100000000 for executables. You can check this value by - // running `objdump -private-headers `. 
- - textSegment := of.Segment("__TEXT") - if textSegment == nil { - return nil, fmt.Errorf("could not identify base for %s: no __TEXT segment", name) - } - if textSegment.Addr > start { - return nil, fmt.Errorf("could not identify base for %s: __TEXT segment address (0x%x) > mapping start address (0x%x)", - name, textSegment.Addr, start) - } - - base := start - textSegment.Addr - - if b.fast || (!b.addr2lineFound && !b.llvmSymbolizerFound) { - return &fileNM{file: file{b: b, name: name, base: base}}, nil - } - return &fileAddr2Line{file: file{b: b, name: name, base: base}}, nil -} - -func (b *binrep) openFatMachO(name string, start, limit, offset uint64) (plugin.ObjFile, error) { - of, err := macho.OpenFat(name) - if err != nil { - return nil, fmt.Errorf("error parsing %s: %v", name, err) - } - defer of.Close() - - if len(of.Arches) == 0 { - return nil, fmt.Errorf("empty fat Mach-O file: %s", name) - } - - var arch macho.Cpu - // Use the host architecture. - // TODO: This is not ideal because the host architecture may not be the one - // that was profiled. E.g. an amd64 host can profile a 386 program. - switch runtime.GOARCH { - case "386": - arch = macho.Cpu386 - case "amd64", "amd64p32": - arch = macho.CpuAmd64 - case "arm", "armbe", "arm64", "arm64be": - arch = macho.CpuArm - case "ppc": - arch = macho.CpuPpc - case "ppc64", "ppc64le": - arch = macho.CpuPpc64 - default: - return nil, fmt.Errorf("unsupported host architecture for %s: %s", name, runtime.GOARCH) - } - for i := range of.Arches { - if of.Arches[i].Cpu == arch { - return b.openMachOCommon(name, of.Arches[i].File, start, limit, offset) - } - } - return nil, fmt.Errorf("architecture not found in %s: %s", name, runtime.GOARCH) -} - -func (b *binrep) openMachO(name string, start, limit, offset uint64) (plugin.ObjFile, error) { - of, err := macho.Open(name) - if err != nil { - return nil, fmt.Errorf("error parsing %s: %v", name, err) - } - defer of.Close() - - return b.openMachOCommon(name, of, start, limit, offset) -} - -func (b *binrep) openELF(name string, start, limit, offset uint64) (plugin.ObjFile, error) { - ef, err := elfOpen(name) - if err != nil { - return nil, fmt.Errorf("error parsing %s: %v", name, err) - } - defer ef.Close() - - buildID := "" - if f, err := os.Open(name); err == nil { - if id, err := elfexec.GetBuildID(f); err == nil { - buildID = fmt.Sprintf("%x", id) - } - } - - var ( - stextOffset *uint64 - pageAligned = func(addr uint64) bool { return addr%4096 == 0 } - ) - if strings.Contains(name, "vmlinux") || !pageAligned(start) || !pageAligned(limit) || !pageAligned(offset) { - // Reading all Symbols is expensive, and we only rarely need it so - // we don't want to do it every time. But if _stext happens to be - // page-aligned but isn't the same as Vaddr, we would symbolize - // wrong. So if the name the addresses aren't page aligned, or if - // the name is "vmlinux" we read _stext. We can be wrong if: (1) - // someone passes a kernel path that doesn't contain "vmlinux" AND - // (2) _stext is page-aligned AND (3) _stext is not at Vaddr - symbols, err := ef.Symbols() - if err != nil && err != elf.ErrNoSymbols { - return nil, err - } - for _, s := range symbols { - if s.Name == "_stext" { - // The kernel may use _stext as the mapping start address. - stextOffset = &s.Value - break - } - } - } - - // Check that we can compute a base for the binary. This may not be the - // correct base value, so we don't save it. 
We delay computing the actual base - // value until we have a sample address for this mapping, so that we can - // correctly identify the associated program segment that is needed to compute - // the base. - if _, err := elfexec.GetBase(&ef.FileHeader, elfexec.FindTextProgHeader(ef), stextOffset, start, limit, offset); err != nil { - return nil, fmt.Errorf("could not identify base for %s: %v", name, err) - } - - if b.fast || (!b.addr2lineFound && !b.llvmSymbolizerFound) { - return &fileNM{file: file{ - b: b, - name: name, - buildID: buildID, - m: &elfMapping{start: start, limit: limit, offset: offset, stextOffset: stextOffset}, - }}, nil - } - return &fileAddr2Line{file: file{ - b: b, - name: name, - buildID: buildID, - m: &elfMapping{start: start, limit: limit, offset: offset, stextOffset: stextOffset}, - }}, nil -} - -func (b *binrep) openPE(name string, start, limit, offset uint64) (plugin.ObjFile, error) { - pf, err := pe.Open(name) - if err != nil { - return nil, fmt.Errorf("error parsing %s: %v", name, err) - } - defer pf.Close() - - var imageBase uint64 - switch h := pf.OptionalHeader.(type) { - case *pe.OptionalHeader32: - imageBase = uint64(h.ImageBase) - case *pe.OptionalHeader64: - imageBase = uint64(h.ImageBase) - default: - return nil, fmt.Errorf("unknown OptionalHeader %T", pf.OptionalHeader) - } - - var base uint64 - if start > 0 { - base = start - imageBase - } - if b.fast || (!b.addr2lineFound && !b.llvmSymbolizerFound) { - return &fileNM{file: file{b: b, name: name, base: base}}, nil - } - return &fileAddr2Line{file: file{b: b, name: name, base: base}}, nil -} - -// elfMapping stores the parameters of a runtime mapping that are needed to -// identify the ELF segment associated with a mapping. -type elfMapping struct { - // Runtime mapping parameters. - start, limit, offset uint64 - // Offset of _stext symbol. Only defined for kernel images, nil otherwise. - stextOffset *uint64 -} - -// findProgramHeader returns the program segment that matches the current -// mapping and the given address, or an error if it cannot find a unique program -// header. -func (m *elfMapping) findProgramHeader(ef *elf.File, addr uint64) (*elf.ProgHeader, error) { - // For user space executables, we try to find the actual program segment that - // is associated with the given mapping. Skip this search if limit <= start. - // We cannot use just a check on the start address of the mapping to tell if - // it's a kernel / .ko module mapping, because with quipper address remapping - // enabled, the address would be in the lower half of the address space. - - if m.stextOffset != nil || m.start >= m.limit || m.limit >= (uint64(1)<<63) { - // For the kernel, find the program segment that includes the .text section. - return elfexec.FindTextProgHeader(ef), nil - } - - // Fetch all the loadable segments. - var phdrs []elf.ProgHeader - for i := range ef.Progs { - if ef.Progs[i].Type == elf.PT_LOAD { - phdrs = append(phdrs, ef.Progs[i].ProgHeader) - } - } - // Some ELF files don't contain any loadable program segments, e.g. .ko - // kernel modules. It's not an error to have no header in such cases. - if len(phdrs) == 0 { - return nil, nil - } - // Get all program headers associated with the mapping. 
- headers := elfexec.ProgramHeadersForMapping(phdrs, m.offset, m.limit-m.start) - if len(headers) == 0 { - return nil, errors.New("no program header matches mapping info") - } - if len(headers) == 1 { - return headers[0], nil - } - - // Use the file offset corresponding to the address to symbolize, to narrow - // down the header. - return elfexec.HeaderForFileOffset(headers, addr-m.start+m.offset) -} - -// file implements the binutils.ObjFile interface. -type file struct { - b *binrep - name string - buildID string - - baseOnce sync.Once // Ensures the base, baseErr and isData are computed once. - base uint64 - baseErr error // Any eventual error while computing the base. - isData bool - // Mapping information. Relevant only for ELF files, nil otherwise. - m *elfMapping -} - -// computeBase computes the relocation base for the given binary file only if -// the elfMapping field is set. It populates the base and isData fields and -// returns an error. -func (f *file) computeBase(addr uint64) error { - if f == nil || f.m == nil { - return nil - } - if addr < f.m.start || addr >= f.m.limit { - return fmt.Errorf("specified address %x is outside the mapping range [%x, %x] for file %q", addr, f.m.start, f.m.limit, f.name) - } - ef, err := elfOpen(f.name) - if err != nil { - return fmt.Errorf("error parsing %s: %v", f.name, err) - } - defer ef.Close() - - ph, err := f.m.findProgramHeader(ef, addr) - if err != nil { - return fmt.Errorf("failed to find program header for file %q, ELF mapping %#v, address %x: %v", f.name, *f.m, addr, err) - } - - base, err := elfexec.GetBase(&ef.FileHeader, ph, f.m.stextOffset, f.m.start, f.m.limit, f.m.offset) - if err != nil { - return err - } - f.base = base - f.isData = ph != nil && ph.Flags&elf.PF_X == 0 - return nil -} - -func (f *file) Name() string { - return f.name -} - -func (f *file) ObjAddr(addr uint64) (uint64, error) { - f.baseOnce.Do(func() { f.baseErr = f.computeBase(addr) }) - if f.baseErr != nil { - return 0, f.baseErr - } - return addr - f.base, nil -} - -func (f *file) BuildID() string { - return f.buildID -} - -func (f *file) SourceLine(addr uint64) ([]plugin.Frame, error) { - f.baseOnce.Do(func() { f.baseErr = f.computeBase(addr) }) - if f.baseErr != nil { - return nil, f.baseErr - } - return nil, nil -} - -func (f *file) Close() error { - return nil -} - -func (f *file) Symbols(r *regexp.Regexp, addr uint64) ([]*plugin.Sym, error) { - // Get from nm a list of symbols sorted by address. - cmd := exec.Command(f.b.nm, "-n", f.name) - out, err := cmd.Output() - if err != nil { - return nil, fmt.Errorf("%v: %v", cmd.Args, err) - } - - return findSymbols(out, f.name, r, addr) -} - -// fileNM implements the binutils.ObjFile interface, using 'nm' to map -// addresses to symbols (without file/line number information). It is -// faster than fileAddr2Line. -type fileNM struct { - file - addr2linernm *addr2LinerNM -} - -func (f *fileNM) SourceLine(addr uint64) ([]plugin.Frame, error) { - f.baseOnce.Do(func() { f.baseErr = f.computeBase(addr) }) - if f.baseErr != nil { - return nil, f.baseErr - } - if f.addr2linernm == nil { - addr2liner, err := newAddr2LinerNM(f.b.nm, f.name, f.base) - if err != nil { - return nil, err - } - f.addr2linernm = addr2liner - } - return f.addr2linernm.addrInfo(addr) -} - -// fileAddr2Line implements the binutils.ObjFile interface, using -// llvm-symbolizer, if that's available, or addr2line to map addresses to -// symbols (with file/line number information). It can be slow for large -// binaries with debug information. 
-type fileAddr2Line struct { - once sync.Once - file - addr2liner *addr2Liner - llvmSymbolizer *llvmSymbolizer - isData bool -} - -func (f *fileAddr2Line) SourceLine(addr uint64) ([]plugin.Frame, error) { - f.baseOnce.Do(func() { f.baseErr = f.computeBase(addr) }) - if f.baseErr != nil { - return nil, f.baseErr - } - f.once.Do(f.init) - if f.llvmSymbolizer != nil { - return f.llvmSymbolizer.addrInfo(addr) - } - if f.addr2liner != nil { - return f.addr2liner.addrInfo(addr) - } - return nil, fmt.Errorf("could not find local addr2liner") -} - -func (f *fileAddr2Line) init() { - if llvmSymbolizer, err := newLLVMSymbolizer(f.b.llvmSymbolizer, f.name, f.base, f.isData); err == nil { - f.llvmSymbolizer = llvmSymbolizer - return - } - - if addr2liner, err := newAddr2Liner(f.b.addr2line, f.name, f.base); err == nil { - f.addr2liner = addr2liner - - // When addr2line encounters some gcc compiled binaries, it - // drops interesting parts of names in anonymous namespaces. - // Fallback to NM for better function names. - if nm, err := newAddr2LinerNM(f.b.nm, f.name, f.base); err == nil { - f.addr2liner.nm = nm - } - } -} - -func (f *fileAddr2Line) Close() error { - if f.llvmSymbolizer != nil { - f.llvmSymbolizer.rw.close() - f.llvmSymbolizer = nil - } - if f.addr2liner != nil { - f.addr2liner.rw.close() - f.addr2liner = nil - } - return nil -} diff --git a/internal/pprof/binutils/binutils_test.go b/internal/pprof/binutils/binutils_test.go deleted file mode 100644 index c5fb1955391..00000000000 --- a/internal/pprof/binutils/binutils_test.go +++ /dev/null @@ -1,840 +0,0 @@ -// Copyright 2014 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package binutils - -import ( - "bytes" - "debug/elf" - "errors" - "fmt" - "math" - "path/filepath" - "reflect" - "regexp" - "runtime" - "strings" - "testing" - - "github.com/parca-dev/parca/internal/pprof/plugin" -) - -var testAddrMap = map[int]string{ - 1000: "_Z3fooid.clone2", - 2000: "_ZNSaIiEC1Ev.clone18", - 3000: "_ZNSt6vectorIS_IS_IiSaIiEESaIS1_EESaIS3_EEixEm", -} - -func functionName(level int) (name string) { - if name = testAddrMap[level]; name != "" { - return name - } - return fmt.Sprintf("fun%d", level) -} - -func TestAddr2Liner(t *testing.T) { - const offset = 0x500 - - a := addr2Liner{rw: &mockAddr2liner{}, base: offset} - for i := 1; i < 8; i++ { - addr := i*0x1000 + offset - s, err := a.addrInfo(uint64(addr)) - if err != nil { - t.Fatalf("addrInfo(%#x): %v", addr, err) - } - if len(s) != i { - t.Fatalf("addrInfo(%#x): got len==%d, want %d", addr, len(s), i) - } - for l, f := range s { - level := (len(s) - l) * 1000 - want := plugin.Frame{Func: functionName(level), File: fmt.Sprintf("file%d", level), Line: level} - - if f != want { - t.Errorf("AddrInfo(%#x)[%d]: = %+v, want %+v", addr, l, f, want) - } - } - } - s, err := a.addrInfo(0xFFFF) - if err != nil { - t.Fatalf("addrInfo(0xFFFF): %v", err) - } - if len(s) != 0 { - t.Fatalf("AddrInfo(0xFFFF): got len==%d, want 0", len(s)) - } - a.rw.close() -} - -type mockAddr2liner struct { - output []string -} - -func (a *mockAddr2liner) write(s string) error { - var lines []string - switch s { - case "1000": - lines = []string{"_Z3fooid.clone2", "file1000:1000"} - case "2000": - lines = []string{"_ZNSaIiEC1Ev.clone18", "file2000:2000", "_Z3fooid.clone2", "file1000:1000"} - case "3000": - lines = []string{"_ZNSt6vectorIS_IS_IiSaIiEESaIS1_EESaIS3_EEixEm", "file3000:3000", "_ZNSaIiEC1Ev.clone18", "file2000:2000", "_Z3fooid.clone2", "file1000:1000"} - case "4000": - lines = []string{"fun4000", "file4000:4000", "_ZNSt6vectorIS_IS_IiSaIiEESaIS1_EESaIS3_EEixEm", "file3000:3000", "_ZNSaIiEC1Ev.clone18", "file2000:2000", "_Z3fooid.clone2", "file1000:1000"} - case "5000": - lines = []string{"fun5000", "file5000:5000", "fun4000", "file4000:4000", "_ZNSt6vectorIS_IS_IiSaIiEESaIS1_EESaIS3_EEixEm", "file3000:3000", "_ZNSaIiEC1Ev.clone18", "file2000:2000", "_Z3fooid.clone2", "file1000:1000"} - case "6000": - lines = []string{"fun6000", "file6000:6000", "fun5000", "file5000:5000", "fun4000", "file4000:4000", "_ZNSt6vectorIS_IS_IiSaIiEESaIS1_EESaIS3_EEixEm", "file3000:3000", "_ZNSaIiEC1Ev.clone18", "file2000:2000", "_Z3fooid.clone2", "file1000:1000"} - case "7000": - lines = []string{"fun7000", "file7000:7000", "fun6000", "file6000:6000", "fun5000", "file5000:5000", "fun4000", "file4000:4000", "_ZNSt6vectorIS_IS_IiSaIiEESaIS1_EESaIS3_EEixEm", "file3000:3000", "_ZNSaIiEC1Ev.clone18", "file2000:2000", "_Z3fooid.clone2", "file1000:1000"} - case "8000": - lines = []string{"fun8000", "file8000:8000", "fun7000", "file7000:7000", "fun6000", "file6000:6000", "fun5000", "file5000:5000", "fun4000", "file4000:4000", "_ZNSt6vectorIS_IS_IiSaIiEESaIS1_EESaIS3_EEixEm", "file3000:3000", "_ZNSaIiEC1Ev.clone18", "file2000:2000", "_Z3fooid.clone2", "file1000:1000"} - case "9000": - lines = []string{"fun9000", "file9000:9000", "fun8000", "file8000:8000", "fun7000", "file7000:7000", "fun6000", "file6000:6000", "fun5000", "file5000:5000", "fun4000", "file4000:4000", "_ZNSt6vectorIS_IS_IiSaIiEESaIS1_EESaIS3_EEixEm", "file3000:3000", "_ZNSaIiEC1Ev.clone18", "file2000:2000", "_Z3fooid.clone2", "file1000:1000"} - default: - lines = []string{"??", "??:0"} - } - a.output = 
append(a.output, "0x"+s) - a.output = append(a.output, lines...) - return nil -} - -func (a *mockAddr2liner) readLine() (string, error) { - if len(a.output) == 0 { - return "", fmt.Errorf("end of file") - } - next := a.output[0] - a.output = a.output[1:] - return next, nil -} - -func (a *mockAddr2liner) close() { -} - -func TestAddr2LinerLookup(t *testing.T) { - for _, tc := range []struct { - desc string - nmOutput string - wantSymbolized map[uint64]string - wantUnsymbolized []uint64 - }{ - { - desc: "odd symbol count", - nmOutput: ` -0x1000 T 1000 100 -0x2000 T 2000 120 -0x3000 T 3000 130 -`, - wantSymbolized: map[uint64]string{ - 0x1000: "0x1000", - 0x1001: "0x1000", - 0x1FFF: "0x1000", - 0x2000: "0x2000", - 0x2001: "0x2000", - 0x3000: "0x3000", - 0x312f: "0x3000", - }, - wantUnsymbolized: []uint64{0x0fff, 0x3130}, - }, - { - desc: "even symbol count", - nmOutput: ` -0x1000 T 1000 100 -0x2000 T 2000 120 -0x3000 T 3000 130 -0x4000 T 4000 140 -`, - wantSymbolized: map[uint64]string{ - 0x1000: "0x1000", - 0x1001: "0x1000", - 0x1FFF: "0x1000", - 0x2000: "0x2000", - 0x2fff: "0x2000", - 0x3000: "0x3000", - 0x3fff: "0x3000", - 0x4000: "0x4000", - 0x413f: "0x4000", - }, - wantUnsymbolized: []uint64{0x0fff, 0x4140}, - }, - { - desc: "different symbol types", - nmOutput: ` -absolute_0x100 a 100 -absolute_0x200 A 200 -text_0x1000 t 1000 100 -bss_0x2000 b 2000 120 -data_0x3000 d 3000 130 -rodata_0x4000 r 4000 140 -weak_0x5000 v 5000 150 -text_0x6000 T 6000 160 -bss_0x7000 B 7000 170 -data_0x8000 D 8000 180 -rodata_0x9000 R 9000 190 -weak_0xa000 V a000 1a0 -weak_0xb000 W b000 1b0 -`, - wantSymbolized: map[uint64]string{ - 0x1000: "text_0x1000", - 0x1FFF: "text_0x1000", - 0x2000: "bss_0x2000", - 0x211f: "bss_0x2000", - 0x3000: "data_0x3000", - 0x312f: "data_0x3000", - 0x4000: "rodata_0x4000", - 0x413f: "rodata_0x4000", - 0x5000: "weak_0x5000", - 0x514f: "weak_0x5000", - 0x6000: "text_0x6000", - 0x6fff: "text_0x6000", - 0x7000: "bss_0x7000", - 0x716f: "bss_0x7000", - 0x8000: "data_0x8000", - 0x817f: "data_0x8000", - 0x9000: "rodata_0x9000", - 0x918f: "rodata_0x9000", - 0xa000: "weak_0xa000", - 0xa19f: "weak_0xa000", - 0xb000: "weak_0xb000", - 0xb1af: "weak_0xb000", - }, - wantUnsymbolized: []uint64{0x100, 0x200, 0x0fff, 0x2120, 0x3130, 0x4140, 0x5150, 0x7170, 0x8180, 0x9190, 0xa1a0, 0xb1b0}, - }, - } { - t.Run(tc.desc, func(t *testing.T) { - a, err := parseAddr2LinerNM(0, bytes.NewBufferString(tc.nmOutput)) - if err != nil { - t.Fatalf("nm parse error: %v", err) - } - for address, want := range tc.wantSymbolized { - if got, _ := a.addrInfo(address); !checkAddress(got, address, want) { - t.Errorf("%x: got %v, want %s", address, got, want) - } - } - for _, unknown := range tc.wantUnsymbolized { - if got, _ := a.addrInfo(unknown); got != nil { - t.Errorf("%x: got %v, want nil", unknown, got) - } - } - }) - } -} - -func checkAddress(got []plugin.Frame, address uint64, want string) bool { - if len(got) != 1 { - return false - } - return got[0].Func == want -} - -func TestSetTools(t *testing.T) { - // Test that multiple calls work. - bu := &Binutils{} - bu.SetTools("") - bu.SetTools("") -} - -func TestSetFastSymbolization(t *testing.T) { - // Test that multiple calls work. 
- bu := &Binutils{} - bu.SetFastSymbolization(true) - bu.SetFastSymbolization(false) -} - -func skipUnlessLinuxAmd64(t *testing.T) { - if runtime.GOOS != "linux" || runtime.GOARCH != "amd64" { - t.Skip("This test only works on x86-64 Linux") - } -} - -func skipUnlessDarwinAmd64(t *testing.T) { - if runtime.GOOS != "darwin" || runtime.GOARCH != "amd64" { - t.Skip("This test only works on x86-64 macOS") - } -} - -func skipUnlessWindowsAmd64(t *testing.T) { - if runtime.GOOS != "windows" || runtime.GOARCH != "amd64" { - t.Skip("This test only works on x86-64 Windows") - } -} - -func testDisasm(t *testing.T, intelSyntax bool) { - _, llvmObjdump, buObjdump := findObjdump([]string{""}) - if !(llvmObjdump || buObjdump) { - t.Skip("cannot disasm: no objdump tool available") - } - - bu := &Binutils{} - var testexe string - switch runtime.GOOS { - case "linux": - testexe = "exe_linux_64" - case "darwin": - testexe = "exe_mac_64" - case "windows": - testexe = "exe_windows_64.exe" - default: - t.Skipf("unsupported OS %q", runtime.GOOS) - } - - insts, err := bu.Disasm(filepath.Join("testdata", testexe), 0, math.MaxUint64, intelSyntax) - if err != nil { - t.Fatalf("Disasm: unexpected error %v", err) - } - mainCount := 0 - for _, x := range insts { - // macOS symbols have a leading underscore. - if x.Function == "main" || x.Function == "_main" { - mainCount++ - } - } - if mainCount == 0 { - t.Error("Disasm: found no main instructions") - } -} - -func TestDisasm(t *testing.T) { - if (runtime.GOOS != "linux" && runtime.GOOS != "darwin" && runtime.GOOS != "windows") || runtime.GOARCH != "amd64" { - t.Skip("This test only works on x86-64 Linux, macOS or Windows") - } - testDisasm(t, false) -} - -func TestDisasmIntelSyntax(t *testing.T) { - if (runtime.GOOS != "linux" && runtime.GOOS != "darwin" && runtime.GOOS != "windows") || runtime.GOARCH != "amd64" { - t.Skip("This test only works on x86_64 Linux, macOS or Windows as it tests Intel asm syntax") - } - testDisasm(t, true) -} - -func findSymbol(syms []*plugin.Sym, name string) *plugin.Sym { - for _, s := range syms { - for _, n := range s.Name { - if n == name { - return s - } - } - } - return nil -} - -func TestObjFile(t *testing.T) { - // If this test fails, check the address for main function in testdata/exe_linux_64 - // using the command 'nm -n '. Update the hardcoded addresses below to match - // the addresses from the output. - skipUnlessLinuxAmd64(t) - for _, tc := range []struct { - desc string - start, limit, offset uint64 - addr uint64 - }{ - {"fixed load address", 0x400000, 0x4006fc, 0, 0x40052d}, - // True user-mode ASLR binaries are ET_DYN rather than ET_EXEC so this case - // is a bit artificial except that it approximates the - // vmlinux-with-kernel-ASLR case where the binary *is* ET_EXEC. 
- {"simulated ASLR address", 0x500000, 0x5006fc, 0, 0x50052d}, - } { - t.Run(tc.desc, func(t *testing.T) { - bu := &Binutils{} - f, err := bu.Open(filepath.Join("testdata", "exe_linux_64"), tc.start, tc.limit, tc.offset) - if err != nil { - t.Fatalf("Open: unexpected error %v", err) - } - defer f.Close() - syms, err := f.Symbols(regexp.MustCompile("main"), 0) - if err != nil { - t.Fatalf("Symbols: unexpected error %v", err) - } - - m := findSymbol(syms, "main") - if m == nil { - t.Fatalf("Symbols: did not find main") - } - addr, err := f.ObjAddr(tc.addr) - if err != nil { - t.Fatalf("ObjAddr(%x) failed: %v", tc.addr, err) - } - if addr != m.Start { - t.Errorf("ObjAddr(%x) got %x, want %x", tc.addr, addr, m.Start) - } - gotFrames, err := f.SourceLine(tc.addr) - if err != nil { - t.Fatalf("SourceLine: unexpected error %v", err) - } - wantFrames := []plugin.Frame{ - {Func: "main", File: "/tmp/hello.c", Line: 3}, - } - if !reflect.DeepEqual(gotFrames, wantFrames) { - t.Fatalf("SourceLine for main: got %v; want %v\n", gotFrames, wantFrames) - } - }) - } -} - -func TestMachoFiles(t *testing.T) { - // If this test fails, check the address for main function in testdata/exe_mac_64 - // and testdata/lib_mac_64 using addr2line or gaddr2line. Update the - // hardcoded addresses below to match the addresses from the output. - skipUnlessDarwinAmd64(t) - - // Load `file`, pretending it was mapped at `start`. Then get the symbol - // table. Check that it contains the symbol `sym` and that the address - // `addr` gives the `expected` stack trace. - for _, tc := range []struct { - desc string - file string - start, limit, offset uint64 - addr uint64 - sym string - expected []plugin.Frame - }{ - {"normal mapping", "exe_mac_64", 0x100000000, math.MaxUint64, 0, - 0x100000f50, "_main", - []plugin.Frame{ - {Func: "main", File: "/tmp/hello.c", Line: 3}, - }}, - {"other mapping", "exe_mac_64", 0x200000000, math.MaxUint64, 0, - 0x200000f50, "_main", - []plugin.Frame{ - {Func: "main", File: "/tmp/hello.c", Line: 3}, - }}, - {"lib normal mapping", "lib_mac_64", 0, math.MaxUint64, 0, - 0xfa0, "_bar", - []plugin.Frame{ - {Func: "bar", File: "/tmp/lib.c", Line: 5}, - }}, - } { - t.Run(tc.desc, func(t *testing.T) { - bu := &Binutils{} - f, err := bu.Open(filepath.Join("testdata", tc.file), tc.start, tc.limit, tc.offset) - if err != nil { - t.Fatalf("Open: unexpected error %v", err) - } - t.Logf("binutils: %v", bu) - if runtime.GOOS == "darwin" && !bu.rep.addr2lineFound && !bu.rep.llvmSymbolizerFound { - // On macOS, user needs to install gaddr2line or llvm-symbolizer with - // Homebrew, skip the test when the environment doesn't have it - // installed. 
- t.Skip("couldn't find addr2line or gaddr2line") - } - defer f.Close() - syms, err := f.Symbols(nil, 0) - if err != nil { - t.Fatalf("Symbols: unexpected error %v", err) - } - - m := findSymbol(syms, tc.sym) - if m == nil { - t.Fatalf("Symbols: could not find symbol %v", tc.sym) - } - gotFrames, err := f.SourceLine(tc.addr) - if err != nil { - t.Fatalf("SourceLine: unexpected error %v", err) - } - if !reflect.DeepEqual(gotFrames, tc.expected) { - t.Fatalf("SourceLine for main: got %v; want %v\n", gotFrames, tc.expected) - } - }) - } -} - -func TestLLVMSymbolizer(t *testing.T) { - if runtime.GOOS != "linux" { - t.Skip("testtdata/llvm-symbolizer has only been tested on linux") - } - - cmd := filepath.Join("testdata", "fake-llvm-symbolizer") - for _, c := range []struct { - addr uint64 - isData bool - frames []plugin.Frame - }{ - {0x10, false, []plugin.Frame{ - {Func: "Inlined_0x10", File: "foo.h", Line: 0}, - {Func: "Func_0x10", File: "foo.c", Line: 2}, - }}, - {0x20, true, []plugin.Frame{ - {Func: "foo_0x20", File: "0x20 8"}, - }}, - } { - desc := fmt.Sprintf("Code %x", c.addr) - if c.isData { - desc = fmt.Sprintf("Data %x", c.addr) - } - t.Run(desc, func(t *testing.T) { - symbolizer, err := newLLVMSymbolizer(cmd, "foo", 0, c.isData) - if err != nil { - t.Fatalf("newLLVMSymbolizer: unexpected error %v", err) - } - defer symbolizer.rw.close() - - frames, err := symbolizer.addrInfo(c.addr) - if err != nil { - t.Fatalf("LLVM: unexpected error %v", err) - } - if !reflect.DeepEqual(frames, c.frames) { - t.Errorf("LLVM: expect %v; got %v\n", c.frames, frames) - } - }) - } -} - -func TestPEFile(t *testing.T) { - // If this test fails, check the address for main function in testdata/exe_windows_64.exe - // using the command 'nm -n '. Update the hardcoded addresses below to match - // the addresses from the output. - skipUnlessWindowsAmd64(t) - for _, tc := range []struct { - desc string - start, limit, offset uint64 - addr uint64 - }{ - {"fake mapping", 0, math.MaxUint64, 0, 0x140001594}, - {"fixed load address", 0x140000000, 0x140002000, 0, 0x140001594}, - {"simulated ASLR address", 0x150000000, 0x150002000, 0, 0x150001594}, - } { - t.Run(tc.desc, func(t *testing.T) { - bu := &Binutils{} - f, err := bu.Open(filepath.Join("testdata", "exe_windows_64.exe"), tc.start, tc.limit, tc.offset) - if err != nil { - t.Fatalf("Open: unexpected error %v", err) - } - defer f.Close() - syms, err := f.Symbols(regexp.MustCompile("main"), 0) - if err != nil { - t.Fatalf("Symbols: unexpected error %v", err) - } - - m := findSymbol(syms, "main") - if m == nil { - t.Fatalf("Symbols: did not find main") - } - addr, err := f.ObjAddr(tc.addr) - if err != nil { - t.Fatalf("ObjAddr(%x) failed: %v", tc.addr, err) - } - if addr != m.Start { - t.Errorf("ObjAddr(%x) got %x, want %x", tc.addr, addr, m.Start) - } - gotFrames, err := f.SourceLine(tc.addr) - if err != nil { - t.Fatalf("SourceLine: unexpected error %v", err) - } - wantFrames := []plugin.Frame{ - {Func: "main", File: "hello.c", Line: 3}, - } - if !reflect.DeepEqual(gotFrames, wantFrames) { - t.Fatalf("SourceLine for main: got %v; want %v\n", gotFrames, wantFrames) - } - }) - } -} - -func TestOpenMalformedELF(t *testing.T) { - // Test that opening a malformed ELF file will report an error containing - // the word "ELF". 
- bu := &Binutils{} - _, err := bu.Open(filepath.Join("testdata", "malformed_elf"), 0, 0, 0) - if err == nil { - t.Fatalf("Open: unexpected success") - } - - if !strings.Contains(err.Error(), "ELF") { - t.Errorf("Open: got %v, want error containing 'ELF'", err) - } -} - -func TestOpenMalformedMachO(t *testing.T) { - // Test that opening a malformed Mach-O file will report an error containing - // the word "Mach-O". - bu := &Binutils{} - _, err := bu.Open(filepath.Join("testdata", "malformed_macho"), 0, 0, 0) - if err == nil { - t.Fatalf("Open: unexpected success") - } - - if !strings.Contains(err.Error(), "Mach-O") { - t.Errorf("Open: got %v, want error containing 'Mach-O'", err) - } -} - -func TestObjdumpVersionChecks(t *testing.T) { - // Test that the objdump version strings are parsed properly. - type testcase struct { - desc string - os string - ver string - want bool - } - - for _, tc := range []testcase{ - { - desc: "Valid Apple LLVM version string with usable version", - os: "darwin", - ver: "Apple LLVM version 11.0.3 (clang-1103.0.32.62)\nOptimized build.", - want: true, - }, - { - desc: "Valid Apple LLVM version string with unusable version", - os: "darwin", - ver: "Apple LLVM version 10.0.0 (clang-1000.11.45.5)\nOptimized build.", - want: false, - }, - { - desc: "Invalid Apple LLVM version string with usable version", - os: "darwin", - ver: "Apple LLVM versions 11.0.3 (clang-1103.0.32.62)\nOptimized build.", - want: false, - }, - { - desc: "Valid LLVM version string with usable version", - os: "linux", - ver: "LLVM (http://llvm.org/):\nLLVM version 9.0.1\n\nOptimized build.", - want: true, - }, - { - desc: "Valid LLVM version string with unusable version", - os: "linux", - ver: "LLVM (http://llvm.org/):\nLLVM version 6.0.1\n\nOptimized build.", - want: false, - }, - { - desc: "Invalid LLVM version string with usable version", - os: "linux", - ver: "LLVM (http://llvm.org/):\nLLVM versions 9.0.1\n\nOptimized build.", - want: false, - }, - { - desc: "Valid LLVM objdump version string with trunk", - os: runtime.GOOS, - ver: "LLVM (http://llvm.org/):\nLLVM version custom-trunk 124ffeb592a00bfe\nOptimized build.", - want: true, - }, - { - desc: "Invalid LLVM objdump version string with trunk", - os: runtime.GOOS, - ver: "LLVM (http://llvm.org/):\nLLVM version custom-trank 124ffeb592a00bfe\nOptimized build.", - want: false, - }, - { - desc: "Invalid LLVM objdump version string with trunk", - os: runtime.GOOS, - ver: "LLVM (http://llvm.org/):\nllvm version custom-trunk 124ffeb592a00bfe\nOptimized build.", - want: false, - }, - } { - if runtime.GOOS == tc.os { - if got := isLLVMObjdump(tc.ver); got != tc.want { - t.Errorf("%v: got %v, want %v", tc.desc, got, tc.want) - } - } - } - for _, tc := range []testcase{ - { - desc: "Valid GNU objdump version string", - ver: "GNU objdump (GNU Binutils) 2.34\nCopyright (C) 2020 Free Software Foundation, Inc.", - want: true, - }, - { - desc: "Invalid GNU objdump version string", - ver: "GNU nm (GNU Binutils) 2.34\nCopyright (C) 2020 Free Software Foundation, Inc.", - want: false, - }, - } { - if got := isBuObjdump(tc.ver); got != tc.want { - t.Errorf("%v: got %v, want %v", tc.desc, got, tc.want) - } - } -} - -func TestComputeBase(t *testing.T) { - realELFOpen := elfOpen - defer func() { - elfOpen = realELFOpen - }() - - tinyExecFile := &elf.File{ - FileHeader: elf.FileHeader{Type: elf.ET_EXEC}, - Progs: []*elf.Prog{ - {ProgHeader: elf.ProgHeader{Type: elf.PT_PHDR, Flags: elf.PF_R | elf.PF_X, Off: 0x40, Vaddr: 0x400040, Paddr: 0x400040, Filesz: 0x1f8, 
Memsz: 0x1f8, Align: 8}}, - {ProgHeader: elf.ProgHeader{Type: elf.PT_INTERP, Flags: elf.PF_R, Off: 0x238, Vaddr: 0x400238, Paddr: 0x400238, Filesz: 0x1c, Memsz: 0x1c, Align: 1}}, - {ProgHeader: elf.ProgHeader{Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_X, Off: 0, Vaddr: 0, Paddr: 0, Filesz: 0xc80, Memsz: 0xc80, Align: 0x200000}}, - {ProgHeader: elf.ProgHeader{Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_W, Off: 0xc80, Vaddr: 0x200c80, Paddr: 0x200c80, Filesz: 0x1f0, Memsz: 0x1f0, Align: 0x200000}}, - }, - } - tinyBadBSSExecFile := &elf.File{ - FileHeader: elf.FileHeader{Type: elf.ET_EXEC}, - Progs: []*elf.Prog{ - {ProgHeader: elf.ProgHeader{Type: elf.PT_PHDR, Flags: elf.PF_R | elf.PF_X, Off: 0x40, Vaddr: 0x400040, Paddr: 0x400040, Filesz: 0x1f8, Memsz: 0x1f8, Align: 8}}, - {ProgHeader: elf.ProgHeader{Type: elf.PT_INTERP, Flags: elf.PF_R, Off: 0x238, Vaddr: 0x400238, Paddr: 0x400238, Filesz: 0x1c, Memsz: 0x1c, Align: 1}}, - {ProgHeader: elf.ProgHeader{Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_X, Off: 0, Vaddr: 0, Paddr: 0, Filesz: 0xc80, Memsz: 0xc80, Align: 0x200000}}, - {ProgHeader: elf.ProgHeader{Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_W, Off: 0xc80, Vaddr: 0x200c80, Paddr: 0x200c80, Filesz: 0x100, Memsz: 0x1f0, Align: 0x200000}}, - {ProgHeader: elf.ProgHeader{Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_W, Off: 0xd80, Vaddr: 0x400d80, Paddr: 0x400d80, Filesz: 0x90, Memsz: 0x90, Align: 0x200000}}, - }, - } - - for _, tc := range []struct { - desc string - file *elf.File - openErr error - mapping *elfMapping - addr uint64 - wantError bool - wantBase uint64 - wantIsData bool - }{ - { - desc: "no elf mapping, no error", - mapping: nil, - addr: 0x1000, - wantBase: 0, - wantIsData: false, - }, - { - desc: "address outside mapping bounds means error", - file: &elf.File{}, - mapping: &elfMapping{start: 0x2000, limit: 0x5000, offset: 0x1000}, - addr: 0x1000, - wantError: true, - }, - { - desc: "elf.Open failing means error", - file: &elf.File{FileHeader: elf.FileHeader{Type: elf.ET_EXEC}}, - openErr: errors.New("elf.Open failed"), - mapping: &elfMapping{start: 0x2000, limit: 0x5000, offset: 0x1000}, - addr: 0x4000, - wantError: true, - }, - { - desc: "no loadable segments, no error", - file: &elf.File{FileHeader: elf.FileHeader{Type: elf.ET_EXEC}}, - mapping: &elfMapping{start: 0x2000, limit: 0x5000, offset: 0x1000}, - addr: 0x4000, - wantBase: 0, - wantIsData: false, - }, - { - desc: "unsupported executable type, Get Base returns error", - file: &elf.File{FileHeader: elf.FileHeader{Type: elf.ET_NONE}}, - mapping: &elfMapping{start: 0x2000, limit: 0x5000, offset: 0x1000}, - addr: 0x4000, - wantError: true, - }, - { - desc: "tiny file select executable segment by offset", - file: tinyExecFile, - mapping: &elfMapping{start: 0x5000000, limit: 0x5001000, offset: 0x0}, - addr: 0x5000c00, - wantBase: 0x5000000, - wantIsData: false, - }, - { - desc: "tiny file select data segment by offset", - file: tinyExecFile, - mapping: &elfMapping{start: 0x5200000, limit: 0x5201000, offset: 0x0}, - addr: 0x5200c80, - wantBase: 0x5000000, - wantIsData: true, - }, - { - desc: "tiny file offset outside any segment means error", - file: tinyExecFile, - mapping: &elfMapping{start: 0x5200000, limit: 0x5201000, offset: 0x0}, - addr: 0x5200e70, - wantError: true, - }, - { - desc: "tiny file with bad BSS segment selects data segment by offset in initialized section", - file: tinyBadBSSExecFile, - mapping: &elfMapping{start: 0x5200000, limit: 0x5201000, offset: 0x0}, - addr: 0x5200d79, - wantBase: 0x5000000, - 
wantIsData: true, - }, - { - desc: "tiny file with bad BSS segment with offset in uninitialized section means error", - file: tinyBadBSSExecFile, - mapping: &elfMapping{start: 0x5200000, limit: 0x5201000, offset: 0x0}, - addr: 0x5200d80, - wantError: true, - }, - } { - t.Run(tc.desc, func(t *testing.T) { - elfOpen = func(_ string) (*elf.File, error) { - return tc.file, tc.openErr - } - f := file{m: tc.mapping} - err := f.computeBase(tc.addr) - if (err != nil) != tc.wantError { - t.Errorf("got error %v, want any error=%v", err, tc.wantError) - } - if err != nil { - return - } - if f.base != tc.wantBase { - t.Errorf("got base %x, want %x", f.base, tc.wantBase) - } - if f.isData != tc.wantIsData { - t.Errorf("got isData %v, want %v", f.isData, tc.wantIsData) - } - }) - } -} - -func TestELFObjAddr(t *testing.T) { - // The exe_linux_64 has two loadable program headers: - // LOAD 0x0000000000000000 0x0000000000400000 0x0000000000400000 - // 0x00000000000006fc 0x00000000000006fc R E 0x200000 - // LOAD 0x0000000000000e10 0x0000000000600e10 0x0000000000600e10 - // 0x0000000000000230 0x0000000000000238 RW 0x200000 - name := filepath.Join("testdata", "exe_linux_64") - - for _, tc := range []struct { - desc string - start, limit, offset uint64 - wantOpenError bool - addr uint64 - wantObjAddr uint64 - wantAddrError bool - }{ - {"exec mapping, good address", 0x5400000, 0x5401000, 0, false, 0x5400400, 0x400400, false}, - {"exec mapping, address outside segment", 0x5400000, 0x5401000, 0, false, 0x5400800, 0, true}, - {"short data mapping, good address", 0x5600e00, 0x5602000, 0xe00, false, 0x5600e10, 0x600e10, false}, - {"short data mapping, address outside segment", 0x5600e00, 0x5602000, 0xe00, false, 0x5600e00, 0x600e00, false}, - {"page aligned data mapping, good address", 0x5600000, 0x5602000, 0, false, 0x5601000, 0x601000, false}, - {"page aligned data mapping, address outside segment", 0x5600000, 0x5602000, 0, false, 0x5601048, 0, true}, - {"bad file offset, no matching segment", 0x5600000, 0x5602000, 0x2000, false, 0x5600e10, 0, true}, - {"large mapping size, match by sample offset", 0x5600000, 0x5603000, 0, false, 0x5600e10, 0x600e10, false}, - } { - t.Run(tc.desc, func(t *testing.T) { - b := binrep{} - o, err := b.openELF(name, tc.start, tc.limit, tc.offset) - if (err != nil) != tc.wantOpenError { - t.Errorf("openELF got error %v, want any error=%v", err, tc.wantOpenError) - } - if err != nil { - return - } - got, err := o.ObjAddr(tc.addr) - if (err != nil) != tc.wantAddrError { - t.Errorf("ObjAddr got error %v, want any error=%v", err, tc.wantAddrError) - } - if err != nil { - return - } - if got != tc.wantObjAddr { - t.Errorf("got ObjAddr %x; want %x\n", got, tc.wantObjAddr) - } - }) - } -} diff --git a/internal/pprof/binutils/disasm.go b/internal/pprof/binutils/disasm.go deleted file mode 100644 index 93d87880bf7..00000000000 --- a/internal/pprof/binutils/disasm.go +++ /dev/null @@ -1,180 +0,0 @@ -// Copyright 2014 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. - -package binutils - -import ( - "bytes" - "io" - "regexp" - "strconv" - "strings" - - "github.com/parca-dev/parca/internal/pprof/plugin" - "github.com/ianlancetaylor/demangle" -) - -var ( - nmOutputRE = regexp.MustCompile(`^\s*([[:xdigit:]]+)\s+(.)\s+(.*)`) - objdumpAsmOutputRE = regexp.MustCompile(`^\s*([[:xdigit:]]+):\s+(.*)`) - objdumpOutputFileLine = regexp.MustCompile(`^;?\s?(.*):([0-9]+)`) - objdumpOutputFunction = regexp.MustCompile(`^;?\s?(\S.*)\(\):`) - objdumpOutputFunctionLLVM = regexp.MustCompile(`^([[:xdigit:]]+)?\s?(.*):`) -) - -func findSymbols(syms []byte, file string, r *regexp.Regexp, address uint64) ([]*plugin.Sym, error) { - // Collect all symbols from the nm output, grouping names mapped to - // the same address into a single symbol. - - // The symbols to return. - var symbols []*plugin.Sym - - // The current group of symbol names, and the address they are all at. - names, start := []string{}, uint64(0) - - buf := bytes.NewBuffer(syms) - - for { - symAddr, name, err := nextSymbol(buf) - if err == io.EOF { - // Done. If there was an unfinished group, append it. - if len(names) != 0 { - if match := matchSymbol(names, start, symAddr-1, r, address); match != nil { - symbols = append(symbols, &plugin.Sym{Name: match, File: file, Start: start, End: symAddr - 1}) - } - } - - // And return the symbols. - return symbols, nil - } - - if err != nil { - // There was some kind of serious error reading nm's output. - return nil, err - } - - // If this symbol is at the same address as the current group, add it to the group. - if symAddr == start { - names = append(names, name) - continue - } - - // Otherwise append the current group to the list of symbols. - if match := matchSymbol(names, start, symAddr-1, r, address); match != nil { - symbols = append(symbols, &plugin.Sym{Name: match, File: file, Start: start, End: symAddr - 1}) - } - - // And start a new group. - names, start = []string{name}, symAddr - } -} - -// matchSymbol checks if a symbol is to be selected by checking its -// name to the regexp and optionally its address. It returns the name(s) -// to be used for the matched symbol, or nil if no match -func matchSymbol(names []string, start, end uint64, r *regexp.Regexp, address uint64) []string { - if address != 0 && address >= start && address <= end { - return names - } - for _, name := range names { - if r == nil || r.MatchString(name) { - return []string{name} - } - - // Match all possible demangled versions of the name. - for _, o := range [][]demangle.Option{ - {demangle.NoClones}, - {demangle.NoParams}, - {demangle.NoParams, demangle.NoTemplateParams}, - } { - if demangled, err := demangle.ToString(name, o...); err == nil && r.MatchString(demangled) { - return []string{demangled} - } - } - } - return nil -} - -// disassemble parses the output of the objdump command and returns -// the assembly instructions in a slice. 
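A self-contained sketch of the line matching this parser performs (the regexps are copied from the var block above; the sample lines are hypothetical objdump output, not taken from a real listing):

package main

import (
	"fmt"
	"regexp"
	"strconv"
)

var (
	objdumpAsmOutputRE    = regexp.MustCompile(`^\s*([[:xdigit:]]+):\s+(.*)`)
	objdumpOutputFileLine = regexp.MustCompile(`^;?\s?(.*):([0-9]+)`)
)

func main() {
	// A source-context line yields the current file and line number.
	if m := objdumpOutputFileLine.FindStringSubmatch("; /tmp/hello.c:3"); len(m) == 3 {
		fmt.Printf("file=%s line=%s\n", m[1], m[2])
	}
	// An instruction line yields the address and the instruction text.
	if m := objdumpAsmOutputRE.FindStringSubmatch("  40052d: push %rbp"); len(m) == 3 {
		addr, _ := strconv.ParseUint(m[1], 16, 64)
		fmt.Printf("addr=%#x text=%q\n", addr, m[2])
	}
}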
-func disassemble(asm []byte) ([]plugin.Inst, error) { - buf := bytes.NewBuffer(asm) - function, file, line := "", "", 0 - var assembly []plugin.Inst - for { - input, err := buf.ReadString('\n') - if err != nil { - if err != io.EOF { - return nil, err - } - if input == "" { - break - } - } - input = strings.TrimSpace(input) - - if fields := objdumpAsmOutputRE.FindStringSubmatch(input); len(fields) == 3 { - if address, err := strconv.ParseUint(fields[1], 16, 64); err == nil { - assembly = append(assembly, - plugin.Inst{ - Addr: address, - Text: fields[2], - Function: function, - File: file, - Line: line, - }) - continue - } - } - if fields := objdumpOutputFileLine.FindStringSubmatch(input); len(fields) == 3 { - if l, err := strconv.ParseUint(fields[2], 10, 32); err == nil { - file, line = fields[1], int(l) - } - continue - } - if fields := objdumpOutputFunction.FindStringSubmatch(input); len(fields) == 2 { - function = fields[1] - continue - } else { - if fields := objdumpOutputFunctionLLVM.FindStringSubmatch(input); len(fields) == 3 { - function = fields[2] - continue - } - } - // Reset on unrecognized lines. - function, file, line = "", "", 0 - } - - return assembly, nil -} - -// nextSymbol parses the nm output to find the next symbol listed. -// Skips over any output it cannot recognize. -func nextSymbol(buf *bytes.Buffer) (uint64, string, error) { - for { - line, err := buf.ReadString('\n') - if err != nil { - if err != io.EOF || line == "" { - return 0, "", err - } - } - line = strings.TrimSpace(line) - - if fields := nmOutputRE.FindStringSubmatch(line); len(fields) == 4 { - if address, err := strconv.ParseUint(fields[1], 16, 64); err == nil { - return address, fields[3], nil - } - } - } -} diff --git a/internal/pprof/binutils/disasm_test.go b/internal/pprof/binutils/disasm_test.go deleted file mode 100644 index 5ce05e0d04c..00000000000 --- a/internal/pprof/binutils/disasm_test.go +++ /dev/null @@ -1,160 +0,0 @@ -// Copyright 2014 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package binutils - -import ( - "fmt" - "regexp" - "testing" - - "github.com/parca-dev/parca/internal/pprof/plugin" -) - -// TestFindSymbols tests the FindSymbols routine using a hardcoded nm output. 
-func TestFindSymbols(t *testing.T) { - type testcase struct { - query, syms string - want []plugin.Sym - } - - testsyms := `0000000000001000 t lineA001 -0000000000001000 t lineA002 -0000000000001000 t line1000 -0000000000002000 t line200A -0000000000002000 t line2000 -0000000000002000 t line200B -0000000000003000 t line3000 -0000000000003000 t _ZNK4DumbclEPKc -0000000000003000 t lineB00C -0000000000003000 t line300D -0000000000004000 t _the_end - ` - testcases := []testcase{ - { - "line.*[AC]", - testsyms, - []plugin.Sym{ - {Name: []string{"lineA001"}, File: "object.o", Start: 0x1000, End: 0x1FFF}, - {Name: []string{"line200A"}, File: "object.o", Start: 0x2000, End: 0x2FFF}, - {Name: []string{"lineB00C"}, File: "object.o", Start: 0x3000, End: 0x3FFF}, - }, - }, - { - "Dumb::operator", - testsyms, - []plugin.Sym{ - {Name: []string{"Dumb::operator()(char const*) const"}, File: "object.o", Start: 0x3000, End: 0x3FFF}, - }, - }, - } - - for _, tc := range testcases { - syms, err := findSymbols([]byte(tc.syms), "object.o", regexp.MustCompile(tc.query), 0) - if err != nil { - t.Fatalf("%q: findSymbols: %v", tc.query, err) - } - if err := checkSymbol(syms, tc.want); err != nil { - t.Errorf("%q: %v", tc.query, err) - } - } -} - -func checkSymbol(got []*plugin.Sym, want []plugin.Sym) error { - if len(got) != len(want) { - return fmt.Errorf("unexpected number of symbols %d (want %d)", len(got), len(want)) - } - - for i, g := range got { - w := want[i] - if len(g.Name) != len(w.Name) { - return fmt.Errorf("names, got %d, want %d", len(g.Name), len(w.Name)) - } - for n := range g.Name { - if g.Name[n] != w.Name[n] { - return fmt.Errorf("name %d, got %q, want %q", n, g.Name[n], w.Name[n]) - } - } - if g.File != w.File { - return fmt.Errorf("filename, got %q, want %q", g.File, w.File) - } - if g.Start != w.Start { - return fmt.Errorf("start address, got %#x, want %#x", g.Start, w.Start) - } - if g.End != w.End { - return fmt.Errorf("end address, got %#x, want %#x", g.End, w.End) - } - } - return nil -} - -// TestFunctionAssembly tests the FunctionAssembly routine by using a -// fake objdump script. -func TestFunctionAssembly(t *testing.T) { - type testcase struct { - s plugin.Sym - asm string - want []plugin.Inst - } - testcases := []testcase{ - { - plugin.Sym{Name: []string{"symbol1"}, Start: 0x1000, End: 0x1FFF}, - " 1000: instruction one\n 1001: instruction two\n 1002: instruction three\n 1003: instruction four", - []plugin.Inst{ - {Addr: 0x1000, Text: "instruction one"}, - {Addr: 0x1001, Text: "instruction two"}, - {Addr: 0x1002, Text: "instruction three"}, - {Addr: 0x1003, Text: "instruction four"}, - }, - }, - { - plugin.Sym{Name: []string{"symbol2"}, Start: 0x2000, End: 0x2FFF}, - " 2000: instruction one\n 2001: instruction two", - []plugin.Inst{ - {Addr: 0x2000, Text: "instruction one"}, - {Addr: 0x2001, Text: "instruction two"}, - }, - }, - { - plugin.Sym{Name: []string{"_main"}, Start: 0x30000, End: 0x3FFF}, - "_main:\n; /tmp/hello.c:3\n30001: push %rbp", - []plugin.Inst{ - {Addr: 0x30001, Text: "push %rbp", Function: "_main", File: "/tmp/hello.c", Line: 3}, - }, - }, - { - plugin.Sym{Name: []string{"main"}, Start: 0x4000, End: 0x4FFF}, - "000000000040052d
:\nmain():\n/tmp/hello.c:3\n40001: push %rbp", - []plugin.Inst{ - {Addr: 0x40001, Text: "push %rbp", Function: "main", File: "/tmp/hello.c", Line: 3}, - }, - }, - } - - for _, tc := range testcases { - insts, err := disassemble([]byte(tc.asm)) - if err != nil { - t.Fatalf("FunctionAssembly: %v", err) - } - - if len(insts) != len(tc.want) { - t.Errorf("Unexpected number of assembly instructions %d (want %d)\n", len(insts), len(tc.want)) - } - for i := range insts { - if insts[i] != tc.want[i] { - t.Errorf("Expected symbol %v, got %v\n", tc.want[i], insts[i]) - } - } - } -} diff --git a/internal/pprof/binutils/testdata/build_binaries.go b/internal/pprof/binutils/testdata/build_binaries.go deleted file mode 100644 index 695250be974..00000000000 --- a/internal/pprof/binutils/testdata/build_binaries.go +++ /dev/null @@ -1,94 +0,0 @@ -// Copyright 2019 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// This is a script that generates the test executables for MacOS and Linux -// in this directory. It should be needed very rarely to run this script. -// It is mostly provided as a future reference on how the original binary -// set was created. - -// When a new executable is generated, hardcoded addresses in the -// functions TestObjFile, TestMachoFiles, TestPEFile in binutils_test.go must be updated. -package main - -import ( - "log" - "os" - "os/exec" - "path/filepath" - "runtime" -) - -func main() { - wd, err := os.Getwd() - if err != nil { - log.Fatal(err) - } - - switch runtime.GOOS { - case "linux": - if err := removeGlob("exe_linux_64*"); err != nil { - log.Fatal(err) - } - - out, err := exec.Command("cc", "-g", "-ffile-prefix-map="+wd+"="+"/tmp", "-o", "exe_linux_64", "hello.c").CombinedOutput() - log.Println(string(out)) - if err != nil { - log.Fatal(err) - } - - case "darwin": - if err := removeGlob("exe_mac_64*", "lib_mac_64"); err != nil { - log.Fatal(err) - } - - out, err := exec.Command("clang", "-g", "-ffile-prefix-map="+wd+"="+"/tmp", "-o", "exe_mac_64", "hello.c").CombinedOutput() - log.Println(string(out)) - if err != nil { - log.Fatal(err) - } - - out, err = exec.Command("clang", "-g", "-ffile-prefix-map="+wd+"="+"/tmp", "-o", "lib_mac_64", "-dynamiclib", "lib.c").CombinedOutput() - log.Println(string(out)) - if err != nil { - log.Fatal(err) - } - - case "windows": - // Many gcc environments may create binaries that trigger false-positives - // in antiviruses. MSYS2 with gcc 10.2.0 is a working environment for - // compiling. To setup the environment follow the guide at - // https://www.msys2.org/ and install gcc with `pacman -S gcc`. 
- out, err := exec.Command("gcc", "-g", "-ffile-prefix-map="+wd+"=", "-o", "exe_windows_64.exe", "hello.c").CombinedOutput() - log.Println(string(out)) - if err != nil { - log.Fatal(err) - } - log.Println("Please verify that exe_windows_64.exe does not trigger any antivirus on `virustotal.com`.") - default: - log.Fatalf("Unsupported OS %q", runtime.GOOS) - } -} - -func removeGlob(globs ...string) error { - for _, glob := range globs { - matches, err := filepath.Glob(glob) - if err != nil { - return err - } - for _, p := range matches { - os.Remove(p) - } - } - return nil -} diff --git a/internal/pprof/binutils/testdata/exe_linux_64 b/internal/pprof/binutils/testdata/exe_linux_64 deleted file mode 100755 index d86dc7cdfca..00000000000 Binary files a/internal/pprof/binutils/testdata/exe_linux_64 and /dev/null differ diff --git a/internal/pprof/binutils/testdata/exe_mac_64 b/internal/pprof/binutils/testdata/exe_mac_64 deleted file mode 100755 index dba1ae15817..00000000000 Binary files a/internal/pprof/binutils/testdata/exe_mac_64 and /dev/null differ diff --git a/internal/pprof/binutils/testdata/exe_mac_64.dSYM/Contents/Info.plist b/internal/pprof/binutils/testdata/exe_mac_64.dSYM/Contents/Info.plist deleted file mode 100644 index 41ce537f5d9..00000000000 --- a/internal/pprof/binutils/testdata/exe_mac_64.dSYM/Contents/Info.plist +++ /dev/null @@ -1,20 +0,0 @@ - - - - - CFBundleDevelopmentRegion - English - CFBundleIdentifier - com.apple.xcode.dsym.exe_mac_64 - CFBundleInfoDictionaryVersion - 6.0 - CFBundlePackageType - dSYM - CFBundleSignature - ???? - CFBundleShortVersionString - 1.0 - CFBundleVersion - 1 - - diff --git a/internal/pprof/binutils/testdata/exe_mac_64.dSYM/Contents/Resources/DWARF/exe_mac_64 b/internal/pprof/binutils/testdata/exe_mac_64.dSYM/Contents/Resources/DWARF/exe_mac_64 deleted file mode 100644 index 72d80d0b8da..00000000000 Binary files a/internal/pprof/binutils/testdata/exe_mac_64.dSYM/Contents/Resources/DWARF/exe_mac_64 and /dev/null differ diff --git a/internal/pprof/binutils/testdata/exe_windows_64.exe b/internal/pprof/binutils/testdata/exe_windows_64.exe deleted file mode 100644 index 291e3242fa1..00000000000 Binary files a/internal/pprof/binutils/testdata/exe_windows_64.exe and /dev/null differ diff --git a/internal/pprof/binutils/testdata/fake-llvm-symbolizer b/internal/pprof/binutils/testdata/fake-llvm-symbolizer deleted file mode 100755 index a3b4546d97f..00000000000 --- a/internal/pprof/binutils/testdata/fake-llvm-symbolizer +++ /dev/null @@ -1,43 +0,0 @@ -#!/bin/sh -# -# Copyright 2014 Google Inc. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Fake llvm-symbolizer to use in tests - -set -f -IFS=" " - -while read line; do - # line has form: - # filename 0xaddr - # Emit dummy output that matches llvm-symbolizer output format. 
- set -- ${line} - kind=$1 - fname=$2 - addr=$3 - case ${kind} in - CODE) - echo "Inlined_${addr}" - echo "${fname}.h" - echo "Func_${addr}" - echo "${fname}.c:2:1" - echo;; - DATA) - echo "${fname}_${addr}" - echo "${addr} 8" - echo;; - *) echo ${kind} ${fname} ${addr};; - esac -done diff --git a/internal/pprof/binutils/testdata/hello.c b/internal/pprof/binutils/testdata/hello.c deleted file mode 100644 index aed773bca15..00000000000 --- a/internal/pprof/binutils/testdata/hello.c +++ /dev/null @@ -1,6 +0,0 @@ -#include - -int main() { - printf("Hello, world!\n"); - return 0; -} diff --git a/internal/pprof/binutils/testdata/lib.c b/internal/pprof/binutils/testdata/lib.c deleted file mode 100644 index f6207b842f7..00000000000 --- a/internal/pprof/binutils/testdata/lib.c +++ /dev/null @@ -1,7 +0,0 @@ -int foo() { - return 1; -} - -int bar() { - return 2; -} diff --git a/internal/pprof/binutils/testdata/lib_mac_64 b/internal/pprof/binutils/testdata/lib_mac_64 deleted file mode 100755 index 933a3f69310..00000000000 Binary files a/internal/pprof/binutils/testdata/lib_mac_64 and /dev/null differ diff --git a/internal/pprof/binutils/testdata/lib_mac_64.dSYM/Contents/Info.plist b/internal/pprof/binutils/testdata/lib_mac_64.dSYM/Contents/Info.plist deleted file mode 100644 index 409e4cf0c90..00000000000 --- a/internal/pprof/binutils/testdata/lib_mac_64.dSYM/Contents/Info.plist +++ /dev/null @@ -1,20 +0,0 @@ - - - - - CFBundleDevelopmentRegion - English - CFBundleIdentifier - com.apple.xcode.dsym.lib_mac_64 - CFBundleInfoDictionaryVersion - 6.0 - CFBundlePackageType - dSYM - CFBundleSignature - ???? - CFBundleShortVersionString - 1.0 - CFBundleVersion - 1 - - diff --git a/internal/pprof/binutils/testdata/lib_mac_64.dSYM/Contents/Resources/DWARF/lib_mac_64 b/internal/pprof/binutils/testdata/lib_mac_64.dSYM/Contents/Resources/DWARF/lib_mac_64 deleted file mode 100644 index 73e73e4a9c8..00000000000 Binary files a/internal/pprof/binutils/testdata/lib_mac_64.dSYM/Contents/Resources/DWARF/lib_mac_64 and /dev/null differ diff --git a/internal/pprof/binutils/testdata/malformed_elf b/internal/pprof/binutils/testdata/malformed_elf deleted file mode 100644 index f0b503b0b6c..00000000000 --- a/internal/pprof/binutils/testdata/malformed_elf +++ /dev/null @@ -1 +0,0 @@ -ELF \ No newline at end of file diff --git a/internal/pprof/binutils/testdata/malformed_macho b/internal/pprof/binutils/testdata/malformed_macho deleted file mode 100644 index b01ddf69a9a..00000000000 --- a/internal/pprof/binutils/testdata/malformed_macho +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/internal/pprof/driver/cli.go b/internal/pprof/driver/cli.go deleted file mode 100644 index e6ee25aa7d9..00000000000 --- a/internal/pprof/driver/cli.go +++ /dev/null @@ -1,367 +0,0 @@ -// Copyright 2014 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
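The parseFlags function in this file treats a leading positional argument as a build-ID override only when it looks like a raw hex string; a minimal sketch of that check, mirroring the isBuildID helper defined later in the file:

package main

import (
	"fmt"
	"strings"
)

// Same heuristic as isBuildID below: the candidate is a build ID if stripping
// hex digits from both ends leaves nothing behind.
func isBuildID(id string) bool {
	return strings.Trim(id, "0123456789abcdefABCDEF") == ""
}

func main() {
	fmt.Println(isBuildID("deadbeef0123")) // true: all hex digits
	fmt.Println(isBuildID("exe_linux_64")) // false: 'x' and '_' are not hex digits
}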
- -package driver - -import ( - "errors" - "fmt" - "os" - "strings" - - "github.com/parca-dev/parca/internal/pprof/binutils" - "github.com/parca-dev/parca/internal/pprof/plugin" -) - -type source struct { - Sources []string - ExecName string - BuildID string - Base []string - DiffBase bool - Normalize bool - - Seconds int - Timeout int - Symbolize string - HTTPHostport string - HTTPDisableBrowser bool - Comment string -} - -// parseFlags parses the command lines through the specified flags package -// and returns the source of the profile and optionally the command -// for the kind of report to generate (nil for interactive use). -func parseFlags(o *plugin.Options) (*source, []string, error) { - flag := o.Flagset - // Comparisons. - flagDiffBase := flag.StringList("diff_base", "", "Source of base profile for comparison") - flagBase := flag.StringList("base", "", "Source of base profile for profile subtraction") - // Source options. - flagSymbolize := flag.String("symbolize", "", "Options for profile symbolization") - flagBuildID := flag.String("buildid", "", "Override build id for first mapping") - flagTimeout := flag.Int("timeout", -1, "Timeout in seconds for fetching a profile") - flagAddComment := flag.String("add_comment", "", "Annotation string to record in the profile") - // CPU profile options - flagSeconds := flag.Int("seconds", -1, "Length of time for dynamic profiles") - // Heap profile options - flagInUseSpace := flag.Bool("inuse_space", false, "Display in-use memory size") - flagInUseObjects := flag.Bool("inuse_objects", false, "Display in-use object counts") - flagAllocSpace := flag.Bool("alloc_space", false, "Display allocated memory size") - flagAllocObjects := flag.Bool("alloc_objects", false, "Display allocated object counts") - // Contention profile options - flagTotalDelay := flag.Bool("total_delay", false, "Display total delay at each region") - flagContentions := flag.Bool("contentions", false, "Display number of delays at each region") - flagMeanDelay := flag.Bool("mean_delay", false, "Display mean delay at each region") - flagTools := flag.String("tools", os.Getenv("PPROF_TOOLS"), "Path for object tool pathnames") - - flagHTTP := flag.String("http", "", "Present interactive web UI at the specified http host:port") - flagNoBrowser := flag.Bool("no_browser", false, "Skip opening a browswer for the interactive web UI") - - // Flags that set configuration properties. - cfg := currentConfig() - configFlagSetter := installConfigFlags(flag, &cfg) - - flagCommands := make(map[string]*bool) - flagParamCommands := make(map[string]*string) - for name, cmd := range pprofCommands { - if cmd.hasParam { - flagParamCommands[name] = flag.String(name, "", "Generate a report in "+name+" format, matching regexp") - } else { - flagCommands[name] = flag.Bool(name, false, "Generate a report in "+name+" format") - } - } - - args := flag.Parse(func() { - o.UI.Print(usageMsgHdr + - usage(true) + - usageMsgSrc + - flag.ExtraUsage() + - usageMsgVars) - }) - if len(args) == 0 { - return nil, nil, errors.New("no profile source specified") - } - - var execName string - // Recognize first argument as an executable or buildid override. - if len(args) > 1 { - arg0 := args[0] - if file, err := o.Obj.Open(arg0, 0, ^uint64(0), 0); err == nil { - file.Close() - execName = arg0 - args = args[1:] - } else if *flagBuildID == "" && isBuildID(arg0) { - *flagBuildID = arg0 - args = args[1:] - } - } - - // Apply any specified flags to cfg. 
- if err := configFlagSetter(); err != nil { - return nil, nil, err - } - - cmd, err := outputFormat(flagCommands, flagParamCommands) - if err != nil { - return nil, nil, err - } - if cmd != nil && *flagHTTP != "" { - return nil, nil, errors.New("-http is not compatible with an output format on the command line") - } - - if *flagNoBrowser && *flagHTTP == "" { - return nil, nil, errors.New("-no_browser only makes sense with -http") - } - - si := cfg.SampleIndex - si = sampleIndex(flagTotalDelay, si, "delay", "-total_delay", o.UI) - si = sampleIndex(flagMeanDelay, si, "delay", "-mean_delay", o.UI) - si = sampleIndex(flagContentions, si, "contentions", "-contentions", o.UI) - si = sampleIndex(flagInUseSpace, si, "inuse_space", "-inuse_space", o.UI) - si = sampleIndex(flagInUseObjects, si, "inuse_objects", "-inuse_objects", o.UI) - si = sampleIndex(flagAllocSpace, si, "alloc_space", "-alloc_space", o.UI) - si = sampleIndex(flagAllocObjects, si, "alloc_objects", "-alloc_objects", o.UI) - cfg.SampleIndex = si - - if *flagMeanDelay { - cfg.Mean = true - } - - source := &source{ - Sources: args, - ExecName: execName, - BuildID: *flagBuildID, - Seconds: *flagSeconds, - Timeout: *flagTimeout, - Symbolize: *flagSymbolize, - HTTPHostport: *flagHTTP, - HTTPDisableBrowser: *flagNoBrowser, - Comment: *flagAddComment, - } - - if err := source.addBaseProfiles(*flagBase, *flagDiffBase); err != nil { - return nil, nil, err - } - - normalize := cfg.Normalize - if normalize && len(source.Base) == 0 { - return nil, nil, errors.New("must have base profile to normalize by") - } - source.Normalize = normalize - - if bu, ok := o.Obj.(*binutils.Binutils); ok { - bu.SetTools(*flagTools) - } - - setCurrentConfig(cfg) - return source, cmd, nil -} - -// addBaseProfiles adds the list of base profiles or diff base profiles to -// the source. This function will return an error if both base and diff base -// profiles are specified. -func (source *source) addBaseProfiles(flagBase, flagDiffBase []*string) error { - base, diffBase := dropEmpty(flagBase), dropEmpty(flagDiffBase) - if len(base) > 0 && len(diffBase) > 0 { - return errors.New("-base and -diff_base flags cannot both be specified") - } - - source.Base = base - if len(diffBase) > 0 { - source.Base, source.DiffBase = diffBase, true - } - return nil -} - -// dropEmpty list takes a slice of string pointers, and outputs a slice of -// non-empty strings associated with the flag. -func dropEmpty(list []*string) []string { - var l []string - for _, s := range list { - if *s != "" { - l = append(l, *s) - } - } - return l -} - -// installConfigFlags creates command line flags for configuration -// fields and returns a function which can be called after flags have -// been parsed to copy any flags specified on the command line to -// *cfg. -func installConfigFlags(flag plugin.FlagSet, cfg *config) func() error { - // List of functions for setting the different parts of a config. - var setters []func() - var err error // Holds any errors encountered while running setters. 
- - for _, field := range configFields { - n := field.name - help := configHelp[n] - var setter func() - switch ptr := cfg.fieldPtr(field).(type) { - case *bool: - f := flag.Bool(n, *ptr, help) - setter = func() { *ptr = *f } - case *int: - f := flag.Int(n, *ptr, help) - setter = func() { *ptr = *f } - case *float64: - f := flag.Float64(n, *ptr, help) - setter = func() { *ptr = *f } - case *string: - if len(field.choices) == 0 { - f := flag.String(n, *ptr, help) - setter = func() { *ptr = *f } - } else { - // Make a separate flag per possible choice. - // Set all flags to initially false so we can - // identify conflicts. - bools := make(map[string]*bool) - for _, choice := range field.choices { - bools[choice] = flag.Bool(choice, false, configHelp[choice]) - } - setter = func() { - var set []string - for k, v := range bools { - if *v { - set = append(set, k) - } - } - switch len(set) { - case 0: - // Leave as default value. - case 1: - *ptr = set[0] - default: - err = fmt.Errorf("conflicting options set: %v", set) - } - } - } - } - setters = append(setters, setter) - } - - return func() error { - // Apply the setter for every flag. - for _, setter := range setters { - setter() - if err != nil { - return err - } - } - return nil - } -} - -// isBuildID determines if the profile may contain a build ID, by -// checking that it is a string of hex digits. -func isBuildID(id string) bool { - return strings.Trim(id, "0123456789abcdefABCDEF") == "" -} - -func sampleIndex(flag *bool, si string, sampleType, option string, ui plugin.UI) string { - if *flag { - if si == "" { - return sampleType - } - ui.PrintErr("Multiple value selections, ignoring ", option) - } - return si -} - -func outputFormat(bcmd map[string]*bool, acmd map[string]*string) (cmd []string, err error) { - for n, b := range bcmd { - if *b { - if cmd != nil { - return nil, errors.New("must set at most one output format") - } - cmd = []string{n} - } - } - for n, s := range acmd { - if *s != "" { - if cmd != nil { - return nil, errors.New("must set at most one output format") - } - cmd = []string{n, *s} - } - } - return cmd, nil -} - -var usageMsgHdr = `usage: - -Produce output in the specified format. - - pprof [options] [binary] ... - -Omit the format to get an interactive shell whose commands can be used -to generate various views of a profile - - pprof [options] [binary] ... - -Omit the format and provide the "-http" flag to get an interactive web -interface at the specified host:port that can be used to navigate through -various views of a profile. - - pprof -http [host]:[port] [options] [binary] ... 
- -Details: -` - -var usageMsgSrc = "\n\n" + - " Source options:\n" + - " -seconds Duration for time-based profile collection\n" + - " -timeout Timeout in seconds for profile collection\n" + - " -buildid Override build id for main binary\n" + - " -add_comment Free-form annotation to add to the profile\n" + - " Displayed on some reports or with pprof -comments\n" + - " -diff_base source Source of base profile for comparison\n" + - " -base source Source of base profile for profile subtraction\n" + - " profile.pb.gz Profile in compressed protobuf format\n" + - " legacy_profile Profile in legacy pprof format\n" + - " http://host/profile URL for profile handler to retrieve\n" + - " -symbolize= Controls source of symbol information\n" + - " none Do not attempt symbolization\n" + - " local Examine only local binaries\n" + - " fastlocal Only get function names from local binaries\n" + - " remote Do not examine local binaries\n" + - " force Force re-symbolization\n" + - " Binary Local path or build id of binary for symbolization\n" - -var usageMsgVars = "\n\n" + - " Misc options:\n" + - " -http Provide web interface at host:port.\n" + - " Host is optional and 'localhost' by default.\n" + - " Port is optional and a randomly available port by default.\n" + - " -no_browser Skip opening a browser for the interactive web UI.\n" + - " -tools Search path for object tools\n" + - "\n" + - " Legacy convenience options:\n" + - " -inuse_space Same as -sample_index=inuse_space\n" + - " -inuse_objects Same as -sample_index=inuse_objects\n" + - " -alloc_space Same as -sample_index=alloc_space\n" + - " -alloc_objects Same as -sample_index=alloc_objects\n" + - " -total_delay Same as -sample_index=delay\n" + - " -contentions Same as -sample_index=contentions\n" + - " -mean_delay Same as -mean -sample_index=delay\n" + - "\n" + - " Environment Variables:\n" + - " PPROF_TMPDIR Location for saved profiles (default $HOME/pprof)\n" + - " PPROF_TOOLS Search path for object-level tools\n" + - " PPROF_BINARY_PATH Search path for local binary files\n" + - " default: $HOME/pprof/binaries\n" + - " searches $name, $path, $buildid/$name, $path/$buildid\n" + - " * On Windows, %USERPROFILE% is used instead of $HOME" diff --git a/internal/pprof/driver/commands.go b/internal/pprof/driver/commands.go deleted file mode 100644 index 26c817ee10f..00000000000 --- a/internal/pprof/driver/commands.go +++ /dev/null @@ -1,451 +0,0 @@ -// Copyright 2014 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package driver - -import ( - "bytes" - "fmt" - "io" - "os" - "os/exec" - "runtime" - "sort" - "strings" - "time" - - "github.com/parca-dev/parca/internal/pprof/plugin" - "github.com/parca-dev/parca/internal/pprof/report" -) - -// commands describes the commands accepted by pprof. -type commands map[string]*command - -// command describes the actions for a pprof command. 
Includes a -// function for command-line completion, the report format to use -// during report generation, any postprocessing functions, and whether -// the command expects a regexp parameter (typically a function name). -type command struct { - format int // report format to generate - postProcess PostProcessor // postprocessing to run on report - visualizer PostProcessor // display output using some callback - hasParam bool // collect a parameter from the CLI - description string // single-line description text saying what the command does - usage string // multi-line help text saying how the command is used -} - -// help returns a help string for a command. -func (c *command) help(name string) string { - message := c.description + "\n" - if c.usage != "" { - message += " Usage:\n" - lines := strings.Split(c.usage, "\n") - for _, line := range lines { - message += fmt.Sprintf(" %s\n", line) - } - } - return message + "\n" -} - -// AddCommand adds an additional command to the set of commands -// accepted by pprof. This enables extensions to add new commands for -// specialized visualization formats. If the command specified already -// exists, it is overwritten. -func AddCommand(cmd string, format int, post PostProcessor, desc, usage string) { - pprofCommands[cmd] = &command{format, post, nil, false, desc, usage} -} - -// SetVariableDefault sets the default value for a pprof -// variable. This enables extensions to set their own defaults. -func SetVariableDefault(variable, value string) { - configure(variable, value) -} - -// PostProcessor is a function that applies post-processing to the report output -type PostProcessor func(input io.Reader, output io.Writer, ui plugin.UI) error - -// interactiveMode is true if pprof is running on interactive mode, reading -// commands from its shell. -var interactiveMode = false - -// pprofCommands are the report generation commands recognized by pprof. -var pprofCommands = commands{ - // Commands that require no post-processing. 
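	// Each entry below is positional and mirrors the command struct above:
	// {format, postProcess, visualizer, hasParam, description, usage}.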
- "comments": {report.Comments, nil, nil, false, "Output all profile comments", ""}, - "disasm": {report.Dis, nil, nil, true, "Output assembly listings annotated with samples", listHelp("disasm", true)}, - "dot": {report.Dot, nil, nil, false, "Outputs a graph in DOT format", reportHelp("dot", false, true)}, - "list": {report.List, nil, nil, true, "Output annotated source for functions matching regexp", listHelp("list", false)}, - "peek": {report.Tree, nil, nil, true, "Output callers/callees of functions matching regexp", "peek func_regex\nDisplay callers and callees of functions matching func_regex."}, - "raw": {report.Raw, nil, nil, false, "Outputs a text representation of the raw profile", ""}, - "tags": {report.Tags, nil, nil, false, "Outputs all tags in the profile", "tags [tag_regex]* [-ignore_regex]* [>file]\nList tags with key:value matching tag_regex and exclude ignore_regex."}, - "text": {report.Text, nil, nil, false, "Outputs top entries in text form", reportHelp("text", true, true)}, - "top": {report.Text, nil, nil, false, "Outputs top entries in text form", reportHelp("top", true, true)}, - "traces": {report.Traces, nil, nil, false, "Outputs all profile samples in text form", ""}, - "tree": {report.Tree, nil, nil, false, "Outputs a text rendering of call graph", reportHelp("tree", true, true)}, - - // Save binary formats to a file - "callgrind": {report.Callgrind, nil, awayFromTTY("callgraph.out"), false, "Outputs a graph in callgrind format", reportHelp("callgrind", false, true)}, - "proto": {report.Proto, nil, awayFromTTY("pb.gz"), false, "Outputs the profile in compressed protobuf format", ""}, - "topproto": {report.TopProto, nil, awayFromTTY("pb.gz"), false, "Outputs top entries in compressed protobuf format", ""}, - - // Generate report in DOT format and postprocess with dot - "gif": {report.Dot, invokeDot("gif"), awayFromTTY("gif"), false, "Outputs a graph image in GIF format", reportHelp("gif", false, true)}, - "pdf": {report.Dot, invokeDot("pdf"), awayFromTTY("pdf"), false, "Outputs a graph in PDF format", reportHelp("pdf", false, true)}, - "png": {report.Dot, invokeDot("png"), awayFromTTY("png"), false, "Outputs a graph image in PNG format", reportHelp("png", false, true)}, - "ps": {report.Dot, invokeDot("ps"), awayFromTTY("ps"), false, "Outputs a graph in PS format", reportHelp("ps", false, true)}, - - // Save SVG output into a file - "svg": {report.Dot, massageDotSVG(), awayFromTTY("svg"), false, "Outputs a graph in SVG format", reportHelp("svg", false, true)}, - - // Visualize postprocessed dot output - "eog": {report.Dot, invokeDot("svg"), invokeVisualizer("svg", []string{"eog"}), false, "Visualize graph through eog", reportHelp("eog", false, false)}, - "evince": {report.Dot, invokeDot("pdf"), invokeVisualizer("pdf", []string{"evince"}), false, "Visualize graph through evince", reportHelp("evince", false, false)}, - "gv": {report.Dot, invokeDot("ps"), invokeVisualizer("ps", []string{"gv --noantialias"}), false, "Visualize graph through gv", reportHelp("gv", false, false)}, - "web": {report.Dot, massageDotSVG(), invokeVisualizer("svg", browsers()), false, "Visualize graph through web browser", reportHelp("web", false, false)}, - - // Visualize callgrind output - "kcachegrind": {report.Callgrind, nil, invokeVisualizer("grind", kcachegrind), false, "Visualize report in KCachegrind", reportHelp("kcachegrind", false, false)}, - - // Visualize HTML directly generated by report. 
- "weblist": {report.WebList, nil, invokeVisualizer("html", browsers()), true, "Display annotated source in a web browser", listHelp("weblist", false)}, -} - -// configHelp contains help text per configuration parameter. -var configHelp = map[string]string{ - // Filename for file-based output formats, stdout by default. - "output": helpText("Output filename for file-based outputs"), - - // Comparisons. - "drop_negative": helpText( - "Ignore negative differences", - "Do not show any locations with values <0."), - - // Graph handling options. - "call_tree": helpText( - "Create a context-sensitive call tree", - "Treat locations reached through different paths as separate."), - - // Display options. - "relative_percentages": helpText( - "Show percentages relative to focused subgraph", - "If unset, percentages are relative to full graph before focusing", - "to facilitate comparison with original graph."), - "unit": helpText( - "Measurement units to display", - "Scale the sample values to this unit.", - "For time-based profiles, use seconds, milliseconds, nanoseconds, etc.", - "For memory profiles, use megabytes, kilobytes, bytes, etc.", - "Using auto will scale each value independently to the most natural unit."), - "compact_labels": "Show minimal headers", - "source_path": "Search path for source files", - "trim_path": "Path to trim from source paths before search", - "intel_syntax": helpText( - "Show assembly in Intel syntax", - "Only applicable to commands `disasm` and `weblist`"), - - // Filtering options - "nodecount": helpText( - "Max number of nodes to show", - "Uses heuristics to limit the number of locations to be displayed.", - "On graphs, dotted edges represent paths through nodes that have been removed."), - "nodefraction": "Hide nodes below *total", - "edgefraction": "Hide edges below *total", - "trim": helpText( - "Honor nodefraction/edgefraction/nodecount defaults", - "Set to false to get the full profile, without any trimming."), - "focus": helpText( - "Restricts to samples going through a node matching regexp", - "Discard samples that do not include a node matching this regexp.", - "Matching includes the function name, filename or object name."), - "ignore": helpText( - "Skips paths going through any nodes matching regexp", - "If set, discard samples that include a node matching this regexp.", - "Matching includes the function name, filename or object name."), - "prune_from": helpText( - "Drops any functions below the matched frame.", - "If set, any frames matching the specified regexp and any frames", - "below it will be dropped from each sample."), - "hide": helpText( - "Skips nodes matching regexp", - "Discard nodes that match this location.", - "Other nodes from samples that include this location will be shown.", - "Matching includes the function name, filename or object name."), - "show": helpText( - "Only show nodes matching regexp", - "If set, only show nodes that match this location.", - "Matching includes the function name, filename or object name."), - "show_from": helpText( - "Drops functions above the highest matched frame.", - "If set, all frames above the highest match are dropped from every sample.", - "Matching includes the function name, filename or object name."), - "tagfocus": helpText( - "Restricts to samples with tags in range or matched by regexp", - "Use name=value syntax to limit the matching to a specific tag.", - "Numeric tag filter examples: 1kb, 1kb:10kb, memory=32mb:", - "String tag filter examples: foo, foo.*bar, mytag=foo.*bar"), - "tagignore": 
helpText( - "Discard samples with tags in range or matched by regexp", - "Use name=value syntax to limit the matching to a specific tag.", - "Numeric tag filter examples: 1kb, 1kb:10kb, memory=32mb:", - "String tag filter examples: foo, foo.*bar, mytag=foo.*bar"), - "tagshow": helpText( - "Only consider tags matching this regexp", - "Discard tags that do not match this regexp"), - "taghide": helpText( - "Skip tags matching this regexp", - "Discard tags that match this regexp"), - // Heap profile options - "divide_by": helpText( - "Ratio to divide all samples before visualization", - "Divide all samples values by a constant, eg the number of processors or jobs."), - "mean": helpText( - "Average sample value over first value (count)", - "For memory profiles, report average memory per allocation.", - "For time-based profiles, report average time per event."), - "sample_index": helpText( - "Sample value to report (0-based index or name)", - "Profiles contain multiple values per sample.", - "Use sample_index=i to select the ith value (starting at 0)."), - "normalize": helpText( - "Scales profile based on the base profile."), - - // Data sorting criteria - "flat": helpText("Sort entries based on own weight"), - "cum": helpText("Sort entries based on cumulative weight"), - - // Output granularity - "functions": helpText( - "Aggregate at the function level.", - "Ignores the filename where the function was defined."), - "filefunctions": helpText( - "Aggregate at the function level.", - "Takes into account the filename where the function was defined."), - "files": "Aggregate at the file level.", - "lines": "Aggregate at the source code line level.", - "addresses": helpText( - "Aggregate at the address level.", - "Includes functions' addresses in the output."), - "noinlines": helpText( - "Ignore inlines.", - "Attributes inlined functions to their first out-of-line caller."), -} - -func helpText(s ...string) string { - return strings.Join(s, "\n") + "\n" -} - -// usage returns a string describing the pprof commands and configuration -// options. if commandLine is set, the output reflect cli usage. -func usage(commandLine bool) string { - var prefix string - if commandLine { - prefix = "-" - } - fmtHelp := func(c, d string) string { - return fmt.Sprintf(" %-16s %s", c, strings.SplitN(d, "\n", 2)[0]) - } - - var commands []string - for name, cmd := range pprofCommands { - commands = append(commands, fmtHelp(prefix+name, cmd.description)) - } - sort.Strings(commands) - - var help string - if commandLine { - help = " Output formats (select at most one):\n" - } else { - help = " Commands:\n" - commands = append(commands, fmtHelp("o/options", "List options and their current values")) - commands = append(commands, fmtHelp("q/quit/exit/^D", "Exit pprof")) - } - - help = help + strings.Join(commands, "\n") + "\n\n" + - " Options:\n" - - // Print help for configuration options after sorting them. - // Collect choices for multi-choice options print them together. - var variables []string - var radioStrings []string - for _, f := range configFields { - if len(f.choices) == 0 { - variables = append(variables, fmtHelp(prefix+f.name, configHelp[f.name])) - continue - } - // Format help for for this group. 
- s := []string{fmtHelp(f.name, "")} - for _, choice := range f.choices { - s = append(s, " "+fmtHelp(prefix+choice, configHelp[choice])) - } - radioStrings = append(radioStrings, strings.Join(s, "\n")) - } - sort.Strings(variables) - sort.Strings(radioStrings) - return help + strings.Join(variables, "\n") + "\n\n" + - " Option groups (only set one per group):\n" + - strings.Join(radioStrings, "\n") -} - -func reportHelp(c string, cum, redirect bool) string { - h := []string{ - c + " [n] [focus_regex]* [-ignore_regex]*", - "Include up to n samples", - "Include samples matching focus_regex, and exclude ignore_regex.", - } - if cum { - h[0] += " [-cum]" - h = append(h, "-cum sorts the output by cumulative weight") - } - if redirect { - h[0] += " >f" - h = append(h, "Optionally save the report on the file f") - } - return strings.Join(h, "\n") -} - -func listHelp(c string, redirect bool) string { - h := []string{ - c + " [-focus_regex]* [-ignore_regex]*", - "Include functions matching func_regex, or including the address specified.", - "Include samples matching focus_regex, and exclude ignore_regex.", - } - if redirect { - h[0] += " >f" - h = append(h, "Optionally save the report on the file f") - } - return strings.Join(h, "\n") -} - -// browsers returns a list of commands to attempt for web visualization. -func browsers() []string { - var cmds []string - if userBrowser := os.Getenv("BROWSER"); userBrowser != "" { - cmds = append(cmds, userBrowser) - } - switch runtime.GOOS { - case "darwin": - cmds = append(cmds, "/usr/bin/open") - case "windows": - cmds = append(cmds, "cmd /c start") - default: - // Commands opening browsers are prioritized over xdg-open, so browser() - // command can be used on linux to open the .svg file generated by the -web - // command (the .svg file includes embedded javascript so is best viewed in - // a browser). - cmds = append(cmds, []string{"chrome", "google-chrome", "chromium", "firefox", "sensible-browser"}...) - if os.Getenv("DISPLAY") != "" { - // xdg-open is only for use in a desktop environment. - cmds = append(cmds, "xdg-open") - } - } - return cmds -} - -var kcachegrind = []string{"kcachegrind"} - -// awayFromTTY saves the output in a file if it would otherwise go to -// the terminal screen. This is used to avoid dumping binary data on -// the screen. -func awayFromTTY(format string) PostProcessor { - return func(input io.Reader, output io.Writer, ui plugin.UI) error { - if output == os.Stdout && (ui.IsTerminal() || interactiveMode) { - tempFile, err := newTempFile("", "profile", "."+format) - if err != nil { - return err - } - ui.PrintErr("Generating report in ", tempFile.Name()) - output = tempFile - } - _, err := io.Copy(output, input) - return err - } -} - -func invokeDot(format string) PostProcessor { - return func(input io.Reader, output io.Writer, ui plugin.UI) error { - cmd := exec.Command("dot", "-T"+format) - cmd.Stdin, cmd.Stdout, cmd.Stderr = input, output, os.Stderr - if err := cmd.Run(); err != nil { - return fmt.Errorf("failed to execute dot. Is Graphviz installed? Error: %v", err) - } - return nil - } -} - -// massageDotSVG invokes the dot tool to generate an SVG image and alters -// the image to have panning capabilities when viewed in a browser. 
-func massageDotSVG() PostProcessor { - generateSVG := invokeDot("svg") - return func(input io.Reader, output io.Writer, ui plugin.UI) error { - baseSVG := new(bytes.Buffer) - if err := generateSVG(input, baseSVG, ui); err != nil { - return err - } - _, err := output.Write([]byte(massageSVG(baseSVG.String()))) - return err - } -} - -func invokeVisualizer(suffix string, visualizers []string) PostProcessor { - return func(input io.Reader, output io.Writer, ui plugin.UI) error { - tempFile, err := newTempFile(os.TempDir(), "pprof", "."+suffix) - if err != nil { - return err - } - deferDeleteTempFile(tempFile.Name()) - if _, err := io.Copy(tempFile, input); err != nil { - return err - } - tempFile.Close() - // Try visualizers until one is successful - for _, v := range visualizers { - // Separate command and arguments for exec.Command. - args := strings.Split(v, " ") - if len(args) == 0 { - continue - } - viewer := exec.Command(args[0], append(args[1:], tempFile.Name())...) - viewer.Stderr = os.Stderr - if err = viewer.Start(); err == nil { - // Wait for a second so that the visualizer has a chance to - // open the input file. This needs to be done even if we're - // waiting for the visualizer as it can be just a wrapper that - // spawns a browser tab and returns right away. - defer func(t <-chan time.Time) { - <-t - }(time.After(time.Second)) - // On interactive mode, let the visualizer run in the background - // so other commands can be issued. - if !interactiveMode { - return viewer.Wait() - } - return nil - } - } - return err - } -} - -// stringToBool is a custom parser for bools. We avoid using strconv.ParseBool -// to remain compatible with old pprof behavior (e.g., treating "" as true). -func stringToBool(s string) (bool, error) { - switch strings.ToLower(s) { - case "true", "t", "yes", "y", "1", "": - return true, nil - case "false", "f", "no", "n", "0": - return false, nil - default: - return false, fmt.Errorf(`illegal value "%s" for bool variable`, s) - } -} diff --git a/internal/pprof/driver/config.go b/internal/pprof/driver/config.go deleted file mode 100644 index b3f82f22c9d..00000000000 --- a/internal/pprof/driver/config.go +++ /dev/null @@ -1,367 +0,0 @@ -package driver - -import ( - "fmt" - "net/url" - "reflect" - "strconv" - "strings" - "sync" -) - -// config holds settings for a single named config. -// The JSON tag name for a field is used both for JSON encoding and as -// a named variable. -type config struct { - // Filename for file-based output formats, stdout by default. - Output string `json:"-"` - - // Display options. 
- CallTree bool `json:"call_tree,omitempty"` - RelativePercentages bool `json:"relative_percentages,omitempty"` - Unit string `json:"unit,omitempty"` - CompactLabels bool `json:"compact_labels,omitempty"` - SourcePath string `json:"-"` - TrimPath string `json:"-"` - IntelSyntax bool `json:"intel_syntax,omitempty"` - Mean bool `json:"mean,omitempty"` - SampleIndex string `json:"-"` - DivideBy float64 `json:"-"` - Normalize bool `json:"normalize,omitempty"` - Sort string `json:"sort,omitempty"` - - // Filtering options - DropNegative bool `json:"drop_negative,omitempty"` - NodeCount int `json:"nodecount,omitempty"` - NodeFraction float64 `json:"nodefraction,omitempty"` - EdgeFraction float64 `json:"edgefraction,omitempty"` - Trim bool `json:"trim,omitempty"` - Focus string `json:"focus,omitempty"` - Ignore string `json:"ignore,omitempty"` - PruneFrom string `json:"prune_from,omitempty"` - Hide string `json:"hide,omitempty"` - Show string `json:"show,omitempty"` - ShowFrom string `json:"show_from,omitempty"` - TagFocus string `json:"tagfocus,omitempty"` - TagIgnore string `json:"tagignore,omitempty"` - TagShow string `json:"tagshow,omitempty"` - TagHide string `json:"taghide,omitempty"` - NoInlines bool `json:"noinlines,omitempty"` - - // Output granularity - Granularity string `json:"granularity,omitempty"` -} - -// defaultConfig returns the default configuration values; it is unaffected by -// flags and interactive assignments. -func defaultConfig() config { - return config{ - Unit: "minimum", - NodeCount: -1, - NodeFraction: 0.005, - EdgeFraction: 0.001, - Trim: true, - DivideBy: 1.0, - Sort: "flat", - Granularity: "functions", - } -} - -// currentConfig holds the current configuration values; it is affected by -// flags and interactive assignments. -var currentCfg = defaultConfig() -var currentMu sync.Mutex - -func currentConfig() config { - currentMu.Lock() - defer currentMu.Unlock() - return currentCfg -} - -func setCurrentConfig(cfg config) { - currentMu.Lock() - defer currentMu.Unlock() - currentCfg = cfg -} - -// configField contains metadata for a single configuration field. -type configField struct { - name string // JSON field name/key in variables - urlparam string // URL parameter name - saved bool // Is field saved in settings? - field reflect.StructField // Field in config - choices []string // Name Of variables in group - defaultValue string // Default value for this field. -} - -var ( - configFields []configField // Precomputed metadata per config field - - // configFieldMap holds an entry for every config field as well as an - // entry for every valid choice for a multi-choice field. - configFieldMap map[string]configField -) - -func init() { - // Config names for fields that are not saved in settings and therefore - // do not have a JSON name. - notSaved := map[string]string{ - // Not saved in settings, but present in URLs. - "SampleIndex": "sample_index", - - // Following fields are also not placed in URLs. - "Output": "output", - "SourcePath": "source_path", - "TrimPath": "trim_path", - "DivideBy": "divide_by", - } - - // choices holds the list of allowed values for config fields that can - // take on one of a bounded set of values. - choices := map[string][]string{ - "sort": {"cum", "flat"}, - "granularity": {"functions", "filefunctions", "files", "lines", "addresses"}, - } - - // urlparam holds the mapping from a config field name to the URL - // parameter used to hold that config field. 
If no entry is present for - // a name, the corresponding field is not saved in URLs. - urlparam := map[string]string{ - "drop_negative": "dropneg", - "call_tree": "calltree", - "relative_percentages": "rel", - "unit": "unit", - "compact_labels": "compact", - "intel_syntax": "intel", - "nodecount": "n", - "nodefraction": "nf", - "edgefraction": "ef", - "trim": "trim", - "focus": "f", - "ignore": "i", - "prune_from": "prunefrom", - "hide": "h", - "show": "s", - "show_from": "sf", - "tagfocus": "tf", - "tagignore": "ti", - "tagshow": "ts", - "taghide": "th", - "mean": "mean", - "sample_index": "si", - "normalize": "norm", - "sort": "sort", - "granularity": "g", - "noinlines": "noinlines", - } - - def := defaultConfig() - configFieldMap = map[string]configField{} - t := reflect.TypeOf(config{}) - for i, n := 0, t.NumField(); i < n; i++ { - field := t.Field(i) - js := strings.Split(field.Tag.Get("json"), ",") - if len(js) == 0 { - continue - } - // Get the configuration name for this field. - name := js[0] - if name == "-" { - name = notSaved[field.Name] - if name == "" { - // Not a configurable field. - continue - } - } - f := configField{ - name: name, - urlparam: urlparam[name], - saved: (name == js[0]), - field: field, - choices: choices[name], - } - f.defaultValue = def.get(f) - configFields = append(configFields, f) - configFieldMap[f.name] = f - for _, choice := range f.choices { - configFieldMap[choice] = f - } - } -} - -// fieldPtr returns a pointer to the field identified by f in *cfg. -func (cfg *config) fieldPtr(f configField) interface{} { - // reflect.ValueOf: converts to reflect.Value - // Elem: dereferences cfg to make *cfg - // FieldByIndex: fetches the field - // Addr: takes address of field - // Interface: converts back from reflect.Value to a regular value - return reflect.ValueOf(cfg).Elem().FieldByIndex(f.field.Index).Addr().Interface() -} - -// get returns the value of field f in cfg. -func (cfg *config) get(f configField) string { - switch ptr := cfg.fieldPtr(f).(type) { - case *string: - return *ptr - case *int: - return fmt.Sprint(*ptr) - case *float64: - return fmt.Sprint(*ptr) - case *bool: - return fmt.Sprint(*ptr) - } - panic(fmt.Sprintf("unsupported config field type %v", f.field.Type)) -} - -// set sets the value of field f in cfg to value. -func (cfg *config) set(f configField, value string) error { - switch ptr := cfg.fieldPtr(f).(type) { - case *string: - if len(f.choices) > 0 { - // Verify that value is one of the allowed choices. - for _, choice := range f.choices { - if choice == value { - *ptr = value - return nil - } - } - return fmt.Errorf("invalid %q value %q", f.name, value) - } - *ptr = value - case *int: - v, err := strconv.Atoi(value) - if err != nil { - return err - } - *ptr = v - case *float64: - v, err := strconv.ParseFloat(value, 64) - if err != nil { - return err - } - *ptr = v - case *bool: - v, err := stringToBool(value) - if err != nil { - return err - } - *ptr = v - default: - panic(fmt.Sprintf("unsupported config field type %v", f.field.Type)) - } - return nil -} - -// isConfigurable returns true if name is either the name of a config field, or -// a valid value for a multi-choice config field. -func isConfigurable(name string) bool { - _, ok := configFieldMap[name] - return ok -} - -// isBoolConfig returns true if name is either name of a boolean config field, -// or a valid value for a multi-choice config field. 
-func isBoolConfig(name string) bool { - f, ok := configFieldMap[name] - if !ok { - return false - } - if name != f.name { - return true // name must be one possible value for the field - } - var cfg config - _, ok = cfg.fieldPtr(f).(*bool) - return ok -} - -// completeConfig returns the list of configurable names starting with prefix. -func completeConfig(prefix string) []string { - var result []string - for v := range configFieldMap { - if strings.HasPrefix(v, prefix) { - result = append(result, v) - } - } - return result -} - -// configure stores the name=value mapping into the current config, correctly -// handling the case when name identifies a particular choice in a field. -func configure(name, value string) error { - currentMu.Lock() - defer currentMu.Unlock() - f, ok := configFieldMap[name] - if !ok { - return fmt.Errorf("unknown config field %q", name) - } - if f.name == name { - return currentCfg.set(f, value) - } - // name must be one of the choices. If value is true, set field-value - // to name. - if v, err := strconv.ParseBool(value); v && err == nil { - return currentCfg.set(f, name) - } - return fmt.Errorf("unknown config field %q", name) -} - -// resetTransient sets all transient fields in *cfg to their currently -// configured values. -func (cfg *config) resetTransient() { - current := currentConfig() - cfg.Output = current.Output - cfg.SourcePath = current.SourcePath - cfg.TrimPath = current.TrimPath - cfg.DivideBy = current.DivideBy - cfg.SampleIndex = current.SampleIndex -} - -// applyURL updates *cfg based on params. -func (cfg *config) applyURL(params url.Values) error { - for _, f := range configFields { - var value string - if f.urlparam != "" { - value = params.Get(f.urlparam) - } - if value == "" { - continue - } - if err := cfg.set(f, value); err != nil { - return fmt.Errorf("error setting config field %s: %v", f.name, err) - } - } - return nil -} - -// makeURL returns a URL based on initialURL that contains the config contents -// as parameters. The second result is true iff a parameter value was changed. -func (cfg *config) makeURL(initialURL url.URL) (url.URL, bool) { - q := initialURL.Query() - changed := false - for _, f := range configFields { - if f.urlparam == "" || !f.saved { - continue - } - v := cfg.get(f) - if v == f.defaultValue { - v = "" // URL for of default value is the empty string. - } else if f.field.Type.Kind() == reflect.Bool { - // Shorten bool values to "f" or "t" - v = v[:1] - } - if q.Get(f.urlparam) == v { - continue - } - changed = true - if v == "" { - q.Del(f.urlparam) - } else { - q.Set(f.urlparam, v) - } - } - if changed { - initialURL.RawQuery = q.Encode() - } - return initialURL, changed -} diff --git a/internal/pprof/driver/driver.go b/internal/pprof/driver/driver.go deleted file mode 100644 index 08548ad5a2c..00000000000 --- a/internal/pprof/driver/driver.go +++ /dev/null @@ -1,344 +0,0 @@ -// Copyright 2014 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
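The config.go code deleted above keeps report settings in sync with URL query parameters: makeURL writes only values that differ from the field's default and shortens booleans to a single character, while applyURL reads the parameters back into a config. A minimal standalone sketch of that round-trip, restricted to two entries from the deleted urlparam table ("n" for nodecount, "calltree" for call_tree); the struct, helper names and example URL here are illustrative and not the deleted package's API:

package main

import (
	"fmt"
	"net/url"
	"strconv"
)

// sketchConfig mirrors just two fields of the deleted config struct:
// nodecount (URL parameter "n") and call_tree (URL parameter "calltree").
type sketchConfig struct {
	NodeCount int
	CallTree  bool
}

// sketchDefaults matches defaultConfig() for these two fields.
var sketchDefaults = sketchConfig{NodeCount: -1, CallTree: false}

// encode writes only non-default values into the URL and shortens the
// boolean to a single character, the way makeURL does.
func (c sketchConfig) encode(u *url.URL) {
	q := u.Query()
	if c.NodeCount != sketchDefaults.NodeCount {
		q.Set("n", strconv.Itoa(c.NodeCount))
	}
	if c.CallTree != sketchDefaults.CallTree {
		q.Set("calltree", "t")
	}
	u.RawQuery = q.Encode()
}

// decode applies any parameters present in the URL back onto the config,
// the way applyURL does (the deleted stringToBool accepts "t" as true).
func (c *sketchConfig) decode(u *url.URL) error {
	q := u.Query()
	if v := q.Get("n"); v != "" {
		n, err := strconv.Atoi(v)
		if err != nil {
			return err
		}
		c.NodeCount = n
	}
	if v := q.Get("calltree"); v != "" {
		c.CallTree = v == "t" || v == "true"
	}
	return nil
}

func main() {
	u, _ := url.Parse("http://localhost:8080/ui/top")
	cfg := sketchConfig{NodeCount: 20, CallTree: true}
	cfg.encode(u)
	fmt.Println(u.String()) // http://localhost:8080/ui/top?calltree=t&n=20

	restored := sketchDefaults
	if err := restored.decode(u); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", restored) // {NodeCount:20 CallTree:true}
}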
- -// Package driver implements the core pprof functionality. It can be -// parameterized with a flag implementation, fetch and symbolize -// mechanisms. -package driver - -import ( - "bytes" - "fmt" - "os" - "path/filepath" - "regexp" - "strings" - - "github.com/parca-dev/parca/internal/pprof/plugin" - "github.com/parca-dev/parca/internal/pprof/report" - "github.com/google/pprof/profile" -) - -// PProf acquires a profile, and symbolizes it using a profile -// manager. Then it generates a report formatted according to the -// options selected through the flags package. -func PProf(eo *plugin.Options) error { - // Remove any temporary files created during pprof processing. - defer cleanupTempFiles() - - o := setDefaults(eo) - - src, cmd, err := parseFlags(o) - if err != nil { - return err - } - - p, err := fetchProfiles(src, o) - if err != nil { - return err - } - - if cmd != nil { - return generateReport(p, cmd, currentConfig(), o) - } - - if src.HTTPHostport != "" { - return serveWebInterface(src.HTTPHostport, p, o, src.HTTPDisableBrowser) - } - return interactive(p, o) -} - -func generateRawReport(p *profile.Profile, cmd []string, cfg config, o *plugin.Options) (*command, *report.Report, error) { - p = p.Copy() // Prevent modification to the incoming profile. - - // Identify units of numeric tags in profile. - numLabelUnits := identifyNumLabelUnits(p, o.UI) - - // Get report output format - c := pprofCommands[cmd[0]] - if c == nil { - panic("unexpected nil command") - } - - cfg = applyCommandOverrides(cmd[0], c.format, cfg) - - // Delay focus after configuring report to get percentages on all samples. - relative := cfg.RelativePercentages - if relative { - if err := applyFocus(p, numLabelUnits, cfg, o.UI); err != nil { - return nil, nil, err - } - } - ropt, err := reportOptions(p, numLabelUnits, cfg) - if err != nil { - return nil, nil, err - } - ropt.OutputFormat = c.format - if len(cmd) == 2 { - s, err := regexp.Compile(cmd[1]) - if err != nil { - return nil, nil, fmt.Errorf("parsing argument regexp %s: %v", cmd[1], err) - } - ropt.Symbol = s - } - - rpt := report.New(p, ropt) - if !relative { - if err := applyFocus(p, numLabelUnits, cfg, o.UI); err != nil { - return nil, nil, err - } - } - if err := aggregate(p, cfg); err != nil { - return nil, nil, err - } - - return c, rpt, nil -} - -func generateReport(p *profile.Profile, cmd []string, cfg config, o *plugin.Options) error { - c, rpt, err := generateRawReport(p, cmd, cfg, o) - if err != nil { - return err - } - - // Generate the report. - dst := new(bytes.Buffer) - if err := report.Generate(dst, rpt, o.Obj); err != nil { - return err - } - src := dst - - // If necessary, perform any data post-processing. - if c.postProcess != nil { - dst = new(bytes.Buffer) - if err := c.postProcess(src, dst, o.UI); err != nil { - return err - } - src = dst - } - - // If no output is specified, use default visualizer. - output := cfg.Output - if output == "" { - if c.visualizer != nil { - return c.visualizer(src, os.Stdout, o.UI) - } - _, err := src.WriteTo(os.Stdout) - return err - } - - // Output to specified file. - o.UI.PrintErr("Generating report in ", output) - out, err := o.Writer.Open(output) - if err != nil { - return err - } - if _, err := src.WriteTo(out); err != nil { - out.Close() - return err - } - return out.Close() -} - -func applyCommandOverrides(cmd string, outputFormat int, cfg config) config { - // Some report types override the trim flag to false below. 
This is to make - // sure the default heuristics of excluding insignificant nodes and edges - // from the call graph do not apply. One example where it is important is - // annotated source or disassembly listing. Those reports run on a specific - // function (or functions), but the trimming is applied before the function - // data is selected. So, with trimming enabled, the report could end up - // showing no data if the specified function is "uninteresting" as far as the - // trimming is concerned. - trim := cfg.Trim - - switch cmd { - case "disasm": - trim = false - cfg.Granularity = "addresses" - // Force the 'noinlines' mode so that source locations for a given address - // collapse and there is only one for the given address. Without this - // cumulative metrics would be double-counted when annotating the assembly. - // This is because the merge is done by address and in case of an inlined - // stack each of the inlined entries is a separate callgraph node. - cfg.NoInlines = true - case "weblist": - trim = false - cfg.Granularity = "addresses" - cfg.NoInlines = false // Need inline info to support call expansion - case "peek": - trim = false - case "list": - trim = false - cfg.Granularity = "lines" - // Do not force 'noinlines' to be false so that specifying - // "-list foo -noinlines" is supported and works as expected. - case "text", "top", "topproto": - if cfg.NodeCount == -1 { - cfg.NodeCount = 0 - } - default: - if cfg.NodeCount == -1 { - cfg.NodeCount = 80 - } - } - - switch outputFormat { - case report.Proto, report.Raw, report.Callgrind: - trim = false - cfg.Granularity = "addresses" - cfg.NoInlines = false - } - - if !trim { - cfg.NodeCount = 0 - cfg.NodeFraction = 0 - cfg.EdgeFraction = 0 - } - return cfg -} - -func aggregate(prof *profile.Profile, cfg config) error { - var function, filename, linenumber, address bool - inlines := !cfg.NoInlines - switch cfg.Granularity { - case "addresses": - if inlines { - return nil - } - function = true - filename = true - linenumber = true - address = true - case "lines": - function = true - filename = true - linenumber = true - case "files": - filename = true - case "functions": - function = true - case "filefunctions": - function = true - filename = true - default: - return fmt.Errorf("unexpected granularity") - } - return prof.Aggregate(inlines, function, filename, linenumber, address) -} - -func reportOptions(p *profile.Profile, numLabelUnits map[string]string, cfg config) (*report.Options, error) { - si, mean := cfg.SampleIndex, cfg.Mean - value, meanDiv, sample, err := sampleFormat(p, si, mean) - if err != nil { - return nil, err - } - - stype := sample.Type - if mean { - stype = "mean_" + stype - } - - if cfg.DivideBy == 0 { - return nil, fmt.Errorf("zero divisor specified") - } - - var filters []string - addFilter := func(k string, v string) { - if v != "" { - filters = append(filters, k+"="+v) - } - } - addFilter("focus", cfg.Focus) - addFilter("ignore", cfg.Ignore) - addFilter("hide", cfg.Hide) - addFilter("show", cfg.Show) - addFilter("show_from", cfg.ShowFrom) - addFilter("tagfocus", cfg.TagFocus) - addFilter("tagignore", cfg.TagIgnore) - addFilter("tagshow", cfg.TagShow) - addFilter("taghide", cfg.TagHide) - - ropt := &report.Options{ - CumSort: cfg.Sort == "cum", - CallTree: cfg.CallTree, - DropNegative: cfg.DropNegative, - - CompactLabels: cfg.CompactLabels, - Ratio: 1 / cfg.DivideBy, - - NodeCount: cfg.NodeCount, - NodeFraction: cfg.NodeFraction, - EdgeFraction: cfg.EdgeFraction, - - ActiveFilters: filters, - 
NumLabelUnits: numLabelUnits, - - SampleValue: value, - SampleMeanDivisor: meanDiv, - SampleType: stype, - SampleUnit: sample.Unit, - - OutputUnit: cfg.Unit, - - SourcePath: cfg.SourcePath, - TrimPath: cfg.TrimPath, - - IntelSyntax: cfg.IntelSyntax, - } - - if len(p.Mapping) > 0 && p.Mapping[0].File != "" { - ropt.Title = filepath.Base(p.Mapping[0].File) - } - - return ropt, nil -} - -// identifyNumLabelUnits returns a map of numeric label keys to the units -// associated with those keys. -func identifyNumLabelUnits(p *profile.Profile, ui plugin.UI) map[string]string { - numLabelUnits, ignoredUnits := p.NumLabelUnits() - - // Print errors for tags with multiple units associated with - // a single key. - for k, units := range ignoredUnits { - ui.PrintErr(fmt.Sprintf("For tag %s used unit %s, also encountered unit(s) %s", k, numLabelUnits[k], strings.Join(units, ", "))) - } - return numLabelUnits -} - -type sampleValueFunc func([]int64) int64 - -// sampleFormat returns a function to extract values out of a profile.Sample, -// and the type/units of those values. -func sampleFormat(p *profile.Profile, sampleIndex string, mean bool) (value, meanDiv sampleValueFunc, v *profile.ValueType, err error) { - if len(p.SampleType) == 0 { - return nil, nil, nil, fmt.Errorf("profile has no samples") - } - index, err := p.SampleIndexByName(sampleIndex) - if err != nil { - return nil, nil, nil, err - } - value = valueExtractor(index) - if mean { - meanDiv = valueExtractor(0) - } - v = p.SampleType[index] - return -} - -func valueExtractor(ix int) sampleValueFunc { - return func(v []int64) int64 { - return v[ix] - } -} diff --git a/internal/pprof/driver/driver_focus.go b/internal/pprof/driver/driver_focus.go deleted file mode 100644 index c1df7536e71..00000000000 --- a/internal/pprof/driver/driver_focus.go +++ /dev/null @@ -1,219 +0,0 @@ -// Copyright 2014 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
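The deleted driver.go chooses which of a sample's values is reported through sampleFormat and valueExtractor: each profile.Sample carries one int64 per entry in SampleType, sample_index selects one of them, and the mean option divides that value by the count stored at index 0. A small self-contained sketch of that selection step; the sample values are the ones used by the heap test fixture further down, and the surrounding program is illustrative:

package main

import "fmt"

// sampleValueFunc and valueExtractor have the same shape as the helpers in
// the deleted driver.go: pick the ix-th value out of a sample's value slice.
type sampleValueFunc func([]int64) int64

func valueExtractor(ix int) sampleValueFunc {
	return func(v []int64) int64 { return v[ix] }
}

func main() {
	// One heap sample laid out as [inuse_objects, inuse_space].
	sample := []int64{10, 1024000}

	value := valueExtractor(1)   // sample_index selects inuse_space
	meanDiv := valueExtractor(0) // divisor used when mean is enabled

	fmt.Println("inuse_space:", value(sample))                           // 1024000
	fmt.Println("mean bytes per object:", value(sample)/meanDiv(sample)) // 102400
}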
- -package driver - -import ( - "fmt" - "regexp" - "strconv" - "strings" - - "github.com/parca-dev/parca/internal/pprof/measurement" - "github.com/parca-dev/parca/internal/pprof/plugin" - "github.com/google/pprof/profile" -) - -var tagFilterRangeRx = regexp.MustCompile("([+-]?[[:digit:]]+)([[:alpha:]]+)?") - -// applyFocus filters samples based on the focus/ignore options -func applyFocus(prof *profile.Profile, numLabelUnits map[string]string, cfg config, ui plugin.UI) error { - focus, err := compileRegexOption("focus", cfg.Focus, nil) - ignore, err := compileRegexOption("ignore", cfg.Ignore, err) - hide, err := compileRegexOption("hide", cfg.Hide, err) - show, err := compileRegexOption("show", cfg.Show, err) - showfrom, err := compileRegexOption("show_from", cfg.ShowFrom, err) - tagfocus, err := compileTagFilter("tagfocus", cfg.TagFocus, numLabelUnits, ui, err) - tagignore, err := compileTagFilter("tagignore", cfg.TagIgnore, numLabelUnits, ui, err) - prunefrom, err := compileRegexOption("prune_from", cfg.PruneFrom, err) - if err != nil { - return err - } - - fm, im, hm, hnm := prof.FilterSamplesByName(focus, ignore, hide, show) - warnNoMatches(focus == nil || fm, "Focus", ui) - warnNoMatches(ignore == nil || im, "Ignore", ui) - warnNoMatches(hide == nil || hm, "Hide", ui) - warnNoMatches(show == nil || hnm, "Show", ui) - - sfm := prof.ShowFrom(showfrom) - warnNoMatches(showfrom == nil || sfm, "ShowFrom", ui) - - tfm, tim := prof.FilterSamplesByTag(tagfocus, tagignore) - warnNoMatches(tagfocus == nil || tfm, "TagFocus", ui) - warnNoMatches(tagignore == nil || tim, "TagIgnore", ui) - - tagshow, err := compileRegexOption("tagshow", cfg.TagShow, err) - taghide, err := compileRegexOption("taghide", cfg.TagHide, err) - tns, tnh := prof.FilterTagsByName(tagshow, taghide) - warnNoMatches(tagshow == nil || tns, "TagShow", ui) - warnNoMatches(taghide == nil || tnh, "TagHide", ui) - - if prunefrom != nil { - prof.PruneFrom(prunefrom) - } - return err -} - -func compileRegexOption(name, value string, err error) (*regexp.Regexp, error) { - if value == "" || err != nil { - return nil, err - } - rx, err := regexp.Compile(value) - if err != nil { - return nil, fmt.Errorf("parsing %s regexp: %v", name, err) - } - return rx, nil -} - -func compileTagFilter(name, value string, numLabelUnits map[string]string, ui plugin.UI, err error) (func(*profile.Sample) bool, error) { - if value == "" || err != nil { - return nil, err - } - - tagValuePair := strings.SplitN(value, "=", 2) - var wantKey string - if len(tagValuePair) == 2 { - wantKey = tagValuePair[0] - value = tagValuePair[1] - } - - if numFilter := parseTagFilterRange(value); numFilter != nil { - ui.PrintErr(name, ":Interpreted '", value, "' as range, not regexp") - labelFilter := func(vals []int64, unit string) bool { - for _, val := range vals { - if numFilter(val, unit) { - return true - } - } - return false - } - numLabelUnit := func(key string) string { - return numLabelUnits[key] - } - if wantKey == "" { - return func(s *profile.Sample) bool { - for key, vals := range s.NumLabel { - if labelFilter(vals, numLabelUnit(key)) { - return true - } - } - return false - }, nil - } - return func(s *profile.Sample) bool { - if vals, ok := s.NumLabel[wantKey]; ok { - return labelFilter(vals, numLabelUnit(wantKey)) - } - return false - }, nil - } - - var rfx []*regexp.Regexp - for _, tagf := range strings.Split(value, ",") { - fx, err := regexp.Compile(tagf) - if err != nil { - return nil, fmt.Errorf("parsing %s regexp: %v", name, err) - } - rfx = append(rfx, 
fx) - } - if wantKey == "" { - return func(s *profile.Sample) bool { - matchedrx: - for _, rx := range rfx { - for key, vals := range s.Label { - for _, val := range vals { - // TODO: Match against val, not key:val in future - if rx.MatchString(key + ":" + val) { - continue matchedrx - } - } - } - return false - } - return true - }, nil - } - return func(s *profile.Sample) bool { - if vals, ok := s.Label[wantKey]; ok { - for _, rx := range rfx { - for _, val := range vals { - if rx.MatchString(val) { - return true - } - } - } - } - return false - }, nil -} - -// parseTagFilterRange returns a function to checks if a value is -// contained on the range described by a string. It can recognize -// strings of the form: -// "32kb" -- matches values == 32kb -// ":64kb" -- matches values <= 64kb -// "4mb:" -- matches values >= 4mb -// "12kb:64mb" -- matches values between 12kb and 64mb (both included). -func parseTagFilterRange(filter string) func(int64, string) bool { - ranges := tagFilterRangeRx.FindAllStringSubmatch(filter, 2) - if len(ranges) == 0 { - return nil // No ranges were identified - } - v, err := strconv.ParseInt(ranges[0][1], 10, 64) - if err != nil { - panic(fmt.Errorf("failed to parse int %s: %v", ranges[0][1], err)) - } - scaledValue, unit := measurement.Scale(v, ranges[0][2], ranges[0][2]) - if len(ranges) == 1 { - switch match := ranges[0][0]; filter { - case match: - return func(v int64, u string) bool { - sv, su := measurement.Scale(v, u, unit) - return su == unit && sv == scaledValue - } - case match + ":": - return func(v int64, u string) bool { - sv, su := measurement.Scale(v, u, unit) - return su == unit && sv >= scaledValue - } - case ":" + match: - return func(v int64, u string) bool { - sv, su := measurement.Scale(v, u, unit) - return su == unit && sv <= scaledValue - } - } - return nil - } - if filter != ranges[0][0]+":"+ranges[1][0] { - return nil - } - if v, err = strconv.ParseInt(ranges[1][1], 10, 64); err != nil { - panic(fmt.Errorf("failed to parse int %s: %v", ranges[1][1], err)) - } - scaledValue2, unit2 := measurement.Scale(v, ranges[1][2], unit) - if unit != unit2 { - return nil - } - return func(v int64, u string) bool { - sv, su := measurement.Scale(v, u, unit) - return su == unit && sv >= scaledValue && sv <= scaledValue2 - } -} - -func warnNoMatches(match bool, option string, ui plugin.UI) { - if !match { - ui.PrintErr(option + " expression matched no samples") - } -} diff --git a/internal/pprof/driver/driver_test.go b/internal/pprof/driver/driver_test.go deleted file mode 100644 index b523631df58..00000000000 --- a/internal/pprof/driver/driver_test.go +++ /dev/null @@ -1,1710 +0,0 @@ -// Copyright 2014 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
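The driver_focus.go code removed above turns tagfocus/tagignore values such as "32kb", ":64kb", "4mb:" or "12kb:64mb" into numeric range predicates, splitting them with the regexp ([+-]?[[:digit:]]+)([[:alpha:]]+)?. A simplified standalone sketch of that parsing step follows; the hard-coded kb/mb table stands in for the deleted measurement.Scale helper, so this is an approximation of the idea rather than the upstream logic:

package main

import (
	"fmt"
	"regexp"
	"strconv"
)

// Same pattern as tagFilterRangeRx in the deleted file.
var rangeRx = regexp.MustCompile(`([+-]?[[:digit:]]+)([[:alpha:]]+)?`)

// toBytes converts a number with an optional unit suffix into bytes.
// Only two units are handled here; the deleted code scales arbitrary
// unit pairs through measurement.Scale instead.
func toBytes(num, unit string) int64 {
	n, _ := strconv.ParseInt(num, 10, 64)
	switch unit {
	case "kb":
		return n * 1024
	case "mb":
		return n * 1024 * 1024
	default: // no suffix: treat as bytes
		return n
	}
}

// parseRange returns an inclusive filter for "N", "N:", ":N" or "N:M".
func parseRange(filter string) func(int64) bool {
	m := rangeRx.FindAllStringSubmatch(filter, 2)
	if len(m) == 0 {
		return nil
	}
	lo := toBytes(m[0][1], m[0][2])
	switch {
	case len(m) == 1 && filter == m[0][0]:
		return func(x int64) bool { return x == lo }
	case len(m) == 1 && filter == m[0][0]+":":
		return func(x int64) bool { return x >= lo }
	case len(m) == 1 && filter == ":"+m[0][0]:
		return func(x int64) bool { return x <= lo }
	case len(m) == 2 && filter == m[0][0]+":"+m[1][0]:
		hi := toBytes(m[1][1], m[1][2])
		return func(x int64) bool { return x >= lo && x <= hi }
	}
	return nil
}

func main() {
	inRange := parseRange("512kb:1mb")
	fmt.Println(inRange(400 * 1024))      // false
	fmt.Println(inRange(700 * 1024))      // true
	fmt.Println(inRange(2 * 1024 * 1024)) // false
}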
- -package driver - -import ( - "bytes" - "flag" - "fmt" - "io/ioutil" - "net" - _ "net/http/pprof" - "os" - "reflect" - "regexp" - "runtime" - "strconv" - "strings" - "testing" - "time" - - "github.com/parca-dev/parca/internal/pprof/plugin" - "github.com/parca-dev/parca/internal/pprof/proftest" - "github.com/parca-dev/parca/internal/pprof/symbolz" - "github.com/google/pprof/profile" -) - -var updateFlag = flag.Bool("update", false, "Update the golden files") - -func TestParse(t *testing.T) { - // Override weblist command to collect output in buffer - pprofCommands["weblist"].postProcess = nil - - // Our mockObjTool.Open will always return success, causing - // driver.locateBinaries to "find" the binaries below in a non-existent - // directory. As a workaround, point the search path to the fake - // directory containing out fake binaries. - savePath := os.Getenv("PPROF_BINARY_PATH") - os.Setenv("PPROF_BINARY_PATH", "/path/to") - defer os.Setenv("PPROF_BINARY_PATH", savePath) - testcase := []struct { - flags, source string - }{ - {"text,functions,flat", "cpu"}, - {"text,functions,noinlines,flat", "cpu"}, - {"text,filefunctions,noinlines,flat", "cpu"}, - {"text,addresses,noinlines,flat", "cpu"}, - {"tree,addresses,flat,nodecount=4", "cpusmall"}, - {"text,functions,flat,nodecount=5,call_tree", "unknown"}, - {"text,alloc_objects,flat", "heap_alloc"}, - {"text,files,flat", "heap"}, - {"text,files,flat,focus=[12]00,taghide=[X3]00", "heap"}, - {"text,inuse_objects,flat", "heap"}, - {"text,lines,cum,hide=line[X3]0", "cpu"}, - {"text,lines,cum,show=[12]00", "cpu"}, - {"text,lines,cum,hide=line[X3]0,focus=[12]00", "cpu"}, - {"topproto,lines,cum,hide=mangled[X3]0", "cpu"}, - {"topproto,lines", "cpu"}, - {"tree,lines,cum,focus=[24]00", "heap"}, - {"tree,relative_percentages,cum,focus=[24]00", "heap"}, - {"tree,lines,cum,show_from=line2", "cpu"}, - {"callgrind", "cpu"}, - {"callgrind,call_tree", "cpu"}, - {"callgrind", "heap"}, - {"dot,functions,flat", "cpu"}, - {"dot,functions,flat,call_tree", "cpu"}, - {"dot,lines,flat,focus=[12]00", "heap"}, - {"dot,unit=minimum", "heap_sizetags"}, - {"dot,addresses,flat,ignore=[X3]002,focus=[X1]000", "contention"}, - {"dot,files,cum", "contention"}, - {"comments,add_comment=some-comment", "cpu"}, - {"comments", "heap"}, - {"tags", "cpu"}, - {"tags,tagignore=tag[13],tagfocus=key[12]", "cpu"}, - {"tags", "heap"}, - {"tags,unit=bytes", "heap"}, - {"traces", "cpu"}, - {"traces,addresses", "cpu"}, - {"traces", "heap_tags"}, - {"dot,alloc_space,flat,focus=[234]00", "heap_alloc"}, - {"dot,alloc_space,flat,tagshow=[2]00", "heap_alloc"}, - {"dot,alloc_space,flat,hide=line.*1?23?", "heap_alloc"}, - {"dot,inuse_space,flat,tagfocus=1mb:2gb", "heap"}, - {"dot,inuse_space,flat,tagfocus=30kb:,tagignore=1mb:2mb", "heap"}, - {"disasm=line[13],addresses,flat", "cpu"}, - {"peek=line.*01", "cpu"}, - {"weblist=line(1000|3000)$,addresses,flat", "cpu"}, - {"tags,tagfocus=400kb:", "heap_request"}, - {"tags,tagfocus=+400kb:", "heap_request"}, - {"dot", "long_name_funcs"}, - {"text", "long_name_funcs"}, - } - - baseConfig := currentConfig() - defer setCurrentConfig(baseConfig) - for _, tc := range testcase { - t.Run(tc.flags+":"+tc.source, func(t *testing.T) { - // Reset config before processing - setCurrentConfig(baseConfig) - - testUI := &proftest.TestUI{T: t, AllowRx: "Generating report in|Ignoring local file|expression matched no samples|Interpreted .* as range, not regexp"} - - f := baseFlags() - f.args = []string{tc.source} - - flags := strings.Split(tc.flags, ",") - - // Encode 
profile into a protobuf and decode it again. - protoTempFile, err := ioutil.TempFile("", "profile_proto") - if err != nil { - t.Errorf("cannot create tempfile: %v", err) - } - defer os.Remove(protoTempFile.Name()) - defer protoTempFile.Close() - f.strings["output"] = protoTempFile.Name() - - if flags[0] == "topproto" { - f.bools["proto"] = false - f.bools["topproto"] = true - f.bools["addresses"] = true - } - - // First pprof invocation to save the profile into a profile.proto. - // Pass in flag set hen setting defaults, because otherwise default - // transport will try to add flags to the default flag set. - o1 := setDefaults(&plugin.Options{Flagset: f}) - o1.Fetch = testFetcher{} - o1.Sym = testSymbolizer{} - o1.UI = testUI - if err := PProf(o1); err != nil { - t.Fatalf("%s %q: %v", tc.source, tc.flags, err) - } - // Reset config after the proto invocation - setCurrentConfig(baseConfig) - - // Read the profile from the encoded protobuf - outputTempFile, err := ioutil.TempFile("", "profile_output") - if err != nil { - t.Errorf("cannot create tempfile: %v", err) - } - defer os.Remove(outputTempFile.Name()) - defer outputTempFile.Close() - - f = baseFlags() - f.strings["output"] = outputTempFile.Name() - f.args = []string{protoTempFile.Name()} - - delete(f.bools, "proto") - addFlags(&f, flags) - solution := solutionFilename(tc.source, &f) - // Apply the flags for the second pprof run, and identify name of - // the file containing expected results - if flags[0] == "topproto" { - addFlags(&f, flags) - solution = solutionFilename(tc.source, &f) - delete(f.bools, "topproto") - f.bools["text"] = true - } - - // Second pprof invocation to read the profile from profile.proto - // and generate a report. - // Pass in flag set hen setting defaults, because otherwise default - // transport will try to add flags to the default flag set. - o2 := setDefaults(&plugin.Options{Flagset: f}) - o2.Sym = testSymbolizeDemangler{} - o2.Obj = new(mockObjTool) - o2.UI = testUI - - if err := PProf(o2); err != nil { - t.Errorf("%s: %v", tc.source, err) - } - b, err := ioutil.ReadFile(outputTempFile.Name()) - if err != nil { - t.Errorf("Failed to read profile %s: %v", outputTempFile.Name(), err) - } - - // Read data file with expected solution - solution = "testdata/" + solution - sbuf, err := ioutil.ReadFile(solution) - if err != nil { - t.Fatalf("reading solution file %s: %v", solution, err) - } - if runtime.GOOS == "windows" { - if flags[0] == "dot" { - // The .dot test has the paths inside strings, so \ must be escaped. 
- sbuf = bytes.Replace(sbuf, []byte("testdata/"), []byte(`testdata\\`), -1) - sbuf = bytes.Replace(sbuf, []byte("/path/to/"), []byte(`\\path\\to\\`), -1) - } else { - sbuf = bytes.Replace(sbuf, []byte("testdata/"), []byte(`testdata\`), -1) - sbuf = bytes.Replace(sbuf, []byte("/path/to/"), []byte(`\path\to\`), -1) - } - } - - if flags[0] == "svg" { - b = removeScripts(b) - sbuf = removeScripts(sbuf) - } - - if string(b) != string(sbuf) { - t.Errorf("diff %s %s", solution, tc.source) - d, err := proftest.Diff(sbuf, b) - if err != nil { - t.Fatalf("diff %s %v", solution, err) - } - t.Errorf("%s\n%s\n", solution, d) - if *updateFlag { - err := ioutil.WriteFile(solution, b, 0644) - if err != nil { - t.Errorf("failed to update the solution file %q: %v", solution, err) - } - } - } - }) - } -} - -// removeScripts removes <script> .. </script> pairs from its input -func removeScripts(in []byte) []byte { - beginMarker := []byte("<script") - endMarker := []byte("</script>") - - if begin := bytes.Index(in, beginMarker); begin > 0 { - if end := bytes.Index(in[begin:], endMarker); end > 0 { - in = append(in[:begin], removeScripts(in[begin+end+len(endMarker):])...) - } - } - return in -} - -// addFlags parses flag descriptions and adds them to the testFlags -func addFlags(f *testFlags, flags []string) { - for _, flag := range flags { - fields := strings.SplitN(flag, "=", 2) - switch len(fields) { - case 1: - f.bools[fields[0]] = true - case 2: - if i, err := strconv.Atoi(fields[1]); err == nil { - f.ints[fields[0]] = i - } else { - f.strings[fields[0]] = fields[1] - } - } - } -} - -func testSourceURL(port int) string { - return fmt.Sprintf("http://%s/", net.JoinHostPort(testSourceAddress, strconv.Itoa(port))) -} - -// solutionFilename returns the name of the solution file for the test -func solutionFilename(source string, f *testFlags) string { - name := []string{"pprof", strings.TrimPrefix(source, testSourceURL(8000))} - name = addString(name, f, []string{"flat", "cum"}) - name = addString(name, f, []string{"functions", "filefunctions", "files", "lines", "addresses"}) - name = addString(name, f, []string{"noinlines"}) - name = addString(name, f, []string{"inuse_space", "inuse_objects", "alloc_space", "alloc_objects"}) - name = addString(name, f, []string{"relative_percentages"}) - name = addString(name, f, []string{"seconds"}) - name = addString(name, f, []string{"call_tree"}) - name = addString(name, f, []string{"text", "tree", "callgrind", "dot", "svg", "tags", "dot", "traces", "disasm", "peek", "weblist", "topproto", "comments"}) - if f.strings["focus"] != "" || f.strings["tagfocus"] != "" { - name = append(name, "focus") - } - if f.strings["ignore"] != "" || f.strings["tagignore"] != "" { - name = append(name, "ignore") - } - if f.strings["show_from"] != "" { - name = append(name, "show_from") - } - name = addString(name, f, []string{"hide", "show"}) - if f.strings["unit"] != "minimum" { - name = addString(name, f, []string{"unit"}) - } - return strings.Join(name, ".") -} - -func addString(name []string, f *testFlags, components []string) []string { - for _, c := range components { - if f.bools[c] || f.strings[c] != "" || f.ints[c] != 0 { - return append(name, c) - } - } - return name -} - -// testFlags implements the plugin.FlagSet interface.
-type testFlags struct { - bools map[string]bool - ints map[string]int - floats map[string]float64 - strings map[string]string - args []string - stringLists map[string][]string -} - -func (testFlags) ExtraUsage() string { return "" } - -func (testFlags) AddExtraUsage(eu string) {} - -func (f testFlags) Bool(s string, d bool, c string) *bool { - if b, ok := f.bools[s]; ok { - return &b - } - return &d -} - -func (f testFlags) Int(s string, d int, c string) *int { - if i, ok := f.ints[s]; ok { - return &i - } - return &d -} - -func (f testFlags) Float64(s string, d float64, c string) *float64 { - if g, ok := f.floats[s]; ok { - return &g - } - return &d -} - -func (f testFlags) String(s, d, c string) *string { - if t, ok := f.strings[s]; ok { - return &t - } - return &d -} - -func (f testFlags) StringList(s, d, c string) *[]*string { - if t, ok := f.stringLists[s]; ok { - // convert slice of strings to slice of string pointers before returning. - tp := make([]*string, len(t)) - for i, v := range t { - tp[i] = &v - } - return &tp - } - return &[]*string{} -} - -func (f testFlags) Parse(func()) []string { - return f.args -} - -func baseFlags() testFlags { - return testFlags{ - bools: map[string]bool{ - "proto": true, - "trim": true, - "compact_labels": true, - }, - ints: map[string]int{ - "nodecount": 20, - }, - floats: map[string]float64{ - "nodefraction": 0.05, - "edgefraction": 0.01, - "divide_by": 1.0, - }, - strings: map[string]string{ - "unit": "minimum", - }, - } -} - -const testStart = 0x1000 -const testOffset = 0x5000 - -type testFetcher struct{} - -func (testFetcher) Fetch(s string, d, t time.Duration) (*profile.Profile, string, error) { - var p *profile.Profile - switch s { - case "cpu", "unknown": - p = cpuProfile() - case "cpusmall": - p = cpuProfileSmall() - case "heap": - p = heapProfile() - case "heap_alloc": - p = heapProfile() - p.SampleType = []*profile.ValueType{ - {Type: "alloc_objects", Unit: "count"}, - {Type: "alloc_space", Unit: "bytes"}, - } - case "heap_request": - p = heapProfile() - for _, s := range p.Sample { - s.NumLabel["request"] = s.NumLabel["bytes"] - } - case "heap_sizetags": - p = heapProfile() - tags := []int64{2, 4, 8, 16, 32, 64, 128, 256} - for _, s := range p.Sample { - numValues := append(s.NumLabel["bytes"], tags...) 
- s.NumLabel["bytes"] = numValues - } - case "heap_tags": - p = heapProfile() - for i := 0; i < len(p.Sample); i += 2 { - s := p.Sample[i] - if s.Label == nil { - s.Label = make(map[string][]string) - } - s.NumLabel["request"] = s.NumLabel["bytes"] - s.Label["key1"] = []string{"tag"} - } - case "contention": - p = contentionProfile() - case "symbolz": - p = symzProfile() - case "long_name_funcs": - p = longNameFuncsProfile() - default: - return nil, "", fmt.Errorf("unexpected source: %s", s) - } - return p, testSourceURL(8000) + s, nil -} - -type testSymbolizer struct{} - -func (testSymbolizer) Symbolize(_ string, _ plugin.MappingSources, _ *profile.Profile) error { - return nil -} - -type testSymbolizeDemangler struct{} - -func (testSymbolizeDemangler) Symbolize(_ string, _ plugin.MappingSources, p *profile.Profile) error { - for _, fn := range p.Function { - if fn.Name == "" || fn.SystemName == fn.Name { - fn.Name = fakeDemangler(fn.SystemName) - } - } - return nil -} - -func testFetchSymbols(source, post string) ([]byte, error) { - var buf bytes.Buffer - - switch source { - case testSourceURL(8000) + "symbolz": - for _, address := range strings.Split(post, "+") { - a, _ := strconv.ParseInt(address, 0, 64) - fmt.Fprintf(&buf, "%v\t", address) - if a-testStart > testOffset { - fmt.Fprintf(&buf, "wrong_source_%v_", address) - continue - } - fmt.Fprintf(&buf, "%#x\n", a-testStart) - } - return buf.Bytes(), nil - case testSourceURL(8001) + "symbolz": - for _, address := range strings.Split(post, "+") { - a, _ := strconv.ParseInt(address, 0, 64) - fmt.Fprintf(&buf, "%v\t", address) - if a-testStart < testOffset { - fmt.Fprintf(&buf, "wrong_source_%v_", address) - continue - } - fmt.Fprintf(&buf, "%#x\n", a-testStart-testOffset) - } - return buf.Bytes(), nil - default: - return nil, fmt.Errorf("unexpected source: %s", source) - } -} - -type testSymbolzSymbolizer struct{} - -func (testSymbolzSymbolizer) Symbolize(variables string, sources plugin.MappingSources, p *profile.Profile) error { - return symbolz.Symbolize(p, false, sources, testFetchSymbols, nil) -} - -func fakeDemangler(name string) string { - switch name { - case "mangled1000": - return "line1000" - case "mangled2000": - return "line2000" - case "mangled2001": - return "line2001" - case "mangled3000": - return "line3000" - case "mangled3001": - return "line3001" - case "mangled3002": - return "line3002" - case "mangledNEW": - return "operator new" - case "mangledMALLOC": - return "malloc" - default: - return name - } -} - -// longNameFuncsProfile returns a profile with function names which should be -// shortened in graph and flame views. 
-func longNameFuncsProfile() *profile.Profile { - var longNameFuncsM = []*profile.Mapping{ - { - ID: 1, - Start: 0x1000, - Limit: 0x4000, - File: "/path/to/testbinary", - HasFunctions: true, - HasFilenames: true, - HasLineNumbers: true, - HasInlineFrames: true, - }, - } - - var longNameFuncsF = []*profile.Function{ - {ID: 1, Name: "path/to/package1.object.function1", SystemName: "path/to/package1.object.function1", Filename: "path/to/package1.go"}, - {ID: 2, Name: "(anonymous namespace)::Bar::Foo", SystemName: "(anonymous namespace)::Bar::Foo", Filename: "a/long/path/to/package2.cc"}, - {ID: 3, Name: "java.bar.foo.FooBar.run(java.lang.Runnable)", SystemName: "java.bar.foo.FooBar.run(java.lang.Runnable)", Filename: "FooBar.java"}, - } - - var longNameFuncsL = []*profile.Location{ - { - ID: 1000, - Mapping: longNameFuncsM[0], - Address: 0x1000, - Line: []profile.Line{ - {Function: longNameFuncsF[0], Line: 1}, - }, - }, - { - ID: 2000, - Mapping: longNameFuncsM[0], - Address: 0x2000, - Line: []profile.Line{ - {Function: longNameFuncsF[1], Line: 4}, - }, - }, - { - ID: 3000, - Mapping: longNameFuncsM[0], - Address: 0x3000, - Line: []profile.Line{ - {Function: longNameFuncsF[2], Line: 9}, - }, - }, - } - - return &profile.Profile{ - PeriodType: &profile.ValueType{Type: "cpu", Unit: "milliseconds"}, - Period: 1, - DurationNanos: 10e9, - SampleType: []*profile.ValueType{ - {Type: "samples", Unit: "count"}, - {Type: "cpu", Unit: "milliseconds"}, - }, - Sample: []*profile.Sample{ - { - Location: []*profile.Location{longNameFuncsL[0], longNameFuncsL[1], longNameFuncsL[2]}, - Value: []int64{1000, 1000}, - }, - { - Location: []*profile.Location{longNameFuncsL[0], longNameFuncsL[1]}, - Value: []int64{100, 100}, - }, - { - Location: []*profile.Location{longNameFuncsL[2]}, - Value: []int64{10, 10}, - }, - }, - Location: longNameFuncsL, - Function: longNameFuncsF, - Mapping: longNameFuncsM, - } -} - -func cpuProfile() *profile.Profile { - var cpuM = []*profile.Mapping{ - { - ID: 1, - Start: 0x1000, - Limit: 0x4000, - File: "/path/to/testbinary", - HasFunctions: true, - HasFilenames: true, - HasLineNumbers: true, - HasInlineFrames: true, - }, - } - - var cpuF = []*profile.Function{ - {ID: 1, Name: "mangled1000", SystemName: "mangled1000", Filename: "testdata/file1000.src"}, - {ID: 2, Name: "mangled2000", SystemName: "mangled2000", Filename: "testdata/file2000.src"}, - {ID: 3, Name: "mangled2001", SystemName: "mangled2001", Filename: "testdata/file2000.src"}, - {ID: 4, Name: "mangled3000", SystemName: "mangled3000", Filename: "testdata/file3000.src"}, - {ID: 5, Name: "mangled3001", SystemName: "mangled3001", Filename: "testdata/file3000.src"}, - {ID: 6, Name: "mangled3002", SystemName: "mangled3002", Filename: "testdata/file3000.src"}, - } - - var cpuL = []*profile.Location{ - { - ID: 1000, - Mapping: cpuM[0], - Address: 0x1000, - Line: []profile.Line{ - {Function: cpuF[0], Line: 1}, - }, - }, - { - ID: 2000, - Mapping: cpuM[0], - Address: 0x2000, - Line: []profile.Line{ - {Function: cpuF[2], Line: 9}, - {Function: cpuF[1], Line: 4}, - }, - }, - { - ID: 3000, - Mapping: cpuM[0], - Address: 0x3000, - Line: []profile.Line{ - {Function: cpuF[5], Line: 2}, - {Function: cpuF[4], Line: 5}, - {Function: cpuF[3], Line: 6}, - }, - }, - { - ID: 3001, - Mapping: cpuM[0], - Address: 0x3001, - Line: []profile.Line{ - {Function: cpuF[4], Line: 8}, - {Function: cpuF[3], Line: 9}, - }, - }, - { - ID: 3002, - Mapping: cpuM[0], - Address: 0x3002, - Line: []profile.Line{ - {Function: cpuF[5], Line: 5}, - {Function: cpuF[3], 
Line: 9}, - }, - }, - } - - return &profile.Profile{ - PeriodType: &profile.ValueType{Type: "cpu", Unit: "milliseconds"}, - Period: 1, - DurationNanos: 10e9, - SampleType: []*profile.ValueType{ - {Type: "samples", Unit: "count"}, - {Type: "cpu", Unit: "milliseconds"}, - }, - Sample: []*profile.Sample{ - { - Location: []*profile.Location{cpuL[0], cpuL[1], cpuL[2]}, - Value: []int64{1000, 1000}, - Label: map[string][]string{ - "key1": {"tag1"}, - "key2": {"tag1"}, - }, - }, - { - Location: []*profile.Location{cpuL[0], cpuL[3]}, - Value: []int64{100, 100}, - Label: map[string][]string{ - "key1": {"tag2"}, - "key3": {"tag2"}, - }, - }, - { - Location: []*profile.Location{cpuL[1], cpuL[4]}, - Value: []int64{10, 10}, - Label: map[string][]string{ - "key1": {"tag3"}, - "key2": {"tag2"}, - }, - }, - { - Location: []*profile.Location{cpuL[2]}, - Value: []int64{10, 10}, - Label: map[string][]string{ - "key1": {"tag4"}, - "key2": {"tag1"}, - }, - }, - }, - Location: cpuL, - Function: cpuF, - Mapping: cpuM, - } -} - -func cpuProfileSmall() *profile.Profile { - var cpuM = []*profile.Mapping{ - { - ID: 1, - Start: 0x1000, - Limit: 0x4000, - File: "/path/to/testbinary", - HasFunctions: true, - HasFilenames: true, - HasLineNumbers: true, - HasInlineFrames: true, - }, - } - - var cpuL = []*profile.Location{ - { - ID: 1000, - Mapping: cpuM[0], - Address: 0x1000, - }, - { - ID: 2000, - Mapping: cpuM[0], - Address: 0x2000, - }, - { - ID: 3000, - Mapping: cpuM[0], - Address: 0x3000, - }, - { - ID: 4000, - Mapping: cpuM[0], - Address: 0x4000, - }, - { - ID: 5000, - Mapping: cpuM[0], - Address: 0x5000, - }, - } - - return &profile.Profile{ - PeriodType: &profile.ValueType{Type: "cpu", Unit: "milliseconds"}, - Period: 1, - DurationNanos: 10e9, - SampleType: []*profile.ValueType{ - {Type: "samples", Unit: "count"}, - {Type: "cpu", Unit: "milliseconds"}, - }, - Sample: []*profile.Sample{ - { - Location: []*profile.Location{cpuL[0], cpuL[1], cpuL[2]}, - Value: []int64{1000, 1000}, - }, - { - Location: []*profile.Location{cpuL[3], cpuL[1], cpuL[4]}, - Value: []int64{1000, 1000}, - }, - { - Location: []*profile.Location{cpuL[2]}, - Value: []int64{1000, 1000}, - }, - { - Location: []*profile.Location{cpuL[4]}, - Value: []int64{1000, 1000}, - }, - }, - Location: cpuL, - Function: nil, - Mapping: cpuM, - } -} - -func heapProfile() *profile.Profile { - var heapM = []*profile.Mapping{ - { - ID: 1, - BuildID: "buildid", - Start: 0x1000, - Limit: 0x4000, - HasFunctions: true, - HasFilenames: true, - HasLineNumbers: true, - HasInlineFrames: true, - }, - } - - var heapF = []*profile.Function{ - {ID: 1, Name: "pruneme", SystemName: "pruneme", Filename: "prune.h"}, - {ID: 2, Name: "mangled1000", SystemName: "mangled1000", Filename: "testdata/file1000.src"}, - {ID: 3, Name: "mangled2000", SystemName: "mangled2000", Filename: "testdata/file2000.src"}, - {ID: 4, Name: "mangled2001", SystemName: "mangled2001", Filename: "testdata/file2000.src"}, - {ID: 5, Name: "mangled3000", SystemName: "mangled3000", Filename: "testdata/file3000.src"}, - {ID: 6, Name: "mangled3001", SystemName: "mangled3001", Filename: "testdata/file3000.src"}, - {ID: 7, Name: "mangled3002", SystemName: "mangled3002", Filename: "testdata/file3000.src"}, - {ID: 8, Name: "mangledMALLOC", SystemName: "mangledMALLOC", Filename: "malloc.h"}, - {ID: 9, Name: "mangledNEW", SystemName: "mangledNEW", Filename: "new.h"}, - } - - var heapL = []*profile.Location{ - { - ID: 1000, - Mapping: heapM[0], - Address: 0x1000, - Line: []profile.Line{ - {Function: heapF[0], Line: 100}, 
- {Function: heapF[7], Line: 100}, - {Function: heapF[1], Line: 1}, - }, - }, - { - ID: 2000, - Mapping: heapM[0], - Address: 0x2000, - Line: []profile.Line{ - {Function: heapF[8], Line: 100}, - {Function: heapF[3], Line: 2}, - {Function: heapF[2], Line: 3}, - }, - }, - { - ID: 3000, - Mapping: heapM[0], - Address: 0x3000, - Line: []profile.Line{ - {Function: heapF[8], Line: 100}, - {Function: heapF[6], Line: 3}, - {Function: heapF[5], Line: 2}, - {Function: heapF[4], Line: 4}, - }, - }, - { - ID: 3001, - Mapping: heapM[0], - Address: 0x3001, - Line: []profile.Line{ - {Function: heapF[0], Line: 100}, - {Function: heapF[8], Line: 100}, - {Function: heapF[5], Line: 2}, - {Function: heapF[4], Line: 4}, - }, - }, - { - ID: 3002, - Mapping: heapM[0], - Address: 0x3002, - Line: []profile.Line{ - {Function: heapF[6], Line: 3}, - {Function: heapF[4], Line: 4}, - }, - }, - } - - return &profile.Profile{ - Comments: []string{"comment", "#hidden comment"}, - PeriodType: &profile.ValueType{Type: "allocations", Unit: "bytes"}, - Period: 524288, - SampleType: []*profile.ValueType{ - {Type: "inuse_objects", Unit: "count"}, - {Type: "inuse_space", Unit: "bytes"}, - }, - Sample: []*profile.Sample{ - { - Location: []*profile.Location{heapL[0], heapL[1], heapL[2]}, - Value: []int64{10, 1024000}, - NumLabel: map[string][]int64{"bytes": {102400}}, - }, - { - Location: []*profile.Location{heapL[0], heapL[3]}, - Value: []int64{20, 4096000}, - NumLabel: map[string][]int64{"bytes": {204800}}, - }, - { - Location: []*profile.Location{heapL[1], heapL[4]}, - Value: []int64{40, 65536000}, - NumLabel: map[string][]int64{"bytes": {1638400}}, - }, - { - Location: []*profile.Location{heapL[2]}, - Value: []int64{80, 32768000}, - NumLabel: map[string][]int64{"bytes": {409600}}, - }, - }, - DropFrames: ".*operator new.*|malloc", - Location: heapL, - Function: heapF, - Mapping: heapM, - } -} - -func contentionProfile() *profile.Profile { - var contentionM = []*profile.Mapping{ - { - ID: 1, - BuildID: "buildid-contention", - Start: 0x1000, - Limit: 0x4000, - HasFunctions: true, - HasFilenames: true, - HasLineNumbers: true, - HasInlineFrames: true, - }, - } - - var contentionF = []*profile.Function{ - {ID: 1, Name: "mangled1000", SystemName: "mangled1000", Filename: "testdata/file1000.src"}, - {ID: 2, Name: "mangled2000", SystemName: "mangled2000", Filename: "testdata/file2000.src"}, - {ID: 3, Name: "mangled2001", SystemName: "mangled2001", Filename: "testdata/file2000.src"}, - {ID: 4, Name: "mangled3000", SystemName: "mangled3000", Filename: "testdata/file3000.src"}, - {ID: 5, Name: "mangled3001", SystemName: "mangled3001", Filename: "testdata/file3000.src"}, - {ID: 6, Name: "mangled3002", SystemName: "mangled3002", Filename: "testdata/file3000.src"}, - } - - var contentionL = []*profile.Location{ - { - ID: 1000, - Mapping: contentionM[0], - Address: 0x1000, - Line: []profile.Line{ - {Function: contentionF[0], Line: 1}, - }, - }, - { - ID: 2000, - Mapping: contentionM[0], - Address: 0x2000, - Line: []profile.Line{ - {Function: contentionF[2], Line: 2}, - {Function: contentionF[1], Line: 3}, - }, - }, - { - ID: 3000, - Mapping: contentionM[0], - Address: 0x3000, - Line: []profile.Line{ - {Function: contentionF[5], Line: 2}, - {Function: contentionF[4], Line: 3}, - {Function: contentionF[3], Line: 5}, - }, - }, - { - ID: 3001, - Mapping: contentionM[0], - Address: 0x3001, - Line: []profile.Line{ - {Function: contentionF[4], Line: 3}, - {Function: contentionF[3], Line: 5}, - }, - }, - { - ID: 3002, - Mapping: contentionM[0], - 
Address: 0x3002, - Line: []profile.Line{ - {Function: contentionF[5], Line: 4}, - {Function: contentionF[3], Line: 3}, - }, - }, - } - - return &profile.Profile{ - PeriodType: &profile.ValueType{Type: "contentions", Unit: "count"}, - Period: 524288, - SampleType: []*profile.ValueType{ - {Type: "contentions", Unit: "count"}, - {Type: "delay", Unit: "nanoseconds"}, - }, - Sample: []*profile.Sample{ - { - Location: []*profile.Location{contentionL[0], contentionL[1], contentionL[2]}, - Value: []int64{10, 10240000}, - }, - { - Location: []*profile.Location{contentionL[0], contentionL[3]}, - Value: []int64{20, 40960000}, - }, - { - Location: []*profile.Location{contentionL[1], contentionL[4]}, - Value: []int64{40, 65536000}, - }, - { - Location: []*profile.Location{contentionL[2]}, - Value: []int64{80, 32768000}, - }, - }, - Location: contentionL, - Function: contentionF, - Mapping: contentionM, - Comments: []string{"Comment #1", "Comment #2"}, - } -} - -func symzProfile() *profile.Profile { - var symzM = []*profile.Mapping{ - { - ID: 1, - Start: testStart, - Limit: 0x4000, - File: "/path/to/testbinary", - }, - } - - var symzL = []*profile.Location{ - {ID: 1, Mapping: symzM[0], Address: testStart}, - {ID: 2, Mapping: symzM[0], Address: testStart + 0x1000}, - {ID: 3, Mapping: symzM[0], Address: testStart + 0x2000}, - } - - return &profile.Profile{ - PeriodType: &profile.ValueType{Type: "cpu", Unit: "milliseconds"}, - Period: 1, - DurationNanos: 10e9, - SampleType: []*profile.ValueType{ - {Type: "samples", Unit: "count"}, - {Type: "cpu", Unit: "milliseconds"}, - }, - Sample: []*profile.Sample{ - { - Location: []*profile.Location{symzL[0], symzL[1], symzL[2]}, - Value: []int64{1, 1}, - }, - }, - Location: symzL, - Mapping: symzM, - } -} - -var autoCompleteTests = []struct { - in string - out string -}{ - {"", ""}, - {"xyz", "xyz"}, // no match - {"dis", "disasm"}, // single match - {"t", "t"}, // many matches - {"top abc", "top abc"}, // no function name match - {"top mangledM", "top mangledMALLOC"}, // single function name match - {"top cmd cmd mangledM", "top cmd cmd mangledMALLOC"}, - {"top mangled", "top mangled"}, // many function name matches - {"cmd mangledM", "cmd mangledM"}, // invalid command - {"top mangledM cmd", "top mangledM cmd"}, // cursor misplaced - {"top edMA", "top mangledMALLOC"}, // single infix function name match - {"top -mangledM", "top -mangledMALLOC"}, // ignore sign handled - {"lin", "lines"}, // single variable match - {"EdGeF", "edgefraction"}, // single capitalized match - {"help dis", "help disasm"}, // help command match - {"help relative_perc", "help relative_percentages"}, // help variable match - {"help coMpa", "help compact_labels"}, // help variable capitalized match -} - -func TestAutoComplete(t *testing.T) { - complete := newCompleter(functionNames(heapProfile())) - - for _, test := range autoCompleteTests { - if out := complete(test.in); out != test.out { - t.Errorf("autoComplete(%s) = %s; want %s", test.in, out, test.out) - } - } -} - -func TestTagFilter(t *testing.T) { - var tagFilterTests = []struct { - desc, value string - tags map[string][]string - want bool - }{ - { - "1 key with 1 matching value", - "tag2", - map[string][]string{"value1": {"tag1", "tag2"}}, - true, - }, - { - "1 key with no matching values", - "tag3", - map[string][]string{"value1": {"tag1", "tag2"}}, - false, - }, - { - "two keys, each with value matching different one value in list", - "tag1,tag3", - map[string][]string{"value1": {"tag1", "tag2"}, "value2": {"tag3"}}, - true, - }, - 
{"two keys, all value matching different regex value in list", - "t..[12],t..3", - map[string][]string{"value1": {"tag1", "tag2"}, "value2": {"tag3"}}, - true, - }, - { - "one key, not all values in list matched", - "tag2,tag3", - map[string][]string{"value1": {"tag1", "tag2"}}, - false, - }, - { - "key specified, list of tags where all tags in list matched", - "key1=tag1,tag2", - map[string][]string{"key1": {"tag1", "tag2"}}, - true, - }, - {"key specified, list of tag values where not all are matched", - "key1=tag1,tag2", - map[string][]string{"key1": {"tag1"}}, - true, - }, - { - "key included for regex matching, list of values where all values in list matched", - "key1:tag1,tag2", - map[string][]string{"key1": {"tag1", "tag2"}}, - true, - }, - { - "key included for regex matching, list of values where not only second value matched", - "key1:tag1,tag2", - map[string][]string{"key1": {"tag2"}}, - false, - }, - { - "key included for regex matching, list of values where not only first value matched", - "key1:tag1,tag2", - map[string][]string{"key1": {"tag1"}}, - false, - }, - } - for _, test := range tagFilterTests { - t.Run(test.desc, func(t *testing.T) { - filter, err := compileTagFilter(test.desc, test.value, nil, &proftest.TestUI{T: t}, nil) - if err != nil { - t.Fatalf("tagFilter %s:%v", test.desc, err) - } - s := profile.Sample{ - Label: test.tags, - } - if got := filter(&s); got != test.want { - t.Errorf("tagFilter %s: got %v, want %v", test.desc, got, test.want) - } - }) - } -} - -func TestIdentifyNumLabelUnits(t *testing.T) { - var tagFilterTests = []struct { - desc string - tagVals []map[string][]int64 - tagUnits []map[string][]string - wantUnits map[string]string - allowedRx string - wantIgnoreErrCount int - }{ - { - "Multiple keys, no units for all keys", - []map[string][]int64{{"keyA": {131072}, "keyB": {128}}}, - []map[string][]string{{"keyA": {}, "keyB": {""}}}, - map[string]string{"keyA": "keyA", "keyB": "keyB"}, - "", - 0, - }, - { - "Multiple keys, different units for each key", - []map[string][]int64{{"keyA": {131072}, "keyB": {128}}}, - []map[string][]string{{"keyA": {"bytes"}, "keyB": {"kilobytes"}}}, - map[string]string{"keyA": "bytes", "keyB": "kilobytes"}, - "", - 0, - }, - { - "Multiple keys with multiple values, different units for each key", - []map[string][]int64{{"keyC": {131072, 1}, "keyD": {128, 252}}}, - []map[string][]string{{"keyC": {"bytes", "bytes"}, "keyD": {"kilobytes", "kilobytes"}}}, - map[string]string{"keyC": "bytes", "keyD": "kilobytes"}, - "", - 0, - }, - { - "Multiple keys with multiple values, some units missing", - []map[string][]int64{{"key1": {131072, 1}, "A": {128, 252}, "key3": {128}, "key4": {1}}, {"key3": {128}, "key4": {1}}}, - []map[string][]string{{"key1": {"", "bytes"}, "A": {"kilobytes", ""}, "key3": {""}, "key4": {"hour"}}, {"key3": {"seconds"}, "key4": {""}}}, - map[string]string{"key1": "bytes", "A": "kilobytes", "key3": "seconds", "key4": "hour"}, - "", - 0, - }, - { - "One key with three units in same sample", - []map[string][]int64{{"key": {8, 8, 16}}}, - []map[string][]string{{"key": {"bytes", "megabytes", "kilobytes"}}}, - map[string]string{"key": "bytes"}, - `(For tag key used unit bytes, also encountered unit\(s\) kilobytes, megabytes)`, - 1, - }, - { - "One key with four units in same sample", - []map[string][]int64{{"key": {8, 8, 16, 32}}}, - []map[string][]string{{"key": {"bytes", "kilobytes", "a", "megabytes"}}}, - map[string]string{"key": "bytes"}, - `(For tag key used unit bytes, also encountered unit\(s\) a, 
kilobytes, megabytes)`, - 1, - }, - { - "One key with two units in same sample", - []map[string][]int64{{"key": {8, 8}}}, - []map[string][]string{{"key": {"bytes", "seconds"}}}, - map[string]string{"key": "bytes"}, - `(For tag key used unit bytes, also encountered unit\(s\) seconds)`, - 1, - }, - { - "One key with different units in different samples", - []map[string][]int64{{"key1": {8}}, {"key1": {8}}, {"key1": {8}}}, - []map[string][]string{{"key1": {"bytes"}}, {"key1": {"kilobytes"}}, {"key1": {"megabytes"}}}, - map[string]string{"key1": "bytes"}, - `(For tag key1 used unit bytes, also encountered unit\(s\) kilobytes, megabytes)`, - 1, - }, - { - "Key alignment, unit not specified", - []map[string][]int64{{"alignment": {8}}}, - []map[string][]string{nil}, - map[string]string{"alignment": "bytes"}, - "", - 0, - }, - { - "Key request, unit not specified", - []map[string][]int64{{"request": {8}}, {"request": {8, 8}}}, - []map[string][]string{nil, nil}, - map[string]string{"request": "bytes"}, - "", - 0, - }, - { - "Check units not over-written for keys with default units", - []map[string][]int64{{ - "alignment": {8}, - "request": {8}, - "bytes": {8}, - }}, - []map[string][]string{{ - "alignment": {"seconds"}, - "request": {"minutes"}, - "bytes": {"hours"}, - }}, - map[string]string{ - "alignment": "seconds", - "request": "minutes", - "bytes": "hours", - }, - "", - 0, - }, - } - for _, test := range tagFilterTests { - t.Run(test.desc, func(t *testing.T) { - p := profile.Profile{Sample: make([]*profile.Sample, len(test.tagVals))} - for i, numLabel := range test.tagVals { - s := profile.Sample{ - NumLabel: numLabel, - NumUnit: test.tagUnits[i], - } - p.Sample[i] = &s - } - testUI := &proftest.TestUI{T: t, AllowRx: test.allowedRx} - units := identifyNumLabelUnits(&p, testUI) - if !reflect.DeepEqual(test.wantUnits, units) { - t.Errorf("got %v units, want %v", units, test.wantUnits) - } - if got, want := testUI.NumAllowRxMatches, test.wantIgnoreErrCount; want != got { - t.Errorf("got %d errors logged, want %d errors logged", got, want) - } - }) - } -} - -func TestNumericTagFilter(t *testing.T) { - var tagFilterTests = []struct { - desc, value string - tags map[string][]int64 - identifiedUnits map[string]string - want bool - }{ - { - "Match when unit conversion required", - "128kb", - map[string][]int64{"key1": {131072}, "key2": {128}}, - map[string]string{"key1": "bytes", "key2": "kilobytes"}, - true, - }, - { - "Match only when values equal after unit conversion", - "512kb", - map[string][]int64{"key1": {512}, "key2": {128}}, - map[string]string{"key1": "bytes", "key2": "kilobytes"}, - false, - }, - { - "Match when values and units initially equal", - "10bytes", - map[string][]int64{"key1": {10}, "key2": {128}}, - map[string]string{"key1": "bytes", "key2": "kilobytes"}, - true, - }, - { - "Match range without lower bound, no unit conversion required", - ":10bytes", - map[string][]int64{"key1": {8}}, - map[string]string{"key1": "bytes"}, - true, - }, - { - "Match range without lower bound, unit conversion required", - ":10kb", - map[string][]int64{"key1": {8}}, - map[string]string{"key1": "bytes"}, - true, - }, - { - "Match range without upper bound, unit conversion required", - "10b:", - map[string][]int64{"key1": {8}}, - map[string]string{"key1": "kilobytes"}, - true, - }, - { - "Match range without upper bound, no unit conversion required", - "10b:", - map[string][]int64{"key1": {12}}, - map[string]string{"key1": "bytes"}, - true, - }, - { - "Don't match range without upper bound, no unit 
conversion required", - "10b:", - map[string][]int64{"key1": {8}}, - map[string]string{"key1": "bytes"}, - false, - }, - { - "Multiple keys with different units, don't match range without upper bound", - "10kb:", - map[string][]int64{"key1": {8}}, - map[string]string{"key1": "bytes", "key2": "kilobytes"}, - false, - }, - { - "Match range without upper bound, unit conversion required", - "10b:", - map[string][]int64{"key1": {8}}, - map[string]string{"key1": "kilobytes"}, - true, - }, - { - "Don't match range without lower bound, no unit conversion required", - ":10b", - map[string][]int64{"key1": {12}}, - map[string]string{"key1": "bytes"}, - false, - }, - { - "Match specific key, key present, one of two values match", - "bytes=5b", - map[string][]int64{"bytes": {10, 5}}, - map[string]string{"bytes": "bytes"}, - true, - }, - { - "Match specific key, key present and value matches", - "bytes=1024b", - map[string][]int64{"bytes": {1024}}, - map[string]string{"bytes": "kilobytes"}, - false, - }, - { - "Match specific key, matching key present and value matches, also non-matching key", - "bytes=1024b", - map[string][]int64{"bytes": {1024}, "key2": {5}}, - map[string]string{"bytes": "bytes", "key2": "bytes"}, - true, - }, - { - "Match specific key and range of values, value matches", - "bytes=512b:1024b", - map[string][]int64{"bytes": {780}}, - map[string]string{"bytes": "bytes"}, - true, - }, - { - "Match specific key and range of values, value too large", - "key1=1kb:2kb", - map[string][]int64{"key1": {4096}}, - map[string]string{"key1": "bytes"}, - false, - }, - { - "Match specific key and range of values, value too small", - "key1=1kb:2kb", - map[string][]int64{"key1": {256}}, - map[string]string{"key1": "bytes"}, - false, - }, - { - "Match specific key and value, unit conversion required", - "bytes=1024b", - map[string][]int64{"bytes": {1}}, - map[string]string{"bytes": "kilobytes"}, - true, - }, - { - "Match specific key and value, key does not appear", - "key2=256bytes", - map[string][]int64{"key1": {256}}, - map[string]string{"key1": "bytes"}, - false, - }, - { - "Match negative key and range of values, value matches", - "bytes=-512b:-128b", - map[string][]int64{"bytes": {-256}}, - map[string]string{"bytes": "bytes"}, - true, - }, - { - "Match negative key and range of values, value outside range", - "bytes=-512b:-128b", - map[string][]int64{"bytes": {-2048}}, - map[string]string{"bytes": "bytes"}, - false, - }, - { - "Match exact value, unitless tag", - "pid=123", - map[string][]int64{"pid": {123}}, - nil, - true, - }, - { - "Match range, unitless tag", - "pid=123:123", - map[string][]int64{"pid": {123}}, - nil, - true, - }, - { - "Don't match range, unitless tag", - "pid=124:124", - map[string][]int64{"pid": {123}}, - nil, - false, - }, - { - "Match range without upper bound, unitless tag", - "pid=100:", - map[string][]int64{"pid": {123}}, - nil, - true, - }, - { - "Don't match range without upper bound, unitless tag", - "pid=200:", - map[string][]int64{"pid": {123}}, - nil, - false, - }, - { - "Match range without lower bound, unitless tag", - "pid=:200", - map[string][]int64{"pid": {123}}, - nil, - true, - }, - { - "Don't match range without lower bound, unitless tag", - "pid=:100", - map[string][]int64{"pid": {123}}, - nil, - false, - }, - } - for _, test := range tagFilterTests { - t.Run(test.desc, func(t *testing.T) { - wantErrMsg := strings.Join([]string{"(", test.desc, ":Interpreted '", test.value[strings.Index(test.value, "=")+1:], "' as range, not regexp", ")"}, "") - filter, 
err := compileTagFilter(test.desc, test.value, test.identifiedUnits, &proftest.TestUI{T: t, - AllowRx: wantErrMsg}, nil) - if err != nil { - t.Fatalf("%v", err) - } - s := profile.Sample{ - NumLabel: test.tags, - } - if got := filter(&s); got != test.want { - t.Fatalf("got %v, want %v", got, test.want) - } - }) - } -} - -// TestOptionsHaveHelp tests that a help message is supplied for every -// selectable option. -func TestOptionsHaveHelp(t *testing.T) { - for _, f := range configFields { - // Check all choices if this is a group, else check f.name. - names := f.choices - if len(names) == 0 { - names = []string{f.name} - } - for _, name := range names { - if _, ok := configHelp[name]; !ok { - t.Errorf("missing help message for %q", name) - } - } - } -} - -type testSymbolzMergeFetcher struct{} - -func (testSymbolzMergeFetcher) Fetch(s string, d, t time.Duration) (*profile.Profile, string, error) { - var p *profile.Profile - switch s { - case testSourceURL(8000) + "symbolz": - p = symzProfile() - case testSourceURL(8001) + "symbolz": - p = symzProfile() - p.Mapping[0].Start += testOffset - p.Mapping[0].Limit += testOffset - for i := range p.Location { - p.Location[i].Address += testOffset - } - default: - return nil, "", fmt.Errorf("unexpected source: %s", s) - } - return p, s, nil -} - -func TestSymbolzAfterMerge(t *testing.T) { - baseConfig := currentConfig() - defer setCurrentConfig(baseConfig) - - f := baseFlags() - f.args = []string{ - testSourceURL(8000) + "symbolz", - testSourceURL(8001) + "symbolz", - } - - o := setDefaults(nil) - o.Flagset = f - o.Obj = new(mockObjTool) - src, cmd, err := parseFlags(o) - if err != nil { - t.Fatalf("parseFlags: %v", err) - } - - if len(cmd) != 1 || cmd[0] != "proto" { - t.Fatalf("parseFlags returned command %v, want [proto]", cmd) - } - - o.Fetch = testSymbolzMergeFetcher{} - o.Sym = testSymbolzSymbolizer{} - p, err := fetchProfiles(src, o) - if err != nil { - t.Fatalf("fetchProfiles: %v", err) - } - if len(p.Location) != 3 { - t.Errorf("Got %d locations after merge, want %d", len(p.Location), 3) - } - for i, l := range p.Location { - if len(l.Line) != 1 { - t.Errorf("Number of lines for symbolz %#x in iteration %d, got %d, want %d", l.Address, i, len(l.Line), 1) - continue - } - address := l.Address - l.Mapping.Start - if got, want := l.Line[0].Function.Name, fmt.Sprintf("%#x", address); got != want { - t.Errorf("symbolz %#x, got %s, want %s", address, got, want) - } - } -} - -type mockObjTool struct{} - -func (*mockObjTool) Open(file string, start, limit, offset uint64) (plugin.ObjFile, error) { - return &mockFile{file, "abcdef", 0}, nil -} - -func (m *mockObjTool) Disasm(file string, start, end uint64, intelSyntax bool) ([]plugin.Inst, error) { - const fn1 = "line1000" - const fn3 = "line3000" - const file1 = "testdata/file1000.src" - const file3 = "testdata/file3000.src" - data := []plugin.Inst{ - {Addr: 0x1000, Text: "instruction one", Function: fn1, File: file1, Line: 1}, - {Addr: 0x1001, Text: "instruction two", Function: fn1, File: file1, Line: 1}, - {Addr: 0x1002, Text: "instruction three", Function: fn1, File: file1, Line: 2}, - {Addr: 0x1003, Text: "instruction four", Function: fn1, File: file1, Line: 1}, - {Addr: 0x3000, Text: "instruction one", Function: fn3, File: file3}, - {Addr: 0x3001, Text: "instruction two", Function: fn3, File: file3}, - {Addr: 0x3002, Text: "instruction three", Function: fn3, File: file3}, - {Addr: 0x3003, Text: "instruction four", Function: fn3, File: file3}, - {Addr: 0x3004, Text: "instruction five", Function: 
fn3, File: file3}, - } - var result []plugin.Inst - for _, inst := range data { - if inst.Addr >= start && inst.Addr <= end { - result = append(result, inst) - } - } - return result, nil -} - -type mockFile struct { - name, buildID string - base uint64 -} - -// Name returns the underlyinf file name, if available -func (m *mockFile) Name() string { - return m.name -} - -// ObjAddr returns the objdump address corresponding to a runtime address. -func (m *mockFile) ObjAddr(addr uint64) (uint64, error) { - return addr - m.base, nil -} - -// BuildID returns the GNU build ID of the file, or an empty string. -func (m *mockFile) BuildID() string { - return m.buildID -} - -// SourceLine reports the source line information for a given -// address in the file. Due to inlining, the source line information -// is in general a list of positions representing a call stack, -// with the leaf function first. -func (*mockFile) SourceLine(addr uint64) ([]plugin.Frame, error) { - // Return enough data to support the SourceLine() calls needed for - // weblist on cpuProfile() contents. - frame := func(fn, file string, line int) plugin.Frame { - return plugin.Frame{Func: fn, File: file, Line: line} - } - switch addr { - case 0x1000: - return []plugin.Frame{ - frame("mangled1000", "testdata/file1000.src", 1), - }, nil - case 0x1001: - return []plugin.Frame{ - frame("mangled1000", "testdata/file1000.src", 1), - }, nil - case 0x1002: - return []plugin.Frame{ - frame("mangled1000", "testdata/file1000.src", 2), - }, nil - case 0x1003: - return []plugin.Frame{ - frame("mangled1000", "testdata/file1000.src", 1), - }, nil - case 0x2000: - return []plugin.Frame{ - frame("mangled2001", "testdata/file2000.src", 9), - frame("mangled2000", "testdata/file2000.src", 4), - }, nil - case 0x3000: - return []plugin.Frame{ - frame("mangled3002", "testdata/file3000.src", 2), - frame("mangled3001", "testdata/file3000.src", 5), - frame("mangled3000", "testdata/file3000.src", 6), - }, nil - case 0x3001: - return []plugin.Frame{ - frame("mangled3001", "testdata/file3000.src", 8), - frame("mangled3000", "testdata/file3000.src", 9), - }, nil - case 0x3002: - return []plugin.Frame{ - frame("mangled3002", "testdata/file3000.src", 5), - frame("mangled3000", "testdata/file3000.src", 9), - }, nil - } - - return nil, nil -} - -// Symbols returns a list of symbols in the object file. -// If r is not nil, Symbols restricts the list to symbols -// with names matching the regular expression. -// If addr is not zero, Symbols restricts the list to symbols -// containing that address. -func (m *mockFile) Symbols(r *regexp.Regexp, addr uint64) ([]*plugin.Sym, error) { - switch r.String() { - case "line[13]": - return []*plugin.Sym{ - { - Name: []string{"line1000"}, File: m.name, - Start: 0x1000, End: 0x1003, - }, - { - Name: []string{"line3000"}, File: m.name, - Start: 0x3000, End: 0x3004, - }, - }, nil - } - return nil, fmt.Errorf("unimplemented") -} - -// Close closes the file, releasing associated resources. -func (*mockFile) Close() error { - return nil -} diff --git a/internal/pprof/driver/fetch.go b/internal/pprof/driver/fetch.go deleted file mode 100644 index d23ecb0607d..00000000000 --- a/internal/pprof/driver/fetch.go +++ /dev/null @@ -1,587 +0,0 @@ -// Copyright 2014 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package driver - -import ( - "bytes" - "fmt" - "io" - "io/ioutil" - "net/http" - "net/url" - "os" - "os/exec" - "path/filepath" - "runtime" - "strconv" - "strings" - "sync" - "time" - - "github.com/parca-dev/parca/internal/pprof/measurement" - "github.com/parca-dev/parca/internal/pprof/plugin" - "github.com/google/pprof/profile" -) - -// fetchProfiles fetches and symbolizes the profiles specified by s. -// It will merge all the profiles it is able to retrieve, even if -// there are some failures. It will return an error if it is unable to -// fetch any profiles. -func fetchProfiles(s *source, o *plugin.Options) (*profile.Profile, error) { - sources := make([]profileSource, 0, len(s.Sources)) - for _, src := range s.Sources { - sources = append(sources, profileSource{ - addr: src, - source: s, - }) - } - - bases := make([]profileSource, 0, len(s.Base)) - for _, src := range s.Base { - bases = append(bases, profileSource{ - addr: src, - source: s, - }) - } - - p, pbase, m, mbase, save, err := grabSourcesAndBases(sources, bases, o.Fetch, o.Obj, o.UI, o.HTTPTransport) - if err != nil { - return nil, err - } - - if pbase != nil { - if s.DiffBase { - pbase.SetLabel("pprof::base", []string{"true"}) - } - if s.Normalize { - err := p.Normalize(pbase) - if err != nil { - return nil, err - } - } - pbase.Scale(-1) - p, m, err = combineProfiles([]*profile.Profile{p, pbase}, []plugin.MappingSources{m, mbase}) - if err != nil { - return nil, err - } - } - - // Symbolize the merged profile. - if err := o.Sym.Symbolize(s.Symbolize, m, p); err != nil { - return nil, err - } - p.RemoveUninteresting() - unsourceMappings(p) - - if s.Comment != "" { - p.Comments = append(p.Comments, s.Comment) - } - - // Save a copy of the merged profile if there is at least one remote source. - if save { - dir, err := setTmpDir(o.UI) - if err != nil { - return nil, err - } - - prefix := "pprof." - if len(p.Mapping) > 0 && p.Mapping[0].File != "" { - prefix += filepath.Base(p.Mapping[0].File) + "." - } - for _, s := range p.SampleType { - prefix += s.Type + "." 
- } - - tempFile, err := newTempFile(dir, prefix, ".pb.gz") - if err == nil { - if err = p.Write(tempFile); err == nil { - o.UI.PrintErr("Saved profile in ", tempFile.Name()) - } - } - if err != nil { - o.UI.PrintErr("Could not save profile: ", err) - } - } - - if err := p.CheckValid(); err != nil { - return nil, err - } - - return p, nil -} - -func grabSourcesAndBases(sources, bases []profileSource, fetch plugin.Fetcher, obj plugin.ObjTool, ui plugin.UI, tr http.RoundTripper) (*profile.Profile, *profile.Profile, plugin.MappingSources, plugin.MappingSources, bool, error) { - wg := sync.WaitGroup{} - wg.Add(2) - var psrc, pbase *profile.Profile - var msrc, mbase plugin.MappingSources - var savesrc, savebase bool - var errsrc, errbase error - var countsrc, countbase int - go func() { - defer wg.Done() - psrc, msrc, savesrc, countsrc, errsrc = chunkedGrab(sources, fetch, obj, ui, tr) - }() - go func() { - defer wg.Done() - pbase, mbase, savebase, countbase, errbase = chunkedGrab(bases, fetch, obj, ui, tr) - }() - wg.Wait() - save := savesrc || savebase - - if errsrc != nil { - return nil, nil, nil, nil, false, fmt.Errorf("problem fetching source profiles: %v", errsrc) - } - if errbase != nil { - return nil, nil, nil, nil, false, fmt.Errorf("problem fetching base profiles: %v,", errbase) - } - if countsrc == 0 { - return nil, nil, nil, nil, false, fmt.Errorf("failed to fetch any source profiles") - } - if countbase == 0 && len(bases) > 0 { - return nil, nil, nil, nil, false, fmt.Errorf("failed to fetch any base profiles") - } - if want, got := len(sources), countsrc; want != got { - ui.PrintErr(fmt.Sprintf("Fetched %d source profiles out of %d", got, want)) - } - if want, got := len(bases), countbase; want != got { - ui.PrintErr(fmt.Sprintf("Fetched %d base profiles out of %d", got, want)) - } - - return psrc, pbase, msrc, mbase, save, nil -} - -// chunkedGrab fetches the profiles described in source and merges them into -// a single profile. It fetches a chunk of profiles concurrently, with a maximum -// chunk size to limit its memory usage. 
-func chunkedGrab(sources []profileSource, fetch plugin.Fetcher, obj plugin.ObjTool, ui plugin.UI, tr http.RoundTripper) (*profile.Profile, plugin.MappingSources, bool, int, error) { - const chunkSize = 64 - - var p *profile.Profile - var msrc plugin.MappingSources - var save bool - var count int - - for start := 0; start < len(sources); start += chunkSize { - end := start + chunkSize - if end > len(sources) { - end = len(sources) - } - chunkP, chunkMsrc, chunkSave, chunkCount, chunkErr := concurrentGrab(sources[start:end], fetch, obj, ui, tr) - switch { - case chunkErr != nil: - return nil, nil, false, 0, chunkErr - case chunkP == nil: - continue - case p == nil: - p, msrc, save, count = chunkP, chunkMsrc, chunkSave, chunkCount - default: - p, msrc, chunkErr = combineProfiles([]*profile.Profile{p, chunkP}, []plugin.MappingSources{msrc, chunkMsrc}) - if chunkErr != nil { - return nil, nil, false, 0, chunkErr - } - if chunkSave { - save = true - } - count += chunkCount - } - } - - return p, msrc, save, count, nil -} - -// concurrentGrab fetches multiple profiles concurrently -func concurrentGrab(sources []profileSource, fetch plugin.Fetcher, obj plugin.ObjTool, ui plugin.UI, tr http.RoundTripper) (*profile.Profile, plugin.MappingSources, bool, int, error) { - wg := sync.WaitGroup{} - wg.Add(len(sources)) - for i := range sources { - go func(s *profileSource) { - defer wg.Done() - s.p, s.msrc, s.remote, s.err = grabProfile(s.source, s.addr, fetch, obj, ui, tr) - }(&sources[i]) - } - wg.Wait() - - var save bool - profiles := make([]*profile.Profile, 0, len(sources)) - msrcs := make([]plugin.MappingSources, 0, len(sources)) - for i := range sources { - s := &sources[i] - if err := s.err; err != nil { - ui.PrintErr(s.addr + ": " + err.Error()) - continue - } - save = save || s.remote - profiles = append(profiles, s.p) - msrcs = append(msrcs, s.msrc) - *s = profileSource{} - } - - if len(profiles) == 0 { - return nil, nil, false, 0, nil - } - - p, msrc, err := combineProfiles(profiles, msrcs) - if err != nil { - return nil, nil, false, 0, err - } - return p, msrc, save, len(profiles), nil -} - -func combineProfiles(profiles []*profile.Profile, msrcs []plugin.MappingSources) (*profile.Profile, plugin.MappingSources, error) { - // Merge profiles. - if err := measurement.ScaleProfiles(profiles); err != nil { - return nil, nil, err - } - - p, err := profile.Merge(profiles) - if err != nil { - return nil, nil, err - } - - // Combine mapping sources. - msrc := make(plugin.MappingSources) - for _, ms := range msrcs { - for m, s := range ms { - msrc[m] = append(msrc[m], s...) - } - } - return p, msrc, nil -} - -type profileSource struct { - addr string - source *source - - p *profile.Profile - msrc plugin.MappingSources - remote bool - err error -} - -func homeEnv() string { - switch runtime.GOOS { - case "windows": - return "USERPROFILE" - case "plan9": - return "home" - default: - return "HOME" - } -} - -// setTmpDir prepares the directory to use to save profiles retrieved -// remotely. It is selected from PPROF_TMPDIR, defaults to $HOME/pprof, and, if -// $HOME is not set, falls back to os.TempDir(). 
-func setTmpDir(ui plugin.UI) (string, error) { - var dirs []string - if profileDir := os.Getenv("PPROF_TMPDIR"); profileDir != "" { - dirs = append(dirs, profileDir) - } - if homeDir := os.Getenv(homeEnv()); homeDir != "" { - dirs = append(dirs, filepath.Join(homeDir, "pprof")) - } - dirs = append(dirs, os.TempDir()) - for _, tmpDir := range dirs { - if err := os.MkdirAll(tmpDir, 0755); err != nil { - ui.PrintErr("Could not use temp dir ", tmpDir, ": ", err.Error()) - continue - } - return tmpDir, nil - } - return "", fmt.Errorf("failed to identify temp dir") -} - -const testSourceAddress = "pproftest.local" - -// grabProfile fetches a profile. Returns the profile, sources for the -// profile mappings, a bool indicating if the profile was fetched -// remotely, and an error. -func grabProfile(s *source, source string, fetcher plugin.Fetcher, obj plugin.ObjTool, ui plugin.UI, tr http.RoundTripper) (p *profile.Profile, msrc plugin.MappingSources, remote bool, err error) { - var src string - duration, timeout := time.Duration(s.Seconds)*time.Second, time.Duration(s.Timeout)*time.Second - if fetcher != nil { - p, src, err = fetcher.Fetch(source, duration, timeout) - if err != nil { - return - } - } - if err != nil || p == nil { - // Fetch the profile over HTTP or from a file. - p, src, err = fetch(source, duration, timeout, ui, tr) - if err != nil { - return - } - } - - if err = p.CheckValid(); err != nil { - return - } - - // Update the binary locations from command line and paths. - locateBinaries(p, s, obj, ui) - - // Collect the source URL for all mappings. - if src != "" { - msrc = collectMappingSources(p, src) - remote = true - if strings.HasPrefix(src, "http://"+testSourceAddress) { - // Treat test inputs as local to avoid saving - // testcase profiles during driver testing. - remote = false - } - } - return -} - -// collectMappingSources saves the mapping sources of a profile. -func collectMappingSources(p *profile.Profile, source string) plugin.MappingSources { - ms := plugin.MappingSources{} - for _, m := range p.Mapping { - src := struct { - Source string - Start uint64 - }{ - source, m.Start, - } - key := m.BuildID - if key == "" { - key = m.File - } - if key == "" { - // If there is no build id or source file, use the source as the - // mapping file. This will enable remote symbolization for this - // mapping, in particular for Go profiles on the legacy format. - // The source is reset back to empty string by unsourceMapping - // which is called after symbolization is finished. - m.File = source - key = source - } - ms[key] = append(ms[key], src) - } - return ms -} - -// unsourceMappings iterates over the mappings in a profile and replaces file -// set to the remote source URL by collectMappingSources back to empty string. -func unsourceMappings(p *profile.Profile) { - for _, m := range p.Mapping { - if m.BuildID == "" { - if u, err := url.Parse(m.File); err == nil && u.IsAbs() { - m.File = "" - } - } - } -} - -// locateBinaries searches for binary files listed in the profile and, if found, -// updates the profile accordingly. 
-func locateBinaries(p *profile.Profile, s *source, obj plugin.ObjTool, ui plugin.UI) { - // Construct search path to examine - searchPath := os.Getenv("PPROF_BINARY_PATH") - if searchPath == "" { - // Use $HOME/pprof/binaries as default directory for local symbolization binaries - searchPath = filepath.Join(os.Getenv(homeEnv()), "pprof", "binaries") - } -mapping: - for _, m := range p.Mapping { - var baseName string - if m.File != "" { - baseName = filepath.Base(m.File) - } - - for _, path := range filepath.SplitList(searchPath) { - var fileNames []string - if m.BuildID != "" { - fileNames = []string{filepath.Join(path, m.BuildID, baseName)} - if matches, err := filepath.Glob(filepath.Join(path, m.BuildID, "*")); err == nil { - fileNames = append(fileNames, matches...) - } - fileNames = append(fileNames, filepath.Join(path, m.File, m.BuildID)) // perf path format - } - if m.File != "" { - // Try both the basename and the full path, to support the same directory - // structure as the perf symfs option. - if baseName != "" { - fileNames = append(fileNames, filepath.Join(path, baseName)) - } - fileNames = append(fileNames, filepath.Join(path, m.File)) - } - for _, name := range fileNames { - if f, err := obj.Open(name, m.Start, m.Limit, m.Offset); err == nil { - defer f.Close() - fileBuildID := f.BuildID() - if m.BuildID != "" && m.BuildID != fileBuildID { - ui.PrintErr("Ignoring local file " + name + ": build-id mismatch (" + m.BuildID + " != " + fileBuildID + ")") - } else { - m.File = name - continue mapping - } - } - } - } - } - if len(p.Mapping) == 0 { - // If there are no mappings, add a fake mapping to attempt symbolization. - // This is useful for some profiles generated by the golang runtime, which - // do not include any mappings. Symbolization with a fake mapping will only - // be successful against a non-PIE binary. - m := &profile.Mapping{ID: 1} - p.Mapping = []*profile.Mapping{m} - for _, l := range p.Location { - l.Mapping = m - } - } - // Replace executable filename/buildID with the overrides from source. - // Assumes the executable is the first Mapping entry. - if execName, buildID := s.ExecName, s.BuildID; execName != "" || buildID != "" { - m := p.Mapping[0] - if execName != "" { - m.File = execName - } - if buildID != "" { - m.BuildID = buildID - } - } -} - -// fetch fetches a profile from source, within the timeout specified, -// producing messages through the ui. It returns the profile and the -// url of the actual source of the profile for remote profiles. -func fetch(source string, duration, timeout time.Duration, ui plugin.UI, tr http.RoundTripper) (p *profile.Profile, src string, err error) { - var f io.ReadCloser - - if sourceURL, timeout := adjustURL(source, duration, timeout); sourceURL != "" { - ui.Print("Fetching profile over HTTP from " + sourceURL) - if duration > 0 { - ui.Print(fmt.Sprintf("Please wait... (%v)", duration)) - } - f, err = fetchURL(sourceURL, timeout, tr) - src = sourceURL - } else if isPerfFile(source) { - f, err = convertPerfData(source, ui) - } else { - f, err = os.Open(source) - } - if err == nil { - defer f.Close() - p, err = profile.Parse(f) - } - return -} - -// fetchURL fetches a profile from a URL using HTTP. 
-func fetchURL(source string, timeout time.Duration, tr http.RoundTripper) (io.ReadCloser, error) { - client := &http.Client{ - Transport: tr, - Timeout: timeout + 5*time.Second, - } - resp, err := client.Get(source) - if err != nil { - return nil, fmt.Errorf("http fetch: %v", err) - } - if resp.StatusCode != http.StatusOK { - defer resp.Body.Close() - return nil, statusCodeError(resp) - } - - return resp.Body, nil -} - -func statusCodeError(resp *http.Response) error { - if resp.Header.Get("X-Go-Pprof") != "" && strings.Contains(resp.Header.Get("Content-Type"), "text/plain") { - // error is from pprof endpoint - if body, err := ioutil.ReadAll(resp.Body); err == nil { - return fmt.Errorf("server response: %s - %s", resp.Status, body) - } - } - return fmt.Errorf("server response: %s", resp.Status) -} - -// isPerfFile checks if a file is in perf.data format. It also returns false -// if it encounters an error during the check. -func isPerfFile(path string) bool { - sourceFile, openErr := os.Open(path) - if openErr != nil { - return false - } - defer sourceFile.Close() - - // If the file is the output of a perf record command, it should begin - // with the string PERFILE2. - perfHeader := []byte("PERFILE2") - actualHeader := make([]byte, len(perfHeader)) - if _, readErr := sourceFile.Read(actualHeader); readErr != nil { - return false - } - return bytes.Equal(actualHeader, perfHeader) -} - -// convertPerfData converts the file at path which should be in perf.data format -// using the perf_to_profile tool and returns the file containing the -// profile.proto formatted data. -func convertPerfData(perfPath string, ui plugin.UI) (*os.File, error) { - ui.Print(fmt.Sprintf( - "Converting %s to a profile.proto... (May take a few minutes)", - perfPath)) - profile, err := newTempFile(os.TempDir(), "pprof_", ".pb.gz") - if err != nil { - return nil, err - } - deferDeleteTempFile(profile.Name()) - cmd := exec.Command("perf_to_profile", "-i", perfPath, "-o", profile.Name(), "-f") - cmd.Stdout, cmd.Stderr = os.Stdout, os.Stderr - if err := cmd.Run(); err != nil { - profile.Close() - return nil, fmt.Errorf("failed to convert perf.data file. Try github.com/google/perf_data_converter: %v", err) - } - return profile, nil -} - -// adjustURL validates if a profile source is a URL and returns an -// cleaned up URL and the timeout to use for retrieval over HTTP. -// If the source cannot be recognized as a URL it returns an empty string. -func adjustURL(source string, duration, timeout time.Duration) (string, time.Duration) { - u, err := url.Parse(source) - if err != nil || (u.Host == "" && u.Scheme != "" && u.Scheme != "file") { - // Try adding http:// to catch sources of the form hostname:port/path. - // url.Parse treats "hostname" as the scheme. - u, err = url.Parse("http://" + source) - } - if err != nil || u.Host == "" { - return "", 0 - } - - // Apply duration/timeout overrides to URL. 
- values := u.Query() - if duration > 0 { - values.Set("seconds", fmt.Sprint(int(duration.Seconds()))) - } else { - if urlSeconds := values.Get("seconds"); urlSeconds != "" { - if us, err := strconv.ParseInt(urlSeconds, 10, 32); err == nil { - duration = time.Duration(us) * time.Second - } - } - } - if timeout <= 0 { - if duration > 0 { - timeout = duration + duration/2 - } else { - timeout = 60 * time.Second - } - } - u.RawQuery = values.Encode() - return u.String(), timeout -} diff --git a/internal/pprof/driver/fetch_test.go b/internal/pprof/driver/fetch_test.go deleted file mode 100644 index 84fb228d43c..00000000000 --- a/internal/pprof/driver/fetch_test.go +++ /dev/null @@ -1,756 +0,0 @@ -// Copyright 2014 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package driver - -import ( - "crypto/ecdsa" - "crypto/elliptic" - "crypto/rand" - "crypto/tls" - "crypto/x509" - "encoding/pem" - "fmt" - "io/ioutil" - "math/big" - "net" - "net/http" - "os" - "path/filepath" - "reflect" - "regexp" - "runtime" - "strings" - "testing" - "time" - - "github.com/parca-dev/parca/internal/pprof/binutils" - "github.com/parca-dev/parca/internal/pprof/plugin" - "github.com/parca-dev/parca/internal/pprof/proftest" - "github.com/parca-dev/parca/internal/pprof/symbolizer" - "github.com/parca-dev/parca/internal/pprof/transport" - "github.com/google/pprof/profile" -) - -func TestSymbolizationPath(t *testing.T) { - if runtime.GOOS == "windows" { - t.Skip("test assumes Unix paths") - } - - // Save environment variables to restore after test - saveHome := os.Getenv(homeEnv()) - savePath := os.Getenv("PPROF_BINARY_PATH") - - tempdir, err := ioutil.TempDir("", "home") - if err != nil { - t.Fatal("creating temp dir: ", err) - } - defer os.RemoveAll(tempdir) - os.MkdirAll(filepath.Join(tempdir, "pprof", "binaries", "abcde10001"), 0700) - os.Create(filepath.Join(tempdir, "pprof", "binaries", "abcde10001", "binary")) - - obj := testObj{tempdir} - os.Setenv(homeEnv(), tempdir) - for _, tc := range []struct { - env, file, buildID, want string - msgCount int - }{ - {"", "/usr/bin/binary", "", "/usr/bin/binary", 0}, - {"", "/usr/bin/binary", "fedcb10000", "/usr/bin/binary", 0}, - {"/usr", "/bin/binary", "", "/usr/bin/binary", 0}, - {"", "/prod/path/binary", "abcde10001", filepath.Join(tempdir, "pprof/binaries/abcde10001/binary"), 0}, - {"/alternate/architecture", "/usr/bin/binary", "", "/alternate/architecture/binary", 0}, - {"/alternate/architecture", "/usr/bin/binary", "abcde10001", "/alternate/architecture/binary", 0}, - {"/nowhere:/alternate/architecture", "/usr/bin/binary", "fedcb10000", "/usr/bin/binary", 1}, - {"/nowhere:/alternate/architecture", "/usr/bin/binary", "abcde10002", "/usr/bin/binary", 1}, - } { - os.Setenv("PPROF_BINARY_PATH", tc.env) - p := &profile.Profile{ - Mapping: []*profile.Mapping{ - { - File: tc.file, - BuildID: tc.buildID, - }, - }, - } - s := &source{} - locateBinaries(p, s, obj, &proftest.TestUI{T: t, Ignore: tc.msgCount}) - if file := p.Mapping[0].File; 
file != tc.want { - t.Errorf("%s:%s:%s, want %s, got %s", tc.env, tc.file, tc.buildID, tc.want, file) - } - } - os.Setenv(homeEnv(), saveHome) - os.Setenv("PPROF_BINARY_PATH", savePath) -} - -func TestCollectMappingSources(t *testing.T) { - const startAddress uint64 = 0x40000 - const url = "http://example.com" - for _, tc := range []struct { - file, buildID string - want plugin.MappingSources - }{ - {"/usr/bin/binary", "buildId", mappingSources("buildId", url, startAddress)}, - {"/usr/bin/binary", "", mappingSources("/usr/bin/binary", url, startAddress)}, - {"", "", mappingSources(url, url, startAddress)}, - } { - p := &profile.Profile{ - Mapping: []*profile.Mapping{ - { - File: tc.file, - BuildID: tc.buildID, - Start: startAddress, - }, - }, - } - got := collectMappingSources(p, url) - if !reflect.DeepEqual(got, tc.want) { - t.Errorf("%s:%s, want %v, got %v", tc.file, tc.buildID, tc.want, got) - } - } -} - -func TestUnsourceMappings(t *testing.T) { - for _, tc := range []struct { - file, buildID, want string - }{ - {"/usr/bin/binary", "buildId", "/usr/bin/binary"}, - {"http://example.com", "", ""}, - } { - p := &profile.Profile{ - Mapping: []*profile.Mapping{ - { - File: tc.file, - BuildID: tc.buildID, - }, - }, - } - unsourceMappings(p) - if got := p.Mapping[0].File; got != tc.want { - t.Errorf("%s:%s, want %s, got %s", tc.file, tc.buildID, tc.want, got) - } - } -} - -type testObj struct { - home string -} - -func (o testObj) Open(file string, start, limit, offset uint64) (plugin.ObjFile, error) { - switch file { - case "/alternate/architecture/binary": - return testFile{file, "abcde10001"}, nil - case "/usr/bin/binary": - return testFile{file, "fedcb10000"}, nil - case filepath.Join(o.home, "pprof/binaries/abcde10001/binary"): - return testFile{file, "abcde10001"}, nil - } - return nil, fmt.Errorf("not found: %s", file) -} -func (testObj) Demangler(_ string) func(names []string) (map[string]string, error) { - return func(names []string) (map[string]string, error) { return nil, nil } -} -func (testObj) Disasm(file string, start, end uint64, intelSyntax bool) ([]plugin.Inst, error) { - return nil, nil -} - -type testFile struct{ name, buildID string } - -func (f testFile) Name() string { return f.name } -func (testFile) ObjAddr(addr uint64) (uint64, error) { return addr, nil } -func (f testFile) BuildID() string { return f.buildID } -func (testFile) SourceLine(addr uint64) ([]plugin.Frame, error) { return nil, nil } -func (testFile) Symbols(r *regexp.Regexp, addr uint64) ([]*plugin.Sym, error) { return nil, nil } -func (testFile) Close() error { return nil } - -func TestFetch(t *testing.T) { - const path = "testdata/" - type testcase struct { - source, execName string - } - - for _, tc := range []testcase{ - {path + "go.crc32.cpu", ""}, - {path + "go.nomappings.crash", "/bin/gotest.exe"}, - {"http://localhost/profile?file=cppbench.cpu", ""}, - } { - p, _, _, err := grabProfile(&source{ExecName: tc.execName}, tc.source, nil, testObj{}, &proftest.TestUI{T: t}, &httpTransport{}) - if err != nil { - t.Fatalf("%s: %s", tc.source, err) - } - if len(p.Sample) == 0 { - t.Errorf("%s: want non-zero samples", tc.source) - } - if e := tc.execName; e != "" { - switch { - case len(p.Mapping) == 0 || p.Mapping[0] == nil: - t.Errorf("%s: want mapping[0].execName == %s, got no mappings", tc.source, e) - case p.Mapping[0].File != e: - t.Errorf("%s: want mapping[0].execName == %s, got %s", tc.source, e, p.Mapping[0].File) - } - } - } -} - -func TestFetchWithBase(t *testing.T) { - baseConfig := 
currentConfig() - defer setCurrentConfig(baseConfig) - - type WantSample struct { - values []int64 - labels map[string][]string - } - - const path = "testdata/" - type testcase struct { - desc string - sources []string - bases []string - diffBases []string - normalize bool - wantSamples []WantSample - wantErrorMsg string - } - - testcases := []testcase{ - { - "not normalized base is same as source", - []string{path + "cppbench.contention"}, - []string{path + "cppbench.contention"}, - nil, - false, - nil, - "", - }, - { - "not normalized base is same as source", - []string{path + "cppbench.contention"}, - []string{path + "cppbench.contention"}, - nil, - false, - nil, - "", - }, - { - "not normalized single source, multiple base (all profiles same)", - []string{path + "cppbench.contention"}, - []string{path + "cppbench.contention", path + "cppbench.contention"}, - nil, - false, - []WantSample{ - { - values: []int64{-2700, -608881724}, - labels: map[string][]string{}, - }, - { - values: []int64{-100, -23992}, - labels: map[string][]string{}, - }, - { - values: []int64{-200, -179943}, - labels: map[string][]string{}, - }, - { - values: []int64{-100, -17778444}, - labels: map[string][]string{}, - }, - { - values: []int64{-100, -75976}, - labels: map[string][]string{}, - }, - { - values: []int64{-300, -63568134}, - labels: map[string][]string{}, - }, - }, - "", - }, - { - "not normalized, different base and source", - []string{path + "cppbench.contention"}, - []string{path + "cppbench.small.contention"}, - nil, - false, - []WantSample{ - { - values: []int64{1700, 608878600}, - labels: map[string][]string{}, - }, - { - values: []int64{100, 23992}, - labels: map[string][]string{}, - }, - { - values: []int64{200, 179943}, - labels: map[string][]string{}, - }, - { - values: []int64{100, 17778444}, - labels: map[string][]string{}, - }, - { - values: []int64{100, 75976}, - labels: map[string][]string{}, - }, - { - values: []int64{300, 63568134}, - labels: map[string][]string{}, - }, - }, - "", - }, - { - "normalized base is same as source", - []string{path + "cppbench.contention"}, - []string{path + "cppbench.contention"}, - nil, - true, - nil, - "", - }, - { - "normalized single source, multiple base (all profiles same)", - []string{path + "cppbench.contention"}, - []string{path + "cppbench.contention", path + "cppbench.contention"}, - nil, - true, - nil, - "", - }, - { - "normalized different base and source", - []string{path + "cppbench.contention"}, - []string{path + "cppbench.small.contention"}, - nil, - true, - []WantSample{ - { - values: []int64{-229, -369}, - labels: map[string][]string{}, - }, - { - values: []int64{29, 0}, - labels: map[string][]string{}, - }, - { - values: []int64{57, 1}, - labels: map[string][]string{}, - }, - { - values: []int64{29, 80}, - labels: map[string][]string{}, - }, - { - values: []int64{29, 0}, - labels: map[string][]string{}, - }, - { - values: []int64{86, 288}, - labels: map[string][]string{}, - }, - }, - "", - }, - { - "not normalized diff base is same as source", - []string{path + "cppbench.contention"}, - nil, - []string{path + "cppbench.contention"}, - false, - []WantSample{ - { - values: []int64{2700, 608881724}, - labels: map[string][]string{}, - }, - { - values: []int64{100, 23992}, - labels: map[string][]string{}, - }, - { - values: []int64{200, 179943}, - labels: map[string][]string{}, - }, - { - values: []int64{100, 17778444}, - labels: map[string][]string{}, - }, - { - values: []int64{100, 75976}, - labels: map[string][]string{}, - }, - { - values: 
[]int64{300, 63568134}, - labels: map[string][]string{}, - }, - { - values: []int64{-2700, -608881724}, - labels: map[string][]string{"pprof::base": {"true"}}, - }, - { - values: []int64{-100, -23992}, - labels: map[string][]string{"pprof::base": {"true"}}, - }, - { - values: []int64{-200, -179943}, - labels: map[string][]string{"pprof::base": {"true"}}, - }, - { - values: []int64{-100, -17778444}, - labels: map[string][]string{"pprof::base": {"true"}}, - }, - { - values: []int64{-100, -75976}, - labels: map[string][]string{"pprof::base": {"true"}}, - }, - { - values: []int64{-300, -63568134}, - labels: map[string][]string{"pprof::base": {"true"}}, - }, - }, - "", - }, - { - "diff_base and base both specified", - []string{path + "cppbench.contention"}, - []string{path + "cppbench.contention"}, - []string{path + "cppbench.contention"}, - false, - nil, - "-base and -diff_base flags cannot both be specified", - }, - } - - for _, tc := range testcases { - t.Run(tc.desc, func(t *testing.T) { - setCurrentConfig(baseConfig) - f := testFlags{ - stringLists: map[string][]string{ - "base": tc.bases, - "diff_base": tc.diffBases, - }, - bools: map[string]bool{ - "normalize": tc.normalize, - }, - } - f.args = tc.sources - - o := setDefaults(&plugin.Options{ - UI: &proftest.TestUI{T: t, AllowRx: "Local symbolization failed|Some binary filenames not available"}, - Flagset: f, - HTTPTransport: transport.New(nil), - }) - src, _, err := parseFlags(o) - - if tc.wantErrorMsg != "" { - if err == nil { - t.Fatalf("got nil, want error %q", tc.wantErrorMsg) - } - - if gotErrMsg := err.Error(); gotErrMsg != tc.wantErrorMsg { - t.Fatalf("got error %q, want error %q", gotErrMsg, tc.wantErrorMsg) - } - return - } - - if err != nil { - t.Fatalf("got error %q, want no error", err) - } - - p, err := fetchProfiles(src, o) - - if err != nil { - t.Fatalf("got error %q, want no error", err) - } - - if got, want := len(p.Sample), len(tc.wantSamples); got != want { - t.Fatalf("got %d samples want %d", got, want) - } - - for i, sample := range p.Sample { - if !reflect.DeepEqual(tc.wantSamples[i].values, sample.Value) { - t.Errorf("for sample %d got values %v, want %v", i, sample.Value, tc.wantSamples[i]) - } - if !reflect.DeepEqual(tc.wantSamples[i].labels, sample.Label) { - t.Errorf("for sample %d got labels %v, want %v", i, sample.Label, tc.wantSamples[i].labels) - } - } - }) - } -} - -// mappingSources creates MappingSources map with a single item. 
-func mappingSources(key, source string, start uint64) plugin.MappingSources { - return plugin.MappingSources{ - key: []struct { - Source string - Start uint64 - }{ - {Source: source, Start: start}, - }, - } -} - -type httpTransport struct{} - -func (tr *httpTransport) RoundTrip(req *http.Request) (*http.Response, error) { - values := req.URL.Query() - file := values.Get("file") - - if file == "" { - return nil, fmt.Errorf("want .../file?profile, got %s", req.URL.String()) - } - - t := &http.Transport{} - t.RegisterProtocol("file", http.NewFileTransport(http.Dir("testdata/"))) - - c := &http.Client{Transport: t} - return c.Get("file:///" + file) -} - -func closedError() string { - if runtime.GOOS == "plan9" { - return "listen hungup" - } - return "use of closed" -} - -func TestHTTPSInsecure(t *testing.T) { - if runtime.GOOS == "nacl" || runtime.GOOS == "js" { - t.Skip("test assumes tcp available") - } - saveHome := os.Getenv(homeEnv()) - tempdir, err := ioutil.TempDir("", "home") - if err != nil { - t.Fatal("creating temp dir: ", err) - } - defer os.RemoveAll(tempdir) - - // pprof writes to $HOME/pprof by default which is not necessarily - // writeable (e.g. on a Debian buildd) so set $HOME to something we - // know we can write to for the duration of the test. - os.Setenv(homeEnv(), tempdir) - defer os.Setenv(homeEnv(), saveHome) - - baseConfig := currentConfig() - defer setCurrentConfig(baseConfig) - - tlsCert, _, _ := selfSignedCert(t, "") - tlsConfig := &tls.Config{Certificates: []tls.Certificate{tlsCert}} - - l, err := tls.Listen("tcp", "localhost:0", tlsConfig) - if err != nil { - t.Fatalf("net.Listen: got error %v, want no error", err) - } - - donec := make(chan error, 1) - go func(donec chan<- error) { - donec <- http.Serve(l, nil) - }(donec) - defer func() { - if got, want := <-donec, closedError(); !strings.Contains(got.Error(), want) { - t.Fatalf("Serve got error %v, want %q", got, want) - } - }() - defer l.Close() - - outputTempFile, err := ioutil.TempFile("", "profile_output") - if err != nil { - t.Fatalf("Failed to create tempfile: %v", err) - } - defer os.Remove(outputTempFile.Name()) - defer outputTempFile.Close() - - address := "https+insecure://" + l.Addr().String() + "/debug/pprof/goroutine" - s := &source{ - Sources: []string{address}, - Timeout: 10, - Symbolize: "remote", - } - o := &plugin.Options{ - Obj: &binutils.Binutils{}, - UI: &proftest.TestUI{T: t, AllowRx: "Saved profile in"}, - HTTPTransport: transport.New(nil), - } - o.Sym = &symbolizer.Symbolizer{Obj: o.Obj, UI: o.UI} - p, err := fetchProfiles(s, o) - if err != nil { - t.Fatal(err) - } - if len(p.SampleType) == 0 { - t.Fatalf("fetchProfiles(%s) got empty profile: len(p.SampleType)==0", address) - } - if len(p.Function) == 0 { - t.Fatalf("fetchProfiles(%s) got non-symbolized profile: len(p.Function)==0", address) - } - if err := checkProfileHasFunction(p, "TestHTTPSInsecure"); err != nil { - t.Fatalf("fetchProfiles(%s) %v", address, err) - } -} - -func TestHTTPSWithServerCertFetch(t *testing.T) { - if runtime.GOOS == "nacl" || runtime.GOOS == "js" { - t.Skip("test assumes tcp available") - } - saveHome := os.Getenv(homeEnv()) - tempdir, err := ioutil.TempDir("", "home") - if err != nil { - t.Fatal("creating temp dir: ", err) - } - defer os.RemoveAll(tempdir) - - // pprof writes to $HOME/pprof by default which is not necessarily - // writeable (e.g. on a Debian buildd) so set $HOME to something we - // know we can write to for the duration of the test. 
- os.Setenv(homeEnv(), tempdir) - defer os.Setenv(homeEnv(), saveHome) - - baseConfig := currentConfig() - defer setCurrentConfig(baseConfig) - - cert, certBytes, keyBytes := selfSignedCert(t, "localhost") - cas := x509.NewCertPool() - cas.AppendCertsFromPEM(certBytes) - - tlsConfig := &tls.Config{ - RootCAs: cas, - Certificates: []tls.Certificate{cert}, - ClientAuth: tls.RequireAndVerifyClientCert, - ClientCAs: cas, - } - - l, err := tls.Listen("tcp", "localhost:0", tlsConfig) - if err != nil { - t.Fatalf("net.Listen: got error %v, want no error", err) - } - - donec := make(chan error, 1) - go func(donec chan<- error) { - donec <- http.Serve(l, nil) - }(donec) - defer func() { - if got, want := <-donec, closedError(); !strings.Contains(got.Error(), want) { - t.Fatalf("Serve got error %v, want %q", got, want) - } - }() - defer l.Close() - - outputTempFile, err := ioutil.TempFile("", "profile_output") - if err != nil { - t.Fatalf("Failed to create tempfile: %v", err) - } - defer os.Remove(outputTempFile.Name()) - defer outputTempFile.Close() - - // Get port from the address, so request to the server can be made using - // the host name specified in certificates. - _, portStr, err := net.SplitHostPort(l.Addr().String()) - if err != nil { - t.Fatalf("cannot get port from URL: %v", err) - } - address := "https://" + "localhost:" + portStr + "/debug/pprof/goroutine" - s := &source{ - Sources: []string{address}, - Timeout: 10, - Symbolize: "remote", - } - - certTempFile, err := ioutil.TempFile("", "cert_output") - if err != nil { - t.Errorf("cannot create cert tempfile: %v", err) - } - defer os.Remove(certTempFile.Name()) - defer certTempFile.Close() - certTempFile.Write(certBytes) - - keyTempFile, err := ioutil.TempFile("", "key_output") - if err != nil { - t.Errorf("cannot create key tempfile: %v", err) - } - defer os.Remove(keyTempFile.Name()) - defer keyTempFile.Close() - keyTempFile.Write(keyBytes) - - f := &testFlags{ - strings: map[string]string{ - "tls_cert": certTempFile.Name(), - "tls_key": keyTempFile.Name(), - "tls_ca": certTempFile.Name(), - }, - } - o := &plugin.Options{ - Obj: &binutils.Binutils{}, - UI: &proftest.TestUI{T: t, AllowRx: "Saved profile in"}, - Flagset: f, - HTTPTransport: transport.New(f), - } - - o.Sym = &symbolizer.Symbolizer{Obj: o.Obj, UI: o.UI, Transport: o.HTTPTransport} - p, err := fetchProfiles(s, o) - if err != nil { - t.Fatal(err) - } - if len(p.SampleType) == 0 { - t.Fatalf("fetchProfiles(%s) got empty profile: len(p.SampleType)==0", address) - } - if len(p.Function) == 0 { - t.Fatalf("fetchProfiles(%s) got non-symbolized profile: len(p.Function)==0", address) - } - if err := checkProfileHasFunction(p, "TestHTTPSWithServerCertFetch"); err != nil { - t.Fatalf("fetchProfiles(%s) %v", address, err) - } -} - -func checkProfileHasFunction(p *profile.Profile, fname string) error { - for _, f := range p.Function { - if strings.Contains(f.Name, fname) { - return nil - } - } - return fmt.Errorf("got %s, want function %q", p.String(), fname) -} - -// selfSignedCert generates a self-signed certificate, and returns the -// generated certificate, and byte arrays containing the certificate and -// key associated with the certificate. 
-func selfSignedCert(t *testing.T, host string) (tls.Certificate, []byte, []byte) { - privKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) - if err != nil { - t.Fatalf("failed to generate private key: %v", err) - } - b, err := x509.MarshalECPrivateKey(privKey) - if err != nil { - t.Fatalf("failed to marshal private key: %v", err) - } - bk := pem.EncodeToMemory(&pem.Block{Type: "EC PRIVATE KEY", Bytes: b}) - - tmpl := x509.Certificate{ - SerialNumber: big.NewInt(1), - NotBefore: time.Now(), - NotAfter: time.Now().Add(10 * time.Minute), - IsCA: true, - DNSNames: []string{host}, - } - - b, err = x509.CreateCertificate(rand.Reader, &tmpl, &tmpl, privKey.Public(), privKey) - if err != nil { - t.Fatalf("failed to create cert: %v", err) - } - bc := pem.EncodeToMemory(&pem.Block{Type: "CERTIFICATE", Bytes: b}) - - cert, err := tls.X509KeyPair(bc, bk) - if err != nil { - t.Fatalf("failed to create TLS key pair: %v", err) - } - return cert, bc, bk -} diff --git a/internal/pprof/driver/flags.go b/internal/pprof/driver/flags.go deleted file mode 100644 index 53903191663..00000000000 --- a/internal/pprof/driver/flags.go +++ /dev/null @@ -1,71 +0,0 @@ -// Copyright 2018 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package driver - -import ( - "flag" - "strings" -) - -// GoFlags implements the plugin.FlagSet interface. -type GoFlags struct { - UsageMsgs []string -} - -// Bool implements the plugin.FlagSet interface. -func (*GoFlags) Bool(o string, d bool, c string) *bool { - return flag.Bool(o, d, c) -} - -// Int implements the plugin.FlagSet interface. -func (*GoFlags) Int(o string, d int, c string) *int { - return flag.Int(o, d, c) -} - -// Float64 implements the plugin.FlagSet interface. -func (*GoFlags) Float64(o string, d float64, c string) *float64 { - return flag.Float64(o, d, c) -} - -// String implements the plugin.FlagSet interface. -func (*GoFlags) String(o, d, c string) *string { - return flag.String(o, d, c) -} - -// StringList implements the plugin.FlagSet interface. -func (*GoFlags) StringList(o, d, c string) *[]*string { - return &[]*string{flag.String(o, d, c)} -} - -// ExtraUsage implements the plugin.FlagSet interface. -func (f *GoFlags) ExtraUsage() string { - return strings.Join(f.UsageMsgs, "\n") -} - -// AddExtraUsage implements the plugin.FlagSet interface. -func (f *GoFlags) AddExtraUsage(eu string) { - f.UsageMsgs = append(f.UsageMsgs, eu) -} - -// Parse implements the plugin.FlagSet interface. -func (*GoFlags) Parse(usage func()) []string { - flag.Usage = usage - flag.Parse() - args := flag.Args() - if len(args) == 0 { - usage() - } - return args -} diff --git a/internal/pprof/driver/flamegraph.go b/internal/pprof/driver/flamegraph.go deleted file mode 100644 index 6e37767554f..00000000000 --- a/internal/pprof/driver/flamegraph.go +++ /dev/null @@ -1,106 +0,0 @@ -// Copyright 2017 Google Inc. All Rights Reserved. 
-// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package driver - -import ( - "encoding/json" - "html/template" - "net/http" - "strings" - - "github.com/parca-dev/parca/internal/pprof/graph" - "github.com/parca-dev/parca/internal/pprof/measurement" - "github.com/parca-dev/parca/internal/pprof/report" -) - -type treeNode struct { - Name string `json:"n"` - FullName string `json:"f"` - Cum int64 `json:"v"` - CumFormat string `json:"l"` - Percent string `json:"p"` - Children []*treeNode `json:"c"` -} - -// flamegraph generates a web page containing a flamegraph. -func (ui *webInterface) flamegraph(w http.ResponseWriter, req *http.Request) { - // Force the call tree so that the graph is a tree. - // Also do not trim the tree so that the flame graph contains all functions. - rpt, errList := ui.makeReport(w, req, []string{"svg"}, func(cfg *config) { - cfg.CallTree = true - cfg.Trim = false - }) - if rpt == nil { - return // error already reported - } - - // Generate dot graph. - g, config := report.GetDOT(rpt) - var nodes []*treeNode - nroots := 0 - rootValue := int64(0) - nodeArr := []string{} - nodeMap := map[*graph.Node]*treeNode{} - // Make all nodes and the map, collect the roots. - for _, n := range g.Nodes { - v := n.CumValue() - fullName := n.Info.PrintableName() - node := &treeNode{ - Name: graph.ShortenFunctionName(fullName), - FullName: fullName, - Cum: v, - CumFormat: config.FormatValue(v), - Percent: strings.TrimSpace(measurement.Percentage(v, config.Total)), - } - nodes = append(nodes, node) - if len(n.In) == 0 { - nodes[nroots], nodes[len(nodes)-1] = nodes[len(nodes)-1], nodes[nroots] - nroots++ - rootValue += v - } - nodeMap[n] = node - // Get all node names into an array. - nodeArr = append(nodeArr, n.Info.Name) - } - // Populate the child links. - for _, n := range g.Nodes { - node := nodeMap[n] - for child := range n.Out { - node.Children = append(node.Children, nodeMap[child]) - } - } - - rootNode := &treeNode{ - Name: "root", - FullName: "root", - Cum: rootValue, - CumFormat: config.FormatValue(rootValue), - Percent: strings.TrimSpace(measurement.Percentage(rootValue, config.Total)), - Children: nodes[0:nroots], - } - - // JSON marshalling flame graph - b, err := json.Marshal(rootNode) - if err != nil { - http.Error(w, "error serializing flame graph", http.StatusInternalServerError) - ui.options.UI.PrintErr(err) - return - } - - ui.render(w, req, "flamegraph", rpt, errList, config.Labels, webArgs{ - FlameGraph: template.JS(b), - Nodes: nodeArr, - }) -} diff --git a/internal/pprof/driver/interactive.go b/internal/pprof/driver/interactive.go deleted file mode 100644 index f1765c5e913..00000000000 --- a/internal/pprof/driver/interactive.go +++ /dev/null @@ -1,418 +0,0 @@ -// Copyright 2014 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package driver - -import ( - "fmt" - "io" - "regexp" - "sort" - "strconv" - "strings" - - "github.com/parca-dev/parca/internal/pprof/plugin" - "github.com/parca-dev/parca/internal/pprof/report" - "github.com/google/pprof/profile" -) - -var commentStart = "//:" // Sentinel for comments on options -var tailDigitsRE = regexp.MustCompile("[0-9]+$") - -// interactive starts a shell to read pprof commands. -func interactive(p *profile.Profile, o *plugin.Options) error { - // Enter command processing loop. - o.UI.SetAutoComplete(newCompleter(functionNames(p))) - configure("compact_labels", "true") - configHelp["sample_index"] += fmt.Sprintf("Or use sample_index=name, with name in %v.\n", sampleTypes(p)) - - // Do not wait for the visualizer to complete, to allow multiple - // graphs to be visualized simultaneously. - interactiveMode = true - shortcuts := profileShortcuts(p) - - greetings(p, o.UI) - for { - input, err := o.UI.ReadLine("(pprof) ") - if err != nil { - if err != io.EOF { - return err - } - if input == "" { - return nil - } - } - - for _, input := range shortcuts.expand(input) { - // Process assignments of the form variable=value - if s := strings.SplitN(input, "=", 2); len(s) > 0 { - name := strings.TrimSpace(s[0]) - var value string - if len(s) == 2 { - value = s[1] - if comment := strings.LastIndex(value, commentStart); comment != -1 { - value = value[:comment] - } - value = strings.TrimSpace(value) - } - if isConfigurable(name) { - // All non-bool options require inputs - if len(s) == 1 && !isBoolConfig(name) { - o.UI.PrintErr(fmt.Errorf("please specify a value, e.g. %s=", name)) - continue - } - if name == "sample_index" { - // Error check sample_index=xxx to ensure xxx is a valid sample type. - index, err := p.SampleIndexByName(value) - if err != nil { - o.UI.PrintErr(err) - continue - } - if index < 0 || index >= len(p.SampleType) { - o.UI.PrintErr(fmt.Errorf("invalid sample_index %q", value)) - continue - } - value = p.SampleType[index].Type - } - if err := configure(name, value); err != nil { - o.UI.PrintErr(err) - } - continue - } - } - - tokens := strings.Fields(input) - if len(tokens) == 0 { - continue - } - - switch tokens[0] { - case "o", "options": - printCurrentOptions(p, o.UI) - continue - case "exit", "quit", "q": - return nil - case "help": - commandHelp(strings.Join(tokens[1:], " "), o.UI) - continue - } - - args, cfg, err := parseCommandLine(tokens) - if err == nil { - err = generateReportWrapper(p, args, cfg, o) - } - - if err != nil { - o.UI.PrintErr(err) - } - } - } -} - -var generateReportWrapper = generateReport // For testing purposes. - -// greetings prints a brief welcome and some overall profile -// information before accepting interactive commands. 
-func greetings(p *profile.Profile, ui plugin.UI) { - numLabelUnits := identifyNumLabelUnits(p, ui) - ropt, err := reportOptions(p, numLabelUnits, currentConfig()) - if err == nil { - rpt := report.New(p, ropt) - ui.Print(strings.Join(report.ProfileLabels(rpt), "\n")) - if rpt.Total() == 0 && len(p.SampleType) > 1 { - ui.Print(`No samples were found with the default sample value type.`) - ui.Print(`Try "sample_index" command to analyze different sample values.`, "\n") - } - } - ui.Print(`Entering interactive mode (type "help" for commands, "o" for options)`) -} - -// shortcuts represents composite commands that expand into a sequence -// of other commands. -type shortcuts map[string][]string - -func (a shortcuts) expand(input string) []string { - input = strings.TrimSpace(input) - if a != nil { - if r, ok := a[input]; ok { - return r - } - } - return []string{input} -} - -var pprofShortcuts = shortcuts{ - ":": []string{"focus=", "ignore=", "hide=", "tagfocus=", "tagignore="}, -} - -// profileShortcuts creates macros for convenience and backward compatibility. -func profileShortcuts(p *profile.Profile) shortcuts { - s := pprofShortcuts - // Add shortcuts for sample types - for _, st := range p.SampleType { - command := fmt.Sprintf("sample_index=%s", st.Type) - s[st.Type] = []string{command} - s["total_"+st.Type] = []string{"mean=0", command} - s["mean_"+st.Type] = []string{"mean=1", command} - } - return s -} - -func sampleTypes(p *profile.Profile) []string { - types := make([]string, len(p.SampleType)) - for i, t := range p.SampleType { - types[i] = t.Type - } - return types -} - -func printCurrentOptions(p *profile.Profile, ui plugin.UI) { - var args []string - current := currentConfig() - for _, f := range configFields { - n := f.name - v := current.get(f) - comment := "" - switch { - case len(f.choices) > 0: - values := append([]string{}, f.choices...) - sort.Strings(values) - comment = "[" + strings.Join(values, " | ") + "]" - case n == "sample_index": - st := sampleTypes(p) - if v == "" { - // Apply default (last sample index). - v = st[len(st)-1] - } - // Add comments for all sample types in profile. - comment = "[" + strings.Join(st, " | ") + "]" - case n == "source_path": - continue - case n == "nodecount" && v == "-1": - comment = "default" - case v == "": - // Add quotes for empty values. - v = `""` - } - if comment != "" { - comment = commentStart + " " + comment - } - args = append(args, fmt.Sprintf(" %-25s = %-20s %s", n, v, comment)) - } - sort.Strings(args) - ui.Print(strings.Join(args, "\n")) -} - -// parseCommandLine parses a command and returns the pprof command to -// execute and the configuration to use for the report. -func parseCommandLine(input []string) ([]string, config, error) { - cmd, args := input[:1], input[1:] - name := cmd[0] - - c := pprofCommands[name] - if c == nil { - // Attempt splitting digits on abbreviated commands (eg top10) - if d := tailDigitsRE.FindString(name); d != "" && d != name { - name = name[:len(name)-len(d)] - cmd[0], args = name, append([]string{d}, args...) 
- c = pprofCommands[name] - } - } - if c == nil { - if _, ok := configHelp[name]; ok { - value := "" - if len(args) > 0 { - value = args[0] - } - return nil, config{}, fmt.Errorf("did you mean: %s=%s", name, value) - } - return nil, config{}, fmt.Errorf("unrecognized command: %q", name) - } - - if c.hasParam { - if len(args) == 0 { - return nil, config{}, fmt.Errorf("command %s requires an argument", name) - } - cmd = append(cmd, args[0]) - args = args[1:] - } - - // Copy config since options set in the command line should not persist. - vcopy := currentConfig() - - var focus, ignore string - for i := 0; i < len(args); i++ { - t := args[i] - if n, err := strconv.ParseInt(t, 10, 32); err == nil { - vcopy.NodeCount = int(n) - continue - } - switch t[0] { - case '>': - outputFile := t[1:] - if outputFile == "" { - i++ - if i >= len(args) { - return nil, config{}, fmt.Errorf("unexpected end of line after >") - } - outputFile = args[i] - } - vcopy.Output = outputFile - case '-': - if t == "--cum" || t == "-cum" { - vcopy.Sort = "cum" - continue - } - ignore = catRegex(ignore, t[1:]) - default: - focus = catRegex(focus, t) - } - } - - if name == "tags" { - if focus != "" { - vcopy.TagFocus = focus - } - if ignore != "" { - vcopy.TagIgnore = ignore - } - } else { - if focus != "" { - vcopy.Focus = focus - } - if ignore != "" { - vcopy.Ignore = ignore - } - } - if vcopy.NodeCount == -1 && (name == "text" || name == "top") { - vcopy.NodeCount = 10 - } - - return cmd, vcopy, nil -} - -func catRegex(a, b string) string { - if a != "" && b != "" { - return a + "|" + b - } - return a + b -} - -// commandHelp displays help and usage information for all Commands -// and Variables or a specific Command or Variable. -func commandHelp(args string, ui plugin.UI) { - if args == "" { - help := usage(false) - help = help + ` - : Clear focus/ignore/hide/tagfocus/tagignore - - type "help " for more information -` - - ui.Print(help) - return - } - - if c := pprofCommands[args]; c != nil { - ui.Print(c.help(args)) - return - } - - if help, ok := configHelp[args]; ok { - ui.Print(help + "\n") - return - } - - ui.PrintErr("Unknown command: " + args) -} - -// newCompleter creates an autocompletion function for a set of commands. -func newCompleter(fns []string) func(string) string { - return func(line string) string { - switch tokens := strings.Fields(line); len(tokens) { - case 0: - // Nothing to complete - case 1: - // Single token -- complete command name - if match := matchVariableOrCommand(tokens[0]); match != "" { - return match - } - case 2: - if tokens[0] == "help" { - if match := matchVariableOrCommand(tokens[1]); match != "" { - return tokens[0] + " " + match - } - return line - } - fallthrough - default: - // Multiple tokens -- complete using functions, except for tags - if cmd := pprofCommands[tokens[0]]; cmd != nil && tokens[0] != "tags" { - lastTokenIdx := len(tokens) - 1 - lastToken := tokens[lastTokenIdx] - if strings.HasPrefix(lastToken, "-") { - lastToken = "-" + functionCompleter(lastToken[1:], fns) - } else { - lastToken = functionCompleter(lastToken, fns) - } - return strings.Join(append(tokens[:lastTokenIdx], lastToken), " ") - } - } - return line - } -} - -// matchVariableOrCommand attempts to match a string token to the prefix of a Command. 
-func matchVariableOrCommand(token string) string { - token = strings.ToLower(token) - var matches []string - for cmd := range pprofCommands { - if strings.HasPrefix(cmd, token) { - matches = append(matches, cmd) - } - } - matches = append(matches, completeConfig(token)...) - if len(matches) == 1 { - return matches[0] - } - return "" -} - -// functionCompleter replaces provided substring with a function -// name retrieved from a profile if a single match exists. Otherwise, -// it returns unchanged substring. It defaults to no-op if the profile -// is not specified. -func functionCompleter(substring string, fns []string) string { - found := "" - for _, fName := range fns { - if strings.Contains(fName, substring) { - if found != "" { - return substring - } - found = fName - } - } - if found != "" { - return found - } - return substring -} - -func functionNames(p *profile.Profile) []string { - var fns []string - for _, fn := range p.Function { - fns = append(fns, fn.Name) - } - return fns -} diff --git a/internal/pprof/driver/interactive_test.go b/internal/pprof/driver/interactive_test.go deleted file mode 100644 index 60706e7dbc4..00000000000 --- a/internal/pprof/driver/interactive_test.go +++ /dev/null @@ -1,274 +0,0 @@ -// Copyright 2014 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package driver - -import ( - "fmt" - "math/rand" - "strings" - "testing" - - "github.com/parca-dev/parca/internal/pprof/plugin" - "github.com/parca-dev/parca/internal/pprof/proftest" - "github.com/parca-dev/parca/internal/pprof/report" - "github.com/parca-dev/parca/internal/pprof/transport" - "github.com/google/pprof/profile" -) - -func TestShell(t *testing.T) { - p := &profile.Profile{} - generateReportWrapper = checkValue - defer func() { generateReportWrapper = generateReport }() - - // Use test commands and variables to exercise interactive processing - var savedCommands commands - savedCommands, pprofCommands = pprofCommands, testCommands - defer func() { pprofCommands = savedCommands }() - - savedConfig := currentConfig() - defer setCurrentConfig(savedConfig) - - shortcuts1, scScript1 := makeShortcuts(interleave(script, 2), 1) - shortcuts2, scScript2 := makeShortcuts(interleave(script, 1), 2) - - var testcases = []struct { - name string - input []string - shortcuts shortcuts - allowRx string - numAllowRxMatches int - propagateError bool - }{ - {"Random interleave of independent scripts 1", interleave(script, 0), pprofShortcuts, "", 0, false}, - {"Random interleave of independent scripts 2", interleave(script, 1), pprofShortcuts, "", 0, false}, - {"Random interleave of independent scripts with shortcuts 1", scScript1, shortcuts1, "", 0, false}, - {"Random interleave of independent scripts with shortcuts 2", scScript2, shortcuts2, "", 0, false}, - {"Group with invalid value", []string{"sort=this"}, pprofShortcuts, `invalid "sort" value`, 1, false}, - {"No special value provided for the option", []string{"sample_index"}, pprofShortcuts, `please specify a value, e.g. sample_index=`, 1, false}, - {"No string value provided for the option", []string{"focus"}, pprofShortcuts, `please specify a value, e.g. focus=`, 1, false}, - {"No float value provided for the option", []string{"divide_by"}, pprofShortcuts, `please specify a value, e.g. divide_by=`, 1, false}, - {"Helpful input format reminder", []string{"sample_index 0"}, pprofShortcuts, `did you mean: sample_index=0`, 1, false}, - {"Verify propagation of IO errors", []string{"**error**"}, pprofShortcuts, "", 0, true}, - } - - o := setDefaults(&plugin.Options{HTTPTransport: transport.New(nil)}) - for _, tc := range testcases { - t.Run(tc.name, func(t *testing.T) { - setCurrentConfig(savedConfig) - pprofShortcuts = tc.shortcuts - ui := &proftest.TestUI{ - T: t, - Input: tc.input, - AllowRx: tc.allowRx, - } - o.UI = ui - - err := interactive(p, o) - if (tc.propagateError && err == nil) || (!tc.propagateError && err != nil) { - t.Errorf("%s: %v", tc.name, err) - } - - // Confirm error message written out once. - if tc.numAllowRxMatches != ui.NumAllowRxMatches { - t.Errorf("want error message to be printed %d time(s), got %d", - tc.numAllowRxMatches, ui.NumAllowRxMatches) - } - }) - } -} - -var testCommands = commands{ - "check": &command{report.Raw, nil, nil, true, "", ""}, -} - -// script contains sequences of commands to be executed for testing. Commands -// are split by semicolon and interleaved randomly, so they must be -// independent from each other. 
-var script = []string{ - "call_tree=true;call_tree=false;check call_tree=false;call_tree=yes;check call_tree=true", - "mean=1;check mean=true;mean=n;check mean=false", - "nodecount=-1;nodecount=-2;check nodecount=-2;nodecount=999999;check nodecount=999999", - "nodefraction=-1;nodefraction=-2.5;check nodefraction=-2.5;nodefraction=0.0001;check nodefraction=0.0001", - "focus=one;focus=two;check focus=two", - "flat=true;check sort=flat;cum=1;check sort=cum", -} - -func makeShortcuts(input []string, seed int) (shortcuts, []string) { - rand.Seed(int64(seed)) - - s := shortcuts{} - var output, chunk []string - for _, l := range input { - chunk = append(chunk, l) - switch rand.Intn(3) { - case 0: - // Create a macro for commands in 'chunk'. - macro := fmt.Sprintf("alias%d", len(s)) - s[macro] = chunk - output = append(output, macro) - chunk = nil - case 1: - // Append commands in 'chunk' by themselves. - output = append(output, chunk...) - chunk = nil - case 2: - // Accumulate commands into 'chunk' - } - } - output = append(output, chunk...) - return s, output -} - -func checkValue(p *profile.Profile, cmd []string, cfg config, o *plugin.Options) error { - if len(cmd) != 2 { - return fmt.Errorf("expected len(cmd)==2, got %v", cmd) - } - - input := cmd[1] - args := strings.SplitN(input, "=", 2) - if len(args) == 0 { - return fmt.Errorf("unexpected empty input") - } - name, value := args[0], "" - if len(args) == 2 { - value = args[1] - } - - f, ok := configFieldMap[name] - if !ok { - return fmt.Errorf("Could not find variable named %s", name) - } - - if got := cfg.get(f); got != value { - return fmt.Errorf("Variable %s, want %s, got %s", name, value, got) - } - return nil -} - -func interleave(input []string, seed int) []string { - var inputs [][]string - for _, s := range input { - inputs = append(inputs, strings.Split(s, ";")) - } - rand.Seed(int64(seed)) - var output []string - for len(inputs) > 0 { - next := rand.Intn(len(inputs)) - output = append(output, inputs[next][0]) - if tail := inputs[next][1:]; len(tail) > 0 { - inputs[next] = tail - } else { - inputs = append(inputs[:next], inputs[next+1:]...) 
- } - } - return output -} - -func TestInteractiveCommands(t *testing.T) { - type interactiveTestcase struct { - input string - want map[string]string - } - - testcases := []interactiveTestcase{ - { - "top 10 --cum focus1 -ignore focus2", - map[string]string{ - "granularity": "functions", - "nodecount": "10", - "sort": "cum", - "focus": "focus1|focus2", - "ignore": "ignore", - }, - }, - { - "top10 --cum focus1 -ignore focus2", - map[string]string{ - "granularity": "functions", - "nodecount": "10", - "sort": "cum", - "focus": "focus1|focus2", - "ignore": "ignore", - }, - }, - { - "dot", - map[string]string{ - "granularity": "functions", - "nodecount": "80", - "sort": "flat", - }, - }, - { - "tags -ignore1 -ignore2 focus1 >out", - map[string]string{ - "granularity": "functions", - "nodecount": "80", - "sort": "flat", - "output": "out", - "tagfocus": "focus1", - "tagignore": "ignore1|ignore2", - }, - }, - { - "weblist find -test", - map[string]string{ - "granularity": "addresses", - "noinlines": "false", - "nodecount": "0", - "sort": "flat", - "ignore": "test", - }, - }, - { - "callgrind fun -ignore >out", - map[string]string{ - "granularity": "addresses", - "nodecount": "0", - "sort": "flat", - "output": "out", - }, - }, - { - "999", - nil, // Error - }, - } - - for _, tc := range testcases { - cmd, cfg, err := parseCommandLine(strings.Fields(tc.input)) - if tc.want == nil && err != nil { - // Error expected - continue - } - if err != nil { - t.Errorf("failed on %q: %v", tc.input, err) - continue - } - - // Get report output format - c := pprofCommands[cmd[0]] - if c == nil { - t.Fatalf("unexpected nil command") - } - cfg = applyCommandOverrides(cmd[0], c.format, cfg) - - for n, want := range tc.want { - if got := cfg.get(configFieldMap[n]); got != want { - t.Errorf("failed on %q, cmd=%q, %s got %s, want %s", tc.input, cmd, n, got, want) - } - } - } -} diff --git a/internal/pprof/driver/options.go b/internal/pprof/driver/options.go deleted file mode 100644 index bc88269ff33..00000000000 --- a/internal/pprof/driver/options.go +++ /dev/null @@ -1,100 +0,0 @@ -// Copyright 2014 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package driver - -import ( - "bufio" - "fmt" - "io" - "os" - "strings" - - "github.com/parca-dev/parca/internal/pprof/binutils" - "github.com/parca-dev/parca/internal/pprof/plugin" - "github.com/parca-dev/parca/internal/pprof/symbolizer" - "github.com/parca-dev/parca/internal/pprof/transport" -) - -// setDefaults returns a new plugin.Options with zero fields sets to -// sensible defaults. 
-func setDefaults(o *plugin.Options) *plugin.Options { - d := &plugin.Options{} - if o != nil { - *d = *o - } - if d.Writer == nil { - d.Writer = oswriter{} - } - if d.Flagset == nil { - d.Flagset = &GoFlags{} - } - if d.Obj == nil { - d.Obj = &binutils.Binutils{} - } - if d.UI == nil { - d.UI = &stdUI{r: bufio.NewReader(os.Stdin)} - } - if d.HTTPTransport == nil { - d.HTTPTransport = transport.New(d.Flagset) - } - if d.Sym == nil { - d.Sym = &symbolizer.Symbolizer{Obj: d.Obj, UI: d.UI, Transport: d.HTTPTransport} - } - return d -} - -type stdUI struct { - r *bufio.Reader -} - -func (ui *stdUI) ReadLine(prompt string) (string, error) { - os.Stdout.WriteString(prompt) - return ui.r.ReadString('\n') -} - -func (ui *stdUI) Print(args ...interface{}) { - ui.fprint(os.Stderr, args) -} - -func (ui *stdUI) PrintErr(args ...interface{}) { - ui.fprint(os.Stderr, args) -} - -func (ui *stdUI) IsTerminal() bool { - return false -} - -func (ui *stdUI) WantBrowser() bool { - return true -} - -func (ui *stdUI) SetAutoComplete(func(string) string) { -} - -func (ui *stdUI) fprint(f *os.File, args []interface{}) { - text := fmt.Sprint(args...) - if !strings.HasSuffix(text, "\n") { - text += "\n" - } - f.WriteString(text) -} - -// oswriter implements the Writer interface using a regular file. -type oswriter struct{} - -func (oswriter) Open(name string) (io.WriteCloser, error) { - f, err := os.Create(name) - return f, err -} diff --git a/internal/pprof/driver/settings.go b/internal/pprof/driver/settings.go deleted file mode 100644 index f72314b1857..00000000000 --- a/internal/pprof/driver/settings.go +++ /dev/null @@ -1,157 +0,0 @@ -package driver - -import ( - "encoding/json" - "fmt" - "io/ioutil" - "net/url" - "os" - "path/filepath" -) - -// settings holds pprof settings. -type settings struct { - // Configs holds a list of named UI configurations. - Configs []namedConfig `json:"configs"` -} - -// namedConfig associates a name with a config. -type namedConfig struct { - Name string `json:"name"` - config -} - -// settingsFileName returns the name of the file where settings should be saved. -func settingsFileName() (string, error) { - // Return "pprof/settings.json" under os.UserConfigDir(). - dir, err := os.UserConfigDir() - if err != nil { - return "", err - } - return filepath.Join(dir, "pprof", "settings.json"), nil -} - -// readSettings reads settings from fname. -func readSettings(fname string) (*settings, error) { - data, err := ioutil.ReadFile(fname) - if err != nil { - if os.IsNotExist(err) { - return &settings{}, nil - } - return nil, fmt.Errorf("could not read settings: %w", err) - } - settings := &settings{} - if err := json.Unmarshal(data, settings); err != nil { - return nil, fmt.Errorf("could not parse settings: %w", err) - } - for i := range settings.Configs { - settings.Configs[i].resetTransient() - } - return settings, nil -} - -// writeSettings saves settings to fname. 
-func writeSettings(fname string, settings *settings) error { - data, err := json.MarshalIndent(settings, "", " ") - if err != nil { - return fmt.Errorf("could not encode settings: %w", err) - } - - // create the settings directory if it does not exist - // XDG specifies permissions 0700 when creating settings dirs: - // https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html - if err := os.MkdirAll(filepath.Dir(fname), 0700); err != nil { - return fmt.Errorf("failed to create settings directory: %w", err) - } - - if err := ioutil.WriteFile(fname, data, 0644); err != nil { - return fmt.Errorf("failed to write settings: %w", err) - } - return nil -} - -// configMenuEntry holds information for a single config menu entry. -type configMenuEntry struct { - Name string - URL string - Current bool // Is this the currently selected config? - UserConfig bool // Is this a user-provided config? -} - -// configMenu returns a list of items to add to a menu in the web UI. -func configMenu(fname string, url url.URL) []configMenuEntry { - // Start with system configs. - configs := []namedConfig{{Name: "Default", config: defaultConfig()}} - if settings, err := readSettings(fname); err == nil { - // Add user configs. - configs = append(configs, settings.Configs...) - } - - // Convert to menu entries. - result := make([]configMenuEntry, len(configs)) - lastMatch := -1 - for i, cfg := range configs { - dst, changed := cfg.config.makeURL(url) - if !changed { - lastMatch = i - } - result[i] = configMenuEntry{ - Name: cfg.Name, - URL: dst.String(), - UserConfig: (i != 0), - } - } - // Mark the last matching config as currennt - if lastMatch >= 0 { - result[lastMatch].Current = true - } - return result -} - -// editSettings edits settings by applying fn to them. -func editSettings(fname string, fn func(s *settings) error) error { - settings, err := readSettings(fname) - if err != nil { - return err - } - if err := fn(settings); err != nil { - return err - } - return writeSettings(fname, settings) -} - -// setConfig saves the config specified in request to fname. -func setConfig(fname string, request url.URL) error { - q := request.Query() - name := q.Get("config") - if name == "" { - return fmt.Errorf("invalid config name") - } - cfg := currentConfig() - if err := cfg.applyURL(q); err != nil { - return err - } - return editSettings(fname, func(s *settings) error { - for i, c := range s.Configs { - if c.Name == name { - s.Configs[i].config = cfg - return nil - } - } - s.Configs = append(s.Configs, namedConfig{Name: name, config: cfg}) - return nil - }) -} - -// removeConfig removes config from fname. -func removeConfig(fname, config string) error { - return editSettings(fname, func(s *settings) error { - for i, c := range s.Configs { - if c.Name == config { - s.Configs = append(s.Configs[:i], s.Configs[i+1:]...) - return nil - } - } - return fmt.Errorf("config %s not found", config) - }) -} diff --git a/internal/pprof/driver/settings_test.go b/internal/pprof/driver/settings_test.go deleted file mode 100644 index f87eb490ca5..00000000000 --- a/internal/pprof/driver/settings_test.go +++ /dev/null @@ -1,247 +0,0 @@ -package driver - -import ( - "io/ioutil" - "net/url" - "os" - "path/filepath" - "reflect" - "testing" -) - -// settingsDirAndFile returns a directory in which settings should be stored -// and the name of the settings file. The caller must delete the directory when -// done. 
-func settingsDirAndFile(t *testing.T) (string, string) { - tmpDir, err := ioutil.TempDir("", "pprof_settings_test") - if err != nil { - t.Fatalf("error creating temporary directory: %v", err) - } - return tmpDir, filepath.Join(tmpDir, "settings.json") -} - -func TestSettings(t *testing.T) { - tmpDir, fname := settingsDirAndFile(t) - defer os.RemoveAll(tmpDir) - s, err := readSettings(fname) - if err != nil { - t.Fatalf("error reading empty settings: %v", err) - } - if len(s.Configs) != 0 { - t.Fatalf("expected empty settings; got %v", s) - } - s.Configs = append(s.Configs, namedConfig{ - Name: "Foo", - config: config{ - Focus: "focus", - // Ensure that transient fields are not saved/restored. - Output: "output", - SourcePath: "source", - TrimPath: "trim", - DivideBy: -2, - }, - }) - if err := writeSettings(fname, s); err != nil { - t.Fatal(err) - } - s2, err := readSettings(fname) - if err != nil { - t.Fatal(err) - } - - // Change the transient fields to their expected values. - s.Configs[0].resetTransient() - if !reflect.DeepEqual(s, s2) { - t.Fatalf("ReadSettings = %v; expected %v", s2, s) - } -} - -func TestParseConfig(t *testing.T) { - // Use all the fields to check they are saved/restored from URL. - cfg := config{ - Output: "", - DropNegative: true, - CallTree: true, - RelativePercentages: true, - Unit: "auto", - CompactLabels: true, - SourcePath: "", - TrimPath: "", - NodeCount: 10, - NodeFraction: 0.1, - EdgeFraction: 0.2, - Trim: true, - Focus: "focus", - Ignore: "ignore", - PruneFrom: "prune_from", - Hide: "hide", - Show: "show", - ShowFrom: "show_from", - TagFocus: "tagfocus", - TagIgnore: "tagignore", - TagShow: "tagshow", - TagHide: "taghide", - DivideBy: 1, - Mean: true, - Normalize: true, - Sort: "cum", - Granularity: "functions", - NoInlines: true, - } - url, changed := cfg.makeURL(url.URL{}) - if !changed { - t.Error("applyConfig returned changed=false after applying non-empty config") - } - cfg2 := defaultConfig() - if err := cfg2.applyURL(url.Query()); err != nil { - t.Fatalf("fromURL failed: %v", err) - } - if !reflect.DeepEqual(cfg, cfg2) { - t.Fatalf("parsed config = %+v; expected match with %+v", cfg2, cfg) - } - if url2, changed := cfg.makeURL(url); changed { - t.Errorf("ApplyConfig returned changed=true after applying same config (%q instead of expected %q", url2.String(), url.String()) - } -} - -// TestDefaultConfig verifies that default config values are omitted from URL. -func TestDefaultConfig(t *testing.T) { - cfg := defaultConfig() - url, changed := cfg.makeURL(url.URL{}) - if changed { - t.Error("applyConfig returned changed=true after applying default config") - } - if url.String() != "" { - t.Errorf("applyConfig returned %q; expecting %q", url.String(), "") - } -} - -func TestConfigMenu(t *testing.T) { - // Save some test settings. 
- tmpDir, fname := settingsDirAndFile(t) - defer os.RemoveAll(tmpDir) - a, b := defaultConfig(), defaultConfig() - a.Focus, b.Focus = "foo", "bar" - s := &settings{ - Configs: []namedConfig{ - {Name: "A", config: a}, - {Name: "B", config: b}, - }, - } - if err := writeSettings(fname, s); err != nil { - t.Fatal("error writing settings", err) - } - - pageURL, _ := url.Parse("/top?f=foo") - menu := configMenu(fname, *pageURL) - want := []configMenuEntry{ - {Name: "Default", URL: "/top", Current: false, UserConfig: false}, - {Name: "A", URL: "/top?f=foo", Current: true, UserConfig: true}, - {Name: "B", URL: "/top?f=bar", Current: false, UserConfig: true}, - } - if !reflect.DeepEqual(menu, want) { - t.Errorf("ConfigMenu returned %v; want %v", menu, want) - } -} - -func TestEditConfig(t *testing.T) { - tmpDir, fname := settingsDirAndFile(t) - defer os.RemoveAll(tmpDir) - - type testConfig struct { - name string - focus string - hide string - } - type testCase struct { - remove bool - request string - expect []testConfig - } - for _, c := range []testCase{ - // Create setting c1 - {false, "/?config=c1&f=foo", []testConfig{ - {"c1", "foo", ""}, - }}, - // Create setting c2 - {false, "/?config=c2&h=bar", []testConfig{ - {"c1", "foo", ""}, - {"c2", "", "bar"}, - }}, - // Overwrite c1 - {false, "/?config=c1&f=baz", []testConfig{ - {"c1", "baz", ""}, - {"c2", "", "bar"}, - }}, - // Delete c2 - {true, "c2", []testConfig{ - {"c1", "baz", ""}, - }}, - } { - if c.remove { - if err := removeConfig(fname, c.request); err != nil { - t.Errorf("error removing config %s: %v", c.request, err) - continue - } - } else { - req, err := url.Parse(c.request) - if err != nil { - t.Errorf("error parsing request %q: %v", c.request, err) - continue - } - if err := setConfig(fname, *req); err != nil { - t.Errorf("error saving request %q: %v", c.request, err) - continue - } - } - - // Check resulting settings. - s, err := readSettings(fname) - if err != nil { - t.Errorf("error reading settings after applying %q: %v", c.request, err) - continue - } - // Convert to a list that can be compared to c.expect - got := make([]testConfig, len(s.Configs)) - for i, c := range s.Configs { - got[i] = testConfig{c.Name, c.Focus, c.Hide} - } - if !reflect.DeepEqual(got, c.expect) { - t.Errorf("Settings after applying %q = %v; want %v", c.request, got, c.expect) - } - } -} - -func TestAssign(t *testing.T) { - baseConfig := currentConfig() - defer setCurrentConfig(baseConfig) - - // Test assigning to a simple field. - if err := configure("nodecount", "20"); err != nil { - t.Errorf("error setting nodecount: %v", err) - } - if n := currentConfig().NodeCount; n != 20 { - t.Errorf("incorrect nodecount; expecting 20, got %d", n) - } - - // Test assignment to a group field. - if err := configure("granularity", "files"); err != nil { - t.Errorf("error setting granularity: %v", err) - } - if g := currentConfig().Granularity; g != "files" { - t.Errorf("incorrect granularity; expecting %v, got %v", "files", g) - } - - // Test assignment to one choice of a group field. 
-	if err := configure("lines", "t"); err != nil {
-		t.Errorf("error setting lines: %v", err)
-	}
-	if g := currentConfig().Granularity; g != "lines" {
-		t.Errorf("incorrect granularity; expecting %v, got %v", "lines", g)
-	}
-
-	// Test assignment to invalid choice,
-	if err := configure("granularity", "cheese"); err == nil {
-		t.Errorf("allowed assignment of invalid granularity")
-	}
-}
diff --git a/internal/pprof/driver/svg.go b/internal/pprof/driver/svg.go
deleted file mode 100644
index 62767e726d5..00000000000
--- a/internal/pprof/driver/svg.go
+++ /dev/null
@@ -1,80 +0,0 @@
-// Copyright 2014 Google Inc. All Rights Reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package driver
-
-import (
-	"regexp"
-	"strings"
-
-	"github.com/google/pprof/third_party/svgpan"
-)
-
-var (
-	viewBox  = regexp.MustCompile(`<svg\s*width="[^"]+"\s*height="[^"]+"\s*viewBox="[^"]+"`)
-	graphID  = regexp.MustCompile(`<g id="graph\d"`)
-	svgClose = regexp.MustCompile(`</svg>`)
-)
-
-// massageSVG enhances the SVG output from DOT to provide better
-// panning inside a web browser. It uses the svgpan library, which is
-// embedded into the svgpan.JSSource variable.
-func massageSVG(svg string) string {
-	// Work around for dot bug which misses quoting some ampersands,
-	// resulting on unparsable SVG.
-	svg = strings.Replace(svg, "&;", "&amp;;", -1)
-
-	// Dot's SVG output is
-	//
-	//    <svg width="___" height="___"
-	//     viewBox="___" xmlns=...>
-	//    <g id="graph0" transform="...">
-	//    ...
-	//    </g>
-	//    </svg>
-	//
-	// Change it to
-	//
-	//    <svg width="100%" height="100%"
-	//     xmlns=...>
-
-	//    <script type="text/ecmascript"><![CDATA[` ..$(svgpan.JSSource)... `]]></script>`
-	//    <g id="viewport" transform="translate(0,0)">
-	//    <g id="graph0" transform="...">
-	//    ...
-	//    </g>
-	//    </g>
-	//    </svg>
-
-	if loc := viewBox.FindStringIndex(svg); loc != nil {
-		svg = svg[:loc[0]] +
-			`<svg width="100%" height="100%"` +
-			svg[loc[1]:]
-	}
-
-	if loc := graphID.FindStringIndex(svg); loc != nil {
-		svg = svg[:loc[0]] +
-			`<script type="text/ecmascript"><![CDATA[` + string(svgpan.JSSource) + `]]></script>` +
-			`<g id="viewport" transform="scale(0.5,0.5) translate(0,0)">` +
-			svg[loc[0]:]
-	}
-
-	if loc := svgClose.FindStringIndex(svg); loc != nil {
-		svg = svg[:loc[0]] +
-			`</g>` +
-			svg[loc[0]:]
-	}
-
-	return svg
-}
diff --git a/internal/pprof/driver/tempfile.go b/internal/pprof/driver/tempfile.go
deleted file mode 100644
index b6c8776ff83..00000000000
--- a/internal/pprof/driver/tempfile.go
+++ /dev/null
@@ -1,60 +0,0 @@
-// Copyright 2014 Google Inc. All Rights Reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package driver
-
-import (
-	"fmt"
-	"os"
-	"path/filepath"
-	"sync"
-)
-
-// newTempFile returns a new output file in dir with the provided prefix and suffix.
-func newTempFile(dir, prefix, suffix string) (*os.File, error) { - for index := 1; index < 10000; index++ { - switch f, err := os.OpenFile(filepath.Join(dir, fmt.Sprintf("%s%03d%s", prefix, index, suffix)), os.O_RDWR|os.O_CREATE|os.O_EXCL, 0666); { - case err == nil: - return f, nil - case !os.IsExist(err): - return nil, err - } - } - // Give up - return nil, fmt.Errorf("could not create file of the form %s%03d%s", prefix, 1, suffix) -} - -var tempFiles []string -var tempFilesMu = sync.Mutex{} - -// deferDeleteTempFile marks a file to be deleted by next call to Cleanup() -func deferDeleteTempFile(path string) { - tempFilesMu.Lock() - tempFiles = append(tempFiles, path) - tempFilesMu.Unlock() -} - -// cleanupTempFiles removes any temporary files selected for deferred cleaning. -func cleanupTempFiles() error { - tempFilesMu.Lock() - defer tempFilesMu.Unlock() - var lastErr error - for _, f := range tempFiles { - if err := os.Remove(f); err != nil { - lastErr = err - } - } - tempFiles = nil - return lastErr -} diff --git a/internal/pprof/driver/tempfile_test.go b/internal/pprof/driver/tempfile_test.go deleted file mode 100644 index 70043534c1b..00000000000 --- a/internal/pprof/driver/tempfile_test.go +++ /dev/null @@ -1,55 +0,0 @@ -package driver - -import ( - "os" - "sync" - "testing" -) - -func TestNewTempFile(t *testing.T) { - const n = 100 - // Line up ready to execute goroutines with a read-write lock. - var mu sync.RWMutex - mu.Lock() - var wg sync.WaitGroup - errc := make(chan error, n) - for i := 0; i < n; i++ { - wg.Add(1) - go func() { - mu.RLock() - defer mu.RUnlock() - defer wg.Done() - f, err := newTempFile(os.TempDir(), "profile", ".tmp") - errc <- err - deferDeleteTempFile(f.Name()) - f.Close() - }() - } - // Start the file creation race. - mu.Unlock() - // Wait for the goroutines to finish. 
- wg.Wait() - - for i := 0; i < n; i++ { - if err := <-errc; err != nil { - t.Fatalf("newTempFile(): got %v, want no error", err) - } - } - if len(tempFiles) != n { - t.Errorf("len(tempFiles): got %d, want %d", len(tempFiles), n) - } - names := map[string]bool{} - for _, name := range tempFiles { - if names[name] { - t.Errorf("got temp file %s created multiple times", name) - break - } - names[name] = true - } - if err := cleanupTempFiles(); err != nil { - t.Errorf("cleanupTempFiles(): got error %v, want no error", err) - } - if len(tempFiles) != 0 { - t.Errorf("len(tempFiles) after the cleanup: got %d, want 0", len(tempFiles)) - } -} diff --git a/internal/pprof/driver/testdata/cppbench.contention b/internal/pprof/driver/testdata/cppbench.contention deleted file mode 100644 index 66a64c950c5..00000000000 --- a/internal/pprof/driver/testdata/cppbench.contention +++ /dev/null @@ -1,24 +0,0 @@ ---- contentionz 1 --- -cycles/second = 3201000000 -sampling period = 100 -ms since reset = 16502830 -discarded samples = 0 - 19490304 27 @ 0xbccc97 0xc61202 0x42ed5f 0x42edc1 0x42e15a 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e - 768 1 @ 0xbccc97 0xa42dc7 0xa456e4 0x7fcdc2ff214e - 5760 2 @ 0xbccc97 0xb82b73 0xb82bcb 0xb87eab 0xb8814c 0x4e969d 0x4faa17 0x4fc5f6 0x4fd028 0x4fd230 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e - 569088 1 @ 0xbccc97 0xb82b73 0xb82bcb 0xb87f08 0xb8814c 0x42ed5f 0x42edc1 0x42e15a 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e - 2432 1 @ 0xbccc97 0xb82b73 0xb82bcb 0xb87eab 0xb8814c 0x7aa74c 0x7ab844 0x7ab914 0x79e9e9 0x79e326 0x4d299e 0x4d4b7b 0x4b7be8 0x4b7ff1 0x4d2dae 0x79e80a - 2034816 3 @ 0xbccc97 0xb82f0f 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e ---- Memory map: --- - 00400000-00fcb000: cppbench_server_main - 7fcdc231e000-7fcdc2321000: /libnss_cache-2.15.so - 7fcdc2522000-7fcdc252e000: /libnss_files-2.15.so - 7fcdc272f000-7fcdc28dd000: /libc-2.15.so - 7fcdc2ae7000-7fcdc2be2000: /libm-2.15.so - 7fcdc2de3000-7fcdc2dea000: /librt-2.15.so - 7fcdc2feb000-7fcdc3003000: /libpthread-2.15.so - 7fcdc3208000-7fcdc320a000: /libdl-2.15.so - 7fcdc340c000-7fcdc3415000: /libcrypt-2.15.so - 7fcdc3645000-7fcdc3669000: /ld-2.15.so - 7fff86bff000-7fff86c00000: [vdso] - ffffffffff600000-ffffffffff601000: [vsyscall] diff --git a/internal/pprof/driver/testdata/cppbench.cpu b/internal/pprof/driver/testdata/cppbench.cpu deleted file mode 100644 index 95c22e1e8d8..00000000000 Binary files a/internal/pprof/driver/testdata/cppbench.cpu and /dev/null differ diff --git a/internal/pprof/driver/testdata/cppbench.small.contention b/internal/pprof/driver/testdata/cppbench.small.contention deleted file mode 100644 index 230cd90200c..00000000000 --- a/internal/pprof/driver/testdata/cppbench.small.contention +++ /dev/null @@ -1,19 +0,0 @@ ---- contentionz 1 --- -cycles/second = 3201000000 -sampling period = 100 -ms since reset = 16502830 -discarded samples = 0 - 100 10 @ 0xbccc97 0xc61202 0x42ed5f 0x42edc1 0x42e15a 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e ---- Memory map: --- - 00400000-00fcb000: cppbench_server_main - 7fcdc231e000-7fcdc2321000: /libnss_cache-2.15.so - 7fcdc2522000-7fcdc252e000: /libnss_files-2.15.so - 7fcdc272f000-7fcdc28dd000: /libc-2.15.so - 7fcdc2ae7000-7fcdc2be2000: /libm-2.15.so - 7fcdc2de3000-7fcdc2dea000: /librt-2.15.so - 7fcdc2feb000-7fcdc3003000: /libpthread-2.15.so - 7fcdc3208000-7fcdc320a000: 
/libdl-2.15.so - 7fcdc340c000-7fcdc3415000: /libcrypt-2.15.so - 7fcdc3645000-7fcdc3669000: /ld-2.15.so - 7fff86bff000-7fff86c00000: [vdso] - ffffffffff600000-ffffffffff601000: [vsyscall] diff --git a/internal/pprof/driver/testdata/file1000.src b/internal/pprof/driver/testdata/file1000.src deleted file mode 100644 index b53eeca5ecb..00000000000 --- a/internal/pprof/driver/testdata/file1000.src +++ /dev/null @@ -1,17 +0,0 @@ -line1 -line2 -line3 -line4 -line5 -line6 -line7 -line8 -line9 -line0 -line1 -line2 -line3 -line4 -line5 - - diff --git a/internal/pprof/driver/testdata/file2000.src b/internal/pprof/driver/testdata/file2000.src deleted file mode 100644 index b53eeca5ecb..00000000000 --- a/internal/pprof/driver/testdata/file2000.src +++ /dev/null @@ -1,17 +0,0 @@ -line1 -line2 -line3 -line4 -line5 -line6 -line7 -line8 -line9 -line0 -line1 -line2 -line3 -line4 -line5 - - diff --git a/internal/pprof/driver/testdata/file3000.src b/internal/pprof/driver/testdata/file3000.src deleted file mode 100644 index b53eeca5ecb..00000000000 --- a/internal/pprof/driver/testdata/file3000.src +++ /dev/null @@ -1,17 +0,0 @@ -line1 -line2 -line3 -line4 -line5 -line6 -line7 -line8 -line9 -line0 -line1 -line2 -line3 -line4 -line5 - - diff --git a/internal/pprof/driver/testdata/go.crc32.cpu b/internal/pprof/driver/testdata/go.crc32.cpu deleted file mode 100644 index ce08313de05..00000000000 Binary files a/internal/pprof/driver/testdata/go.crc32.cpu and /dev/null differ diff --git a/internal/pprof/driver/testdata/go.nomappings.crash b/internal/pprof/driver/testdata/go.nomappings.crash deleted file mode 100644 index 4915d5a2aec..00000000000 Binary files a/internal/pprof/driver/testdata/go.nomappings.crash and /dev/null differ diff --git a/internal/pprof/driver/testdata/pprof.contention.cum.files.dot b/internal/pprof/driver/testdata/pprof.contention.cum.files.dot deleted file mode 100644 index eedfacf9d6a..00000000000 --- a/internal/pprof/driver/testdata/pprof.contention.cum.files.dot +++ /dev/null @@ -1,10 +0,0 @@ -digraph "unnamed" { -node [style=filled fillcolor="#f8f8f8"] -subgraph cluster_L { "Build ID: buildid-contention" [shape=box fontsize=16 label="Build ID: buildid-contention\lComment #1\lComment #2\lType: delay\lShowing nodes accounting for 149.50ms, 100% of 149.50ms total\l\lSee https://git.io/JfYMW for how to read the graph\l"] } -N1 [label="file3000.src\n32.77ms (21.92%)\nof 149.50ms (100%)" id="node1" fontsize=20 shape=box tooltip="testdata/file3000.src (149.50ms)" color="#b20000" fillcolor="#edd5d5"] -N2 [label="file1000.src\n51.20ms (34.25%)" id="node2" fontsize=23 shape=box tooltip="testdata/file1000.src (51.20ms)" color="#b23100" fillcolor="#eddbd5"] -N3 [label="file2000.src\n65.54ms (43.84%)\nof 75.78ms (50.68%)" id="node3" fontsize=24 shape=box tooltip="testdata/file2000.src (75.78ms)" color="#b22000" fillcolor="#edd9d5"] -N1 -> N3 [label=" 75.78ms" weight=51 penwidth=3 color="#b22000" tooltip="testdata/file3000.src -> testdata/file2000.src (75.78ms)" labeltooltip="testdata/file3000.src -> testdata/file2000.src (75.78ms)"] -N1 -> N2 [label=" 40.96ms" weight=28 penwidth=2 color="#b23900" tooltip="testdata/file3000.src -> testdata/file1000.src (40.96ms)" labeltooltip="testdata/file3000.src -> testdata/file1000.src (40.96ms)"] -N3 -> N2 [label=" 10.24ms" weight=7 color="#b29775" tooltip="testdata/file2000.src -> testdata/file1000.src (10.24ms)" labeltooltip="testdata/file2000.src -> testdata/file1000.src (10.24ms)"] -} diff --git 
a/internal/pprof/driver/testdata/pprof.contention.flat.addresses.dot.focus.ignore b/internal/pprof/driver/testdata/pprof.contention.flat.addresses.dot.focus.ignore deleted file mode 100644 index dcd0920a140..00000000000 --- a/internal/pprof/driver/testdata/pprof.contention.flat.addresses.dot.focus.ignore +++ /dev/null @@ -1,9 +0,0 @@ -digraph "unnamed" { -node [style=filled fillcolor="#f8f8f8"] -subgraph cluster_L { "Build ID: buildid-contention" [shape=box fontsize=16 label="Build ID: buildid-contention\lComment #1\lComment #2\lType: delay\lActive filters:\l focus=[X1]000\l ignore=[X3]002\lShowing nodes accounting for 40.96ms, 27.40% of 149.50ms total\l\lSee https://git.io/JfYMW for how to read the graph\l"] } -N1 [label="0000000000001000\nline1000\nfile1000.src:1\n40.96ms (27.40%)" id="node1" fontsize=24 shape=box tooltip="0000000000001000 line1000 testdata/file1000.src:1 (40.96ms)" color="#b23900" fillcolor="#edddd5"] -N2 [label="0000000000003001\nline3000\nfile3000.src:5\n0 of 40.96ms (27.40%)" id="node2" fontsize=8 shape=box tooltip="0000000000003001 line3000 testdata/file3000.src:5 (40.96ms)" color="#b23900" fillcolor="#edddd5"] -N3 [label="0000000000003001\nline3001\nfile3000.src:3\n0 of 40.96ms (27.40%)" id="node3" fontsize=8 shape=box tooltip="0000000000003001 line3001 testdata/file3000.src:3 (40.96ms)" color="#b23900" fillcolor="#edddd5"] -N2 -> N3 [label=" 40.96ms\n (inline)" weight=28 penwidth=2 color="#b23900" tooltip="0000000000003001 line3000 testdata/file3000.src:5 -> 0000000000003001 line3001 testdata/file3000.src:3 (40.96ms)" labeltooltip="0000000000003001 line3000 testdata/file3000.src:5 -> 0000000000003001 line3001 testdata/file3000.src:3 (40.96ms)"] -N3 -> N1 [label=" 40.96ms" weight=28 penwidth=2 color="#b23900" tooltip="0000000000003001 line3001 testdata/file3000.src:3 -> 0000000000001000 line1000 testdata/file1000.src:1 (40.96ms)" labeltooltip="0000000000003001 line3001 testdata/file3000.src:3 -> 0000000000001000 line1000 testdata/file1000.src:1 (40.96ms)"] -} diff --git a/internal/pprof/driver/testdata/pprof.cpu.addresses.traces b/internal/pprof/driver/testdata/pprof.cpu.addresses.traces deleted file mode 100644 index 742b123e115..00000000000 --- a/internal/pprof/driver/testdata/pprof.cpu.addresses.traces +++ /dev/null @@ -1,32 +0,0 @@ -File: testbinary -Type: cpu -Duration: 10s, Total samples = 1.12s (11.20%) ------------+------------------------------------------------------- - key1: tag1 - key2: tag1 - 1s 0000000000001000 line1000 testdata/file1000.src:1 - 0000000000002000 line2001 testdata/file2000.src:9 (inline) - 0000000000002000 line2000 testdata/file2000.src:4 - 0000000000003000 line3002 testdata/file3000.src:2 (inline) - 0000000000003000 line3001 testdata/file3000.src:5 (inline) - 0000000000003000 line3000 testdata/file3000.src:6 ------------+------------------------------------------------------- - key1: tag2 - key3: tag2 - 100ms 0000000000001000 line1000 testdata/file1000.src:1 - 0000000000003001 line3001 testdata/file3000.src:8 (inline) - 0000000000003001 line3000 testdata/file3000.src:9 ------------+------------------------------------------------------- - key1: tag3 - key2: tag2 - 10ms 0000000000002000 line2001 testdata/file2000.src:9 (inline) - 0000000000002000 line2000 testdata/file2000.src:4 - 0000000000003002 line3002 testdata/file3000.src:5 (inline) - 0000000000003002 line3000 testdata/file3000.src:9 ------------+------------------------------------------------------- - key1: tag4 - key2: tag1 - 10ms 0000000000003000 line3002 
testdata/file3000.src:2 (inline) - 0000000000003000 line3001 testdata/file3000.src:5 (inline) - 0000000000003000 line3000 testdata/file3000.src:6 ------------+------------------------------------------------------- diff --git a/internal/pprof/driver/testdata/pprof.cpu.call_tree.callgrind b/internal/pprof/driver/testdata/pprof.cpu.call_tree.callgrind deleted file mode 100644 index e2286f631a6..00000000000 --- a/internal/pprof/driver/testdata/pprof.cpu.call_tree.callgrind +++ /dev/null @@ -1,99 +0,0 @@ -positions: instr line -events: cpu(ms) - -ob=(1) /path/to/testbinary -fl=(1) testdata/file1000.src -fn=(1) line1000 -0x1000 1 1000 -* 1 100 - -ob=(1) -fl=(2) testdata/file2000.src -fn=(2) line2001 -+4096 9 10 - -ob=(1) -fl=(3) testdata/file3000.src -fn=(3) line3002 -+4096 2 10 -cfl=(2) -cfn=(4) line2000 [1/2] -calls=0 * 4 -* * 1000 - -ob=(1) -fl=(2) -fn=(5) line2000 --4096 4 0 -cfl=(2) -cfn=(6) line2001 [2/2] -calls=0 -4096 9 -* * 1000 -* 4 0 -cfl=(2) -cfn=(7) line2001 [1/2] -calls=0 * 9 -* * 10 - -ob=(1) -fl=(2) -fn=(2) -* 9 0 -cfl=(1) -cfn=(8) line1000 [1/2] -calls=0 -4096 1 -* * 1000 - -ob=(1) -fl=(3) -fn=(9) line3000 -+4096 6 0 -cfl=(3) -cfn=(10) line3001 [1/2] -calls=0 +4096 5 -* * 1010 - -ob=(1) -fl=(3) -fn=(11) line3001 -* 5 0 -cfl=(3) -cfn=(12) line3002 [1/2] -calls=0 * 2 -* * 1010 - -ob=(1) -fl=(3) -fn=(9) -+1 9 0 -cfl=(3) -cfn=(13) line3001 [2/2] -calls=0 +1 8 -* * 100 - -ob=(1) -fl=(3) -fn=(11) -* 8 0 -cfl=(1) -cfn=(14) line1000 [2/2] -calls=0 -8193 1 -* * 100 - -ob=(1) -fl=(3) -fn=(9) -+1 9 0 -cfl=(3) -cfn=(15) line3002 [2/2] -calls=0 +1 5 -* * 10 - -ob=(1) -fl=(3) -fn=(3) -* 5 0 -cfl=(2) -cfn=(16) line2000 [2/2] -calls=0 -4098 4 -* * 10 diff --git a/internal/pprof/driver/testdata/pprof.cpu.callgrind b/internal/pprof/driver/testdata/pprof.cpu.callgrind deleted file mode 100644 index 0b0499638c0..00000000000 --- a/internal/pprof/driver/testdata/pprof.cpu.callgrind +++ /dev/null @@ -1,88 +0,0 @@ -positions: instr line -events: cpu(ms) - -ob=(1) /path/to/testbinary -fl=(1) testdata/file1000.src -fn=(1) line1000 -0x1000 1 1100 - -ob=(1) -fl=(2) testdata/file2000.src -fn=(2) line2001 -+4096 9 10 -cfl=(1) -cfn=(1) -calls=0 * 1 -* * 1000 - -ob=(1) -fl=(3) testdata/file3000.src -fn=(3) line3002 -+4096 2 10 -cfl=(2) -cfn=(4) line2000 -calls=0 * 4 -* * 1000 - -ob=(1) -fl=(2) -fn=(4) --4096 4 0 -cfl=(2) -cfn=(2) -calls=0 -4096 9 -* * 1010 - -ob=(1) -fl=(3) -fn=(5) line3000 -+4096 6 0 -cfl=(3) -cfn=(6) line3001 -calls=0 +4096 5 -* * 1010 - -ob=(1) -fl=(3) -fn=(6) -* 5 0 -cfl=(3) -cfn=(3) -calls=0 * 2 -* * 1010 - -ob=(1) -fl=(3) -fn=(5) -+1 9 0 -cfl=(3) -cfn=(6) -calls=0 +1 8 -* * 100 - -ob=(1) -fl=(3) -fn=(6) -* 8 0 -cfl=(1) -cfn=(1) -calls=0 -8193 1 -* * 100 - -ob=(1) -fl=(3) -fn=(5) -+1 9 0 -cfl=(3) -cfn=(3) -calls=0 +1 5 -* * 10 - -ob=(1) -fl=(3) -fn=(3) -* 5 0 -cfl=(2) -cfn=(4) -calls=0 -4098 4 -* * 10 diff --git a/internal/pprof/driver/testdata/pprof.cpu.comments b/internal/pprof/driver/testdata/pprof.cpu.comments deleted file mode 100644 index e6d9824e1b6..00000000000 --- a/internal/pprof/driver/testdata/pprof.cpu.comments +++ /dev/null @@ -1 +0,0 @@ -some-comment diff --git a/internal/pprof/driver/testdata/pprof.cpu.cum.lines.text.focus.hide b/internal/pprof/driver/testdata/pprof.cpu.cum.lines.text.focus.hide deleted file mode 100644 index f0d928d76f6..00000000000 --- a/internal/pprof/driver/testdata/pprof.cpu.cum.lines.text.focus.hide +++ /dev/null @@ -1,8 +0,0 @@ -Active filters: - focus=[12]00 - hide=line[X3]0 -Showing nodes accounting for 1.11s, 99.11% of 1.12s total - flat flat% 
sum% cum cum% - 1.10s 98.21% 98.21% 1.10s 98.21% line1000 testdata/file1000.src:1 - 0 0% 98.21% 1.01s 90.18% line2000 testdata/file2000.src:4 - 0.01s 0.89% 99.11% 1.01s 90.18% line2001 testdata/file2000.src:9 (inline) diff --git a/internal/pprof/driver/testdata/pprof.cpu.cum.lines.text.hide b/internal/pprof/driver/testdata/pprof.cpu.cum.lines.text.hide deleted file mode 100644 index bf503a57dba..00000000000 --- a/internal/pprof/driver/testdata/pprof.cpu.cum.lines.text.hide +++ /dev/null @@ -1,7 +0,0 @@ -Active filters: - hide=line[X3]0 -Showing nodes accounting for 1.11s, 99.11% of 1.12s total - flat flat% sum% cum cum% - 1.10s 98.21% 98.21% 1.10s 98.21% line1000 testdata/file1000.src:1 - 0 0% 98.21% 1.01s 90.18% line2000 testdata/file2000.src:4 - 0.01s 0.89% 99.11% 1.01s 90.18% line2001 testdata/file2000.src:9 (inline) diff --git a/internal/pprof/driver/testdata/pprof.cpu.cum.lines.text.show b/internal/pprof/driver/testdata/pprof.cpu.cum.lines.text.show deleted file mode 100644 index 7604cb8d7b0..00000000000 --- a/internal/pprof/driver/testdata/pprof.cpu.cum.lines.text.show +++ /dev/null @@ -1,7 +0,0 @@ -Active filters: - show=[12]00 -Showing nodes accounting for 1.11s, 99.11% of 1.12s total - flat flat% sum% cum cum% - 1.10s 98.21% 98.21% 1.10s 98.21% line1000 testdata/file1000.src:1 - 0 0% 98.21% 1.01s 90.18% line2000 testdata/file2000.src:4 - 0.01s 0.89% 99.11% 1.01s 90.18% line2001 testdata/file2000.src:9 (inline) diff --git a/internal/pprof/driver/testdata/pprof.cpu.cum.lines.topproto.hide b/internal/pprof/driver/testdata/pprof.cpu.cum.lines.topproto.hide deleted file mode 100644 index 94b9be83df2..00000000000 --- a/internal/pprof/driver/testdata/pprof.cpu.cum.lines.topproto.hide +++ /dev/null @@ -1,5 +0,0 @@ -Active filters: - hide=mangled[X3]0 -Showing nodes accounting for 1s, 100% of 1s total - flat flat% sum% cum cum% - 1s 100% 100% 1s 100% mangled1000 testdata/file1000.src:1 diff --git a/internal/pprof/driver/testdata/pprof.cpu.cum.lines.tree.show_from b/internal/pprof/driver/testdata/pprof.cpu.cum.lines.tree.show_from deleted file mode 100644 index 112b49b383d..00000000000 --- a/internal/pprof/driver/testdata/pprof.cpu.cum.lines.tree.show_from +++ /dev/null @@ -1,16 +0,0 @@ -Active filters: - show_from=line2 -Showing nodes accounting for 1.01s, 90.18% of 1.12s total -----------------------------------------------------------+------------- - flat flat% sum% cum cum% calls calls% + context -----------------------------------------------------------+------------- - 0 0% 0% 1.01s 90.18% | line2000 testdata/file2000.src:4 - 1.01s 100% | line2001 testdata/file2000.src:9 (inline) -----------------------------------------------------------+------------- - 1.01s 100% | line2000 testdata/file2000.src:4 (inline) - 0.01s 0.89% 0.89% 1.01s 90.18% | line2001 testdata/file2000.src:9 - 1s 99.01% | line1000 testdata/file1000.src:1 -----------------------------------------------------------+------------- - 1s 100% | line2001 testdata/file2000.src:9 - 1s 89.29% 90.18% 1s 89.29% | line1000 testdata/file1000.src:1 -----------------------------------------------------------+------------- diff --git a/internal/pprof/driver/testdata/pprof.cpu.flat.addresses.disasm b/internal/pprof/driver/testdata/pprof.cpu.flat.addresses.disasm deleted file mode 100644 index 57987e8e2b0..00000000000 --- a/internal/pprof/driver/testdata/pprof.cpu.flat.addresses.disasm +++ /dev/null @@ -1,14 +0,0 @@ -Total: 1.12s -ROUTINE ======================== line1000 - 1.10s 1.10s (flat, cum) 98.21% of Total - 1.10s 1.10s 1000: 
instruction one ;line1000 file1000.src:1 - . . 1001: instruction two - . . 1002: instruction three ;line1000 file1000.src:2 - . . 1003: instruction four ;line1000 file1000.src:1 -ROUTINE ======================== line3000 - 10ms 1.12s (flat, cum) 100% of Total - 10ms 1.01s 3000: instruction one ;line3000 file3000.src:6 - . 100ms 3001: instruction two ;line3000 file3000.src:9 - . 10ms 3002: instruction three - . . 3003: instruction four ;line3000 file3000.src - . . 3004: instruction five diff --git a/internal/pprof/driver/testdata/pprof.cpu.flat.addresses.noinlines.text b/internal/pprof/driver/testdata/pprof.cpu.flat.addresses.noinlines.text deleted file mode 100644 index d53c44dad91..00000000000 --- a/internal/pprof/driver/testdata/pprof.cpu.flat.addresses.noinlines.text +++ /dev/null @@ -1,7 +0,0 @@ -Showing nodes accounting for 1.12s, 100% of 1.12s total -Dropped 1 node (cum <= 0.06s) - flat flat% sum% cum cum% - 1.10s 98.21% 98.21% 1.10s 98.21% 0000000000001000 line1000 testdata/file1000.src:1 - 0.01s 0.89% 99.11% 1.01s 90.18% 0000000000002000 line2000 testdata/file2000.src:4 - 0.01s 0.89% 100% 1.01s 90.18% 0000000000003000 line3000 testdata/file3000.src:6 - 0 0% 100% 0.10s 8.93% 0000000000003001 line3000 testdata/file3000.src:9 diff --git a/internal/pprof/driver/testdata/pprof.cpu.flat.addresses.weblist b/internal/pprof/driver/testdata/pprof.cpu.flat.addresses.weblist deleted file mode 100644 index 2eb6b7d4f04..00000000000 --- a/internal/pprof/driver/testdata/pprof.cpu.flat.addresses.weblist +++ /dev/null @@ -1,102 +0,0 @@ - - - - - -Pprof listing - - - - - -
[Deleted golden file: an HTML "Pprof listing" page for testbinary (Type: cpu, Duration: 10s, Total samples = 1.12s) containing annotated weblist output for line1000 (testdata/file1000.src: 1.10s flat/cum, 98.21%) and line3000 (testdata/file3000.src: 10ms flat, 1.12s cum, 100%). The HTML markup of this fixture is garbled in this rendering and is not reproduced here.]
- - - - diff --git a/internal/pprof/driver/testdata/pprof.cpu.flat.filefunctions.noinlines.text b/internal/pprof/driver/testdata/pprof.cpu.flat.filefunctions.noinlines.text deleted file mode 100644 index 88fb760759c..00000000000 --- a/internal/pprof/driver/testdata/pprof.cpu.flat.filefunctions.noinlines.text +++ /dev/null @@ -1,5 +0,0 @@ -Showing nodes accounting for 1.12s, 100% of 1.12s total - flat flat% sum% cum cum% - 1.10s 98.21% 98.21% 1.10s 98.21% line1000 testdata/file1000.src - 0.01s 0.89% 99.11% 1.01s 90.18% line2000 testdata/file2000.src - 0.01s 0.89% 100% 1.12s 100% line3000 testdata/file3000.src diff --git a/internal/pprof/driver/testdata/pprof.cpu.flat.functions.call_tree.dot b/internal/pprof/driver/testdata/pprof.cpu.flat.functions.call_tree.dot deleted file mode 100644 index ae57f6647ef..00000000000 --- a/internal/pprof/driver/testdata/pprof.cpu.flat.functions.call_tree.dot +++ /dev/null @@ -1,21 +0,0 @@ -digraph "testbinary" { -node [style=filled fillcolor="#f8f8f8"] -subgraph cluster_L { "File: testbinary" [shape=box fontsize=16 label="File: testbinary\lType: cpu\lDuration: 10s, Total samples = 1.12s (11.20%)\lShowing nodes accounting for 1.11s, 99.11% of 1.12s total\lDropped 3 nodes (cum <= 0.06s)\l\lSee https://git.io/JfYMW for how to read the graph\l" tooltip="testbinary"] } -N1 [label="line1000\n1s (89.29%)" id="node1" fontsize=24 shape=box tooltip="line1000 (1s)" color="#b20500" fillcolor="#edd6d5"] -N1_0 [label = "key1:tag1\nkey2:tag1" id="N1_0" fontsize=8 shape=box3d tooltip="1s"] -N1 -> N1_0 [label=" 1s" weight=100 tooltip="1s" labeltooltip="1s"] -N2 [label="line3000\n0 of 1.12s (100%)" id="node2" fontsize=8 shape=box tooltip="line3000 (1.12s)" color="#b20000" fillcolor="#edd5d5"] -N3 [label="line3001\n0 of 1.11s (99.11%)" id="node3" fontsize=8 shape=box tooltip="line3001 (1.11s)" color="#b20000" fillcolor="#edd5d5"] -N4 [label="line1000\n0.10s (8.93%)" id="node4" fontsize=14 shape=box tooltip="line1000 (0.10s)" color="#b28b62" fillcolor="#ede8e2"] -N4_0 [label = "key1:tag2\nkey3:tag2" id="N4_0" fontsize=8 shape=box3d tooltip="0.10s"] -N4 -> N4_0 [label=" 0.10s" weight=100 tooltip="0.10s" labeltooltip="0.10s"] -N5 [label="line3002\n0.01s (0.89%)\nof 1.01s (90.18%)" id="node5" fontsize=10 shape=box tooltip="line3002 (1.01s)" color="#b20500" fillcolor="#edd6d5"] -N6 [label="line2000\n0 of 1s (89.29%)" id="node6" fontsize=8 shape=box tooltip="line2000 (1s)" color="#b20500" fillcolor="#edd6d5"] -N7 [label="line2001\n0 of 1s (89.29%)" id="node7" fontsize=8 shape=box tooltip="line2001 (1s)" color="#b20500" fillcolor="#edd6d5"] -N2 -> N3 [label=" 1.11s\n (inline)" weight=100 penwidth=5 color="#b20000" tooltip="line3000 -> line3001 (1.11s)" labeltooltip="line3000 -> line3001 (1.11s)"] -N3 -> N5 [label=" 1.01s\n (inline)" weight=91 penwidth=5 color="#b20500" tooltip="line3001 -> line3002 (1.01s)" labeltooltip="line3001 -> line3002 (1.01s)"] -N6 -> N7 [label=" 1s\n (inline)" weight=90 penwidth=5 color="#b20500" tooltip="line2000 -> line2001 (1s)" labeltooltip="line2000 -> line2001 (1s)"] -N7 -> N1 [label=" 1s" weight=90 penwidth=5 color="#b20500" tooltip="line2001 -> line1000 (1s)" labeltooltip="line2001 -> line1000 (1s)"] -N5 -> N6 [label=" 1s" weight=90 penwidth=5 color="#b20500" tooltip="line3002 -> line2000 (1s)" labeltooltip="line3002 -> line2000 (1s)"] -N3 -> N4 [label=" 0.10s" weight=9 color="#b28b62" tooltip="line3001 -> line1000 (0.10s)" labeltooltip="line3001 -> line1000 (0.10s)"] -} diff --git a/internal/pprof/driver/testdata/pprof.cpu.flat.functions.dot 
b/internal/pprof/driver/testdata/pprof.cpu.flat.functions.dot deleted file mode 100644 index 4a812e45858..00000000000 --- a/internal/pprof/driver/testdata/pprof.cpu.flat.functions.dot +++ /dev/null @@ -1,20 +0,0 @@ -digraph "testbinary" { -node [style=filled fillcolor="#f8f8f8"] -subgraph cluster_L { "File: testbinary" [shape=box fontsize=16 label="File: testbinary\lType: cpu\lDuration: 10s, Total samples = 1.12s (11.20%)\lShowing nodes accounting for 1.12s, 100% of 1.12s total\l\lSee https://git.io/JfYMW for how to read the graph\l" tooltip="testbinary"] } -N1 [label="line1000\n1.10s (98.21%)" id="node1" fontsize=24 shape=box tooltip="line1000 (1.10s)" color="#b20000" fillcolor="#edd5d5"] -N1_0 [label = "key1:tag1\nkey2:tag1" id="N1_0" fontsize=8 shape=box3d tooltip="1s"] -N1 -> N1_0 [label=" 1s" weight=100 tooltip="1s" labeltooltip="1s"] -N1_1 [label = "key1:tag2\nkey3:tag2" id="N1_1" fontsize=8 shape=box3d tooltip="0.10s"] -N1 -> N1_1 [label=" 0.10s" weight=100 tooltip="0.10s" labeltooltip="0.10s"] -N2 [label="line3000\n0 of 1.12s (100%)" id="node2" fontsize=8 shape=box tooltip="line3000 (1.12s)" color="#b20000" fillcolor="#edd5d5"] -N3 [label="line3001\n0 of 1.11s (99.11%)" id="node3" fontsize=8 shape=box tooltip="line3001 (1.11s)" color="#b20000" fillcolor="#edd5d5"] -N4 [label="line3002\n0.01s (0.89%)\nof 1.02s (91.07%)" id="node4" fontsize=10 shape=box tooltip="line3002 (1.02s)" color="#b20400" fillcolor="#edd6d5"] -N5 [label="line2001\n0.01s (0.89%)\nof 1.01s (90.18%)" id="node5" fontsize=10 shape=box tooltip="line2001 (1.01s)" color="#b20500" fillcolor="#edd6d5"] -N6 [label="line2000\n0 of 1.01s (90.18%)" id="node6" fontsize=8 shape=box tooltip="line2000 (1.01s)" color="#b20500" fillcolor="#edd6d5"] -N2 -> N3 [label=" 1.11s\n (inline)" weight=100 penwidth=5 color="#b20000" tooltip="line3000 -> line3001 (1.11s)" labeltooltip="line3000 -> line3001 (1.11s)"] -N6 -> N5 [label=" 1.01s\n (inline)" weight=91 penwidth=5 color="#b20500" tooltip="line2000 -> line2001 (1.01s)" labeltooltip="line2000 -> line2001 (1.01s)"] -N3 -> N4 [label=" 1.01s\n (inline)" weight=91 penwidth=5 color="#b20500" tooltip="line3001 -> line3002 (1.01s)" labeltooltip="line3001 -> line3002 (1.01s)"] -N4 -> N6 [label=" 1.01s" weight=91 penwidth=5 color="#b20500" tooltip="line3002 -> line2000 (1.01s)" labeltooltip="line3002 -> line2000 (1.01s)"] -N5 -> N1 [label=" 1s" weight=90 penwidth=5 color="#b20500" tooltip="line2001 -> line1000 (1s)" labeltooltip="line2001 -> line1000 (1s)"] -N3 -> N1 [label=" 0.10s" weight=9 color="#b28b62" tooltip="line3001 -> line1000 (0.10s)" labeltooltip="line3001 -> line1000 (0.10s)"] -} diff --git a/internal/pprof/driver/testdata/pprof.cpu.flat.functions.noinlines.text b/internal/pprof/driver/testdata/pprof.cpu.flat.functions.noinlines.text deleted file mode 100644 index 493b4912de1..00000000000 --- a/internal/pprof/driver/testdata/pprof.cpu.flat.functions.noinlines.text +++ /dev/null @@ -1,5 +0,0 @@ -Showing nodes accounting for 1.12s, 100% of 1.12s total - flat flat% sum% cum cum% - 1.10s 98.21% 98.21% 1.10s 98.21% line1000 - 0.01s 0.89% 99.11% 1.01s 90.18% line2000 - 0.01s 0.89% 100% 1.12s 100% line3000 diff --git a/internal/pprof/driver/testdata/pprof.cpu.flat.functions.text b/internal/pprof/driver/testdata/pprof.cpu.flat.functions.text deleted file mode 100644 index 66e4189e0a2..00000000000 --- a/internal/pprof/driver/testdata/pprof.cpu.flat.functions.text +++ /dev/null @@ -1,8 +0,0 @@ -Showing nodes accounting for 1.12s, 100% of 1.12s total - flat flat% sum% cum cum% - 1.10s 98.21% 
98.21% 1.10s 98.21% line1000 - 0.01s 0.89% 99.11% 1.01s 90.18% line2001 (inline) - 0.01s 0.89% 100% 1.02s 91.07% line3002 (inline) - 0 0% 100% 1.01s 90.18% line2000 - 0 0% 100% 1.12s 100% line3000 - 0 0% 100% 1.11s 99.11% line3001 (inline) diff --git a/internal/pprof/driver/testdata/pprof.cpu.lines.topproto b/internal/pprof/driver/testdata/pprof.cpu.lines.topproto deleted file mode 100644 index 33bf6814a46..00000000000 --- a/internal/pprof/driver/testdata/pprof.cpu.lines.topproto +++ /dev/null @@ -1,3 +0,0 @@ -Showing nodes accounting for 1s, 100% of 1s total - flat flat% sum% cum cum% - 1s 100% 100% 1s 100% mangled1000 testdata/file1000.src:1 diff --git a/internal/pprof/driver/testdata/pprof.cpu.peek b/internal/pprof/driver/testdata/pprof.cpu.peek deleted file mode 100644 index 3b8a3537b46..00000000000 --- a/internal/pprof/driver/testdata/pprof.cpu.peek +++ /dev/null @@ -1,13 +0,0 @@ -Showing nodes accounting for 1.12s, 100% of 1.12s total -----------------------------------------------------------+------------- - flat flat% sum% cum cum% calls calls% + context -----------------------------------------------------------+------------- - 1.01s 100% | line2000 (inline) - 0.01s 0.89% 0.89% 1.01s 90.18% | line2001 - 1s 99.01% | line1000 -----------------------------------------------------------+------------- - 1.11s 100% | line3000 (inline) - 0 0% 0.89% 1.11s 99.11% | line3001 - 1.01s 90.99% | line3002 (inline) - 0.10s 9.01% | line1000 -----------------------------------------------------------+------------- diff --git a/internal/pprof/driver/testdata/pprof.cpu.tags b/internal/pprof/driver/testdata/pprof.cpu.tags deleted file mode 100644 index 5998b5ba5bf..00000000000 --- a/internal/pprof/driver/testdata/pprof.cpu.tags +++ /dev/null @@ -1,13 +0,0 @@ - key1: Total 1.1s - 1.0s (89.29%): tag1 - 100.0ms ( 8.93%): tag2 - 10.0ms ( 0.89%): tag3 - 10.0ms ( 0.89%): tag4 - - key2: Total 1.0s - 1.0s (99.02%): tag1 - 10.0ms ( 0.98%): tag2 - - key3: Total 100.0ms - 100.0ms ( 100%): tag2 - diff --git a/internal/pprof/driver/testdata/pprof.cpu.tags.focus.ignore b/internal/pprof/driver/testdata/pprof.cpu.tags.focus.ignore deleted file mode 100644 index 9b99d4368c2..00000000000 --- a/internal/pprof/driver/testdata/pprof.cpu.tags.focus.ignore +++ /dev/null @@ -1,6 +0,0 @@ - key1: Total 100.0ms - 100.0ms ( 100%): tag2 - - key3: Total 100.0ms - 100.0ms ( 100%): tag2 - diff --git a/internal/pprof/driver/testdata/pprof.cpu.traces b/internal/pprof/driver/testdata/pprof.cpu.traces deleted file mode 100644 index dd31e2e6228..00000000000 --- a/internal/pprof/driver/testdata/pprof.cpu.traces +++ /dev/null @@ -1,32 +0,0 @@ -File: testbinary -Type: cpu -Duration: 10s, Total samples = 1.12s (11.20%) ------------+------------------------------------------------------- - key1: tag1 - key2: tag1 - 1s line1000 - line2001 (inline) - line2000 - line3002 (inline) - line3001 (inline) - line3000 ------------+------------------------------------------------------- - key1: tag2 - key3: tag2 - 100ms line1000 - line3001 (inline) - line3000 ------------+------------------------------------------------------- - key1: tag3 - key2: tag2 - 10ms line2001 (inline) - line2000 - line3002 (inline) - line3000 ------------+------------------------------------------------------- - key1: tag4 - key2: tag1 - 10ms line3002 (inline) - line3001 (inline) - line3000 ------------+------------------------------------------------------- diff --git a/internal/pprof/driver/testdata/pprof.cpusmall.flat.addresses.tree 
b/internal/pprof/driver/testdata/pprof.cpusmall.flat.addresses.tree deleted file mode 100644 index 606db2b8876..00000000000 --- a/internal/pprof/driver/testdata/pprof.cpusmall.flat.addresses.tree +++ /dev/null @@ -1,17 +0,0 @@ -Showing nodes accounting for 4s, 100% of 4s total -Showing top 4 nodes out of 5 -----------------------------------------------------------+------------- - flat flat% sum% cum cum% calls calls% + context -----------------------------------------------------------+------------- - 1s 100% | 0000000000003000 [testbinary] - 1s 25.00% 25.00% 1s 25.00% | 0000000000001000 [testbinary] -----------------------------------------------------------+------------- - 1s 25.00% 50.00% 2s 50.00% | 0000000000003000 [testbinary] - 1s 50.00% | 0000000000001000 [testbinary] -----------------------------------------------------------+------------- - 1s 100% | 0000000000005000 [testbinary] - 1s 25.00% 75.00% 1s 25.00% | 0000000000004000 [testbinary] -----------------------------------------------------------+------------- - 1s 25.00% 100% 2s 50.00% | 0000000000005000 [testbinary] - 1s 50.00% | 0000000000004000 [testbinary] -----------------------------------------------------------+------------- diff --git a/internal/pprof/driver/testdata/pprof.heap.callgrind b/internal/pprof/driver/testdata/pprof.heap.callgrind deleted file mode 100644 index bfd96cb7deb..00000000000 --- a/internal/pprof/driver/testdata/pprof.heap.callgrind +++ /dev/null @@ -1,88 +0,0 @@ -positions: instr line -events: inuse_space(MB) - -ob= -fl=(1) testdata/file2000.src -fn=(1) line2001 -0x2000 2 62 -cfl=(2) testdata/file1000.src -cfn=(2) line1000 -calls=0 0x1000 1 -* * 0 - -ob= -fl=(3) testdata/file3000.src -fn=(3) line3002 -+4096 3 31 -cfl=(1) -cfn=(4) line2000 -calls=0 * 3 -* * 0 - -ob= -fl=(2) -fn=(2) --8192 1 4 - -ob= -fl=(1) -fn=(4) -+4096 3 0 -cfl=(1) -cfn=(1) -calls=0 +4096 2 -* * 63 - -ob= -fl=(3) -fn=(5) line3000 -+4096 4 0 -cfl=(3) -cfn=(6) line3001 -calls=0 +4096 2 -* * 32 - -ob= -fl=(3) -fn=(6) -* 2 0 -cfl=(3) -cfn=(3) -calls=0 * 3 -* * 32 - -ob= -fl=(3) -fn=(5) -+1 4 0 -cfl=(3) -cfn=(6) -calls=0 +1 2 -* * 3 - -ob= -fl=(3) -fn=(6) -* 2 0 -cfl=(2) -cfn=(2) -calls=0 -8193 1 -* * 3 - -ob= -fl=(3) -fn=(5) -+1 4 0 -cfl=(3) -cfn=(3) -calls=0 +1 3 -* * 62 - -ob= -fl=(3) -fn=(3) -* 3 0 -cfl=(1) -cfn=(4) -calls=0 -4098 3 -* * 62 diff --git a/internal/pprof/driver/testdata/pprof.heap.comments b/internal/pprof/driver/testdata/pprof.heap.comments deleted file mode 100644 index 6eca2fb7940..00000000000 --- a/internal/pprof/driver/testdata/pprof.heap.comments +++ /dev/null @@ -1,2 +0,0 @@ -comment -#hidden comment diff --git a/internal/pprof/driver/testdata/pprof.heap.cum.lines.tree.focus b/internal/pprof/driver/testdata/pprof.heap.cum.lines.tree.focus deleted file mode 100644 index 9d4ba72b1f9..00000000000 --- a/internal/pprof/driver/testdata/pprof.heap.cum.lines.tree.focus +++ /dev/null @@ -1,21 +0,0 @@ -Active filters: - focus=[24]00 -Showing nodes accounting for 62.50MB, 63.37% of 98.63MB total -Dropped 2 nodes (cum <= 4.93MB) -----------------------------------------------------------+------------- - flat flat% sum% cum cum% calls calls% + context -----------------------------------------------------------+------------- - 63.48MB 100% | line3002 testdata/file3000.src:3 - 0 0% 0% 63.48MB 64.36% | line2000 testdata/file2000.src:3 - 63.48MB 100% | line2001 testdata/file2000.src:2 (inline) -----------------------------------------------------------+------------- - 63.48MB 100% | line2000 testdata/file2000.src:3 (inline) 
- 62.50MB 63.37% 63.37% 63.48MB 64.36% | line2001 testdata/file2000.src:2 -----------------------------------------------------------+------------- - 0 0% 63.37% 63.48MB 64.36% | line3000 testdata/file3000.src:4 - 63.48MB 100% | line3002 testdata/file3000.src:3 (inline) -----------------------------------------------------------+------------- - 63.48MB 100% | line3000 testdata/file3000.src:4 (inline) - 0 0% 63.37% 63.48MB 64.36% | line3002 testdata/file3000.src:3 - 63.48MB 100% | line2000 testdata/file2000.src:3 -----------------------------------------------------------+------------- diff --git a/internal/pprof/driver/testdata/pprof.heap.cum.relative_percentages.tree.focus b/internal/pprof/driver/testdata/pprof.heap.cum.relative_percentages.tree.focus deleted file mode 100644 index c2d11838fe3..00000000000 --- a/internal/pprof/driver/testdata/pprof.heap.cum.relative_percentages.tree.focus +++ /dev/null @@ -1,21 +0,0 @@ -Active filters: - focus=[24]00 -Showing nodes accounting for 62.50MB, 98.46% of 63.48MB total -Dropped 2 nodes (cum <= 3.17MB) -----------------------------------------------------------+------------- - flat flat% sum% cum cum% calls calls% + context -----------------------------------------------------------+------------- - 63.48MB 100% | line3002 - 0 0% 0% 63.48MB 100% | line2000 - 63.48MB 100% | line2001 (inline) -----------------------------------------------------------+------------- - 63.48MB 100% | line2000 (inline) - 62.50MB 98.46% 98.46% 63.48MB 100% | line2001 -----------------------------------------------------------+------------- - 0 0% 98.46% 63.48MB 100% | line3000 - 63.48MB 100% | line3002 (inline) -----------------------------------------------------------+------------- - 63.48MB 100% | line3000 (inline) - 0 0% 98.46% 63.48MB 100% | line3002 - 63.48MB 100% | line2000 -----------------------------------------------------------+------------- diff --git a/internal/pprof/driver/testdata/pprof.heap.flat.files.seconds.text b/internal/pprof/driver/testdata/pprof.heap.flat.files.seconds.text deleted file mode 100644 index b9571ef4eca..00000000000 --- a/internal/pprof/driver/testdata/pprof.heap.flat.files.seconds.text +++ /dev/null @@ -1,2 +0,0 @@ -Showing nodes accounting for 0, 0% of 0 total - flat flat% sum% cum cum% diff --git a/internal/pprof/driver/testdata/pprof.heap.flat.files.text b/internal/pprof/driver/testdata/pprof.heap.flat.files.text deleted file mode 100644 index fd536df5736..00000000000 --- a/internal/pprof/driver/testdata/pprof.heap.flat.files.text +++ /dev/null @@ -1,5 +0,0 @@ -Showing nodes accounting for 93.75MB, 95.05% of 98.63MB total -Dropped 1 node (cum <= 4.93MB) - flat flat% sum% cum cum% - 62.50MB 63.37% 63.37% 63.48MB 64.36% testdata/file2000.src - 31.25MB 31.68% 95.05% 98.63MB 100% testdata/file3000.src diff --git a/internal/pprof/driver/testdata/pprof.heap.flat.files.text.focus b/internal/pprof/driver/testdata/pprof.heap.flat.files.text.focus deleted file mode 100644 index 20a503f9b43..00000000000 --- a/internal/pprof/driver/testdata/pprof.heap.flat.files.text.focus +++ /dev/null @@ -1,8 +0,0 @@ -Active filters: - focus=[12]00 - taghide=[X3]00 -Showing nodes accounting for 67.38MB, 68.32% of 98.63MB total - flat flat% sum% cum cum% - 62.50MB 63.37% 63.37% 63.48MB 64.36% testdata/file2000.src - 4.88MB 4.95% 68.32% 4.88MB 4.95% testdata/file1000.src - 0 0% 68.32% 67.38MB 68.32% testdata/file3000.src diff --git a/internal/pprof/driver/testdata/pprof.heap.flat.inuse_objects.text 
b/internal/pprof/driver/testdata/pprof.heap.flat.inuse_objects.text deleted file mode 100644 index 929461a3c1e..00000000000 --- a/internal/pprof/driver/testdata/pprof.heap.flat.inuse_objects.text +++ /dev/null @@ -1,8 +0,0 @@ -Showing nodes accounting for 150, 100% of 150 total - flat flat% sum% cum cum% - 80 53.33% 53.33% 130 86.67% line3002 (inline) - 40 26.67% 80.00% 50 33.33% line2001 (inline) - 30 20.00% 100% 30 20.00% line1000 - 0 0% 100% 50 33.33% line2000 - 0 0% 100% 150 100% line3000 - 0 0% 100% 110 73.33% line3001 (inline) diff --git a/internal/pprof/driver/testdata/pprof.heap.flat.inuse_space.dot.focus b/internal/pprof/driver/testdata/pprof.heap.flat.inuse_space.dot.focus deleted file mode 100644 index c1d3a8e5930..00000000000 --- a/internal/pprof/driver/testdata/pprof.heap.flat.inuse_space.dot.focus +++ /dev/null @@ -1,13 +0,0 @@ -digraph "unnamed" { -node [style=filled fillcolor="#f8f8f8"] -subgraph cluster_L { "Build ID: buildid" [shape=box fontsize=16 label="Build ID: buildid\lcomment\lType: inuse_space\lActive filters:\l tagfocus=1mb:2gb\lShowing nodes accounting for 62.50MB, 63.37% of 98.63MB total\l\lSee https://git.io/JfYMW for how to read the graph\l"] } -N1 [label="line2001\n62.50MB (63.37%)" id="node1" fontsize=24 shape=box tooltip="line2001 (62.50MB)" color="#b21600" fillcolor="#edd8d5"] -NN1_0 [label = "1.56MB" id="NN1_0" fontsize=8 shape=box3d tooltip="62.50MB"] -N1 -> NN1_0 [label=" 62.50MB" weight=100 tooltip="62.50MB" labeltooltip="62.50MB"] -N2 [label="line3000\n0 of 62.50MB (63.37%)" id="node2" fontsize=8 shape=box tooltip="line3000 (62.50MB)" color="#b21600" fillcolor="#edd8d5"] -N3 [label="line2000\n0 of 62.50MB (63.37%)" id="node3" fontsize=8 shape=box tooltip="line2000 (62.50MB)" color="#b21600" fillcolor="#edd8d5"] -N4 [label="line3002\n0 of 62.50MB (63.37%)" id="node4" fontsize=8 shape=box tooltip="line3002 (62.50MB)" color="#b21600" fillcolor="#edd8d5"] -N3 -> N1 [label=" 62.50MB\n (inline)" weight=64 penwidth=4 color="#b21600" tooltip="line2000 -> line2001 (62.50MB)" labeltooltip="line2000 -> line2001 (62.50MB)"] -N2 -> N4 [label=" 62.50MB\n (inline)" weight=64 penwidth=4 color="#b21600" tooltip="line3000 -> line3002 (62.50MB)" labeltooltip="line3000 -> line3002 (62.50MB)"] -N4 -> N3 [label=" 62.50MB" weight=64 penwidth=4 color="#b21600" tooltip="line3002 -> line2000 (62.50MB)" labeltooltip="line3002 -> line2000 (62.50MB)"] -} diff --git a/internal/pprof/driver/testdata/pprof.heap.flat.inuse_space.dot.focus.ignore b/internal/pprof/driver/testdata/pprof.heap.flat.inuse_space.dot.focus.ignore deleted file mode 100644 index ead36d62bfe..00000000000 --- a/internal/pprof/driver/testdata/pprof.heap.flat.inuse_space.dot.focus.ignore +++ /dev/null @@ -1,16 +0,0 @@ -digraph "unnamed" { -node [style=filled fillcolor="#f8f8f8"] -subgraph cluster_L { "Build ID: buildid" [shape=box fontsize=16 label="Build ID: buildid\lcomment\lType: inuse_space\lActive filters:\l tagfocus=30kb:\l tagignore=1mb:2mb\lShowing nodes accounting for 36.13MB, 36.63% of 98.63MB total\lDropped 2 nodes (cum <= 4.93MB)\l\lSee https://git.io/JfYMW for how to read the graph\l"] } -N1 [label="line3002\n31.25MB (31.68%)\nof 32.23MB (32.67%)" id="node1" fontsize=24 shape=box tooltip="line3002 (32.23MB)" color="#b23200" fillcolor="#eddcd5"] -NN1_0 [label = "400kB" id="NN1_0" fontsize=8 shape=box3d tooltip="31.25MB"] -N1 -> NN1_0 [label=" 31.25MB" weight=100 tooltip="31.25MB" labeltooltip="31.25MB"] -N2 [label="line3000\n0 of 36.13MB (36.63%)" id="node2" fontsize=8 shape=box tooltip="line3000 
(36.13MB)" color="#b22e00" fillcolor="#eddbd5"] -N3 [label="line3001\n0 of 36.13MB (36.63%)" id="node3" fontsize=8 shape=box tooltip="line3001 (36.13MB)" color="#b22e00" fillcolor="#eddbd5"] -N4 [label="line1000\n4.88MB (4.95%)" id="node4" fontsize=15 shape=box tooltip="line1000 (4.88MB)" color="#b2a086" fillcolor="#edeae7"] -NN4_0 [label = "200kB" id="NN4_0" fontsize=8 shape=box3d tooltip="3.91MB"] -N4 -> NN4_0 [label=" 3.91MB" weight=100 tooltip="3.91MB" labeltooltip="3.91MB"] -N2 -> N3 [label=" 36.13MB\n (inline)" weight=37 penwidth=2 color="#b22e00" tooltip="line3000 -> line3001 (36.13MB)" labeltooltip="line3000 -> line3001 (36.13MB)"] -N3 -> N1 [label=" 32.23MB\n (inline)" weight=33 penwidth=2 color="#b23200" tooltip="line3001 -> line3002 (32.23MB)" labeltooltip="line3001 -> line3002 (32.23MB)"] -N3 -> N4 [label=" 3.91MB" weight=4 color="#b2a58f" tooltip="line3001 -> line1000 (3.91MB)" labeltooltip="line3001 -> line1000 (3.91MB)"] -N1 -> N4 [label=" 0.98MB" color="#b2b0a9" tooltip="line3002 ... line1000 (0.98MB)" labeltooltip="line3002 ... line1000 (0.98MB)" style="dotted" minlen=2] -} diff --git a/internal/pprof/driver/testdata/pprof.heap.flat.lines.dot.focus b/internal/pprof/driver/testdata/pprof.heap.flat.lines.dot.focus deleted file mode 100644 index 02ea91ecb6f..00000000000 --- a/internal/pprof/driver/testdata/pprof.heap.flat.lines.dot.focus +++ /dev/null @@ -1,21 +0,0 @@ -digraph "unnamed" { -node [style=filled fillcolor="#f8f8f8"] -subgraph cluster_L { "Build ID: buildid" [shape=box fontsize=16 label="Build ID: buildid\lcomment\lType: inuse_space\lActive filters:\l focus=[12]00\lShowing nodes accounting for 67.38MB, 68.32% of 98.63MB total\l\lSee https://git.io/JfYMW for how to read the graph\l"] } -N1 [label="line3000\nfile3000.src:4\n0 of 67.38MB (68.32%)" id="node1" fontsize=8 shape=box tooltip="line3000 testdata/file3000.src:4 (67.38MB)" color="#b21300" fillcolor="#edd7d5"] -N2 [label="line2001\nfile2000.src:2\n62.50MB (63.37%)\nof 63.48MB (64.36%)" id="node2" fontsize=24 shape=box tooltip="line2001 testdata/file2000.src:2 (63.48MB)" color="#b21600" fillcolor="#edd8d5"] -NN2_0 [label = "1.56MB" id="NN2_0" fontsize=8 shape=box3d tooltip="62.50MB"] -N2 -> NN2_0 [label=" 62.50MB" weight=100 tooltip="62.50MB" labeltooltip="62.50MB"] -N3 [label="line1000\nfile1000.src:1\n4.88MB (4.95%)" id="node3" fontsize=13 shape=box tooltip="line1000 testdata/file1000.src:1 (4.88MB)" color="#b2a086" fillcolor="#edeae7"] -NN3_0 [label = "200kB" id="NN3_0" fontsize=8 shape=box3d tooltip="3.91MB"] -N3 -> NN3_0 [label=" 3.91MB" weight=100 tooltip="3.91MB" labeltooltip="3.91MB"] -N4 [label="line3002\nfile3000.src:3\n0 of 63.48MB (64.36%)" id="node4" fontsize=8 shape=box tooltip="line3002 testdata/file3000.src:3 (63.48MB)" color="#b21600" fillcolor="#edd8d5"] -N5 [label="line3001\nfile3000.src:2\n0 of 4.88MB (4.95%)" id="node5" fontsize=8 shape=box tooltip="line3001 testdata/file3000.src:2 (4.88MB)" color="#b2a086" fillcolor="#edeae7"] -N6 [label="line2000\nfile2000.src:3\n0 of 63.48MB (64.36%)" id="node6" fontsize=8 shape=box tooltip="line2000 testdata/file2000.src:3 (63.48MB)" color="#b21600" fillcolor="#edd8d5"] -N6 -> N2 [label=" 63.48MB\n (inline)" weight=65 penwidth=4 color="#b21600" tooltip="line2000 testdata/file2000.src:3 -> line2001 testdata/file2000.src:2 (63.48MB)" labeltooltip="line2000 testdata/file2000.src:3 -> line2001 testdata/file2000.src:2 (63.48MB)"] -N4 -> N6 [label=" 63.48MB" weight=65 penwidth=4 color="#b21600" tooltip="line3002 testdata/file3000.src:3 -> line2000 
testdata/file2000.src:3 (63.48MB)" labeltooltip="line3002 testdata/file3000.src:3 -> line2000 testdata/file2000.src:3 (63.48MB)"] -N1 -> N4 [label=" 62.50MB\n (inline)" weight=64 penwidth=4 color="#b21600" tooltip="line3000 testdata/file3000.src:4 -> line3002 testdata/file3000.src:3 (62.50MB)" labeltooltip="line3000 testdata/file3000.src:4 -> line3002 testdata/file3000.src:3 (62.50MB)"] -N1 -> N5 [label=" 4.88MB\n (inline)" weight=5 color="#b2a086" tooltip="line3000 testdata/file3000.src:4 -> line3001 testdata/file3000.src:2 (4.88MB)" labeltooltip="line3000 testdata/file3000.src:4 -> line3001 testdata/file3000.src:2 (4.88MB)"] -N5 -> N3 [label=" 3.91MB" weight=4 color="#b2a58f" tooltip="line3001 testdata/file3000.src:2 -> line1000 testdata/file1000.src:1 (3.91MB)" labeltooltip="line3001 testdata/file3000.src:2 -> line1000 testdata/file1000.src:1 (3.91MB)"] -N2 -> N3 [label=" 0.98MB" color="#b2b0a9" tooltip="line2001 testdata/file2000.src:2 -> line1000 testdata/file1000.src:1 (0.98MB)" labeltooltip="line2001 testdata/file2000.src:2 -> line1000 testdata/file1000.src:1 (0.98MB)" minlen=2] -N5 -> N4 [label=" 0.98MB\n (inline)" color="#b2b0a9" tooltip="line3001 testdata/file3000.src:2 -> line3002 testdata/file3000.src:3 (0.98MB)" labeltooltip="line3001 testdata/file3000.src:2 -> line3002 testdata/file3000.src:3 (0.98MB)"] -} diff --git a/internal/pprof/driver/testdata/pprof.heap.tags b/internal/pprof/driver/testdata/pprof.heap.tags deleted file mode 100644 index 630e452a9f0..00000000000 --- a/internal/pprof/driver/testdata/pprof.heap.tags +++ /dev/null @@ -1,6 +0,0 @@ - bytes: Total 98.6MB - 62.5MB (63.37%): 1.56MB - 31.2MB (31.68%): 400kB - 3.9MB ( 3.96%): 200kB - 1000.0kB ( 0.99%): 100kB - diff --git a/internal/pprof/driver/testdata/pprof.heap.tags.unit b/internal/pprof/driver/testdata/pprof.heap.tags.unit deleted file mode 100644 index 5e565fc019e..00000000000 --- a/internal/pprof/driver/testdata/pprof.heap.tags.unit +++ /dev/null @@ -1,6 +0,0 @@ - bytes: Total 103424000.0B - 65536000.0B (63.37%): 1638400B - 32768000.0B (31.68%): 409600B - 4096000.0B ( 3.96%): 204800B - 1024000.0B ( 0.99%): 102400B - diff --git a/internal/pprof/driver/testdata/pprof.heap_alloc.flat.alloc_objects.text b/internal/pprof/driver/testdata/pprof.heap_alloc.flat.alloc_objects.text deleted file mode 100644 index 929461a3c1e..00000000000 --- a/internal/pprof/driver/testdata/pprof.heap_alloc.flat.alloc_objects.text +++ /dev/null @@ -1,8 +0,0 @@ -Showing nodes accounting for 150, 100% of 150 total - flat flat% sum% cum cum% - 80 53.33% 53.33% 130 86.67% line3002 (inline) - 40 26.67% 80.00% 50 33.33% line2001 (inline) - 30 20.00% 100% 30 20.00% line1000 - 0 0% 100% 50 33.33% line2000 - 0 0% 100% 150 100% line3000 - 0 0% 100% 110 73.33% line3001 (inline) diff --git a/internal/pprof/driver/testdata/pprof.heap_alloc.flat.alloc_space.dot b/internal/pprof/driver/testdata/pprof.heap_alloc.flat.alloc_space.dot deleted file mode 100644 index 152f5509e51..00000000000 --- a/internal/pprof/driver/testdata/pprof.heap_alloc.flat.alloc_space.dot +++ /dev/null @@ -1,14 +0,0 @@ -digraph "unnamed" { -node [style=filled fillcolor="#f8f8f8"] -subgraph cluster_L { "Build ID: buildid" [shape=box fontsize=16 label="Build ID: buildid\lcomment\lType: alloc_space\lActive filters:\l tagshow=[2]00\lShowing nodes accounting for 93.75MB, 95.05% of 98.63MB total\lDropped 1 node (cum <= 4.93MB)\l\lSee https://git.io/JfYMW for how to read the graph\l"] } -N1 [label="line3002\n31.25MB (31.68%)\nof 94.73MB (96.04%)" id="node1" fontsize=20 shape=box 
tooltip="line3002 (94.73MB)" color="#b20200" fillcolor="#edd5d5"] -N2 [label="line3000\n0 of 98.63MB (100%)" id="node2" fontsize=8 shape=box tooltip="line3000 (98.63MB)" color="#b20000" fillcolor="#edd5d5"] -N3 [label="line2001\n62.50MB (63.37%)\nof 63.48MB (64.36%)" id="node3" fontsize=24 shape=box tooltip="line2001 (63.48MB)" color="#b21600" fillcolor="#edd8d5"] -N4 [label="line2000\n0 of 63.48MB (64.36%)" id="node4" fontsize=8 shape=box tooltip="line2000 (63.48MB)" color="#b21600" fillcolor="#edd8d5"] -N5 [label="line3001\n0 of 36.13MB (36.63%)" id="node5" fontsize=8 shape=box tooltip="line3001 (36.13MB)" color="#b22e00" fillcolor="#eddbd5"] -N4 -> N3 [label=" 63.48MB\n (inline)" weight=65 penwidth=4 color="#b21600" tooltip="line2000 -> line2001 (63.48MB)" labeltooltip="line2000 -> line2001 (63.48MB)"] -N1 -> N4 [label=" 63.48MB" weight=65 penwidth=4 color="#b21600" tooltip="line3002 -> line2000 (63.48MB)" labeltooltip="line3002 -> line2000 (63.48MB)"] -N2 -> N1 [label=" 62.50MB\n (inline)" weight=64 penwidth=4 color="#b21600" tooltip="line3000 -> line3002 (62.50MB)" labeltooltip="line3000 -> line3002 (62.50MB)"] -N2 -> N5 [label=" 36.13MB\n (inline)" weight=37 penwidth=2 color="#b22e00" tooltip="line3000 -> line3001 (36.13MB)" labeltooltip="line3000 -> line3001 (36.13MB)"] -N5 -> N1 [label=" 32.23MB\n (inline)" weight=33 penwidth=2 color="#b23200" tooltip="line3001 -> line3002 (32.23MB)" labeltooltip="line3001 -> line3002 (32.23MB)"] -} diff --git a/internal/pprof/driver/testdata/pprof.heap_alloc.flat.alloc_space.dot.focus b/internal/pprof/driver/testdata/pprof.heap_alloc.flat.alloc_space.dot.focus deleted file mode 100644 index e59deef2e21..00000000000 --- a/internal/pprof/driver/testdata/pprof.heap_alloc.flat.alloc_space.dot.focus +++ /dev/null @@ -1,18 +0,0 @@ -digraph "unnamed" { -node [style=filled fillcolor="#f8f8f8"] -subgraph cluster_L { "Build ID: buildid" [shape=box fontsize=16 label="Build ID: buildid\lcomment\lType: alloc_space\lActive filters:\l focus=[234]00\lShowing nodes accounting for 93.75MB, 95.05% of 98.63MB total\lDropped 1 node (cum <= 4.93MB)\l\lSee https://git.io/JfYMW for how to read the graph\l"] } -N1 [label="line3002\n31.25MB (31.68%)\nof 94.73MB (96.04%)" id="node1" fontsize=20 shape=box tooltip="line3002 (94.73MB)" color="#b20200" fillcolor="#edd5d5"] -NN1_0 [label = "400kB" id="NN1_0" fontsize=8 shape=box3d tooltip="31.25MB"] -N1 -> NN1_0 [label=" 31.25MB" weight=100 tooltip="31.25MB" labeltooltip="31.25MB"] -N2 [label="line3000\n0 of 98.63MB (100%)" id="node2" fontsize=8 shape=box tooltip="line3000 (98.63MB)" color="#b20000" fillcolor="#edd5d5"] -N3 [label="line2001\n62.50MB (63.37%)\nof 63.48MB (64.36%)" id="node3" fontsize=24 shape=box tooltip="line2001 (63.48MB)" color="#b21600" fillcolor="#edd8d5"] -NN3_0 [label = "1.56MB" id="NN3_0" fontsize=8 shape=box3d tooltip="62.50MB"] -N3 -> NN3_0 [label=" 62.50MB" weight=100 tooltip="62.50MB" labeltooltip="62.50MB"] -N4 [label="line2000\n0 of 63.48MB (64.36%)" id="node4" fontsize=8 shape=box tooltip="line2000 (63.48MB)" color="#b21600" fillcolor="#edd8d5"] -N5 [label="line3001\n0 of 36.13MB (36.63%)" id="node5" fontsize=8 shape=box tooltip="line3001 (36.13MB)" color="#b22e00" fillcolor="#eddbd5"] -N4 -> N3 [label=" 63.48MB\n (inline)" weight=65 penwidth=4 color="#b21600" tooltip="line2000 -> line2001 (63.48MB)" labeltooltip="line2000 -> line2001 (63.48MB)"] -N1 -> N4 [label=" 63.48MB" weight=65 penwidth=4 color="#b21600" tooltip="line3002 -> line2000 (63.48MB)" labeltooltip="line3002 -> line2000 (63.48MB)" 
minlen=2] -N2 -> N1 [label=" 62.50MB\n (inline)" weight=64 penwidth=4 color="#b21600" tooltip="line3000 -> line3002 (62.50MB)" labeltooltip="line3000 -> line3002 (62.50MB)"] -N2 -> N5 [label=" 36.13MB\n (inline)" weight=37 penwidth=2 color="#b22e00" tooltip="line3000 -> line3001 (36.13MB)" labeltooltip="line3000 -> line3001 (36.13MB)"] -N5 -> N1 [label=" 32.23MB\n (inline)" weight=33 penwidth=2 color="#b23200" tooltip="line3001 -> line3002 (32.23MB)" labeltooltip="line3001 -> line3002 (32.23MB)"] -} diff --git a/internal/pprof/driver/testdata/pprof.heap_alloc.flat.alloc_space.dot.hide b/internal/pprof/driver/testdata/pprof.heap_alloc.flat.alloc_space.dot.hide deleted file mode 100644 index 25250f025d0..00000000000 --- a/internal/pprof/driver/testdata/pprof.heap_alloc.flat.alloc_space.dot.hide +++ /dev/null @@ -1,11 +0,0 @@ -digraph "unnamed" { -node [style=filled fillcolor="#f8f8f8"] -subgraph cluster_L { "Build ID: buildid" [shape=box fontsize=16 label="Build ID: buildid\lcomment\lType: alloc_space\lActive filters:\l hide=line.*1?23?\lShowing nodes accounting for 93.75MB, 95.05% of 98.63MB total\lDropped 1 node (cum <= 4.93MB)\l\lSee https://git.io/JfYMW for how to read the graph\l"] } -N1 [label="line3000\n62.50MB (63.37%)\nof 98.63MB (100%)" id="node1" fontsize=24 shape=box tooltip="line3000 (98.63MB)" color="#b20000" fillcolor="#edd5d5"] -NN1_0 [label = "1.56MB" id="NN1_0" fontsize=8 shape=box3d tooltip="62.50MB"] -N1 -> NN1_0 [label=" 62.50MB" weight=100 tooltip="62.50MB" labeltooltip="62.50MB"] -N2 [label="line3001\n31.25MB (31.68%)\nof 36.13MB (36.63%)" id="node2" fontsize=20 shape=box tooltip="line3001 (36.13MB)" color="#b22e00" fillcolor="#eddbd5"] -NN2_0 [label = "400kB" id="NN2_0" fontsize=8 shape=box3d tooltip="31.25MB"] -N2 -> NN2_0 [label=" 31.25MB" weight=100 tooltip="31.25MB" labeltooltip="31.25MB"] -N1 -> N2 [label=" 36.13MB\n (inline)" weight=37 penwidth=2 color="#b22e00" tooltip="line3000 -> line3001 (36.13MB)" labeltooltip="line3000 -> line3001 (36.13MB)" minlen=2] -} diff --git a/internal/pprof/driver/testdata/pprof.heap_request.tags.focus b/internal/pprof/driver/testdata/pprof.heap_request.tags.focus deleted file mode 100644 index b1a5f444d8c..00000000000 --- a/internal/pprof/driver/testdata/pprof.heap_request.tags.focus +++ /dev/null @@ -1,8 +0,0 @@ - bytes: Total 93.8MB - 62.5MB (66.67%): 1.56MB - 31.2MB (33.33%): 400kB - - request: Total 93.8MB - 62.5MB (66.67%): 1.56MB - 31.2MB (33.33%): 400kB - diff --git a/internal/pprof/driver/testdata/pprof.heap_sizetags.dot b/internal/pprof/driver/testdata/pprof.heap_sizetags.dot deleted file mode 100644 index fb31559c73b..00000000000 --- a/internal/pprof/driver/testdata/pprof.heap_sizetags.dot +++ /dev/null @@ -1,30 +0,0 @@ -digraph "unnamed" { -node [style=filled fillcolor="#f8f8f8"] -subgraph cluster_L { "Build ID: buildid" [shape=box fontsize=16 label="Build ID: buildid\lcomment\lType: inuse_space\lShowing nodes accounting for 93.75MB, 95.05% of 98.63MB total\lDropped 1 node (cum <= 4.93MB)\l\lSee https://git.io/JfYMW for how to read the graph\l"] } -N1 [label="line3002\n31.25MB (31.68%)\nof 94.73MB (96.04%)" id="node1" fontsize=20 shape=box tooltip="line3002 (94.73MB)" color="#b20200" fillcolor="#edd5d5"] -NN1_0 [label = "16B..64B" id="NN1_0" fontsize=8 shape=box3d tooltip="93.75MB"] -N1 -> NN1_0 [label=" 93.75MB" weight=100 tooltip="93.75MB" labeltooltip="93.75MB"] -NN1_1 [label = "2B..8B" id="NN1_1" fontsize=8 shape=box3d tooltip="93.75MB"] -N1 -> NN1_1 [label=" 93.75MB" weight=100 tooltip="93.75MB" 
labeltooltip="93.75MB"] -NN1_2 [label = "256B..1.56MB" id="NN1_2" fontsize=8 shape=box3d tooltip="62.50MB"] -N1 -> NN1_2 [label=" 62.50MB" weight=100 tooltip="62.50MB" labeltooltip="62.50MB"] -NN1_3 [label = "128B" id="NN1_3" fontsize=8 shape=box3d tooltip="31.25MB"] -N1 -> NN1_3 [label=" 31.25MB" weight=100 tooltip="31.25MB" labeltooltip="31.25MB"] -N2 [label="line3000\n0 of 98.63MB (100%)" id="node2" fontsize=8 shape=box tooltip="line3000 (98.63MB)" color="#b20000" fillcolor="#edd5d5"] -N3 [label="line2001\n62.50MB (63.37%)\nof 63.48MB (64.36%)" id="node3" fontsize=24 shape=box tooltip="line2001 (63.48MB)" color="#b21600" fillcolor="#edd8d5"] -NN3_0 [label = "16B..64B" id="NN3_0" fontsize=8 shape=box3d tooltip="190.43MB"] -N3 -> NN3_0 [label=" 190.43MB" weight=100 tooltip="190.43MB" labeltooltip="190.43MB" style="dotted"] -NN3_1 [label = "2B..8B" id="NN3_1" fontsize=8 shape=box3d tooltip="190.43MB"] -N3 -> NN3_1 [label=" 190.43MB" weight=100 tooltip="190.43MB" labeltooltip="190.43MB" style="dotted"] -NN3_2 [label = "256B..1.56MB" id="NN3_2" fontsize=8 shape=box3d tooltip="125.98MB"] -N3 -> NN3_2 [label=" 125.98MB" weight=100 tooltip="125.98MB" labeltooltip="125.98MB" style="dotted"] -NN3_3 [label = "128B" id="NN3_3" fontsize=8 shape=box3d tooltip="63.48MB"] -N3 -> NN3_3 [label=" 63.48MB" weight=100 tooltip="63.48MB" labeltooltip="63.48MB" style="dotted"] -N4 [label="line2000\n0 of 63.48MB (64.36%)" id="node4" fontsize=8 shape=box tooltip="line2000 (63.48MB)" color="#b21600" fillcolor="#edd8d5"] -N5 [label="line3001\n0 of 36.13MB (36.63%)" id="node5" fontsize=8 shape=box tooltip="line3001 (36.13MB)" color="#b22e00" fillcolor="#eddbd5"] -N4 -> N3 [label=" 63.48MB\n (inline)" weight=65 penwidth=4 color="#b21600" tooltip="line2000 -> line2001 (63.48MB)" labeltooltip="line2000 -> line2001 (63.48MB)"] -N1 -> N4 [label=" 63.48MB" weight=65 penwidth=4 color="#b21600" tooltip="line3002 -> line2000 (63.48MB)" labeltooltip="line3002 -> line2000 (63.48MB)" minlen=2] -N2 -> N1 [label=" 62.50MB\n (inline)" weight=64 penwidth=4 color="#b21600" tooltip="line3000 -> line3002 (62.50MB)" labeltooltip="line3000 -> line3002 (62.50MB)"] -N2 -> N5 [label=" 36.13MB\n (inline)" weight=37 penwidth=2 color="#b22e00" tooltip="line3000 -> line3001 (36.13MB)" labeltooltip="line3000 -> line3001 (36.13MB)"] -N5 -> N1 [label=" 32.23MB\n (inline)" weight=33 penwidth=2 color="#b23200" tooltip="line3001 -> line3002 (32.23MB)" labeltooltip="line3001 -> line3002 (32.23MB)"] -} diff --git a/internal/pprof/driver/testdata/pprof.heap_tags.traces b/internal/pprof/driver/testdata/pprof.heap_tags.traces deleted file mode 100644 index 694b4b2c56a..00000000000 --- a/internal/pprof/driver/testdata/pprof.heap_tags.traces +++ /dev/null @@ -1,32 +0,0 @@ -Build ID: buildid -comment -Type: inuse_space ------------+------------------------------------------------------- - key1: tag - bytes: 100kB - request: 100kB - 1000kB line1000 - line2001 (inline) - line2000 - line3002 (inline) - line3001 (inline) - line3000 ------------+------------------------------------------------------- - bytes: 200kB - 3.91MB line1000 - line3001 (inline) - line3000 ------------+------------------------------------------------------- - key1: tag - bytes: 1.56MB - request: 1.56MB - 62.50MB line2001 (inline) - line2000 - line3002 (inline) - line3000 ------------+------------------------------------------------------- - bytes: 400kB - 31.25MB line3002 (inline) - line3001 (inline) - line3000 ------------+------------------------------------------------------- diff 
--git a/internal/pprof/driver/testdata/pprof.long_name_funcs.dot b/internal/pprof/driver/testdata/pprof.long_name_funcs.dot deleted file mode 100644 index dfc2142f571..00000000000 --- a/internal/pprof/driver/testdata/pprof.long_name_funcs.dot +++ /dev/null @@ -1,9 +0,0 @@ -digraph "testbinary" { -node [style=filled fillcolor="#f8f8f8"] -subgraph cluster_L { "File: testbinary" [shape=box fontsize=16 label="File: testbinary\lType: cpu\lDuration: 10s, Total samples = 1.11s (11.10%)\lShowing nodes accounting for 1.11s, 100% of 1.11s total\l\lSee https://git.io/JfYMW for how to read the graph\l" tooltip="testbinary"] } -N1 [label="package1\nobject\nfunction1\n1.10s (99.10%)" id="node1" fontsize=24 shape=box tooltip="path/to/package1.object.function1 (1.10s)" color="#b20000" fillcolor="#edd5d5"] -N2 [label="FooBar\nrun\n0.01s (0.9%)\nof 1.01s (90.99%)" id="node2" fontsize=10 shape=box tooltip="java.bar.foo.FooBar.run(java.lang.Runnable) (1.01s)" color="#b20400" fillcolor="#edd6d5"] -N3 [label="Bar\nFoo\n0 of 1.10s (99.10%)" id="node3" fontsize=8 shape=box tooltip="(anonymous namespace)::Bar::Foo (1.10s)" color="#b20000" fillcolor="#edd5d5"] -N3 -> N1 [label=" 1.10s" weight=100 penwidth=5 color="#b20000" tooltip="(anonymous namespace)::Bar::Foo -> path/to/package1.object.function1 (1.10s)" labeltooltip="(anonymous namespace)::Bar::Foo -> path/to/package1.object.function1 (1.10s)"] -N2 -> N3 [label=" 1s" weight=91 penwidth=5 color="#b20500" tooltip="java.bar.foo.FooBar.run(java.lang.Runnable) -> (anonymous namespace)::Bar::Foo (1s)" labeltooltip="java.bar.foo.FooBar.run(java.lang.Runnable) -> (anonymous namespace)::Bar::Foo (1s)"] -} diff --git a/internal/pprof/driver/testdata/pprof.long_name_funcs.text b/internal/pprof/driver/testdata/pprof.long_name_funcs.text deleted file mode 100644 index 39cb24ed6a2..00000000000 --- a/internal/pprof/driver/testdata/pprof.long_name_funcs.text +++ /dev/null @@ -1,5 +0,0 @@ -Showing nodes accounting for 1.11s, 100% of 1.11s total - flat flat% sum% cum cum% - 1.10s 99.10% 99.10% 1.10s 99.10% path/to/package1.object.function1 - 0.01s 0.9% 100% 1.01s 90.99% java.bar.foo.FooBar.run(java.lang.Runnable) - 0 0% 100% 1.10s 99.10% (anonymous namespace)::Bar::Foo diff --git a/internal/pprof/driver/testdata/pprof.unknown.flat.functions.call_tree.text b/internal/pprof/driver/testdata/pprof.unknown.flat.functions.call_tree.text deleted file mode 100644 index 78a2298f95f..00000000000 --- a/internal/pprof/driver/testdata/pprof.unknown.flat.functions.call_tree.text +++ /dev/null @@ -1,8 +0,0 @@ -Showing nodes accounting for 1.12s, 100% of 1.12s total -Showing top 5 nodes out of 6 - flat flat% sum% cum cum% - 1.10s 98.21% 98.21% 1.10s 98.21% line1000 - 0.01s 0.89% 99.11% 1.01s 90.18% line2001 (inline) - 0.01s 0.89% 100% 1.02s 91.07% line3002 (inline) - 0 0% 100% 1.01s 90.18% line2000 - 0 0% 100% 1.12s 100% line3000 diff --git a/internal/pprof/driver/webhtml.go b/internal/pprof/driver/webhtml.go deleted file mode 100644 index b9c73271b8c..00000000000 --- a/internal/pprof/driver/webhtml.go +++ /dev/null @@ -1,1414 +0,0 @@ -// Copyright 2017 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package driver - -import ( - "html/template" - - "github.com/google/pprof/third_party/d3" - "github.com/google/pprof/third_party/d3flamegraph" -) - -// addTemplates adds a set of template definitions to templates. -func addTemplates(templates *template.Template) { - template.Must(templates.Parse(`{{define "d3script"}}` + d3.JSSource + `{{end}}`)) - template.Must(templates.Parse(`{{define "d3flamegraphscript"}}` + d3flamegraph.JSSource + `{{end}}`)) - template.Must(templates.Parse(`{{define "d3flamegraphcss"}}` + d3flamegraph.CSSSource + `{{end}}`)) - template.Must(templates.Parse(` -{{define "css"}} - -{{end}} - -{{define "header"}} -
[Remainder of the embedded HTML/CSS/JS templates deleted with webhtml.go: the "header" navigation bar (pprof menu, sample-type selector, title, legend, "Save options as" / "Delete config" dialogs, error list) and the "graph", "script", "top" (Flat / Flat% / Sum% / Cum / Cum% / Name / Inlined? table), "sourcelisting", "plaintext", and "flamegraph" page templates. The markup is garbled in this rendering and is not reproduced here.]
- {{template "script" .}} - - - - - - -{{end}} -`)) -} diff --git a/internal/pprof/driver/webui.go b/internal/pprof/driver/webui.go deleted file mode 100644 index 356f8b8727a..00000000000 --- a/internal/pprof/driver/webui.go +++ /dev/null @@ -1,465 +0,0 @@ -// Copyright 2017 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package driver - -import ( - "bytes" - "fmt" - "html/template" - "net" - "net/http" - gourl "net/url" - "os" - "os/exec" - "strconv" - "strings" - "time" - - "github.com/parca-dev/parca/internal/pprof/graph" - "github.com/parca-dev/parca/internal/pprof/plugin" - "github.com/parca-dev/parca/internal/pprof/report" - "github.com/google/pprof/profile" -) - -// webInterface holds the state needed for serving a browser based interface. -type webInterface struct { - prof *profile.Profile - options *plugin.Options - help map[string]string - templates *template.Template - settingsFile string -} - -func makeWebInterface(p *profile.Profile, opt *plugin.Options) (*webInterface, error) { - settingsFile, err := settingsFileName() - if err != nil { - return nil, err - } - templates := template.New("templategroup") - addTemplates(templates) - report.AddSourceTemplates(templates) - return &webInterface{ - prof: p, - options: opt, - help: make(map[string]string), - templates: templates, - settingsFile: settingsFile, - }, nil -} - -// maxEntries is the maximum number of entries to print for text interfaces. -const maxEntries = 50 - -// errorCatcher is a UI that captures errors for reporting to the browser. -type errorCatcher struct { - plugin.UI - errors []string -} - -func (ec *errorCatcher) PrintErr(args ...interface{}) { - ec.errors = append(ec.errors, strings.TrimSuffix(fmt.Sprintln(args...), "\n")) - ec.UI.PrintErr(args...) -} - -// webArgs contains arguments passed to templates in webhtml.go. 
-type webArgs struct { - Title string - Errors []string - Total int64 - SampleTypes []string - Legend []string - Help map[string]string - Nodes []string - HTMLBody template.HTML - TextBody string - Top []report.TextItem - FlameGraph template.JS - Configs []configMenuEntry -} - -func serveWebInterface(hostport string, p *profile.Profile, o *plugin.Options, disableBrowser bool) error { - host, port, err := getHostAndPort(hostport) - if err != nil { - return err - } - interactiveMode = true - ui, err := makeWebInterface(p, o) - if err != nil { - return err - } - for n, c := range pprofCommands { - ui.help[n] = c.description - } - for n, help := range configHelp { - ui.help[n] = help - } - ui.help["details"] = "Show information about the profile and this view" - ui.help["graph"] = "Display profile as a directed graph" - ui.help["reset"] = "Show the entire profile" - ui.help["save_config"] = "Save current settings" - - server := o.HTTPServer - if server == nil { - server = defaultWebServer - } - args := &plugin.HTTPServerArgs{ - Hostport: net.JoinHostPort(host, strconv.Itoa(port)), - Host: host, - Port: port, - Handlers: map[string]http.Handler{ - "/": http.HandlerFunc(ui.dot), - "/top": http.HandlerFunc(ui.top), - "/disasm": http.HandlerFunc(ui.disasm), - "/source": http.HandlerFunc(ui.source), - "/peek": http.HandlerFunc(ui.peek), - "/flamegraph": http.HandlerFunc(ui.flamegraph), - "/saveconfig": http.HandlerFunc(ui.saveConfig), - "/deleteconfig": http.HandlerFunc(ui.deleteConfig), - "/download": http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) { - w.Header().Set("Content-Type", "application/vnd.google.protobuf+gzip") - w.Header().Set("Content-Disposition", "attachment;filename=profile.pb.gz") - p.Write(w) - }), - }, - } - - url := "http://" + args.Hostport - - o.UI.Print("Serving web UI on ", url) - - if o.UI.WantBrowser() && !disableBrowser { - go openBrowser(url, o) - } - return server(args) -} - -func getHostAndPort(hostport string) (string, int, error) { - host, portStr, err := net.SplitHostPort(hostport) - if err != nil { - return "", 0, fmt.Errorf("could not split http address: %v", err) - } - if host == "" { - host = "localhost" - } - var port int - if portStr == "" { - ln, err := net.Listen("tcp", net.JoinHostPort(host, "0")) - if err != nil { - return "", 0, fmt.Errorf("could not generate random port: %v", err) - } - port = ln.Addr().(*net.TCPAddr).Port - err = ln.Close() - if err != nil { - return "", 0, fmt.Errorf("could not generate random port: %v", err) - } - } else { - port, err = strconv.Atoi(portStr) - if err != nil { - return "", 0, fmt.Errorf("invalid port number: %v", err) - } - } - return host, port, nil -} -func defaultWebServer(args *plugin.HTTPServerArgs) error { - ln, err := net.Listen("tcp", args.Hostport) - if err != nil { - return err - } - isLocal := isLocalhost(args.Host) - handler := http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) { - if isLocal { - // Only allow local clients - host, _, err := net.SplitHostPort(req.RemoteAddr) - if err != nil || !isLocalhost(host) { - http.Error(w, "permission denied", http.StatusForbidden) - return - } - } - h := args.Handlers[req.URL.Path] - if h == nil { - // Fall back to default behavior - h = http.DefaultServeMux - } - h.ServeHTTP(w, req) - }) - - // We serve the ui at /ui/ and redirect there from the root. This is done - // to surface any problems with serving the ui at a non-root early. 
See: - // - // https://github.com/google/pprof/pull/348 - mux := http.NewServeMux() - mux.Handle("/ui/", http.StripPrefix("/ui", handler)) - mux.Handle("/", redirectWithQuery("/ui")) - s := &http.Server{Handler: mux} - return s.Serve(ln) -} - -func redirectWithQuery(path string) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - pathWithQuery := &gourl.URL{Path: path, RawQuery: r.URL.RawQuery} - http.Redirect(w, r, pathWithQuery.String(), http.StatusTemporaryRedirect) - } -} - -func isLocalhost(host string) bool { - for _, v := range []string{"localhost", "127.0.0.1", "[::1]", "::1"} { - if host == v { - return true - } - } - return false -} - -func openBrowser(url string, o *plugin.Options) { - // Construct URL. - baseURL, _ := gourl.Parse(url) - current := currentConfig() - u, _ := current.makeURL(*baseURL) - - // Give server a little time to get ready. - time.Sleep(time.Millisecond * 500) - - for _, b := range browsers() { - args := strings.Split(b, " ") - if len(args) == 0 { - continue - } - viewer := exec.Command(args[0], append(args[1:], u.String())...) - viewer.Stderr = os.Stderr - if err := viewer.Start(); err == nil { - return - } - } - // No visualizer succeeded, so just print URL. - o.UI.PrintErr(u.String()) -} - -// makeReport generates a report for the specified command. -// If configEditor is not null, it is used to edit the config used for the report. -func (ui *webInterface) makeReport(w http.ResponseWriter, req *http.Request, - cmd []string, configEditor func(*config)) (*report.Report, []string) { - cfg := currentConfig() - if err := cfg.applyURL(req.URL.Query()); err != nil { - http.Error(w, err.Error(), http.StatusBadRequest) - ui.options.UI.PrintErr(err) - return nil, nil - } - if configEditor != nil { - configEditor(&cfg) - } - catcher := &errorCatcher{UI: ui.options.UI} - options := *ui.options - options.UI = catcher - _, rpt, err := generateRawReport(ui.prof, cmd, cfg, &options) - if err != nil { - http.Error(w, err.Error(), http.StatusBadRequest) - ui.options.UI.PrintErr(err) - return nil, nil - } - return rpt, catcher.errors -} - -// render generates html using the named template based on the contents of data. -func (ui *webInterface) render(w http.ResponseWriter, req *http.Request, tmpl string, - rpt *report.Report, errList, legend []string, data webArgs) { - file := getFromLegend(legend, "File: ", "unknown") - profile := getFromLegend(legend, "Type: ", "unknown") - data.Title = file + " " + profile - data.Errors = errList - data.Total = rpt.Total() - data.SampleTypes = sampleTypes(ui.prof) - data.Legend = legend - data.Help = ui.help - data.Configs = configMenu(ui.settingsFile, *req.URL) - - html := &bytes.Buffer{} - if err := ui.templates.ExecuteTemplate(html, tmpl, data); err != nil { - http.Error(w, "internal template error", http.StatusInternalServerError) - ui.options.UI.PrintErr(err) - return - } - w.Header().Set("Content-Type", "text/html") - w.Write(html.Bytes()) -} - -// dot generates a web page containing an svg diagram. -func (ui *webInterface) dot(w http.ResponseWriter, req *http.Request) { - rpt, errList := ui.makeReport(w, req, []string{"svg"}, nil) - if rpt == nil { - return // error already reported - } - - // Generate dot graph. - g, config := report.GetDOT(rpt) - legend := config.Labels - config.Labels = nil - dot := &bytes.Buffer{} - graph.ComposeDot(dot, g, &graph.DotAttributes{}, config) - - // Convert to svg. 
- svg, err := dotToSvg(dot.Bytes()) - if err != nil { - http.Error(w, "Could not execute dot; may need to install graphviz.", - http.StatusNotImplemented) - ui.options.UI.PrintErr("Failed to execute dot. Is Graphviz installed?\n", err) - return - } - - // Get all node names into an array. - nodes := []string{""} // dot starts with node numbered 1 - for _, n := range g.Nodes { - nodes = append(nodes, n.Info.Name) - } - - ui.render(w, req, "graph", rpt, errList, legend, webArgs{ - HTMLBody: template.HTML(string(svg)), - Nodes: nodes, - }) -} - -func dotToSvg(dot []byte) ([]byte, error) { - cmd := exec.Command("dot", "-Tsvg") - out := &bytes.Buffer{} - cmd.Stdin, cmd.Stdout, cmd.Stderr = bytes.NewBuffer(dot), out, os.Stderr - if err := cmd.Run(); err != nil { - return nil, err - } - - // Fix dot bug related to unquoted ampersands. - svg := bytes.Replace(out.Bytes(), []byte("&;"), []byte("&;"), -1) - - // Cleanup for embedding by dropping stuff before the start. - if pos := bytes.Index(svg, []byte("= 0 { - svg = svg[pos:] - } - return svg, nil -} - -func (ui *webInterface) top(w http.ResponseWriter, req *http.Request) { - rpt, errList := ui.makeReport(w, req, []string{"top"}, func(cfg *config) { - cfg.NodeCount = 500 - }) - if rpt == nil { - return // error already reported - } - top, legend := report.TextItems(rpt) - var nodes []string - for _, item := range top { - nodes = append(nodes, item.Name) - } - - ui.render(w, req, "top", rpt, errList, legend, webArgs{ - Top: top, - Nodes: nodes, - }) -} - -// disasm generates a web page containing disassembly. -func (ui *webInterface) disasm(w http.ResponseWriter, req *http.Request) { - args := []string{"disasm", req.URL.Query().Get("f")} - rpt, errList := ui.makeReport(w, req, args, nil) - if rpt == nil { - return // error already reported - } - - out := &bytes.Buffer{} - if err := report.PrintAssembly(out, rpt, ui.options.Obj, maxEntries); err != nil { - http.Error(w, err.Error(), http.StatusBadRequest) - ui.options.UI.PrintErr(err) - return - } - - legend := report.ProfileLabels(rpt) - ui.render(w, req, "plaintext", rpt, errList, legend, webArgs{ - TextBody: out.String(), - }) - -} - -// source generates a web page containing source code annotated with profile -// data. -func (ui *webInterface) source(w http.ResponseWriter, req *http.Request) { - args := []string{"weblist", req.URL.Query().Get("f")} - rpt, errList := ui.makeReport(w, req, args, nil) - if rpt == nil { - return // error already reported - } - - // Generate source listing. - var body bytes.Buffer - if err := report.PrintWebList(&body, rpt, ui.options.Obj, maxEntries); err != nil { - http.Error(w, err.Error(), http.StatusBadRequest) - ui.options.UI.PrintErr(err) - return - } - - legend := report.ProfileLabels(rpt) - ui.render(w, req, "sourcelisting", rpt, errList, legend, webArgs{ - HTMLBody: template.HTML(body.String()), - }) -} - -// peek generates a web page listing callers/callers. 
-func (ui *webInterface) peek(w http.ResponseWriter, req *http.Request) { - args := []string{"peek", req.URL.Query().Get("f")} - rpt, errList := ui.makeReport(w, req, args, func(cfg *config) { - cfg.Granularity = "lines" - }) - if rpt == nil { - return // error already reported - } - - out := &bytes.Buffer{} - if err := report.Generate(out, rpt, ui.options.Obj); err != nil { - http.Error(w, err.Error(), http.StatusBadRequest) - ui.options.UI.PrintErr(err) - return - } - - legend := report.ProfileLabels(rpt) - ui.render(w, req, "plaintext", rpt, errList, legend, webArgs{ - TextBody: out.String(), - }) -} - -// saveConfig saves URL configuration. -func (ui *webInterface) saveConfig(w http.ResponseWriter, req *http.Request) { - if err := setConfig(ui.settingsFile, *req.URL); err != nil { - http.Error(w, err.Error(), http.StatusBadRequest) - ui.options.UI.PrintErr(err) - return - } -} - -// deleteConfig deletes a configuration. -func (ui *webInterface) deleteConfig(w http.ResponseWriter, req *http.Request) { - name := req.URL.Query().Get("config") - if err := removeConfig(ui.settingsFile, name); err != nil { - http.Error(w, err.Error(), http.StatusBadRequest) - ui.options.UI.PrintErr(err) - return - } -} - -// getFromLegend returns the suffix of an entry in legend that starts -// with param. It returns def if no such entry is found. -func getFromLegend(legend []string, param, def string) string { - for _, s := range legend { - if strings.HasPrefix(s, param) { - return s[len(param):] - } - } - return def -} diff --git a/internal/pprof/driver/webui_test.go b/internal/pprof/driver/webui_test.go deleted file mode 100644 index 0c154e8b9b1..00000000000 --- a/internal/pprof/driver/webui_test.go +++ /dev/null @@ -1,289 +0,0 @@ -// Copyright 2017 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
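For context on the serving behavior removed with webui.go above: the deleted defaultWebServer answers only loopback clients and mounts the UI under /ui/ with a query-preserving redirect from the root. A minimal, self-contained sketch of that pattern, with an illustrative handler and port that are assumptions rather than Parca code:

```go
package main

import (
	"net"
	"net/http"
)

// isLoopback mirrors the idea of the deleted isLocalhost helper.
func isLoopback(host string) bool {
	for _, v := range []string{"localhost", "127.0.0.1", "::1"} {
		if host == v {
			return true
		}
	}
	return false
}

func main() {
	ui := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		// Reject clients that did not connect over the loopback interface.
		host, _, err := net.SplitHostPort(r.RemoteAddr)
		if err != nil || !isLoopback(host) {
			http.Error(w, "permission denied", http.StatusForbidden)
			return
		}
		w.Write([]byte("profile UI placeholder")) // stand-in for the real UI handlers
	})

	mux := http.NewServeMux()
	// Serve the UI under /ui/ so problems with non-root serving surface early.
	mux.Handle("/ui/", http.StripPrefix("/ui", ui))
	// Redirect the root to /ui/, preserving any query string.
	mux.Handle("/", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		target := "/ui/"
		if r.URL.RawQuery != "" {
			target += "?" + r.URL.RawQuery
		}
		http.Redirect(w, r, target, http.StatusTemporaryRedirect)
	}))

	http.ListenAndServe("localhost:8080", mux)
}
```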
- -package driver - -import ( - "fmt" - "io/ioutil" - "net" - "net/http" - "net/http/httptest" - "net/url" - "os/exec" - "regexp" - "runtime" - "sync" - "testing" - - "github.com/parca-dev/parca/internal/pprof/plugin" - "github.com/parca-dev/parca/internal/pprof/proftest" - "github.com/google/pprof/profile" -) - -func TestWebInterface(t *testing.T) { - if runtime.GOOS == "nacl" || runtime.GOOS == "js" { - t.Skip("test assumes tcp available") - } - - prof := makeFakeProfile() - - // Custom http server creator - var server *httptest.Server - serverCreated := make(chan bool) - creator := func(a *plugin.HTTPServerArgs) error { - server = httptest.NewServer(http.HandlerFunc( - func(w http.ResponseWriter, r *http.Request) { - if h := a.Handlers[r.URL.Path]; h != nil { - h.ServeHTTP(w, r) - } - })) - serverCreated <- true - return nil - } - - // Start server and wait for it to be initialized - go serveWebInterface("unused:1234", prof, &plugin.Options{ - Obj: fakeObjTool{}, - UI: &proftest.TestUI{T: t}, - HTTPServer: creator, - }, false) - <-serverCreated - defer server.Close() - - haveDot := false - if _, err := exec.LookPath("dot"); err == nil { - haveDot = true - } - - type testCase struct { - path string - want []string - needDot bool - } - testcases := []testCase{ - {"/", []string{"F1", "F2", "F3", "testbin", "cpu"}, true}, - {"/top", []string{`"Name":"F2","InlineLabel":"","Flat":200,"Cum":300,"FlatFormat":"200ms","CumFormat":"300ms"}`}, false}, - {"/source?f=" + url.QueryEscape("F[12]"), []string{ - "F1", - "F2", - `\. +300ms .*f1:asm`, // Cumulative count for F1 - "200ms +300ms .*f2:asm", // Flat + cumulative count for F2 - }, false}, - {"/peek?f=" + url.QueryEscape("F[12]"), - []string{"300ms.*F1", "200ms.*300ms.*F2"}, false}, - {"/disasm?f=" + url.QueryEscape("F[12]"), - []string{"f1:asm", "f2:asm"}, false}, - {"/flamegraph", []string{"File: testbin", "\"n\":\"root\"", "\"n\":\"F1\"", "var flamegraph = function", "function hierarchy"}, false}, - } - for _, c := range testcases { - if c.needDot && !haveDot { - t.Log("skipping", c.path, "since dot (graphviz) does not seem to be installed") - continue - } - - res, err := http.Get(server.URL + c.path) - if err != nil { - t.Error("could not fetch", c.path, err) - continue - } - data, err := ioutil.ReadAll(res.Body) - if err != nil { - t.Error("could not read response", c.path, err) - continue - } - result := string(data) - for _, w := range c.want { - if match, _ := regexp.MatchString(w, result); !match { - t.Errorf("response for %s does not match "+ - "expected pattern '%s'; "+ - "actual result:\n%s", c.path, w, result) - } - } - } - - // Also fetch all the test case URLs in parallel to test thread - // safety when run under the race detector. - var wg sync.WaitGroup - for _, c := range testcases { - if c.needDot && !haveDot { - continue - } - path := server.URL + c.path - for count := 0; count < 2; count++ { - wg.Add(1) - go func() { - defer wg.Done() - res, err := http.Get(path) - if err != nil { - t.Error("could not fetch", c.path, err) - return - } - if _, err = ioutil.ReadAll(res.Body); err != nil { - t.Error("could not read response", c.path, err) - } - }() - } - } - wg.Wait() -} - -// Implement fake object file support. 
- -const addrBase = 0x1000 -const fakeSource = "testdata/file1000.src" - -type fakeObj struct{} - -func (f fakeObj) Close() error { return nil } -func (f fakeObj) Name() string { return "testbin" } -func (f fakeObj) ObjAddr(addr uint64) (uint64, error) { return addr, nil } -func (f fakeObj) BuildID() string { return "" } -func (f fakeObj) SourceLine(addr uint64) ([]plugin.Frame, error) { - return nil, fmt.Errorf("SourceLine unimplemented") -} -func (f fakeObj) Symbols(r *regexp.Regexp, addr uint64) ([]*plugin.Sym, error) { - return []*plugin.Sym{ - { - Name: []string{"F1"}, File: fakeSource, - Start: addrBase, End: addrBase + 10, - }, - { - Name: []string{"F2"}, File: fakeSource, - Start: addrBase + 10, End: addrBase + 20, - }, - { - Name: []string{"F3"}, File: fakeSource, - Start: addrBase + 20, End: addrBase + 30, - }, - }, nil -} - -type fakeObjTool struct{} - -func (obj fakeObjTool) Open(file string, start, limit, offset uint64) (plugin.ObjFile, error) { - return fakeObj{}, nil -} - -func (obj fakeObjTool) Disasm(file string, start, end uint64, intelSyntax bool) ([]plugin.Inst, error) { - return []plugin.Inst{ - {Addr: addrBase + 10, Text: "f1:asm", Function: "F1", Line: 3}, - {Addr: addrBase + 20, Text: "f2:asm", Function: "F2", Line: 11}, - {Addr: addrBase + 30, Text: "d3:asm", Function: "F3", Line: 22}, - }, nil -} - -func makeFakeProfile() *profile.Profile { - // Three functions: F1, F2, F3 with three lines, 11, 22, 33. - funcs := []*profile.Function{ - {ID: 1, Name: "F1", Filename: fakeSource, StartLine: 3}, - {ID: 2, Name: "F2", Filename: fakeSource, StartLine: 5}, - {ID: 3, Name: "F3", Filename: fakeSource, StartLine: 7}, - } - lines := []profile.Line{ - {Function: funcs[0], Line: 11}, - {Function: funcs[1], Line: 22}, - {Function: funcs[2], Line: 33}, - } - mapping := []*profile.Mapping{ - { - ID: 1, - Start: addrBase, - Limit: addrBase + 100, - Offset: 0, - File: "testbin", - HasFunctions: true, - HasFilenames: true, - HasLineNumbers: true, - }, - } - - // Three interesting addresses: base+{10,20,30} - locs := []*profile.Location{ - {ID: 1, Address: addrBase + 10, Line: lines[0:1], Mapping: mapping[0]}, - {ID: 2, Address: addrBase + 20, Line: lines[1:2], Mapping: mapping[0]}, - {ID: 3, Address: addrBase + 30, Line: lines[2:3], Mapping: mapping[0]}, - } - - // Two stack traces. 
- return &profile.Profile{ - PeriodType: &profile.ValueType{Type: "cpu", Unit: "milliseconds"}, - Period: 1, - DurationNanos: 10e9, - SampleType: []*profile.ValueType{ - {Type: "cpu", Unit: "milliseconds"}, - }, - Sample: []*profile.Sample{ - { - Location: []*profile.Location{locs[2], locs[1], locs[0]}, - Value: []int64{100}, - }, - { - Location: []*profile.Location{locs[1], locs[0]}, - Value: []int64{200}, - }, - }, - Location: locs, - Function: funcs, - Mapping: mapping, - } -} - -func TestGetHostAndPort(t *testing.T) { - if runtime.GOOS == "nacl" || runtime.GOOS == "js" { - t.Skip("test assumes tcp available") - } - - type testCase struct { - hostport string - wantHost string - wantPort int - wantRandomPort bool - } - - testCases := []testCase{ - {":", "localhost", 0, true}, - {":4681", "localhost", 4681, false}, - {"localhost:4681", "localhost", 4681, false}, - } - for _, tc := range testCases { - host, port, err := getHostAndPort(tc.hostport) - if err != nil { - t.Errorf("could not get host and port for %q: %v", tc.hostport, err) - } - if got, want := host, tc.wantHost; got != want { - t.Errorf("for %s, got host %s, want %s", tc.hostport, got, want) - continue - } - if !tc.wantRandomPort { - if got, want := port, tc.wantPort; got != want { - t.Errorf("for %s, got port %d, want %d", tc.hostport, got, want) - continue - } - } - } -} - -func TestIsLocalHost(t *testing.T) { - for _, s := range []string{"localhost:10000", "[::1]:10000", "127.0.0.1:10000"} { - host, _, err := net.SplitHostPort(s) - if err != nil { - t.Error("unexpected error when splitting", s) - continue - } - if !isLocalhost(host) { - t.Errorf("host %s from %s not considered local", host, s) - } - } -} diff --git a/internal/pprof/elfexec/elfexec.go b/internal/pprof/elfexec/elfexec.go deleted file mode 100644 index e0f3f7ed6ea..00000000000 --- a/internal/pprof/elfexec/elfexec.go +++ /dev/null @@ -1,364 +0,0 @@ -// Copyright 2014 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Package elfexec provides utility routines to examine ELF binaries. -package elfexec - -import ( - "bufio" - "debug/elf" - "encoding/binary" - "fmt" - "io" -) - -const ( - maxNoteSize = 1 << 20 // in bytes - noteTypeGNUBuildID = 3 -) - -// elfNote is the payload of a Note Section in an ELF file. -type elfNote struct { - Name string // Contents of the "name" field, omitting the trailing zero byte. - Desc []byte // Contents of the "desc" field. - Type uint32 // Contents of the "type" field. -} - -// parseNotes returns the notes from a SHT_NOTE section or PT_NOTE segment. -func parseNotes(reader io.Reader, alignment int, order binary.ByteOrder) ([]elfNote, error) { - r := bufio.NewReader(reader) - - // padding returns the number of bytes required to pad the given size to an - // alignment boundary. 
- padding := func(size int) int { - return ((size + (alignment - 1)) &^ (alignment - 1)) - size - } - - var notes []elfNote - for { - noteHeader := make([]byte, 12) // 3 4-byte words - if _, err := io.ReadFull(r, noteHeader); err == io.EOF { - break - } else if err != nil { - return nil, err - } - namesz := order.Uint32(noteHeader[0:4]) - descsz := order.Uint32(noteHeader[4:8]) - typ := order.Uint32(noteHeader[8:12]) - - if uint64(namesz) > uint64(maxNoteSize) { - return nil, fmt.Errorf("note name too long (%d bytes)", namesz) - } - var name string - if namesz > 0 { - // Documentation differs as to whether namesz is meant to include the - // trailing zero, but everyone agrees that name is null-terminated. - // So we'll just determine the actual length after the fact. - var err error - name, err = r.ReadString('\x00') - if err == io.EOF { - return nil, fmt.Errorf("missing note name (want %d bytes)", namesz) - } else if err != nil { - return nil, err - } - namesz = uint32(len(name)) - name = name[:len(name)-1] - } - - // Drop padding bytes until the desc field. - for n := padding(len(noteHeader) + int(namesz)); n > 0; n-- { - if _, err := r.ReadByte(); err == io.EOF { - return nil, fmt.Errorf( - "missing %d bytes of padding after note name", n) - } else if err != nil { - return nil, err - } - } - - if uint64(descsz) > uint64(maxNoteSize) { - return nil, fmt.Errorf("note desc too long (%d bytes)", descsz) - } - desc := make([]byte, int(descsz)) - if _, err := io.ReadFull(r, desc); err == io.EOF { - return nil, fmt.Errorf("missing desc (want %d bytes)", len(desc)) - } else if err != nil { - return nil, err - } - - notes = append(notes, elfNote{Name: name, Desc: desc, Type: typ}) - - // Drop padding bytes until the next note or the end of the section, - // whichever comes first. - for n := padding(len(desc)); n > 0; n-- { - if _, err := r.ReadByte(); err == io.EOF { - // We hit the end of the section before an alignment boundary. - // This can happen if this section is at the end of the file or the next - // section has a smaller alignment requirement. - break - } else if err != nil { - return nil, err - } - } - } - return notes, nil -} - -// GetBuildID returns the GNU build-ID for an ELF binary. -// -// If no build-ID was found but the binary was read without error, it returns -// (nil, nil). -func GetBuildID(binary io.ReaderAt) ([]byte, error) { - f, err := elf.NewFile(binary) - if err != nil { - return nil, err - } - - findBuildID := func(notes []elfNote) ([]byte, error) { - var buildID []byte - for _, note := range notes { - if note.Name == "GNU" && note.Type == noteTypeGNUBuildID { - if buildID == nil { - buildID = note.Desc - } else { - return nil, fmt.Errorf("multiple build ids found, don't know which to use") - } - } - } - return buildID, nil - } - - for _, p := range f.Progs { - if p.Type != elf.PT_NOTE { - continue - } - notes, err := parseNotes(p.Open(), int(p.Align), f.ByteOrder) - if err != nil { - return nil, err - } - if b, err := findBuildID(notes); b != nil || err != nil { - return b, err - } - } - for _, s := range f.Sections { - if s.Type != elf.SHT_NOTE { - continue - } - notes, err := parseNotes(s.Open(), int(s.Addralign), f.ByteOrder) - if err != nil { - return nil, err - } - if b, err := findBuildID(notes); b != nil || err != nil { - return b, err - } - } - return nil, nil -} - -// GetBase determines the base address to subtract from virtual -// address to get symbol table address. For an executable, the base -// is 0. 
Otherwise, it's a shared library, and the base is the -// address where the mapping starts. The kernel is special, and may -// use the address of the _stext symbol as the mmap start. _stext -// offset can be obtained with `nm vmlinux | grep _stext` -func GetBase(fh *elf.FileHeader, loadSegment *elf.ProgHeader, stextOffset *uint64, start, limit, offset uint64) (uint64, error) { - const ( - pageSize = 4096 - // PAGE_OFFSET for PowerPC64, see arch/powerpc/Kconfig in the kernel sources. - pageOffsetPpc64 = 0xc000000000000000 - ) - - if start == 0 && offset == 0 && (limit == ^uint64(0) || limit == 0) { - // Some tools may introduce a fake mapping that spans the entire - // address space. Assume that the address has already been - // adjusted, so no additional base adjustment is necessary. - return 0, nil - } - - switch fh.Type { - case elf.ET_EXEC: - if loadSegment == nil { - // Assume fixed-address executable and so no adjustment. - return 0, nil - } - if stextOffset == nil && start > 0 && start < 0x8000000000000000 { - // A regular user-mode executable. Compute the base offset using same - // arithmetics as in ET_DYN case below, see the explanation there. - // Ideally, the condition would just be "stextOffset == nil" as that - // represents the address of _stext symbol in the vmlinux image. Alas, - // the caller may skip reading it from the binary (it's expensive to scan - // all the symbols) and so it may be nil even for the kernel executable. - // So additionally check that the start is within the user-mode half of - // the 64-bit address space. - return start - offset + loadSegment.Off - loadSegment.Vaddr, nil - } - // Various kernel heuristics and cases follow. - if loadSegment.Vaddr == start-offset { - return offset, nil - } - if start == 0 && limit != 0 { - // ChromeOS remaps its kernel to 0. Nothing else should come - // down this path. Empirical values: - // VADDR=0xffffffff80200000 - // stextOffset=0xffffffff80200198 - if stextOffset != nil { - return -*stextOffset, nil - } - return -loadSegment.Vaddr, nil - } - if start >= loadSegment.Vaddr && limit > start && (offset == 0 || offset == pageOffsetPpc64 || offset == start) { - // Some kernels look like: - // VADDR=0xffffffff80200000 - // stextOffset=0xffffffff80200198 - // Start=0xffffffff83200000 - // Limit=0xffffffff84200000 - // Offset=0 (0xc000000000000000 for PowerPC64) (== Start for ASLR kernel) - // So the base should be: - if stextOffset != nil && (start%pageSize) == (*stextOffset%pageSize) { - // perf uses the address of _stext as start. Some tools may - // adjust for this before calling GetBase, in which case the page - // alignment should be different from that of stextOffset. - return start - *stextOffset, nil - } - - return start - loadSegment.Vaddr, nil - } else if start%pageSize != 0 && stextOffset != nil && *stextOffset%pageSize == start%pageSize { - // ChromeOS remaps its kernel to 0 + start%pageSize. Nothing - // else should come down this path. 
Empirical values: - // start=0x198 limit=0x2f9fffff offset=0 - // VADDR=0xffffffff81000000 - // stextOffset=0xffffffff81000198 - return start - *stextOffset, nil - } - - return 0, fmt.Errorf("don't know how to handle EXEC segment: %v start=0x%x limit=0x%x offset=0x%x", *loadSegment, start, limit, offset) - case elf.ET_REL: - if offset != 0 { - return 0, fmt.Errorf("don't know how to handle mapping.Offset") - } - return start, nil - case elf.ET_DYN: - // The process mapping information, start = start of virtual address range, - // and offset = offset in the executable file of the start address, tells us - // that a runtime virtual address x maps to a file offset - // fx = x - start + offset. - if loadSegment == nil { - return start - offset, nil - } - // The program header, if not nil, indicates the offset in the file where - // the executable segment is located (loadSegment.Off), and the base virtual - // address where the first byte of the segment is loaded - // (loadSegment.Vaddr). A file offset fx maps to a virtual (symbol) address - // sx = fx - loadSegment.Off + loadSegment.Vaddr. - // - // Thus, a runtime virtual address x maps to a symbol address - // sx = x - start + offset - loadSegment.Off + loadSegment.Vaddr. - return start - offset + loadSegment.Off - loadSegment.Vaddr, nil - } - return 0, fmt.Errorf("don't know how to handle FileHeader.Type %v", fh.Type) -} - -// FindTextProgHeader finds the program segment header containing the .text -// section or nil if the segment cannot be found. -func FindTextProgHeader(f *elf.File) *elf.ProgHeader { - for _, s := range f.Sections { - if s.Name == ".text" { - // Find the LOAD segment containing the .text section. - for _, p := range f.Progs { - if p.Type == elf.PT_LOAD && p.Flags&elf.PF_X != 0 && s.Addr >= p.Vaddr && s.Addr < p.Vaddr+p.Memsz { - return &p.ProgHeader - } - } - } - } - return nil -} - -// ProgramHeadersForMapping returns the program segment headers that overlap -// the runtime mapping with file offset mapOff and memory size mapSz. We skip -// over segments zero file size because their file offset values are unreliable. -// Even if overlapping, a segment is not selected if its aligned file offset is -// greater than the mapping file offset, or if the mapping includes the last -// page of the segment, but not the full segment and the mapping includes -// additional pages after the segment end. -// The function returns a slice of pointers to the headers in the input -// slice, which are valid only while phdrs is not modified or discarded. -func ProgramHeadersForMapping(phdrs []elf.ProgHeader, mapOff, mapSz uint64) []*elf.ProgHeader { - const ( - // pageSize defines the virtual memory page size used by the loader. This - // value is dependent on the memory management unit of the CPU. The page - // size is 4KB virtually on all the architectures that we care about, so we - // define this metric as a constant. If we encounter architectures where - // page sie is not 4KB, we must try to guess the page size on the system - // where the profile was collected, possibly using the architecture - // specified in the ELF file header. - pageSize = 4096 - pageOffsetMask = pageSize - 1 - ) - mapLimit := mapOff + mapSz - var headers []*elf.ProgHeader - for i := range phdrs { - p := &phdrs[i] - // Skip over segments with zero file size. Their file offsets can have - // arbitrary values, see b/195427553. - if p.Filesz == 0 { - continue - } - segLimit := p.Off + p.Memsz - // The segment must overlap the mapping. 
- if p.Type == elf.PT_LOAD && mapOff < segLimit && p.Off < mapLimit { - // If the mapping offset is strictly less than the page aligned segment - // offset, then this mapping comes from a differnt segment, fixes - // b/179920361. - alignedSegOffset := uint64(0) - if p.Off > (p.Vaddr & pageOffsetMask) { - alignedSegOffset = p.Off - (p.Vaddr & pageOffsetMask) - } - if mapOff < alignedSegOffset { - continue - } - // If the mapping starts in the middle of the segment, it covers less than - // one page of the segment, and it extends at least one page past the - // segment, then this mapping comes from a different segment. - if mapOff > p.Off && (segLimit < mapOff+pageSize) && (mapLimit >= segLimit+pageSize) { - continue - } - headers = append(headers, p) - } - } - return headers -} - -// HeaderForFileOffset attempts to identify a unique program header that -// includes the given file offset. It returns an error if it cannot identify a -// unique header. -func HeaderForFileOffset(headers []*elf.ProgHeader, fileOffset uint64) (*elf.ProgHeader, error) { - var ph *elf.ProgHeader - for _, h := range headers { - if fileOffset >= h.Off && fileOffset < h.Off+h.Memsz { - if ph != nil { - // Assuming no other bugs, this can only happen if we have two or - // more small program segments that fit on the same page, and a - // segment other than the last one includes uninitialized data, or - // if the debug binary used for symbolization is stripped of some - // sections, so segment file sizes are smaller than memory sizes. - return nil, fmt.Errorf("found second program header (%#v) that matches file offset %x, first program header is %#v. Is this a stripped binary, or does the first program segment contain uninitialized data?", *h, fileOffset, *ph) - } - ph = h - } - } - if ph == nil { - return nil, fmt.Errorf("no program header matches file offset %x", fileOffset) - } - return ph, nil -} diff --git a/internal/pprof/elfexec/elfexec_test.go b/internal/pprof/elfexec/elfexec_test.go deleted file mode 100644 index fe6fc9c9896..00000000000 --- a/internal/pprof/elfexec/elfexec_test.go +++ /dev/null @@ -1,472 +0,0 @@ -// Copyright 2014 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
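The ET_DYN case removed above documents the mapping arithmetic (file offset fx = x - start + offset, symbol address sx = fx - Off + Vaddr). A small numeric sanity check of that identity, using made-up mapping and program-header values:

```go
package main

import "fmt"

func main() {
	// Hypothetical /proc/<pid>/maps entry for a shared library:
	// the runtime mapping starts at `start` and maps the file from `offset`.
	var (
		start  uint64 = 0x7f2a1c200000 // mapping start (runtime address)
		offset uint64 = 0x200000       // file offset of the mapping
	)
	// Hypothetical PT_LOAD header of the executable segment.
	var (
		segOff   uint64 = 0x200040 // loadSegment.Off
		segVaddr uint64 = 0x201040 // loadSegment.Vaddr
	)

	// Base as computed by the deleted ET_DYN branch:
	// base = start - offset + Off - Vaddr.
	base := start - offset + segOff - segVaddr

	// Check the derivation: for a runtime address x,
	// fx = x - start + offset and sx = fx - Off + Vaddr,
	// which must equal x - base.
	x := start + 0x1234
	fx := x - start + offset
	sx := fx - segOff + segVaddr
	fmt.Printf("base=%#x sx=%#x x-base=%#x match=%v\n", base, sx, x-base, sx == x-base)
}
```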
- -package elfexec - -import ( - "debug/elf" - "fmt" - "reflect" - "strings" - "testing" -) - -func TestGetBase(t *testing.T) { - - fhExec := &elf.FileHeader{ - Type: elf.ET_EXEC, - } - fhRel := &elf.FileHeader{ - Type: elf.ET_REL, - } - fhDyn := &elf.FileHeader{ - Type: elf.ET_DYN, - } - lsOffset := &elf.ProgHeader{ - Vaddr: 0x400000, - Off: 0x200000, - } - kernelHeader := &elf.ProgHeader{ - Vaddr: 0xffffffff81000000, - } - kernelAslrHeader := &elf.ProgHeader{ - Vaddr: 0xffffffff80200000, - Off: 0x1000, - } - ppc64KernelHeader := &elf.ProgHeader{ - Vaddr: 0xc000000000000000, - } - - testcases := []struct { - label string - fh *elf.FileHeader - loadSegment *elf.ProgHeader - stextOffset *uint64 - start, limit, offset uint64 - want uint64 - wanterr bool - }{ - {"exec", fhExec, nil, nil, 0x400000, 0, 0, 0, false}, - {"exec offset", fhExec, lsOffset, nil, 0x400000, 0x800000, 0, 0x200000, false}, - {"exec offset 2", fhExec, lsOffset, nil, 0x200000, 0x600000, 0, 0, false}, - {"exec nomap", fhExec, nil, nil, 0, 0, 0, 0, false}, - {"exec kernel", fhExec, kernelHeader, uint64p(0xffffffff81000198), 0xffffffff82000198, 0xffffffff83000198, 0, 0x1000000, false}, - {"exec kernel", fhExec, kernelHeader, uint64p(0xffffffff810002b8), 0xffffffff81000000, 0xffffffffa0000000, 0x0, 0x0, false}, - {"exec kernel ASLR", fhExec, kernelHeader, uint64p(0xffffffff810002b8), 0xffffffff81000000, 0xffffffffa0000000, 0xffffffff81000000, 0x0, false}, - // TODO(aalexand): Figure out where this test case exactly comes from and - // whether it's still relevant. - {"exec kernel ASLR 2", fhExec, kernelAslrHeader, nil, 0xffffffff83e00000, 0xfffffffffc3fffff, 0x3c00000, 0x3c00000, false}, - {"exec PPC64 kernel", fhExec, ppc64KernelHeader, uint64p(0xc000000000000000), 0xc000000000000000, 0xd00000001a730000, 0x0, 0x0, false}, - {"exec chromeos kernel", fhExec, kernelHeader, uint64p(0xffffffff81000198), 0, 0x10197, 0, 0x7efffe68, false}, - {"exec chromeos kernel 2", fhExec, kernelHeader, uint64p(0xffffffff81000198), 0, 0x10198, 0, 0x7efffe68, false}, - {"exec chromeos kernel 3", fhExec, kernelHeader, uint64p(0xffffffff81000198), 0x198, 0x100000, 0, 0x7f000000, false}, - {"exec chromeos kernel 4", fhExec, kernelHeader, uint64p(0xffffffff81200198), 0x198, 0x100000, 0, 0x7ee00000, false}, - {"exec chromeos kernel unremapped", fhExec, kernelHeader, uint64p(0xffffffff810001c8), 0xffffffff834001c8, 0xffffffffc0000000, 0xffffffff834001c8, 0x2400000, false}, - {"dyn", fhDyn, nil, nil, 0x200000, 0x300000, 0, 0x200000, false}, - {"dyn map", fhDyn, lsOffset, nil, 0x0, 0x300000, 0, 0xFFFFFFFFFFE00000, false}, - {"dyn nomap", fhDyn, nil, nil, 0x0, 0x0, 0, 0, false}, - {"dyn map+offset", fhDyn, lsOffset, nil, 0x900000, 0xa00000, 0x200000, 0x500000, false}, - {"rel", fhRel, nil, nil, 0x2000000, 0x3000000, 0, 0x2000000, false}, - {"rel nomap", fhRel, nil, nil, 0x0, ^uint64(0), 0, 0, false}, - {"rel offset", fhRel, nil, nil, 0x100000, 0x200000, 0x1, 0, true}, - } - - for _, tc := range testcases { - base, err := GetBase(tc.fh, tc.loadSegment, tc.stextOffset, tc.start, tc.limit, tc.offset) - if err != nil { - if !tc.wanterr { - t.Errorf("%s: want no error, got %v", tc.label, err) - } - continue - } - if tc.wanterr { - t.Errorf("%s: want error, got nil", tc.label) - continue - } - if base != tc.want { - t.Errorf("%s: want 0x%x, got 0x%x", tc.label, tc.want, base) - } - } -} - -func uint64p(n uint64) *uint64 { - return &n -} - -func TestFindProgHeaderForMapping(t *testing.T) { - buildList := func(headers []*elf.ProgHeader) (result string) { - builder 
:= strings.Builder{} - if err := builder.WriteByte('['); err != nil { - t.Error("Failed to append '[' to the builder") - } - defer func() { - if err := builder.WriteByte(']'); err != nil { - t.Error("Failed to append ']' to the builder") - } - result = builder.String() - }() - if len(headers) == 0 { - if _, err := builder.WriteString("nil"); err != nil { - t.Error("Failed to append 'nil' to the builder") - } - return - } - if _, err := builder.WriteString(fmt.Sprintf("%#v", *headers[0])); err != nil { - t.Error("Failed to append first header to the builder") - } - for i, h := range headers[1:] { - if _, err := builder.WriteString(fmt.Sprintf(", %#v", *h)); err != nil { - t.Errorf("Failed to append header %d to the builder", i+1) - } - } - return - } - - // Variuos ELF program headers for unit tests. - tinyHeaders := []elf.ProgHeader{ - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_X, Off: 0, Vaddr: 0, Paddr: 0, Filesz: 0xc80, Memsz: 0xc80, Align: 0x200000}, - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_W, Off: 0xc80, Vaddr: 0x200c80, Paddr: 0x200c80, Filesz: 0x1f0, Memsz: 0x1f0, Align: 0x200000}, - } - tinyBadBSSHeaders := []elf.ProgHeader{ - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_X, Off: 0, Vaddr: 0, Paddr: 0, Filesz: 0xc80, Memsz: 0xc80, Align: 0x200000}, - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_W, Off: 0xc80, Vaddr: 0x200c80, Paddr: 0x200c80, Filesz: 0x100, Memsz: 0x1f0, Align: 0x200000}, - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_W, Off: 0xd80, Vaddr: 0x400d80, Paddr: 0x400d80, Filesz: 0x90, Memsz: 0x90, Align: 0x200000}, - } - smallHeaders := []elf.ProgHeader{ - {Type: elf.PT_PHDR, Flags: elf.PF_R | elf.PF_X, Off: 0x40, Vaddr: 0x400040, Paddr: 0x400040, Filesz: 0x1f8, Memsz: 0x1f8, Align: 8}, - {Type: elf.PT_INTERP, Flags: elf.PF_R, Off: 0x238, Vaddr: 0x400238, Paddr: 0x400238, Filesz: 0x1c, Memsz: 0x1c, Align: 1}, - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_X, Off: 0, Vaddr: 0x400000, Paddr: 0x400000, Filesz: 0x6fc, Memsz: 0x6fc, Align: 0x200000}, - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_W, Off: 0xe10, Vaddr: 0x600e10, Paddr: 0x600e10, Filesz: 0x230, Memsz: 0x238, Align: 0x200000}, - {Type: elf.PT_DYNAMIC, Flags: elf.PF_R | elf.PF_W, Off: 0xe28, Vaddr: 0x600e28, Paddr: 0x600e28, Filesz: 0x1d0, Memsz: 0x1d0, Align: 8}, - } - smallBadBSSHeaders := []elf.ProgHeader{ - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_X, Off: 0, Vaddr: 0x200000, Paddr: 0x200000, Filesz: 0x6fc, Memsz: 0x6fc, Align: 0x200000}, - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_W, Off: 0x700, Vaddr: 0x400700, Paddr: 0x400700, Filesz: 0x500, Memsz: 0x710, Align: 0x200000}, - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_W, Off: 0xe10, Vaddr: 0x600e10, Paddr: 0x600e10, Filesz: 0x230, Memsz: 0x238, Align: 0x200000}, - } - mediumHeaders := []elf.ProgHeader{ - {Type: elf.PT_PHDR, Flags: elf.PF_R, Off: 0x40, Vaddr: 0x40, Paddr: 0x40, Filesz: 0x268, Memsz: 0x268, Align: 8}, - {Type: elf.PT_INTERP, Flags: elf.PF_R, Off: 0x2a8, Vaddr: 0x2a8, Paddr: 0x2a8, Filesz: 0x28, Memsz: 0x28, Align: 1}, - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_X, Off: 0, Vaddr: 0, Paddr: 0, Filesz: 0x51800, Memsz: 0x51800, Align: 0x200000}, - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_W, Off: 0x51800, Vaddr: 0x251800, Paddr: 0x251800, Filesz: 0x24a8, Memsz: 0x24e8, Align: 0x200000}, - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_W, Off: 0x53d00, Vaddr: 0x453d00, Paddr: 0x453d00, Filesz: 0x13a58, Memsz: 0x91a198, Align: 0x200000}, - {Type: elf.PT_TLS, Flags: elf.PF_R, Off: 0x51800, Vaddr: 0x51800, Paddr: 0x51800, Filesz: 
0x0, Memsz: 0x38, Align: 0x8}, - {Type: elf.PT_DYNAMIC, Flags: elf.PF_R | elf.PF_W, Off: 0x51d00, Vaddr: 0x251d00, Paddr: 0x251d00, Filesz: 0x1ef0, Memsz: 0x1ef0, Align: 8}, - } - largeHeaders := []elf.ProgHeader{ - {Type: elf.PT_PHDR, Flags: elf.PF_R, Off: 0x40, Vaddr: 0x40, Paddr: 0x40, Filesz: 0x268, Memsz: 0x268, Align: 8}, - {Type: elf.PT_INTERP, Flags: elf.PF_R, Off: 0x2a8, Vaddr: 0x2a8, Paddr: 0x2a8, Filesz: 0x28, Memsz: 0x28, Align: 1}, - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_X, Off: 0, Vaddr: 0, Paddr: 0, Filesz: 0x2ec5d2c0, Memsz: 0x2ec5d2c0, Align: 0x200000}, - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_W, Off: 0x2ec5d2c0, Vaddr: 0x2ee5d2c0, Paddr: 0x2ee5d2c0, Filesz: 0x1361118, Memsz: 0x1361150, Align: 0x200000}, - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_W, Off: 0x2ffbe440, Vaddr: 0x303be440, Paddr: 0x303be440, Filesz: 0x4637c0, Memsz: 0xc91610, Align: 0x200000}, - {Type: elf.PT_TLS, Flags: elf.PF_R, Off: 0x2ec5d2c0, Vaddr: 0x2ee5d2c0, Paddr: 0x2ee5d2c0, Filesz: 0x120, Memsz: 0x103f8, Align: 0x40}, - {Type: elf.PT_DYNAMIC, Flags: elf.PF_R | elf.PF_W, Off: 0x2ffbc9e0, Vaddr: 0x301bc9e0, Paddr: 0x301bc9e0, Filesz: 0x1f0, Memsz: 0x1f0, Align: 8}, - } - ffmpegHeaders := []elf.ProgHeader{ - {Type: elf.PT_PHDR, Flags: elf.PF_R, Off: 0x40, Vaddr: 0x200040, Paddr: 0x200040, Filesz: 0x1f8, Memsz: 0x1f8, Align: 8}, - {Type: elf.PT_INTERP, Flags: elf.PF_R, Off: 0x238, Vaddr: 0x200238, Paddr: 0x200238, Filesz: 0x28, Memsz: 0x28, Align: 1}, - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_X, Off: 0, Vaddr: 0x200000, Paddr: 0x200000, Filesz: 0x48d8410, Memsz: 0x48d8410, Align: 0x200000}, - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_W, Off: 0x48d8440, Vaddr: 0x4cd8440, Paddr: 0x4cd8440, Filesz: 0x18cbe0, Memsz: 0xd2fb70, Align: 0x200000}, - {Type: elf.PT_TLS, Flags: elf.PF_R, Off: 0x48d8440, Vaddr: 0x4cd8440, Paddr: 0x4cd8440, Filesz: 0xa8, Memsz: 0x468, Align: 0x40}, - {Type: elf.PT_DYNAMIC, Flags: elf.PF_R | elf.PF_W, Off: 0x4a63ad0, Vaddr: 0x4e63ad0, Paddr: 0x4e63ad0, Filesz: 0x200, Memsz: 0x200, Align: 8}, - } - sentryHeaders := []elf.ProgHeader{ - {Type: elf.PT_LOAD, Flags: elf.PF_X + elf.PF_R, Off: 0x0, Vaddr: 0x7f0000000000, Paddr: 0x7f0000000000, Filesz: 0xbc64d5, Memsz: 0xbc64d5, Align: 0x1000}, - {Type: elf.PT_LOAD, Flags: elf.PF_R, Off: 0xbc7000, Vaddr: 0x7f0000bc7000, Paddr: 0x7f0000bc7000, Filesz: 0xcd6b30, Memsz: 0xcd6b30, Align: 0x1000}, - {Type: elf.PT_LOAD, Flags: elf.PF_W + elf.PF_R, Off: 0x189e000, Vaddr: 0x7f000189e000, Paddr: 0x7f000189e000, Filesz: 0x58180, Memsz: 0x92d10, Align: 0x1000}, - } - - for _, tc := range []struct { - desc string - phdrs []elf.ProgHeader - pgoff uint64 - memsz uint64 - wantHeaders []*elf.ProgHeader - }{ - { - desc: "no prog headers", - phdrs: nil, - pgoff: 0, - memsz: 0x1000, - wantHeaders: nil, - }, - { - desc: "tiny file, 4KB at offset 0 matches both headers, b/178747588", - phdrs: tinyHeaders, - pgoff: 0, - memsz: 0x1000, - wantHeaders: []*elf.ProgHeader{ - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_X, Off: 0, Vaddr: 0, Paddr: 0, Filesz: 0xc80, Memsz: 0xc80, Align: 0x200000}, - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_W, Off: 0xc80, Vaddr: 0x200c80, Paddr: 0x200c80, Filesz: 0x1f0, Memsz: 0x1f0, Align: 0x200000}, - }, - }, - { - desc: "tiny file, file offset 4KB matches no headers", - phdrs: tinyHeaders, - pgoff: 0x1000, - memsz: 0x1000, - wantHeaders: nil, - }, - { - desc: "tiny file with unaligned memsz matches executable segment", - phdrs: tinyHeaders, - pgoff: 0, - memsz: 0xc80, - wantHeaders: []*elf.ProgHeader{{Type: 
elf.PT_LOAD, Flags: elf.PF_R | elf.PF_X, Off: 0, Vaddr: 0, Paddr: 0, Filesz: 0xc80, Memsz: 0xc80, Align: 0x200000}}, - }, - { - desc: "tiny file with unaligned offset matches data segment", - phdrs: tinyHeaders, - pgoff: 0xc80, - memsz: 0x1000, - wantHeaders: []*elf.ProgHeader{{Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_W, Off: 0xc80, Vaddr: 0x200c80, Paddr: 0x200c80, Filesz: 0x1f0, Memsz: 0x1f0, Align: 0x200000}}, - }, - { - desc: "tiny bad BSS file, 4KB at offset 0 matches all three headers", - phdrs: tinyBadBSSHeaders, - pgoff: 0, - memsz: 0x1000, - wantHeaders: []*elf.ProgHeader{ - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_X, Off: 0, Vaddr: 0, Paddr: 0, Filesz: 0xc80, Memsz: 0xc80, Align: 0x200000}, - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_W, Off: 0xc80, Vaddr: 0x200c80, Paddr: 0x200c80, Filesz: 0x100, Memsz: 0x1f0, Align: 0x200000}, - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_W, Off: 0xd80, Vaddr: 0x400d80, Paddr: 0x400d80, Filesz: 0x90, Memsz: 0x90, Align: 0x200000}, - }, - }, - { - desc: "small file, offset 0, memsz 4KB matches both segments", - phdrs: smallHeaders, - pgoff: 0, - memsz: 0x1000, - wantHeaders: []*elf.ProgHeader{ - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_X, Off: 0, Vaddr: 0x400000, Paddr: 0x400000, Filesz: 0x6fc, Memsz: 0x6fc, Align: 0x200000}, - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_W, Off: 0xe10, Vaddr: 0x600e10, Paddr: 0x600e10, Filesz: 0x230, Memsz: 0x238, Align: 0x200000}, - }, - }, - { - desc: "small file, offset 0, memsz 8KB matches both segments", - phdrs: smallHeaders, - pgoff: 0, - memsz: 0x2000, - wantHeaders: []*elf.ProgHeader{ - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_X, Off: 0, Vaddr: 0x400000, Paddr: 0x400000, Filesz: 0x6fc, Memsz: 0x6fc, Align: 0x200000}, - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_W, Off: 0xe10, Vaddr: 0x600e10, Paddr: 0x600e10, Filesz: 0x230, Memsz: 0x238, Align: 0x200000}, - }, - }, - { - desc: "small file, offset 4KB matches data segment", - phdrs: smallHeaders, - pgoff: 0x1000, - memsz: 0x1000, - wantHeaders: []*elf.ProgHeader{{Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_W, Off: 0xe10, Vaddr: 0x600e10, Paddr: 0x600e10, Filesz: 0x230, Memsz: 0x238, Align: 0x200000}}, - }, - { - desc: "small file, offset 8KB matches no segment", - phdrs: smallHeaders, - pgoff: 0x2000, - memsz: 0x1000, - wantHeaders: nil, - }, - { - desc: "small bad BSS file, offset 0, memsz 4KB matches all three segments", - phdrs: smallBadBSSHeaders, - pgoff: 0, - memsz: 0x1000, - wantHeaders: []*elf.ProgHeader{ - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_X, Off: 0, Vaddr: 0x200000, Paddr: 0x200000, Filesz: 0x6fc, Memsz: 0x6fc, Align: 0x200000}, - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_W, Off: 0x700, Vaddr: 0x400700, Paddr: 0x400700, Filesz: 0x500, Memsz: 0x710, Align: 0x200000}, - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_W, Off: 0xe10, Vaddr: 0x600e10, Paddr: 0x600e10, Filesz: 0x230, Memsz: 0x238, Align: 0x200000}, - }, - }, - { - desc: "small bad BSS file, offset 0, memsz 8KB matches all three segments", - phdrs: smallBadBSSHeaders, - pgoff: 0, - memsz: 0x2000, - wantHeaders: []*elf.ProgHeader{ - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_X, Off: 0, Vaddr: 0x200000, Paddr: 0x200000, Filesz: 0x6fc, Memsz: 0x6fc, Align: 0x200000}, - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_W, Off: 0x700, Vaddr: 0x400700, Paddr: 0x400700, Filesz: 0x500, Memsz: 0x710, Align: 0x200000}, - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_W, Off: 0xe10, Vaddr: 0x600e10, Paddr: 0x600e10, Filesz: 0x230, Memsz: 0x238, Align: 0x200000}, - }, - 
}, - { - desc: "small bad BSS file, offset 4KB matches second data segment", - phdrs: smallBadBSSHeaders, - pgoff: 0x1000, - memsz: 0x1000, - wantHeaders: []*elf.ProgHeader{{Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_W, Off: 0xe10, Vaddr: 0x600e10, Paddr: 0x600e10, Filesz: 0x230, Memsz: 0x238, Align: 0x200000}}, - }, - { - desc: "medium file large mapping that includes all address space matches executable segment, b/179920361", - phdrs: mediumHeaders, - pgoff: 0, - memsz: 0xd6e000, - wantHeaders: []*elf.ProgHeader{{Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_X, Off: 0, Vaddr: 0, Paddr: 0, Filesz: 0x51800, Memsz: 0x51800, Align: 0x200000}}, - }, - { - desc: "large file executable mapping matches executable segment", - phdrs: largeHeaders, - pgoff: 0, - memsz: 0x2ec5e000, - wantHeaders: []*elf.ProgHeader{{Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_X, Off: 0, Vaddr: 0, Paddr: 0, Filesz: 0x2ec5d2c0, Memsz: 0x2ec5d2c0, Align: 0x200000}}, - }, - { - desc: "large file first data mapping matches first data segment", - phdrs: largeHeaders, - pgoff: 0x2ec5d000, - memsz: 0x1362000, - wantHeaders: []*elf.ProgHeader{{Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_W, Off: 0x2ec5d2c0, Vaddr: 0x2ee5d2c0, Paddr: 0x2ee5d2c0, Filesz: 0x1361118, Memsz: 0x1361150, Align: 0x200000}}, - }, - { - desc: "large file, split second data mapping matches second data segment", - phdrs: largeHeaders, - pgoff: 0x2ffbe000, - memsz: 0xb11000, - wantHeaders: []*elf.ProgHeader{{Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_W, Off: 0x2ffbe440, Vaddr: 0x303be440, Paddr: 0x303be440, Filesz: 0x4637c0, Memsz: 0xc91610, Align: 0x200000}}, - }, - { - desc: "sentry headers, mapping for last page of executable segment matches executable segment", - phdrs: sentryHeaders, - pgoff: 0xbc6000, - memsz: 0x1000, - wantHeaders: []*elf.ProgHeader{{Type: elf.PT_LOAD, Flags: elf.PF_X + elf.PF_R, Off: 0x0, Vaddr: 0x7f0000000000, Paddr: 0x7f0000000000, Filesz: 0xbc64d5, Memsz: 0xbc64d5, Align: 0x1000}}, - }, - { - desc: "ffmpeg headers, split mapping for executable segment matches executable segment, b/193176694", - phdrs: ffmpegHeaders, - pgoff: 0, - memsz: 0x48d8000, - wantHeaders: []*elf.ProgHeader{{Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_X, Off: 0, Vaddr: 0x200000, Paddr: 0x200000, Filesz: 0x48d8410, Memsz: 0x48d8410, Align: 0x200000}}, - }, - { - desc: "segments with no file bits (b/195427553), mapping for executable segment matches executable segment", - phdrs: []elf.ProgHeader{ - {Type: elf.PT_LOAD, Flags: elf.PF_R, Off: 0x0, Vaddr: 0x0, Paddr: 0x0, Filesz: 0x115000, Memsz: 0x115000, Align: 0x1000}, - {Type: elf.PT_LOAD, Flags: elf.PF_X + elf.PF_R, Off: 0x115000, Vaddr: 0x115000, Paddr: 0x115000, Filesz: 0x361e15, Memsz: 0x361e15, Align: 0x1000}, - {Type: elf.PT_LOAD, Flags: elf.PF_W + elf.PF_R, Off: 0x0, Vaddr: 0x477000, Paddr: 0x477000, Filesz: 0x0, Memsz: 0x33c, Align: 0x1000}, - {Type: elf.PT_LOAD, Flags: elf.PF_R, Off: 0x0, Vaddr: 0x478000, Paddr: 0x478000, Filesz: 0x0, Memsz: 0x47dc28, Align: 0x1000}, - {Type: elf.PT_LOAD, Flags: elf.PF_R, Off: 0x477000, Vaddr: 0x8f6000, Paddr: 0x8f6000, Filesz: 0x140, Memsz: 0x140, Align: 0x1000}, - {Type: elf.PT_LOAD, Flags: elf.PF_W + elf.PF_R, Off: 0x478000, Vaddr: 0x8f7000, Paddr: 0x8f7000, Filesz: 0x38, Memsz: 0x38, Align: 0x1000}, - }, - pgoff: 0x115000, - memsz: 0x362000, - wantHeaders: []*elf.ProgHeader{{Type: elf.PT_LOAD, Flags: elf.PF_X + elf.PF_R, Off: 0x115000, Vaddr: 0x115000, Paddr: 0x115000, Filesz: 0x361e15, Memsz: 0x361e15, Align: 0x1000}}, - }, - } { - t.Run(tc.desc, func(t 
*testing.T) { - gotHeaders := ProgramHeadersForMapping(tc.phdrs, tc.pgoff, tc.memsz) - if !reflect.DeepEqual(gotHeaders, tc.wantHeaders) { - t.Errorf("got program headers %q; want %q", buildList(gotHeaders), buildList(tc.wantHeaders)) - } - }) - } -} - -func TestHeaderForFileOffset(t *testing.T) { - for _, tc := range []struct { - desc string - headers []*elf.ProgHeader - fileOffset uint64 - wantError bool - want *elf.ProgHeader - }{ - { - desc: "no headers, want error", - headers: nil, - wantError: true, - }, - { - desc: "three headers, BSS in last segment, file offset selects first header", - headers: []*elf.ProgHeader{ - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_X, Off: 0, Vaddr: 0, Paddr: 0, Filesz: 0xc80, Memsz: 0xc80, Align: 0x200000}, - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_W, Off: 0xc80, Vaddr: 0x200c80, Paddr: 0x200c80, Filesz: 0x1f0, Memsz: 0x1f0, Align: 0x200000}, - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_W, Off: 0xe70, Vaddr: 0x400e70, Paddr: 0x400e70, Filesz: 0x90, Memsz: 0x100, Align: 0x200000}, - }, - fileOffset: 0xc79, - want: &elf.ProgHeader{Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_X, Off: 0, Vaddr: 0, Paddr: 0, Filesz: 0xc80, Memsz: 0xc80, Align: 0x200000}, - }, - { - desc: "three headers, BSS in last segment, file offset selects second header", - headers: []*elf.ProgHeader{ - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_X, Off: 0, Vaddr: 0, Paddr: 0, Filesz: 0xc80, Memsz: 0xc80, Align: 0x200000}, - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_W, Off: 0xc80, Vaddr: 0x200c80, Paddr: 0x200c80, Filesz: 0x1f0, Memsz: 0x1f0, Align: 0x200000}, - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_W, Off: 0xe70, Vaddr: 0x400e70, Paddr: 0x400e70, Filesz: 0x90, Memsz: 0x100, Align: 0x200000}, - }, - fileOffset: 0xc80, - want: &elf.ProgHeader{Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_W, Off: 0xc80, Vaddr: 0x200c80, Paddr: 0x200c80, Filesz: 0x1f0, Memsz: 0x1f0, Align: 0x200000}, - }, - { - desc: "three headers, BSS in last segment, file offset selects third header", - headers: []*elf.ProgHeader{ - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_X, Off: 0, Vaddr: 0, Paddr: 0, Filesz: 0xc80, Memsz: 0xc80, Align: 0x200000}, - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_W, Off: 0xc80, Vaddr: 0x200c80, Paddr: 0x200c80, Filesz: 0x1f0, Memsz: 0x1f0, Align: 0x200000}, - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_W, Off: 0xe70, Vaddr: 0x400e70, Paddr: 0x400e70, Filesz: 0x90, Memsz: 0x100, Align: 0x200000}, - }, - fileOffset: 0xef0, - want: &elf.ProgHeader{Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_W, Off: 0xe70, Vaddr: 0x400e70, Paddr: 0x400e70, Filesz: 0x90, Memsz: 0x100, Align: 0x200000}, - }, - { - desc: "three headers, BSS in last segment, file offset in uninitialized section selects third header", - headers: []*elf.ProgHeader{ - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_X, Off: 0, Vaddr: 0, Paddr: 0, Filesz: 0xc80, Memsz: 0xc80, Align: 0x200000}, - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_W, Off: 0xc80, Vaddr: 0x200c80, Paddr: 0x200c80, Filesz: 0x1f0, Memsz: 0x1f0, Align: 0x200000}, - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_W, Off: 0xe70, Vaddr: 0x400e70, Paddr: 0x400e70, Filesz: 0x90, Memsz: 0x100, Align: 0x200000}, - }, - fileOffset: 0xf40, - want: &elf.ProgHeader{Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_W, Off: 0xe70, Vaddr: 0x400e70, Paddr: 0x400e70, Filesz: 0x90, Memsz: 0x100, Align: 0x200000}, - }, - { - desc: "three headers, BSS in last segment, file offset past any segment gives error", - headers: []*elf.ProgHeader{ - {Type: elf.PT_LOAD, Flags: 
elf.PF_R | elf.PF_X, Off: 0, Vaddr: 0, Paddr: 0, Filesz: 0xc80, Memsz: 0xc80, Align: 0x200000}, - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_W, Off: 0xc80, Vaddr: 0x200c80, Paddr: 0x200c80, Filesz: 0x1f0, Memsz: 0x1f0, Align: 0x200000}, - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_W, Off: 0xe70, Vaddr: 0x400e70, Paddr: 0x400e70, Filesz: 0x90, Memsz: 0x100, Align: 0x200000}, - }, - fileOffset: 0xf70, - wantError: true, - }, - { - desc: "three headers, BSS in second segment, file offset in mapped section selects second header", - headers: []*elf.ProgHeader{ - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_X, Off: 0, Vaddr: 0, Paddr: 0, Filesz: 0xc80, Memsz: 0xc80, Align: 0x200000}, - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_W, Off: 0xc80, Vaddr: 0x200c80, Paddr: 0x200c80, Filesz: 0x100, Memsz: 0x1f0, Align: 0x200000}, - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_W, Off: 0xd80, Vaddr: 0x400d80, Paddr: 0x400d80, Filesz: 0x100, Memsz: 0x100, Align: 0x200000}, - }, - fileOffset: 0xd79, - want: &elf.ProgHeader{Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_W, Off: 0xc80, Vaddr: 0x200c80, Paddr: 0x200c80, Filesz: 0x100, Memsz: 0x1f0, Align: 0x200000}, - }, - { - desc: "three headers, BSS in second segment, file offset in unmapped section gives error", - headers: []*elf.ProgHeader{ - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_X, Off: 0, Vaddr: 0, Paddr: 0, Filesz: 0xc80, Memsz: 0xc80, Align: 0x200000}, - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_W, Off: 0xc80, Vaddr: 0x200c80, Paddr: 0x200c80, Filesz: 0x100, Memsz: 0x1f0, Align: 0x200000}, - {Type: elf.PT_LOAD, Flags: elf.PF_R | elf.PF_W, Off: 0xd80, Vaddr: 0x400d80, Paddr: 0x400d80, Filesz: 0x100, Memsz: 0x100, Align: 0x200000}, - }, - fileOffset: 0xd80, - wantError: true, - }, - } { - t.Run(tc.desc, func(t *testing.T) { - got, err := HeaderForFileOffset(tc.headers, tc.fileOffset) - if (err != nil) != tc.wantError { - t.Errorf("got error %v, want any error=%v", err, tc.wantError) - } - if err != nil { - return - } - if !reflect.DeepEqual(got, tc.want) { - t.Errorf("got program header %#v, want %#v", got, tc.want) - } - }) - } -} diff --git a/internal/pprof/graph/dotgraph.go b/internal/pprof/graph/dotgraph.go deleted file mode 100644 index d1c6221cbf0..00000000000 --- a/internal/pprof/graph/dotgraph.go +++ /dev/null @@ -1,491 +0,0 @@ -// Copyright 2014 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package graph - -import ( - "fmt" - "io" - "math" - "path/filepath" - "strings" - - "github.com/parca-dev/parca/internal/pprof/measurement" -) - -// DotAttributes contains details about the graph itself, giving -// insight into how its elements should be rendered. -type DotAttributes struct { - Nodes map[*Node]*DotNodeAttributes // A map allowing each Node to have its own visualization option -} - -// DotNodeAttributes contains Node specific visualization options. 
-type DotNodeAttributes struct { - Shape string // The optional shape of the node when rendered visually - Bold bool // If the node should be bold or not - Peripheries int // An optional number of borders to place around a node - URL string // An optional url link to add to a node - Formatter func(*NodeInfo) string // An optional formatter for the node's label -} - -// DotConfig contains attributes about how a graph should be -// constructed and how it should look. -type DotConfig struct { - Title string // The title of the DOT graph - LegendURL string // The URL to link to from the legend. - Labels []string // The labels for the DOT's legend - - FormatValue func(int64) string // A formatting function for values - Total int64 // The total weight of the graph, used to compute percentages -} - -const maxNodelets = 4 // Number of nodelets for labels (both numeric and non) - -// ComposeDot creates and writes a in the DOT format to the writer, using -// the configurations given. -func ComposeDot(w io.Writer, g *Graph, a *DotAttributes, c *DotConfig) { - builder := &builder{w, a, c} - - // Begin constructing DOT by adding a title and legend. - builder.start() - defer builder.finish() - builder.addLegend() - - if len(g.Nodes) == 0 { - return - } - - // Preprocess graph to get id map and find max flat. - nodeIDMap := make(map[*Node]int) - hasNodelets := make(map[*Node]bool) - - maxFlat := float64(abs64(g.Nodes[0].FlatValue())) - for i, n := range g.Nodes { - nodeIDMap[n] = i + 1 - if float64(abs64(n.FlatValue())) > maxFlat { - maxFlat = float64(abs64(n.FlatValue())) - } - } - - edges := EdgeMap{} - - // Add nodes and nodelets to DOT builder. - for _, n := range g.Nodes { - builder.addNode(n, nodeIDMap[n], maxFlat) - hasNodelets[n] = builder.addNodelets(n, nodeIDMap[n]) - - // Collect all edges. Use a fake node to support multiple incoming edges. - for _, e := range n.Out { - edges[&Node{}] = e - } - } - - // Add edges to DOT builder. Sort edges by frequency as a hint to the graph layout engine. - for _, e := range edges.Sort() { - builder.addEdge(e, nodeIDMap[e.Src], nodeIDMap[e.Dest], hasNodelets[e.Src]) - } -} - -// builder wraps an io.Writer and understands how to compose DOT formatted elements. -type builder struct { - io.Writer - attributes *DotAttributes - config *DotConfig -} - -// start generates a title and initial node in DOT format. -func (b *builder) start() { - graphname := "unnamed" - if b.config.Title != "" { - graphname = b.config.Title - } - fmt.Fprintln(b, `digraph "`+graphname+`" {`) - fmt.Fprintln(b, `node [style=filled fillcolor="#f8f8f8"]`) -} - -// finish closes the opening curly bracket in the constructed DOT buffer. -func (b *builder) finish() { - fmt.Fprintln(b, "}") -} - -// addLegend generates a legend in DOT format. -func (b *builder) addLegend() { - labels := b.config.Labels - if len(labels) == 0 { - return - } - title := labels[0] - fmt.Fprintf(b, `subgraph cluster_L { "%s" [shape=box fontsize=16`, title) - fmt.Fprintf(b, ` label="%s\l"`, strings.Join(escapeAllForDot(labels), `\l`)) - if b.config.LegendURL != "" { - fmt.Fprintf(b, ` URL="%s" target="_blank"`, b.config.LegendURL) - } - if b.config.Title != "" { - fmt.Fprintf(b, ` tooltip="%s"`, b.config.Title) - } - fmt.Fprintf(b, "] }\n") -} - -// addNode generates a graph node in DOT format. -func (b *builder) addNode(node *Node, nodeID int, maxFlat float64) { - flat, cum := node.FlatValue(), node.CumValue() - attrs := b.attributes.Nodes[node] - - // Populate label for node. 
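A minimal standalone sketch (not part of the removed dotgraph.go) of the DOT text this builder shape produces: start() opens the digraph and sets the default node style, nodes and edges are emitted as N<i> statements (addNode/addEdge), and finish() closes the brace. Labels and percentages below are made up for illustration.

package main

import (
	"bytes"
	"fmt"
)

func main() {
	var buf bytes.Buffer
	// start(): open the digraph and set the default node style.
	fmt.Fprintln(&buf, `digraph "example" {`)
	fmt.Fprintln(&buf, `node [style=filled fillcolor="#f8f8f8"]`)
	// addNode(): one statement per node, the label carries flat/cum values.
	fmt.Fprintln(&buf, `N1 [label="src\n10 (10.00%)" id="node1" shape=box]`)
	fmt.Fprintln(&buf, `N2 [label="dest\n15 (15.00%)" id="node2" shape=box]`)
	// addEdge(): weight shown on the edge label.
	fmt.Fprintln(&buf, `N1 -> N2 [label=" 10"]`)
	// finish(): close the brace opened by start().
	fmt.Fprintln(&buf, "}")
	fmt.Print(buf.String())
}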
- var label string - if attrs != nil && attrs.Formatter != nil { - label = attrs.Formatter(&node.Info) - } else { - label = multilinePrintableName(&node.Info) - } - - flatValue := b.config.FormatValue(flat) - if flat != 0 { - label = label + fmt.Sprintf(`%s (%s)`, - flatValue, - strings.TrimSpace(measurement.Percentage(flat, b.config.Total))) - } else { - label = label + "0" - } - cumValue := flatValue - if cum != flat { - if flat != 0 { - label = label + `\n` - } else { - label = label + " " - } - cumValue = b.config.FormatValue(cum) - label = label + fmt.Sprintf(`of %s (%s)`, - cumValue, - strings.TrimSpace(measurement.Percentage(cum, b.config.Total))) - } - - // Scale font sizes from 8 to 24 based on percentage of flat frequency. - // Use non linear growth to emphasize the size difference. - baseFontSize, maxFontGrowth := 8, 16.0 - fontSize := baseFontSize - if maxFlat != 0 && flat != 0 && float64(abs64(flat)) <= maxFlat { - fontSize += int(math.Ceil(maxFontGrowth * math.Sqrt(float64(abs64(flat))/maxFlat))) - } - - // Determine node shape. - shape := "box" - if attrs != nil && attrs.Shape != "" { - shape = attrs.Shape - } - - // Create DOT attribute for node. - attr := fmt.Sprintf(`label="%s" id="node%d" fontsize=%d shape=%s tooltip="%s (%s)" color="%s" fillcolor="%s"`, - label, nodeID, fontSize, shape, escapeForDot(node.Info.PrintableName()), cumValue, - dotColor(float64(node.CumValue())/float64(abs64(b.config.Total)), false), - dotColor(float64(node.CumValue())/float64(abs64(b.config.Total)), true)) - - // Add on extra attributes if provided. - if attrs != nil { - // Make bold if specified. - if attrs.Bold { - attr += ` style="bold,filled"` - } - - // Add peripheries if specified. - if attrs.Peripheries != 0 { - attr += fmt.Sprintf(` peripheries=%d`, attrs.Peripheries) - } - - // Add URL if specified. target="_blank" forces the link to open in a new tab. - if attrs.URL != "" { - attr += fmt.Sprintf(` URL="%s" target="_blank"`, attrs.URL) - } - } - - fmt.Fprintf(b, "N%d [%s]\n", nodeID, attr) -} - -// addNodelets generates the DOT boxes for the node tags if they exist. -func (b *builder) addNodelets(node *Node, nodeID int) bool { - var nodelets string - - // Populate two Tag slices, one for LabelTags and one for NumericTags. - var ts []*Tag - lnts := make(map[string][]*Tag) - for _, t := range node.LabelTags { - ts = append(ts, t) - } - for l, tm := range node.NumericTags { - for _, t := range tm { - lnts[l] = append(lnts[l], t) - } - } - - // For leaf nodes, print cumulative tags (includes weight from - // children that have been deleted). - // For internal nodes, print only flat tags. - flatTags := len(node.Out) > 0 - - // Select the top maxNodelets alphanumeric labels by weight. 
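A standalone arithmetic check (not from the removed file) of the square-root font scaling in addNode above, assuming maxFlat is 100: small flat values stay near the 8pt base while the largest node reaches 24pt.

package main

import (
	"fmt"
	"math"
)

func main() {
	const baseFontSize, maxFontGrowth = 8, 16.0
	const maxFlat = 100.0
	for _, flat := range []float64{1, 25, 100} {
		// Same formula as addNode: non-linear growth emphasizes size differences.
		size := baseFontSize + int(math.Ceil(maxFontGrowth*math.Sqrt(flat/maxFlat)))
		fmt.Printf("flat=%v fontsize=%d\n", flat, size) // 10, 16, 24
	}
}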
- SortTags(ts, flatTags) - if len(ts) > maxNodelets { - ts = ts[:maxNodelets] - } - for i, t := range ts { - w := t.CumValue() - if flatTags { - w = t.FlatValue() - } - if w == 0 { - continue - } - weight := b.config.FormatValue(w) - nodelets += fmt.Sprintf(`N%d_%d [label = "%s" id="N%d_%d" fontsize=8 shape=box3d tooltip="%s"]`+"\n", nodeID, i, t.Name, nodeID, i, weight) - nodelets += fmt.Sprintf(`N%d -> N%d_%d [label=" %s" weight=100 tooltip="%s" labeltooltip="%s"]`+"\n", nodeID, nodeID, i, weight, weight, weight) - if nts := lnts[t.Name]; nts != nil { - nodelets += b.numericNodelets(nts, maxNodelets, flatTags, fmt.Sprintf(`N%d_%d`, nodeID, i)) - } - } - - if nts := lnts[""]; nts != nil { - nodelets += b.numericNodelets(nts, maxNodelets, flatTags, fmt.Sprintf(`N%d`, nodeID)) - } - - fmt.Fprint(b, nodelets) - return nodelets != "" -} - -func (b *builder) numericNodelets(nts []*Tag, maxNumNodelets int, flatTags bool, source string) string { - nodelets := "" - - // Collapse numeric labels into maxNumNodelets buckets, of the form: - // 1MB..2MB, 3MB..5MB, ... - for j, t := range b.collapsedTags(nts, maxNumNodelets, flatTags) { - w, attr := t.CumValue(), ` style="dotted"` - if flatTags || t.FlatValue() == t.CumValue() { - w, attr = t.FlatValue(), "" - } - if w != 0 { - weight := b.config.FormatValue(w) - nodelets += fmt.Sprintf(`N%s_%d [label = "%s" id="N%s_%d" fontsize=8 shape=box3d tooltip="%s"]`+"\n", source, j, t.Name, source, j, weight) - nodelets += fmt.Sprintf(`%s -> N%s_%d [label=" %s" weight=100 tooltip="%s" labeltooltip="%s"%s]`+"\n", source, source, j, weight, weight, weight, attr) - } - } - return nodelets -} - -// addEdge generates a graph edge in DOT format. -func (b *builder) addEdge(edge *Edge, from, to int, hasNodelets bool) { - var inline string - if edge.Inline { - inline = `\n (inline)` - } - w := b.config.FormatValue(edge.WeightValue()) - attr := fmt.Sprintf(`label=" %s%s"`, w, inline) - if b.config.Total != 0 { - // Note: edge.weight > b.config.Total is possible for profile diffs. - if weight := 1 + int(min64(abs64(edge.WeightValue()*100/b.config.Total), 100)); weight > 1 { - attr = fmt.Sprintf(`%s weight=%d`, attr, weight) - } - if width := 1 + int(min64(abs64(edge.WeightValue()*5/b.config.Total), 5)); width > 1 { - attr = fmt.Sprintf(`%s penwidth=%d`, attr, width) - } - attr = fmt.Sprintf(`%s color="%s"`, attr, - dotColor(float64(edge.WeightValue())/float64(abs64(b.config.Total)), false)) - } - arrow := "->" - if edge.Residual { - arrow = "..." - } - tooltip := fmt.Sprintf(`"%s %s %s (%s)"`, - escapeForDot(edge.Src.Info.PrintableName()), arrow, - escapeForDot(edge.Dest.Info.PrintableName()), w) - attr = fmt.Sprintf(`%s tooltip=%s labeltooltip=%s`, attr, tooltip, tooltip) - - if edge.Residual { - attr = attr + ` style="dotted"` - } - - if hasNodelets { - // Separate children further if source has tags. - attr = attr + " minlen=2" - } - - fmt.Fprintf(b, "N%d -> N%d [%s]\n", from, to, attr) -} - -// dotColor returns a color for the given score (between -1.0 and -// 1.0), with -1.0 colored green, 0.0 colored grey, and 1.0 colored -// red. If isBackground is true, then a light (low-saturation) -// color is returned (suitable for use as a background color); -// otherwise, a darker color is returned (suitable for use as a -// foreground color). 
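Another standalone check (not from the removed file) of the edge attribute scaling in addEdge above, for a graph whose total weight is 100: the layout weight is capped at 101 and the pen width at 6.

package main

import "fmt"

func min64(a, b int64) int64 {
	if a < b {
		return a
	}
	return b
}

func main() {
	const total int64 = 100
	for _, w := range []int64{2, 40, 100} {
		// Same scaling as addEdge for an edge of weight w.
		weight := 1 + min64(w*100/total, 100)
		width := 1 + min64(w*5/total, 5)
		fmt.Printf("edge=%d weight=%d penwidth=%d\n", w, weight, width)
	}
	// Prints weight 3/41/101 and penwidth 1/3/6; addEdge only emits an
	// attribute when its computed value is greater than 1.
}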
-func dotColor(score float64, isBackground bool) string { - // A float between 0.0 and 1.0, indicating the extent to which - // colors should be shifted away from grey (to make positive and - // negative values easier to distinguish, and to make more use of - // the color range.) - const shift = 0.7 - - // Saturation and value (in hsv colorspace) for background colors. - const bgSaturation = 0.1 - const bgValue = 0.93 - - // Saturation and value (in hsv colorspace) for foreground colors. - const fgSaturation = 1.0 - const fgValue = 0.7 - - // Choose saturation and value based on isBackground. - var saturation float64 - var value float64 - if isBackground { - saturation = bgSaturation - value = bgValue - } else { - saturation = fgSaturation - value = fgValue - } - - // Limit the score values to the range [-1.0, 1.0]. - score = math.Max(-1.0, math.Min(1.0, score)) - - // Reduce saturation near score=0 (so it is colored grey, rather than yellow). - if math.Abs(score) < 0.2 { - saturation *= math.Abs(score) / 0.2 - } - - // Apply 'shift' to move scores away from 0.0 (grey). - if score > 0.0 { - score = math.Pow(score, (1.0 - shift)) - } - if score < 0.0 { - score = -math.Pow(-score, (1.0 - shift)) - } - - var r, g, b float64 // red, green, blue - if score < 0.0 { - g = value - r = value * (1 + saturation*score) - } else { - r = value - g = value * (1 - saturation*score) - } - b = value * (1 - saturation) - return fmt.Sprintf("#%02x%02x%02x", uint8(r*255.0), uint8(g*255.0), uint8(b*255.0)) -} - -func multilinePrintableName(info *NodeInfo) string { - infoCopy := *info - infoCopy.Name = escapeForDot(ShortenFunctionName(infoCopy.Name)) - infoCopy.Name = strings.Replace(infoCopy.Name, "::", `\n`, -1) - infoCopy.Name = strings.Replace(infoCopy.Name, ".", `\n`, -1) - if infoCopy.File != "" { - infoCopy.File = filepath.Base(infoCopy.File) - } - return strings.Join(infoCopy.NameComponents(), `\n`) + `\n` -} - -// collapsedTags trims and sorts a slice of tags. 
-func (b *builder) collapsedTags(ts []*Tag, count int, flatTags bool) []*Tag { - ts = SortTags(ts, flatTags) - if len(ts) <= count { - return ts - } - - tagGroups := make([][]*Tag, count) - for i, t := range (ts)[:count] { - tagGroups[i] = []*Tag{t} - } - for _, t := range (ts)[count:] { - g, d := 0, tagDistance(t, tagGroups[0][0]) - for i := 1; i < count; i++ { - if nd := tagDistance(t, tagGroups[i][0]); nd < d { - g, d = i, nd - } - } - tagGroups[g] = append(tagGroups[g], t) - } - - var nts []*Tag - for _, g := range tagGroups { - l, w, c := b.tagGroupLabel(g) - nts = append(nts, &Tag{ - Name: l, - Flat: w, - Cum: c, - }) - } - return SortTags(nts, flatTags) -} - -func tagDistance(t, u *Tag) float64 { - v, _ := measurement.Scale(u.Value, u.Unit, t.Unit) - if v < float64(t.Value) { - return float64(t.Value) - v - } - return v - float64(t.Value) -} - -func (b *builder) tagGroupLabel(g []*Tag) (label string, flat, cum int64) { - if len(g) == 1 { - t := g[0] - return measurement.Label(t.Value, t.Unit), t.FlatValue(), t.CumValue() - } - min := g[0] - max := g[0] - df, f := min.FlatDiv, min.Flat - dc, c := min.CumDiv, min.Cum - for _, t := range g[1:] { - if v, _ := measurement.Scale(t.Value, t.Unit, min.Unit); int64(v) < min.Value { - min = t - } - if v, _ := measurement.Scale(t.Value, t.Unit, max.Unit); int64(v) > max.Value { - max = t - } - f += t.Flat - df += t.FlatDiv - c += t.Cum - dc += t.CumDiv - } - if df != 0 { - f = f / df - } - if dc != 0 { - c = c / dc - } - - // Tags are not scaled with the selected output unit because tags are often - // much smaller than other values which appear, so the range of tag sizes - // sometimes would appear to be "0..0" when scaled to the selected output unit. - return measurement.Label(min.Value, min.Unit) + ".." + measurement.Label(max.Value, max.Unit), f, c -} - -func min64(a, b int64) int64 { - if a < b { - return a - } - return b -} - -// escapeAllForDot applies escapeForDot to all strings in the given slice. -func escapeAllForDot(in []string) []string { - var out = make([]string, len(in)) - for i := range in { - out[i] = escapeForDot(in[i]) - } - return out -} - -// escapeForDot escapes double quotes and backslashes, and replaces Graphviz's -// "center" character (\n) with a left-justified character. -// See https://graphviz.org/doc/info/attrs.html#k:escString for more info. -func escapeForDot(str string) string { - return strings.ReplaceAll(strings.ReplaceAll(strings.ReplaceAll(str, `\`, `\\`), `"`, `\"`), "\n", `\l`) -} diff --git a/internal/pprof/graph/dotgraph_test.go b/internal/pprof/graph/dotgraph_test.go deleted file mode 100644 index 5da220de370..00000000000 --- a/internal/pprof/graph/dotgraph_test.go +++ /dev/null @@ -1,387 +0,0 @@ -// Copyright 2014 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package graph - -import ( - "bytes" - "flag" - "fmt" - "io/ioutil" - "path/filepath" - "reflect" - "strconv" - "strings" - "testing" - - "github.com/parca-dev/parca/internal/pprof/proftest" -) - -var updateFlag = flag.Bool("update", false, "Update the golden files") - -func TestComposeWithStandardGraph(t *testing.T) { - g := baseGraph() - a, c := baseAttrsAndConfig() - - var buf bytes.Buffer - ComposeDot(&buf, g, a, c) - - compareGraphs(t, buf.Bytes(), "compose1.dot") -} - -func TestComposeWithNodeAttributesAndZeroFlat(t *testing.T) { - g := baseGraph() - a, c := baseAttrsAndConfig() - - // Set NodeAttributes for Node 1. - a.Nodes[g.Nodes[0]] = &DotNodeAttributes{ - Shape: "folder", - Bold: true, - Peripheries: 2, - URL: "www.google.com", - Formatter: func(ni *NodeInfo) string { - return strings.ToUpper(ni.Name) - }, - } - - // Set Flat value to zero on Node 2. - g.Nodes[1].Flat = 0 - - var buf bytes.Buffer - ComposeDot(&buf, g, a, c) - - compareGraphs(t, buf.Bytes(), "compose2.dot") -} - -func TestComposeWithTagsAndResidualEdge(t *testing.T) { - g := baseGraph() - a, c := baseAttrsAndConfig() - - // Add tags to Node 1. - g.Nodes[0].LabelTags["a"] = &Tag{ - Name: "tag1", - Cum: 10, - Flat: 10, - } - g.Nodes[0].NumericTags[""] = TagMap{ - "b": &Tag{ - Name: "tag2", - Cum: 20, - Flat: 20, - Unit: "ms", - }, - } - - // Set edge to be Residual. - g.Nodes[0].Out[g.Nodes[1]].Residual = true - - var buf bytes.Buffer - ComposeDot(&buf, g, a, c) - - compareGraphs(t, buf.Bytes(), "compose3.dot") -} - -func TestComposeWithNestedTags(t *testing.T) { - g := baseGraph() - a, c := baseAttrsAndConfig() - - // Add tags to Node 1. - g.Nodes[0].LabelTags["tag1"] = &Tag{ - Name: "tag1", - Cum: 10, - Flat: 10, - } - g.Nodes[0].NumericTags["tag1"] = TagMap{ - "tag2": &Tag{ - Name: "tag2", - Cum: 20, - Flat: 20, - Unit: "ms", - }, - } - - var buf bytes.Buffer - ComposeDot(&buf, g, a, c) - - compareGraphs(t, buf.Bytes(), "compose5.dot") -} - -func TestComposeWithEmptyGraph(t *testing.T) { - g := &Graph{} - a, c := baseAttrsAndConfig() - - var buf bytes.Buffer - ComposeDot(&buf, g, a, c) - - compareGraphs(t, buf.Bytes(), "compose4.dot") -} - -func TestComposeWithStandardGraphAndURL(t *testing.T) { - g := baseGraph() - a, c := baseAttrsAndConfig() - c.LegendURL = "http://example.com" - - var buf bytes.Buffer - ComposeDot(&buf, g, a, c) - - compareGraphs(t, buf.Bytes(), "compose6.dot") -} - -func TestComposeWithNamesThatNeedEscaping(t *testing.T) { - g := baseGraph() - a, c := baseAttrsAndConfig() - g.Nodes[0].Info = NodeInfo{Name: `var"src"`} - g.Nodes[1].Info = NodeInfo{Name: `var"#dest#"`} - - var buf bytes.Buffer - ComposeDot(&buf, g, a, c) - - compareGraphs(t, buf.Bytes(), "compose7.dot") -} - -func baseGraph() *Graph { - src := &Node{ - Info: NodeInfo{Name: "src"}, - Flat: 10, - Cum: 25, - In: make(EdgeMap), - Out: make(EdgeMap), - LabelTags: make(TagMap), - NumericTags: make(map[string]TagMap), - } - dest := &Node{ - Info: NodeInfo{Name: "dest"}, - Flat: 15, - Cum: 25, - In: make(EdgeMap), - Out: make(EdgeMap), - LabelTags: make(TagMap), - NumericTags: make(map[string]TagMap), - } - edge := &Edge{ - Src: src, - Dest: dest, - Weight: 10, - } - src.Out[dest] = edge - src.In[src] = edge - return &Graph{ - Nodes: Nodes{ - src, - dest, - }, - } -} - -func baseAttrsAndConfig() (*DotAttributes, *DotConfig) { - a := &DotAttributes{ - Nodes: make(map[*Node]*DotNodeAttributes), - } - c := &DotConfig{ - Title: "testtitle", - Labels: []string{"label1", "label2", `label3: "foo"`}, - Total: 100, - FormatValue: func(v 
int64) string { - return strconv.FormatInt(v, 10) - }, - } - return a, c -} - -func compareGraphs(t *testing.T, got []byte, wantFile string) { - wantFile = filepath.Join("testdata", wantFile) - want, err := ioutil.ReadFile(wantFile) - if err != nil { - t.Fatalf("error reading test file %s: %v", wantFile, err) - } - - if string(got) != string(want) { - d, err := proftest.Diff(got, want) - if err != nil { - t.Fatalf("error finding diff: %v", err) - } - t.Errorf("Compose incorrectly wrote %s", string(d)) - if *updateFlag { - err := ioutil.WriteFile(wantFile, got, 0644) - if err != nil { - t.Errorf("failed to update the golden file %q: %v", wantFile, err) - } - } - } -} - -func TestNodeletCountCapping(t *testing.T) { - labelTags := make(TagMap) - for i := 0; i < 10; i++ { - name := fmt.Sprintf("tag-%d", i) - labelTags[name] = &Tag{ - Name: name, - Flat: 10, - Cum: 10, - } - } - numTags := make(TagMap) - for i := 0; i < 10; i++ { - name := fmt.Sprintf("num-tag-%d", i) - numTags[name] = &Tag{ - Name: name, - Unit: "mb", - Value: 16, - Flat: 10, - Cum: 10, - } - } - node1 := &Node{ - Info: NodeInfo{Name: "node1-with-tags"}, - Flat: 10, - Cum: 10, - NumericTags: map[string]TagMap{"": numTags}, - LabelTags: labelTags, - } - node2 := &Node{ - Info: NodeInfo{Name: "node2"}, - Flat: 15, - Cum: 15, - } - node3 := &Node{ - Info: NodeInfo{Name: "node3"}, - Flat: 15, - Cum: 15, - } - g := &Graph{ - Nodes: Nodes{ - node1, - node2, - node3, - }, - } - for n := 1; n <= 3; n++ { - input := maxNodelets + n - if got, want := len(g.SelectTopNodes(input, true)), n; got != want { - t.Errorf("SelectTopNodes(%d): got %d nodes, want %d", input, got, want) - } - } -} - -func TestMultilinePrintableName(t *testing.T) { - ni := &NodeInfo{ - Name: "test1.test2::test3", - File: "src/file.cc", - Address: 123, - Lineno: 999, - } - - want := fmt.Sprintf(`%016x\ntest1\ntest2\ntest3\nfile.cc:999\n`, 123) - if got := multilinePrintableName(ni); got != want { - t.Errorf("multilinePrintableName(%#v) == %q, want %q", ni, got, want) - } -} - -func TestTagCollapse(t *testing.T) { - - makeTag := func(name, unit string, value, flat, cum int64) *Tag { - return &Tag{name, unit, value, flat, 0, cum, 0} - } - - tagSource := []*Tag{ - makeTag("12mb", "mb", 12, 100, 100), - makeTag("1kb", "kb", 1, 1, 1), - makeTag("1mb", "mb", 1, 1000, 1000), - makeTag("2048mb", "mb", 2048, 1000, 1000), - makeTag("1b", "b", 1, 100, 100), - makeTag("2b", "b", 2, 100, 100), - makeTag("7b", "b", 7, 100, 100), - } - - tagWant := [][]*Tag{ - { - makeTag("1B..2GB", "", 0, 2401, 2401), - }, - { - makeTag("2GB", "", 0, 1000, 1000), - makeTag("1B..12MB", "", 0, 1401, 1401), - }, - { - makeTag("2GB", "", 0, 1000, 1000), - makeTag("12MB", "", 0, 100, 100), - makeTag("1B..1MB", "", 0, 1301, 1301), - }, - { - makeTag("2GB", "", 0, 1000, 1000), - makeTag("1MB", "", 0, 1000, 1000), - makeTag("2B..1kB", "", 0, 201, 201), - makeTag("1B", "", 0, 100, 100), - makeTag("12MB", "", 0, 100, 100), - }, - } - - for _, tc := range tagWant { - var got, want []*Tag - b := builder{nil, &DotAttributes{}, &DotConfig{}} - got = b.collapsedTags(tagSource, len(tc), true) - want = SortTags(tc, true) - - if !reflect.DeepEqual(got, want) { - t.Errorf("collapse to %d, got:\n%v\nwant:\n%v", len(tc), tagString(got), tagString(want)) - } - } -} - -func TestEscapeForDot(t *testing.T) { - for _, tc := range []struct { - desc string - input []string - want []string - }{ - { - desc: "with multiple doubles quotes", - input: []string{`label: "foo" and "bar"`}, - want: []string{`label: \"foo\" and 
\"bar\"`}, - }, - { - desc: "with graphviz center line character", - input: []string{"label: foo \n bar"}, - want: []string{`label: foo \l bar`}, - }, - { - desc: "with two backslashes", - input: []string{`label: \\`}, - want: []string{`label: \\\\`}, - }, - { - desc: "with two double quotes together", - input: []string{`label: ""`}, - want: []string{`label: \"\"`}, - }, - { - desc: "with multiple labels", - input: []string{`label1: "foo"`, `label2: "bar"`}, - want: []string{`label1: \"foo\"`, `label2: \"bar\"`}, - }, - } { - t.Run(tc.desc, func(t *testing.T) { - if got := escapeAllForDot(tc.input); !reflect.DeepEqual(got, tc.want) { - t.Errorf("escapeAllForDot(%s) = %s, want %s", tc.input, got, tc.want) - } - }) - } -} - -func tagString(t []*Tag) string { - var ret []string - for _, s := range t { - ret = append(ret, fmt.Sprintln(s)) - } - return strings.Join(ret, ":") -} diff --git a/internal/pprof/graph/graph.go b/internal/pprof/graph/graph.go deleted file mode 100644 index 74b904c402e..00000000000 --- a/internal/pprof/graph/graph.go +++ /dev/null @@ -1,1170 +0,0 @@ -// Copyright 2014 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Package graph collects a set of samples into a directed graph. -package graph - -import ( - "fmt" - "math" - "path/filepath" - "regexp" - "sort" - "strconv" - "strings" - - "github.com/google/pprof/profile" -) - -var ( - // Removes package name and method arguments for Java method names. - // See tests for examples. - javaRegExp = regexp.MustCompile(`^(?:[a-z]\w*\.)*([A-Z][\w\$]*\.(?:|[a-z][\w\$]*(?:\$\d+)?))(?:(?:\()|$)`) - // Removes package name and method arguments for Go function names. - // See tests for examples. - goRegExp = regexp.MustCompile(`^(?:[\w\-\.]+\/)+(.+)`) - // Removes potential module versions in a package path. - goVerRegExp = regexp.MustCompile(`^(.*?)/v(?:[2-9]|[1-9][0-9]+)([./].*)$`) - // Strips C++ namespace prefix from a C++ function / method name. - // NOTE: Make sure to keep the template parameters in the name. Normally, - // template parameters are stripped from the C++ names but when - // -symbolize=demangle=templates flag is used, they will not be. - // See tests for examples. - cppRegExp = regexp.MustCompile(`^(?:[_a-zA-Z]\w*::)+(_*[A-Z]\w*::~?[_a-zA-Z]\w*(?:<.*>)?)`) - cppAnonymousPrefixRegExp = regexp.MustCompile(`^\(anonymous namespace\)::`) -) - -// Graph summarizes a performance profile into a format that is -// suitable for visualization. 
-type Graph struct { - Nodes Nodes -} - -// Options encodes the options for constructing a graph -type Options struct { - SampleValue func(s []int64) int64 // Function to compute the value of a sample - SampleMeanDivisor func(s []int64) int64 // Function to compute the divisor for mean graphs, or nil - FormatTag func(int64, string) string // Function to format a sample tag value into a string - ObjNames bool // Always preserve obj filename - OrigFnNames bool // Preserve original (eg mangled) function names - - CallTree bool // Build a tree instead of a graph - DropNegative bool // Drop nodes with overall negative values - - KeptNodes NodeSet // If non-nil, only use nodes in this set -} - -// Nodes is an ordered collection of graph nodes. -type Nodes []*Node - -// Node is an entry on a profiling report. It represents a unique -// program location. -type Node struct { - // Info describes the source location associated to this node. - Info NodeInfo - - // Function represents the function that this node belongs to. On - // graphs with sub-function resolution (eg line number or - // addresses), two nodes in a NodeMap that are part of the same - // function have the same value of Node.Function. If the Node - // represents the whole function, it points back to itself. - Function *Node - - // Values associated to this node. Flat is exclusive to this node, - // Cum includes all descendents. - Flat, FlatDiv, Cum, CumDiv int64 - - // In and out Contains the nodes immediately reaching or reached by - // this node. - In, Out EdgeMap - - // LabelTags provide additional information about subsets of a sample. - LabelTags TagMap - - // NumericTags provide additional values for subsets of a sample. - // Numeric tags are optionally associated to a label tag. The key - // for NumericTags is the name of the LabelTag they are associated - // to, or "" for numeric tags not associated to a label tag. - NumericTags map[string]TagMap -} - -// FlatValue returns the exclusive value for this node, computing the -// mean if a divisor is available. -func (n *Node) FlatValue() int64 { - if n.FlatDiv == 0 { - return n.Flat - } - return n.Flat / n.FlatDiv -} - -// CumValue returns the inclusive value for this node, computing the -// mean if a divisor is available. -func (n *Node) CumValue() int64 { - if n.CumDiv == 0 { - return n.Cum - } - return n.Cum / n.CumDiv -} - -// AddToEdge increases the weight of an edge between two nodes. If -// there isn't such an edge one is created. -func (n *Node) AddToEdge(to *Node, v int64, residual, inline bool) { - n.AddToEdgeDiv(to, 0, v, residual, inline) -} - -// AddToEdgeDiv increases the weight of an edge between two nodes. If -// there isn't such an edge one is created. -func (n *Node) AddToEdgeDiv(to *Node, dv, v int64, residual, inline bool) { - if n.Out[to] != to.In[n] { - panic(fmt.Errorf("asymmetric edges %v %v", *n, *to)) - } - - if e := n.Out[to]; e != nil { - e.WeightDiv += dv - e.Weight += v - if residual { - e.Residual = true - } - if !inline { - e.Inline = false - } - return - } - - info := &Edge{Src: n, Dest: to, WeightDiv: dv, Weight: v, Residual: residual, Inline: inline} - n.Out[to] = info - to.In[n] = info -} - -// NodeInfo contains the attributes for a node. -type NodeInfo struct { - Name string - OrigName string - Address uint64 - File string - StartLine, Lineno int - Objfile string -} - -// PrintableName calls the Node's Formatter function with a single space separator. 
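A small standalone illustration (not from the removed file) of the divisor bookkeeping behind FlatValue above: Flat accumulates the measured value, FlatDiv accumulates the divisor, and FlatValue reports the mean whenever a divisor is present.

package main

import "fmt"

type node struct{ Flat, FlatDiv int64 }

// FlatValue mirrors the method above: plain sum without a divisor,
// mean of the accumulated samples with one.
func (n node) FlatValue() int64 {
	if n.FlatDiv == 0 {
		return n.Flat
	}
	return n.Flat / n.FlatDiv
}

func main() {
	var n node
	// Two samples for a mean graph: 30 units over 3 calls, 10 units over 1 call.
	n.Flat, n.FlatDiv = n.Flat+30, n.FlatDiv+3
	n.Flat, n.FlatDiv = n.Flat+10, n.FlatDiv+1
	fmt.Println(n.FlatValue()) // (30+10)/(3+1) = 10
}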
-func (i *NodeInfo) PrintableName() string { - return strings.Join(i.NameComponents(), " ") -} - -// NameComponents returns the components of the printable name to be used for a node. -func (i *NodeInfo) NameComponents() []string { - var name []string - if i.Address != 0 { - name = append(name, fmt.Sprintf("%016x", i.Address)) - } - if fun := i.Name; fun != "" { - name = append(name, fun) - } - - switch { - case i.Lineno != 0: - // User requested line numbers, provide what we have. - name = append(name, fmt.Sprintf("%s:%d", i.File, i.Lineno)) - case i.File != "": - // User requested file name, provide it. - name = append(name, i.File) - case i.Name != "": - // User requested function name. It was already included. - case i.Objfile != "": - // Only binary name is available - name = append(name, "["+filepath.Base(i.Objfile)+"]") - default: - // Do not leave it empty if there is no information at all. - name = append(name, "") - } - return name -} - -// NodeMap maps from a node info struct to a node. It is used to merge -// report entries with the same info. -type NodeMap map[NodeInfo]*Node - -// NodeSet is a collection of node info structs. -type NodeSet map[NodeInfo]bool - -// NodePtrSet is a collection of nodes. Trimming a graph or tree requires a set -// of objects which uniquely identify the nodes to keep. In a graph, NodeInfo -// works as a unique identifier; however, in a tree multiple nodes may share -// identical NodeInfos. A *Node does uniquely identify a node so we can use that -// instead. Though a *Node also uniquely identifies a node in a graph, -// currently, during trimming, graphs are rebuilt from scratch using only the -// NodeSet, so there would not be the required context of the initial graph to -// allow for the use of *Node. -type NodePtrSet map[*Node]bool - -// FindOrInsertNode takes the info for a node and either returns a matching node -// from the node map if one exists, or adds one to the map if one does not. -// If kept is non-nil, nodes are only added if they can be located on it. -func (nm NodeMap) FindOrInsertNode(info NodeInfo, kept NodeSet) *Node { - if kept != nil { - if _, ok := kept[info]; !ok { - return nil - } - } - - if n, ok := nm[info]; ok { - return n - } - - n := &Node{ - Info: info, - In: make(EdgeMap), - Out: make(EdgeMap), - LabelTags: make(TagMap), - NumericTags: make(map[string]TagMap), - } - nm[info] = n - if info.Address == 0 && info.Lineno == 0 { - // This node represents the whole function, so point Function - // back to itself. - n.Function = n - return n - } - // Find a node that represents the whole function. - info.Address = 0 - info.Lineno = 0 - n.Function = nm.FindOrInsertNode(info, nil) - return n -} - -// EdgeMap is used to represent the incoming/outgoing edges from a node. -type EdgeMap map[*Node]*Edge - -// Edge contains any attributes to be represented about edges in a graph. -type Edge struct { - Src, Dest *Node - // The summary weight of the edge - Weight, WeightDiv int64 - - // residual edges connect nodes that were connected through a - // separate node, which has been removed from the report. - Residual bool - // An inline edge represents a call that was inlined into the caller. - Inline bool -} - -// WeightValue returns the weight value for this edge, normalizing if a -// divisor is available. 
-func (e *Edge) WeightValue() int64 { - if e.WeightDiv == 0 { - return e.Weight - } - return e.Weight / e.WeightDiv -} - -// Tag represent sample annotations -type Tag struct { - Name string - Unit string // Describe the value, "" for non-numeric tags - Value int64 - Flat, FlatDiv int64 - Cum, CumDiv int64 -} - -// FlatValue returns the exclusive value for this tag, computing the -// mean if a divisor is available. -func (t *Tag) FlatValue() int64 { - if t.FlatDiv == 0 { - return t.Flat - } - return t.Flat / t.FlatDiv -} - -// CumValue returns the inclusive value for this tag, computing the -// mean if a divisor is available. -func (t *Tag) CumValue() int64 { - if t.CumDiv == 0 { - return t.Cum - } - return t.Cum / t.CumDiv -} - -// TagMap is a collection of tags, classified by their name. -type TagMap map[string]*Tag - -// SortTags sorts a slice of tags based on their weight. -func SortTags(t []*Tag, flat bool) []*Tag { - ts := tags{t, flat} - sort.Sort(ts) - return ts.t -} - -// New summarizes performance data from a profile into a graph. -func New(prof *profile.Profile, o *Options) *Graph { - if o.CallTree { - return newTree(prof, o) - } - g, _ := newGraph(prof, o) - return g -} - -// newGraph computes a graph from a profile. It returns the graph, and -// a map from the profile location indices to the corresponding graph -// nodes. -func newGraph(prof *profile.Profile, o *Options) (*Graph, map[uint64]Nodes) { - nodes, locationMap := CreateNodes(prof, o) - seenNode := make(map[*Node]bool) - seenEdge := make(map[nodePair]bool) - for _, sample := range prof.Sample { - var w, dw int64 - w = o.SampleValue(sample.Value) - if o.SampleMeanDivisor != nil { - dw = o.SampleMeanDivisor(sample.Value) - } - if dw == 0 && w == 0 { - continue - } - for k := range seenNode { - delete(seenNode, k) - } - for k := range seenEdge { - delete(seenEdge, k) - } - var parent *Node - // A residual edge goes over one or more nodes that were not kept. - residual := false - - labels := joinLabels(sample) - // Group the sample frames, based on a global map. - for i := len(sample.Location) - 1; i >= 0; i-- { - l := sample.Location[i] - locNodes := locationMap[l.ID] - for ni := len(locNodes) - 1; ni >= 0; ni-- { - n := locNodes[ni] - if n == nil { - residual = true - continue - } - // Add cum weight to all nodes in stack, avoiding double counting. - if _, ok := seenNode[n]; !ok { - seenNode[n] = true - n.addSample(dw, w, labels, sample.NumLabel, sample.NumUnit, o.FormatTag, false) - } - // Update edge weights for all edges in stack, avoiding double counting. - if _, ok := seenEdge[nodePair{n, parent}]; !ok && parent != nil && n != parent { - seenEdge[nodePair{n, parent}] = true - parent.AddToEdgeDiv(n, dw, w, residual, ni != len(locNodes)-1) - } - parent = n - residual = false - } - } - if parent != nil && !residual { - // Add flat weight to leaf node. - parent.addSample(dw, w, labels, sample.NumLabel, sample.NumUnit, o.FormatTag, true) - } - } - - return selectNodesForGraph(nodes, o.DropNegative), locationMap -} - -func selectNodesForGraph(nodes Nodes, dropNegative bool) *Graph { - // Collect nodes into a graph. 
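A tiny standalone illustration (not from the removed file) of the weight accounting in newGraph above: every frame on a sample's stack receives cumulative weight, while only the leaf frame receives flat weight.

package main

import "fmt"

func main() {
	// One sample worth 10 units with stack main -> foo -> bar (leaf last).
	stack := []string{"main", "foo", "bar"}
	const w int64 = 10
	flat, cum := map[string]int64{}, map[string]int64{}
	for _, frame := range stack {
		cum[frame] += w // all frames on the stack: cumulative weight
	}
	flat[stack[len(stack)-1]] += w // leaf only: flat weight
	fmt.Println("flat:", flat)     // map[bar:10]
	fmt.Println("cum:", cum)       // map[bar:10 foo:10 main:10]
}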
- gNodes := make(Nodes, 0, len(nodes)) - for _, n := range nodes { - if n == nil { - continue - } - if n.Cum == 0 && n.Flat == 0 { - continue - } - if dropNegative && isNegative(n) { - continue - } - gNodes = append(gNodes, n) - } - return &Graph{gNodes} -} - -type nodePair struct { - src, dest *Node -} - -func newTree(prof *profile.Profile, o *Options) (g *Graph) { - parentNodeMap := make(map[*Node]NodeMap, len(prof.Sample)) - for _, sample := range prof.Sample { - var w, dw int64 - w = o.SampleValue(sample.Value) - if o.SampleMeanDivisor != nil { - dw = o.SampleMeanDivisor(sample.Value) - } - if dw == 0 && w == 0 { - continue - } - var parent *Node - labels := joinLabels(sample) - // Group the sample frames, based on a per-node map. - for i := len(sample.Location) - 1; i >= 0; i-- { - l := sample.Location[i] - lines := l.Line - if len(lines) == 0 { - lines = []profile.Line{{}} // Create empty line to include location info. - } - for lidx := len(lines) - 1; lidx >= 0; lidx-- { - nodeMap := parentNodeMap[parent] - if nodeMap == nil { - nodeMap = make(NodeMap) - parentNodeMap[parent] = nodeMap - } - n := nodeMap.findOrInsertLine(l, lines[lidx], o) - if n == nil { - continue - } - n.addSample(dw, w, labels, sample.NumLabel, sample.NumUnit, o.FormatTag, false) - if parent != nil { - parent.AddToEdgeDiv(n, dw, w, false, lidx != len(lines)-1) - } - parent = n - } - } - if parent != nil { - parent.addSample(dw, w, labels, sample.NumLabel, sample.NumUnit, o.FormatTag, true) - } - } - - nodes := make(Nodes, len(prof.Location)) - for _, nm := range parentNodeMap { - nodes = append(nodes, nm.nodes()...) - } - return selectNodesForGraph(nodes, o.DropNegative) -} - -// ShortenFunctionName returns a shortened version of a function's name. -func ShortenFunctionName(f string) string { - f = cppAnonymousPrefixRegExp.ReplaceAllString(f, "") - f = goVerRegExp.ReplaceAllString(f, `${1}${2}`) - for _, re := range []*regexp.Regexp{goRegExp, javaRegExp, cppRegExp} { - if matches := re.FindStringSubmatch(f); len(matches) >= 2 { - return strings.Join(matches[1:], "") - } - } - return f -} - -// TrimTree trims a Graph in forest form, keeping only the nodes in kept. This -// will not work correctly if even a single node has multiple parents. -func (g *Graph) TrimTree(kept NodePtrSet) { - // Creates a new list of nodes - oldNodes := g.Nodes - g.Nodes = make(Nodes, 0, len(kept)) - - for _, cur := range oldNodes { - // A node may not have multiple parents - if len(cur.In) > 1 { - panic("TrimTree only works on trees") - } - - // If a node should be kept, add it to the new list of nodes - if _, ok := kept[cur]; ok { - g.Nodes = append(g.Nodes, cur) - continue - } - - // If a node has no parents, then delete all of the in edges of its - // children to make them each roots of their own trees. - if len(cur.In) == 0 { - for _, outEdge := range cur.Out { - delete(outEdge.Dest.In, cur) - } - continue - } - - // Get the parent. This works since at this point cur.In must contain only - // one element. - if len(cur.In) != 1 { - panic("Get parent assertion failed. cur.In expected to be of length 1.") - } - var parent *Node - for _, edge := range cur.In { - parent = edge.Src - } - - parentEdgeInline := parent.Out[cur].Inline - - // Remove the edge from the parent to this node - delete(parent.Out, cur) - - // Reconfigure every edge from the current node to now begin at the parent. 
- for _, outEdge := range cur.Out { - child := outEdge.Dest - - delete(child.In, cur) - child.In[parent] = outEdge - parent.Out[child] = outEdge - - outEdge.Src = parent - outEdge.Residual = true - // If the edge from the parent to the current node and the edge from the - // current node to the child are both inline, then this resulting residual - // edge should also be inline - outEdge.Inline = parentEdgeInline && outEdge.Inline - } - } - g.RemoveRedundantEdges() -} - -func joinLabels(s *profile.Sample) string { - if len(s.Label) == 0 { - return "" - } - - var labels []string - for key, vals := range s.Label { - for _, v := range vals { - labels = append(labels, key+":"+v) - } - } - sort.Strings(labels) - return strings.Join(labels, `\n`) -} - -// isNegative returns true if the node is considered as "negative" for the -// purposes of drop_negative. -func isNegative(n *Node) bool { - switch { - case n.Flat < 0: - return true - case n.Flat == 0 && n.Cum < 0: - return true - default: - return false - } -} - -// CreateNodes creates graph nodes for all locations in a profile. It -// returns set of all nodes, plus a mapping of each location to the -// set of corresponding nodes (one per location.Line). -func CreateNodes(prof *profile.Profile, o *Options) (Nodes, map[uint64]Nodes) { - locations := make(map[uint64]Nodes, len(prof.Location)) - nm := make(NodeMap, len(prof.Location)) - for _, l := range prof.Location { - lines := l.Line - if len(lines) == 0 { - lines = []profile.Line{{}} // Create empty line to include location info. - } - nodes := make(Nodes, len(lines)) - for ln := range lines { - nodes[ln] = nm.findOrInsertLine(l, lines[ln], o) - } - locations[l.ID] = nodes - } - return nm.nodes(), locations -} - -func (nm NodeMap) nodes() Nodes { - nodes := make(Nodes, 0, len(nm)) - for _, n := range nm { - nodes = append(nodes, n) - } - return nodes -} - -func (nm NodeMap) findOrInsertLine(l *profile.Location, li profile.Line, o *Options) *Node { - var objfile string - if m := l.Mapping; m != nil && m.File != "" { - objfile = m.File - } - - if ni := nodeInfo(l, li, objfile, o); ni != nil { - return nm.FindOrInsertNode(*ni, o.KeptNodes) - } - return nil -} - -func nodeInfo(l *profile.Location, line profile.Line, objfile string, o *Options) *NodeInfo { - if line.Function == nil { - return &NodeInfo{Address: l.Address, Objfile: objfile} - } - ni := &NodeInfo{ - Address: l.Address, - Lineno: int(line.Line), - Name: line.Function.Name, - } - if fname := line.Function.Filename; fname != "" { - ni.File = filepath.Clean(fname) - } - if o.OrigFnNames { - ni.OrigName = line.Function.SystemName - } - if o.ObjNames || (ni.Name == "" && ni.OrigName == "") { - ni.Objfile = objfile - ni.StartLine = int(line.Function.StartLine) - } - return ni -} - -type tags struct { - t []*Tag - flat bool -} - -func (t tags) Len() int { return len(t.t) } -func (t tags) Swap(i, j int) { t.t[i], t.t[j] = t.t[j], t.t[i] } -func (t tags) Less(i, j int) bool { - if !t.flat { - if t.t[i].Cum != t.t[j].Cum { - return abs64(t.t[i].Cum) > abs64(t.t[j].Cum) - } - } - if t.t[i].Flat != t.t[j].Flat { - return abs64(t.t[i].Flat) > abs64(t.t[j].Flat) - } - return t.t[i].Name < t.t[j].Name -} - -// Sum adds the flat and cum values of a set of nodes. 
-func (ns Nodes) Sum() (flat int64, cum int64) { - for _, n := range ns { - flat += n.Flat - cum += n.Cum - } - return -} - -func (n *Node) addSample(dw, w int64, labels string, numLabel map[string][]int64, numUnit map[string][]string, format func(int64, string) string, flat bool) { - // Update sample value - if flat { - n.FlatDiv += dw - n.Flat += w - } else { - n.CumDiv += dw - n.Cum += w - } - - // Add string tags - if labels != "" { - t := n.LabelTags.findOrAddTag(labels, "", 0) - if flat { - t.FlatDiv += dw - t.Flat += w - } else { - t.CumDiv += dw - t.Cum += w - } - } - - numericTags := n.NumericTags[labels] - if numericTags == nil { - numericTags = TagMap{} - n.NumericTags[labels] = numericTags - } - // Add numeric tags - if format == nil { - format = defaultLabelFormat - } - for k, nvals := range numLabel { - units := numUnit[k] - for i, v := range nvals { - var t *Tag - if len(units) > 0 { - t = numericTags.findOrAddTag(format(v, units[i]), units[i], v) - } else { - t = numericTags.findOrAddTag(format(v, k), k, v) - } - if flat { - t.FlatDiv += dw - t.Flat += w - } else { - t.CumDiv += dw - t.Cum += w - } - } - } -} - -func defaultLabelFormat(v int64, key string) string { - return strconv.FormatInt(v, 10) -} - -func (m TagMap) findOrAddTag(label, unit string, value int64) *Tag { - l := m[label] - if l == nil { - l = &Tag{ - Name: label, - Unit: unit, - Value: value, - } - m[label] = l - } - return l -} - -// String returns a text representation of a graph, for debugging purposes. -func (g *Graph) String() string { - var s []string - - nodeIndex := make(map[*Node]int, len(g.Nodes)) - - for i, n := range g.Nodes { - nodeIndex[n] = i + 1 - } - - for i, n := range g.Nodes { - name := n.Info.PrintableName() - var in, out []int - - for _, from := range n.In { - in = append(in, nodeIndex[from.Src]) - } - for _, to := range n.Out { - out = append(out, nodeIndex[to.Dest]) - } - s = append(s, fmt.Sprintf("%d: %s[flat=%d cum=%d] %x -> %v ", i+1, name, n.Flat, n.Cum, in, out)) - } - return strings.Join(s, "\n") -} - -// DiscardLowFrequencyNodes returns a set of the nodes at or over a -// specific cum value cutoff. -func (g *Graph) DiscardLowFrequencyNodes(nodeCutoff int64) NodeSet { - return makeNodeSet(g.Nodes, nodeCutoff) -} - -// DiscardLowFrequencyNodePtrs returns a NodePtrSet of nodes at or over a -// specific cum value cutoff. -func (g *Graph) DiscardLowFrequencyNodePtrs(nodeCutoff int64) NodePtrSet { - cutNodes := getNodesAboveCumCutoff(g.Nodes, nodeCutoff) - kept := make(NodePtrSet, len(cutNodes)) - for _, n := range cutNodes { - kept[n] = true - } - return kept -} - -func makeNodeSet(nodes Nodes, nodeCutoff int64) NodeSet { - cutNodes := getNodesAboveCumCutoff(nodes, nodeCutoff) - kept := make(NodeSet, len(cutNodes)) - for _, n := range cutNodes { - kept[n.Info] = true - } - return kept -} - -// getNodesAboveCumCutoff returns all the nodes which have a Cum value greater -// than or equal to cutoff. -func getNodesAboveCumCutoff(nodes Nodes, nodeCutoff int64) Nodes { - cutoffNodes := make(Nodes, 0, len(nodes)) - for _, n := range nodes { - if abs64(n.Cum) < nodeCutoff { - continue - } - cutoffNodes = append(cutoffNodes, n) - } - return cutoffNodes -} - -// TrimLowFrequencyTags removes tags that have less than -// the specified weight. 
-func (g *Graph) TrimLowFrequencyTags(tagCutoff int64) { - // Remove nodes with value <= total*nodeFraction - for _, n := range g.Nodes { - n.LabelTags = trimLowFreqTags(n.LabelTags, tagCutoff) - for s, nt := range n.NumericTags { - n.NumericTags[s] = trimLowFreqTags(nt, tagCutoff) - } - } -} - -func trimLowFreqTags(tags TagMap, minValue int64) TagMap { - kept := TagMap{} - for s, t := range tags { - if abs64(t.Flat) >= minValue || abs64(t.Cum) >= minValue { - kept[s] = t - } - } - return kept -} - -// TrimLowFrequencyEdges removes edges that have less than -// the specified weight. Returns the number of edges removed -func (g *Graph) TrimLowFrequencyEdges(edgeCutoff int64) int { - var droppedEdges int - for _, n := range g.Nodes { - for src, e := range n.In { - if abs64(e.Weight) < edgeCutoff { - delete(n.In, src) - delete(src.Out, n) - droppedEdges++ - } - } - } - return droppedEdges -} - -// SortNodes sorts the nodes in a graph based on a specific heuristic. -func (g *Graph) SortNodes(cum bool, visualMode bool) { - // Sort nodes based on requested mode - switch { - case visualMode: - // Specialized sort to produce a more visually-interesting graph - g.Nodes.Sort(EntropyOrder) - case cum: - g.Nodes.Sort(CumNameOrder) - default: - g.Nodes.Sort(FlatNameOrder) - } -} - -// SelectTopNodePtrs returns a set of the top maxNodes *Node in a graph. -func (g *Graph) SelectTopNodePtrs(maxNodes int, visualMode bool) NodePtrSet { - set := make(NodePtrSet) - for _, node := range g.selectTopNodes(maxNodes, visualMode) { - set[node] = true - } - return set -} - -// SelectTopNodes returns a set of the top maxNodes nodes in a graph. -func (g *Graph) SelectTopNodes(maxNodes int, visualMode bool) NodeSet { - return makeNodeSet(g.selectTopNodes(maxNodes, visualMode), 0) -} - -// selectTopNodes returns a slice of the top maxNodes nodes in a graph. -func (g *Graph) selectTopNodes(maxNodes int, visualMode bool) Nodes { - if maxNodes > 0 { - if visualMode { - var count int - // If generating a visual graph, count tags as nodes. Update - // maxNodes to account for them. - for i, n := range g.Nodes { - tags := countTags(n) - if tags > maxNodelets { - tags = maxNodelets - } - if count += tags + 1; count >= maxNodes { - maxNodes = i + 1 - break - } - } - } - } - if maxNodes > len(g.Nodes) { - maxNodes = len(g.Nodes) - } - return g.Nodes[:maxNodes] -} - -// countTags counts the tags with flat count. This underestimates the -// number of tags being displayed, but in practice is close enough. -func countTags(n *Node) int { - count := 0 - for _, e := range n.LabelTags { - if e.Flat != 0 { - count++ - } - } - for _, t := range n.NumericTags { - for _, e := range t { - if e.Flat != 0 { - count++ - } - } - } - return count -} - -// RemoveRedundantEdges removes residual edges if the destination can -// be reached through another path. This is done to simplify the graph -// while preserving connectivity. -func (g *Graph) RemoveRedundantEdges() { - // Walk the nodes and outgoing edges in reverse order to prefer - // removing edges with the lowest weight. - for i := len(g.Nodes); i > 0; i-- { - n := g.Nodes[i-1] - in := n.In.Sort() - for j := len(in); j > 0; j-- { - e := in[j-1] - if !e.Residual { - // Do not remove edges heavier than a non-residual edge, to - // avoid potential confusion. - break - } - if isRedundantEdge(e) { - delete(e.Src.Out, e.Dest) - delete(e.Dest.In, e.Src) - } - } - } -} - -// isRedundantEdge determines if there is a path that allows e.Src -// to reach e.Dest after removing e. 
-func isRedundantEdge(e *Edge) bool { - src, n := e.Src, e.Dest - seen := map[*Node]bool{n: true} - queue := Nodes{n} - for len(queue) > 0 { - n := queue[0] - queue = queue[1:] - for _, ie := range n.In { - if e == ie || seen[ie.Src] { - continue - } - if ie.Src == src { - return true - } - seen[ie.Src] = true - queue = append(queue, ie.Src) - } - } - return false -} - -// nodeSorter is a mechanism used to allow a report to be sorted -// in different ways. -type nodeSorter struct { - rs Nodes - less func(l, r *Node) bool -} - -func (s nodeSorter) Len() int { return len(s.rs) } -func (s nodeSorter) Swap(i, j int) { s.rs[i], s.rs[j] = s.rs[j], s.rs[i] } -func (s nodeSorter) Less(i, j int) bool { return s.less(s.rs[i], s.rs[j]) } - -// Sort reorders a slice of nodes based on the specified ordering -// criteria. The result is sorted in decreasing order for (absolute) -// numeric quantities, alphabetically for text, and increasing for -// addresses. -func (ns Nodes) Sort(o NodeOrder) error { - var s nodeSorter - - switch o { - case FlatNameOrder: - s = nodeSorter{ns, - func(l, r *Node) bool { - if iv, jv := abs64(l.Flat), abs64(r.Flat); iv != jv { - return iv > jv - } - if iv, jv := l.Info.PrintableName(), r.Info.PrintableName(); iv != jv { - return iv < jv - } - if iv, jv := abs64(l.Cum), abs64(r.Cum); iv != jv { - return iv > jv - } - return compareNodes(l, r) - }, - } - case FlatCumNameOrder: - s = nodeSorter{ns, - func(l, r *Node) bool { - if iv, jv := abs64(l.Flat), abs64(r.Flat); iv != jv { - return iv > jv - } - if iv, jv := abs64(l.Cum), abs64(r.Cum); iv != jv { - return iv > jv - } - if iv, jv := l.Info.PrintableName(), r.Info.PrintableName(); iv != jv { - return iv < jv - } - return compareNodes(l, r) - }, - } - case NameOrder: - s = nodeSorter{ns, - func(l, r *Node) bool { - if iv, jv := l.Info.Name, r.Info.Name; iv != jv { - return iv < jv - } - return compareNodes(l, r) - }, - } - case FileOrder: - s = nodeSorter{ns, - func(l, r *Node) bool { - if iv, jv := l.Info.File, r.Info.File; iv != jv { - return iv < jv - } - if iv, jv := l.Info.StartLine, r.Info.StartLine; iv != jv { - return iv < jv - } - return compareNodes(l, r) - }, - } - case AddressOrder: - s = nodeSorter{ns, - func(l, r *Node) bool { - if iv, jv := l.Info.Address, r.Info.Address; iv != jv { - return iv < jv - } - return compareNodes(l, r) - }, - } - case CumNameOrder, EntropyOrder: - // Hold scoring for score-based ordering - var score map[*Node]int64 - scoreOrder := func(l, r *Node) bool { - if iv, jv := abs64(score[l]), abs64(score[r]); iv != jv { - return iv > jv - } - if iv, jv := l.Info.PrintableName(), r.Info.PrintableName(); iv != jv { - return iv < jv - } - if iv, jv := abs64(l.Flat), abs64(r.Flat); iv != jv { - return iv > jv - } - return compareNodes(l, r) - } - - switch o { - case CumNameOrder: - score = make(map[*Node]int64, len(ns)) - for _, n := range ns { - score[n] = n.Cum - } - s = nodeSorter{ns, scoreOrder} - case EntropyOrder: - score = make(map[*Node]int64, len(ns)) - for _, n := range ns { - score[n] = entropyScore(n) - } - s = nodeSorter{ns, scoreOrder} - } - default: - return fmt.Errorf("report: unrecognized sort ordering: %d", o) - } - sort.Sort(s) - return nil -} - -// compareNodes compares two nodes to provide a deterministic ordering -// between them. Two nodes cannot have the same Node.Info value. 
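EntropyOrder, used by Sort above, favors nodes whose edge weights are spread across several edges rather than dominated by a single one. A standalone sketch of the underlying Shannon entropy (not the removed edgeEntropyScore, which also folds in the node's own flat weight):

package main

import (
	"fmt"
	"math"
)

// entropy returns the entropy of a weight distribution in bits: higher
// means the weight is spread out rather than concentrated on one edge.
func entropy(weights []float64) float64 {
	var total float64
	for _, w := range weights {
		total += w
	}
	var h float64
	for _, w := range weights {
		if w == 0 {
			continue
		}
		p := w / total
		h += -p * math.Log2(p)
	}
	return h
}

func main() {
	fmt.Printf("%.2f\n", entropy([]float64{50, 50})) // 1.00
	fmt.Printf("%.2f\n", entropy([]float64{90, 10})) // 0.47
	fmt.Printf("%.2f\n", entropy([]float64{100}))    // 0.00
}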
-func compareNodes(l, r *Node) bool { - return fmt.Sprint(l.Info) < fmt.Sprint(r.Info) -} - -// entropyScore computes a score for a node representing how important -// it is to include this node on a graph visualization. It is used to -// sort the nodes and select which ones to display if we have more -// nodes than desired in the graph. This number is computed by looking -// at the flat and cum weights of the node and the incoming/outgoing -// edges. The fundamental idea is to penalize nodes that have a simple -// fallthrough from their incoming to the outgoing edge. -func entropyScore(n *Node) int64 { - score := float64(0) - - if len(n.In) == 0 { - score++ // Favor entry nodes - } else { - score += edgeEntropyScore(n, n.In, 0) - } - - if len(n.Out) == 0 { - score++ // Favor leaf nodes - } else { - score += edgeEntropyScore(n, n.Out, n.Flat) - } - - return int64(score*float64(n.Cum)) + n.Flat -} - -// edgeEntropyScore computes the entropy value for a set of edges -// coming in or out of a node. Entropy (as defined in information -// theory) refers to the amount of information encoded by the set of -// edges. A set of edges that have a more interesting distribution of -// samples gets a higher score. -func edgeEntropyScore(n *Node, edges EdgeMap, self int64) float64 { - score := float64(0) - total := self - for _, e := range edges { - if e.Weight > 0 { - total += abs64(e.Weight) - } - } - if total != 0 { - for _, e := range edges { - frac := float64(abs64(e.Weight)) / float64(total) - score += -frac * math.Log2(frac) - } - if self > 0 { - frac := float64(abs64(self)) / float64(total) - score += -frac * math.Log2(frac) - } - } - return score -} - -// NodeOrder sets the ordering for a Sort operation -type NodeOrder int - -// Sorting options for node sort. -const ( - FlatNameOrder NodeOrder = iota - FlatCumNameOrder - CumNameOrder - NameOrder - FileOrder - AddressOrder - EntropyOrder -) - -// Sort returns a slice of the edges in the map, in a consistent -// order. The sort order is first based on the edge weight -// (higher-to-lower) and then by the node names to avoid flakiness. -func (e EdgeMap) Sort() []*Edge { - el := make(edgeList, 0, len(e)) - for _, w := range e { - el = append(el, w) - } - - sort.Sort(el) - return el -} - -// Sum returns the total weight for a set of nodes. -func (e EdgeMap) Sum() int64 { - var ret int64 - for _, edge := range e { - ret += edge.Weight - } - return ret -} - -type edgeList []*Edge - -func (el edgeList) Len() int { - return len(el) -} - -func (el edgeList) Less(i, j int) bool { - if el[i].Weight != el[j].Weight { - return abs64(el[i].Weight) > abs64(el[j].Weight) - } - - from1 := el[i].Src.Info.PrintableName() - from2 := el[j].Src.Info.PrintableName() - if from1 != from2 { - return from1 < from2 - } - - to1 := el[i].Dest.Info.PrintableName() - to2 := el[j].Dest.Info.PrintableName() - - return to1 < to2 -} - -func (el edgeList) Swap(i, j int) { - el[i], el[j] = el[j], el[i] -} - -func abs64(i int64) int64 { - if i < 0 { - return -i - } - return i -} diff --git a/internal/pprof/graph/graph_test.go b/internal/pprof/graph/graph_test.go deleted file mode 100644 index bdcb984ee28..00000000000 --- a/internal/pprof/graph/graph_test.go +++ /dev/null @@ -1,533 +0,0 @@ -// Copyright 2016 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package graph - -import ( - "fmt" - "testing" - - "github.com/google/pprof/profile" -) - -func edgeDebugString(edge *Edge) string { - debug := "" - debug += fmt.Sprintf("\t\tSrc: %p\n", edge.Src) - debug += fmt.Sprintf("\t\tDest: %p\n", edge.Dest) - debug += fmt.Sprintf("\t\tWeight: %d\n", edge.Weight) - debug += fmt.Sprintf("\t\tResidual: %t\n", edge.Residual) - debug += fmt.Sprintf("\t\tInline: %t\n", edge.Inline) - return debug -} - -func edgeMapsDebugString(in, out EdgeMap) string { - debug := "" - debug += "In Edges:\n" - for parent, edge := range in { - debug += fmt.Sprintf("\tParent: %p\n", parent) - debug += edgeDebugString(edge) - } - debug += "Out Edges:\n" - for child, edge := range out { - debug += fmt.Sprintf("\tChild: %p\n", child) - debug += edgeDebugString(edge) - } - return debug -} - -func graphDebugString(graph *Graph) string { - debug := "" - for i, node := range graph.Nodes { - debug += fmt.Sprintf("Node %d: %p\n", i, node) - } - - for i, node := range graph.Nodes { - debug += "\n" - debug += fmt.Sprintf("=== Node %d: %p ===\n", i, node) - debug += edgeMapsDebugString(node.In, node.Out) - } - return debug -} - -func expectedNodesDebugString(expected []expectedNode) string { - debug := "" - for i, node := range expected { - debug += fmt.Sprintf("Node %d: %p\n", i, node.node) - } - - for i, node := range expected { - debug += "\n" - debug += fmt.Sprintf("=== Node %d: %p ===\n", i, node.node) - debug += edgeMapsDebugString(node.in, node.out) - } - return debug -} - -// edgeMapsEqual checks if all the edges in this equal all the edges in that. -func edgeMapsEqual(this, that EdgeMap) bool { - if len(this) != len(that) { - return false - } - for node, thisEdge := range this { - if *thisEdge != *that[node] { - return false - } - } - return true -} - -// nodesEqual checks if node is equal to expected. -func nodesEqual(node *Node, expected expectedNode) bool { - return node == expected.node && edgeMapsEqual(node.In, expected.in) && - edgeMapsEqual(node.Out, expected.out) -} - -// graphsEqual checks if graph is equivalent to the graph templated by expected. -func graphsEqual(graph *Graph, expected []expectedNode) bool { - if len(graph.Nodes) != len(expected) { - return false - } - expectedSet := make(map[*Node]expectedNode) - for i := range expected { - expectedSet[expected[i].node] = expected[i] - } - - for _, node := range graph.Nodes { - expectedNode, found := expectedSet[node] - if !found || !nodesEqual(node, expectedNode) { - return false - } - } - return true -} - -type expectedNode struct { - node *Node - in, out EdgeMap -} - -type trimTreeTestcase struct { - initial *Graph - expected []expectedNode - keep NodePtrSet -} - -// makeExpectedEdgeResidual makes the edge from parent to child residual. 
-func makeExpectedEdgeResidual(parent, child expectedNode) { - parent.out[child.node].Residual = true - child.in[parent.node].Residual = true -} - -func makeEdgeInline(edgeMap EdgeMap, node *Node) { - edgeMap[node].Inline = true -} - -func setEdgeWeight(edgeMap EdgeMap, node *Node, weight int64) { - edgeMap[node].Weight = weight -} - -// createEdges creates directed edges from the parent to each of the children. -func createEdges(parent *Node, children ...*Node) { - for _, child := range children { - edge := &Edge{ - Src: parent, - Dest: child, - } - parent.Out[child] = edge - child.In[parent] = edge - } -} - -// createEmptyNode creates a node without any edges. -func createEmptyNode() *Node { - return &Node{ - In: make(EdgeMap), - Out: make(EdgeMap), - } -} - -// createExpectedNodes creates a slice of expectedNodes from nodes. -func createExpectedNodes(nodes ...*Node) ([]expectedNode, NodePtrSet) { - expected := make([]expectedNode, len(nodes)) - keep := make(NodePtrSet, len(nodes)) - - for i, node := range nodes { - expected[i] = expectedNode{ - node: node, - in: make(EdgeMap), - out: make(EdgeMap), - } - keep[node] = true - } - - return expected, keep -} - -// createExpectedEdges creates directed edges from the parent to each of the -// children. -func createExpectedEdges(parent expectedNode, children ...expectedNode) { - for _, child := range children { - edge := &Edge{ - Src: parent.node, - Dest: child.node, - } - parent.out[child.node] = edge - child.in[parent.node] = edge - } -} - -// createTestCase1 creates a test case that initially looks like: -// 0 -// |(5) -// 1 -// (3)/ \(4) -// 2 3. -// -// After keeping 0, 2, and 3, it expects the graph: -// 0 -// (3)/ \(4) -// 2 3. -func createTestCase1() trimTreeTestcase { - // Create initial graph - graph := &Graph{make(Nodes, 4)} - nodes := graph.Nodes - for i := range nodes { - nodes[i] = createEmptyNode() - } - createEdges(nodes[0], nodes[1]) - createEdges(nodes[1], nodes[2], nodes[3]) - makeEdgeInline(nodes[0].Out, nodes[1]) - makeEdgeInline(nodes[1].Out, nodes[2]) - setEdgeWeight(nodes[0].Out, nodes[1], 5) - setEdgeWeight(nodes[1].Out, nodes[2], 3) - setEdgeWeight(nodes[1].Out, nodes[3], 4) - - // Create expected graph - expected, keep := createExpectedNodes(nodes[0], nodes[2], nodes[3]) - createExpectedEdges(expected[0], expected[1], expected[2]) - makeEdgeInline(expected[0].out, expected[1].node) - makeExpectedEdgeResidual(expected[0], expected[1]) - makeExpectedEdgeResidual(expected[0], expected[2]) - setEdgeWeight(expected[0].out, expected[1].node, 3) - setEdgeWeight(expected[0].out, expected[2].node, 4) - return trimTreeTestcase{ - initial: graph, - expected: expected, - keep: keep, - } -} - -// createTestCase2 creates a test case that initially looks like: -// 3 -// | (12) -// 1 -// | (8) -// 2 -// | (15) -// 0 -// | (10) -// 4. -// -// After keeping 3 and 4, it expects the graph: -// 3 -// | (10) -// 4. 
-func createTestCase2() trimTreeTestcase { - // Create initial graph - graph := &Graph{make(Nodes, 5)} - nodes := graph.Nodes - for i := range nodes { - nodes[i] = createEmptyNode() - } - createEdges(nodes[3], nodes[1]) - createEdges(nodes[1], nodes[2]) - createEdges(nodes[2], nodes[0]) - createEdges(nodes[0], nodes[4]) - setEdgeWeight(nodes[3].Out, nodes[1], 12) - setEdgeWeight(nodes[1].Out, nodes[2], 8) - setEdgeWeight(nodes[2].Out, nodes[0], 15) - setEdgeWeight(nodes[0].Out, nodes[4], 10) - - // Create expected graph - expected, keep := createExpectedNodes(nodes[3], nodes[4]) - createExpectedEdges(expected[0], expected[1]) - makeExpectedEdgeResidual(expected[0], expected[1]) - setEdgeWeight(expected[0].out, expected[1].node, 10) - return trimTreeTestcase{ - initial: graph, - expected: expected, - keep: keep, - } -} - -// createTestCase3 creates an initially empty graph and expects an empty graph -// after trimming. -func createTestCase3() trimTreeTestcase { - graph := &Graph{make(Nodes, 0)} - expected, keep := createExpectedNodes() - return trimTreeTestcase{ - initial: graph, - expected: expected, - keep: keep, - } -} - -// createTestCase4 creates a test case that initially looks like: -// 0. -// -// After keeping 0, it expects the graph: -// 0. -func createTestCase4() trimTreeTestcase { - graph := &Graph{make(Nodes, 1)} - nodes := graph.Nodes - for i := range nodes { - nodes[i] = createEmptyNode() - } - expected, keep := createExpectedNodes(nodes[0]) - return trimTreeTestcase{ - initial: graph, - expected: expected, - keep: keep, - } -} - -func createTrimTreeTestCases() []trimTreeTestcase { - caseGenerators := []func() trimTreeTestcase{ - createTestCase1, - createTestCase2, - createTestCase3, - createTestCase4, - } - cases := make([]trimTreeTestcase, len(caseGenerators)) - for i, gen := range caseGenerators { - cases[i] = gen() - } - return cases -} - -func TestTrimTree(t *testing.T) { - tests := createTrimTreeTestCases() - for _, test := range tests { - graph := test.initial - graph.TrimTree(test.keep) - if !graphsEqual(graph, test.expected) { - t.Fatalf("Graphs do not match.\nExpected: %s\nFound: %s\n", - expectedNodesDebugString(test.expected), - graphDebugString(graph)) - } - } -} - -func nodeTestProfile() *profile.Profile { - mappings := []*profile.Mapping{ - { - ID: 1, - File: "symbolized_binary", - }, - { - ID: 2, - File: "unsymbolized_library_1", - }, - { - ID: 3, - File: "unsymbolized_library_2", - }, - } - functions := []*profile.Function{ - {ID: 1, Name: "symname"}, - {ID: 2}, - } - locations := []*profile.Location{ - { - ID: 1, - Mapping: mappings[0], - Line: []profile.Line{ - {Function: functions[0]}, - }, - }, - { - ID: 2, - Mapping: mappings[1], - Line: []profile.Line{ - {Function: functions[1]}, - }, - }, - { - ID: 3, - Mapping: mappings[2], - }, - } - return &profile.Profile{ - PeriodType: &profile.ValueType{Type: "cpu", Unit: "milliseconds"}, - SampleType: []*profile.ValueType{ - {Type: "type", Unit: "unit"}, - }, - Sample: []*profile.Sample{ - { - Location: []*profile.Location{locations[0]}, - Value: []int64{1}, - }, - { - Location: []*profile.Location{locations[1]}, - Value: []int64{1}, - }, - { - Location: []*profile.Location{locations[2]}, - Value: []int64{1}, - }, - }, - Location: locations, - Function: functions, - Mapping: mappings, - } -} - -// TestCreateNodes checks that nodes are properly created for a simple profile. 
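Editorial aside (not part of the patch): the TrimTree test cases deleted here encode the expectation that removing intermediate nodes reroutes the surviving path as a single residual edge carrying the weight of the deepest remaining hop. A toy sketch of that behaviour on a plain tree follows; it does not use the removed Graph/EdgeMap types, all names are hypothetical, and the real TrimTree operates on general graphs with in/out edge maps.

```go
package main

import "fmt"

// node is a toy stand-in for the removed graph.Node.
type node struct {
	name     string
	children []*edge
}

type edge struct {
	to       *node
	weight   int64
	residual bool // set when intermediate nodes were removed on this path
}

// trim rebuilds the subtree under n keeping only names in keep. Edges that
// skip over removed nodes inherit the weight of the deepest surviving hop and
// are marked residual, which is the shape the removed TrimTree tests expect.
func trim(n *node, keep map[string]bool) []*edge {
	var out []*edge
	for _, e := range n.children {
		if keep[e.to.name] {
			e.to.children = trim(e.to, keep)
			out = append(out, e)
			continue
		}
		// Drop e.to but keep reachable descendants, marking them residual.
		for _, sub := range trim(e.to, keep) {
			sub.residual = true
			out = append(out, sub)
		}
	}
	return out
}

func main() {
	// Mirrors the shape of the removed createTestCase2: 3 -> 1 -> 2 -> 0 -> 4.
	// Keeping only 3 and 4 leaves a single residual edge 3 -> 4 with weight 10.
	n4 := &node{name: "4"}
	n0 := &node{name: "0", children: []*edge{{to: n4, weight: 10}}}
	n2 := &node{name: "2", children: []*edge{{to: n0, weight: 15}}}
	n1 := &node{name: "1", children: []*edge{{to: n2, weight: 8}}}
	n3 := &node{name: "3", children: []*edge{{to: n1, weight: 12}}}

	n3.children = trim(n3, map[string]bool{"3": true, "4": true})
	for _, e := range n3.children {
		fmt.Printf("3 -> %s weight=%d residual=%v\n", e.to.name, e.weight, e.residual)
	}
}
```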
-func TestCreateNodes(t *testing.T) { - testProfile := nodeTestProfile() - wantNodeSet := NodeSet{ - {Name: "symname"}: true, - {Objfile: "unsymbolized_library_1"}: true, - {Objfile: "unsymbolized_library_2"}: true, - } - - nodes, _ := CreateNodes(testProfile, &Options{}) - if len(nodes) != len(wantNodeSet) { - t.Errorf("got %d nodes, want %d", len(nodes), len(wantNodeSet)) - } - for _, node := range nodes { - if !wantNodeSet[node.Info] { - t.Errorf("unexpected node %v", node.Info) - } - } -} - -func TestShortenFunctionName(t *testing.T) { - type testCase struct { - name string - want string - } - testcases := []testCase{ - { - "root", - "root", - }, - { - "syscall.Syscall", - "syscall.Syscall", - }, - { - "net/http.(*conn).serve", - "http.(*conn).serve", - }, - { - "github.com/blahBlah/foo.Foo", - "foo.Foo", - }, - { - "github.com/BlahBlah/foo.Foo", - "foo.Foo", - }, - { - "github.com/blah-blah/foo_bar.(*FooBar).Foo", - "foo_bar.(*FooBar).Foo", - }, - { - "encoding/json.(*structEncoder).(encoding/json.encode)-fm", - "json.(*structEncoder).(encoding/json.encode)-fm", - }, - { - "github.com/blah/blah/vendor/gopkg.in/redis.v3.(*baseClient).(github.com/blah/blah/vendor/gopkg.in/redis.v3.process)-fm", - "redis.v3.(*baseClient).(github.com/blah/blah/vendor/gopkg.in/redis.v3.process)-fm", - }, - { - "github.com/foo/bar/v4.(*Foo).Bar", - "bar.(*Foo).Bar", - }, - { - "github.com/foo/bar/v4/baz.Foo.Bar", - "baz.Foo.Bar", - }, - { - "github.com/foo/bar/v123.(*Foo).Bar", - "bar.(*Foo).Bar", - }, - { - "github.com/foobar/v0.(*Foo).Bar", - "v0.(*Foo).Bar", - }, - { - "github.com/foobar/v1.(*Foo).Bar", - "v1.(*Foo).Bar", - }, - { - "example.org/v2xyz.Foo", - "v2xyz.Foo", - }, - { - "github.com/foo/bar/v4/v4.(*Foo).Bar", - "v4.(*Foo).Bar", - }, - { - "github.com/foo/bar/v4/foo/bar/v4.(*Foo).Bar", - "v4.(*Foo).Bar", - }, - { - "java.util.concurrent.ThreadPoolExecutor$Worker.run", - "ThreadPoolExecutor$Worker.run", - }, - { - "java.bar.foo.FooBar.run(java.lang.Runnable)", - "FooBar.run", - }, - { - "(anonymous namespace)::Bar::Foo", - "Bar::Foo", - }, - { - "(anonymous namespace)::foo", - "foo", - }, - { - "cpp::namespace::Class::method()::$_100::operator()", - "Class::method", - }, - { - "foo_bar::Foo::bar", - "Foo::bar", - }, - { - "cpp::namespace::Class::method()", - "Class::method", - }, - { - "foo", - "foo", - }, - { - "com.google.perftools.gwp.benchmark.FloatBench.lambda$run$0", - "FloatBench.lambda$run$0", - }, - { - "java.bar.foo.FooBar.run$0", - "FooBar.run$0", - }, - } - for _, tc := range testcases { - name := ShortenFunctionName(tc.name) - if got, want := name, tc.want; got != want { - t.Errorf("ShortenFunctionName(%q) = %q, want %q", tc.name, got, want) - } - } -} diff --git a/internal/pprof/graph/testdata/compose1.dot b/internal/pprof/graph/testdata/compose1.dot deleted file mode 100644 index a0842ee0ee2..00000000000 --- a/internal/pprof/graph/testdata/compose1.dot +++ /dev/null @@ -1,7 +0,0 @@ -digraph "testtitle" { -node [style=filled fillcolor="#f8f8f8"] -subgraph cluster_L { "label1" [shape=box fontsize=16 label="label1\llabel2\llabel3: \"foo\"\l" tooltip="testtitle"] } -N1 [label="src\n10 (10.00%)\nof 25 (25.00%)" id="node1" fontsize=22 shape=box tooltip="src (25)" color="#b23c00" fillcolor="#edddd5"] -N2 [label="dest\n15 (15.00%)\nof 25 (25.00%)" id="node2" fontsize=24 shape=box tooltip="dest (25)" color="#b23c00" fillcolor="#edddd5"] -N1 -> N2 [label=" 10" weight=11 color="#b28559" tooltip="src -> dest (10)" labeltooltip="src -> dest (10)"] -} diff --git 
a/internal/pprof/graph/testdata/compose2.dot b/internal/pprof/graph/testdata/compose2.dot deleted file mode 100644 index 44c2aecd565..00000000000 --- a/internal/pprof/graph/testdata/compose2.dot +++ /dev/null @@ -1,7 +0,0 @@ -digraph "testtitle" { -node [style=filled fillcolor="#f8f8f8"] -subgraph cluster_L { "label1" [shape=box fontsize=16 label="label1\llabel2\llabel3: \"foo\"\l" tooltip="testtitle"] } -N1 [label="SRC10 (10.00%)\nof 25 (25.00%)" id="node1" fontsize=24 shape=folder tooltip="src (25)" color="#b23c00" fillcolor="#edddd5" style="bold,filled" peripheries=2 URL="www.google.com" target="_blank"] -N2 [label="dest\n0 of 25 (25.00%)" id="node2" fontsize=8 shape=box tooltip="dest (25)" color="#b23c00" fillcolor="#edddd5"] -N1 -> N2 [label=" 10" weight=11 color="#b28559" tooltip="src -> dest (10)" labeltooltip="src -> dest (10)"] -} diff --git a/internal/pprof/graph/testdata/compose3.dot b/internal/pprof/graph/testdata/compose3.dot deleted file mode 100644 index f22ad9fe4cd..00000000000 --- a/internal/pprof/graph/testdata/compose3.dot +++ /dev/null @@ -1,11 +0,0 @@ -digraph "testtitle" { -node [style=filled fillcolor="#f8f8f8"] -subgraph cluster_L { "label1" [shape=box fontsize=16 label="label1\llabel2\llabel3: \"foo\"\l" tooltip="testtitle"] } -N1 [label="src\n10 (10.00%)\nof 25 (25.00%)" id="node1" fontsize=22 shape=box tooltip="src (25)" color="#b23c00" fillcolor="#edddd5"] -N1_0 [label = "tag1" id="N1_0" fontsize=8 shape=box3d tooltip="10"] -N1 -> N1_0 [label=" 10" weight=100 tooltip="10" labeltooltip="10"] -NN1_0 [label = "tag2" id="NN1_0" fontsize=8 shape=box3d tooltip="20"] -N1 -> NN1_0 [label=" 20" weight=100 tooltip="20" labeltooltip="20"] -N2 [label="dest\n15 (15.00%)\nof 25 (25.00%)" id="node2" fontsize=24 shape=box tooltip="dest (25)" color="#b23c00" fillcolor="#edddd5"] -N1 -> N2 [label=" 10" weight=11 color="#b28559" tooltip="src ... dest (10)" labeltooltip="src ... 
dest (10)" style="dotted" minlen=2] -} diff --git a/internal/pprof/graph/testdata/compose4.dot b/internal/pprof/graph/testdata/compose4.dot deleted file mode 100644 index ed770d101d3..00000000000 --- a/internal/pprof/graph/testdata/compose4.dot +++ /dev/null @@ -1,4 +0,0 @@ -digraph "testtitle" { -node [style=filled fillcolor="#f8f8f8"] -subgraph cluster_L { "label1" [shape=box fontsize=16 label="label1\llabel2\llabel3: \"foo\"\l" tooltip="testtitle"] } -} diff --git a/internal/pprof/graph/testdata/compose5.dot b/internal/pprof/graph/testdata/compose5.dot deleted file mode 100644 index 3f2285c31ca..00000000000 --- a/internal/pprof/graph/testdata/compose5.dot +++ /dev/null @@ -1,11 +0,0 @@ -digraph "testtitle" { -node [style=filled fillcolor="#f8f8f8"] -subgraph cluster_L { "label1" [shape=box fontsize=16 label="label1\llabel2\llabel3: \"foo\"\l" tooltip="testtitle"] } -N1 [label="src\n10 (10.00%)\nof 25 (25.00%)" id="node1" fontsize=22 shape=box tooltip="src (25)" color="#b23c00" fillcolor="#edddd5"] -N1_0 [label = "tag1" id="N1_0" fontsize=8 shape=box3d tooltip="10"] -N1 -> N1_0 [label=" 10" weight=100 tooltip="10" labeltooltip="10"] -NN1_0_0 [label = "tag2" id="NN1_0_0" fontsize=8 shape=box3d tooltip="20"] -N1_0 -> NN1_0_0 [label=" 20" weight=100 tooltip="20" labeltooltip="20"] -N2 [label="dest\n15 (15.00%)\nof 25 (25.00%)" id="node2" fontsize=24 shape=box tooltip="dest (25)" color="#b23c00" fillcolor="#edddd5"] -N1 -> N2 [label=" 10" weight=11 color="#b28559" tooltip="src -> dest (10)" labeltooltip="src -> dest (10)" minlen=2] -} diff --git a/internal/pprof/graph/testdata/compose6.dot b/internal/pprof/graph/testdata/compose6.dot deleted file mode 100644 index 1dfc3feeb4e..00000000000 --- a/internal/pprof/graph/testdata/compose6.dot +++ /dev/null @@ -1,7 +0,0 @@ -digraph "testtitle" { -node [style=filled fillcolor="#f8f8f8"] -subgraph cluster_L { "label1" [shape=box fontsize=16 label="label1\llabel2\llabel3: \"foo\"\l" URL="http://example.com" target="_blank" tooltip="testtitle"] } -N1 [label="src\n10 (10.00%)\nof 25 (25.00%)" id="node1" fontsize=22 shape=box tooltip="src (25)" color="#b23c00" fillcolor="#edddd5"] -N2 [label="dest\n15 (15.00%)\nof 25 (25.00%)" id="node2" fontsize=24 shape=box tooltip="dest (25)" color="#b23c00" fillcolor="#edddd5"] -N1 -> N2 [label=" 10" weight=11 color="#b28559" tooltip="src -> dest (10)" labeltooltip="src -> dest (10)"] -} diff --git a/internal/pprof/graph/testdata/compose7.dot b/internal/pprof/graph/testdata/compose7.dot deleted file mode 100644 index 8f749a77976..00000000000 --- a/internal/pprof/graph/testdata/compose7.dot +++ /dev/null @@ -1,7 +0,0 @@ -digraph "testtitle" { -node [style=filled fillcolor="#f8f8f8"] -subgraph cluster_L { "label1" [shape=box fontsize=16 label="label1\llabel2\llabel3: \"foo\"\l" tooltip="testtitle"] } -N1 [label="var\"src\"\n10 (10.00%)\nof 25 (25.00%)" id="node1" fontsize=22 shape=box tooltip="var\"src\" (25)" color="#b23c00" fillcolor="#edddd5"] -N2 [label="var\"#dest#\"\n15 (15.00%)\nof 25 (25.00%)" id="node2" fontsize=24 shape=box tooltip="var\"#dest#\" (25)" color="#b23c00" fillcolor="#edddd5"] -N1 -> N2 [label=" 10" weight=11 color="#b28559" tooltip="var\"src\" -> var\"#dest#\" (10)" labeltooltip="var\"src\" -> var\"#dest#\" (10)"] -} diff --git a/internal/pprof/measurement/measurement.go b/internal/pprof/measurement/measurement.go deleted file mode 100644 index 53325740a3e..00000000000 --- a/internal/pprof/measurement/measurement.go +++ /dev/null @@ -1,296 +0,0 @@ -// Copyright 2014 Google Inc. 
All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Package measurement export utility functions to manipulate/format performance profile sample values. -package measurement - -import ( - "fmt" - "math" - "strings" - "time" - - "github.com/google/pprof/profile" -) - -// ScaleProfiles updates the units in a set of profiles to make them -// compatible. It scales the profiles to the smallest unit to preserve -// data. -func ScaleProfiles(profiles []*profile.Profile) error { - if len(profiles) == 0 { - return nil - } - periodTypes := make([]*profile.ValueType, 0, len(profiles)) - for _, p := range profiles { - if p.PeriodType != nil { - periodTypes = append(periodTypes, p.PeriodType) - } - } - periodType, err := CommonValueType(periodTypes) - if err != nil { - return fmt.Errorf("period type: %v", err) - } - - // Identify common sample types - numSampleTypes := len(profiles[0].SampleType) - for _, p := range profiles[1:] { - if numSampleTypes != len(p.SampleType) { - return fmt.Errorf("inconsistent samples type count: %d != %d", numSampleTypes, len(p.SampleType)) - } - } - sampleType := make([]*profile.ValueType, numSampleTypes) - for i := 0; i < numSampleTypes; i++ { - sampleTypes := make([]*profile.ValueType, len(profiles)) - for j, p := range profiles { - sampleTypes[j] = p.SampleType[i] - } - sampleType[i], err = CommonValueType(sampleTypes) - if err != nil { - return fmt.Errorf("sample types: %v", err) - } - } - - for _, p := range profiles { - if p.PeriodType != nil && periodType != nil { - period, _ := Scale(p.Period, p.PeriodType.Unit, periodType.Unit) - p.Period, p.PeriodType.Unit = int64(period), periodType.Unit - } - ratios := make([]float64, len(p.SampleType)) - for i, st := range p.SampleType { - if sampleType[i] == nil { - ratios[i] = 1 - continue - } - ratios[i], _ = Scale(1, st.Unit, sampleType[i].Unit) - p.SampleType[i].Unit = sampleType[i].Unit - } - if err := p.ScaleN(ratios); err != nil { - return fmt.Errorf("scale: %v", err) - } - } - return nil -} - -// CommonValueType returns the finest type from a set of compatible -// types. -func CommonValueType(ts []*profile.ValueType) (*profile.ValueType, error) { - if len(ts) <= 1 { - return nil, nil - } - minType := ts[0] - for _, t := range ts[1:] { - if !compatibleValueTypes(minType, t) { - return nil, fmt.Errorf("incompatible types: %v %v", *minType, *t) - } - if ratio, _ := Scale(1, t.Unit, minType.Unit); ratio < 1 { - minType = t - } - } - rcopy := *minType - return &rcopy, nil -} - -func compatibleValueTypes(v1, v2 *profile.ValueType) bool { - if v1 == nil || v2 == nil { - return true // No grounds to disqualify. - } - // Remove trailing 's' to permit minor mismatches. 
- if t1, t2 := strings.TrimSuffix(v1.Type, "s"), strings.TrimSuffix(v2.Type, "s"); t1 != t2 { - return false - } - - return v1.Unit == v2.Unit || - (timeUnits.sniffUnit(v1.Unit) != nil && timeUnits.sniffUnit(v2.Unit) != nil) || - (memoryUnits.sniffUnit(v1.Unit) != nil && memoryUnits.sniffUnit(v2.Unit) != nil) || - (gcuUnits.sniffUnit(v1.Unit) != nil && gcuUnits.sniffUnit(v2.Unit) != nil) -} - -// Scale a measurement from an unit to a different unit and returns -// the scaled value and the target unit. The returned target unit -// will be empty if uninteresting (could be skipped). -func Scale(value int64, fromUnit, toUnit string) (float64, string) { - // Avoid infinite recursion on overflow. - if value < 0 && -value > 0 { - v, u := Scale(-value, fromUnit, toUnit) - return -v, u - } - if m, u, ok := memoryUnits.convertUnit(value, fromUnit, toUnit); ok { - return m, u - } - if t, u, ok := timeUnits.convertUnit(value, fromUnit, toUnit); ok { - return t, u - } - if g, u, ok := gcuUnits.convertUnit(value, fromUnit, toUnit); ok { - return g, u - } - // Skip non-interesting units. - switch toUnit { - case "count", "sample", "unit", "minimum", "auto": - return float64(value), "" - default: - return float64(value), toUnit - } -} - -// Label returns the label used to describe a certain measurement. -func Label(value int64, unit string) string { - return ScaledLabel(value, unit, "auto") -} - -// ScaledLabel scales the passed-in measurement (if necessary) and -// returns the label used to describe a float measurement. -func ScaledLabel(value int64, fromUnit, toUnit string) string { - v, u := Scale(value, fromUnit, toUnit) - sv := strings.TrimSuffix(fmt.Sprintf("%.2f", v), ".00") - if sv == "0" || sv == "-0" { - return "0" - } - return sv + u -} - -// Percentage computes the percentage of total of a value, and encodes -// it as a string. At least two digits of precision are printed. -func Percentage(value, total int64) string { - var ratio float64 - if total != 0 { - ratio = math.Abs(float64(value)/float64(total)) * 100 - } - switch { - case math.Abs(ratio) >= 99.95 && math.Abs(ratio) <= 100.05: - return " 100%" - case math.Abs(ratio) >= 1.0: - return fmt.Sprintf("%5.2f%%", ratio) - default: - return fmt.Sprintf("%5.2g%%", ratio) - } -} - -// unit includes a list of aliases representing a specific unit and a factor -// which one can multiple a value in the specified unit by to get the value -// in terms of the base unit. -type unit struct { - canonicalName string - aliases []string - factor float64 -} - -// unitType includes a list of units that are within the same category (i.e. -// memory or time units) and a default unit to use for this type of unit. -type unitType struct { - defaultUnit unit - units []unit -} - -// findByAlias returns the unit associated with the specified alias. It returns -// nil if the unit with such alias is not found. -func (ut unitType) findByAlias(alias string) *unit { - for _, u := range ut.units { - for _, a := range u.aliases { - if alias == a { - return &u - } - } - } - return nil -} - -// sniffUnit simpifies the input alias and returns the unit associated with the -// specified alias. It returns nil if the unit with such alias is not found. -func (ut unitType) sniffUnit(unit string) *unit { - unit = strings.ToLower(unit) - if len(unit) > 2 { - unit = strings.TrimSuffix(unit, "s") - } - return ut.findByAlias(unit) -} - -// autoScale takes in the value with units of the base unit and returns -// that value scaled to a reasonable unit if a reasonable unit is -// found. 
-func (ut unitType) autoScale(value float64) (float64, string, bool) { - var f float64 - var unit string - for _, u := range ut.units { - if u.factor >= f && (value/u.factor) >= 1.0 { - f = u.factor - unit = u.canonicalName - } - } - if f == 0 { - return 0, "", false - } - return value / f, unit, true -} - -// convertUnit converts a value from the fromUnit to the toUnit, autoscaling -// the value if the toUnit is "minimum" or "auto". If the fromUnit is not -// included in the unitType, then a false boolean will be returned. If the -// toUnit is not in the unitType, the value will be returned in terms of the -// default unitType. -func (ut unitType) convertUnit(value int64, fromUnitStr, toUnitStr string) (float64, string, bool) { - fromUnit := ut.sniffUnit(fromUnitStr) - if fromUnit == nil { - return 0, "", false - } - v := float64(value) * fromUnit.factor - if toUnitStr == "minimum" || toUnitStr == "auto" { - if v, u, ok := ut.autoScale(v); ok { - return v, u, true - } - return v / ut.defaultUnit.factor, ut.defaultUnit.canonicalName, true - } - toUnit := ut.sniffUnit(toUnitStr) - if toUnit == nil { - return v / ut.defaultUnit.factor, ut.defaultUnit.canonicalName, true - } - return v / toUnit.factor, toUnit.canonicalName, true -} - -var memoryUnits = unitType{ - units: []unit{ - {"B", []string{"b", "byte"}, 1}, - {"kB", []string{"kb", "kbyte", "kilobyte"}, float64(1 << 10)}, - {"MB", []string{"mb", "mbyte", "megabyte"}, float64(1 << 20)}, - {"GB", []string{"gb", "gbyte", "gigabyte"}, float64(1 << 30)}, - {"TB", []string{"tb", "tbyte", "terabyte"}, float64(1 << 40)}, - {"PB", []string{"pb", "pbyte", "petabyte"}, float64(1 << 50)}, - }, - defaultUnit: unit{"B", []string{"b", "byte"}, 1}, -} - -var timeUnits = unitType{ - units: []unit{ - {"ns", []string{"ns", "nanosecond"}, float64(time.Nanosecond)}, - {"us", []string{"μs", "us", "microsecond"}, float64(time.Microsecond)}, - {"ms", []string{"ms", "millisecond"}, float64(time.Millisecond)}, - {"s", []string{"s", "sec", "second"}, float64(time.Second)}, - {"hrs", []string{"hour", "hr"}, float64(time.Hour)}, - }, - defaultUnit: unit{"s", []string{}, float64(time.Second)}, -} - -var gcuUnits = unitType{ - units: []unit{ - {"n*GCU", []string{"nanogcu"}, 1e-9}, - {"u*GCU", []string{"microgcu"}, 1e-6}, - {"m*GCU", []string{"milligcu"}, 1e-3}, - {"GCU", []string{"gcu"}, 1}, - {"k*GCU", []string{"kilogcu"}, 1e3}, - {"M*GCU", []string{"megagcu"}, 1e6}, - {"G*GCU", []string{"gigagcu"}, 1e9}, - {"T*GCU", []string{"teragcu"}, 1e12}, - {"P*GCU", []string{"petagcu"}, 1e15}, - }, - defaultUnit: unit{"GCU", []string{}, 1.0}, -} diff --git a/internal/pprof/measurement/measurement_test.go b/internal/pprof/measurement/measurement_test.go deleted file mode 100644 index 7521a64d262..00000000000 --- a/internal/pprof/measurement/measurement_test.go +++ /dev/null @@ -1,76 +0,0 @@ -// Copyright 2017 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
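Editorial aside (not part of the patch): unitType.autoScale above picks the largest unit whose factor still leaves a scaled value of at least 1. Below is a standalone sketch of that rule for memory units only; byteUnit and autoScaleBytes are hypothetical names, and the factors are the same powers of two as the removed memoryUnits table.

```go
package main

import "fmt"

// byteUnit pairs a display name with its factor relative to bytes, mirroring
// the memoryUnits table in the removed measurement package.
type byteUnit struct {
	name   string
	factor float64
}

var byteUnits = []byteUnit{
	{"B", 1},
	{"kB", 1 << 10},
	{"MB", 1 << 20},
	{"GB", 1 << 30},
	{"TB", 1 << 40},
}

// autoScaleBytes picks the largest unit whose scaled value is still >= 1,
// which is the rule the removed unitType.autoScale applies.
func autoScaleBytes(value float64) (float64, string) {
	best := byteUnits[0]
	for _, u := range byteUnits {
		if u.factor >= best.factor && value/u.factor >= 1.0 {
			best = u
		}
	}
	return value / best.factor, best.name
}

func main() {
	v, u := autoScaleBytes(2048 << 20) // 2048 MB worth of bytes
	fmt.Printf("%.2f %s\n", v, u)      // 2.00 GB
}
```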
- -package measurement - -import ( - "math" - "testing" -) - -func TestScale(t *testing.T) { - for _, tc := range []struct { - value int64 - fromUnit, toUnit string - wantValue float64 - wantUnit string - }{ - {1, "s", "ms", 1000, "ms"}, - {1, "kb", "b", 1024, "B"}, - {1, "kbyte", "b", 1024, "B"}, - {1, "kilobyte", "b", 1024, "B"}, - {1, "mb", "kb", 1024, "kB"}, - {1, "gb", "mb", 1024, "MB"}, - {1024, "gb", "tb", 1, "TB"}, - {1024, "tb", "pb", 1, "PB"}, - {2048, "mb", "auto", 2, "GB"}, - {3.1536e7, "s", "auto", 8760, "hrs"}, - {-1, "s", "ms", -1000, "ms"}, - {1, "foo", "count", 1, ""}, - {1, "foo", "bar", 1, "bar"}, - {2000, "count", "count", 2000, ""}, - {2000, "count", "auto", 2000, ""}, - {2000, "count", "minimum", 2000, ""}, - {8e10, "nanogcu", "petagcus", 8e-14, "P*GCU"}, - {1.5e10, "microGCU", "teraGCU", 1.5e-8, "T*GCU"}, - {3e6, "milliGCU", "gigagcu", 3e-6, "G*GCU"}, - {1000, "kilogcu", "megagcu", 1, "M*GCU"}, - {2000, "GCU", "kiloGCU", 2, "k*GCU"}, - {7, "megaGCU", "gcu", 7e6, "GCU"}, - {5, "gigagcus", "milligcu", 5e12, "m*GCU"}, - {7, "teragcus", "microGCU", 7e18, "u*GCU"}, - {1, "petaGCU", "nanogcus", 1e24, "n*GCU"}, - {100, "NanoGCU", "auto", 100, "n*GCU"}, - {5000, "nanogcu", "auto", 5, "u*GCU"}, - {3000, "MicroGCU", "auto", 3, "m*GCU"}, - {4000, "MilliGCU", "auto", 4, "GCU"}, - {4000, "GCU", "auto", 4, "k*GCU"}, - {5000, "KiloGCU", "auto", 5, "M*GCU"}, - {6000, "MegaGCU", "auto", 6, "G*GCU"}, - {7000, "GigaGCU", "auto", 7, "T*GCU"}, - {8000, "TeraGCU", "auto", 8, "P*GCU"}, - {9000, "PetaGCU", "auto", 9000, "P*GCU"}, - } { - if gotValue, gotUnit := Scale(tc.value, tc.fromUnit, tc.toUnit); !floatEqual(gotValue, tc.wantValue) || gotUnit != tc.wantUnit { - t.Errorf("Scale(%d, %q, %q) = (%g, %q), want (%g, %q)", - tc.value, tc.fromUnit, tc.toUnit, gotValue, gotUnit, tc.wantValue, tc.wantUnit) - } - } -} - -func floatEqual(a, b float64) bool { - diff := math.Abs(a - b) - avg := (math.Abs(a) + math.Abs(b)) / 2 - return diff/avg < 0.0001 -} diff --git a/internal/pprof/plugin/plugin.go b/internal/pprof/plugin/plugin.go deleted file mode 100644 index a57a0b20a96..00000000000 --- a/internal/pprof/plugin/plugin.go +++ /dev/null @@ -1,214 +0,0 @@ -// Copyright 2014 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Package plugin defines the plugin implementations that the main pprof driver requires. -package plugin - -import ( - "io" - "net/http" - "regexp" - "time" - - "github.com/google/pprof/profile" -) - -// Options groups all the optional plugins into pprof. -type Options struct { - Writer Writer - Flagset FlagSet - Fetch Fetcher - Sym Symbolizer - Obj ObjTool - UI UI - - // HTTPServer is a function that should block serving http requests, - // including the handlers specified in args. If non-nil, pprof will - // invoke this function if necessary to provide a web interface. - // - // If HTTPServer is nil, pprof will use its own internal HTTP server. 
- // - // A common use for a custom HTTPServer is to provide custom - // authentication checks. - HTTPServer func(args *HTTPServerArgs) error - HTTPTransport http.RoundTripper -} - -// Writer provides a mechanism to write data under a certain name, -// typically a filename. -type Writer interface { - Open(name string) (io.WriteCloser, error) -} - -// A FlagSet creates and parses command-line flags. -// It is similar to the standard flag.FlagSet. -type FlagSet interface { - // Bool, Int, Float64, and String define new flags, - // like the functions of the same name in package flag. - Bool(name string, def bool, usage string) *bool - Int(name string, def int, usage string) *int - Float64(name string, def float64, usage string) *float64 - String(name string, def string, usage string) *string - - // StringList is similar to String but allows multiple values for a - // single flag - StringList(name string, def string, usage string) *[]*string - - // ExtraUsage returns any additional text that should be printed after the - // standard usage message. The extra usage message returned includes all text - // added with AddExtraUsage(). - // The typical use of ExtraUsage is to show any custom flags defined by the - // specific pprof plugins being used. - ExtraUsage() string - - // AddExtraUsage appends additional text to the end of the extra usage message. - AddExtraUsage(eu string) - - // Parse initializes the flags with their values for this run - // and returns the non-flag command line arguments. - // If an unknown flag is encountered or there are no arguments, - // Parse should call usage and return nil. - Parse(usage func()) []string -} - -// A Fetcher reads and returns the profile named by src. src can be a -// local file path or a URL. duration and timeout are units specified -// by the end user, or 0 by default. duration refers to the length of -// the profile collection, if applicable, and timeout is the amount of -// time to wait for a profile before returning an error. Returns the -// fetched profile, the URL of the actual source of the profile, or an -// error. -type Fetcher interface { - Fetch(src string, duration, timeout time.Duration) (*profile.Profile, string, error) -} - -// A Symbolizer introduces symbol information into a profile. -type Symbolizer interface { - Symbolize(mode string, srcs MappingSources, prof *profile.Profile) error -} - -// MappingSources map each profile.Mapping to the source of the profile. -// The key is either Mapping.File or Mapping.BuildId. -type MappingSources map[string][]struct { - Source string // URL of the source the mapping was collected from - Start uint64 // delta applied to addresses from this source (to represent Merge adjustments) -} - -// An ObjTool inspects shared libraries and executable files. -type ObjTool interface { - // Open opens the named object file. If the object is a shared - // library, start/limit/offset are the addresses where it is mapped - // into memory in the address space being inspected. - Open(file string, start, limit, offset uint64) (ObjFile, error) - - // Disasm disassembles the named object file, starting at - // the start address and stopping at (before) the end address. - Disasm(file string, start, end uint64, intelSyntax bool) ([]Inst, error) -} - -// An Inst is a single instruction in an assembly listing. 
-type Inst struct { - Addr uint64 // virtual address of instruction - Text string // instruction text - Function string // function name - File string // source file - Line int // source line -} - -// An ObjFile is a single object file: a shared library or executable. -type ObjFile interface { - // Name returns the underlyinf file name, if available - Name() string - - // ObjAddr returns the objdump (linker) address corresponding to a runtime - // address, and an error. - ObjAddr(addr uint64) (uint64, error) - - // BuildID returns the GNU build ID of the file, or an empty string. - BuildID() string - - // SourceLine reports the source line information for a given - // address in the file. Due to inlining, the source line information - // is in general a list of positions representing a call stack, - // with the leaf function first. - SourceLine(addr uint64) ([]Frame, error) - - // Symbols returns a list of symbols in the object file. - // If r is not nil, Symbols restricts the list to symbols - // with names matching the regular expression. - // If addr is not zero, Symbols restricts the list to symbols - // containing that address. - Symbols(r *regexp.Regexp, addr uint64) ([]*Sym, error) - - // Close closes the file, releasing associated resources. - Close() error -} - -// A Frame describes a single line in a source file. -type Frame struct { - Func string // name of function - File string // source file name - Line int // line in file -} - -// A Sym describes a single symbol in an object file. -type Sym struct { - Name []string // names of symbol (many if symbol was dedup'ed) - File string // object file containing symbol - Start uint64 // start virtual address - End uint64 // virtual address of last byte in sym (Start+size-1) -} - -// A UI manages user interactions. -type UI interface { - // Read returns a line of text (a command) read from the user. - // prompt is printed before reading the command. - ReadLine(prompt string) (string, error) - - // Print shows a message to the user. - // It formats the text as fmt.Print would and adds a final \n if not already present. - // For line-based UI, Print writes to standard error. - // (Standard output is reserved for report data.) - Print(...interface{}) - - // PrintErr shows an error message to the user. - // It formats the text as fmt.Print would and adds a final \n if not already present. - // For line-based UI, PrintErr writes to standard error. - PrintErr(...interface{}) - - // IsTerminal returns whether the UI is known to be tied to an - // interactive terminal (as opposed to being redirected to a file). - IsTerminal() bool - - // WantBrowser indicates whether a browser should be opened with the -http option. - WantBrowser() bool - - // SetAutoComplete instructs the UI to call complete(cmd) to obtain - // the auto-completion of cmd, if the UI supports auto-completion at all. - SetAutoComplete(complete func(string) string) -} - -// HTTPServerArgs contains arguments needed by an HTTP server that -// is exporting a pprof web interface. -type HTTPServerArgs struct { - // Hostport contains the http server address (derived from flags). - Hostport string - - Host string // Host portion of Hostport - Port int // Port portion of Hostport - - // Handlers maps from URL paths to the handler to invoke to - // serve that path. 
- Handlers map[string]http.Handler -} diff --git a/internal/pprof/proftest/proftest.go b/internal/pprof/proftest/proftest.go deleted file mode 100644 index 160eb66fcae..00000000000 --- a/internal/pprof/proftest/proftest.go +++ /dev/null @@ -1,140 +0,0 @@ -// Copyright 2014 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Package proftest provides some utility routines to test other -// packages related to profiles. -package proftest - -import ( - "encoding/json" - "fmt" - "io" - "io/ioutil" - "os" - "os/exec" - "regexp" - "testing" -) - -// Diff compares two byte arrays using the diff tool to highlight the -// differences. It is meant for testing purposes to display the -// differences between expected and actual output. -func Diff(b1, b2 []byte) (data []byte, err error) { - f1, err := ioutil.TempFile("", "proto_test") - if err != nil { - return nil, err - } - defer os.Remove(f1.Name()) - defer f1.Close() - - f2, err := ioutil.TempFile("", "proto_test") - if err != nil { - return nil, err - } - defer os.Remove(f2.Name()) - defer f2.Close() - - f1.Write(b1) - f2.Write(b2) - - data, err = exec.Command("diff", "-u", f1.Name(), f2.Name()).CombinedOutput() - if len(data) > 0 { - // diff exits with a non-zero status when the files don't match. - // Ignore that failure as long as we get output. - err = nil - } - if err != nil { - data = []byte(fmt.Sprintf("diff failed: %v\nb1: %q\nb2: %q\n", err, b1, b2)) - err = nil - } - return -} - -// EncodeJSON encodes a value into a byte array. This is intended for -// testing purposes. -func EncodeJSON(x interface{}) []byte { - data, err := json.MarshalIndent(x, "", " ") - if err != nil { - panic(err) - } - data = append(data, '\n') - return data -} - -// TestUI implements the plugin.UI interface, triggering test failures -// if more than Ignore errors not matching AllowRx are printed. -// Also tracks the number of times the error matches AllowRx in -// NumAllowRxMatches. -type TestUI struct { - T *testing.T - Ignore int - AllowRx string - NumAllowRxMatches int - Input []string - index int -} - -// ReadLine returns no input, as no input is expected during testing. -func (ui *TestUI) ReadLine(_ string) (string, error) { - if ui.index >= len(ui.Input) { - return "", io.EOF - } - input := ui.Input[ui.index] - ui.index++ - if input == "**error**" { - return "", fmt.Errorf("error: %s", input) - } - return input, nil -} - -// Print messages are discarded by the test UI. -func (ui *TestUI) Print(args ...interface{}) { -} - -// PrintErr messages may trigger an error failure. A fixed number of -// error messages are permitted when appropriate. 
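Editorial aside (not part of the patch): the plugin package deleted above is a set of small interfaces the pprof driver is parameterised by, and Writer is the simplest of them, a single Open(name) (io.WriteCloser, error) method. The sketch below is a hypothetical file-backed implementation, shown only to illustrate how a caller would satisfy that interface; the fileWriter name and base-directory behaviour are assumptions.

```go
package main

import (
	"io"
	"os"
	"path/filepath"
)

// fileWriter is a hypothetical implementation of the removed plugin.Writer
// interface: it resolves names under a base directory and returns a file.
type fileWriter struct {
	dir string
}

func (w fileWriter) Open(name string) (io.WriteCloser, error) {
	return os.Create(filepath.Join(w.dir, filepath.Base(name)))
}

func main() {
	// The anonymous interface stands in for the removed plugin.Writer type.
	var w interface {
		Open(name string) (io.WriteCloser, error)
	} = fileWriter{dir: os.TempDir()}

	f, err := w.Open("report.txt")
	if err != nil {
		panic(err)
	}
	defer f.Close()
	if _, err := io.WriteString(f, "pprof report output\n"); err != nil {
		panic(err)
	}
}
```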
-func (ui *TestUI) PrintErr(args ...interface{}) { - if ui.AllowRx != "" { - if matched, err := regexp.MatchString(ui.AllowRx, fmt.Sprint(args...)); matched || err != nil { - if err != nil { - ui.T.Errorf("failed to match against regex %q: %v", ui.AllowRx, err) - } - ui.NumAllowRxMatches++ - return - } - } - if ui.Ignore > 0 { - ui.Ignore-- - return - } - // Stringify arguments with fmt.Sprint() to match what default UI - // implementation does. Without this Error() calls fmt.Sprintln() which - // _always_ adds spaces between arguments, unlike fmt.Sprint() which only - // adds them between arguments if neither is string. - ui.T.Error("unexpected error: " + fmt.Sprint(args...)) -} - -// IsTerminal indicates if the UI is an interactive terminal. -func (ui *TestUI) IsTerminal() bool { - return false -} - -// WantBrowser indicates whether a browser should be opened with the -http option. -func (ui *TestUI) WantBrowser() bool { - return false -} - -// SetAutoComplete is not supported by the test UI. -func (ui *TestUI) SetAutoComplete(_ func(string) string) { -} diff --git a/internal/pprof/report/report.go b/internal/pprof/report/report.go deleted file mode 100644 index cb148279056..00000000000 --- a/internal/pprof/report/report.go +++ /dev/null @@ -1,1321 +0,0 @@ -// Copyright 2014 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Package report summarizes a performance profile into a -// human-readable report. -package report - -import ( - "fmt" - "io" - "path/filepath" - "regexp" - "sort" - "strconv" - "strings" - "text/tabwriter" - "time" - - "github.com/parca-dev/parca/internal/pprof/graph" - "github.com/parca-dev/parca/internal/pprof/measurement" - "github.com/parca-dev/parca/internal/pprof/plugin" - "github.com/google/pprof/profile" -) - -// Output formats. -const ( - Callgrind = iota - Comments - Dis - Dot - List - Proto - Raw - Tags - Text - TopProto - Traces - Tree - WebList -) - -// Options are the formatting and filtering options used to generate a -// profile. -type Options struct { - OutputFormat int - - CumSort bool - CallTree bool - DropNegative bool - CompactLabels bool - Ratio float64 - Title string - ProfileLabels []string - ActiveFilters []string - NumLabelUnits map[string]string - - NodeCount int - NodeFraction float64 - EdgeFraction float64 - - SampleValue func(s []int64) int64 - SampleMeanDivisor func(s []int64) int64 - SampleType string - SampleUnit string // Unit for the sample data from the profile. - - OutputUnit string // Units for data formatting in report. - - Symbol *regexp.Regexp // Symbols to include on disassembly report. - SourcePath string // Search path for source files. - TrimPath string // Paths to trim from source file paths. - - IntelSyntax bool // Whether or not to print assembly in Intel syntax. -} - -// Generate generates a report as directed by the Report. 
-func Generate(w io.Writer, rpt *Report, obj plugin.ObjTool) error { - o := rpt.options - - switch o.OutputFormat { - case Comments: - return printComments(w, rpt) - case Dot: - return printDOT(w, rpt) - case Tree: - return printTree(w, rpt) - case Text: - return printText(w, rpt) - case Traces: - return printTraces(w, rpt) - case Raw: - fmt.Fprint(w, rpt.prof.String()) - return nil - case Tags: - return printTags(w, rpt) - case Proto: - return printProto(w, rpt) - case TopProto: - return printTopProto(w, rpt) - case Dis: - return printAssembly(w, rpt, obj) - case List: - return printSource(w, rpt) - case WebList: - return printWebSource(w, rpt, obj) - case Callgrind: - return printCallgrind(w, rpt) - } - return fmt.Errorf("unexpected output format") -} - -// newTrimmedGraph creates a graph for this report, trimmed according -// to the report options. -func (rpt *Report) newTrimmedGraph() (g *graph.Graph, origCount, droppedNodes, droppedEdges int) { - o := rpt.options - - // Build a graph and refine it. On each refinement step we must rebuild the graph from the samples, - // as the graph itself doesn't contain enough information to preserve full precision. - visualMode := o.OutputFormat == Dot - cumSort := o.CumSort - - // The call_tree option is only honored when generating visual representations of the callgraph. - callTree := o.CallTree && (o.OutputFormat == Dot || o.OutputFormat == Callgrind) - - // First step: Build complete graph to identify low frequency nodes, based on their cum weight. - g = rpt.newGraph(nil) - totalValue, _ := g.Nodes.Sum() - nodeCutoff := abs64(int64(float64(totalValue) * o.NodeFraction)) - edgeCutoff := abs64(int64(float64(totalValue) * o.EdgeFraction)) - - // Filter out nodes with cum value below nodeCutoff. - if nodeCutoff > 0 { - if callTree { - if nodesKept := g.DiscardLowFrequencyNodePtrs(nodeCutoff); len(g.Nodes) != len(nodesKept) { - droppedNodes = len(g.Nodes) - len(nodesKept) - g.TrimTree(nodesKept) - } - } else { - if nodesKept := g.DiscardLowFrequencyNodes(nodeCutoff); len(g.Nodes) != len(nodesKept) { - droppedNodes = len(g.Nodes) - len(nodesKept) - g = rpt.newGraph(nodesKept) - } - } - } - origCount = len(g.Nodes) - - // Second step: Limit the total number of nodes. Apply specialized heuristics to improve - // visualization when generating dot output. - g.SortNodes(cumSort, visualMode) - if nodeCount := o.NodeCount; nodeCount > 0 { - // Remove low frequency tags and edges as they affect selection. - g.TrimLowFrequencyTags(nodeCutoff) - g.TrimLowFrequencyEdges(edgeCutoff) - if callTree { - if nodesKept := g.SelectTopNodePtrs(nodeCount, visualMode); len(g.Nodes) != len(nodesKept) { - g.TrimTree(nodesKept) - g.SortNodes(cumSort, visualMode) - } - } else { - if nodesKept := g.SelectTopNodes(nodeCount, visualMode); len(g.Nodes) != len(nodesKept) { - g = rpt.newGraph(nodesKept) - g.SortNodes(cumSort, visualMode) - } - } - } - - // Final step: Filter out low frequency tags and edges, and remove redundant edges that clutter - // the graph. - g.TrimLowFrequencyTags(nodeCutoff) - droppedEdges = g.TrimLowFrequencyEdges(edgeCutoff) - if visualMode { - g.RemoveRedundantEdges() - } - return -} - -func (rpt *Report) selectOutputUnit(g *graph.Graph) { - o := rpt.options - - // Select best unit for profile output. 
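Editorial aside (not part of the patch): the first trimming step in newTrimmedGraph above derives absolute cut-off values from the graph total and the NodeFraction/EdgeFraction options. A small sketch of just that arithmetic; the function name and sample numbers are illustrative, and the 0.005/0.001 fractions are assumed to be pprof's usual defaults.

```go
package main

import "fmt"

// cutoffs mirrors the first step of the removed newTrimmedGraph: nodes whose
// cumulative value is below total*nodeFraction, and edges below
// total*edgeFraction, are dropped before the node-count limit is applied.
func cutoffs(total int64, nodeFraction, edgeFraction float64) (nodeCutoff, edgeCutoff int64) {
	abs := func(v int64) int64 {
		if v < 0 {
			return -v
		}
		return v
	}
	nodeCutoff = abs(int64(float64(total) * nodeFraction))
	edgeCutoff = abs(int64(float64(total) * edgeFraction))
	return nodeCutoff, edgeCutoff
}

func main() {
	// With a 10s total (in nanoseconds) and fractions of 0.005 and 0.001,
	// nodes under 50ms and edges under 10ms of cumulative time are cut.
	n, e := cutoffs(10_000_000_000, 0.005, 0.001)
	fmt.Println(n, e) // 50000000 10000000
}
```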
- // Find the appropriate units for the smallest non-zero sample - if o.OutputUnit != "minimum" || len(g.Nodes) == 0 { - return - } - var minValue int64 - - for _, n := range g.Nodes { - nodeMin := abs64(n.FlatValue()) - if nodeMin == 0 { - nodeMin = abs64(n.CumValue()) - } - if nodeMin > 0 && (minValue == 0 || nodeMin < minValue) { - minValue = nodeMin - } - } - maxValue := rpt.total - if minValue == 0 { - minValue = maxValue - } - - if r := o.Ratio; r > 0 && r != 1 { - minValue = int64(float64(minValue) * r) - maxValue = int64(float64(maxValue) * r) - } - - _, minUnit := measurement.Scale(minValue, o.SampleUnit, "minimum") - _, maxUnit := measurement.Scale(maxValue, o.SampleUnit, "minimum") - - unit := minUnit - if minUnit != maxUnit && minValue*100 < maxValue && o.OutputFormat != Callgrind { - // Minimum and maximum values have different units. Scale - // minimum by 100 to use larger units, allowing minimum value to - // be scaled down to 0.01, except for callgrind reports since - // they can only represent integer values. - _, unit = measurement.Scale(100*minValue, o.SampleUnit, "minimum") - } - - if unit != "" { - o.OutputUnit = unit - } else { - o.OutputUnit = o.SampleUnit - } -} - -// newGraph creates a new graph for this report. If nodes is non-nil, -// only nodes whose info matches are included. Otherwise, all nodes -// are included, without trimming. -func (rpt *Report) newGraph(nodes graph.NodeSet) *graph.Graph { - o := rpt.options - - // Clean up file paths using heuristics. - prof := rpt.prof - for _, f := range prof.Function { - f.Filename = trimPath(f.Filename, o.TrimPath, o.SourcePath) - } - // Removes all numeric tags except for the bytes tag prior - // to making graph. - // TODO: modify to select first numeric tag if no bytes tag - for _, s := range prof.Sample { - numLabels := make(map[string][]int64, len(s.NumLabel)) - numUnits := make(map[string][]string, len(s.NumLabel)) - for k, vs := range s.NumLabel { - if k == "bytes" { - unit := o.NumLabelUnits[k] - numValues := make([]int64, len(vs)) - numUnit := make([]string, len(vs)) - for i, v := range vs { - numValues[i] = v - numUnit[i] = unit - } - numLabels[k] = append(numLabels[k], numValues...) - numUnits[k] = append(numUnits[k], numUnit...) - } - } - s.NumLabel = numLabels - s.NumUnit = numUnits - } - - // Remove label marking samples from the base profiles, so it does not appear - // as a nodelet in the graph view. - prof.RemoveLabel("pprof::base") - - formatTag := func(v int64, key string) string { - return measurement.ScaledLabel(v, key, o.OutputUnit) - } - - gopt := &graph.Options{ - SampleValue: o.SampleValue, - SampleMeanDivisor: o.SampleMeanDivisor, - FormatTag: formatTag, - CallTree: o.CallTree && (o.OutputFormat == Dot || o.OutputFormat == Callgrind), - DropNegative: o.DropNegative, - KeptNodes: nodes, - } - - // Only keep binary names for disassembly-based reports, otherwise - // remove it to allow merging of functions across binaries. - switch o.OutputFormat { - case Raw, List, WebList, Dis, Callgrind: - gopt.ObjNames = true - } - - return graph.New(rpt.prof, gopt) -} - -// printProto writes the incoming proto via thw writer w. -// If the divide_by option has been specified, samples are scaled appropriately. -func printProto(w io.Writer, rpt *Report) error { - p, o := rpt.prof, rpt.options - - // Apply the sample ratio to all samples before saving the profile. 
- if r := o.Ratio; r > 0 && r != 1 { - for _, sample := range p.Sample { - for i, v := range sample.Value { - sample.Value[i] = int64(float64(v) * r) - } - } - } - return p.Write(w) -} - -// printTopProto writes a list of the hottest routines in a profile as a profile.proto. -func printTopProto(w io.Writer, rpt *Report) error { - p := rpt.prof - o := rpt.options - g, _, _, _ := rpt.newTrimmedGraph() - rpt.selectOutputUnit(g) - - out := profile.Profile{ - SampleType: []*profile.ValueType{ - {Type: "cum", Unit: o.OutputUnit}, - {Type: "flat", Unit: o.OutputUnit}, - }, - TimeNanos: p.TimeNanos, - DurationNanos: p.DurationNanos, - PeriodType: p.PeriodType, - Period: p.Period, - } - functionMap := make(functionMap) - for i, n := range g.Nodes { - f, added := functionMap.findOrAdd(n.Info) - if added { - out.Function = append(out.Function, f) - } - flat, cum := n.FlatValue(), n.CumValue() - l := &profile.Location{ - ID: uint64(i + 1), - Address: n.Info.Address, - Line: []profile.Line{ - { - Line: int64(n.Info.Lineno), - Function: f, - }, - }, - } - - fv, _ := measurement.Scale(flat, o.SampleUnit, o.OutputUnit) - cv, _ := measurement.Scale(cum, o.SampleUnit, o.OutputUnit) - s := &profile.Sample{ - Location: []*profile.Location{l}, - Value: []int64{int64(cv), int64(fv)}, - } - out.Location = append(out.Location, l) - out.Sample = append(out.Sample, s) - } - - return out.Write(w) -} - -type functionMap map[string]*profile.Function - -// findOrAdd takes a node representing a function, adds the function -// represented by the node to the map if the function is not already present, -// and returns the function the node represents. This also returns a boolean, -// which is true if the function was added and false otherwise. -func (fm functionMap) findOrAdd(ni graph.NodeInfo) (*profile.Function, bool) { - fName := fmt.Sprintf("%q%q%q%d", ni.Name, ni.OrigName, ni.File, ni.StartLine) - - if f := fm[fName]; f != nil { - return f, false - } - - f := &profile.Function{ - ID: uint64(len(fm) + 1), - Name: ni.Name, - SystemName: ni.OrigName, - Filename: ni.File, - StartLine: int64(ni.StartLine), - } - fm[fName] = f - return f, true -} - -// printAssembly prints an annotated assembly listing. -func printAssembly(w io.Writer, rpt *Report, obj plugin.ObjTool) error { - return PrintAssembly(w, rpt, obj, -1) -} - -// PrintAssembly prints annotated disassembly of rpt to w. -func PrintAssembly(w io.Writer, rpt *Report, obj plugin.ObjTool, maxFuncs int) error { - o := rpt.options - prof := rpt.prof - - g := rpt.newGraph(nil) - - // If the regexp source can be parsed as an address, also match - // functions that land on that address. - var address *uint64 - if hex, err := strconv.ParseUint(o.Symbol.String(), 0, 64); err == nil { - address = &hex - } - - fmt.Fprintln(w, "Total:", rpt.formatValue(rpt.total)) - symbols := symbolsFromBinaries(prof, g, o.Symbol, address, obj) - symNodes := nodesPerSymbol(g.Nodes, symbols) - - // Sort for printing. 
- var syms []*objSymbol - for s := range symNodes { - syms = append(syms, s) - } - byName := func(a, b *objSymbol) bool { - if na, nb := a.sym.Name[0], b.sym.Name[0]; na != nb { - return na < nb - } - return a.sym.Start < b.sym.Start - } - if maxFuncs < 0 { - sort.Sort(orderSyms{syms, byName}) - } else { - byFlatSum := func(a, b *objSymbol) bool { - suma, _ := symNodes[a].Sum() - sumb, _ := symNodes[b].Sum() - if suma != sumb { - return suma > sumb - } - return byName(a, b) - } - sort.Sort(orderSyms{syms, byFlatSum}) - if len(syms) > maxFuncs { - syms = syms[:maxFuncs] - } - } - - if len(syms) == 0 { - return fmt.Errorf("no matches found for regexp: %s", o.Symbol) - } - - // Correlate the symbols from the binary with the profile samples. - for _, s := range syms { - sns := symNodes[s] - - // Gather samples for this symbol. - flatSum, cumSum := sns.Sum() - - // Get the function assembly. - insts, err := obj.Disasm(s.sym.File, s.sym.Start, s.sym.End, o.IntelSyntax) - if err != nil { - return err - } - - ns := annotateAssembly(insts, sns, s.file) - - fmt.Fprintf(w, "ROUTINE ======================== %s\n", s.sym.Name[0]) - for _, name := range s.sym.Name[1:] { - fmt.Fprintf(w, " AKA ======================== %s\n", name) - } - fmt.Fprintf(w, "%10s %10s (flat, cum) %s of Total\n", - rpt.formatValue(flatSum), rpt.formatValue(cumSum), - measurement.Percentage(cumSum, rpt.total)) - - function, file, line := "", "", 0 - for _, n := range ns { - locStr := "" - // Skip loc information if it hasn't changed from previous instruction. - if n.function != function || n.file != file || n.line != line { - function, file, line = n.function, n.file, n.line - if n.function != "" { - locStr = n.function + " " - } - if n.file != "" { - locStr += n.file - if n.line != 0 { - locStr += fmt.Sprintf(":%d", n.line) - } - } - } - switch { - case locStr == "": - // No location info, just print the instruction. - fmt.Fprintf(w, "%10s %10s %10x: %s\n", - valueOrDot(n.flatValue(), rpt), - valueOrDot(n.cumValue(), rpt), - n.address, n.instruction, - ) - case len(n.instruction) < 40: - // Short instruction, print loc on the same line. - fmt.Fprintf(w, "%10s %10s %10x: %-40s;%s\n", - valueOrDot(n.flatValue(), rpt), - valueOrDot(n.cumValue(), rpt), - n.address, n.instruction, - locStr, - ) - default: - // Long instruction, print loc on a separate line. - fmt.Fprintf(w, "%74s;%s\n", "", locStr) - fmt.Fprintf(w, "%10s %10s %10x: %s\n", - valueOrDot(n.flatValue(), rpt), - valueOrDot(n.cumValue(), rpt), - n.address, n.instruction, - ) - } - } - } - return nil -} - -// symbolsFromBinaries examines the binaries listed on the profile -// that have associated samples, and identifies symbols matching rx. -func symbolsFromBinaries(prof *profile.Profile, g *graph.Graph, rx *regexp.Regexp, address *uint64, obj plugin.ObjTool) []*objSymbol { - hasSamples := make(map[string]bool) - // Only examine mappings that have samples that match the - // regexp. This is an optimization to speed up pprof. - for _, n := range g.Nodes { - if name := n.Info.PrintableName(); rx.MatchString(name) && n.Info.Objfile != "" { - hasSamples[n.Info.Objfile] = true - } - } - - // Walk all mappings looking for matching functions with samples. 
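Editorial aside (not part of the patch): when a function limit is set, the removed PrintAssembly orders symbols by flat sample sum, breaking ties by name, using a sort.Sort wrapper type (orderSyms). The sketch below expresses the same ordering with sort.Slice on a hypothetical sym struct, as a more compact equivalent.

```go
package main

import (
	"fmt"
	"sort"
)

type sym struct {
	name string
	flat int64
}

// sortSyms applies the ordering the removed PrintAssembly uses when maxFuncs
// is set: highest flat weight first, ties broken by name to keep the output
// stable across runs.
func sortSyms(syms []sym) {
	sort.Slice(syms, func(i, j int) bool {
		if syms[i].flat != syms[j].flat {
			return syms[i].flat > syms[j].flat
		}
		return syms[i].name < syms[j].name
	})
}

func main() {
	syms := []sym{{"baz", 5}, {"foo", 9}, {"bar", 5}}
	sortSyms(syms)
	fmt.Println(syms) // [{foo 9} {bar 5} {baz 5}]
}
```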
- var objSyms []*objSymbol - for _, m := range prof.Mapping { - if !hasSamples[m.File] { - if address == nil || !(m.Start <= *address && *address <= m.Limit) { - continue - } - } - - f, err := obj.Open(m.File, m.Start, m.Limit, m.Offset) - if err != nil { - fmt.Printf("%v\n", err) - continue - } - - // Find symbols in this binary matching the user regexp. - var addr uint64 - if address != nil { - addr = *address - } - msyms, err := f.Symbols(rx, addr) - f.Close() - if err != nil { - continue - } - for _, ms := range msyms { - objSyms = append(objSyms, - &objSymbol{ - sym: ms, - file: f, - }, - ) - } - } - - return objSyms -} - -// objSym represents a symbol identified from a binary. It includes -// the SymbolInfo from the disasm package and the base that must be -// added to correspond to sample addresses -type objSymbol struct { - sym *plugin.Sym - file plugin.ObjFile -} - -// orderSyms is a wrapper type to sort []*objSymbol by a supplied comparator. -type orderSyms struct { - v []*objSymbol - less func(a, b *objSymbol) bool -} - -func (o orderSyms) Len() int { return len(o.v) } -func (o orderSyms) Less(i, j int) bool { return o.less(o.v[i], o.v[j]) } -func (o orderSyms) Swap(i, j int) { o.v[i], o.v[j] = o.v[j], o.v[i] } - -// nodesPerSymbol classifies nodes into a group of symbols. -func nodesPerSymbol(ns graph.Nodes, symbols []*objSymbol) map[*objSymbol]graph.Nodes { - symNodes := make(map[*objSymbol]graph.Nodes) - for _, s := range symbols { - // Gather samples for this symbol. - for _, n := range ns { - if address, err := s.file.ObjAddr(n.Info.Address); err == nil && address >= s.sym.Start && address < s.sym.End { - symNodes[s] = append(symNodes[s], n) - } - } - } - return symNodes -} - -type assemblyInstruction struct { - address uint64 - instruction string - function string - file string - line int - flat, cum int64 - flatDiv, cumDiv int64 - startsBlock bool - inlineCalls []callID -} - -type callID struct { - file string - line int -} - -func (a *assemblyInstruction) flatValue() int64 { - if a.flatDiv != 0 { - return a.flat / a.flatDiv - } - return a.flat -} - -func (a *assemblyInstruction) cumValue() int64 { - if a.cumDiv != 0 { - return a.cum / a.cumDiv - } - return a.cum -} - -// annotateAssembly annotates a set of assembly instructions with a -// set of samples. It returns a set of nodes to display. base is an -// offset to adjust the sample addresses. -func annotateAssembly(insts []plugin.Inst, samples graph.Nodes, file plugin.ObjFile) []assemblyInstruction { - // Add end marker to simplify printing loop. - insts = append(insts, plugin.Inst{ - Addr: ^uint64(0), - }) - - // Ensure samples are sorted by address. - samples.Sort(graph.AddressOrder) - - s := 0 - asm := make([]assemblyInstruction, 0, len(insts)) - for ix, in := range insts[:len(insts)-1] { - n := assemblyInstruction{ - address: in.Addr, - instruction: in.Text, - function: in.Function, - line: in.Line, - } - if in.File != "" { - n.file = filepath.Base(in.File) - } - - // Sum all the samples until the next instruction (to account - // for samples attributed to the middle of an instruction). 
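Editorial aside (not part of the patch): the loop in annotateAssembly around this point merges two address-sorted sequences, crediting every sample that falls before the next instruction's address to the current instruction. Below is a self-contained sketch of that merge with hypothetical inst/sample types and no ObjAddr translation or error handling.

```go
package main

import "fmt"

type inst struct {
	addr uint64
	text string
}

type sample struct {
	addr uint64
	flat int64
}

// attribute mirrors the merge loop in the removed annotateAssembly: both
// slices are sorted by address, and every sample whose address is below the
// next instruction's address is credited to the current instruction.
func attribute(insts []inst, samples []sample) map[string]int64 {
	// Sentinel end marker, as the removed code appends Addr: ^uint64(0).
	insts = append(insts, inst{addr: ^uint64(0)})
	flat := make(map[string]int64)
	s := 0
	for i := 0; i < len(insts)-1; i++ {
		for s < len(samples) && samples[s].addr < insts[i+1].addr {
			flat[insts[i].text] += samples[s].flat
			s++
		}
	}
	return flat
}

func main() {
	insts := []inst{{0x10, "mov"}, {0x14, "add"}, {0x18, "ret"}}
	samples := []sample{{0x10, 3}, {0x15, 2}, {0x16, 1}, {0x18, 4}}
	fmt.Println(attribute(insts, samples)) // map[add:3 mov:3 ret:4]
}
```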
- for next := insts[ix+1].Addr; s < len(samples); s++ { - if addr, err := file.ObjAddr(samples[s].Info.Address); err != nil || addr >= next { - break - } - sample := samples[s] - n.flatDiv += sample.FlatDiv - n.flat += sample.Flat - n.cumDiv += sample.CumDiv - n.cum += sample.Cum - if f := sample.Info.File; f != "" && n.file == "" { - n.file = filepath.Base(f) - } - if ln := sample.Info.Lineno; ln != 0 && n.line == 0 { - n.line = ln - } - if f := sample.Info.Name; f != "" && n.function == "" { - n.function = f - } - } - asm = append(asm, n) - } - - return asm -} - -// valueOrDot formats a value according to a report, intercepting zero -// values. -func valueOrDot(value int64, rpt *Report) string { - if value == 0 { - return "." - } - return rpt.formatValue(value) -} - -// printTags collects all tags referenced in the profile and prints -// them in a sorted table. -func printTags(w io.Writer, rpt *Report) error { - p := rpt.prof - - o := rpt.options - formatTag := func(v int64, key string) string { - return measurement.ScaledLabel(v, key, o.OutputUnit) - } - - // Hashtable to keep accumulate tags as key,value,count. - tagMap := make(map[string]map[string]int64) - for _, s := range p.Sample { - for key, vals := range s.Label { - for _, val := range vals { - valueMap, ok := tagMap[key] - if !ok { - valueMap = make(map[string]int64) - tagMap[key] = valueMap - } - valueMap[val] += o.SampleValue(s.Value) - } - } - for key, vals := range s.NumLabel { - unit := o.NumLabelUnits[key] - for _, nval := range vals { - val := formatTag(nval, unit) - valueMap, ok := tagMap[key] - if !ok { - valueMap = make(map[string]int64) - tagMap[key] = valueMap - } - valueMap[val] += o.SampleValue(s.Value) - } - } - } - - tagKeys := make([]*graph.Tag, 0, len(tagMap)) - for key := range tagMap { - tagKeys = append(tagKeys, &graph.Tag{Name: key}) - } - tabw := tabwriter.NewWriter(w, 0, 0, 1, ' ', tabwriter.AlignRight) - for _, tagKey := range graph.SortTags(tagKeys, true) { - var total int64 - key := tagKey.Name - tags := make([]*graph.Tag, 0, len(tagMap[key])) - for t, c := range tagMap[key] { - total += c - tags = append(tags, &graph.Tag{Name: t, Flat: c}) - } - - f, u := measurement.Scale(total, o.SampleUnit, o.OutputUnit) - fmt.Fprintf(tabw, "%s:\t Total %.1f%s\n", key, f, u) - for _, t := range graph.SortTags(tags, true) { - f, u := measurement.Scale(t.FlatValue(), o.SampleUnit, o.OutputUnit) - if total > 0 { - fmt.Fprintf(tabw, " \t%.1f%s (%s):\t %s\n", f, u, measurement.Percentage(t.FlatValue(), total), t.Name) - } else { - fmt.Fprintf(tabw, " \t%.1f%s:\t %s\n", f, u, t.Name) - } - } - fmt.Fprintln(tabw) - } - return tabw.Flush() -} - -// printComments prints all freeform comments in the profile. -func printComments(w io.Writer, rpt *Report) error { - p := rpt.prof - - for _, c := range p.Comments { - fmt.Fprintln(w, c) - } - return nil -} - -// TextItem holds a single text report entry. -type TextItem struct { - Name string - InlineLabel string // Not empty if inlined - Flat, Cum int64 // Raw values - FlatFormat, CumFormat string // Formatted values -} - -// TextItems returns a list of text items from the report and a list -// of labels that describe the report. 
-func TextItems(rpt *Report) ([]TextItem, []string) { - g, origCount, droppedNodes, _ := rpt.newTrimmedGraph() - rpt.selectOutputUnit(g) - labels := reportLabels(rpt, g, origCount, droppedNodes, 0, false) - - var items []TextItem - var flatSum int64 - for _, n := range g.Nodes { - name, flat, cum := n.Info.PrintableName(), n.FlatValue(), n.CumValue() - - var inline, noinline bool - for _, e := range n.In { - if e.Inline { - inline = true - } else { - noinline = true - } - } - - var inl string - if inline { - if noinline { - inl = "(partial-inline)" - } else { - inl = "(inline)" - } - } - - flatSum += flat - items = append(items, TextItem{ - Name: name, - InlineLabel: inl, - Flat: flat, - Cum: cum, - FlatFormat: rpt.formatValue(flat), - CumFormat: rpt.formatValue(cum), - }) - } - return items, labels -} - -// printText prints a flat text report for a profile. -func printText(w io.Writer, rpt *Report) error { - items, labels := TextItems(rpt) - fmt.Fprintln(w, strings.Join(labels, "\n")) - fmt.Fprintf(w, "%10s %5s%% %5s%% %10s %5s%%\n", - "flat", "flat", "sum", "cum", "cum") - var flatSum int64 - for _, item := range items { - inl := item.InlineLabel - if inl != "" { - inl = " " + inl - } - flatSum += item.Flat - fmt.Fprintf(w, "%10s %s %s %10s %s %s%s\n", - item.FlatFormat, measurement.Percentage(item.Flat, rpt.total), - measurement.Percentage(flatSum, rpt.total), - item.CumFormat, measurement.Percentage(item.Cum, rpt.total), - item.Name, inl) - } - return nil -} - -// printTraces prints all traces from a profile. -func printTraces(w io.Writer, rpt *Report) error { - fmt.Fprintln(w, strings.Join(ProfileLabels(rpt), "\n")) - - prof := rpt.prof - o := rpt.options - - const separator = "-----------+-------------------------------------------------------" - - _, locations := graph.CreateNodes(prof, &graph.Options{}) - for _, sample := range prof.Sample { - type stk struct { - *graph.NodeInfo - inline bool - } - var stack []stk - for _, loc := range sample.Location { - nodes := locations[loc.ID] - for i, n := range nodes { - // The inline flag may be inaccurate if 'show' or 'hide' filter is - // used. See https://github.com/google/pprof/issues/511. - inline := i != len(nodes)-1 - stack = append(stack, stk{&n.Info, inline}) - } - } - - if len(stack) == 0 { - continue - } - - fmt.Fprintln(w, separator) - // Print any text labels for the sample. - var labels []string - for s, vs := range sample.Label { - labels = append(labels, fmt.Sprintf("%10s: %s\n", s, strings.Join(vs, " "))) - } - sort.Strings(labels) - fmt.Fprint(w, strings.Join(labels, "")) - - // Print any numeric labels for the sample - var numLabels []string - for key, vals := range sample.NumLabel { - unit := o.NumLabelUnits[key] - numValues := make([]string, len(vals)) - for i, vv := range vals { - numValues[i] = measurement.Label(vv, unit) - } - numLabels = append(numLabels, fmt.Sprintf("%10s: %s\n", key, strings.Join(numValues, " "))) - } - sort.Strings(numLabels) - fmt.Fprint(w, strings.Join(numLabels, "")) - - var d, v int64 - v = o.SampleValue(sample.Value) - if o.SampleMeanDivisor != nil { - d = o.SampleMeanDivisor(sample.Value) - } - // Print call stack. - if d != 0 { - v = v / d - } - for i, s := range stack { - var vs, inline string - if i == 0 { - vs = rpt.formatValue(v) - } - if s.inline { - inline = " (inline)" - } - fmt.Fprintf(w, "%10s %s%s\n", vs, s.PrintableName(), inline) - } - } - fmt.Fprintln(w, separator) - return nil -} - -// printCallgrind prints a graph for a profile on callgrind format. 
-func printCallgrind(w io.Writer, rpt *Report) error { - o := rpt.options - rpt.options.NodeFraction = 0 - rpt.options.EdgeFraction = 0 - rpt.options.NodeCount = 0 - - g, _, _, _ := rpt.newTrimmedGraph() - rpt.selectOutputUnit(g) - - nodeNames := getDisambiguatedNames(g) - - fmt.Fprintln(w, "positions: instr line") - fmt.Fprintln(w, "events:", o.SampleType+"("+o.OutputUnit+")") - - objfiles := make(map[string]int) - files := make(map[string]int) - names := make(map[string]int) - - // prevInfo points to the previous NodeInfo. - // It is used to group cost lines together as much as possible. - var prevInfo *graph.NodeInfo - for _, n := range g.Nodes { - if prevInfo == nil || n.Info.Objfile != prevInfo.Objfile || n.Info.File != prevInfo.File || n.Info.Name != prevInfo.Name { - fmt.Fprintln(w) - fmt.Fprintln(w, "ob="+callgrindName(objfiles, n.Info.Objfile)) - fmt.Fprintln(w, "fl="+callgrindName(files, n.Info.File)) - fmt.Fprintln(w, "fn="+callgrindName(names, n.Info.Name)) - } - - addr := callgrindAddress(prevInfo, n.Info.Address) - sv, _ := measurement.Scale(n.FlatValue(), o.SampleUnit, o.OutputUnit) - fmt.Fprintf(w, "%s %d %d\n", addr, n.Info.Lineno, int64(sv)) - - // Print outgoing edges. - for _, out := range n.Out.Sort() { - c, _ := measurement.Scale(out.Weight, o.SampleUnit, o.OutputUnit) - callee := out.Dest - fmt.Fprintln(w, "cfl="+callgrindName(files, callee.Info.File)) - fmt.Fprintln(w, "cfn="+callgrindName(names, nodeNames[callee])) - // pprof doesn't have a flat weight for a call, leave as 0. - fmt.Fprintf(w, "calls=0 %s %d\n", callgrindAddress(prevInfo, callee.Info.Address), callee.Info.Lineno) - // TODO: This address may be in the middle of a call - // instruction. It would be best to find the beginning - // of the instruction, but the tools seem to handle - // this OK. - fmt.Fprintf(w, "* * %d\n", int64(c)) - } - - prevInfo = &n.Info - } - - return nil -} - -// getDisambiguatedNames returns a map from each node in the graph to -// the name to use in the callgrind output. Callgrind merges all -// functions with the same [file name, function name]. Add a [%d/n] -// suffix to disambiguate nodes with different values of -// node.Function, which we want to keep separate. In particular, this -// affects graphs created with --call_tree, where nodes from different -// contexts are associated to different Functions. -func getDisambiguatedNames(g *graph.Graph) map[*graph.Node]string { - nodeName := make(map[*graph.Node]string, len(g.Nodes)) - - type names struct { - file, function string - } - - // nameFunctionIndex maps the callgrind names (filename, function) - // to the node.Function values found for that name, and each - // node.Function value to a sequential index to be used on the - // disambiguated name. - nameFunctionIndex := make(map[names]map[*graph.Node]int) - for _, n := range g.Nodes { - nm := names{n.Info.File, n.Info.Name} - p, ok := nameFunctionIndex[nm] - if !ok { - p = make(map[*graph.Node]int) - nameFunctionIndex[nm] = p - } - if _, ok := p[n.Function]; !ok { - p[n.Function] = len(p) - } - } - - for _, n := range g.Nodes { - nm := names{n.Info.File, n.Info.Name} - nodeName[n] = n.Info.Name - if p := nameFunctionIndex[nm]; len(p) > 1 { - // If there is more than one function, add suffix to disambiguate. - nodeName[n] += fmt.Sprintf(" [%d/%d]", p[n.Function]+1, len(p)) - } - } - return nodeName -} - -// callgrindName implements the callgrind naming compression scheme. -// For names not previously seen returns "(N) name", where N is a -// unique index. 
For names previously seen returns "(N)" where N is -// the index returned the first time. -func callgrindName(names map[string]int, name string) string { - if name == "" { - return "" - } - if id, ok := names[name]; ok { - return fmt.Sprintf("(%d)", id) - } - id := len(names) + 1 - names[name] = id - return fmt.Sprintf("(%d) %s", id, name) -} - -// callgrindAddress implements the callgrind subposition compression scheme if -// possible. If prevInfo != nil, it contains the previous address. The current -// address can be given relative to the previous address, with an explicit +/- -// to indicate it is relative, or * for the same address. -func callgrindAddress(prevInfo *graph.NodeInfo, curr uint64) string { - abs := fmt.Sprintf("%#x", curr) - if prevInfo == nil { - return abs - } - - prev := prevInfo.Address - if prev == curr { - return "*" - } - - diff := int64(curr - prev) - relative := fmt.Sprintf("%+d", diff) - - // Only bother to use the relative address if it is actually shorter. - if len(relative) < len(abs) { - return relative - } - - return abs -} - -// printTree prints a tree-based report in text form. -func printTree(w io.Writer, rpt *Report) error { - const separator = "----------------------------------------------------------+-------------" - const legend = " flat flat% sum% cum cum% calls calls% + context " - - g, origCount, droppedNodes, _ := rpt.newTrimmedGraph() - rpt.selectOutputUnit(g) - - fmt.Fprintln(w, strings.Join(reportLabels(rpt, g, origCount, droppedNodes, 0, false), "\n")) - - fmt.Fprintln(w, separator) - fmt.Fprintln(w, legend) - var flatSum int64 - - rx := rpt.options.Symbol - matched := 0 - for _, n := range g.Nodes { - name, flat, cum := n.Info.PrintableName(), n.FlatValue(), n.CumValue() - - // Skip any entries that do not match the regexp (for the "peek" command). - if rx != nil && !rx.MatchString(name) { - continue - } - matched++ - - fmt.Fprintln(w, separator) - // Print incoming edges. - inEdges := n.In.Sort() - for _, in := range inEdges { - var inline string - if in.Inline { - inline = " (inline)" - } - fmt.Fprintf(w, "%50s %s | %s%s\n", rpt.formatValue(in.Weight), - measurement.Percentage(in.Weight, cum), in.Src.Info.PrintableName(), inline) - } - - // Print current node. - flatSum += flat - fmt.Fprintf(w, "%10s %s %s %10s %s | %s\n", - rpt.formatValue(flat), - measurement.Percentage(flat, rpt.total), - measurement.Percentage(flatSum, rpt.total), - rpt.formatValue(cum), - measurement.Percentage(cum, rpt.total), - name) - - // Print outgoing edges. - outEdges := n.Out.Sort() - for _, out := range outEdges { - var inline string - if out.Inline { - inline = " (inline)" - } - fmt.Fprintf(w, "%50s %s | %s%s\n", rpt.formatValue(out.Weight), - measurement.Percentage(out.Weight, cum), out.Dest.Info.PrintableName(), inline) - } - } - if len(g.Nodes) > 0 { - fmt.Fprintln(w, separator) - } - if rx != nil && matched == 0 { - return fmt.Errorf("no matches found for regexp: %s", rx) - } - return nil -} - -// GetDOT returns a graph suitable for dot processing along with some -// configuration information. -func GetDOT(rpt *Report) (*graph.Graph, *graph.DotConfig) { - g, origCount, droppedNodes, droppedEdges := rpt.newTrimmedGraph() - rpt.selectOutputUnit(g) - labels := reportLabels(rpt, g, origCount, droppedNodes, droppedEdges, true) - - c := &graph.DotConfig{ - Title: rpt.options.Title, - Labels: labels, - FormatValue: rpt.formatValue, - Total: rpt.total, - } - return g, c -} - -// printDOT prints an annotated callgraph in DOT format. 
-func printDOT(w io.Writer, rpt *Report) error { - g, c := GetDOT(rpt) - graph.ComposeDot(w, g, &graph.DotAttributes{}, c) - return nil -} - -// ProfileLabels returns printable labels for a profile. -func ProfileLabels(rpt *Report) []string { - label := []string{} - prof := rpt.prof - o := rpt.options - if len(prof.Mapping) > 0 { - if prof.Mapping[0].File != "" { - label = append(label, "File: "+filepath.Base(prof.Mapping[0].File)) - } - if prof.Mapping[0].BuildID != "" { - label = append(label, "Build ID: "+prof.Mapping[0].BuildID) - } - } - // Only include comments that do not start with '#'. - for _, c := range prof.Comments { - if !strings.HasPrefix(c, "#") { - label = append(label, c) - } - } - if o.SampleType != "" { - label = append(label, "Type: "+o.SampleType) - } - if prof.TimeNanos != 0 { - const layout = "Jan 2, 2006 at 3:04pm (MST)" - label = append(label, "Time: "+time.Unix(0, prof.TimeNanos).Format(layout)) - } - if prof.DurationNanos != 0 { - duration := measurement.Label(prof.DurationNanos, "nanoseconds") - totalNanos, totalUnit := measurement.Scale(rpt.total, o.SampleUnit, "nanoseconds") - var ratio string - if totalUnit == "ns" && totalNanos != 0 { - ratio = "(" + measurement.Percentage(int64(totalNanos), prof.DurationNanos) + ")" - } - label = append(label, fmt.Sprintf("Duration: %s, Total samples = %s %s", duration, rpt.formatValue(rpt.total), ratio)) - } - return label -} - -// reportLabels returns printable labels for a report. Includes -// profileLabels. -func reportLabels(rpt *Report, g *graph.Graph, origCount, droppedNodes, droppedEdges int, fullHeaders bool) []string { - nodeFraction := rpt.options.NodeFraction - edgeFraction := rpt.options.EdgeFraction - nodeCount := len(g.Nodes) - - var label []string - if len(rpt.options.ProfileLabels) > 0 { - label = append(label, rpt.options.ProfileLabels...) - } else if fullHeaders || !rpt.options.CompactLabels { - label = ProfileLabels(rpt) - } - - var flatSum int64 - for _, n := range g.Nodes { - flatSum = flatSum + n.FlatValue() - } - - if len(rpt.options.ActiveFilters) > 0 { - activeFilters := legendActiveFilters(rpt.options.ActiveFilters) - label = append(label, activeFilters...) - } - - label = append(label, fmt.Sprintf("Showing nodes accounting for %s, %s of %s total", rpt.formatValue(flatSum), strings.TrimSpace(measurement.Percentage(flatSum, rpt.total)), rpt.formatValue(rpt.total))) - - if rpt.total != 0 { - if droppedNodes > 0 { - label = append(label, genLabel(droppedNodes, "node", "cum", - rpt.formatValue(abs64(int64(float64(rpt.total)*nodeFraction))))) - } - if droppedEdges > 0 { - label = append(label, genLabel(droppedEdges, "edge", "freq", - rpt.formatValue(abs64(int64(float64(rpt.total)*edgeFraction))))) - } - if nodeCount > 0 && nodeCount < origCount { - label = append(label, fmt.Sprintf("Showing top %d nodes out of %d", - nodeCount, origCount)) - } - } - - // Help new users understand the graph. - // A new line is intentionally added here to better show this message. 
- if fullHeaders { - label = append(label, "\nSee https://git.io/JfYMW for how to read the graph") - } - - return label -} - -func legendActiveFilters(activeFilters []string) []string { - legendActiveFilters := make([]string, len(activeFilters)+1) - legendActiveFilters[0] = "Active filters:" - for i, s := range activeFilters { - if len(s) > 80 { - s = s[:80] + "…" - } - legendActiveFilters[i+1] = " " + s - } - return legendActiveFilters -} - -func genLabel(d int, n, l, f string) string { - if d > 1 { - n = n + "s" - } - return fmt.Sprintf("Dropped %d %s (%s <= %s)", d, n, l, f) -} - -// New builds a new report indexing the sample values interpreting the -// samples with the provided function. -func New(prof *profile.Profile, o *Options) *Report { - format := func(v int64) string { - if r := o.Ratio; r > 0 && r != 1 { - fv := float64(v) * r - v = int64(fv) - } - return measurement.ScaledLabel(v, o.SampleUnit, o.OutputUnit) - } - return &Report{prof, computeTotal(prof, o.SampleValue, o.SampleMeanDivisor), - o, format} -} - -// NewDefault builds a new report indexing the last sample value -// available. -func NewDefault(prof *profile.Profile, options Options) *Report { - index := len(prof.SampleType) - 1 - o := &options - if o.Title == "" && len(prof.Mapping) > 0 && prof.Mapping[0].File != "" { - o.Title = filepath.Base(prof.Mapping[0].File) - } - o.SampleType = prof.SampleType[index].Type - o.SampleUnit = strings.ToLower(prof.SampleType[index].Unit) - o.SampleValue = func(v []int64) int64 { - return v[index] - } - return New(prof, o) -} - -// computeTotal computes the sum of the absolute value of all sample values. -// If any samples have label indicating they belong to the diff base, then the -// total will only include samples with that label. -func computeTotal(prof *profile.Profile, value, meanDiv func(v []int64) int64) int64 { - var div, total, diffDiv, diffTotal int64 - for _, sample := range prof.Sample { - var d, v int64 - v = value(sample.Value) - if meanDiv != nil { - d = meanDiv(sample.Value) - } - if v < 0 { - v = -v - } - total += v - div += d - if sample.DiffBaseSample() { - diffTotal += v - diffDiv += d - } - } - if diffTotal > 0 { - total = diffTotal - div = diffDiv - } - if div != 0 { - return total / div - } - return total -} - -// Report contains the data and associated routines to extract a -// report from a profile. -type Report struct { - prof *profile.Profile - total int64 - options *Options - formatValue func(int64) string -} - -// Total returns the total number of samples in a report. -func (rpt *Report) Total() int64 { return rpt.total } - -func abs64(i int64) int64 { - if i < 0 { - return -i - } - return i -} diff --git a/internal/pprof/report/report_test.go b/internal/pprof/report/report_test.go deleted file mode 100644 index 7b8b010cad1..00000000000 --- a/internal/pprof/report/report_test.go +++ /dev/null @@ -1,468 +0,0 @@ -// Copyright 2014 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package report - -import ( - "bytes" - "io/ioutil" - "regexp" - "runtime" - "strings" - "testing" - - "github.com/parca-dev/parca/internal/pprof/binutils" - "github.com/parca-dev/parca/internal/pprof/graph" - "github.com/parca-dev/parca/internal/pprof/proftest" - "github.com/google/pprof/profile" -) - -type testcase struct { - rpt *Report - want string -} - -func TestSource(t *testing.T) { - const path = "testdata/" - - sampleValue1 := func(v []int64) int64 { - return v[1] - } - - for _, tc := range []testcase{ - { - rpt: New( - testProfile.Copy(), - &Options{ - OutputFormat: List, - Symbol: regexp.MustCompile(`.`), - TrimPath: "/some/path", - - SampleValue: sampleValue1, - SampleUnit: testProfile.SampleType[1].Unit, - }, - ), - want: path + "source.rpt", - }, - { - rpt: New( - testProfile.Copy(), - &Options{ - OutputFormat: Dot, - CallTree: true, - Symbol: regexp.MustCompile(`.`), - TrimPath: "/some/path", - - SampleValue: sampleValue1, - SampleUnit: testProfile.SampleType[1].Unit, - }, - ), - want: path + "source.dot", - }, - } { - var b bytes.Buffer - if err := Generate(&b, tc.rpt, &binutils.Binutils{}); err != nil { - t.Fatalf("%s: %v", tc.want, err) - } - - gold, err := ioutil.ReadFile(tc.want) - if err != nil { - t.Fatalf("%s: %v", tc.want, err) - } - if runtime.GOOS == "windows" { - if tc.rpt.options.OutputFormat == Dot { - // The .dot test has the paths inside strings, so \ must be escaped. - gold = bytes.Replace(gold, []byte("testdata/"), []byte(`testdata\\`), -1) - } else { - gold = bytes.Replace(gold, []byte("testdata/"), []byte(`testdata\`), -1) - } - } - if string(b.String()) != string(gold) { - d, err := proftest.Diff(gold, b.Bytes()) - if err != nil { - t.Fatalf("%s: %v", "source", err) - } - t.Error("source" + "\n" + string(d) + "\n" + "gold:\n" + tc.want) - } - } -} - -// TestFilter ensures that commands with a regexp filter argument return an -// error if there are no results. -func TestFilter(t *testing.T) { - const filter = "doesNotExist" - - tests := []struct { - name string - format int - }{ - { - name: "list", - format: List, - }, - { - name: "weblist", - format: WebList, - }, - { - name: "disasm", - format: Dis, - }, - { - // N.B. Tree with a Symbol is "peek". 
- name: "peek", - format: Tree, - }, - } - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - rpt := New(testProfile.Copy(), &Options{ - OutputFormat: tc.format, - Symbol: regexp.MustCompile(filter), - SampleValue: func(v []int64) int64 { return v[1] }, - SampleUnit: testProfile.SampleType[1].Unit, - }) - - var buf bytes.Buffer - err := Generate(&buf, rpt, &binutils.Binutils{}) - if err == nil { - t.Fatalf("Generate got nil, want error; buf = %s", buf.String()) - } - if !strings.Contains(err.Error(), filter) { - t.Errorf("Error got %v, want it to contain %q", err, filter) - } - }) - } -} - -var testM = []*profile.Mapping{ - { - ID: 1, - HasFunctions: true, - HasFilenames: true, - HasLineNumbers: true, - HasInlineFrames: true, - }, -} - -var testF = []*profile.Function{ - { - ID: 1, - Name: "main", - Filename: "testdata/source1", - }, - { - ID: 2, - Name: "foo", - Filename: "testdata/source1", - }, - { - ID: 3, - Name: "bar", - Filename: "testdata/source1", - }, - { - ID: 4, - Name: "tee", - Filename: "/some/path/testdata/source2", - }, -} - -var testL = []*profile.Location{ - { - ID: 1, - Mapping: testM[0], - Line: []profile.Line{ - { - Function: testF[0], - Line: 2, - }, - }, - }, - { - ID: 2, - Mapping: testM[0], - Line: []profile.Line{ - { - Function: testF[1], - Line: 4, - }, - }, - }, - { - ID: 3, - Mapping: testM[0], - Line: []profile.Line{ - { - Function: testF[2], - Line: 10, - }, - }, - }, - { - ID: 4, - Mapping: testM[0], - Line: []profile.Line{ - { - Function: testF[3], - Line: 2, - }, - }, - }, - { - ID: 5, - Mapping: testM[0], - Line: []profile.Line{ - { - Function: testF[3], - Line: 8, - }, - }, - }, -} - -var testProfile = &profile.Profile{ - PeriodType: &profile.ValueType{Type: "cpu", Unit: "millisecond"}, - Period: 10, - DurationNanos: 10e9, - SampleType: []*profile.ValueType{ - {Type: "samples", Unit: "count"}, - {Type: "cpu", Unit: "cycles"}, - }, - Sample: []*profile.Sample{ - { - Location: []*profile.Location{testL[0]}, - Value: []int64{1, 1}, - }, - { - Location: []*profile.Location{testL[2], testL[1], testL[0]}, - Value: []int64{1, 10}, - }, - { - Location: []*profile.Location{testL[4], testL[2], testL[0]}, - Value: []int64{1, 100}, - }, - { - Location: []*profile.Location{testL[3], testL[0]}, - Value: []int64{1, 1000}, - }, - { - Location: []*profile.Location{testL[4], testL[3], testL[0]}, - Value: []int64{1, 10000}, - }, - }, - Location: testL, - Function: testF, - Mapping: testM, -} - -func TestDisambiguation(t *testing.T) { - parent1 := &graph.Node{Info: graph.NodeInfo{Name: "parent1"}} - parent2 := &graph.Node{Info: graph.NodeInfo{Name: "parent2"}} - child1 := &graph.Node{Info: graph.NodeInfo{Name: "child"}, Function: parent1} - child2 := &graph.Node{Info: graph.NodeInfo{Name: "child"}, Function: parent2} - child3 := &graph.Node{Info: graph.NodeInfo{Name: "child"}, Function: parent1} - sibling := &graph.Node{Info: graph.NodeInfo{Name: "sibling"}, Function: parent1} - - n := []*graph.Node{parent1, parent2, child1, child2, child3, sibling} - - wanted := map[*graph.Node]string{ - parent1: "parent1", - parent2: "parent2", - child1: "child [1/2]", - child2: "child [2/2]", - child3: "child [1/2]", - sibling: "sibling", - } - - g := &graph.Graph{Nodes: n} - - names := getDisambiguatedNames(g) - - for node, want := range wanted { - if got := names[node]; got != want { - t.Errorf("name %s, got %s, want %s", node.Info.Name, got, want) - } - } -} - -func TestFunctionMap(t *testing.T) { - - fm := make(functionMap) - nodes := []graph.NodeInfo{ - {Name: 
"fun1"}, - {Name: "fun2", File: "filename"}, - {Name: "fun1"}, - {Name: "fun2", File: "filename2"}, - } - - want := []struct { - wantFunction profile.Function - wantAdded bool - }{ - {profile.Function{ID: 1, Name: "fun1"}, true}, - {profile.Function{ID: 2, Name: "fun2", Filename: "filename"}, true}, - {profile.Function{ID: 1, Name: "fun1"}, false}, - {profile.Function{ID: 3, Name: "fun2", Filename: "filename2"}, true}, - } - - for i, tc := range nodes { - gotFunc, gotAdded := fm.findOrAdd(tc) - if got, want := gotFunc, want[i].wantFunction; *got != want { - t.Errorf("%d: got %v, want %v", i, got, want) - } - if got, want := gotAdded, want[i].wantAdded; got != want { - t.Errorf("%d: got %v, want %v", i, got, want) - } - } -} - -func TestLegendActiveFilters(t *testing.T) { - activeFilterInput := []string{ - "focus=123|456|789|101112|131415|161718|192021|222324|252627|282930|313233|343536|363738|acbdefghijklmnop", - "show=short filter", - } - expectedLegendActiveFilter := []string{ - "Active filters:", - " focus=123|456|789|101112|131415|161718|192021|222324|252627|282930|313233|343536…", - " show=short filter", - } - legendActiveFilter := legendActiveFilters(activeFilterInput) - if len(legendActiveFilter) != len(expectedLegendActiveFilter) { - t.Errorf("wanted length %v got length %v", len(expectedLegendActiveFilter), len(legendActiveFilter)) - } - for i := range legendActiveFilter { - if legendActiveFilter[i] != expectedLegendActiveFilter[i] { - t.Errorf("%d: want \"%v\", got \"%v\"", i, expectedLegendActiveFilter[i], legendActiveFilter[i]) - } - } -} - -func TestComputeTotal(t *testing.T) { - p1 := testProfile.Copy() - p1.Sample = []*profile.Sample{ - { - Location: []*profile.Location{testL[0]}, - Value: []int64{1, 1}, - }, - { - Location: []*profile.Location{testL[2], testL[1], testL[0]}, - Value: []int64{1, 10}, - }, - { - Location: []*profile.Location{testL[4], testL[2], testL[0]}, - Value: []int64{1, 100}, - }, - } - - p2 := testProfile.Copy() - p2.Sample = []*profile.Sample{ - { - Location: []*profile.Location{testL[0]}, - Value: []int64{1, 1}, - }, - { - Location: []*profile.Location{testL[2], testL[1], testL[0]}, - Value: []int64{1, -10}, - }, - { - Location: []*profile.Location{testL[4], testL[2], testL[0]}, - Value: []int64{1, 100}, - }, - } - - p3 := testProfile.Copy() - p3.Sample = []*profile.Sample{ - { - Location: []*profile.Location{testL[0]}, - Value: []int64{10000, 1}, - }, - { - Location: []*profile.Location{testL[2], testL[1], testL[0]}, - Value: []int64{-10, 3}, - Label: map[string][]string{"pprof::base": {"true"}}, - }, - { - Location: []*profile.Location{testL[2], testL[1], testL[0]}, - Value: []int64{1000, -10}, - }, - { - Location: []*profile.Location{testL[2], testL[1], testL[0]}, - Value: []int64{-9000, 3}, - Label: map[string][]string{"pprof::base": {"true"}}, - }, - { - Location: []*profile.Location{testL[2], testL[1], testL[0]}, - Value: []int64{-1, 3}, - Label: map[string][]string{"pprof::base": {"true"}}, - }, - { - Location: []*profile.Location{testL[4], testL[2], testL[0]}, - Value: []int64{100, 100}, - }, - { - Location: []*profile.Location{testL[2], testL[1], testL[0]}, - Value: []int64{100, 3}, - Label: map[string][]string{"pprof::base": {"true"}}, - }, - } - - testcases := []struct { - desc string - prof *profile.Profile - value, meanDiv func(v []int64) int64 - wantTotal int64 - }{ - { - desc: "no diff base, all positive values, index 1", - prof: p1, - value: func(v []int64) int64 { - return v[0] - }, - wantTotal: 3, - }, - { - desc: "no diff base, all 
positive values, index 2", - prof: p1, - value: func(v []int64) int64 { - return v[1] - }, - wantTotal: 111, - }, - { - desc: "no diff base, some negative values", - prof: p2, - value: func(v []int64) int64 { - return v[1] - }, - wantTotal: 111, - }, - { - desc: "diff base, some negative values", - prof: p3, - value: func(v []int64) int64 { - return v[0] - }, - wantTotal: 9111, - }, - } - - for _, tc := range testcases { - t.Run(tc.desc, func(t *testing.T) { - if gotTotal := computeTotal(tc.prof, tc.value, tc.meanDiv); gotTotal != tc.wantTotal { - t.Errorf("got total %d, want %v", gotTotal, tc.wantTotal) - } - }) - } -} diff --git a/internal/pprof/report/source.go b/internal/pprof/report/source.go deleted file mode 100644 index 2d694d1d74a..00000000000 --- a/internal/pprof/report/source.go +++ /dev/null @@ -1,1114 +0,0 @@ -// Copyright 2014 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package report - -// This file contains routines related to the generation of annotated -// source listings. - -import ( - "bufio" - "fmt" - "html/template" - "io" - "os" - "path/filepath" - "regexp" - "sort" - "strconv" - "strings" - - "github.com/parca-dev/parca/internal/pprof/graph" - "github.com/parca-dev/parca/internal/pprof/measurement" - "github.com/parca-dev/parca/internal/pprof/plugin" - "github.com/google/pprof/profile" -) - -// printSource prints an annotated source listing, include all -// functions with samples that match the regexp rpt.options.symbol. -// The sources are sorted by function name and then by filename to -// eliminate potential nondeterminism. -func printSource(w io.Writer, rpt *Report) error { - o := rpt.options - g := rpt.newGraph(nil) - - // Identify all the functions that match the regexp provided. - // Group nodes for each matching function. - var functions graph.Nodes - functionNodes := make(map[string]graph.Nodes) - for _, n := range g.Nodes { - if !o.Symbol.MatchString(n.Info.Name) { - continue - } - if functionNodes[n.Info.Name] == nil { - functions = append(functions, n) - } - functionNodes[n.Info.Name] = append(functionNodes[n.Info.Name], n) - } - functions.Sort(graph.NameOrder) - - if len(functionNodes) == 0 { - return fmt.Errorf("no matches found for regexp: %s", o.Symbol) - } - - sourcePath := o.SourcePath - if sourcePath == "" { - wd, err := os.Getwd() - if err != nil { - return fmt.Errorf("could not stat current dir: %v", err) - } - sourcePath = wd - } - reader := newSourceReader(sourcePath, o.TrimPath) - - fmt.Fprintf(w, "Total: %s\n", rpt.formatValue(rpt.total)) - for _, fn := range functions { - name := fn.Info.Name - - // Identify all the source files associated to this function. - // Group nodes for each source file. 
- var sourceFiles graph.Nodes - fileNodes := make(map[string]graph.Nodes) - for _, n := range functionNodes[name] { - if n.Info.File == "" { - continue - } - if fileNodes[n.Info.File] == nil { - sourceFiles = append(sourceFiles, n) - } - fileNodes[n.Info.File] = append(fileNodes[n.Info.File], n) - } - - if len(sourceFiles) == 0 { - fmt.Fprintf(w, "No source information for %s\n", name) - continue - } - - sourceFiles.Sort(graph.FileOrder) - - // Print each file associated with this function. - for _, fl := range sourceFiles { - filename := fl.Info.File - fns := fileNodes[filename] - flatSum, cumSum := fns.Sum() - - fnodes, _, err := getSourceFromFile(filename, reader, fns, 0, 0) - fmt.Fprintf(w, "ROUTINE ======================== %s in %s\n", name, filename) - fmt.Fprintf(w, "%10s %10s (flat, cum) %s of Total\n", - rpt.formatValue(flatSum), rpt.formatValue(cumSum), - measurement.Percentage(cumSum, rpt.total)) - - if err != nil { - fmt.Fprintf(w, " Error: %v\n", err) - continue - } - - for _, fn := range fnodes { - fmt.Fprintf(w, "%10s %10s %6d:%s\n", valueOrDot(fn.Flat, rpt), valueOrDot(fn.Cum, rpt), fn.Info.Lineno, fn.Info.Name) - } - } - } - return nil -} - -// printWebSource prints an annotated source listing, include all -// functions with samples that match the regexp rpt.options.symbol. -func printWebSource(w io.Writer, rpt *Report, obj plugin.ObjTool) error { - printHeader(w, rpt) - if err := PrintWebList(w, rpt, obj, -1); err != nil { - return err - } - printPageClosing(w) - return nil -} - -// sourcePrinter holds state needed for generating source+asm HTML listing. -type sourcePrinter struct { - reader *sourceReader - synth *synthCode - objectTool plugin.ObjTool - objects map[string]plugin.ObjFile // Opened object files - sym *regexp.Regexp // May be nil - files map[string]*sourceFile // Set of files to print. - insts map[uint64]instructionInfo // Instructions of interest (keyed by address). - - // Set of function names that we are interested in (because they had - // a sample and match sym). - interest map[string]bool - - // Mapping from system function names to printable names. - prettyNames map[string]string -} - -// addrInfo holds information for an address we are interested in. -type addrInfo struct { - loc *profile.Location // Always non-nil - obj plugin.ObjFile // May be nil -} - -// instructionInfo holds collected information for an instruction. -type instructionInfo struct { - objAddr uint64 // Address in object file (with base subtracted out) - length int // Instruction length in bytes - disasm string // Disassembly of instruction - file string // For top-level function in which instruction occurs - line int // For top-level function in which instruction occurs - flat, cum int64 // Samples to report (divisor already applied) -} - -// sourceFile contains collected information for files we will print. -type sourceFile struct { - fname string - cum int64 - flat int64 - lines map[int][]sourceInst // Instructions to show per line - funcName map[int]string // Function name per line -} - -// sourceInst holds information for an instruction to be displayed. -type sourceInst struct { - addr uint64 - stack []callID // Inlined call-stack -} - -// sourceFunction contains information for a contiguous range of lines per function we -// will print. -type sourceFunction struct { - name string - begin, end int // Line numbers (end is not included in the range) - flat, cum int64 -} - -// addressRange is a range of addresses plus the object file that contains it. 
-type addressRange struct { - begin, end uint64 - obj plugin.ObjFile - mapping *profile.Mapping - score int64 // Used to order ranges for processing -} - -// PrintWebList prints annotated source listing of rpt to w. -// rpt.prof should contain inlined call info. -func PrintWebList(w io.Writer, rpt *Report, obj plugin.ObjTool, maxFiles int) error { - sourcePath := rpt.options.SourcePath - if sourcePath == "" { - wd, err := os.Getwd() - if err != nil { - return fmt.Errorf("could not stat current dir: %v", err) - } - sourcePath = wd - } - sp := newSourcePrinter(rpt, obj, sourcePath) - if len(sp.interest) == 0 { - return fmt.Errorf("no matches found for regexp: %s", rpt.options.Symbol) - } - sp.print(w, maxFiles, rpt) - sp.close() - return nil -} - -func newSourcePrinter(rpt *Report, obj plugin.ObjTool, sourcePath string) *sourcePrinter { - sp := &sourcePrinter{ - reader: newSourceReader(sourcePath, rpt.options.TrimPath), - synth: newSynthCode(rpt.prof.Mapping), - objectTool: obj, - objects: map[string]plugin.ObjFile{}, - sym: rpt.options.Symbol, - files: map[string]*sourceFile{}, - insts: map[uint64]instructionInfo{}, - prettyNames: map[string]string{}, - interest: map[string]bool{}, - } - - // If the regexp source can be parsed as an address, also match - // functions that land on that address. - var address *uint64 - if sp.sym != nil { - if hex, err := strconv.ParseUint(sp.sym.String(), 0, 64); err == nil { - address = &hex - } - } - - addrs := map[uint64]addrInfo{} - flat := map[uint64]int64{} - cum := map[uint64]int64{} - - // Record an interest in the function corresponding to lines[index]. - markInterest := func(addr uint64, loc *profile.Location, index int) { - fn := loc.Line[index] - if fn.Function == nil { - return - } - sp.interest[fn.Function.Name] = true - sp.interest[fn.Function.SystemName] = true - if _, ok := addrs[addr]; !ok { - addrs[addr] = addrInfo{loc, sp.objectFile(loc.Mapping)} - } - } - - // See if sp.sym matches line. - matches := func(line profile.Line) bool { - if line.Function == nil { - return false - } - return sp.sym.MatchString(line.Function.Name) || - sp.sym.MatchString(line.Function.SystemName) || - sp.sym.MatchString(line.Function.Filename) - } - - // Extract sample counts and compute set of interesting functions. - for _, sample := range rpt.prof.Sample { - value := rpt.options.SampleValue(sample.Value) - if rpt.options.SampleMeanDivisor != nil { - div := rpt.options.SampleMeanDivisor(sample.Value) - if div != 0 { - value /= div - } - } - - // Find call-sites matching sym. - for i := len(sample.Location) - 1; i >= 0; i-- { - loc := sample.Location[i] - for _, line := range loc.Line { - if line.Function == nil { - continue - } - sp.prettyNames[line.Function.SystemName] = line.Function.Name - } - - addr := loc.Address - if addr == 0 { - // Some profiles are missing valid addresses. - addr = sp.synth.address(loc) - } - - cum[addr] += value - if i == 0 { - flat[addr] += value - } - - if sp.sym == nil || (address != nil && addr == *address) { - // Interested in top-level entry of stack. - if len(loc.Line) > 0 { - markInterest(addr, loc, len(loc.Line)-1) - } - continue - } - - // Search in inlined stack for a match. 
- matchFile := (loc.Mapping != nil && sp.sym.MatchString(loc.Mapping.File)) - for j, line := range loc.Line { - if (j == 0 && matchFile) || matches(line) { - markInterest(addr, loc, j) - } - } - } - } - - sp.expandAddresses(rpt, addrs, flat) - sp.initSamples(flat, cum) - return sp -} - -func (sp *sourcePrinter) close() { - for _, objFile := range sp.objects { - if objFile != nil { - objFile.Close() - } - } -} - -func (sp *sourcePrinter) expandAddresses(rpt *Report, addrs map[uint64]addrInfo, flat map[uint64]int64) { - // We found interesting addresses (ones with non-zero samples) above. - // Get covering address ranges and disassemble the ranges. - ranges, unprocessed := sp.splitIntoRanges(rpt.prof, addrs, flat) - sp.handleUnprocessed(addrs, unprocessed) - - // Trim ranges if there are too many. - const maxRanges = 25 - sort.Slice(ranges, func(i, j int) bool { - return ranges[i].score > ranges[j].score - }) - if len(ranges) > maxRanges { - ranges = ranges[:maxRanges] - } - - for _, r := range ranges { - objBegin, err := r.obj.ObjAddr(r.begin) - if err != nil { - fmt.Fprintf(os.Stderr, "Failed to compute objdump address for range start %x: %v\n", r.begin, err) - continue - } - objEnd, err := r.obj.ObjAddr(r.end) - if err != nil { - fmt.Fprintf(os.Stderr, "Failed to compute objdump address for range end %x: %v\n", r.end, err) - continue - } - base := r.begin - objBegin - insts, err := sp.objectTool.Disasm(r.mapping.File, objBegin, objEnd, rpt.options.IntelSyntax) - if err != nil { - // TODO(sanjay): Report that the covered addresses are missing. - continue - } - - var lastFrames []plugin.Frame - var lastAddr, maxAddr uint64 - for i, inst := range insts { - addr := inst.Addr + base - - // Guard against duplicate output from Disasm. - if addr <= maxAddr { - continue - } - maxAddr = addr - - length := 1 - if i+1 < len(insts) && insts[i+1].Addr > inst.Addr { - // Extend to next instruction. - length = int(insts[i+1].Addr - inst.Addr) - } - - // Get inlined-call-stack for address. - frames, err := r.obj.SourceLine(addr) - if err != nil { - // Construct a frame from disassembler output. - frames = []plugin.Frame{{Func: inst.Function, File: inst.File, Line: inst.Line}} - } - - x := instructionInfo{objAddr: inst.Addr, length: length, disasm: inst.Text} - if len(frames) > 0 { - // We could consider using the outer-most caller's source - // location so we give the some hint as to where the - // inlining happened that led to this instruction. So for - // example, suppose we have the following (inlined) call - // chains for this instruction: - // F1->G->H - // F2->G->H - // We could tag the instructions from the first call with - // F1 and instructions from the second call with F2. But - // that leads to a somewhat confusing display. So for now, - // we stick with just the inner-most location (i.e., H). - // In the future we will consider changing the display to - // make caller info more visible. - index := 0 // Inner-most frame - x.file = frames[index].File - x.line = frames[index].Line - } - sp.insts[addr] = x - - // We sometimes get instructions with a zero reported line number. - // Make such instructions have the same line info as the preceding - // instruction, if an earlier instruction is found close enough. 
- const neighborhood = 32 - if len(frames) > 0 && frames[0].Line != 0 { - lastFrames = frames - lastAddr = addr - } else if (addr-lastAddr <= neighborhood) && lastFrames != nil { - frames = lastFrames - } - - sp.addStack(addr, frames) - } - } -} - -func (sp *sourcePrinter) addStack(addr uint64, frames []plugin.Frame) { - // See if the stack contains a function we are interested in. - for i, f := range frames { - if !sp.interest[f.Func] { - continue - } - - // Record sub-stack under frame's file/line. - fname := canonicalizeFileName(f.File) - file := sp.files[fname] - if file == nil { - file = &sourceFile{ - fname: fname, - lines: map[int][]sourceInst{}, - funcName: map[int]string{}, - } - sp.files[fname] = file - } - callees := frames[:i] - stack := make([]callID, 0, len(callees)) - for j := len(callees) - 1; j >= 0; j-- { // Reverse so caller is first - stack = append(stack, callID{ - file: callees[j].File, - line: callees[j].Line, - }) - } - file.lines[f.Line] = append(file.lines[f.Line], sourceInst{addr, stack}) - - // Remember the first function name encountered per source line - // and assume that that line belongs to that function. - if _, ok := file.funcName[f.Line]; !ok { - file.funcName[f.Line] = f.Func - } - } -} - -// synthAsm is the special disassembler value used for instructions without an object file. -const synthAsm = "" - -// handleUnprocessed handles addresses that were skipped by splitIntoRanges because they -// did not belong to a known object file. -func (sp *sourcePrinter) handleUnprocessed(addrs map[uint64]addrInfo, unprocessed []uint64) { - // makeFrames synthesizes a []plugin.Frame list for the specified address. - // The result will typically have length 1, but may be longer if address corresponds - // to inlined calls. - makeFrames := func(addr uint64) []plugin.Frame { - loc := addrs[addr].loc - stack := make([]plugin.Frame, 0, len(loc.Line)) - for _, line := range loc.Line { - fn := line.Function - if fn == nil { - continue - } - stack = append(stack, plugin.Frame{ - Func: fn.Name, - File: fn.Filename, - Line: int(line.Line), - }) - } - return stack - } - - for _, addr := range unprocessed { - frames := makeFrames(addr) - x := instructionInfo{ - objAddr: addr, - length: 1, - disasm: synthAsm, - } - if len(frames) > 0 { - x.file = frames[0].File - x.line = frames[0].Line - } - sp.insts[addr] = x - - sp.addStack(addr, frames) - } -} - -// splitIntoRanges converts the set of addresses we are interested in into a set of address -// ranges to disassemble. It also returns the set of addresses found that did not have an -// associated object file and were therefore not added to an address range. -func (sp *sourcePrinter) splitIntoRanges(prof *profile.Profile, addrMap map[uint64]addrInfo, flat map[uint64]int64) ([]addressRange, []uint64) { - // Partition addresses into two sets: ones with a known object file, and ones without. - var addrs, unprocessed []uint64 - for addr, info := range addrMap { - if info.obj != nil { - addrs = append(addrs, addr) - } else { - unprocessed = append(unprocessed, addr) - } - } - sort.Slice(addrs, func(i, j int) bool { return addrs[i] < addrs[j] }) - - const expand = 500 // How much to expand range to pick up nearby addresses. - var result []addressRange - for i, n := 0, len(addrs); i < n; { - begin, end := addrs[i], addrs[i] - sum := flat[begin] - i++ - - info := addrMap[begin] - m := info.loc.Mapping - obj := info.obj // Non-nil because of the partitioning done above. - - // Find following addresses that are close enough to addrs[i]. 
- for i < n && addrs[i] <= end+2*expand && addrs[i] < m.Limit { - // When we expand ranges by "expand" on either side, the ranges - // for addrs[i] and addrs[i-1] will merge. - end = addrs[i] - sum += flat[end] - i++ - } - if m.Start-begin >= expand { - begin -= expand - } else { - begin = m.Start - } - if m.Limit-end >= expand { - end += expand - } else { - end = m.Limit - } - - result = append(result, addressRange{begin, end, obj, m, sum}) - } - return result, unprocessed -} - -func (sp *sourcePrinter) initSamples(flat, cum map[uint64]int64) { - for addr, inst := range sp.insts { - // Move all samples that were assigned to the middle of an instruction to the - // beginning of that instruction. This takes care of samples that were recorded - // against pc+1. - instEnd := addr + uint64(inst.length) - for p := addr; p < instEnd; p++ { - inst.flat += flat[p] - inst.cum += cum[p] - } - sp.insts[addr] = inst - } -} - -func (sp *sourcePrinter) print(w io.Writer, maxFiles int, rpt *Report) { - // Finalize per-file counts. - for _, file := range sp.files { - seen := map[uint64]bool{} - for _, line := range file.lines { - for _, x := range line { - if seen[x.addr] { - // Same address can be displayed multiple times in a file - // (e.g., if we show multiple inlined functions). - // Avoid double-counting samples in this case. - continue - } - seen[x.addr] = true - inst := sp.insts[x.addr] - file.cum += inst.cum - file.flat += inst.flat - } - } - } - - // Get sorted list of files to print. - var files []*sourceFile - for _, f := range sp.files { - files = append(files, f) - } - order := func(i, j int) bool { return files[i].flat > files[j].flat } - if maxFiles < 0 { - // Order by name for compatibility with old code. - order = func(i, j int) bool { return files[i].fname < files[j].fname } - maxFiles = len(files) - } - sort.Slice(files, order) - for i, f := range files { - if i < maxFiles { - sp.printFile(w, f, rpt) - } - } -} - -func (sp *sourcePrinter) printFile(w io.Writer, f *sourceFile, rpt *Report) { - for _, fn := range sp.functions(f) { - if fn.cum == 0 { - continue - } - printFunctionHeader(w, fn.name, f.fname, fn.flat, fn.cum, rpt) - var asm []assemblyInstruction - for l := fn.begin; l < fn.end; l++ { - lineContents, ok := sp.reader.line(f.fname, l) - if !ok { - if len(f.lines[l]) == 0 { - // Outside of range of valid lines and nothing to print. - continue - } - if l == 0 { - // Line number 0 shows up if line number is not known. - lineContents = "" - } else { - // Past end of file, but have data to print. - lineContents = "???" - } - } - - // Make list of assembly instructions. - asm = asm[:0] - var flatSum, cumSum int64 - var lastAddr uint64 - for _, inst := range f.lines[l] { - addr := inst.addr - x := sp.insts[addr] - flatSum += x.flat - cumSum += x.cum - startsBlock := (addr != lastAddr+uint64(sp.insts[lastAddr].length)) - lastAddr = addr - - // divisors already applied, so leave flatDiv,cumDiv as 0 - asm = append(asm, assemblyInstruction{ - address: x.objAddr, - instruction: x.disasm, - function: fn.name, - file: x.file, - line: x.line, - flat: x.flat, - cum: x.cum, - startsBlock: startsBlock, - inlineCalls: inst.stack, - }) - } - - printFunctionSourceLine(w, l, flatSum, cumSum, lineContents, asm, sp.reader, rpt) - } - printFunctionClosing(w) - } -} - -// functions splits apart the lines to show in a file into a list of per-function ranges. -func (sp *sourcePrinter) functions(f *sourceFile) []sourceFunction { - var funcs []sourceFunction - - // Get interesting lines in sorted order. 
- lines := make([]int, 0, len(f.lines)) - for l := range f.lines { - lines = append(lines, l) - } - sort.Ints(lines) - - // Merge adjacent lines that are in same function and not too far apart. - const mergeLimit = 20 - for _, l := range lines { - name := f.funcName[l] - if pretty, ok := sp.prettyNames[name]; ok { - // Use demangled name if available. - name = pretty - } - - fn := sourceFunction{name: name, begin: l, end: l + 1} - for _, x := range f.lines[l] { - inst := sp.insts[x.addr] - fn.flat += inst.flat - fn.cum += inst.cum - } - - // See if we should merge into preceding function. - if len(funcs) > 0 { - last := funcs[len(funcs)-1] - if l-last.end < mergeLimit && last.name == name { - last.end = l + 1 - last.flat += fn.flat - last.cum += fn.cum - funcs[len(funcs)-1] = last - continue - } - } - - // Add new function. - funcs = append(funcs, fn) - } - - // Expand function boundaries to show neighborhood. - const expand = 5 - for i, f := range funcs { - if i == 0 { - // Extend backwards, stopping at line number 1, but do not disturb 0 - // since that is a special line number that can show up when addr2line - // cannot determine the real line number. - if f.begin > expand { - f.begin -= expand - } else if f.begin > 1 { - f.begin = 1 - } - } else { - // Find gap from predecessor and divide between predecessor and f. - halfGap := (f.begin - funcs[i-1].end) / 2 - if halfGap > expand { - halfGap = expand - } - funcs[i-1].end += halfGap - f.begin -= halfGap - } - funcs[i] = f - } - - // Also extend the ending point of the last function. - if len(funcs) > 0 { - funcs[len(funcs)-1].end += expand - } - - return funcs -} - -// objectFile return the object for the specified mapping, opening it if necessary. -// It returns nil on error. -func (sp *sourcePrinter) objectFile(m *profile.Mapping) plugin.ObjFile { - if m == nil { - return nil - } - if object, ok := sp.objects[m.File]; ok { - return object // May be nil if we detected an error earlier. - } - object, err := sp.objectTool.Open(m.File, m.Start, m.Limit, m.Offset) - if err != nil { - object = nil - } - sp.objects[m.File] = object // Cache even on error. - return object -} - -// printHeader prints the page header for a weblist report. -func printHeader(w io.Writer, rpt *Report) { - fmt.Fprintln(w, ` - - - - -Pprof listing`) - fmt.Fprintln(w, weblistPageCSS) - fmt.Fprintln(w, weblistPageScript) - fmt.Fprint(w, "\n\n\n") - - var labels []string - for _, l := range ProfileLabels(rpt) { - labels = append(labels, template.HTMLEscapeString(l)) - } - - fmt.Fprintf(w, `
<div class="legend">%s<br>Total: %s</div>`,
-		strings.Join(labels, "<br>\n"),
-		rpt.formatValue(rpt.total),
-	)
-}
-
-// printFunctionHeader prints a function header for a weblist report.
-func printFunctionHeader(w io.Writer, name, path string, flatSum, cumSum int64, rpt *Report) {
-	fmt.Fprintf(w, `<h2>%s</h2><p class="filename">%s</p>
-<pre onClick="pprof_toggle_asm(event)">
-  Total:  %10s %10s (flat, cum) %s
-`,
-		template.HTMLEscapeString(name), template.HTMLEscapeString(path),
-		rpt.formatValue(flatSum), rpt.formatValue(cumSum),
-		measurement.Percentage(cumSum, rpt.total))
-}
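The deleted header above prints a function's flat and cumulative sums together with their share of the report total, via the vendored measurement.Percentage helper. A minimal stand-in for that last calculation, assuming a plain fmt-based formatter rather than the vendored one:

    package main

    import "fmt"

    // percentage is a simplified, hypothetical stand-in for the vendored
    // measurement.Percentage helper: it renders value as a share of total,
    // guarding against a zero total.
    func percentage(value, total int64) string {
    	if total == 0 {
    		return "NaN%"
    	}
    	return fmt.Sprintf("%.2f%%", 100*float64(value)/float64(total))
    }

    func main() {
    	// e.g. 250ms cumulative time in a function, out of a 1000ms profile total.
    	fmt.Println(percentage(250, 1000)) // 25.00%
    }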
-
-// printFunctionSourceLine prints a source line and the corresponding assembly.
-func printFunctionSourceLine(w io.Writer, lineNo int, flat, cum int64, lineContents string,
-	assembly []assemblyInstruction, reader *sourceReader, rpt *Report) {
-	if len(assembly) == 0 {
-		fmt.Fprintf(w,
-			"<span class=line> %6d</span> <span class=nop>  %10s %10s %8s  %s </span>\n",
-			lineNo,
-			valueOrDot(flat, rpt), valueOrDot(cum, rpt),
-			"", template.HTMLEscapeString(lineContents))
-		return
-	}
-
-	nestedInfo := false
-	cl := "deadsrc"
-	for _, an := range assembly {
-		if len(an.inlineCalls) > 0 || an.instruction != synthAsm {
-			nestedInfo = true
-			cl = "livesrc"
-		}
-	}
-
-	fmt.Fprintf(w,
-		"<span class=line> %6d</span> <span class=%s>  %10s %10s %8s  %s </span>",
-		lineNo, cl,
-		valueOrDot(flat, rpt), valueOrDot(cum, rpt),
-		"", template.HTMLEscapeString(lineContents))
-	if nestedInfo {
-		srcIndent := indentation(lineContents)
-		printNested(w, srcIndent, assembly, reader, rpt)
-	}
-	fmt.Fprintln(w)
-}
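printFunctionSourceLine above only expands a source line into nested assembly when at least one attached instruction carries real disassembly or inlined-call context, switching its CSS class from deadsrc to livesrc. A small sketch of that decision, using a hypothetical trimmed-down instruction type instead of the deleted assemblyInstruction:

    package main

    import "fmt"

    // inst is a hypothetical, trimmed-down stand-in for the deleted
    // assemblyInstruction type: only the two fields the check needs.
    type inst struct {
    	disasm      string // "" corresponds to synthAsm (no real disassembly)
    	inlineCalls int
    }

    // lineClass mirrors the classification above: a line is "livesrc"
    // (expandable in the weblist) as soon as any instruction has
    // disassembly or inlined-call context, otherwise "deadsrc".
    func lineClass(assembly []inst) string {
    	for _, an := range assembly {
    		if an.inlineCalls > 0 || an.disasm != "" {
    			return "livesrc"
    		}
    	}
    	return "deadsrc"
    }

    func main() {
    	fmt.Println(lineClass(nil))                           // deadsrc
    	fmt.Println(lineClass([]inst{{disasm: "mov rax,1"}})) // livesrc
    }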
-
-func printNested(w io.Writer, srcIndent int, assembly []assemblyInstruction, reader *sourceReader, rpt *Report) {
-	fmt.Fprint(w, "<span class=asm>")
-	var curCalls []callID
-	for i, an := range assembly {
-		if an.startsBlock && i != 0 {
-			// Insert a separator between discontiguous blocks.
-			fmt.Fprintf(w, " %8s %28s\n", "", "⋮")
-		}
-
-		var fileline string
-		if an.file != "" {
-			fileline = fmt.Sprintf("%s:%d", template.HTMLEscapeString(filepath.Base(an.file)), an.line)
-		}
-		flat, cum := an.flat, an.cum
-
-		// Print inlined call context.
-		for j, c := range an.inlineCalls {
-			if j < len(curCalls) && curCalls[j] == c {
-				// Skip if same as previous instruction.
-				continue
-			}
-			curCalls = nil
-			fline, ok := reader.line(c.file, c.line)
-			if !ok {
-				fline = ""
-			}
-			text := strings.Repeat(" ", srcIndent+4+4*j) + strings.TrimSpace(fline)
-			fmt.Fprintf(w, " %8s %10s %10s %8s  <span class=inlinesrc>%s</span> <span class=unimportant>%s:%d</span>\n",
-				"", "", "", "",
-				template.HTMLEscapeString(rightPad(text, 80)),
-				template.HTMLEscapeString(filepath.Base(c.file)), c.line)
-		}
-		curCalls = an.inlineCalls
-		if an.instruction == synthAsm {
-			continue
-		}
-		text := strings.Repeat(" ", srcIndent+4+4*len(curCalls)) + an.instruction
-		fmt.Fprintf(w, " %8s %10s %10s %8x: %s <span class=unimportant>%s</span>\n",
-			"", valueOrDot(flat, rpt), valueOrDot(cum, rpt), an.address,
-			template.HTMLEscapeString(rightPad(text, 80)),
-			// fileline should not be escaped since it was formed by appending
-			// line number (just digits) to an escaped file name. Escaping here
-			// would cause double-escaping of file name.
-			fileline)
-	}
-	fmt.Fprint(w, "</span>")
-}
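printNested above indents each nested line four columns past the source line's own indentation, plus four more per inlined-call level, before right-padding the text to 80 columns with the rightPad helper further below. A short sketch of that layout rule, using plain strings.Repeat rather than the deleted helpers:

    package main

    import (
    	"fmt"
    	"strings"
    )

    // nestedIndent mirrors the rule above: nested text starts four columns
    // past the source line's indentation, plus four per inlined-call level
    // already printed before it.
    func nestedIndent(srcIndent, inlineDepth int) string {
    	return strings.Repeat(" ", srcIndent+4+4*inlineDepth)
    }

    func main() {
    	const srcIndent = 2 // e.g. a source line indented by two spaces
    	fmt.Printf("%q\n", nestedIndent(srcIndent, 0)+"callerFrame()") // first inline frame
    	fmt.Printf("%q\n", nestedIndent(srcIndent, 1)+"mov rax,1")     // instruction under that frame
    }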
-
-// printFunctionClosing prints the end of a function in a weblist report.
-func printFunctionClosing(w io.Writer) {
-	fmt.Fprintln(w, "</pre>
") -} - -// printPageClosing prints the end of the page in a weblist report. -func printPageClosing(w io.Writer) { - fmt.Fprintln(w, weblistPageClosing) -} - -// getSourceFromFile collects the sources of a function from a source -// file and annotates it with the samples in fns. Returns the sources -// as nodes, using the info.name field to hold the source code. -func getSourceFromFile(file string, reader *sourceReader, fns graph.Nodes, start, end int) (graph.Nodes, string, error) { - lineNodes := make(map[int]graph.Nodes) - - // Collect source coordinates from profile. - const margin = 5 // Lines before first/after last sample. - if start == 0 { - if fns[0].Info.StartLine != 0 { - start = fns[0].Info.StartLine - } else { - start = fns[0].Info.Lineno - margin - } - } else { - start -= margin - } - if end == 0 { - end = fns[0].Info.Lineno - } - end += margin - for _, n := range fns { - lineno := n.Info.Lineno - nodeStart := n.Info.StartLine - if nodeStart == 0 { - nodeStart = lineno - margin - } - nodeEnd := lineno + margin - if nodeStart < start { - start = nodeStart - } else if nodeEnd > end { - end = nodeEnd - } - lineNodes[lineno] = append(lineNodes[lineno], n) - } - if start < 1 { - start = 1 - } - - var src graph.Nodes - for lineno := start; lineno <= end; lineno++ { - line, ok := reader.line(file, lineno) - if !ok { - break - } - flat, cum := lineNodes[lineno].Sum() - src = append(src, &graph.Node{ - Info: graph.NodeInfo{ - Name: strings.TrimRight(line, "\n"), - Lineno: lineno, - }, - Flat: flat, - Cum: cum, - }) - } - if err := reader.fileError(file); err != nil { - return nil, file, err - } - return src, file, nil -} - -// sourceReader provides access to source code with caching of file contents. -type sourceReader struct { - // searchPath is a filepath.ListSeparator-separated list of directories where - // source files should be searched. - searchPath string - - // trimPath is a filepath.ListSeparator-separated list of paths to trim. - trimPath string - - // files maps from path name to a list of lines. - // files[*][0] is unused since line numbering starts at 1. - files map[string][]string - - // errors collects errors encountered per file. These errors are - // consulted before returning out of these module. - errors map[string]error -} - -func newSourceReader(searchPath, trimPath string) *sourceReader { - return &sourceReader{ - searchPath, - trimPath, - make(map[string][]string), - make(map[string]error), - } -} - -func (reader *sourceReader) fileError(path string) error { - return reader.errors[path] -} - -// line returns the line numbered "lineno" in path, or _,false if lineno is out of range. -func (reader *sourceReader) line(path string, lineno int) (string, bool) { - lines, ok := reader.files[path] - if !ok { - // Read and cache file contents. - lines = []string{""} // Skip 0th line - f, err := openSourceFile(path, reader.searchPath, reader.trimPath) - if err != nil { - reader.errors[path] = err - } else { - s := bufio.NewScanner(f) - for s.Scan() { - lines = append(lines, s.Text()) - } - f.Close() - if s.Err() != nil { - reader.errors[path] = err - } - } - reader.files[path] = lines - } - if lineno <= 0 || lineno >= len(lines) { - return "", false - } - return lines[lineno], true -} - -// openSourceFile opens a source file from a name encoded in a profile. File -// names in a profile after can be relative paths, so search them in each of -// the paths in searchPath and their parents. 
In case the profile contains -// absolute paths, additional paths may be configured to trim from the source -// paths in the profile. This effectively turns the path into a relative path -// searching it using searchPath as usual). -func openSourceFile(path, searchPath, trim string) (*os.File, error) { - path = trimPath(path, trim, searchPath) - // If file is still absolute, require file to exist. - if filepath.IsAbs(path) { - f, err := os.Open(path) - return f, err - } - // Scan each component of the path. - for _, dir := range filepath.SplitList(searchPath) { - // Search up for every parent of each possible path. - for { - filename := filepath.Join(dir, path) - if f, err := os.Open(filename); err == nil { - return f, nil - } - parent := filepath.Dir(dir) - if parent == dir { - break - } - dir = parent - } - } - - return nil, fmt.Errorf("could not find file %s on path %s", path, searchPath) -} - -// trimPath cleans up a path by removing prefixes that are commonly -// found on profiles plus configured prefixes. -// TODO(aalexand): Consider optimizing out the redundant work done in this -// function if it proves to matter. -func trimPath(path, trimPath, searchPath string) string { - // Keep path variable intact as it's used below to form the return value. - sPath, searchPath := filepath.ToSlash(path), filepath.ToSlash(searchPath) - if trimPath == "" { - // If the trim path is not configured, try to guess it heuristically: - // search for basename of each search path in the original path and, if - // found, strip everything up to and including the basename. So, for - // example, given original path "/some/remote/path/my-project/foo/bar.c" - // and search path "/my/local/path/my-project" the heuristic will return - // "/my/local/path/my-project/foo/bar.c". - for _, dir := range filepath.SplitList(searchPath) { - want := "/" + filepath.Base(dir) + "/" - if found := strings.Index(sPath, want); found != -1 { - return path[found+len(want):] - } - } - } - // Trim configured trim prefixes. - trimPaths := append(filepath.SplitList(filepath.ToSlash(trimPath)), "/proc/self/cwd/./", "/proc/self/cwd/") - for _, trimPath := range trimPaths { - if !strings.HasSuffix(trimPath, "/") { - trimPath += "/" - } - if strings.HasPrefix(sPath, trimPath) { - return path[len(trimPath):] - } - } - return path -} - -func indentation(line string) int { - column := 0 - for _, c := range line { - if c == ' ' { - column++ - } else if c == '\t' { - column++ - for column%8 != 0 { - column++ - } - } else { - break - } - } - return column -} - -// rightPad pads the input with spaces on the right-hand-side to make it have -// at least width n. It treats tabs as enough spaces that lead to the next -// 8-aligned tab-stop. -func rightPad(s string, n int) string { - var str strings.Builder - - // Convert tabs to spaces as we go so padding works regardless of what prefix - // is placed before the result. 
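The deleted indentation and rightPad helpers both lean on 8-column tab stops when measuring and padding source lines. A minimal sketch of that column arithmetic, under the assumption that plain spaces are an acceptable output (the helper name expandTabs is ours, not part of the deleted code):

```go
package main

import (
	"fmt"
	"strings"
)

// expandTabs converts tabs to runs of spaces so that each tab advances the
// column to the next 8-aligned tab stop, mirroring the column bookkeeping in
// the deleted indentation/rightPad helpers. Multi-byte runes count as one
// column, as they do there.
func expandTabs(s string) string {
	var b strings.Builder
	column := 0
	for _, c := range s {
		if c == '\t' {
			// Always emit at least one space, then pad to a multiple of 8.
			b.WriteRune(' ')
			column++
			for column%8 != 0 {
				b.WriteRune(' ')
				column++
			}
			continue
		}
		b.WriteRune(c)
		column++
	}
	return b.String()
}

func main() {
	// Each tab jumps to the next 8-column stop regardless of what precedes it.
	fmt.Printf("%q\n", expandTabs("w\txy\tz"))
}
```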
- column := 0 - for _, c := range s { - column++ - if c == '\t' { - str.WriteRune(' ') - for column%8 != 0 { - column++ - str.WriteRune(' ') - } - } else { - str.WriteRune(c) - } - } - for column < n { - column++ - str.WriteRune(' ') - } - return str.String() -} - -func canonicalizeFileName(fname string) string { - fname = strings.TrimPrefix(fname, "/proc/self/cwd/") - fname = strings.TrimPrefix(fname, "./") - return filepath.Clean(fname) -} diff --git a/internal/pprof/report/source_html.go b/internal/pprof/report/source_html.go deleted file mode 100644 index 17c9f6eb947..00000000000 --- a/internal/pprof/report/source_html.go +++ /dev/null @@ -1,76 +0,0 @@ -// Copyright 2014 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package report - -import ( - "html/template" -) - -// AddSourceTemplates adds templates used by PrintWebList to t. -func AddSourceTemplates(t *template.Template) { - template.Must(t.Parse(`{{define "weblistcss"}}` + weblistPageCSS + `{{end}}`)) - template.Must(t.Parse(`{{define "weblistjs"}}` + weblistPageScript + `{{end}}`)) -} - -const weblistPageCSS = `` - -const weblistPageScript = `` - -const weblistPageClosing = ` - - -` diff --git a/internal/pprof/report/source_test.go b/internal/pprof/report/source_test.go deleted file mode 100644 index cff6e9ba45c..00000000000 --- a/internal/pprof/report/source_test.go +++ /dev/null @@ -1,290 +0,0 @@ -// Copyright 2017 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
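The deleted source_html.go registers its CSS and JS as named sub-templates on a shared html/template set. A minimal sketch of that pattern, with placeholder template names and contents rather than the real weblist assets:

```go
package main

import (
	"html/template"
	"os"
)

// Named sub-templates are added to one template set with {{define}} and then
// referenced by name from the page body, which is how the deleted
// AddSourceTemplates wires weblistcss/weblistjs into the report templates.
func main() {
	t := template.New("page")
	template.Must(t.Parse(`{{define "pagecss"}}<style>/* css */</style>{{end}}`))
	template.Must(t.Parse(`{{define "pagejs"}}<script>/* js */</script>{{end}}`))
	template.Must(t.Parse(`<html><head>{{template "pagecss" .}}{{template "pagejs" .}}</head></html>`))

	if err := t.Execute(os.Stdout, nil); err != nil {
		panic(err)
	}
}
```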
- -package report - -import ( - "bytes" - "fmt" - "io/ioutil" - "os" - "path/filepath" - "regexp" - "runtime" - "strings" - "testing" - - "github.com/parca-dev/parca/internal/pprof/binutils" - "github.com/google/pprof/profile" -) - -func TestWebList(t *testing.T) { - if runtime.GOOS != "linux" || runtime.GOARCH != "amd64" { - t.Skip("weblist only tested on x86-64 linux") - } - - cpu := readProfile(filepath.Join("testdata", "sample.cpu"), t) - rpt := New(cpu, &Options{ - OutputFormat: WebList, - Symbol: regexp.MustCompile("busyLoop"), - SampleValue: func(v []int64) int64 { return v[1] }, - SampleUnit: cpu.SampleType[1].Unit, - }) - var buf bytes.Buffer - if err := Generate(&buf, rpt, &binutils.Binutils{}); err != nil { - t.Fatalf("could not generate weblist: %v", err) - } - output := buf.String() - - for _, expect := range []string{"func busyLoop", "callq.*mapassign"} { - if match, _ := regexp.MatchString(expect, output); !match { - t.Errorf("weblist output does not contain '%s':\n%s", expect, output) - } - } -} - -func TestSourceSyntheticAddress(t *testing.T) { - testSourceMapping(t, true) -} - -func TestSourceMissingMapping(t *testing.T) { - testSourceMapping(t, false) -} - -// testSourceMapping checks that source info is found even when no applicable -// Mapping/objectFile exists. The locations used in the test are either zero -// (if zeroAddress is true), or non-zero (otherwise). -func testSourceMapping(t *testing.T, zeroAddress bool) { - nextAddr := uint64(0) - - makeLoc := func(name, fname string, line int64) *profile.Location { - if !zeroAddress { - nextAddr++ - } - return &profile.Location{ - Address: nextAddr, - Line: []profile.Line{ - { - Function: &profile.Function{Name: name, Filename: fname}, - Line: line, - }, - }, - } - } - - // Create profile that will need synthetic addresses since it has no mappings. - foo100 := makeLoc("foo", "foo.go", 100) - bar50 := makeLoc("bar", "bar.go", 50) - prof := &profile.Profile{ - Sample: []*profile.Sample{ - { - Value: []int64{9}, - Location: []*profile.Location{foo100, bar50}, - }, - { - Value: []int64{17}, - Location: []*profile.Location{bar50}, - }, - }, - } - rpt := &Report{ - prof: prof, - options: &Options{ - Symbol: regexp.MustCompile("foo|bar"), - SampleValue: func(s []int64) int64 { return s[0] }, - }, - formatValue: func(v int64) string { return fmt.Sprint(v) }, - } - - var out bytes.Buffer - err := PrintWebList(&out, rpt, nil, -1) - if err != nil { - t.Fatalf("PrintWebList returned unexpected error: %v", err) - } - got := out.String() - expect := regexp.MustCompile( - `(?s)` + // Allow "." to match newline - `bar\.go.* 50\b.* 17 +26 .*` + - `foo\.go.* 100\b.* 9 +9 `) - if !expect.MatchString(got) { - t.Errorf("expected regular expression %v does not match output:\n%s\n", expect, got) - } -} - -func TestOpenSourceFile(t *testing.T) { - tempdir, err := ioutil.TempDir("", "") - if err != nil { - t.Fatalf("failed to create temp dir: %v", err) - } - const lsep = string(filepath.ListSeparator) - for _, tc := range []struct { - desc string - searchPath string - trimPath string - fs []string - path string - wantPath string // If empty, error is wanted. 
- }{ - { - desc: "exact absolute path is found", - fs: []string{"foo/bar.cc"}, - path: "$dir/foo/bar.cc", - wantPath: "$dir/foo/bar.cc", - }, - { - desc: "exact relative path is found", - searchPath: "$dir", - fs: []string{"foo/bar.cc"}, - path: "foo/bar.cc", - wantPath: "$dir/foo/bar.cc", - }, - { - desc: "multiple search path", - searchPath: "some/path" + lsep + "$dir", - fs: []string{"foo/bar.cc"}, - path: "foo/bar.cc", - wantPath: "$dir/foo/bar.cc", - }, - { - desc: "relative path is found in parent dir", - searchPath: "$dir/foo/bar", - fs: []string{"bar.cc", "foo/bar/baz.cc"}, - path: "bar.cc", - wantPath: "$dir/bar.cc", - }, - { - desc: "trims configured prefix", - searchPath: "$dir", - trimPath: "some-path" + lsep + "/some/remote/path", - fs: []string{"my-project/foo/bar.cc"}, - path: "/some/remote/path/my-project/foo/bar.cc", - wantPath: "$dir/my-project/foo/bar.cc", - }, - { - desc: "trims heuristically", - searchPath: "$dir/my-project", - fs: []string{"my-project/foo/bar.cc"}, - path: "/some/remote/path/my-project/foo/bar.cc", - wantPath: "$dir/my-project/foo/bar.cc", - }, - { - desc: "error when not found", - path: "foo.cc", - }, - } { - t.Run(tc.desc, func(t *testing.T) { - defer func() { - if err := os.RemoveAll(tempdir); err != nil { - t.Fatalf("failed to remove dir %q: %v", tempdir, err) - } - }() - for _, f := range tc.fs { - path := filepath.Join(tempdir, filepath.FromSlash(f)) - dir := filepath.Dir(path) - if err := os.MkdirAll(dir, 0755); err != nil { - t.Fatalf("failed to create dir %q: %v", dir, err) - } - if err := ioutil.WriteFile(path, nil, 0644); err != nil { - t.Fatalf("failed to create file %q: %v", path, err) - } - } - tc.searchPath = filepath.FromSlash(strings.Replace(tc.searchPath, "$dir", tempdir, -1)) - tc.path = filepath.FromSlash(strings.Replace(tc.path, "$dir", tempdir, 1)) - tc.wantPath = filepath.FromSlash(strings.Replace(tc.wantPath, "$dir", tempdir, 1)) - if file, err := openSourceFile(tc.path, tc.searchPath, tc.trimPath); err != nil && tc.wantPath != "" { - t.Errorf("openSourceFile(%q, %q, %q) = err %v, want path %q", tc.path, tc.searchPath, tc.trimPath, err, tc.wantPath) - } else if err == nil { - defer file.Close() - gotPath := file.Name() - if tc.wantPath == "" { - t.Errorf("openSourceFile(%q, %q, %q) = %q, want error", tc.path, tc.searchPath, tc.trimPath, gotPath) - } else if gotPath != tc.wantPath { - t.Errorf("openSourceFile(%q, %q, %q) = %q, want path %q", tc.path, tc.searchPath, tc.trimPath, gotPath, tc.wantPath) - } - } - }) - } -} - -func TestIndentation(t *testing.T) { - for _, c := range []struct { - str string - wantIndent int - }{ - {"", 0}, - {"foobar", 0}, - {" foo", 2}, - {"\tfoo", 8}, - {"\t foo", 9}, - {" \tfoo", 8}, - {" \tfoo", 8}, - {" \tfoo", 16}, - } { - if n := indentation(c.str); n != c.wantIndent { - t.Errorf("indentation(%v): got %d, want %d", c.str, n, c.wantIndent) - } - } -} - -func TestRightPad(t *testing.T) { - for _, c := range []struct { - pad int - in string - expect string - }{ - {0, "", ""}, - {4, "", " "}, - {4, "x", "x "}, - {4, "abcd", "abcd"}, // No padding because of overflow - {4, "abcde", "abcde"}, // No padding because of overflow - {10, "\tx", " x "}, - {10, "w\txy\tz", "w xy z"}, - {20, "w\txy\tz", "w xy z "}, - } { - out := rightPad(c.in, c.pad) - if out != c.expect { - t.Errorf("rightPad(%q, %d): got %q, want %q", c.in, c.pad, out, c.expect) - } - } -} - -func readProfile(fname string, t *testing.T) *profile.Profile { - file, err := os.Open(fname) - if err != nil { - t.Fatalf("%s: could not open 
profile: %v", fname, err) - } - defer file.Close() - p, err := profile.Parse(file) - if err != nil { - t.Fatalf("%s: could not parse profile: %v", fname, err) - } - - // Fix file names so they do not include absolute path names. - fix := func(s string) string { - const testdir = "/internal/report/" - pos := strings.Index(s, testdir) - if pos == -1 { - return s - } - return s[pos+len(testdir):] - } - for _, m := range p.Mapping { - m.File = fix(m.File) - } - for _, f := range p.Function { - f.Filename = fix(f.Filename) - } - - return p -} diff --git a/internal/pprof/report/synth.go b/internal/pprof/report/synth.go deleted file mode 100644 index 7a35bbcda8f..00000000000 --- a/internal/pprof/report/synth.go +++ /dev/null @@ -1,39 +0,0 @@ -package report - -import ( - "github.com/google/pprof/profile" -) - -// synthCode assigns addresses to locations without an address. -type synthCode struct { - next uint64 - addr map[*profile.Location]uint64 // Synthesized address assigned to a location -} - -func newSynthCode(mappings []*profile.Mapping) *synthCode { - // Find a larger address than any mapping. - s := &synthCode{next: 1} - for _, m := range mappings { - if s.next < m.Limit { - s.next = m.Limit - } - } - return s -} - -// address returns the synthetic address for loc, creating one if needed. -func (s *synthCode) address(loc *profile.Location) uint64 { - if loc.Address != 0 { - panic("can only synthesize addresses for locations without an address") - } - if addr, ok := s.addr[loc]; ok { - return addr - } - if s.addr == nil { - s.addr = map[*profile.Location]uint64{} - } - addr := s.next - s.next++ - s.addr[loc] = addr - return addr -} diff --git a/internal/pprof/report/synth_test.go b/internal/pprof/report/synth_test.go deleted file mode 100644 index 4d8895a466b..00000000000 --- a/internal/pprof/report/synth_test.go +++ /dev/null @@ -1,36 +0,0 @@ -package report - -import ( - "testing" - - "github.com/google/pprof/profile" -) - -func TestSynthAddresses(t *testing.T) { - s := newSynthCode(nil) - l1 := &profile.Location{} - addr1 := s.address(l1) - if s.address(l1) != addr1 { - t.Errorf("different calls with same location returned different addresses") - } - - l2 := &profile.Location{} - addr2 := s.address(l2) - if addr2 == addr1 { - t.Errorf("same address assigned to different locations") - } - -} - -func TestSynthAvoidsMapping(t *testing.T) { - mappings := []*profile.Mapping{ - {Start: 100, Limit: 200}, - {Start: 300, Limit: 400}, - } - s := newSynthCode(mappings) - loc := &profile.Location{} - addr := s.address(loc) - if addr >= 100 && addr < 200 || addr >= 300 && addr < 400 { - t.Errorf("synthetic location %d overlaps mapping %v", addr, mappings) - } -} diff --git a/internal/pprof/report/testdata/README.md b/internal/pprof/report/testdata/README.md deleted file mode 100644 index 2b60fcca6c7..00000000000 --- a/internal/pprof/report/testdata/README.md +++ /dev/null @@ -1,10 +0,0 @@ -sample/ contains a sample program that can be profiled. -sample.bin is its x86-64 binary. -sample.cpu is a profile generated by sample.bin. 
- -To update the binary and profile: - -```shell -go build -o sample.bin ./sample -./sample.bin -cpuprofile sample.cpu -``` diff --git a/internal/pprof/report/testdata/sample.bin b/internal/pprof/report/testdata/sample.bin deleted file mode 100755 index 2b849d5727a..00000000000 Binary files a/internal/pprof/report/testdata/sample.bin and /dev/null differ diff --git a/internal/pprof/report/testdata/sample.cpu b/internal/pprof/report/testdata/sample.cpu deleted file mode 100644 index 50eea72ea2d..00000000000 Binary files a/internal/pprof/report/testdata/sample.cpu and /dev/null differ diff --git a/internal/pprof/report/testdata/sample/sample.go b/internal/pprof/report/testdata/sample/sample.go deleted file mode 100644 index 196a7ea6c62..00000000000 --- a/internal/pprof/report/testdata/sample/sample.go +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright 2017 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// sample program that is used to produce some of the files in -// pprof/internal/report/testdata. -package main - -import ( - "flag" - "fmt" - "log" - "math" - "os" - "runtime/pprof" -) - -var cpuProfile = flag.String("cpuprofile", "", "where to write cpu profile") - -func main() { - flag.Parse() - f, err := os.Create(*cpuProfile) - if err != nil { - log.Fatal("could not create CPU profile: ", err) - } - if err := pprof.StartCPUProfile(f); err != nil { - log.Fatal("could not start CPU profile: ", err) - } - defer pprof.StopCPUProfile() - busyLoop() -} - -func busyLoop() { - m := make(map[int]int) - for i := 0; i < 1000000; i++ { - m[i] = i + 10 - } - var sum float64 - for i := 0; i < 100; i++ { - for _, v := range m { - sum += math.Abs(float64(v)) - } - } - fmt.Println("Sum", sum) -} diff --git a/internal/pprof/report/testdata/source.dot b/internal/pprof/report/testdata/source.dot deleted file mode 100644 index e0dadb1bc2e..00000000000 --- a/internal/pprof/report/testdata/source.dot +++ /dev/null @@ -1,17 +0,0 @@ -digraph "unnamed" { -node [style=filled fillcolor="#f8f8f8"] -subgraph cluster_L { "Duration: 10s, Total samples = 11111 " [shape=box fontsize=16 label="Duration: 10s, Total samples = 11111 \lShowing nodes accounting for 11111, 100% of 11111 total\l\lSee https://git.io/JfYMW for how to read the graph\l"] } -N1 [label="tee\nsource2:8\n10000 (90.00%)" id="node1" fontsize=24 shape=box tooltip="tee testdata/source2:8 (10000)" color="#b20500" fillcolor="#edd6d5"] -N2 [label="main\nsource1:2\n1 (0.009%)\nof 11111 (100%)" id="node2" fontsize=9 shape=box tooltip="main testdata/source1:2 (11111)" color="#b20000" fillcolor="#edd5d5"] -N3 [label="tee\nsource2:2\n1000 (9.00%)\nof 11000 (99.00%)" id="node3" fontsize=14 shape=box tooltip="tee testdata/source2:2 (11000)" color="#b20000" fillcolor="#edd5d5"] -N4 [label="tee\nsource2:8\n100 (0.9%)" id="node4" fontsize=10 shape=box tooltip="tee testdata/source2:8 (100)" color="#b2b0aa" fillcolor="#edecec"] -N5 [label="bar\nsource1:10\n10 (0.09%)" id="node5" fontsize=9 shape=box tooltip="bar testdata/source1:10 
(10)" color="#b2b2b1" fillcolor="#ededed"] -N6 [label="bar\nsource1:10\n0 of 100 (0.9%)" id="node6" fontsize=8 shape=box tooltip="bar testdata/source1:10 (100)" color="#b2b0aa" fillcolor="#edecec"] -N7 [label="foo\nsource1:4\n0 of 10 (0.09%)" id="node7" fontsize=8 shape=box tooltip="foo testdata/source1:4 (10)" color="#b2b2b1" fillcolor="#ededed"] -N2 -> N3 [label=" 11000" weight=100 penwidth=5 color="#b20000" tooltip="main testdata/source1:2 -> tee testdata/source2:2 (11000)" labeltooltip="main testdata/source1:2 -> tee testdata/source2:2 (11000)"] -N3 -> N1 [label=" 10000" weight=91 penwidth=5 color="#b20500" tooltip="tee testdata/source2:2 -> tee testdata/source2:8 (10000)" labeltooltip="tee testdata/source2:2 -> tee testdata/source2:8 (10000)"] -N6 -> N4 [label=" 100" color="#b2b0aa" tooltip="bar testdata/source1:10 -> tee testdata/source2:8 (100)" labeltooltip="bar testdata/source1:10 -> tee testdata/source2:8 (100)"] -N2 -> N6 [label=" 100" color="#b2b0aa" tooltip="main testdata/source1:2 -> bar testdata/source1:10 (100)" labeltooltip="main testdata/source1:2 -> bar testdata/source1:10 (100)"] -N7 -> N5 [label=" 10" color="#b2b2b1" tooltip="foo testdata/source1:4 -> bar testdata/source1:10 (10)" labeltooltip="foo testdata/source1:4 -> bar testdata/source1:10 (10)"] -N2 -> N7 [label=" 10" color="#b2b2b1" tooltip="main testdata/source1:2 -> foo testdata/source1:4 (10)" labeltooltip="main testdata/source1:2 -> foo testdata/source1:4 (10)"] -} diff --git a/internal/pprof/report/testdata/source.rpt b/internal/pprof/report/testdata/source.rpt deleted file mode 100644 index 9ec7b3b0867..00000000000 --- a/internal/pprof/report/testdata/source.rpt +++ /dev/null @@ -1,49 +0,0 @@ -Total: 11111 -ROUTINE ======================== bar in testdata/source1 - 10 110 (flat, cum) 0.99% of Total - . . 5:source1 line 5; - . . 6:source1 line 6; - . . 7:source1 line 7; - . . 8:source1 line 8; - . . 9:source1 line 9; - 10 110 10:source1 line 10; - . . 11:source1 line 11; - . . 12:source1 line 12; - . . 13:source1 line 13; - . . 14:source1 line 14; - . . 15:source1 line 15; -ROUTINE ======================== foo in testdata/source1 - 0 10 (flat, cum) 0.09% of Total - . . 1:source1 line 1; - . . 2:source1 line 2; - . . 3:source1 line 3; - . 10 4:source1 line 4; - . . 5:source1 line 5; - . . 6:source1 line 6; - . . 7:source1 line 7; - . . 8:source1 line 8; - . . 9:source1 line 9; -ROUTINE ======================== main in testdata/source1 - 1 11111 (flat, cum) 100% of Total - . . 1:source1 line 1; - 1 11111 2:source1 line 2; - . . 3:source1 line 3; - . . 4:source1 line 4; - . . 5:source1 line 5; - . . 6:source1 line 6; - . . 7:source1 line 7; -ROUTINE ======================== tee in testdata/source2 - 11100 21100 (flat, cum) 189.90% of Total - . . 1:source2 line 1; - 1000 11000 2:source2 line 2; - . . 3:source2 line 3; - . . 4:source2 line 4; - . . 5:source2 line 5; - . . 6:source2 line 6; - . . 7:source2 line 7; - 10100 10100 8:source2 line 8; - . . 9:source2 line 9; - . . 10:source2 line 10; - . . 11:source2 line 11; - . . 12:source2 line 12; - . . 
13:source2 line 13; diff --git a/internal/pprof/report/testdata/source1 b/internal/pprof/report/testdata/source1 deleted file mode 100644 index 70e3fc3397a..00000000000 --- a/internal/pprof/report/testdata/source1 +++ /dev/null @@ -1,19 +0,0 @@ -source1 line 1; -source1 line 2; -source1 line 3; -source1 line 4; -source1 line 5; -source1 line 6; -source1 line 7; -source1 line 8; -source1 line 9; -source1 line 10; -source1 line 11; -source1 line 12; -source1 line 13; -source1 line 14; -source1 line 15; -source1 line 16; -source1 line 17; -source1 line 18; - diff --git a/internal/pprof/report/testdata/source2 b/internal/pprof/report/testdata/source2 deleted file mode 100644 index 54f99ccac6b..00000000000 --- a/internal/pprof/report/testdata/source2 +++ /dev/null @@ -1,19 +0,0 @@ -source2 line 1; -source2 line 2; -source2 line 3; -source2 line 4; -source2 line 5; -source2 line 6; -source2 line 7; -source2 line 8; -source2 line 9; -source2 line 10; -source2 line 11; -source2 line 12; -source2 line 13; -source2 line 14; -source2 line 15; -source2 line 16; -source2 line 17; -source2 line 18; - diff --git a/internal/pprof/symbolizer/symbolizer.go b/internal/pprof/symbolizer/symbolizer.go deleted file mode 100644 index a8ac6d4ac6e..00000000000 --- a/internal/pprof/symbolizer/symbolizer.go +++ /dev/null @@ -1,361 +0,0 @@ -// Copyright 2014 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Package symbolizer provides a routine to populate a profile with -// symbol, file and line number information. It relies on the -// addr2liner and demangle packages to do the actual work. -package symbolizer - -import ( - "fmt" - "io/ioutil" - "net/http" - "net/url" - "path/filepath" - "strings" - - "github.com/parca-dev/parca/internal/pprof/binutils" - "github.com/parca-dev/parca/internal/pprof/plugin" - "github.com/parca-dev/parca/internal/pprof/symbolz" - "github.com/google/pprof/profile" - "github.com/ianlancetaylor/demangle" -) - -// Symbolizer implements the plugin.Symbolize interface. -type Symbolizer struct { - Obj plugin.ObjTool - UI plugin.UI - Transport http.RoundTripper -} - -// test taps for dependency injection -var symbolzSymbolize = symbolz.Symbolize -var localSymbolize = doLocalSymbolize -var demangleFunction = Demangle - -// Symbolize attempts to symbolize profile p. First uses binutils on -// local binaries; if the source is a URL it attempts to get any -// missed entries using symbolz. 
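The Symbolize method deleted below drives its behavior off a colon-separated mode string (for example "fastlocal:demangle=none"). A condensed sketch of that option parsing, using our own struct and function names and omitting the none/no early return and the error reporting the real method performs:

```go
package main

import (
	"fmt"
	"strings"
)

// symbolizeOpts captures which symbolization paths a mode string enables.
type symbolizeOpts struct {
	remote, local, fast, force bool
	demangler                  string
}

// parseMode interprets the colon-separated options the deleted Symbolize
// accepts: local, fastlocal, remote, force, and demangle=<mode>.
func parseMode(mode string) symbolizeOpts {
	o := symbolizeOpts{remote: true, local: true} // default: try both
	for _, opt := range strings.Split(strings.ToLower(mode), ":") {
		switch opt {
		case "", "default":
			// keep defaults
		case "local":
			o.remote, o.local = false, true
		case "fastlocal":
			o.remote, o.local, o.fast = false, true, true
		case "remote":
			o.remote, o.local = true, false
		case "force":
			o.force = true
		default:
			if d := strings.TrimPrefix(opt, "demangle="); d != opt {
				o.demangler, o.force = d, true
			}
		}
	}
	return o
}

func main() {
	fmt.Printf("%+v\n", parseMode("remote:demangle=full"))
}
```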
-func (s *Symbolizer) Symbolize(mode string, sources plugin.MappingSources, p *profile.Profile) error { - remote, local, fast, force, demanglerMode := true, true, false, false, "" - for _, o := range strings.Split(strings.ToLower(mode), ":") { - switch o { - case "": - continue - case "none", "no": - return nil - case "local": - remote, local = false, true - case "fastlocal": - remote, local, fast = false, true, true - case "remote": - remote, local = true, false - case "force": - force = true - default: - switch d := strings.TrimPrefix(o, "demangle="); d { - case "full", "none", "templates": - demanglerMode = d - force = true - continue - case "default": - continue - } - s.UI.PrintErr("ignoring unrecognized symbolization option: " + mode) - s.UI.PrintErr("expecting -symbolize=[local|fastlocal|remote|none][:force][:demangle=[none|full|templates|default]") - } - } - - var err error - if local { - // Symbolize locally using binutils. - if err = localSymbolize(p, fast, force, s.Obj, s.UI); err != nil { - s.UI.PrintErr("local symbolization: " + err.Error()) - } - } - if remote { - post := func(source, post string) ([]byte, error) { - return postURL(source, post, s.Transport) - } - if err = symbolzSymbolize(p, force, sources, post, s.UI); err != nil { - return err // Ran out of options. - } - } - - demangleFunction(p, force, demanglerMode) - return nil -} - -// postURL issues a POST to a URL over HTTP. -func postURL(source, post string, tr http.RoundTripper) ([]byte, error) { - client := &http.Client{ - Transport: tr, - } - resp, err := client.Post(source, "application/octet-stream", strings.NewReader(post)) - if err != nil { - return nil, fmt.Errorf("http post %s: %v", source, err) - } - defer resp.Body.Close() - if resp.StatusCode != http.StatusOK { - return nil, fmt.Errorf("http post %s: %v", source, statusCodeError(resp)) - } - return ioutil.ReadAll(resp.Body) -} - -func statusCodeError(resp *http.Response) error { - if resp.Header.Get("X-Go-Pprof") != "" && strings.Contains(resp.Header.Get("Content-Type"), "text/plain") { - // error is from pprof endpoint - if body, err := ioutil.ReadAll(resp.Body); err == nil { - return fmt.Errorf("server response: %s - %s", resp.Status, body) - } - } - return fmt.Errorf("server response: %s", resp.Status) -} - -// doLocalSymbolize adds symbol and line number information to all locations -// in a profile. mode enables some options to control -// symbolization. -func doLocalSymbolize(prof *profile.Profile, fast, force bool, obj plugin.ObjTool, ui plugin.UI) error { - if fast { - if bu, ok := obj.(*binutils.Binutils); ok { - bu.SetFastSymbolization(true) - } - } - - mt, err := newMapping(prof, obj, ui, force) - if err != nil { - return err - } - defer mt.close() - - functions := make(map[profile.Function]*profile.Function) - for _, l := range mt.prof.Location { - m := l.Mapping - segment := mt.segments[m] - if segment == nil { - // Nothing to do. - continue - } - - stack, err := segment.SourceLine(l.Address) - if err != nil || len(stack) == 0 { - // No answers from addr2line. 
- continue - } - - l.Line = make([]profile.Line, len(stack)) - l.IsFolded = false - for i, frame := range stack { - if frame.Func != "" { - m.HasFunctions = true - } - if frame.File != "" { - m.HasFilenames = true - } - if frame.Line != 0 { - m.HasLineNumbers = true - } - f := &profile.Function{ - Name: frame.Func, - SystemName: frame.Func, - Filename: frame.File, - } - if fp := functions[*f]; fp != nil { - f = fp - } else { - functions[*f] = f - f.ID = uint64(len(mt.prof.Function)) + 1 - mt.prof.Function = append(mt.prof.Function, f) - } - l.Line[i] = profile.Line{ - Function: f, - Line: int64(frame.Line), - } - } - - if len(stack) > 0 { - m.HasInlineFrames = true - } - } - - return nil -} - -// Demangle updates the function names in a profile with demangled C++ -// names, simplified according to demanglerMode. If force is set, -// overwrite any names that appear already demangled. -func Demangle(prof *profile.Profile, force bool, demanglerMode string) { - if force { - // Remove the current demangled names to force demangling - for _, f := range prof.Function { - if f.Name != "" && f.SystemName != "" { - f.Name = f.SystemName - } - } - } - - var options []demangle.Option - switch demanglerMode { - case "": // demangled, simplified: no parameters, no templates, no return type - options = []demangle.Option{demangle.NoParams, demangle.NoTemplateParams} - case "templates": // demangled, simplified: no parameters, no return type - options = []demangle.Option{demangle.NoParams} - case "full": - options = []demangle.Option{demangle.NoClones} - case "none": // no demangling - return - } - - // Copy the options because they may be updated by the call. - o := make([]demangle.Option, len(options)) - for _, fn := range prof.Function { - if fn.Name != "" && fn.SystemName != fn.Name { - continue // Already demangled. - } - copy(o, options) - if demangled := demangle.Filter(fn.SystemName, o...); demangled != fn.SystemName { - fn.Name = demangled - continue - } - // Could not demangle. Apply heuristics in case the name is - // already demangled. - name := fn.SystemName - if looksLikeDemangledCPlusPlus(name) { - if demanglerMode == "" || demanglerMode == "templates" { - name = removeMatching(name, '(', ')') - } - if demanglerMode == "" { - name = removeMatching(name, '<', '>') - } - } - fn.Name = name - } -} - -// looksLikeDemangledCPlusPlus is a heuristic to decide if a name is -// the result of demangling C++. If so, further heuristics will be -// applied to simplify the name. -func looksLikeDemangledCPlusPlus(demangled string) bool { - if strings.Contains(demangled, ".<") { // Skip java names of the form "class." - return false - } - return strings.ContainsAny(demangled, "<>[]") || strings.Contains(demangled, "::") -} - -// removeMatching removes nested instances of start..end from name. -func removeMatching(name string, start, end byte) string { - s := string(start) + string(end) - var nesting, first, current int - for index := strings.IndexAny(name[current:], s); index != -1; index = strings.IndexAny(name[current:], s) { - switch current += index; name[current] { - case start: - nesting++ - if nesting == 1 { - first = current - } - case end: - nesting-- - switch { - case nesting < 0: - return name // Mismatch, abort - case nesting == 0: - name = name[:first] + name[current+1:] - current = first - 1 - } - } - current++ - } - return name -} - -// newMapping creates a mappingTable for a profile. 
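Demangle's fallback heuristic, removeMatching, strips balanced parameter and template-argument groups from names that already look demangled. A rough sketch of the same simplification with our own helper name; unlike removeMatching it keeps unmatched closing delimiters instead of aborting:

```go
package main

import (
	"fmt"
	"strings"
)

// stripNested drops balanced start..end groups, including nested ones, from a
// demangled C++ name, which is the simplification applied when demangling
// fails but the name already looks demangled.
func stripNested(name string, start, end byte) string {
	var b strings.Builder
	depth := 0
	for i := 0; i < len(name); i++ {
		switch name[i] {
		case start:
			depth++
		case end:
			if depth > 0 {
				depth--
				continue
			}
			b.WriteByte(name[i]) // unmatched closer: keep it
		default:
			if depth == 0 {
				b.WriteByte(name[i])
			}
		}
	}
	return b.String()
}

func main() {
	name := "ns::Foo<int, std::vector<int>>::bar(char const*)"
	name = stripNested(name, '(', ')') // drop parameters
	name = stripNested(name, '<', '>') // drop template arguments
	fmt.Println(name)                  // ns::Foo::bar
}
```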
-func newMapping(prof *profile.Profile, obj plugin.ObjTool, ui plugin.UI, force bool) (*mappingTable, error) { - mt := &mappingTable{ - prof: prof, - segments: make(map[*profile.Mapping]plugin.ObjFile), - } - - // Identify used mappings - mappings := make(map[*profile.Mapping]bool) - for _, l := range prof.Location { - mappings[l.Mapping] = true - } - - missingBinaries := false - for midx, m := range prof.Mapping { - if !mappings[m] { - continue - } - - // Do not attempt to re-symbolize a mapping that has already been symbolized. - if !force && (m.HasFunctions || m.HasFilenames || m.HasLineNumbers) { - continue - } - - if m.File == "" { - if midx == 0 { - ui.PrintErr("Main binary filename not available.") - continue - } - missingBinaries = true - continue - } - - // Skip well-known system mappings - if m.Unsymbolizable() { - continue - } - - // Skip mappings pointing to a source URL - if m.BuildID == "" { - if u, err := url.Parse(m.File); err == nil && u.IsAbs() && strings.Contains(strings.ToLower(u.Scheme), "http") { - continue - } - } - - name := filepath.Base(m.File) - f, err := obj.Open(m.File, m.Start, m.Limit, m.Offset) - if err != nil { - ui.PrintErr("Local symbolization failed for ", name, ": ", err) - missingBinaries = true - continue - } - if fid := f.BuildID(); m.BuildID != "" && fid != "" && fid != m.BuildID { - ui.PrintErr("Local symbolization failed for ", name, ": build ID mismatch") - f.Close() - continue - } - - mt.segments[m] = f - } - if missingBinaries { - ui.PrintErr("Some binary filenames not available. Symbolization may be incomplete.\n" + - "Try setting PPROF_BINARY_PATH to the search path for local binaries.") - } - return mt, nil -} - -// mappingTable contains the mechanisms for symbolization of a -// profile. -type mappingTable struct { - prof *profile.Profile - segments map[*profile.Mapping]plugin.ObjFile -} - -// Close releases any external processes being used for the mapping. -func (mt *mappingTable) close() { - for _, segment := range mt.segments { - segment.Close() - } -} diff --git a/internal/pprof/symbolizer/symbolizer_test.go b/internal/pprof/symbolizer/symbolizer_test.go deleted file mode 100644 index f875c7e8c7e..00000000000 --- a/internal/pprof/symbolizer/symbolizer_test.go +++ /dev/null @@ -1,300 +0,0 @@ -// Copyright 2014 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
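newMapping only opens binaries for mappings that actually need local symbolization. A small predicate capturing that filtering, built on the public github.com/google/pprof/profile types; the helper name is ours and it leaves out the main-binary and source-URL special cases:

```go
package main

import (
	"fmt"

	"github.com/google/pprof/profile"
)

// needsLocalSymbolization reports whether a mapping should be opened for
// local symbolization: skip mappings without a backing file, mappings that
// are already symbolized (unless forced), and well-known unsymbolizable
// system mappings such as [vdso] or [vsyscall].
func needsLocalSymbolization(m *profile.Mapping, force bool) bool {
	if m == nil || m.File == "" {
		return false
	}
	if !force && (m.HasFunctions || m.HasFilenames || m.HasLineNumbers) {
		return false // already symbolized
	}
	if m.Unsymbolizable() {
		return false
	}
	return true
}

func main() {
	m := &profile.Mapping{File: "/bin/app", HasFunctions: true}
	fmt.Println(needsLocalSymbolization(m, false)) // false: already has functions
	fmt.Println(needsLocalSymbolization(m, true))  // true: forced re-symbolization
}
```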
- -package symbolizer - -import ( - "fmt" - "regexp" - "sort" - "strings" - "testing" - - "github.com/parca-dev/parca/internal/pprof/plugin" - "github.com/parca-dev/parca/internal/pprof/proftest" - "github.com/google/pprof/profile" -) - -var testM = []*profile.Mapping{ - { - ID: 1, - Start: 0x1000, - Limit: 0x5000, - File: "mapping", - }, -} - -var testL = []*profile.Location{ - { - ID: 1, - Mapping: testM[0], - Address: 1000, - }, - { - ID: 2, - Mapping: testM[0], - Address: 2000, - }, - { - ID: 3, - Mapping: testM[0], - Address: 3000, - }, - { - ID: 4, - Mapping: testM[0], - Address: 4000, - }, - { - ID: 5, - Mapping: testM[0], - Address: 5000, - }, -} - -var testProfile = profile.Profile{ - DurationNanos: 10e9, - SampleType: []*profile.ValueType{ - {Type: "cpu", Unit: "cycles"}, - }, - Sample: []*profile.Sample{ - { - Location: []*profile.Location{testL[0]}, - Value: []int64{1}, - }, - { - Location: []*profile.Location{testL[1], testL[0]}, - Value: []int64{10}, - }, - { - Location: []*profile.Location{testL[2], testL[0]}, - Value: []int64{100}, - }, - { - Location: []*profile.Location{testL[3], testL[0]}, - Value: []int64{1}, - }, - { - Location: []*profile.Location{testL[4], testL[3], testL[0]}, - Value: []int64{10000}, - }, - }, - Location: testL, - Mapping: testM, - PeriodType: &profile.ValueType{Type: "cpu", Unit: "milliseconds"}, - Period: 10, -} - -func TestSymbolization(t *testing.T) { - sSym := symbolzSymbolize - lSym := localSymbolize - defer func() { - symbolzSymbolize = sSym - localSymbolize = lSym - demangleFunction = Demangle - }() - symbolzSymbolize = symbolzMock - localSymbolize = localMock - demangleFunction = demangleMock - - type testcase struct { - mode string - wantComment string - } - - s := Symbolizer{ - Obj: mockObjTool{}, - UI: &proftest.TestUI{T: t}, - } - for i, tc := range []testcase{ - { - "local", - "local=[]", - }, - { - "fastlocal", - "local=[fast]", - }, - { - "remote", - "symbolz=[]", - }, - { - "", - "local=[]:symbolz=[]", - }, - { - "demangle=none", - "demangle=[none]:force:local=[force]:symbolz=[force]", - }, - { - "remote:demangle=full", - "demangle=[full]:force:symbolz=[force]", - }, - { - "local:demangle=templates", - "demangle=[templates]:force:local=[force]", - }, - { - "force:remote", - "force:symbolz=[force]", - }, - } { - prof := testProfile.Copy() - if err := s.Symbolize(tc.mode, nil, prof); err != nil { - t.Errorf("symbolize #%d: %v", i, err) - continue - } - sort.Strings(prof.Comments) - if got, want := strings.Join(prof.Comments, ":"), tc.wantComment; got != want { - t.Errorf("%q: got %s, want %s", tc.mode, got, want) - continue - } - } -} - -func symbolzMock(p *profile.Profile, force bool, sources plugin.MappingSources, syms func(string, string) ([]byte, error), ui plugin.UI) error { - var args []string - if force { - args = append(args, "force") - } - p.Comments = append(p.Comments, "symbolz=["+strings.Join(args, ",")+"]") - return nil -} - -func localMock(p *profile.Profile, fast, force bool, obj plugin.ObjTool, ui plugin.UI) error { - var args []string - if fast { - args = append(args, "fast") - } - if force { - args = append(args, "force") - } - p.Comments = append(p.Comments, "local=["+strings.Join(args, ",")+"]") - return nil -} - -func demangleMock(p *profile.Profile, force bool, mode string) { - if force { - p.Comments = append(p.Comments, "force") - } - if mode != "" { - p.Comments = append(p.Comments, "demangle=["+mode+"]") - } -} - -func TestLocalSymbolization(t *testing.T) { - prof := testProfile.Copy() - - if 
prof.HasFunctions() { - t.Error("unexpected function names") - } - if prof.HasFileLines() { - t.Error("unexpected filenames or line numbers") - } - - b := mockObjTool{} - if err := localSymbolize(prof, false, false, b, &proftest.TestUI{T: t}); err != nil { - t.Fatalf("localSymbolize(): %v", err) - } - - for _, loc := range prof.Location { - if err := checkSymbolizedLocation(loc.Address, loc.Line); err != nil { - t.Errorf("location %d: %v", loc.Address, err) - } - } - if !prof.HasFunctions() { - t.Error("missing function names") - } - if !prof.HasFileLines() { - t.Error("missing filenames or line numbers") - } -} - -func checkSymbolizedLocation(a uint64, got []profile.Line) error { - want, ok := mockAddresses[a] - if !ok { - return fmt.Errorf("unexpected address") - } - if len(want) != len(got) { - return fmt.Errorf("want len %d, got %d", len(want), len(got)) - } - - for i, w := range want { - g := got[i] - if g.Function.Name != w.Func { - return fmt.Errorf("want function: %q, got %q", w.Func, g.Function.Name) - } - if g.Function.Filename != w.File { - return fmt.Errorf("want filename: %q, got %q", w.File, g.Function.Filename) - } - if g.Line != int64(w.Line) { - return fmt.Errorf("want lineno: %d, got %d", w.Line, g.Line) - } - } - return nil -} - -var mockAddresses = map[uint64][]plugin.Frame{ - 1000: {frame("fun11", "file11.src", 10)}, - 2000: {frame("fun21", "file21.src", 20), frame("fun22", "file22.src", 20)}, - 3000: {frame("fun31", "file31.src", 30), frame("fun32", "file32.src", 30), frame("fun33", "file33.src", 30)}, - 4000: {frame("fun41", "file41.src", 40), frame("fun42", "file42.src", 40), frame("fun43", "file43.src", 40), frame("fun44", "file44.src", 40)}, - 5000: {frame("fun51", "file51.src", 50), frame("fun52", "file52.src", 50), frame("fun53", "file53.src", 50), frame("fun54", "file54.src", 50), frame("fun55", "file55.src", 50)}, -} - -func frame(fname, file string, line int) plugin.Frame { - return plugin.Frame{ - Func: fname, - File: file, - Line: line} -} - -type mockObjTool struct{} - -func (mockObjTool) Open(file string, start, limit, offset uint64) (plugin.ObjFile, error) { - return mockObjFile{frames: mockAddresses}, nil -} - -func (mockObjTool) Disasm(file string, start, end uint64, intelSyntax bool) ([]plugin.Inst, error) { - return nil, fmt.Errorf("disassembly not supported") -} - -type mockObjFile struct { - frames map[uint64][]plugin.Frame -} - -func (mockObjFile) Name() string { - return "" -} - -func (mockObjFile) ObjAddr(addr uint64) (uint64, error) { - return addr, nil -} - -func (mockObjFile) BuildID() string { - return "" -} - -func (mf mockObjFile) SourceLine(addr uint64) ([]plugin.Frame, error) { - return mf.frames[addr], nil -} - -func (mockObjFile) Symbols(r *regexp.Regexp, addr uint64) ([]*plugin.Sym, error) { - return []*plugin.Sym{}, nil -} - -func (mockObjFile) Close() error { - return nil -} diff --git a/internal/pprof/symbolz/symbolz.go b/internal/pprof/symbolz/symbolz.go deleted file mode 100644 index d8eb0cb4479..00000000000 --- a/internal/pprof/symbolz/symbolz.go +++ /dev/null @@ -1,200 +0,0 @@ -// Copyright 2014 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Package symbolz symbolizes a profile using the output from the symbolz -// service. -package symbolz - -import ( - "bytes" - "fmt" - "io" - "net/url" - "path" - "regexp" - "strconv" - "strings" - - "github.com/parca-dev/parca/internal/pprof/plugin" - "github.com/google/pprof/profile" -) - -var ( - symbolzRE = regexp.MustCompile(`(0x[[:xdigit:]]+)\s+(.*)`) -) - -// Symbolize symbolizes profile p by parsing data returned by a symbolz -// handler. syms receives the symbolz query (hex addresses separated by '+') -// and returns the symbolz output in a string. If force is false, it will only -// symbolize locations from mappings not already marked as HasFunctions. Never -// attempts symbolization of addresses from unsymbolizable system -// mappings as those may look negative - e.g. "[vsyscall]". -func Symbolize(p *profile.Profile, force bool, sources plugin.MappingSources, syms func(string, string) ([]byte, error), ui plugin.UI) error { - for _, m := range p.Mapping { - if !force && m.HasFunctions { - // Only check for HasFunctions as symbolz only populates function names. - continue - } - // Skip well-known system mappings. - if m.Unsymbolizable() { - continue - } - mappingSources := sources[m.File] - if m.BuildID != "" { - mappingSources = append(mappingSources, sources[m.BuildID]...) - } - for _, source := range mappingSources { - if symz := symbolz(source.Source); symz != "" { - if err := symbolizeMapping(symz, int64(source.Start)-int64(m.Start), syms, m, p); err != nil { - return err - } - m.HasFunctions = true - break - } - } - } - - return nil -} - -// hasGperftoolsSuffix checks whether path ends with one of the suffixes listed in -// pprof_remote_servers.html from the gperftools distribution -func hasGperftoolsSuffix(path string) bool { - suffixes := []string{ - "/pprof/heap", - "/pprof/growth", - "/pprof/profile", - "/pprof/pmuprofile", - "/pprof/contention", - } - for _, s := range suffixes { - if strings.HasSuffix(path, s) { - return true - } - } - return false -} - -// symbolz returns the corresponding symbolz source for a profile URL. -func symbolz(source string) string { - if url, err := url.Parse(source); err == nil && url.Host != "" { - // All paths in the net/http/pprof Go package contain /debug/pprof/ - if strings.Contains(url.Path, "/debug/pprof/") || hasGperftoolsSuffix(url.Path) { - url.Path = path.Clean(url.Path + "/../symbol") - } else { - url.Path = "/symbolz" - } - url.RawQuery = "" - return url.String() - } - - return "" -} - -// symbolizeMapping symbolizes locations belonging to a Mapping by querying -// a symbolz handler. An offset is applied to all addresses to take care of -// normalization occurred for merged Mappings. -func symbolizeMapping(source string, offset int64, syms func(string, string) ([]byte, error), m *profile.Mapping, p *profile.Profile) error { - // Construct query of addresses to symbolize. - var a []string - for _, l := range p.Location { - if l.Mapping == m && l.Address != 0 && len(l.Line) == 0 { - // Compensate for normalization. 
- addr, overflow := adjust(l.Address, offset) - if overflow { - return fmt.Errorf("cannot adjust address %d by %d, it would overflow (mapping %v)", l.Address, offset, l.Mapping) - } - a = append(a, fmt.Sprintf("%#x", addr)) - } - } - - if len(a) == 0 { - // No addresses to symbolize. - return nil - } - - lines := make(map[uint64]profile.Line) - functions := make(map[string]*profile.Function) - - b, err := syms(source, strings.Join(a, "+")) - if err != nil { - return err - } - - buf := bytes.NewBuffer(b) - for { - l, err := buf.ReadString('\n') - - if err != nil { - if err == io.EOF { - break - } - return err - } - - if symbol := symbolzRE.FindStringSubmatch(l); len(symbol) == 3 { - origAddr, err := strconv.ParseUint(symbol[1], 0, 64) - if err != nil { - return fmt.Errorf("unexpected parse failure %s: %v", symbol[1], err) - } - // Reapply offset expected by the profile. - addr, overflow := adjust(origAddr, -offset) - if overflow { - return fmt.Errorf("cannot adjust symbolz address %d by %d, it would overflow", origAddr, -offset) - } - - name := symbol[2] - fn := functions[name] - if fn == nil { - fn = &profile.Function{ - ID: uint64(len(p.Function) + 1), - Name: name, - SystemName: name, - } - functions[name] = fn - p.Function = append(p.Function, fn) - } - - lines[addr] = profile.Line{Function: fn} - } - } - - for _, l := range p.Location { - if l.Mapping != m { - continue - } - if line, ok := lines[l.Address]; ok { - l.Line = []profile.Line{line} - } - } - - return nil -} - -// adjust shifts the specified address by the signed offset. It returns the -// adjusted address. It signals that the address cannot be adjusted without an -// overflow by returning true in the second return value. -func adjust(addr uint64, offset int64) (uint64, bool) { - adj := uint64(int64(addr) + offset) - if offset < 0 { - if adj >= addr { - return 0, true - } - } else { - if adj < addr { - return 0, true - } - } - return adj, false -} diff --git a/internal/pprof/symbolz/symbolz_test.go b/internal/pprof/symbolz/symbolz_test.go deleted file mode 100644 index 190fa4bf37c..00000000000 --- a/internal/pprof/symbolz/symbolz_test.go +++ /dev/null @@ -1,169 +0,0 @@ -// Copyright 2014 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
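The adjust helper above applies a signed offset to a uint64 address and flags wrap-around in either direction. A self-contained sketch of the same check, exercised with a couple of the boundary cases from the deleted TestAdjust (the helper name is ours):

```go
package main

import (
	"fmt"
	"math"
)

// adjustAddr adds a signed offset to an address and reports overflow: after a
// non-negative offset the result must not be smaller than the input, and
// after a negative offset it must not be larger.
func adjustAddr(addr uint64, offset int64) (uint64, bool) {
	adj := uint64(int64(addr) + offset)
	if offset < 0 {
		if adj >= addr {
			return 0, true // wrapped below zero
		}
	} else if adj < addr {
		return 0, true // wrapped past the maximum
	}
	return adj, false
}

func main() {
	fmt.Println(adjustAddr(math.MaxUint64, 1))   // 0 true
	fmt.Println(adjustAddr(math.MaxUint64-1, 1)) // max uint64, false
	fmt.Println(adjustAddr(2, -3))               // 0 true
}
```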
- -package symbolz - -import ( - "fmt" - "math" - "strings" - "testing" - - "github.com/parca-dev/parca/internal/pprof/plugin" - "github.com/parca-dev/parca/internal/pprof/proftest" - "github.com/google/pprof/profile" -) - -func TestSymbolzURL(t *testing.T) { - for try, want := range map[string]string{ - "http://host:8000/profilez": "http://host:8000/symbolz", - "http://host:8000/profilez?seconds=5": "http://host:8000/symbolz", - "http://host:8000/profilez?seconds=5&format=proto": "http://host:8000/symbolz", - "http://host:8000/heapz?format=legacy": "http://host:8000/symbolz", - "http://host:8000/debug/pprof/profile": "http://host:8000/debug/pprof/symbol", - "http://host:8000/debug/pprof/profile?seconds=10": "http://host:8000/debug/pprof/symbol", - "http://host:8000/debug/pprof/heap": "http://host:8000/debug/pprof/symbol", - "http://some.host:8080/some/deeper/path/debug/pprof/endpoint?param=value": "http://some.host:8080/some/deeper/path/debug/pprof/symbol", - "http://host:8000/pprof/profile": "http://host:8000/pprof/symbol", - "http://host:8000/pprof/profile?seconds=15": "http://host:8000/pprof/symbol", - "http://host:8000/pprof/heap": "http://host:8000/pprof/symbol", - "http://host:8000/debug/pprof/block": "http://host:8000/debug/pprof/symbol", - "http://host:8000/debug/pprof/trace?seconds=5": "http://host:8000/debug/pprof/symbol", - "http://host:8000/debug/pprof/mutex": "http://host:8000/debug/pprof/symbol", - "http://host/whatever/pprof/heap": "http://host/whatever/pprof/symbol", - "http://host/whatever/pprof/growth": "http://host/whatever/pprof/symbol", - "http://host/whatever/pprof/profile": "http://host/whatever/pprof/symbol", - "http://host/whatever/pprof/pmuprofile": "http://host/whatever/pprof/symbol", - "http://host/whatever/pprof/contention": "http://host/whatever/pprof/symbol", - } { - if got := symbolz(try); got != want { - t.Errorf(`symbolz(%s)=%s, want "%s"`, try, got, want) - } - } -} - -func TestSymbolize(t *testing.T) { - s := plugin.MappingSources{ - "buildid": []struct { - Source string - Start uint64 - }{ - {Source: "http://localhost:80/profilez"}, - }, - } - - for _, hasFunctions := range []bool{false, true} { - for _, force := range []bool{false, true} { - p := testProfile(hasFunctions) - - if err := Symbolize(p, force, s, fetchSymbols, &proftest.TestUI{T: t}); err != nil { - t.Errorf("symbolz: %v", err) - continue - } - var wantSym, wantNoSym []*profile.Location - if force || !hasFunctions { - wantNoSym = p.Location[:1] - wantSym = p.Location[1:] - } else { - wantNoSym = p.Location - } - - if err := checkSymbolized(wantSym, true); err != nil { - t.Errorf("symbolz hasFns=%v force=%v: %v", hasFunctions, force, err) - } - if err := checkSymbolized(wantNoSym, false); err != nil { - t.Errorf("symbolz hasFns=%v force=%v: %v", hasFunctions, force, err) - } - } - } -} - -func testProfile(hasFunctions bool) *profile.Profile { - m := []*profile.Mapping{ - { - ID: 1, - Start: 0x1000, - Limit: 0x5000, - BuildID: "buildid", - HasFunctions: hasFunctions, - }, - } - p := &profile.Profile{ - Location: []*profile.Location{ - {ID: 1, Mapping: m[0], Address: 0x1000}, - {ID: 2, Mapping: m[0], Address: 0x2000}, - {ID: 3, Mapping: m[0], Address: 0x3000}, - {ID: 4, Mapping: m[0], Address: 0x4000}, - }, - Mapping: m, - } - - return p -} - -func checkSymbolized(locs []*profile.Location, wantSymbolized bool) error { - for _, loc := range locs { - if !wantSymbolized && len(loc.Line) != 0 { - return fmt.Errorf("unexpected symbolization for %#x: %v", loc.Address, loc.Line) - } - if 
wantSymbolized { - if len(loc.Line) != 1 { - return fmt.Errorf("expected symbolization for %#x: %v", loc.Address, loc.Line) - } - address := loc.Address - loc.Mapping.Start - if got, want := loc.Line[0].Function.Name, fmt.Sprintf("%#x", address); got != want { - return fmt.Errorf("symbolz %#x, got %s, want %s", address, got, want) - } - } - } - return nil -} - -func fetchSymbols(source, post string) ([]byte, error) { - var symbolz string - - addresses := strings.Split(post, "+") - // Do not symbolize the first symbol. - for _, address := range addresses[1:] { - symbolz += fmt.Sprintf("%s\t%s\n", address, address) - } - return []byte(symbolz), nil -} - -func TestAdjust(t *testing.T) { - for _, tc := range []struct { - addr uint64 - offset int64 - wantAdj uint64 - wantOverflow bool - }{{math.MaxUint64, 0, math.MaxUint64, false}, - {math.MaxUint64, 1, 0, true}, - {math.MaxUint64 - 1, 1, math.MaxUint64, false}, - {math.MaxUint64 - 1, 2, 0, true}, - {math.MaxInt64 + 1, math.MaxInt64, math.MaxUint64, false}, - {0, 0, 0, false}, - {0, -1, 0, true}, - {1, -1, 0, false}, - {2, -1, 1, false}, - {2, -2, 0, false}, - {2, -3, 0, true}, - {-math.MinInt64, math.MinInt64, 0, false}, - {-math.MinInt64 + 1, math.MinInt64, 1, false}, - {-math.MinInt64 - 1, math.MinInt64, 0, true}, - } { - if adj, overflow := adjust(tc.addr, tc.offset); adj != tc.wantAdj || overflow != tc.wantOverflow { - t.Errorf("adjust(%d, %d) = (%d, %t), want (%d, %t)", tc.addr, tc.offset, adj, overflow, tc.wantAdj, tc.wantOverflow) - } - } -} diff --git a/internal/pprof/transport/transport.go b/internal/pprof/transport/transport.go deleted file mode 100644 index 063f01134d1..00000000000 --- a/internal/pprof/transport/transport.go +++ /dev/null @@ -1,131 +0,0 @@ -// Copyright 2018 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Package transport provides a mechanism to send requests with https cert, -// key, and CA. -package transport - -import ( - "crypto/tls" - "crypto/x509" - "fmt" - "io/ioutil" - "net/http" - "sync" - - "github.com/parca-dev/parca/internal/pprof/plugin" -) - -type transport struct { - cert *string - key *string - ca *string - caCertPool *x509.CertPool - certs []tls.Certificate - initOnce sync.Once - initErr error -} - -const extraUsage = ` -tls_cert TLS client certificate file for fetching profile and symbols - -tls_key TLS private key file for fetching profile and symbols - -tls_ca TLS CA certs file for fetching profile and symbols` - -// New returns a round tripper for making requests with the -// specified cert, key, and ca. The flags tls_cert, tls_key, and tls_ca are -// added to the flagset to allow a user to specify the cert, key, and ca. If -// the flagset is nil, no flags will be added, and users will not be able to -// use these flags. 
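The transport package deleted here wires -tls_cert/-tls_key/-tls_ca flags into a TLS-aware round tripper. A minimal standard-library sketch of the same assembly; the function name is ours, it reads files with os.ReadFile, and it skips the flag plumbing and the https+insecure handling the deleted code also performs:

```go
package main

import (
	"crypto/tls"
	"crypto/x509"
	"fmt"
	"net/http"
	"os"
)

// newTLSTransport builds an http.Transport whose TLS config carries an
// optional client certificate pair and an optional CA pool, which is the core
// of what the deleted transport's initialize method sets up from its flags.
func newTLSTransport(certFile, keyFile, caFile string) (*http.Transport, error) {
	cfg := &tls.Config{}
	if certFile != "" && keyFile != "" {
		cert, err := tls.LoadX509KeyPair(certFile, keyFile)
		if err != nil {
			return nil, fmt.Errorf("load client cert/key: %w", err)
		}
		cfg.Certificates = []tls.Certificate{cert}
	}
	if caFile != "" {
		pem, err := os.ReadFile(caFile)
		if err != nil {
			return nil, fmt.Errorf("read CA file: %w", err)
		}
		pool := x509.NewCertPool()
		pool.AppendCertsFromPEM(pem)
		cfg.RootCAs = pool
	}
	return &http.Transport{
		Proxy:           http.ProxyFromEnvironment,
		TLSClientConfig: cfg,
	}, nil
}

func main() {
	// Empty paths yield a default transport; real paths are caller-supplied.
	tr, err := newTLSTransport("", "", "")
	if err != nil {
		panic(err)
	}
	_ = &http.Client{Transport: tr}
}
```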
-func New(flagset plugin.FlagSet) http.RoundTripper { - if flagset == nil { - return &transport{} - } - flagset.AddExtraUsage(extraUsage) - return &transport{ - cert: flagset.String("tls_cert", "", "TLS client certificate file for fetching profile and symbols"), - key: flagset.String("tls_key", "", "TLS private key file for fetching profile and symbols"), - ca: flagset.String("tls_ca", "", "TLS CA certs file for fetching profile and symbols"), - } -} - -// initialize uses the cert, key, and ca to initialize the certs -// to use these when making requests. -func (tr *transport) initialize() error { - var cert, key, ca string - if tr.cert != nil { - cert = *tr.cert - } - if tr.key != nil { - key = *tr.key - } - if tr.ca != nil { - ca = *tr.ca - } - - if cert != "" && key != "" { - tlsCert, err := tls.LoadX509KeyPair(cert, key) - if err != nil { - return fmt.Errorf("could not load certificate/key pair specified by -tls_cert and -tls_key: %v", err) - } - tr.certs = []tls.Certificate{tlsCert} - } else if cert == "" && key != "" { - return fmt.Errorf("-tls_key is specified, so -tls_cert must also be specified") - } else if cert != "" && key == "" { - return fmt.Errorf("-tls_cert is specified, so -tls_key must also be specified") - } - - if ca != "" { - caCertPool := x509.NewCertPool() - caCert, err := ioutil.ReadFile(ca) - if err != nil { - return fmt.Errorf("could not load CA specified by -tls_ca: %v", err) - } - caCertPool.AppendCertsFromPEM(caCert) - tr.caCertPool = caCertPool - } - - return nil -} - -// RoundTrip executes a single HTTP transaction, returning -// a Response for the provided Request. -func (tr *transport) RoundTrip(req *http.Request) (*http.Response, error) { - tr.initOnce.Do(func() { - tr.initErr = tr.initialize() - }) - if tr.initErr != nil { - return nil, tr.initErr - } - - tlsConfig := &tls.Config{ - RootCAs: tr.caCertPool, - Certificates: tr.certs, - } - - if req.URL.Scheme == "https+insecure" { - // Make shallow copy of request, and req.URL, so the request's URL can be - // modified. 
- r := *req - *r.URL = *req.URL - req = &r - tlsConfig.InsecureSkipVerify = true - req.URL.Scheme = "https" - } - - transport := http.Transport{ - Proxy: http.ProxyFromEnvironment, - TLSClientConfig: tlsConfig, - } - - return transport.RoundTrip(req) -} diff --git a/pkg/debuginfo/store.go b/pkg/debuginfo/store.go index e6a0d814304..42e131d3d61 100644 --- a/pkg/debuginfo/store.go +++ b/pkg/debuginfo/store.go @@ -27,6 +27,7 @@ import ( "github.com/go-kit/log" "github.com/go-kit/log/level" "github.com/google/pprof/profile" + "github.com/parca-dev/parca/pkg/symbol" "github.com/thanos-io/thanos/pkg/objstore" "github.com/thanos-io/thanos/pkg/objstore/client" "google.golang.org/grpc/codes" @@ -34,7 +35,6 @@ import ( "gopkg.in/yaml.v2" debuginfopb "github.com/parca-dev/parca/gen/proto/go/parca/debuginfo/v1alpha1" - "github.com/parca-dev/parca/internal/pprof/binutils" ) var ErrDebugInfoNotFound = errors.New("debug info not found") @@ -64,11 +64,11 @@ type Store struct { logger log.Logger cacheDir string - symbolizer *symbolizer + symbolizer *symbol.Symbolizer } // NewStore returns a new debug info store -func NewStore(logger log.Logger, config *Config) (*Store, error) { +func NewStore(logger log.Logger, symbolizer *symbol.Symbolizer, config *Config) (*Store, error) { cfg, err := yaml.Marshal(config.Bucket) if err != nil { return nil, fmt.Errorf("marshal content of object storage configuration: %w", err) @@ -90,13 +90,10 @@ func NewStore(logger log.Logger, config *Config) (*Store, error) { } return &Store{ - logger: log.With(logger, "component", "debuginfo"), - bucket: bucket, - cacheDir: cache.Directory, - symbolizer: &symbolizer{ - logger: log.With(logger, "component", "debuginfo/symbolizer"), - bu: &binutils.Binutils{}, - }, + logger: log.With(logger, "component", "debuginfo"), + bucket: bucket, + cacheDir: cache.Directory, + symbolizer: symbolizer, }, nil } @@ -197,8 +194,6 @@ func validateId(id string) error { return nil } -type addr2Line func(addr uint64) ([]profile.Line, error) - func (s *Store) Symbolize(ctx context.Context, m *profile.Mapping, locations ...*profile.Location) (map[*profile.Location][]profile.Line, error) { localObjPath, err := s.fetchObjectFile(ctx, m.BuildID) if err != nil { @@ -206,7 +201,7 @@ func (s *Store) Symbolize(ctx context.Context, m *profile.Mapping, locations ... return nil, fmt.Errorf("failed to symbolize mapping: %w", err) } - sourceLine, err := s.symbolizer.createAddr2Line(m, localObjPath) + liner, err := s.symbolizer.NewLiner(m, localObjPath) if err != nil { const msg = "failed to create add2LineFunc" level.Debug(s.logger).Log("msg", msg, "object", m.BuildID, "err", err) @@ -215,7 +210,7 @@ func (s *Store) Symbolize(ctx context.Context, m *profile.Mapping, locations ... 
locationLines := map[*profile.Location][]profile.Line{} for _, loc := range locations { - lines, err := sourceLine(loc.Address) + lines, err := liner.PCToLines(loc.Address) if err != nil { level.Debug(s.logger).Log("msg", "failed to extract source lines", "object", m.BuildID, "err", err) continue diff --git a/pkg/debuginfo/store_test.go b/pkg/debuginfo/store_test.go index bc5274147f8..cb6970d0733 100644 --- a/pkg/debuginfo/store_test.go +++ b/pkg/debuginfo/store_test.go @@ -25,6 +25,7 @@ import ( "github.com/go-kit/log" debuginfopb "github.com/parca-dev/parca/gen/proto/go/parca/debuginfo/v1alpha1" + "github.com/parca-dev/parca/pkg/symbol" "github.com/stretchr/testify/require" "github.com/thanos-io/thanos/pkg/objstore/client" "github.com/thanos-io/thanos/pkg/objstore/filesystem" @@ -40,20 +41,24 @@ func TestStore(t *testing.T) { require.NoError(t, err) defer os.RemoveAll(cacheDir) - s, err := NewStore(log.NewNopLogger(), &Config{ - Bucket: &client.BucketConfig{ - Type: client.FILESYSTEM, - Config: filesystem.Config{ - Directory: dir, + logger := log.NewNopLogger() + s, err := NewStore( + logger, + symbol.NewSymbolizer(logger), + &Config{ + Bucket: &client.BucketConfig{ + Type: client.FILESYSTEM, + Config: filesystem.Config{ + Directory: dir, + }, }, - }, - Cache: &CacheConfig{ - Type: FILESYSTEM, - Config: &FilesystemCacheConfig{ - Directory: cacheDir, + Cache: &CacheConfig{ + Type: FILESYSTEM, + Config: &FilesystemCacheConfig{ + Directory: cacheDir, + }, }, - }, - }) + }) require.NoError(t, err) lis, err := net.Listen("tcp", ":0") diff --git a/pkg/debuginfo/symbolizer.go b/pkg/debuginfo/symbolizer.go deleted file mode 100644 index b00d550a5e9..00000000000 --- a/pkg/debuginfo/symbolizer.go +++ /dev/null @@ -1,275 +0,0 @@ -// Copyright 2021 The Parca Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package debuginfo - -import ( - "debug/elf" - "debug/gosym" - "errors" - "fmt" - "strings" - - "github.com/go-kit/log" - "github.com/go-kit/log/level" - "github.com/google/pprof/profile" - - "github.com/parca-dev/parca/internal/pprof/binutils" -) - -type symbolizer struct { - logger log.Logger - bu *binutils.Binutils -} - -func (s *symbolizer) createAddr2Line(m *profile.Mapping, file string) (addr2Line, error) { - hasDWARF, err := hasDWARF(file) - if err != nil { - level.Debug(s.logger).Log( - "msg", "failed to determine if binary has DWARF info", - "file", file, - "err", err, - ) - } - if hasDWARF { - level.Debug(s.logger).Log("msg", "using DWARF to resolve symbols", "file", file) - return s.compiledBinary(m, file) - } - - // Go binaries has a special case. They use ".gopclntab" section to symbolize addresses. - // Keep that section and other identifying sections in the debug information file. 
- isGo, err := isGoBinary(file) - if err != nil { - level.Debug(s.logger).Log( - "msg", "failed to determine if binary is a Go binary", - "file", file, - "err", err, - ) - } - if isGo { - // Right now, this uses "debug/gosym" package, and it won't work for inlined functions, - // so this is just a best-effort implementation, in case we don't have DWARF. - sourceLine, err := s.goBinary(file) - if err == nil { - level.Debug(s.logger).Log("msg", "using go addr2Line to resolve symbols", "file", file) - return sourceLine, nil - } - - level.Error(s.logger).Log( - "msg", "failed to create go addr2Line, falling back to binary addr2Line", - "file", file, - "err", err, - ) - } - - // Just in case, underlying binutils can symbolize addresses. - level.Debug(s.logger).Log("msg", "falling back to binutils addr2Line resolve symbols", "file", file) - return s.compiledBinary(m, file) -} - -func (s *symbolizer) compiledBinary(m *profile.Mapping, file string) (addr2Line, error) { - objFile, err := s.bu.Open(file, m.Start, m.Limit, m.Offset) - if err != nil { - level.Error(s.logger).Log("msg", "failed to open object file", - "file", file, - "start", m.Start, - "limit", m.Limit, - "offset", m.Offset, - "err", err, - ) - return nil, fmt.Errorf("open object file: %w", err) - } - - return func(addr uint64) ([]profile.Line, error) { - frames, err := objFile.SourceLine(addr) - if err != nil { - level.Debug(s.logger).Log("msg", "failed to open object file", - "file", file, - "start", m.Start, - "limit", m.Limit, - "offset", m.Offset, - "address", addr, - "err", err, - ) - return nil, err - } - - if len(frames) == 0 { - return nil, errors.New("could not find any frames for given address") - } - - lines := []profile.Line{} - for _, frame := range frames { - lines = append(lines, profile.Line{ - Line: int64(frame.Line), - Function: &profile.Function{ - Name: frame.Func, - Filename: frame.File, - }, - }) - } - return lines, nil - }, nil -} - -func (s *symbolizer) goBinary(binPath string) (addr2Line, error) { - level.Debug(s.logger).Log("msg", "symbolizing a Go binary", "file", binPath) - table, err := gosymtab(binPath) - if err != nil { - return nil, fmt.Errorf("failed to create go symbtab: %w", err) - } - - return func(addr uint64) (lines []profile.Line, err error) { - defer func() { - // PCToLine panics with "invalid memory address or nil pointer dereference", - // - when it refers to an address that doesn't actually exist. - if r := recover(); r != nil { - err = fmt.Errorf("recovering from panic in go binary add2line: %v", r) - } - }() - - file, line, fn := table.PCToLine(addr) - lines = append(lines, profile.Line{ - Line: int64(line), - Function: &profile.Function{ - Name: fn.Name, - Filename: file, - }, - }) - return lines, nil - }, nil -} - -// Simplified version of rsc.io/goversion/version. 
-func isGoBinary(path string) (bool, error) { - exe, err := elf.Open(path) - if err != nil { - return false, fmt.Errorf("failed to open elf: %w", err) - } - defer exe.Close() - - for _, s := range exe.Sections { - if s.Name == ".note.go.buildid" { - return true, nil - } - } - - syms, err := exe.Symbols() - if err != nil { - return false, fmt.Errorf("failed to read symbols: %w", err) - } - for _, sym := range syms { - name := sym.Name - if name == "runtime.main" || name == "main.main" { - return true, nil - } - if name == "runtime.buildVersion" { - return true, nil - } - } - - return false, err -} - -func hasDWARF(path string) (bool, error) { - exe, err := elf.Open(path) - if err != nil { - return false, fmt.Errorf("failed to open elf: %w", err) - } - defer exe.Close() - - data, err := getDWARF(exe) - if err != nil { - return false, fmt.Errorf("failed to read DWARF sections: %w", err) - } - - return len(data) > 0, nil -} - -// A simplified and modified version of debug/elf.DWARF(). -func getDWARF(f *elf.File) (map[string][]byte, error) { - dwarfSuffix := func(s *elf.Section) string { - switch { - case strings.HasPrefix(s.Name, ".debug_"): - return s.Name[7:] - case strings.HasPrefix(s.Name, ".zdebug_"): - return s.Name[8:] - case strings.HasPrefix(s.Name, "__debug_"): // macos - return s.Name[8:] - default: - return "" - } - } - - // There are many DWARf sections, but these are the ones - // the debug/dwarf package started with "abbrev", "info", "str", "line", "ranges". - // Possible canditates for future: "loc", "loclists", "rnglists" - sections := map[string]*string{"abbrev": nil, "info": nil, "str": nil, "line": nil, "ranges": nil} - data := map[string][]byte{} - for _, s := range f.Sections { - suffix := dwarfSuffix(s) - if suffix == "" { - continue - } - if _, ok := sections[suffix]; !ok { - continue - } - b, err := s.Data() - if err != nil { - return nil, fmt.Errorf("failed to read debug section: %w", err) - } - data[suffix] = b - } - - return data, nil -} - -func gosymtab(path string) (*gosym.Table, error) { - exe, err := elf.Open(path) - if err != nil { - return nil, fmt.Errorf("failed to open elf: %w", err) - } - defer exe.Close() - - var pclntab []byte - if sec := exe.Section(".gopclntab"); sec != nil { - if sec.Type == elf.SHT_NOBITS { - return nil, errors.New(".gopclntab section has no bits") - } - - pclntab, err = sec.Data() - if err != nil { - return nil, fmt.Errorf("could not find .gopclntab section: %w", err) - } - } - - if len(pclntab) <= 0 { - return nil, errors.New(".gopclntab section has no bits") - } - - var symtab []byte - if sec := exe.Section(".gosymtab"); sec != nil { - symtab, _ = sec.Data() - } - - var text uint64 = 0 - if sec := exe.Section(".text"); sec != nil { - text = sec.Addr - } - - table, err := gosym.NewTable(symtab, gosym.NewLineTable(pclntab, text)) - if err != nil { - return nil, fmt.Errorf("failed to build symtab or pclinetab: %w", err) - } - return table, nil -} diff --git a/pkg/parca/parca.go b/pkg/parca/parca.go index 3fccb192f46..07319d1fe48 100644 --- a/pkg/parca/parca.go +++ b/pkg/parca/parca.go @@ -27,6 +27,8 @@ import ( "github.com/grpc-ecosystem/grpc-gateway/v2/runtime" "github.com/oklog/run" "github.com/parca-dev/parca/pkg/storage/metastore" + "github.com/parca-dev/parca/pkg/symbol" + "github.com/parca-dev/parca/pkg/symbolizer" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/prometheus/discovery" "go.opentelemetry.io/otel" @@ -50,7 +52,6 @@ import ( "github.com/parca-dev/parca/pkg/scrape" 
"github.com/parca-dev/parca/pkg/server" "github.com/parca-dev/parca/pkg/storage" - "github.com/parca-dev/parca/pkg/symbol" ) type Flags struct { @@ -62,6 +63,8 @@ type Flags struct { StorageTSDBRetentionTime time.Duration `default:"6h" help:"How long to retain samples in storage."` StorageTSDBExpensiveMetrics bool `default:"false" help:"Enable really heavy metrics. Only do this for debugging as the metrics are slowing Parca down by a lot." hidden:"true"` + + SymbolizerDemangleMode string `default:"simple" help:"Mode to demangle C++ symbols. Default mode is simplified: : no parameters, no templates, no return type" enum:"simple,full,none,templates"` } // Run the parca server @@ -95,12 +98,6 @@ func Run(ctx context.Context, logger log.Logger, reg *prometheus.Registry, flags return err } - dbgInfo, err := debuginfo.NewStore(logger, cfg.DebugInfo) - if err != nil { - level.Error(logger).Log("msg", "failed to initialize debug info store", "err", err) - return err - } - mStr, err := metastore.NewInMemorySQLiteProfileMetaStore( reg, // Produces high cardinality traces - uncomment locally if needed. @@ -148,10 +145,28 @@ func Run(ctx context.Context, logger log.Logger, reg *prometheus.Registry, flags return err } + dbgInfo, err := debuginfo.NewStore(logger, symbol.NewSymbolizer(logger, flags.SymbolizerDemangleMode), cfg.DebugInfo) + if err != nil { + level.Error(logger).Log("msg", "failed to initialize debug info store", "err", err) + return err + } + parcaserver := server.NewServer(reg) var gr run.Group gr.Add(run.SignalHandler(ctx, os.Interrupt, syscall.SIGINT, syscall.SIGTERM)) + { + sym := symbolizer.NewSymbolizer(logger, mStr, dbgInfo) + ctx, cancel := context.WithCancel(ctx) + gr.Add( + func() error { + return sym.Run(ctx, 10*time.Second) + }, + func(_ error) { + level.Debug(logger).Log("msg", "symbolizer server shutting down") + cancel() + }) + } { ctx, cancel := context.WithCancel(ctx) gr.Add(func() error { @@ -223,18 +238,6 @@ func Run(ctx context.Context, logger log.Logger, reg *prometheus.Registry, flags } }, ) - { - sym := symbol.NewSymbolizer(logger, mStr, dbgInfo) - ctx, cancel := context.WithCancel(ctx) - gr.Add( - func() error { - return sym.Run(ctx, 10*time.Second) - }, - func(_ error) { - level.Debug(logger).Log("msg", "symbol server shutting down") - cancel() - }) - } if err := gr.Run(); err != nil { if _, ok := err.(run.SignalError); ok { return nil diff --git a/pkg/symbol/addr2line/dwarf.go b/pkg/symbol/addr2line/dwarf.go new file mode 100644 index 00000000000..010bf32eeca --- /dev/null +++ b/pkg/symbol/addr2line/dwarf.go @@ -0,0 +1,198 @@ +// Copyright 2021 The Parca Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package addr2line + +import ( + "debug/dwarf" + "debug/elf" + "errors" + "fmt" + "io" + "sort" + + "github.com/go-delve/delve/pkg/dwarf/godwarf" + "github.com/go-delve/delve/pkg/dwarf/reader" + "github.com/google/pprof/profile" + "github.com/parca-dev/parca/pkg/symbol/demangle" +) + +func DWARF(demangler *demangle.Demangler, _ *profile.Mapping, path string) (func(addr uint64) ([]profile.Line, error), error) { + // TODO(kakkoyun): Handle offset, start and limit? + //objFile, err := s.bu.Open(file, m.Start, m.Limit, m.Offset) + //if err != nil { + // return nil, fmt.Errorf("open object file: %w", err) + //} + + exe, err := elf.Open(path) + if err != nil { + return nil, fmt.Errorf("failed to open elf: %w", err) + } + defer exe.Close() + + data, err := exe.DWARF() + if err != nil { + return nil, fmt.Errorf("failed to read DWARF data: %w", err) + } + + return func(addr uint64) ([]profile.Line, error) { + defer func() { + if r := recover(); r != nil { + err = fmt.Errorf("recovering from panic in DWARF binary add2line: %v", r) + } + }() + + lines, err := sourceLines(demangler, data, addr) + if err != nil { + return nil, err + } + + if len(lines) == 0 { + return nil, errors.New("could not find any frames for given address") + } + + return lines, nil + }, nil +} + +func sourceLines(demangler *demangle.Demangler, data *dwarf.Data, addr uint64) ([]profile.Line, error) { + // The reader is positioned at byte offset 0 in the DWARF “info” section. + er := data.Reader() + cu, err := er.SeekPC(addr) + if err != nil { + return nil, err + } + if cu == nil { + return nil, errors.New("failed to find a corresponding dwarf entry for given address") + } + + // The reader is positioned at byte offset 0 in the DWARF “line” section. + lr, err := data.LineReader(cu) + if err != nil { + return nil, err + } + + lineEntries := []dwarf.LineEntry{} + for { + le := dwarf.LineEntry{} + err := lr.Next(&le) + if err != nil { + break + } + if le.IsStmt { + lineEntries = append(lineEntries, le) + } + } + + subprograms := []*godwarf.Tree{} + abstractSubprograms := map[dwarf.Offset]*dwarf.Entry{} +outer: + for { + entry, err := er.Next() + if err != nil { + if err == io.EOF { + break + } + continue + } + if entry == nil { + break + } + if entry.Tag == dwarf.TagCompileUnit { + break + } + + if entry.Tag == dwarf.TagSubprogram { + for _, field := range entry.Field { + if field.Attr == dwarf.AttrInline { + abstractSubprograms[entry.Offset] = entry + continue outer + } + } + + tr, err := godwarf.LoadTree(entry.Offset, data, 0) + if err != nil { + return nil, err + } + + if tr.ContainsPC(addr) { + subprograms = append(subprograms, tr) + } + } + } + + lines := []profile.Line{} + for _, tr := range subprograms { + name := tr.Entry.Val(dwarf.AttrName).(string) + file, line := findLineInfo(lineEntries, tr.Ranges) + lines = append(lines, profile.Line{ + Line: line, + Function: demangler.Demangle(&profile.Function{ + Name: name, + Filename: file, + }), + }) + + // If pc is 0 then all inlined calls will be returned. 
+		for _, ch := range reader.InlineStack(tr, addr) {
+			var name string
+			if ch.Tag == dwarf.TagSubprogram {
+				name = tr.Entry.Val(dwarf.AttrName).(string)
+			} else {
+				abstractOrigin := abstractSubprograms[ch.Entry.Val(dwarf.AttrAbstractOrigin).(dwarf.Offset)]
+				name = getFunctionName(abstractOrigin)
+			}
+
+			file, line := findLineInfo(lineEntries, ch.Ranges)
+			lines = append(lines, profile.Line{
+				Line: line,
+				Function: demangler.Demangle(&profile.Function{
+					Name:     name,
+					Filename: file,
+				}),
+			})
+		}
+	}
+
+	return lines, nil
+}
+
+func findLineInfo(entries []dwarf.LineEntry, rg [][2]uint64) (string, int64) {
+	file := "?"
+	var line int64 = 0
+	i := sort.Search(len(entries), func(i int) bool {
+		return entries[i].Address >= rg[0][0]
+	})
+	if i >= len(entries) {
+		return file, line
+	}
+
+	le := dwarf.LineEntry{}
+	pc := entries[i].Address
+	if rg[0][0] <= pc && pc < rg[0][1] {
+		le = entries[i]
+		return le.File.Name, int64(le.Line)
+	}
+
+	return file, line
+}
+
+func getFunctionName(entry *dwarf.Entry) string {
+	var name string
+	for _, field := range entry.Field {
+		if field.Attr == dwarf.AttrName {
+			name = field.Val.(string)
+		}
+	}
+	return name
+}
diff --git a/pkg/symbol/addr2line/go.go b/pkg/symbol/addr2line/go.go
new file mode 100644
index 00000000000..3910e227d22
--- /dev/null
+++ b/pkg/symbol/addr2line/go.go
@@ -0,0 +1,102 @@
+// Copyright 2021 The Parca Authors
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package addr2line
+
+import (
+	"debug/elf"
+	"debug/gosym"
+	"errors"
+	"fmt"
+
+	"github.com/google/pprof/profile"
+)
+
+func Go(path string) (func(addr uint64) ([]profile.Line, error), error) {
+	tab, err := gosymtab(path)
+	if err != nil {
+		return nil, fmt.Errorf("failed to create go symtab: %w", err)
+	}
+
+	return func(addr uint64) (lines []profile.Line, err error) {
+		defer func() {
+			// PCToLine panics with "invalid memory address or nil pointer dereference",
+			// when it refers to an address that doesn't actually exist.
+			if r := recover(); r != nil {
+				err = fmt.Errorf("recovering from panic in go binary addr2line: %v", r)
+			}
+		}()
+
+		file, line, fn := tab.PCToLine(addr)
+		name := "?"
+		if fn != nil {
+			name = fn.Name
+		} else {
+			file = "?"
+			line = 0
+		}
+
+		// TODO(kakkoyun): Find a way to symbolize inline functions.
+		lines = append(lines, profile.Line{
+			Line: int64(line),
+			Function: &profile.Function{
+				Name:     name,
+				Filename: file,
+			},
+		})
+
+		return lines, nil
+	}, nil
+}
+
+func gosymtab(path string) (*gosym.Table, error) {
+	exe, err := elf.Open(path)
+	if err != nil {
+		return nil, fmt.Errorf("failed to open elf: %w", err)
+	}
+	defer exe.Close()
+
+	var pclntab []byte
+	if sec := exe.Section(".gopclntab"); sec != nil {
+		if sec.Type == elf.SHT_NOBITS {
+			return nil, errors.New(".gopclntab section has no bits")
+		}
+
+		// TODO(kakkoyun): Optimize. Don't read, just check existence!
+ pclntab, err = sec.Data() + if err != nil { + return nil, fmt.Errorf("could not find .gopclntab section: %w", err) + } + } + + if len(pclntab) <= 0 { + return nil, errors.New(".gopclntab section has no bits") + } + + var symtab []byte + if sec := exe.Section(".gosymtab"); sec != nil { + // TODO(kakkoyun): Optimize. Don't read just check existence! + symtab, _ = sec.Data() + } + + var text uint64 = 0 + if sec := exe.Section(".text"); sec != nil { + text = sec.Addr + } + + table, err := gosym.NewTable(symtab, gosym.NewLineTable(pclntab, text)) + if err != nil { + return nil, fmt.Errorf("failed to build symtab or pclinetab: %w", err) + } + return table, nil +} diff --git a/pkg/symbol/demangle/demangle.go b/pkg/symbol/demangle/demangle.go new file mode 100644 index 00000000000..299bbccae3e --- /dev/null +++ b/pkg/symbol/demangle/demangle.go @@ -0,0 +1,122 @@ +// Copyright 2021 The Parca Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package demangle + +import ( + "strings" + + "github.com/google/pprof/profile" + "github.com/ianlancetaylor/demangle" +) + +type Demangler struct { + options []demangle.Option + mode string + force bool +} + +func NewDemangler(mode string, force bool) *Demangler { + var options []demangle.Option + switch mode { + case "", "simple": // demangled, simplified: no parameters, no templates, no return type + options = []demangle.Option{demangle.NoParams, demangle.NoTemplateParams} + case "templates": // demangled, simplified: no parameters, no return type + options = []demangle.Option{demangle.NoParams} + case "full": + options = []demangle.Option{demangle.NoClones} + case "none": // no demangling + return nil + } + + return &Demangler{ + options: options, + mode: mode, + force: force, + } +} + +// Demangle updates the function names in a profile with demangled C++ +// names, simplified according to demanglerMode. If force is set, +// overwrite any names that appear already demangled. +// A modified version of pprof demangler. +func (d *Demangler) Demangle(fn *profile.Function) *profile.Function { + if d == nil { + return fn + } + + if d.force { + // Remove the current demangled names to force demangling. + if fn.Name != "" && fn.SystemName != "" { + fn.Name = fn.SystemName + } + } + + if fn.Name != "" && fn.SystemName != fn.Name { + return fn // Already demangled. + } + + if demangled := demangle.Filter(fn.SystemName, d.options...); demangled != fn.SystemName { + fn.Name = demangled + return fn + } + // Could not demangle. Apply heuristics in case the name is + // already demangled. + name := fn.SystemName + if looksLikeDemangledCPlusPlus(name) { + if d.mode == "" || d.mode == "templates" { + name = removeMatching(name, '(', ')') + } + if d.mode == "" { + name = removeMatching(name, '<', '>') + } + } + fn.Name = name + return fn +} + +// looksLikeDemangledCPlusPlus is a heuristic to decide if a name is +// the result of demangling C++. If so, further heuristics will be +// applied to simplify the name. 
+func looksLikeDemangledCPlusPlus(demangled string) bool { + if strings.Contains(demangled, ".<") { // Skip java names of the form "class." + return false + } + return strings.ContainsAny(demangled, "<>[]") || strings.Contains(demangled, "::") +} + +// removeMatching removes nested instances of start..end from name. +func removeMatching(name string, start, end byte) string { + s := string(start) + string(end) + var nesting, first, current int + for index := strings.IndexAny(name[current:], s); index != -1; index = strings.IndexAny(name[current:], s) { + switch current += index; name[current] { + case start: + nesting++ + if nesting == 1 { + first = current + } + case end: + nesting-- + switch { + case nesting < 0: + return name // Mismatch, abort + case nesting == 0: + name = name[:first] + name[current+1:] + current = first - 1 + } + } + current++ + } + return name +} diff --git a/pkg/symbol/elfutils/elfutils.go b/pkg/symbol/elfutils/elfutils.go new file mode 100644 index 00000000000..00cc0c8ed6e --- /dev/null +++ b/pkg/symbol/elfutils/elfutils.go @@ -0,0 +1,119 @@ +// Copyright 2021 The Parca Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package elfutils + +import ( + "debug/elf" + "fmt" + "strings" +) + +func HasDWARF(path string) (bool, error) { + exe, err := elf.Open(path) + if err != nil { + return false, fmt.Errorf("failed to open elf: %w", err) + } + defer exe.Close() + + sections, err := getDWARFSections(exe) + if err != nil { + return false, fmt.Errorf("failed to read DWARF sections: %w", err) + } + + return len(sections) > 0, nil +} + +// A simplified and modified version of debug/elf.DWARF(). +func getDWARFSections(f *elf.File) (map[string]struct{}, error) { + dwarfSuffix := func(s *elf.Section) string { + switch { + case strings.HasPrefix(s.Name, ".debug_"): + return s.Name[7:] + case strings.HasPrefix(s.Name, ".zdebug_"): + return s.Name[8:] + case strings.HasPrefix(s.Name, "__debug_"): // macos + return s.Name[8:] + default: + return "" + } + } + + // There are many DWARf sections, but these are the ones + // the debug/dwarf package started with "abbrev", "info", "str", "line", "ranges". + // Possible candidates for future: "loc", "loclists", "rnglists" + sections := map[string]*string{"abbrev": nil, "info": nil, "str": nil, "line": nil, "ranges": nil} + exists := map[string]struct{}{} + for _, s := range f.Sections { + suffix := dwarfSuffix(s) + if suffix == "" { + continue + } + if _, ok := sections[suffix]; !ok { + continue + } + exists[suffix] = struct{}{} + } + + return exists, nil +} + +func IsSymbolizableGoObjFile(path string) (bool, error) { + // Checks ".note.go.buildid" section and symtab better to keep those sections in object file. + exe, err := elf.Open(path) + if err != nil { + return false, fmt.Errorf("failed to open elf: %w", err) + } + defer exe.Close() + + isGo := false + for _, s := range exe.Sections { + if s.Name == ".note.go.buildid" { + isGo = true + } + } + + // In case ".note.go.buildid" section is stripped, check for symbols. 
+ if !isGo { + syms, err := exe.Symbols() + if err != nil { + return false, fmt.Errorf("failed to read symbols: %w", err) + } + for _, sym := range syms { + name := sym.Name + if name == "runtime.main" || name == "main.main" { + isGo = true + } + if name == "runtime.buildVersion" { + isGo = true + } + } + } + + if !isGo { + return false, nil + } + + // Check if the Go binary symbolizable. + // Go binaries has a special case. They use ".gopclntab" section to symbolize addresses. + var pclntab []byte + if sec := exe.Section(".gopclntab"); sec != nil { + // TODO(kakkoyun): Optimize. Don't read just check existence! + pclntab, err = sec.Data() + if err != nil { + return false, fmt.Errorf("could not find .gopclntab section: %w", err) + } + } + + return len(pclntab) > 0, nil +} diff --git a/pkg/symbol/symbol.go b/pkg/symbol/symbol.go new file mode 100644 index 00000000000..ce78a3b0523 --- /dev/null +++ b/pkg/symbol/symbol.go @@ -0,0 +1,119 @@ +// Copyright 2021 The Parca Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package symbol + +import ( + "github.com/go-kit/log" + "github.com/go-kit/log/level" + "github.com/google/pprof/profile" + "github.com/parca-dev/parca/pkg/symbol/addr2line" + "github.com/parca-dev/parca/pkg/symbol/demangle" + "github.com/parca-dev/parca/pkg/symbol/elfutils" +) + +type Symbolizer struct { + logger log.Logger + + demangler *demangle.Demangler +} + +type liner interface { + PCToLines(pc uint64) ([]profile.Line, error) +} + +type funcLiner func(addr uint64) ([]profile.Line, error) + +func (f funcLiner) PCToLines(pc uint64) ([]profile.Line, error) { return f(pc) } + +func NewSymbolizer(logger log.Logger, demangleMode ...string) *Symbolizer { + var dm string + if len(demangleMode) > 0 { + dm = demangleMode[0] + } + return &Symbolizer{ + logger: log.With(logger, "component", "symbolizer"), + demangler: demangle.NewDemangler(dm, false), + } +} + +// TODO(kakkoyun): Do we still need mapping? What is the actual usecase? +func (s *Symbolizer) NewLiner(m *profile.Mapping, file string) (liner, error) { + hasDWARF, err := elfutils.HasDWARF(file) + if err != nil { + level.Debug(s.logger).Log( + "msg", "failed to determine if binary has DWARF info", + "file", file, + "err", err, + ) + } + if hasDWARF { + level.Debug(s.logger).Log("msg", "using DWARF to resolve symbols", "file", file) + // TODO(kakkoyun): Add cache per file. + // TODO(kakkoyun): Make add2line.DWARF cache costly debug info maps. + f, err := addr2line.DWARF(s.demangler, m, file) + if err != nil { + level.Error(s.logger).Log( + "msg", "failed to open object file", + "file", file, + "start", m.Start, + "limit", m.Limit, + "offset", m.Offset, + "err", err, + ) + return nil, err + } + return funcLiner(f), nil + } + + // Go binaries has a special case. They use ".gopclntab" section to symbolize addresses. + // Keep that section and other identifying sections in the debug information file. 
+ isGo, err := elfutils.IsSymbolizableGoObjFile(file) + if err != nil { + level.Debug(s.logger).Log( + "msg", "failed to determine if binary is a Go binary", + "file", file, + "err", err, + ) + } + if isGo { + // Right now, this uses "debug/gosym" package, and it won't work for inlined functions, + // so this is just a best-effort implementation, in case we don't have DWARF. + level.Debug(s.logger).Log("msg", "symbolizing a Go binary", "file", file) + f, err := addr2line.Go(file) + if err == nil { + level.Debug(s.logger).Log("msg", "using go liner to resolve symbols", "file", file) + return funcLiner(f), nil + } + level.Error(s.logger).Log( + "msg", "failed to create go liner, falling back to binary liner", + "file", file, + "err", err, + ) + } + + // Just in case, underlying DWARF can symbolize addresses. + level.Debug(s.logger).Log("msg", "falling back to DWARF liner resolve symbols", "file", file) + f, err := addr2line.DWARF(s.demangler, m, file) + if err != nil { + level.Error(s.logger).Log("msg", "failed to open object file", + "file", file, + "start", m.Start, + "limit", m.Limit, + "offset", m.Offset, + "err", err, + ) + return nil, err + } + return funcLiner(f), nil +} diff --git a/pkg/symbol/symbolizer_test.go b/pkg/symbol/symbolizer_test.go deleted file mode 100644 index b4eb6b75b2b..00000000000 --- a/pkg/symbol/symbolizer_test.go +++ /dev/null @@ -1,288 +0,0 @@ -// Copyright 2021 The Parca Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package symbol - -import ( - "bytes" - "context" - "io/ioutil" - stdlog "log" - "net" - "os" - "sort" - "testing" - - "github.com/go-kit/log" - "github.com/google/pprof/profile" - profilestorepb "github.com/parca-dev/parca/gen/proto/go/parca/profilestore/v1alpha1" - "github.com/parca-dev/parca/pkg/profilestore" - "github.com/parca-dev/parca/pkg/storage" - "github.com/parca-dev/parca/pkg/storage/metastore" - "github.com/prometheus/client_golang/prometheus" - "github.com/stretchr/testify/require" - "github.com/thanos-io/thanos/pkg/objstore/client" - "github.com/thanos-io/thanos/pkg/objstore/filesystem" - "go.opentelemetry.io/otel/trace" - "google.golang.org/grpc" - - debuginfopb "github.com/parca-dev/parca/gen/proto/go/parca/debuginfo/v1alpha1" - "github.com/parca-dev/parca/pkg/debuginfo" -) - -type TestProfileMetaStore interface { - TestLocationStore - TestFunctionStore - metastore.MappingStore - Close() error - Ping() error -} - -type TestLocationStore interface { - metastore.LocationStore - GetLocations(ctx context.Context) ([]*profile.Location, error) -} - -type TestFunctionStore interface { - metastore.FunctionStore - GetFunctions(ctx context.Context) ([]*profile.Function, error) -} - -func TestSymbolizer(t *testing.T) { - ctx := context.Background() - - cacheDir, err := ioutil.TempDir("", "parca-test-cache") - require.NoError(t, err) - defer os.RemoveAll(cacheDir) - - w := log.NewSyncWriter(os.Stderr) - logger := log.NewLogfmtLogger(w) - s, err := debuginfo.NewStore(logger, &debuginfo.Config{ - Bucket: &client.BucketConfig{ - Type: client.FILESYSTEM, - Config: filesystem.Config{ - Directory: "testdata/", - }, - }, - Cache: &debuginfo.CacheConfig{ - Type: debuginfo.FILESYSTEM, - Config: &debuginfo.FilesystemCacheConfig{ - Directory: cacheDir, - }, - }, - }) - require.NoError(t, err) - - lis, err := net.Listen("tcp", ":0") - if err != nil { - t.Fatalf("failed to listen: %v", err) - } - grpcServer := grpc.NewServer() - defer grpcServer.GracefulStop() - debuginfopb.RegisterDebugInfoServiceServer(grpcServer, s) - go func() { - err := grpcServer.Serve(lis) - if err != nil { - stdlog.Fatalf("failed to serve: %v", err) - } - }() - - var mStr TestProfileMetaStore - mStr, err = metastore.NewInMemorySQLiteProfileMetaStore( - prometheus.NewRegistry(), - trace.NewNoopTracerProvider().Tracer(""), - "symbolizer", - ) - t.Cleanup(func() { - mStr.Close() - }) - require.NoError(t, err) - - sym := NewSymbolizer(log.NewNopLogger(), mStr, s) - m := &profile.Mapping{ - ID: uint64(1), - Start: 4194304, - Limit: 4603904, - BuildID: "2d6912fd3dd64542f6f6294f4bf9cb6c265b3085", - } - _, err = mStr.CreateMapping(ctx, m) - require.NoError(t, err) - - locs := []*profile.Location{{ - Mapping: m, - Address: 0x463781, - }} - _, err = mStr.CreateLocation(ctx, locs[0]) - require.NoError(t, err) - - allLocs, err := mStr.GetLocations(ctx) - require.NoError(t, err) - require.Equal(t, 1, len(allLocs)) - - symLocs, err := mStr.GetSymbolizableLocations(ctx) - require.NoError(t, err) - require.Equal(t, 1, len(symLocs)) - - err = sym.symbolize(context.Background(), symLocs) - require.NoError(t, err) - - allLocs, err = mStr.GetLocations(ctx) - require.NoError(t, err) - require.Equal(t, 1, len(allLocs)) - - symLocs, err = mStr.GetSymbolizableLocations(ctx) - require.NoError(t, err) - require.Equal(t, 0, len(symLocs)) - - functions, err := mStr.GetFunctions(ctx) - require.NoError(t, err) - require.Equal(t, 3, len(functions)) - - lines := allLocs[0].Line - sort.SliceStable(lines, func(i, j int) bool { - return lines[i].Line < 
lines[j].Line - }) - require.Equal(t, 3, len(lines)) - require.Equal(t, "/home/brancz/src/github.com/polarsignals/pprof-labels-example/main.go", lines[0].Function.Filename) - require.Equal(t, int64(10), lines[0].Line) - require.Equal(t, "main.main", lines[0].Function.Name) - require.Equal(t, "/home/brancz/src/github.com/polarsignals/pprof-labels-example/main.go", lines[1].Function.Filename) - require.Equal(t, int64(23), lines[1].Line) - require.Equal(t, "main.iteratePerTenant", lines[1].Function.Name) - require.Equal(t, "/home/brancz/src/github.com/polarsignals/pprof-labels-example/main.go", lines[2].Function.Filename) - require.Equal(t, int64(27), lines[2].Line) - require.Equal(t, "main.iterate", lines[2].Function.Name) -} - -func TestRealSymbolizer(t *testing.T) { - ctx := context.Background() - - cacheDir, err := ioutil.TempDir("", "parca-test-cache") - require.NoError(t, err) - defer os.RemoveAll(cacheDir) - - dbgStr, err := debuginfo.NewStore(log.NewNopLogger(), &debuginfo.Config{ - Bucket: &client.BucketConfig{ - Type: client.FILESYSTEM, - Config: filesystem.Config{ - Directory: "testdata/", - }, - }, - Cache: &debuginfo.CacheConfig{ - Type: debuginfo.FILESYSTEM, - Config: &debuginfo.FilesystemCacheConfig{ - Directory: cacheDir, - }, - }, - }) - require.NoError(t, err) - - var mStr TestProfileMetaStore - mStr, err = metastore.NewInMemorySQLiteProfileMetaStore( - prometheus.NewRegistry(), - trace.NewNoopTracerProvider().Tracer(""), - ) - require.NoError(t, err) - t.Cleanup(func() { - mStr.Close() - }) - - db := storage.OpenDB(prometheus.NewRegistry(), trace.NewNoopTracerProvider().Tracer(""), nil) - pStr := profilestore.NewProfileStore( - log.NewNopLogger(), - trace.NewNoopTracerProvider().Tracer(""), - db, - mStr, - ) - - lis, err := net.Listen("tcp", ":0") - if err != nil { - t.Fatalf("failed to listen: %v", err) - } - grpcServer := grpc.NewServer() - defer grpcServer.GracefulStop() - - debuginfopb.RegisterDebugInfoServiceServer(grpcServer, dbgStr) - profilestorepb.RegisterProfileStoreServiceServer(grpcServer, pStr) - - go func() { - err := grpcServer.Serve(lis) - if err != nil { - stdlog.Fatalf("failed to serve: %v", err) - } - }() - - f, err := os.Open("testdata/profile.pb.gz") - require.NoError(t, err) - p, err := profile.Parse(f) - require.NoError(t, err) - require.NoError(t, p.CheckValid()) - - conn, err := grpc.Dial(lis.Addr().String(), grpc.WithInsecure()) - require.NoError(t, err) - t.Cleanup(func() { - conn.Close() - }) - - buf := bytes.NewBuffer(nil) - require.NoError(t, p.Write(buf)) - - wc := profilestorepb.NewProfileStoreServiceClient(conn) - _, err = wc.WriteRaw(context.Background(), &profilestorepb.WriteRawRequest{ - Series: []*profilestorepb.RawProfileSeries{{ - Labels: &profilestorepb.LabelSet{Labels: []*profilestorepb.Label{}}, - Samples: []*profilestorepb.RawSample{{ - RawProfile: buf.Bytes(), - }}, - }}, - }) - require.NoError(t, err) - - allLocs, err := mStr.GetLocations(ctx) - require.NoError(t, err) - require.Equal(t, 32, len(allLocs)) - - symLocs, err := mStr.GetSymbolizableLocations(ctx) - require.NoError(t, err) - require.Equal(t, 11, len(symLocs)) - - sym := NewSymbolizer(log.NewNopLogger(), mStr, dbgStr) - require.NoError(t, sym.symbolize(ctx, symLocs)) // p.Location to symbolize the profile. 
- - allLocs, err = mStr.GetLocations(ctx) - require.NoError(t, err) - require.Equal(t, 32, len(allLocs)) - - symLocs, err = mStr.GetSymbolizableLocations(ctx) - require.NoError(t, err) - require.Equal(t, 0, len(symLocs)) - - functions, err := mStr.GetFunctions(ctx) - require.NoError(t, err) - require.Equal(t, 31, len(functions)) - - lines := allLocs[4].Line - sort.SliceStable(lines, func(i, j int) bool { - return lines[i].Line < lines[j].Line - }) - require.Equal(t, 3, len(lines)) - require.Equal(t, "/home/brancz/src/github.com/polarsignals/pprof-labels-example/main.go", lines[0].Function.Filename) - require.Equal(t, int64(10), lines[0].Line) - require.Equal(t, "main.main", lines[0].Function.Name) - require.Equal(t, "/home/brancz/src/github.com/polarsignals/pprof-labels-example/main.go", lines[1].Function.Filename) - require.Equal(t, int64(23), lines[1].Line) - require.Equal(t, "main.iteratePerTenant", lines[1].Function.Name) - require.Equal(t, "/home/brancz/src/github.com/polarsignals/pprof-labels-example/main.go", lines[2].Function.Filename) - require.Equal(t, int64(27), lines[2].Line) - require.Equal(t, "main.iterate", lines[2].Function.Name) -} diff --git a/pkg/symbol/symbolizer.go b/pkg/symbolizer/symbolizer.go similarity index 97% rename from pkg/symbol/symbolizer.go rename to pkg/symbolizer/symbolizer.go index 8a61954380a..66b7e19b5de 100644 --- a/pkg/symbol/symbolizer.go +++ b/pkg/symbolizer/symbolizer.go @@ -11,7 +11,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package symbol +package symbolizer import ( "context" @@ -84,6 +84,7 @@ func (s *Symbolizer) symbolize(ctx context.Context, locations []*profile.Locatio var result *multierror.Error for id, mapping := range mappings { level.Debug(s.logger).Log("msg", "storage symbolization request started", "buildid", mapping.BuildID) + // TODO(kakkoyun): Cache failed symbolization attempts per location. symbolizedLines, err := s.debugInfo.Symbolize(ctx, mapping, mappingLocations[id]...) if err != nil { // It's ok if we don't have the symbols for given BuildID, it happens too often. diff --git a/pkg/symbolizer/symbolizer_test.go b/pkg/symbolizer/symbolizer_test.go new file mode 100644 index 00000000000..929d6b82813 --- /dev/null +++ b/pkg/symbolizer/symbolizer_test.go @@ -0,0 +1,472 @@ +// Copyright 2021 The Parca Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package symbolizer + +import ( + "bytes" + "context" + "io/ioutil" + stdlog "log" + "net" + "os" + "sort" + "testing" + + "github.com/go-kit/log" + "github.com/google/pprof/profile" + profilestorepb "github.com/parca-dev/parca/gen/proto/go/parca/profilestore/v1alpha1" + "github.com/parca-dev/parca/pkg/profilestore" + "github.com/parca-dev/parca/pkg/storage" + "github.com/parca-dev/parca/pkg/storage/metastore" + "github.com/parca-dev/parca/pkg/symbol" + "github.com/prometheus/client_golang/prometheus" + "github.com/stretchr/testify/require" + "github.com/thanos-io/thanos/pkg/objstore/client" + "github.com/thanos-io/thanos/pkg/objstore/filesystem" + "go.opentelemetry.io/otel/trace" + "google.golang.org/grpc" + + debuginfopb "github.com/parca-dev/parca/gen/proto/go/parca/debuginfo/v1alpha1" + "github.com/parca-dev/parca/pkg/debuginfo" +) + +type TestProfileMetaStore interface { + TestLocationStore + TestFunctionStore + metastore.MappingStore + Close() error + Ping() error +} + +type TestLocationStore interface { + metastore.LocationStore + GetLocations(ctx context.Context) ([]*profile.Location, error) +} + +type TestFunctionStore interface { + metastore.FunctionStore + GetFunctions(ctx context.Context) ([]*profile.Function, error) +} + +func TestSymbolizer(t *testing.T) { + _, dbgStr, mStr := setup(t) + + sym := NewSymbolizer(log.NewNopLogger(), mStr, dbgStr) + m := &profile.Mapping{ + ID: uint64(1), + Start: 4194304, + Limit: 4603904, + BuildID: "2d6912fd3dd64542f6f6294f4bf9cb6c265b3085", + } + + ctx := context.Background() + + _, err := mStr.CreateMapping(ctx, m) + require.NoError(t, err) + + locs := []*profile.Location{{ + Mapping: m, + Address: 0x463781, + }} + _, err = mStr.CreateLocation(ctx, locs[0]) + require.NoError(t, err) + + allLocs, err := mStr.GetLocations(ctx) + require.NoError(t, err) + require.Equal(t, 1, len(allLocs)) + + symLocs, err := mStr.GetSymbolizableLocations(ctx) + require.NoError(t, err) + require.Equal(t, 1, len(symLocs)) + + err = sym.symbolize(context.Background(), symLocs) + require.NoError(t, err) + + symLocs, err = mStr.GetSymbolizableLocations(ctx) + require.NoError(t, err) + require.Equal(t, 0, len(symLocs)) + + functions, err := mStr.GetFunctions(ctx) + require.NoError(t, err) + require.Equal(t, 3, len(functions)) + + // Get updated locations. 
+ allLocs, err = mStr.GetLocations(ctx) + require.NoError(t, err) + + lines := allLocs[0].Line + sort.SliceStable(lines, func(i, j int) bool { + return lines[i].Line < lines[j].Line + }) + require.Equal(t, 3, len(lines)) + require.Equal(t, "/home/brancz/src/github.com/polarsignals/pprof-labels-example/main.go", lines[0].Function.Filename) + require.Equal(t, int64(7), lines[0].Line) // llvm-addr2line gives 10 + require.Equal(t, "main.main", lines[0].Function.Name) + require.Equal(t, "/home/brancz/src/github.com/polarsignals/pprof-labels-example/main.go", lines[1].Function.Filename) + require.Equal(t, int64(23), lines[1].Line) + require.Equal(t, "main.iteratePerTenant", lines[1].Function.Name) + require.Equal(t, "/home/brancz/src/github.com/polarsignals/pprof-labels-example/main.go", lines[2].Function.Filename) + require.Equal(t, int64(27), lines[2].Line) + require.Equal(t, "main.iterate", lines[2].Function.Name) +} + +func TestRealSymbolizer(t *testing.T) { + conn, dbgStr, mStr := setup(t) + + require.NoError(t, ingest(t, conn, "testdata/profile.pb.gz")) + + ctx := context.Background() + + allLocs, err := mStr.GetLocations(ctx) + require.NoError(t, err) + require.Equal(t, 32, len(allLocs)) + + symLocs, err := mStr.GetSymbolizableLocations(ctx) + require.NoError(t, err) + require.Equal(t, 11, len(symLocs)) + + sym := NewSymbolizer(log.NewNopLogger(), mStr, dbgStr) + require.NoError(t, sym.symbolize(ctx, symLocs)) + + symLocs, err = mStr.GetSymbolizableLocations(ctx) + require.NoError(t, err) + require.Equal(t, 0, len(symLocs)) + + functions, err := mStr.GetFunctions(ctx) + require.NoError(t, err) + require.Equal(t, 31, len(functions)) + + // Get updated locations. + allLocs, err = mStr.GetLocations(ctx) + require.NoError(t, err) + + lines := allLocs[4].Line + sort.SliceStable(lines, func(i, j int) bool { + return lines[i].Line < lines[j].Line + }) + require.Equal(t, 3, len(lines)) + require.Equal(t, "/home/brancz/src/github.com/polarsignals/pprof-labels-example/main.go", lines[0].Function.Filename) + require.Equal(t, int64(7), lines[0].Line) // llvm-addr2line gives 10 + require.Equal(t, "main.main", lines[0].Function.Name) + require.Equal(t, "/home/brancz/src/github.com/polarsignals/pprof-labels-example/main.go", lines[1].Function.Filename) + require.Equal(t, int64(23), lines[1].Line) + require.Equal(t, "main.iteratePerTenant", lines[1].Function.Name) + require.Equal(t, "/home/brancz/src/github.com/polarsignals/pprof-labels-example/main.go", lines[2].Function.Filename) + require.Equal(t, int64(27), lines[2].Line) + require.Equal(t, "main.iterate", lines[2].Function.Name) +} + +func TestRealSymbolizerDwarfAndSymbols(t *testing.T) { + conn, dbgStr, mStr := setup(t) + + // Generated from https://github.com/polarsignals/pprof-example-app-go + require.NoError(t, ingest(t, conn, "testdata/normal-cpu.stripped.pprof")) + + ctx := context.Background() + + allLocs, err := mStr.GetLocations(ctx) + require.NoError(t, err) + require.Equal(t, 174, len(allLocs)) + + symLocs, err := mStr.GetSymbolizableLocations(ctx) + require.NoError(t, err) + require.Equal(t, 174, len(symLocs)) + + sym := NewSymbolizer(log.NewNopLogger(), mStr, dbgStr) + require.NoError(t, sym.symbolize(ctx, symLocs)) + + symLocs, err = mStr.GetSymbolizableLocations(ctx) + require.NoError(t, err) + require.Equal(t, 0, len(symLocs)) + + functions, err := mStr.GetFunctions(ctx) + require.NoError(t, err) + require.Equal(t, 127, len(functions)) + + // Get updated locations. 
+ allLocs, err = mStr.GetLocations(ctx) + require.NoError(t, err) + + lines := allLocs[2].Line + sort.SliceStable(lines, func(i, j int) bool { + return lines[i].Line < lines[j].Line + }) + require.Equal(t, 1, len(lines)) + require.Equal(t, "/home/kakkoyun/Workspace/PolarSignals/pprof-example-app-go/fib/fib.go", lines[0].Function.Filename) + require.Equal(t, int64(5), lines[0].Line) + require.Equal(t, "github.com/polarsignals/pprof-example-app-go/fib.Fibonacci", lines[0].Function.Name) + + lines = allLocs[3].Line + sort.SliceStable(lines, func(i, j int) bool { + return lines[i].Line < lines[j].Line + }) + require.Equal(t, 1, len(lines)) + require.Equal(t, "/home/kakkoyun/Workspace/PolarSignals/pprof-example-app-go/main.go", lines[0].Function.Filename) + require.Equal(t, int64(86), lines[0].Line) + require.Equal(t, "main.busyCPU", lines[0].Function.Name) +} + +func TestRealSymbolizerInliningDisabled(t *testing.T) { + conn, dbgStr, mStr := setup(t) + + // Generated from https://github.com/polarsignals/pprof-example-app-go + require.NoError(t, ingest(t, conn, "testdata/inlining-disabled-cpu.stripped.pprof")) + + ctx := context.Background() + + allLocs, err := mStr.GetLocations(ctx) + require.NoError(t, err) + require.Equal(t, 223, len(allLocs)) + + symLocs, err := mStr.GetSymbolizableLocations(ctx) + require.NoError(t, err) + require.Equal(t, 223, len(symLocs)) + + sym := NewSymbolizer(log.NewNopLogger(), mStr, dbgStr) + require.NoError(t, sym.symbolize(ctx, symLocs)) + + symLocs, err = mStr.GetSymbolizableLocations(ctx) + require.NoError(t, err) + require.Equal(t, 0, len(symLocs)) + + functions, err := mStr.GetFunctions(ctx) + require.NoError(t, err) + require.Equal(t, 136, len(functions)) + + // Get updated locations. + allLocs, err = mStr.GetLocations(ctx) + require.NoError(t, err) + + lines := allLocs[1].Line + sort.SliceStable(lines, func(i, j int) bool { + return lines[i].Line < lines[j].Line + }) + require.Equal(t, 1, len(lines)) + require.Equal(t, "/home/kakkoyun/Workspace/PolarSignals/pprof-example-app-go/fib/fib.go", lines[0].Function.Filename) + require.Equal(t, int64(5), lines[0].Line) + require.Equal(t, "github.com/polarsignals/pprof-example-app-go/fib.Fibonacci", lines[0].Function.Name) + + lines = allLocs[2].Line + sort.SliceStable(lines, func(i, j int) bool { + return lines[i].Line < lines[j].Line + }) + require.Equal(t, 1, len(lines)) + require.Equal(t, "/home/kakkoyun/Workspace/PolarSignals/pprof-example-app-go/main.go", lines[0].Function.Filename) + require.Equal(t, int64(86), lines[0].Line) + require.Equal(t, "main.busyCPU", lines[0].Function.Name) +} + +func TestRealSymbolizerWithoutDWARF(t *testing.T) { + // NOTICE: Uses custom Go symbolizer! + + conn, dbgStr, mStr := setup(t) + + // Generated from https://github.com/polarsignals/pprof-example-app-go + require.NoError(t, ingest(t, conn, "testdata/without-dwarf-cpu.stripped.pprof")) + + ctx := context.Background() + + allLocs, err := mStr.GetLocations(ctx) + require.NoError(t, err) + require.Equal(t, 159, len(allLocs)) + + symLocs, err := mStr.GetSymbolizableLocations(ctx) + require.NoError(t, err) + require.Equal(t, 159, len(symLocs)) + + sym := NewSymbolizer(log.NewNopLogger(), mStr, dbgStr) + require.NoError(t, sym.symbolize(ctx, symLocs)) + + symLocs, err = mStr.GetSymbolizableLocations(ctx) + require.NoError(t, err) + require.Equal(t, 0, len(symLocs)) + + functions, err := mStr.GetFunctions(ctx) + require.NoError(t, err) + require.Equal(t, 99, len(functions)) + + // Get updated locations. 
+ allLocs, err = mStr.GetLocations(ctx) + require.NoError(t, err) + + lines := allLocs[13].Line + sort.SliceStable(lines, func(i, j int) bool { + return lines[i].Line < lines[j].Line + }) + require.Equal(t, 1, len(lines)) + require.Equal(t, "/home/kakkoyun/Workspace/PolarSignals/pprof-example-app-go/fib/fib.go", lines[0].Function.Filename) + require.Equal(t, int64(13), lines[0].Line) // with DWARF 5 + require.Equal(t, "github.com/polarsignals/pprof-example-app-go/fib.Fibonacci", lines[0].Function.Name) + + lines = allLocs[14].Line + sort.SliceStable(lines, func(i, j int) bool { + return lines[i].Line < lines[j].Line + }) + require.Equal(t, 1, len(lines)) + require.Equal(t, "/home/kakkoyun/Workspace/PolarSignals/pprof-example-app-go/main.go", lines[0].Function.Filename) + require.Equal(t, int64(89), lines[0].Line) // with DWARF 86 + require.Equal(t, "main.busyCPU", lines[0].Function.Name) +} + +func TestRealSymbolizerEverythingStrippedInliningEnabled(t *testing.T) { + // NOTICE: Uses custom Go symbolizer! + + conn, dbgStr, mStr := setup(t) + + // Generated from https://github.com/polarsignals/pprof-example-app-go + require.NoError(t, ingest(t, conn, "testdata/stripped-cpu.stripped.pprof")) + + ctx := context.Background() + + allLocs, err := mStr.GetLocations(ctx) + require.NoError(t, err) + require.Equal(t, 136, len(allLocs)) + + symLocs, err := mStr.GetSymbolizableLocations(ctx) + require.NoError(t, err) + require.Equal(t, 136, len(symLocs)) + + sym := NewSymbolizer(log.NewNopLogger(), mStr, dbgStr) + require.NoError(t, sym.symbolize(ctx, symLocs)) + + symLocs, err = mStr.GetSymbolizableLocations(ctx) + require.NoError(t, err) + require.Equal(t, 0, len(symLocs)) + + functions, err := mStr.GetFunctions(ctx) + require.NoError(t, err) + require.Equal(t, 80, len(functions)) + + // Get updated locations. 
+ allLocs, err = mStr.GetLocations(ctx) + require.NoError(t, err) + + lines := allLocs[1].Line + sort.SliceStable(lines, func(i, j int) bool { + return lines[i].Line < lines[j].Line + }) + require.Equal(t, 1, len(lines)) + // go -trimpath + require.Equal(t, "github.com/polarsignals/pprof-example-app-go/fib/fib.go", lines[0].Function.Filename) + require.Equal(t, int64(13), lines[0].Line) // with DWARF 5 + require.Equal(t, "github.com/polarsignals/pprof-example-app-go/fib.Fibonacci", lines[0].Function.Name) + + lines = allLocs[2].Line + sort.SliceStable(lines, func(i, j int) bool { + return lines[i].Line < lines[j].Line + }) + require.Equal(t, 1, len(lines)) + // go -trimpath + require.Equal(t, "./main.go", lines[0].Function.Filename) + require.Equal(t, int64(89), lines[0].Line) // with DWARF 86 + require.Equal(t, "main.busyCPU", lines[0].Function.Name) +} + +func ingest(t *testing.T, conn *grpc.ClientConn, path string) error { + f, err := os.Open(path) + require.NoError(t, err) + p, err := profile.Parse(f) + require.NoError(t, err) + require.NoError(t, p.CheckValid()) + + buf := bytes.NewBuffer(nil) + require.NoError(t, p.Write(buf)) + + wc := profilestorepb.NewProfileStoreServiceClient(conn) + _, err = wc.WriteRaw(context.Background(), &profilestorepb.WriteRawRequest{ + Series: []*profilestorepb.RawProfileSeries{{ + Labels: &profilestorepb.LabelSet{Labels: []*profilestorepb.Label{}}, + Samples: []*profilestorepb.RawSample{{ + RawProfile: buf.Bytes(), + }}, + }}, + }) + return err +} + +func setup(t *testing.T) (*grpc.ClientConn, *debuginfo.Store, TestProfileMetaStore) { + t.Helper() + + cacheDir, err := ioutil.TempDir("", "parca-test-cache-*") + require.NoError(t, err) + t.Cleanup(func() { + os.RemoveAll(cacheDir) + }) + + logger := log.NewNopLogger() + dbgStr, err := debuginfo.NewStore( + logger, + symbol.NewSymbolizer(logger), + &debuginfo.Config{ + Bucket: &client.BucketConfig{ + Type: client.FILESYSTEM, + Config: filesystem.Config{ + Directory: "testdata/", + }, + }, + Cache: &debuginfo.CacheConfig{ + Type: debuginfo.FILESYSTEM, + Config: &debuginfo.FilesystemCacheConfig{ + Directory: cacheDir, + }, + }, + }) + require.NoError(t, err) + + var mStr TestProfileMetaStore + mStr, err = metastore.NewInMemorySQLiteProfileMetaStore( + prometheus.NewRegistry(), + trace.NewNoopTracerProvider().Tracer(""), + t.Name(), + ) + require.NoError(t, err) + t.Cleanup(func() { + mStr.Close() + }) + + db := storage.OpenDB(prometheus.NewRegistry(), trace.NewNoopTracerProvider().Tracer(""), nil) + pStr := profilestore.NewProfileStore( + log.NewNopLogger(), + trace.NewNoopTracerProvider().Tracer(""), + db, + mStr, + ) + + lis, err := net.Listen("tcp", ":0") + if err != nil { + t.Fatalf("failed to listen: %v", err) + } + grpcServer := grpc.NewServer() + t.Cleanup(func() { + grpcServer.GracefulStop() + }) + + debuginfopb.RegisterDebugInfoServiceServer(grpcServer, dbgStr) + profilestorepb.RegisterProfileStoreServiceServer(grpcServer, pStr) + + go func() { + err := grpcServer.Serve(lis) + if err != nil { + stdlog.Fatalf("failed to serve: %v", err) + } + }() + + conn, err := grpc.Dial(lis.Addr().String(), grpc.WithInsecure()) + require.NoError(t, err) + t.Cleanup(func() { + conn.Close() + }) + + return conn, dbgStr, mStr +} diff --git a/pkg/symbolizer/testdata/2d5745782d486738416336492d624458373872722f43476c446c45724d31434c692d6745383869752d2f5f6765757162714a666856504e464c73585830762f445a554f4e796c324b655554684d463241784b43/debuginfo 
b/pkg/symbolizer/testdata/2d5745782d486738416336492d624458373872722f43476c446c45724d31434c692d6745383869752d2f5f6765757162714a666856504e464c73585830762f445a554f4e796c324b655554684d463241784b43/debuginfo new file mode 100755 index 00000000000..2dc0e77c40e Binary files /dev/null and b/pkg/symbolizer/testdata/2d5745782d486738416336492d624458373872722f43476c446c45724d31434c692d6745383869752d2f5f6765757162714a666856504e464c73585830762f445a554f4e796c324b655554684d463241784b43/debuginfo differ diff --git a/pkg/symbol/testdata/2d6912fd3dd64542f6f6294f4bf9cb6c265b3085/debuginfo b/pkg/symbolizer/testdata/2d6912fd3dd64542f6f6294f4bf9cb6c265b3085/debuginfo similarity index 100% rename from pkg/symbol/testdata/2d6912fd3dd64542f6f6294f4bf9cb6c265b3085/debuginfo rename to pkg/symbolizer/testdata/2d6912fd3dd64542f6f6294f4bf9cb6c265b3085/debuginfo diff --git a/pkg/symbolizer/testdata/536f474d6962346e6d6839516b665657786e436e2f43476c446c45724d31434c692d6745383869752d2f5f6765757162714a666856504e464c73585830762f545065446b774c707530396845444b4b34534767/debuginfo b/pkg/symbolizer/testdata/536f474d6962346e6d6839516b665657786e436e2f43476c446c45724d31434c692d6745383869752d2f5f6765757162714a666856504e464c73585830762f545065446b774c707530396845444b4b34534767/debuginfo new file mode 100755 index 00000000000..f4fb7a74ad1 Binary files /dev/null and b/pkg/symbolizer/testdata/536f474d6962346e6d6839516b665657786e436e2f43476c446c45724d31434c692d6745383869752d2f5f6765757162714a666856504e464c73585830762f545065446b774c707530396845444b4b34534767/debuginfo differ diff --git a/pkg/symbolizer/testdata/595150334c6a706f4957766e4d6c7476614457742f454556526d5a2d665f79675433316e7169685f4a2f5a515a3830714d666c5a756f65714a79615154502f7057517431716e516f4b436b50696e756a474d6f/debuginfo b/pkg/symbolizer/testdata/595150334c6a706f4957766e4d6c7476614457742f454556526d5a2d665f79675433316e7169685f4a2f5a515a3830714d666c5a756f65714a79615154502f7057517431716e516f4b436b50696e756a474d6f/debuginfo new file mode 100755 index 00000000000..26f1f64fa64 Binary files /dev/null and b/pkg/symbolizer/testdata/595150334c6a706f4957766e4d6c7476614457742f454556526d5a2d665f79675433316e7169685f4a2f5a515a3830714d666c5a756f65714a79615154502f7057517431716e516f4b436b50696e756a474d6f/debuginfo differ diff --git a/pkg/symbolizer/testdata/77364271716762793947664b3479416b4676642d2f417a333159574d503255743036584e4a3867414c2f4d77616c62673256647a494f4437674265727a472f37625364676957463368655f68784f7678745478/debuginfo b/pkg/symbolizer/testdata/77364271716762793947664b3479416b4676642d2f417a333159574d503255743036584e4a3867414c2f4d77616c62673256647a494f4437674265727a472f37625364676957463368655f68784f7678745478/debuginfo new file mode 100755 index 00000000000..c598c9e3079 Binary files /dev/null and b/pkg/symbolizer/testdata/77364271716762793947664b3479416b4676642d2f417a333159574d503255743036584e4a3867414c2f4d77616c62673256647a494f4437674265727a472f37625364676957463368655f68784f7678745478/debuginfo differ diff --git a/pkg/symbolizer/testdata/inlining-disabled-cpu.stripped.pprof b/pkg/symbolizer/testdata/inlining-disabled-cpu.stripped.pprof new file mode 100644 index 00000000000..1a64e5d5874 Binary files /dev/null and b/pkg/symbolizer/testdata/inlining-disabled-cpu.stripped.pprof differ diff --git a/pkg/symbolizer/testdata/normal-cpu.stripped.pprof b/pkg/symbolizer/testdata/normal-cpu.stripped.pprof new file mode 100644 index 00000000000..5a176e1c1ec Binary files /dev/null and b/pkg/symbolizer/testdata/normal-cpu.stripped.pprof differ diff --git a/pkg/symbol/testdata/profile.pb.gz 
b/pkg/symbolizer/testdata/profile.pb.gz similarity index 100% rename from pkg/symbol/testdata/profile.pb.gz rename to pkg/symbolizer/testdata/profile.pb.gz diff --git a/pkg/symbolizer/testdata/stripped-cpu.stripped.pprof b/pkg/symbolizer/testdata/stripped-cpu.stripped.pprof new file mode 100644 index 00000000000..22ee596ed51 Binary files /dev/null and b/pkg/symbolizer/testdata/stripped-cpu.stripped.pprof differ diff --git a/pkg/symbolizer/testdata/without-dwarf-cpu.stripped.pprof b/pkg/symbolizer/testdata/without-dwarf-cpu.stripped.pprof new file mode 100644 index 00000000000..4d3568fbc0f Binary files /dev/null and b/pkg/symbolizer/testdata/without-dwarf-cpu.stripped.pprof differ diff --git a/scripts/check-license.sh b/scripts/check-license.sh index 49e6cc7d9e3..37f5e624c37 100755 --- a/scripts/check-license.sh +++ b/scripts/check-license.sh @@ -7,7 +7,7 @@ set -o pipefail set -u licRes=$( - find . -type f -iname '*.go' ! -path '*/vendor/*' ! -path '*/internal/pprof*' -exec \ + find . -type f -iname '*.go' ! -path '*/vendor/*' ! -path '*/tmp*' -exec \ sh -c 'head -n3 $1 | grep -Eq "(Copyright|generated|GENERATED)" || echo -e $1' {} {} \; )
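
Taken together, the new pkg/symbol, pkg/symbol/addr2line, pkg/symbol/demangle, and pkg/symbol/elfutils packages replace the previous binutils/llvm-addr2line based symbolization. The following is a minimal sketch of how the new API introduced by this patch fits together, roughly mirroring what debuginfo.Store.Symbolize does; the object file path is a placeholder, and the mapping ranges, build ID, and address are borrowed from the test fixtures purely for illustration:

package main

import (
	"fmt"

	"github.com/go-kit/log"
	"github.com/google/pprof/profile"

	"github.com/parca-dev/parca/pkg/symbol"
)

func main() {
	logger := log.NewNopLogger()

	// "simple" mirrors the default of the new --symbolizer-demangle-mode flag.
	sym := symbol.NewSymbolizer(logger, "simple")

	// Placeholder mapping and object file; in Parca these come from the
	// ingested profile and the uploaded debug info, respectively.
	m := &profile.Mapping{
		Start:   4194304,
		Limit:   4603904,
		BuildID: "2d6912fd3dd64542f6f6294f4bf9cb6c265b3085",
	}
	liner, err := sym.NewLiner(m, "path/to/fetched/debuginfo")
	if err != nil {
		panic(err)
	}

	// Resolve a single program counter to source lines (inlined frames
	// are included when DWARF is available).
	lines, err := liner.PCToLines(0x463781)
	if err != nil {
		panic(err)
	}
	for _, line := range lines {
		fmt.Println(line.Function.Name, line.Function.Filename, line.Line)
	}
}

NewLiner prefers DWARF when the object file carries debug sections, falls back to the gosym/.gopclntab liner for symbolizable Go binaries, and otherwise retries DWARF as a last resort, which is why keeping .gopclntab, .gosymtab, and .note.go.buildid in uploaded debug info matters.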