workflows: update to go 1.21 and golangci updates #601

Merged · 3 commits · Nov 29, 2023
2 changes: 1 addition & 1 deletion .github/workflows/go.yml
@@ -16,7 +16,7 @@ jobs:
- name: Set up Go
uses: actions/setup-go@v2
with:
go-version: '1.20'
go-version: '1.21'

- name: Test
run: go test -short -race -tags assert,debug -v ./...
4 changes: 2 additions & 2 deletions .github/workflows/golangci-lint.yml
@@ -23,11 +23,11 @@ jobs:
steps:
- uses: actions/setup-go@v2
with:
go-version: ^1.20
go-version: ^1.21

- uses: actions/checkout@v3

- name: golangci-lint
uses: golangci/[email protected]
with:
version: v1.51.2
version: v1.55.2
24 changes: 15 additions & 9 deletions .golangci.yml
@@ -23,15 +23,21 @@ issues:

 linters-settings:
   depguard:
-    list-type: blacklist
-    include-go-root: true
-    packages-with-error-message:
-      - go.uber.org/atomic: "Use sync/atomic instead of go.uber.org/atomic"
-      - debug/elf: "Use github.com/parca-dev/parca/internal/go/debug/elf instead of debug/elf"
-      - github.com/stretchr/testify/assert: "Use github.com/stretchr/testify/require instead of github.com/stretchr/testify/assert"
-      - github.com/go-kit/kit/log: "Use github.com/go-kit/log instead of github.com/go-kit/kit/log"
-      - github.com/pkg/errors: "Use fmt.Errorf instead"
-      - github.com/segmentio/parquet-go: "Use github.com/parquet-go/parquet-go instead"
+    rules:
+      main:
+        deny:
+          - pkg: go.uber.org/atomic
+            desc: "Use sync/atomic instead of go.uber.org/atomic"
+          - pkg: debug/elf
+            desc: "Use github.com/parca-dev/parca/internal/go/debug/elf instead of debug/elf"
+          - pkg: github.com/stretchr/testify/assert
+            desc: "Use github.com/stretchr/testify/require instead of github.com/stretchr/testify/assert"
+          - pkg: github.com/go-kit/kit/log
+            desc: "Use github.com/go-kit/log instead of github.com/go-kit/kit/log"
+          - pkg: github.com/pkg/errors
+            desc: "Use fmt.Errorf instead"
+          - pkg: github.com/segmentio/parquet-go
+            desc: "Use github.com/parquet-go/parquet-go instead"
   errcheck:
     exclude: ./.errcheck_excludes.txt
   goimports:
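
Editor's note: the depguard settings above are migrated from the older list-type/packages-with-error-message form to the rules-based form that newer golangci-lint releases (such as the v1.55.2 pinned in the workflow above) expect. As a rough illustration of what the main rule's deny list means in practice — the import path and message come from the config, while loadConfig and everything else below is an invented sketch, not frostdb code — importing github.com/pkg/errors would now fail the lint run with "Use fmt.Errorf instead", and the standard-library wrapping shown here is the compliant alternative:

```go
// Minimal sketch: wrapping errors with the standard library instead of the
// denied github.com/pkg/errors package. fmt.Errorf with %w preserves the
// wrapped error for errors.Is/errors.As, which is what errors.Wrap provided.
package main

import (
	"errors"
	"fmt"
	"os"
)

func loadConfig(path string) error {
	if _, err := os.ReadFile(path); err != nil {
		// Instead of errors.Wrap(err, "loading config") from github.com/pkg/errors:
		return fmt.Errorf("loading config %q: %w", path, err)
	}
	return nil
}

func main() {
	err := loadConfig("does-not-exist.yaml")
	fmt.Println(err)
	// The wrapped sentinel is still visible through the %w chain.
	fmt.Println(errors.Is(err, os.ErrNotExist))
}
```
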
2 changes: 1 addition & 1 deletion dynparquet/nil_chunk.go
@@ -217,7 +217,7 @@ func (p *nilPage) Data() encoding.Values {
panic("not implemented")
}

func (p *nilPage) Slice(i, j int64) parquet.Page {
func (p *nilPage) Slice(_, _ int64) parquet.Page {
return &nilPage{
numValues: p.numValues,
columnIndex: p.columnIndex,
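
Editor's note: most of the Go changes in this PR follow the same pattern as the Slice change above: parameters that a method must accept to satisfy an interface or callback signature, but never reads, are renamed to the blank identifier `_`, which silences the unused-parameter warnings reported by the newer golangci-lint run. A minimal sketch of the pattern, using an invented Pager interface unrelated to frostdb:

```go
package main

import "fmt"

// Pager is a stand-in for an interface whose full signature every
// implementation must keep, even when it ignores some arguments.
type Pager interface {
	Slice(i, j int64) string
}

// fixedPage ignores the requested bounds and always returns the same value,
// so the parameters are declared as _ to make that explicit and to keep
// unused-parameter linters quiet.
type fixedPage struct{ value string }

func (p fixedPage) Slice(_, _ int64) string { return p.value }

func main() {
	var pg Pager = fixedPage{value: "whole page"}
	fmt.Println(pg.Slice(0, 10))
}
```
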
10 changes: 5 additions & 5 deletions dynparquet/schema.go
@@ -469,7 +469,7 @@ func (s *v2storageLayoutWrapper) GetCompressionInt32() int32 {
return int32(s.StorageLayout.GetCompression())
}

func StorageLayoutWrapper(layout *schemav2pb.StorageLayout) StorageLayout {
func StorageLayoutWrapper(_ *schemav2pb.StorageLayout) StorageLayout {
return nil
}

@@ -1290,13 +1290,13 @@ func (s *Schema) GetWriter(w io.Writer, dynamicColumns map[string][]string, sort
pool, _ := s.writers.LoadOrStore(fmt.Sprintf("%s,sorting=%t", key, sorting), &sync.Pool{})
pooled := pool.(*sync.Pool).Get()
if pooled == nil {
new, err := s.NewWriter(w, dynamicColumns, sorting)
pw, err := s.NewWriter(w, dynamicColumns, sorting)
if err != nil {
return nil, err
}
return &PooledWriter{
pool: pool.(*sync.Pool),
ParquetWriter: new,
ParquetWriter: pw,
}, nil
}
pooled.(*PooledWriter).ParquetWriter.Reset(w)
@@ -1372,13 +1372,13 @@ func (s *Schema) GetBuffer(dynamicColumns map[string][]string) (*PooledBuffer, e
pool, _ := s.buffers.LoadOrStore(key, &sync.Pool{})
pooled := pool.(*sync.Pool).Get()
if pooled == nil {
new, err := s.NewBuffer(dynamicColumns)
pw, err := s.NewBuffer(dynamicColumns)
if err != nil {
return nil, err
}
return &PooledBuffer{
pool: pool.(*sync.Pool),
Buffer: new,
Buffer: pw,
}, nil
}
return pooled.(*PooledBuffer), nil
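
Editor's note: the GetWriter and GetBuffer hunks rename a local variable from new to pw. new is a predeclared Go identifier (the built-in allocation function), and shadowing it is one of the things the updated linter set flags. A small sketch of why the rename helps, with throwaway types that are not part of frostdb:

```go
package main

import "fmt"

type writer struct{ name string }

func newWriter(name string) (*writer, error) {
	return &writer{name: name}, nil
}

func main() {
	// Shadowing the built-in would look like this and still compiles:
	//
	//   new, err := newWriter("w1")
	//
	// but inside that scope the builtin new() is no longer usable and the
	// predeclared-identifier check flags it. Renaming keeps both available:
	pw, err := newWriter("w1")
	if err != nil {
		panic(err)
	}

	// The builtin new() still works alongside pw.
	count := new(int)
	*count = 1
	fmt.Println(pw.name, *count)
}
```
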
6 changes: 5 additions & 1 deletion index/lsm.go
@@ -203,7 +203,7 @@ func (l *LSM) String() string {
return s
}

func (l *LSM) Prefixes(ctx context.Context, prefix string) ([]string, error) {
func (l *LSM) Prefixes(_ context.Context, _ string) ([]string, error) {
return []string{l.prefix}, nil
}

@@ -298,12 +298,16 @@ func (l *LSM) findNode(node *Node) *Node {
 // levels are below the target size.
 func (l *LSM) EnsureCompaction() error {
 	for !l.compacting.CompareAndSwap(false, true) { // TODO: should backoff retry this probably
+		// Satisfy linter with a statement.
+		continue
 	}
 	return l.compact(true /* ignoreSizes */)
 }

 func (l *LSM) Rotate(level SentinelType, externalWriter func([]*parts.Part) (*parts.Part, int64, int64, error)) error {
 	for !l.compacting.CompareAndSwap(false, true) { // TODO: should backoff retry this probably
+		// Satisfy linter with a statement.
+		continue
 	}
 	defer l.compacting.Store(false)
 	start := time.Now()
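
Editor's note: the two compaction entry points above spin on CompareAndSwap with what used to be an empty loop body; the updated linters reject empty blocks, so the PR adds an explicit continue and a comment rather than restructuring the loop (the TODO about backing off stays). A self-contained sketch of the same spin-until-acquired shape, with a hypothetical worker type:

```go
package main

import (
	"fmt"
	"sync/atomic"
)

type worker struct {
	busy atomic.Bool
}

// run spins until it wins the busy flag, does its work, then releases it.
// The explicit continue keeps the loop body non-empty for linters that
// reject empty blocks; a production version would back off instead of
// spinning hot.
func (w *worker) run(task string) {
	for !w.busy.CompareAndSwap(false, true) {
		// Another goroutine holds the flag; try again.
		continue
	}
	defer w.busy.Store(false)

	fmt.Println("running", task)
}

func main() {
	w := &worker{}
	w.run("compaction")
}
```
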
2 changes: 1 addition & 1 deletion index/lsm_test.go
@@ -16,7 +16,7 @@ import (
"github.com/polarsignals/frostdb/parts"
)

func parquetCompaction(compact []*parts.Part, options ...parts.Option) ([]*parts.Part, int64, int64, error) {
func parquetCompaction(compact []*parts.Part, _ ...parts.Option) ([]*parts.Part, int64, int64, error) {
b := &bytes.Buffer{}
size, err := compactParts(b, compact)
if err != nil {
2 changes: 1 addition & 1 deletion logictest/runner.go
@@ -105,7 +105,7 @@ func (r *Runner) handleCmd(ctx context.Context, c *datadriven.TestData) (string,
return "", fmt.Errorf("unknown command %s", c.Cmd)
}

func (r *Runner) handleCreateTable(ctx context.Context, c *datadriven.TestData) (string, error) {
func (r *Runner) handleCreateTable(_ context.Context, c *datadriven.TestData) (string, error) {
var schema *schemapb.Schema
for _, arg := range c.CmdArgs {
if arg.Key == "schema" {
7 changes: 3 additions & 4 deletions pqarrow/arrow.go
@@ -270,7 +270,7 @@ func (c *ParquetConverter) Convert(ctx context.Context, rg parquet.RowGroup) err
}

if _, ok := rg.(*dynparquet.MergedRowGroup); ok {
return rowBasedParquetRowGroupToArrowRecord(ctx, c.pool, rg, c.outputSchema, c.builder)
return rowBasedParquetRowGroupToArrowRecord(ctx, rg, c.outputSchema, c.builder)
}

parquetSchema := rg.Schema()
@@ -608,7 +608,6 @@ var rowBufPool = &sync.Pool{
 // record row by row. The result is appended to b.
 func rowBasedParquetRowGroupToArrowRecord(
 	ctx context.Context,
-	pool memory.Allocator,
 	rg parquet.RowGroup,
 	schema *arrow.Schema,
 	builder *builder.RecordBuilder,
@@ -852,11 +851,11 @@ func (f PreExprVisitorFunc) PreVisit(expr logicalplan.Expr) bool {
return f(expr)
}

func (f PreExprVisitorFunc) Visit(expr logicalplan.Expr) bool {
func (f PreExprVisitorFunc) Visit(_ logicalplan.Expr) bool {
return false
}

func (f PreExprVisitorFunc) PostVisit(expr logicalplan.Expr) bool {
func (f PreExprVisitorFunc) PostVisit(_ logicalplan.Expr) bool {
return false
}

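
Editor's note: here the unused memory.Allocator parameter is removed from rowBasedParquetRowGroupToArrowRecord and from its call site, rather than renamed to `_`. Both approaches silence unused-parameter warnings; dropping the parameter is an option for unexported helpers whose callers can all be updated, while the `_` renames elsewhere in this PR are on methods whose signatures are fixed by an interface. A rough sketch of that distinction, with invented types:

```go
package main

import "fmt"

// Converter is a stand-in interface: implementations cannot change this
// signature, so one that ignores the limit can only rename it to _.
type Converter interface {
	Convert(rows []string, limit int) string
}

type joinConverter struct{}

func (joinConverter) Convert(rows []string, _ int) string {
	return fmt.Sprint(rows)
}

// join is an unexported helper: every caller lives in this package, so an
// unused allocator-style parameter can simply be deleted from the signature
// instead of being kept and blanked.
func join(rows []string) string {
	return fmt.Sprint(rows)
}

func main() {
	var c Converter = joinConverter{}
	fmt.Println(c.Convert([]string{"a", "b"}, 10))
	fmt.Println(join([]string{"a", "b"}))
}
```
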
2 changes: 1 addition & 1 deletion pqarrow/arrowutils/merge.go
@@ -120,7 +120,7 @@ func (h cursorHeap) Swap(i, j int) {
h.cursors[i], h.cursors[j] = h.cursors[j], h.cursors[i]
}

func (h cursorHeap) Push(x any) {
func (h cursorHeap) Push(_ any) {
panic(
"number of cursors are known at Init time, none should ever be pushed",
)
2 changes: 1 addition & 1 deletion pqarrow/arrowutils/nullarray.go
@@ -76,6 +76,6 @@ func (n VirtualNullArray) Release() {}

func (n VirtualNullArray) String() string { return "VirtualNullArray" }

func (n VirtualNullArray) ValueStr(i int) string { return "" }
func (n VirtualNullArray) ValueStr(_ int) string { return "" }

func (n VirtualNullArray) GetOneForMarshal(_ int) any { return nil }
2 changes: 1 addition & 1 deletion pqarrow/builder/optbuilders.go
@@ -525,7 +525,7 @@ func (b *OptBooleanBuilder) Value(i int) bool {
return bitutil.BitIsSet(b.data, i)
}

func (b *OptBooleanBuilder) AppendData(data []byte) {
func (b *OptBooleanBuilder) AppendData(_ []byte) {
panic("do not use AppendData for opt boolean builder, use Append instead")
}

6 changes: 3 additions & 3 deletions pqarrow/writer/writer.go
@@ -24,7 +24,7 @@ type binaryValueWriter struct {

type NewWriterFunc func(b builder.ColumnBuilder, numValues int) ValueWriter

func NewBinaryValueWriter(b builder.ColumnBuilder, numValues int) ValueWriter {
func NewBinaryValueWriter(b builder.ColumnBuilder, _ int) ValueWriter {
return &binaryValueWriter{
b: b.(*builder.OptBinaryBuilder),
}
@@ -455,11 +455,11 @@ func NewMapWriter(b builder.ColumnBuilder, _ int) ValueWriter {
}
}

func (m *mapWriter) WritePage(p parquet.Page) error {
func (m *mapWriter) WritePage(_ parquet.Page) error {
panic("not implemented")
}

func (m *mapWriter) Write(values []parquet.Value) {
func (m *mapWriter) Write(_ []parquet.Value) {
panic("not implemented")
}

6 changes: 3 additions & 3 deletions query/expr/filter.go
@@ -16,11 +16,11 @@ func (f PreExprVisitorFunc) PreVisit(expr logicalplan.Expr) bool {
return f(expr)
}

func (f PreExprVisitorFunc) Visit(expr logicalplan.Expr) bool {
func (f PreExprVisitorFunc) Visit(_ logicalplan.Expr) bool {
return false
}

func (f PreExprVisitorFunc) PostVisit(expr logicalplan.Expr) bool {
func (f PreExprVisitorFunc) PostVisit(_ logicalplan.Expr) bool {
return false
}

@@ -38,7 +38,7 @@ type TrueNegativeFilter interface {

type AlwaysTrueFilter struct{}

func (f *AlwaysTrueFilter) Eval(p Particulate) (bool, error) {
func (f *AlwaysTrueFilter) Eval(_ Particulate) (bool, error) {
return true, nil
}

14 changes: 7 additions & 7 deletions query/logicalplan/expr.go
@@ -611,11 +611,11 @@ func (d *DurationExpr) Clone() Expr {
}
}

func (d *DurationExpr) DataType(schema *parquet.Schema) (arrow.DataType, error) {
func (d *DurationExpr) DataType(_ *parquet.Schema) (arrow.DataType, error) {
return &arrow.DurationType{}, nil
}

func (d *DurationExpr) MatchPath(path string) bool {
func (d *DurationExpr) MatchPath(_ string) bool {
return false
}

@@ -716,7 +716,7 @@ func (a *RegexpColumnMatch) Clone() Expr {
}
}

func (a *RegexpColumnMatch) DataType(s *parquet.Schema) (arrow.DataType, error) {
func (a *RegexpColumnMatch) DataType(_ *parquet.Schema) (arrow.DataType, error) {
return nil, nil
}

@@ -768,10 +768,10 @@ func (a *AllExpr) Name() string { return "all" }
 func (a *AllExpr) ColumnsUsedExprs() []Expr {
 	return []Expr{&AllExpr{}}
 }
-func (a *AllExpr) MatchColumn(columnName string) bool { return true }
-func (a *AllExpr) MatchPath(path string) bool { return true }
-func (a *AllExpr) Computed() bool { return false }
-func (a *AllExpr) Clone() Expr { return &AllExpr{} }
+func (a *AllExpr) MatchColumn(_ string) bool { return true }
+func (a *AllExpr) MatchPath(_ string) bool { return true }
+func (a *AllExpr) Computed() bool { return false }
+func (a *AllExpr) Clone() Expr { return &AllExpr{} }

 type NotExpr struct {
 	Expr Expr
24 changes: 12 additions & 12 deletions query/logicalplan/logicalplan_test.go
@@ -18,26 +18,26 @@ func (m *mockTableReader) Schema() *dynparquet.Schema {
 	return m.schema
 }

-func (m *mockTableReader) View(ctx context.Context, fn func(ctx context.Context, tx uint64) error) error {
+func (m *mockTableReader) View(_ context.Context, _ func(ctx context.Context, tx uint64) error) error {
 	return nil
 }

 func (m *mockTableReader) Iterator(
-	ctx context.Context,
-	tx uint64,
-	pool memory.Allocator,
-	callbacks []Callback,
-	iterOpts ...Option,
+	_ context.Context,
+	_ uint64,
+	_ memory.Allocator,
+	_ []Callback,
+	_ ...Option,
 ) error {
 	return nil
 }

 func (m *mockTableReader) SchemaIterator(
-	ctx context.Context,
-	tx uint64,
-	pool memory.Allocator,
-	callbacks []Callback,
-	iterOpts ...Option,
+	_ context.Context,
+	_ uint64,
+	_ memory.Allocator,
+	_ []Callback,
+	_ ...Option,
 ) error {
 	return nil
 }
@@ -46,7 +46,7 @@ type mockTableProvider struct {
 	schema *dynparquet.Schema
 }

-func (m *mockTableProvider) GetTable(name string) (TableReader, error) {
+func (m *mockTableProvider) GetTable(_ string) (TableReader, error) {
 	return &mockTableReader{
 		schema: m.schema,
 	}, nil
4 changes: 2 additions & 2 deletions query/logicalplan/validate.go
@@ -409,11 +409,11 @@ type findExpressionForTypeVisitor struct {
result Expr
}

func (v *findExpressionForTypeVisitor) PreVisit(expr Expr) bool {
func (v *findExpressionForTypeVisitor) PreVisit(_ Expr) bool {
return true
}

func (v *findExpressionForTypeVisitor) Visit(expr Expr) bool {
func (v *findExpressionForTypeVisitor) Visit(_ Expr) bool {
return true
}

2 changes: 1 addition & 1 deletion query/physicalplan/aggregate.go
@@ -305,7 +305,7 @@ func (d *durationHashCombine) hashCombine(rhs uint64) uint64 {
return rhs / d.milliseconds // floors by default
}

func (a *HashAggregate) Callback(ctx context.Context, r arrow.Record) error {
func (a *HashAggregate) Callback(_ context.Context, r arrow.Record) error {
// Generates high volume of spans. Comment out if needed during development.
// ctx, span := a.tracer.Start(ctx, "HashAggregate/Callback")
// defer span.End()
4 changes: 2 additions & 2 deletions query/physicalplan/filter.go
@@ -53,11 +53,11 @@ func (f PreExprVisitorFunc) PreVisit(expr logicalplan.Expr) bool {
return f(expr)
}

func (f PreExprVisitorFunc) Visit(expr logicalplan.Expr) bool {
func (f PreExprVisitorFunc) Visit(_ logicalplan.Expr) bool {
return false
}

func (f PreExprVisitorFunc) PostVisit(expr logicalplan.Expr) bool {
func (f PreExprVisitorFunc) PostVisit(_ logicalplan.Expr) bool {
return false
}
