From 425702e7ba153160ff27dd71a6dc33abde27b3a2 Mon Sep 17 00:00:00 2001 From: Neil O'Toole Date: Sat, 17 Dec 2022 23:07:38 -0700 Subject: [PATCH] The linting goes on forever (#119) * linting * yet more linting * yet more linting * yet more linting * yet more linting * yet more linting --- .golangci.yml | 4 +- cli/cli.go | 2 +- cli/cmd_root.go | 2 +- cli/completion.go | 9 +++-- cli/config/config.go | 2 +- cli/config/store.go | 2 +- cli/consts.go | 2 +- cli/output/jsonw/encode.go | 2 +- cli/output/markdownw/markdownw.go | 6 +-- drivers/csv/csv.go | 30 +++----------- drivers/json/import.go | 13 +++--- drivers/json/import_json.go | 6 +-- drivers/mysql/metadata.go | 7 ++-- drivers/sqlserver/metadata.go | 15 ++++--- drivers/sqlserver/sqlserver.go | 6 --- drivers/userdriver/xmlud/xmlimport.go | 4 +- drivers/xlsx/xlsx.go | 58 ++++++++++++++++++++++----- libsq/ast/node.go | 2 +- libsq/ast/segment.go | 2 +- libsq/ast/walker.go | 4 +- libsq/core/kind/kind.go | 8 ++-- libsq/core/stringz/stringz.go | 8 ++-- libsq/driver/driver.go | 2 - libsq/engine.go | 7 ++-- libsq/source/set.go | 2 +- testh/proj/proj.go | 2 +- testh/testh.go | 5 +-- testh/tutil/tutil.go | 10 ++++- 28 files changed, 121 insertions(+), 101 deletions(-) diff --git a/.golangci.yml b/.golangci.yml index 80a6e502..bc89b329 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -175,7 +175,9 @@ linters: - staticcheck # is a go vet on steroids, applying a ton of static analysis checks - typecheck # like the front-end of a Go compiler, parses and type-checks Go code - unused # checks for unused constants, variables, functions and types - ## disabled by default + + +# ## disabled by default - asasalint # checks for pass []any as any in variadic func(...any) - asciicheck # checks that your code does not contain non-ASCII identifiers - bidichk # checks for dangerous unicode character sequences diff --git a/cli/cli.go b/cli/cli.go index 4989a680..ecf64588 100644 --- a/cli/cli.go +++ b/cli/cli.go @@ -69,7 +69,7 @@ import ( "github.com/neilotoole/sq/libsq/source" ) -func init() { +func init() { //nolint:gochecknoinits cobra.EnableCommandSorting = false } diff --git a/cli/cmd_root.go b/cli/cmd_root.go index 0e69ddb3..8166f24e 100644 --- a/cli/cmd_root.go +++ b/cli/cmd_root.go @@ -3,7 +3,7 @@ package cli import ( "github.com/spf13/cobra" - // Import the providers package to initialize provider implementations + // Import the providers package to initialize provider implementations. _ "github.com/neilotoole/sq/drivers" ) diff --git a/cli/completion.go b/cli/completion.go index a036d730..872a3031 100644 --- a/cli/completion.go +++ b/cli/completion.go @@ -154,7 +154,7 @@ func (c *handleTableCompleter) complete(cmd *cobra.Command, args []string, toCom // a period. Effectively this is completion for tables in the // active src. func (c *handleTableCompleter) completeTableOnly(ctx context.Context, rc *RunContext, args []string, - toComplete string) ([]string, cobra.ShellCompDirective) { + toComplete string) ([]string, cobra.ShellCompDirective) { //nolint:unparam activeSrc := rc.Config.Sources.Active() if activeSrc == nil { rc.Log.Error("Active source is nil") @@ -191,7 +191,7 @@ func (c *handleTableCompleter) completeTableOnly(ctx context.Context, rc *RunCon // completeHandle returns suggestions given input beginning with // a '@'. The returned suggestions could be @HANDLE, or @HANDLE.TABLE. 
func (c *handleTableCompleter) completeHandle(ctx context.Context, rc *RunContext, args []string, - toComplete string) ([]string, cobra.ShellCompDirective) { + toComplete string) ([]string, cobra.ShellCompDirective) { //nolint:unparam // We're dealing with a handle. // But we could be dealing with just the handle ("@sakila_sl3") @@ -211,7 +211,8 @@ func (c *handleTableCompleter) completeHandle(ctx context.Context, rc *RunContex } if c.onlySQL { - isSQL, err := handleIsSQLDriver(rc, handle) + var isSQL bool + isSQL, err = handleIsSQLDriver(rc, handle) if err != nil { rc.Log.Error(err) return nil, cobra.ShellCompDirectiveError @@ -284,7 +285,7 @@ func (c *handleTableCompleter) completeHandle(ctx context.Context, rc *RunContex } func (c *handleTableCompleter) completeEither(ctx context.Context, rc *RunContext, args []string, - toComplete string) ([]string, cobra.ShellCompDirective) { + toComplete string) ([]string, cobra.ShellCompDirective) { //nolint:unparam // There's no input yet. // Therefore we want to return a union of all handles // plus the tables from the active source.
diff --git a/cli/config/config.go b/cli/config/config.go index a8e480ad..4e09b25d 100644 --- a/cli/config/config.go +++ b/cli/config/config.go @@ -31,7 +31,7 @@ func (c *Config) String() string { } // Ext holds additional config (extensions) loaded from other -// config files, e.g. ~/.config/sq/ext/*.sq.yml +// config files, e.g. ~/.config/sq/ext/*.sq.yml. type Ext struct { UserDrivers []*userdriver.DriverDef `yaml:"user_drivers" json:"user_drivers"` }
diff --git a/cli/config/store.go b/cli/config/store.go index d34374ca..b0575a47 100644 --- a/cli/config/store.go +++ b/cli/config/store.go @@ -202,7 +202,7 @@ func (DiscardStore) Save(*Config) error { return nil } -// Location returns /dev/null +// Location returns /dev/null. func (DiscardStore) Location() string { return "/dev/null" }
diff --git a/cli/consts.go b/cli/consts.go index 0f591db6..f6824ffc 100644 --- a/cli/consts.go +++ b/cli/consts.go @@ -1,6 +1,6 @@ package cli -// cli flags +// cli flags. const ( flagActiveSrc = "src" flagActiveSrcUsage = "Override the active source for this query"
diff --git a/cli/output/jsonw/encode.go b/cli/output/jsonw/encode.go index 692aec0f..28d258f2 100644 --- a/cli/output/jsonw/encode.go +++ b/cli/output/jsonw/encode.go @@ -252,7 +252,7 @@ func getFieldEncoders(recMeta sqlz.RecordMeta, fm *output.Formatting) []func(b [ // Else, we want color encoders enc := &colorEncoder{clrs: clrs} for i := 0; i < len(recMeta); i++ { - switch recMeta[i].Kind() { // nolint:exhaustive + switch recMeta[i].Kind() { //nolint:exhaustive case kind.Time: encodeFns[i] = enc.encodeTime case kind.Date:
diff --git a/cli/output/markdownw/markdownw.go b/cli/output/markdownw/markdownw.go index 870eac95..a0a73a63 100644 --- a/cli/output/markdownw/markdownw.go +++ b/cli/output/markdownw/markdownw.go @@ -124,8 +124,8 @@ func (w *RecordWriter) WriteRecords(recs []sqlz.Record) error { // function with a real library call at the earliest opportunity. func escapeMarkdown(s string) string { s = html.EscapeString(s) - s = strings.Replace(s, "|", "&#124;", -1) - s = strings.Replace(s, "\r\n", "<br/>", -1) - s = strings.Replace(s, "\n", "<br/>", -1) + s = strings.ReplaceAll(s, "|", "&#124;") + s = strings.ReplaceAll(s, "\r\n", "<br/>") + s = strings.ReplaceAll(s, "\n", "<br/>
") return s } diff --git a/drivers/csv/csv.go b/drivers/csv/csv.go index 1d52c23d..6eb383e9 100644 --- a/drivers/csv/csv.go +++ b/drivers/csv/csv.go @@ -68,37 +68,22 @@ func (d *driveri) DriverMetadata() driver.Metadata { // Open implements driver.Driver. func (d *driveri) Open(ctx context.Context, src *source.Source) (driver.Database, error) { dbase := &database{ - log: d.log, - src: src, - //clnup: cleanup.New(), + log: d.log, + src: src, files: d.files, } - //r, err := d.files.Open(src) - //if err != nil { - // return nil, err - //} - var err error dbase.impl, err = d.scratcher.OpenScratch(ctx, src.Handle) if err != nil { - //d.log.WarnIfCloseError(r) - //d.log.WarnIfFuncError(dbase.clnup.Run) return nil, err } err = importCSV(ctx, d.log, src, d.files.OpenFunc(src), dbase.impl) if err != nil { - //d.log.WarnIfCloseError(r) - //d.log.WarnIfFuncError(dbase.clnup.Run) return nil, err } - //err = r.Close() - //if err != nil { - // return nil, err - //} - return dbase, nil } @@ -146,10 +131,9 @@ func (d *driveri) Ping(ctx context.Context, src *source.Source) error { // database implements driver.Database. type database struct { - log lg.Log - src *source.Source - impl driver.Database - //clnup *cleanup.Cleanup + log lg.Log + src *source.Source + impl driver.Database files *source.Files } @@ -214,8 +198,6 @@ func (d *database) Close() error { d.log.Debugf("Close database: %s", d.src) return errz.Err(d.impl.Close()) - - //return errz.Combine(d.impl.Close(), d.clnup.Run()) } var ( @@ -266,7 +248,7 @@ const ( scoreMaybe float32 = 0.1 scoreProbably float32 = 0.2 // scoreYes is less than 1.0 because other detectors - // (e.g. XLSX) can be more confident + // (e.g. XLSX) can be more confident. scoreYes float32 = 0.9 ) diff --git a/drivers/json/import.go b/drivers/json/import.go index f742ca84..0a7f9f60 100644 --- a/drivers/json/import.go +++ b/drivers/json/import.go @@ -70,7 +70,7 @@ const ( rightBracket = stdj.Delim(']') // colScopeSep is used when generating flat column names. Thus - // an entity "name.first" becomes "name_first" + // an entity "name.first" becomes "name_first". colScopeSep = "_" ) @@ -255,13 +255,11 @@ func (p *processor) doAddObject(ent *entity, m map[string]any) error { detectors: map[string]*kind.Detector{}, } ent.children = append(ent.children, child) - } else { + } else if child.isArray { // Child already exists - if child.isArray { - // Safety check - return errz.Errorf("JSON entity %q previously detected as array, but now detected as object", - ent.String()) - } + // Safety check + return errz.Errorf("JSON entity %q previously detected as array, but now detected as object", + ent.String()) } err := p.doAddObject(child, val) @@ -580,7 +578,6 @@ func execInsertions(ctx context.Context, log lg.Log, drvr driver.SQLDriver, db s var err error var execer *driver.StmtExecer - //var affected int64 for _, insert := range insertions { execer, err = drvr.PrepareInsertStmt(ctx, db, insert.tbl, insert.cols, 1) diff --git a/drivers/json/import_json.go b/drivers/json/import_json.go index c4738568..bb50cbdd 100644 --- a/drivers/json/import_json.go +++ b/drivers/json/import_json.go @@ -16,7 +16,7 @@ import ( ) // DetectJSON implements source.TypeDetectFunc. -// The function returns TypeJSON for two varieties of input: +// The function returns TypeJSON for two varieties of input:. 
func DetectJSON(ctx context.Context, log lg.Log, openFn source.FileOpenFunc) (detected source.Type, score float32, err error) { var r1, r2 io.ReadCloser @@ -128,8 +128,6 @@ func DetectJSON(ctx context.Context, log lg.Log, openFn source.FileOpenFunc) (de return source.TypeNone, 0, nil } -//func detectJSONObjectsInArray(ctx context.Context, r io.Reader) - func importJSON(ctx context.Context, log lg.Log, job importJob) error { r, err := job.openFn() if err != nil { @@ -247,7 +245,7 @@ func importJSON(ctx context.Context, log lg.Log, job importJob) error { // objectsInArrayScanner scans JSON text that consists of an array of // JSON objects, returning the decoded object and the chunk of JSON -// that it was scanned from. Example input: [{a:1},{a:2},{a:3}] +// that it was scanned from. Example input: [{a:1},{a:2},{a:3}]. type objectsInArrayScanner struct { // buf will get all the data that the JSON decoder reads. // buf's role is to keep track of JSON text that has already been diff --git a/drivers/mysql/metadata.go b/drivers/mysql/metadata.go index 1c856607..ea6224f2 100644 --- a/drivers/mysql/metadata.go +++ b/drivers/mysql/metadata.go @@ -8,7 +8,6 @@ import ( "strings" "time" - "github.com/go-sql-driver/mysql" "github.com/neilotoole/errgroup" "github.com/neilotoole/lg" @@ -83,7 +82,7 @@ func recordMetaFromColumnTypes(log lg.Log, colTypes []*sql.ColumnType) sqlz.Reco // getNewRecordFunc returns a NewRecordFunc that, after interacting // with the standard driver.NewRecordFromScanRow, munges any skipped fields. -// In particular mysql.NullTime is unboxed to *time.Time, and TIME fields +// In particular sql.NullTime is unboxed to *time.Time, and TIME fields // are munged from RawBytes to string. func getNewRecordFunc(rowMeta sqlz.RecordMeta) driver.NewRecordFunc { return func(row []any) (sqlz.Record, error) { @@ -93,7 +92,7 @@ func getNewRecordFunc(rowMeta sqlz.RecordMeta) driver.NewRecordFunc { // the outside func) iterate over the column metadata, and // build a list of val elements to visit. for _, i := range skipped { - if nullTime, ok := rec[i].(*mysql.NullTime); ok { + if nullTime, ok := rec[i].(*sql.NullTime); ok { if nullTime.Valid { // Make a copy of the value t := nullTime.Time @@ -538,7 +537,7 @@ func mungeSetZeroValue(i int, rec []any, destMeta sqlz.RecordMeta) { } // canonicalTableType returns the canonical name for "BASE TABLE" -// and "VIEW" +// and "VIEW". func canonicalTableType(dbType string) string { switch dbType { default: diff --git a/drivers/sqlserver/metadata.go b/drivers/sqlserver/metadata.go index ee18e9d7..10b1efc6 100644 --- a/drivers/sqlserver/metadata.go +++ b/drivers/sqlserver/metadata.go @@ -143,13 +143,15 @@ GROUP BY database_id) AS total_size_bytes` i := i g.Go(func() error { - tblMeta, err := getTableMetadata(gctx, log, db, catalog, schema, tblNames[i], tblTypes[i]) + var tblMeta *source.TableMetadata + tblMeta, err = getTableMetadata(gctx, log, db, catalog, schema, tblNames[i], tblTypes[i]) if err != nil { if hasErrCode(err, errCodeObjectNotExist) { // This can happen if the table is dropped while // we're collecting metadata. We log a warning and continue. 
- log.Warnf("table metadata: table %q appears not to exist (continuing regardless): %v", tblNames[i], - err) + log.Warnf("table metadata: table %q appears not to exist (continuing regardless): %v", + tblNames[i], err) + return nil } return err @@ -249,11 +251,12 @@ func getTableMetadata(ctx context.Context, log lg.Log, db sqlz.DB, // REVISIT: This is all a bit messy and inconsistent with other drivers var colLength *int64 - if dbCols[i].CharMaxLength.Valid { + switch { + case dbCols[i].CharMaxLength.Valid: colLength = &dbCols[i].CharMaxLength.Int64 - } else if dbCols[i].NumericPrecision.Valid { + case dbCols[i].NumericPrecision.Valid: colLength = &dbCols[i].NumericPrecision.Int64 - } else if dbCols[i].DateTimePrecision.Valid { + case dbCols[i].DateTimePrecision.Valid: colLength = &dbCols[i].DateTimePrecision.Int64 } diff --git a/drivers/sqlserver/sqlserver.go b/drivers/sqlserver/sqlserver.go index 8c9beb70..c758a93a 100644 --- a/drivers/sqlserver/sqlserver.go +++ b/drivers/sqlserver/sqlserver.go @@ -435,12 +435,6 @@ WHERE TABLE_NAME = @p1` } return getTableMetadata(ctx, d.log, d.db, catalog, schema, tblName, tblType) - // - //srcMeta, err := d.SourceMetadata(ctx) - //if err != nil { - // return nil, err - //} - //return source.TableFromSourceMetadata(srcMeta, tblName) } // SourceMetadata implements driver.Database. diff --git a/drivers/userdriver/xmlud/xmlimport.go b/drivers/userdriver/xmlud/xmlimport.go index 2e15cb94..8617ba4a 100644 --- a/drivers/userdriver/xmlud/xmlimport.go +++ b/drivers/userdriver/xmlud/xmlimport.go @@ -516,8 +516,8 @@ func (im *importer) dbUpdate(ctx context.Context, row *rowState) error { } // Append the WHERE clause args - stmtArgs := append(updateVals, whereArgs...) - _, err = stmtExecer.Exec(ctx, stmtArgs...) + updateVals = append(updateVals, whereArgs...) + _, err = stmtExecer.Exec(ctx, updateVals...) return errz.Err(err) } diff --git a/drivers/xlsx/xlsx.go b/drivers/xlsx/xlsx.go index 92ee9e7a..5c8e5386 100644 --- a/drivers/xlsx/xlsx.go +++ b/drivers/xlsx/xlsx.go @@ -180,16 +180,8 @@ func (d *database) Source() *source.Source { return d.src } -// TableMetadata implements driver.Database. -func (d *database) TableMetadata(ctx context.Context, tblName string) (*source.TableMetadata, error) { - srcMeta, err := d.SourceMetadata(ctx) - if err != nil { - return nil, err - } - return source.TableFromSourceMetadata(srcMeta, tblName) -} - // SourceMetadata implements driver.Database. +// // TODO: the implementation of SourceMetadata is out // of sync with the way we import data. For example, empty // rows are filtered out during import, and empty columns @@ -255,6 +247,54 @@ func (d *database) SourceMetadata(ctx context.Context) (*source.Metadata, error) return meta, nil } +// TableMetadata implements driver.Database. 
+func (d *database) TableMetadata(ctx context.Context, tblName string) (*source.TableMetadata, error) { + b, err := d.files.ReadAll(d.src) + if err != nil { + return nil, errz.Err(err) + } + + xlFile, err := xlsx.OpenBinary(b) + if err != nil { + return nil, errz.Errorf("unable to open XLSX file: ", d.src.Location, err) + } + + hasHeader, _, err := options.HasHeader(d.src.Options) + if err != nil { + return nil, err + } + + for _, sheet := range xlFile.Sheets { + if sheet.Name != tblName { + continue + } + + tbl := &source.TableMetadata{Name: sheet.Name, RowCount: int64(len(sheet.Rows))} + + if hasHeader && tbl.RowCount > 0 { + tbl.RowCount-- + } + + colNames := getColNames(sheet, hasHeader) + + // TODO: Should move over to using kind.Detector + colTypes := getCellColumnTypes(sheet, hasHeader) + + for i, colType := range colTypes { + col := &source.ColMetadata{} + col.BaseType = cellTypeToString(colType) + col.ColumnType = col.BaseType + col.Position = int64(i) + col.Name = colNames[i] + tbl.Columns = append(tbl.Columns, col) + } + + return tbl, nil + } + + return nil, errz.Errorf("table %q not found", tblName) +} + // Close implements driver.Database. func (d *database) Close() error { d.log.Debugf("Close database: %s", d.src) diff --git a/libsq/ast/node.go b/libsq/ast/node.go index 31343ca8..a31580bd 100644 --- a/libsq/ast/node.go +++ b/libsq/ast/node.go @@ -46,7 +46,7 @@ type Selectable interface { } // ColExpr indicates a column selection expression such as a -// column name, or context-appropriate function (e.g. "COUNT(*)") +// column name, or context-appropriate function, e.g. "COUNT(*)". type ColExpr interface { // IsColName returns true if the expr is a column name, e.g. "uid" or "users.uid". IsColName() bool diff --git a/libsq/ast/segment.go b/libsq/ast/segment.go index a8fd219a..8a48e3f0 100644 --- a/libsq/ast/segment.go +++ b/libsq/ast/segment.go @@ -65,7 +65,7 @@ func (s *Segment) SetContext(ctx antlr.ParseTree) error { // ChildType returns the expected Type of the segment's elements, based // on the content of the segment's node's children. The type should be something -// like Selector|Func +// like Selector|Func. func (s *Segment) ChildType() (reflect.Type, error) { if len(s.Children()) == 0 { return nil, nil diff --git a/libsq/ast/walker.go b/libsq/ast/walker.go index dea45380..db48313d 100644 --- a/libsq/ast/walker.go +++ b/libsq/ast/walker.go @@ -198,7 +198,7 @@ func findWhereClause(log lg.Log, w *Walker, node Node) error { return nil } -// determineJoinTables attempts to determine the tables that a JOIN refers to +// determineJoinTables attempts to determine the tables that a JOIN refers to. func determineJoinTables(log lg.Log, w *Walker, node Node) error { // node is guaranteed to be FnJoin fnJoin, ok := node.(*Join) @@ -233,7 +233,7 @@ func determineJoinTables(log lg.Log, w *Walker, node Node) error { return nil } -// visitCheckRowRange validates the RowRange element +// visitCheckRowRange validates the RowRange element. func visitCheckRowRange(log lg.Log, w *Walker, node Node) error { // node is guaranteed to be FnJoin rr, ok := node.(*RowRange) diff --git a/libsq/core/kind/kind.go b/libsq/core/kind/kind.go index 318576f8..d13b19de 100644 --- a/libsq/core/kind/kind.go +++ b/libsq/core/kind/kind.go @@ -158,7 +158,7 @@ type Detector struct { // MungeFunc is a function that accepts a value and returns a munged // value with the appropriate Kind. 
For example, a Datetime MungeFunc -// would accept string "2020-06-11T02:50:54Z" and return a time.Time, +// would accept string "2020-06-11T02:50:54Z" and return a time.Time. type MungeFunc func(any) (any, error) // NewDetector returns a new instance. @@ -434,7 +434,7 @@ func (d *Detector) Detect() (kind Kind, mungeFn MungeFunc, err error) { return Text, nil, nil } -// delete deletes each of kinds from kd.kinds +// delete deletes each of kinds from d.kinds. func (d *Detector) delete(kinds ...Kind) { d.dirty = true for _, k := range kinds { @@ -443,8 +443,8 @@ func (d *Detector) delete(kinds ...Kind) { } // retain deletes everything from kd.kinds except items -// contains in the kinds arg. If kinds is empty, kd.kinds is -// be emptied. +// contains in the kinds arg. If kinds is empty, d.kinds is +// emptied. func (d *Detector) retain(kinds ...Kind) { d.dirty = true for k := range d.kinds { diff --git a/libsq/core/stringz/stringz.go b/libsq/core/stringz/stringz.go index f61190a7..9a50c955 100644 --- a/libsq/core/stringz/stringz.go +++ b/libsq/core/stringz/stringz.go @@ -223,9 +223,9 @@ func UniqN(length int) string { // returns "row col" if arg i is 1, otherwise returns "rows cols". func Plu(s string, i int) string { if i == 1 { - return strings.Replace(s, "(s)", "", -1) + return strings.ReplaceAll(s, "(s)", "") } - return strings.Replace(s, "(s)", "s", -1) + return strings.ReplaceAll(s, "(s)", "s") } // RepeatJoin returns a string consisting of count copies @@ -339,8 +339,8 @@ func UniqTableName(tbl string) string { tbl += suffix // paranoid sanitization - tbl = strings.Replace(tbl, "@", "_", -1) - tbl = strings.Replace(tbl, "/", "_", -1) + tbl = strings.ReplaceAll(tbl, "@", "_") + tbl = strings.ReplaceAll(tbl, "/", "_") return tbl } diff --git a/libsq/driver/driver.go b/libsq/driver/driver.go index 81d8a278..14d48228 100644 --- a/libsq/driver/driver.go +++ b/libsq/driver/driver.go @@ -361,8 +361,6 @@ func (d *Databases) Close() error { // Tuning holds tuning params. Ultimately these params // could come from user config or be dynamically calculated/adjusted? -// -// Deprecated: This is a stop-gap home for these tuning params. var Tuning = struct { // ErrgroupNumG is the numG value for errgroup.WithContextN. ErrgroupNumG int diff --git a/libsq/engine.go b/libsq/engine.go index ef470d54..bf48bc8f 100644 --- a/libsq/engine.go +++ b/libsq/engine.go @@ -72,18 +72,19 @@ func (ng *engine) prepare(ctx context.Context, qm *queryModel) error { qb.SetSelect(selectColsClause) qb.SetFrom(fromClause) - var rangeClause string - if qm.Range != nil { + var rangeClause string rangeClause, err = fragBuilder.Range(qm.Range) if err != nil { return err } + qb.SetRange(rangeClause) } if qm.Where != nil { - whereClause, err := fragBuilder.Where(qm.Where) + var whereClause string + whereClause, err = fragBuilder.Where(qm.Where) if err != nil { return err } diff --git a/libsq/source/set.go b/libsq/source/set.go index 634e7255..7528f7ce 100644 --- a/libsq/source/set.go +++ b/libsq/source/set.go @@ -38,7 +38,7 @@ func (s *Set) MarshalJSON() ([]byte, error) { return json.Marshal(s.data) } -// UnmarshalJSON implements json.Unmarshaler +// UnmarshalJSON implements json.Unmarshaler. 
func (s *Set) UnmarshalJSON(b []byte) error { s.mu.Lock() defer s.mu.Unlock() diff --git a/testh/proj/proj.go b/testh/proj/proj.go index 50534522..341b1e48 100644 --- a/testh/proj/proj.go +++ b/testh/proj/proj.go @@ -33,7 +33,7 @@ const ( var projDir string -func init() { +func init() { //nolint:gochecknoinits envar, ok := os.LookupEnv(EnvPassw) if !ok || envar == "" { err := os.Setenv(EnvPassw, DefaultPassw) diff --git a/testh/testh.go b/testh/testh.go index 3da56a08..0a1894e7 100644 --- a/testh/testh.go +++ b/testh/testh.go @@ -106,7 +106,6 @@ func (h *Helper) init() { assert.NoError(h.T, err) }) - //h.Cleanup.AddC(h.files) h.files.AddTypeDetectors(source.DetectMagicNumber) h.databases = driver.NewDatabases(log, h.registry, sqlite3.NewScratchSource) @@ -203,8 +202,7 @@ func (h *Helper) Source(handle string) *source.Source { require.NoError(t, err, "source %s was not found in %s", handle, testsrc.PathSrcsConfig) - switch src.Type { //nolint:exhaustive - case sqlite3.Type: + if src.Type == sqlite3.Type { // This could be easily generalized for CSV/XLSX etc. fpath, err := sqlite3.PathFromLocation(src) require.NoError(t, err) @@ -232,6 +230,7 @@ func (h *Helper) Source(handle string) *source.Source { src.Location = sqlite3.Prefix + destFileName } + h.srcCache[handle] = src // envDiffDB is the name of the envar that controls whether the testing diff --git a/testh/tutil/tutil.go b/testh/tutil/tutil.go index ed9df0e7..67d55c65 100644 --- a/testh/tutil/tutil.go +++ b/testh/tutil/tutil.go @@ -58,7 +58,13 @@ func StructFieldValue(fieldName string, strct any) any { } f := e.FieldByName(fieldName) - if f == zv { + if f == zv { //nolint:govet + // According to govet: + // + // reflectvaluecompare: avoid using == with reflect.Value + // + // Maybe we should be using f.IsZero instead? + panic(fmt.Sprintf("struct (%T) does not have field {%s}", strct, fieldName)) } fieldValue := f.Interface() @@ -213,7 +219,7 @@ func Name(args ...any) string { continue } - s = strings.Replace(s, "/", "_", -1) + s = strings.ReplaceAll(s, "/", "_") s = stringz.TrimLen(s, 40) // we don't want it to be too long parts = append(parts, s) }