diff --git a/.gitignore b/.gitignore index b3f7059..a1e301e 100644 --- a/.gitignore +++ b/.gitignore @@ -16,6 +16,13 @@ coverage.html *.prof +# Security scan results +*.sarif +gosec-*.json + +# Test directories +test-action/ + # Dependency directories (remove the comment below to include it) vendor/ diff --git a/action.yml b/action.yml index 42150ed..5e7f839 100644 --- a/action.yml +++ b/action.yml @@ -52,6 +52,21 @@ inputs: required: false default: 'false' + sarif-output: + description: 'Generate SARIF output for GitHub Code Scanning (requires security-events: write permission)' + required: false + default: 'false' + + pr-comment: + description: 'Post validation results as a PR comment (only works on pull_request events)' + required: false + default: 'false' + + pr-comment-compact: + description: 'Use compact format for PR comments (limits to 5 errors max)' + required: false + default: 'false' + gosqlx-version: description: 'GoSQLX version to use (default: latest)' required: false @@ -198,8 +213,8 @@ runs: echo "... 
and $((FILE_COUNT - 10)) more files" fi - # Save file list for later steps - echo "$FILES" > /tmp/gosqlx-files.txt + # Save file list for later steps (use RUNNER_TEMP for cross-platform compatibility) + echo "$FILES" > "$RUNNER_TEMP/gosqlx-files.txt" echo "file-count=$FILE_COUNT" >> $GITHUB_OUTPUT - name: Validate SQL files @@ -266,7 +281,7 @@ runs: echo "::error file=$SAFE_FILE::SQL validation failed" INVALID=$((INVALID + 1)) fi - done < /tmp/gosqlx-files.txt + done < "$RUNNER_TEMP/gosqlx-files.txt" END_TIME=$(date +%s%3N) DURATION=$((END_TIME - START_TIME)) @@ -297,6 +312,190 @@ runs: exit 1 fi + - name: Generate SARIF output + id: sarif + if: inputs.sarif-output == 'true' && steps.find-files.outputs.file-count != '0' + shell: bash + working-directory: ${{ inputs.working-directory }} + run: | + echo "::group::Generate SARIF Report" + + # Build validation command with SARIF output + CMD="$HOME/go/bin/gosqlx validate --output-format sarif --output-file gosqlx-results.sarif" + + # Add config if provided + if [ -n "${{ inputs.config }}" ]; then + if [ -f "${{ inputs.config }}" ]; then + export GOSQLX_CONFIG="${{ inputs.config }}" + fi + fi + + # Add dialect if provided + DIALECT="${{ inputs.dialect }}" + if [ -n "$DIALECT" ] && [[ "$DIALECT" =~ ^(postgresql|mysql|sqlserver|oracle|sqlite)$ ]]; then + CMD="$CMD --dialect $DIALECT" + fi + + # Add strict mode if enabled + if [ "${{ inputs.strict }}" = "true" ]; then + CMD="$CMD --strict" + fi + + # Read files and run validation to generate SARIF + cat "$RUNNER_TEMP/gosqlx-files.txt" | tr '\n' ' ' | xargs $CMD || true + + # Check if SARIF file was created + if [ -f "gosqlx-results.sarif" ]; then + echo "✓ SARIF report generated: gosqlx-results.sarif" + echo "sarif-file=gosqlx-results.sarif" >> $GITHUB_OUTPUT + else + echo "::warning::SARIF report generation failed" + fi + + echo "::endgroup::" + + - name: Upload SARIF to GitHub Code Scanning + if: inputs.sarif-output == 'true' && steps.sarif.outputs.sarif-file != '' + 
uses: github/codeql-action/upload-sarif@v3 + with: + sarif_file: ${{ inputs.working-directory }}/gosqlx-results.sarif + category: gosqlx-sql-validation + + - name: Post PR Comment + id: pr-comment + if: inputs.pr-comment == 'true' && github.event_name == 'pull_request' && steps.validate.conclusion != 'skipped' + shell: bash + working-directory: ${{ inputs.working-directory }} + env: + GH_TOKEN: ${{ github.token }} + run: | + echo "::group::Generate PR Comment" + + # Create a temporary Go program to format the validation results as a PR comment + cat > "$RUNNER_TEMP/format_comment.go" << 'SCRIPT_EOF' + package main + + import ( + "encoding/json" + "fmt" + "os" + "strings" + "time" + ) + + type FileValidationResult struct { + Path string + Valid bool + Size int64 + Error *string + } + + type ValidationResult struct { + TotalFiles int + ValidFiles int + InvalidFiles int + TotalBytes int64 + Duration string + Files []FileValidationResult + } + + func formatPRComment(result *ValidationResult, compact bool) string { + var sb strings.Builder + + duration, _ := time.ParseDuration(result.Duration) + + if compact { + if result.InvalidFiles == 0 { + sb.WriteString("## ✅ GoSQLX: All SQL files valid\n\n") + sb.WriteString(fmt.Sprintf("Validated **%d** file(s) in **%v**\n", result.ValidFiles, duration)) + } else { + sb.WriteString(fmt.Sprintf("## ❌ GoSQLX: Found issues in %d/%d files\n\n", result.InvalidFiles, result.TotalFiles)) + errorCount := 0 + maxErrors := 5 + for _, file := range result.Files { + if file.Error != nil && errorCount < maxErrors { + sb.WriteString(fmt.Sprintf("- ❌ `%s`: %s\n", file.Path, *file.Error)) + errorCount++ + } + } + if result.InvalidFiles > maxErrors { + sb.WriteString(fmt.Sprintf("\n*...and %d more error(s). 
Run locally for full details.*\n", result.InvalidFiles-maxErrors)) + } + } + sb.WriteString("\n---\n") + sb.WriteString(fmt.Sprintf("⏱️ %v", duration)) + if result.TotalFiles > 0 && duration.Seconds() > 0 { + throughput := float64(result.TotalFiles) / duration.Seconds() + sb.WriteString(fmt.Sprintf(" | 🚀 %.1f files/sec", throughput)) + } + } else { + sb.WriteString("## 🔍 GoSQLX SQL Validation Results\n\n") + if result.InvalidFiles == 0 { + sb.WriteString("### ✅ All SQL files are valid!\n\n") + sb.WriteString(fmt.Sprintf("**%d** file(s) validated successfully in **%v**\n\n", result.ValidFiles, duration)) + } else { + sb.WriteString(fmt.Sprintf("### ❌ Found issues in %d file(s)\n\n", result.InvalidFiles)) + } + sb.WriteString("| Metric | Value |\n|--------|-------|\n") + sb.WriteString(fmt.Sprintf("| Total Files | %d |\n", result.TotalFiles)) + sb.WriteString(fmt.Sprintf("| ✅ Valid | %d |\n", result.ValidFiles)) + sb.WriteString(fmt.Sprintf("| ❌ Invalid | %d |\n", result.InvalidFiles)) + sb.WriteString(fmt.Sprintf("| ⏱️ Duration | %v |\n", duration)) + if result.TotalFiles > 0 && duration.Seconds() > 0 { + throughput := float64(result.TotalFiles) / duration.Seconds() + sb.WriteString(fmt.Sprintf("| 🚀 Throughput | %.1f files/sec |\n", throughput)) + } + sb.WriteString("\n") + if result.InvalidFiles > 0 { + sb.WriteString("### 📋 Validation Errors\n\n") + for _, file := range result.Files { + if file.Error != nil { + sb.WriteString(fmt.Sprintf("#### ❌ `%s`\n\n```\n%s\n```\n\n", file.Path, *file.Error)) + } + } + } + sb.WriteString("---\n*Powered by [GoSQLX](https://github.com/ajitpratap0/GoSQLX) - Ultra-fast SQL validation (100x faster than SQLFluff)*\n") + } + return sb.String() + } + + func main() { + var result ValidationResult + if err := json.NewDecoder(os.Stdin).Decode(&result); err != nil { + fmt.Fprintf(os.Stderr, "Error decoding JSON: %v\n", err) + os.Exit(1) + } + compact := len(os.Args) > 1 && os.Args[1] == "compact" + fmt.Print(formatPRComment(&result, 
compact)) + } + SCRIPT_EOF + + # Create JSON from validation results + cat > "$RUNNER_TEMP/validation_results.json" << JSON_EOF + { + "TotalFiles": $(( ${{ steps.validate.outputs.validated-files || 0 }} + ${{ steps.validate.outputs.invalid-files || 0 }} )), + "ValidFiles": ${{ steps.validate.outputs.validated-files || 0 }}, + "InvalidFiles": ${{ steps.validate.outputs.invalid-files || 0 }}, + "Duration": "${{ steps.validate.outputs.validation-time || 0 }}ms", + "Files": [] + } + JSON_EOF + + # Format the compact argument + COMPACT_ARG="" + if [ "${{ inputs.pr-comment-compact }}" = "true" ]; then + COMPACT_ARG="compact" + fi + + # Generate comment using Go script + COMMENT_BODY=$(go run "$RUNNER_TEMP/format_comment.go" $COMPACT_ARG < "$RUNNER_TEMP/validation_results.json") + + # Post comment to PR using gh CLI + echo "$COMMENT_BODY" | gh pr comment ${{ github.event.pull_request.number }} --body-file - + + echo "✓ Posted validation results to PR #${{ github.event.pull_request.number }}" + echo "::endgroup::" + - name: Check SQL formatting id: format-check if: inputs.format-check == 'true' && steps.find-files.outputs.file-count != '0' @@ -317,7 +516,7 @@ runs: else echo "✓ Properly formatted: $file" fi - done < /tmp/gosqlx-files.txt + done < "$RUNNER_TEMP/gosqlx-files.txt" echo "::endgroup::" @@ -357,7 +556,7 @@ runs: echo "⚠ Issues found: $file" LINT_ISSUES=$((LINT_ISSUES + 1)) fi - done < /tmp/gosqlx-files.txt + done < "$RUNNER_TEMP/gosqlx-files.txt" echo "::endgroup::" @@ -375,4 +574,4 @@ runs: if: always() shell: bash run: | - rm -f /tmp/gosqlx-files.txt + rm -f "$RUNNER_TEMP/gosqlx-files.txt" "$RUNNER_TEMP/format_comment.go" "$RUNNER_TEMP/validation_results.json" diff --git a/cmd/gosqlx/cmd/format.go b/cmd/gosqlx/cmd/format.go index 243031a..cf66d7b 100644 --- a/cmd/gosqlx/cmd/format.go +++ b/cmd/gosqlx/cmd/format.go @@ -67,7 +67,7 @@ func formatRun(cmd *cobra.Command, args []string) error { Check: formatCheck, MaxLine: formatMaxLine, Verbose: verbose, - Output: output, 
+ Output: outputFile, }) // Create formatter with injectable output writers diff --git a/cmd/gosqlx/cmd/root.go b/cmd/gosqlx/cmd/root.go index b182a00..0d8174b 100644 --- a/cmd/gosqlx/cmd/root.go +++ b/cmd/gosqlx/cmd/root.go @@ -4,11 +4,14 @@ import ( "github.com/spf13/cobra" ) +// Version is the current version of gosqlx CLI +var Version = "1.4.0" + var ( // Global flags - verbose bool - output string - format string + verbose bool + outputFile string + format string ) // rootCmd represents the base command when called without any subcommands @@ -42,6 +45,6 @@ func Execute() error { func init() { // Global flags rootCmd.PersistentFlags().BoolVarP(&verbose, "verbose", "v", false, "enable verbose output") - rootCmd.PersistentFlags().StringVarP(&output, "output", "o", "", "output file (default: stdout)") + rootCmd.PersistentFlags().StringVarP(&outputFile, "output", "o", "", "output file (default: stdout)") rootCmd.PersistentFlags().StringVarP(&format, "format", "f", "auto", "output format: json, yaml, table, tree, auto") } diff --git a/cmd/gosqlx/cmd/validate.go b/cmd/gosqlx/cmd/validate.go index c9d3b68..663213e 100644 --- a/cmd/gosqlx/cmd/validate.go +++ b/cmd/gosqlx/cmd/validate.go @@ -1,21 +1,25 @@ package cmd import ( + "fmt" "os" "github.com/spf13/cobra" "github.com/spf13/pflag" "github.com/ajitpratap0/GoSQLX/cmd/gosqlx/internal/config" + "github.com/ajitpratap0/GoSQLX/cmd/gosqlx/internal/output" ) var ( - validateRecursive bool - validatePattern string - validateQuiet bool - validateStats bool - validateDialect string - validateStrict bool + validateRecursive bool + validatePattern string + validateQuiet bool + validateStats bool + validateDialect string + validateStrict bool + validateOutputFormat string + validateOutputFile string ) // validateCmd represents the validate command @@ -31,6 +35,12 @@ Examples: gosqlx validate -r ./queries/ # Recursively validate directory gosqlx validate --quiet query.sql # Quiet mode (exit code only) gosqlx validate --stats 
./queries/ # Show performance statistics + gosqlx validate --output-format sarif --output-file results.sarif queries/ # SARIF output for GitHub Code Scanning + +Output Formats: + text - Human-readable output (default) + json - JSON format for programmatic consumption + sarif - SARIF 2.1.0 format for GitHub Code Scanning integration Performance Target: <10ms for typical queries (50-500 characters) Throughput: 100+ files/second in batch mode`, @@ -46,6 +56,11 @@ func validateRun(cmd *cobra.Command, args []string) error { cfg = config.DefaultConfig() } + // Validate output format + if validateOutputFormat != "" && validateOutputFormat != "text" && validateOutputFormat != "json" && validateOutputFormat != "sarif" { + return fmt.Errorf("invalid output format: %s (valid options: text, json, sarif)", validateOutputFormat) + } + // Track which flags were explicitly set flagsChanged := make(map[string]bool) cmd.Flags().Visit(func(f *pflag.Flag) { @@ -58,10 +73,13 @@ func validateRun(cmd *cobra.Command, args []string) error { } // Create validator options from config and flags + // When outputting SARIF, automatically enable quiet mode to avoid mixing output + quietMode := validateQuiet || validateOutputFormat == "sarif" + opts := ValidatorOptionsFromConfig(cfg, flagsChanged, ValidatorFlags{ Recursive: validateRecursive, Pattern: validatePattern, - Quiet: validateQuiet, + Quiet: quietMode, ShowStats: validateStats, Dialect: validateDialect, StrictMode: validateStrict, @@ -77,6 +95,31 @@ func validateRun(cmd *cobra.Command, args []string) error { return err } + // Handle different output formats + if validateOutputFormat == "sarif" { + // Generate SARIF output + sarifData, err := output.FormatSARIF(result, Version) + if err != nil { + return fmt.Errorf("failed to generate SARIF output: %w", err) + } + + // Write SARIF output to file or stdout + if validateOutputFile != "" { + if err := os.WriteFile(validateOutputFile, sarifData, 0600); err != nil { + return fmt.Errorf("failed 
to write SARIF output: %w", err) + } + if !opts.Quiet { + fmt.Fprintf(cmd.OutOrStdout(), "SARIF output written to %s\n", validateOutputFile) + } + } else { + fmt.Fprint(cmd.OutOrStdout(), string(sarifData)) + } + } else if validateOutputFormat == "json" { + // JSON output format will be implemented later + return fmt.Errorf("JSON output format not yet implemented") + } + // Default text output is already handled by the validator + // Exit with error code if there were invalid files if result.InvalidFiles > 0 { os.Exit(1) @@ -94,4 +137,6 @@ func init() { validateCmd.Flags().BoolVarP(&validateStats, "stats", "s", false, "show performance statistics") validateCmd.Flags().StringVar(&validateDialect, "dialect", "", "SQL dialect: postgresql, mysql, sqlserver, oracle, sqlite (config: validate.dialect)") validateCmd.Flags().BoolVar(&validateStrict, "strict", false, "enable strict validation mode (config: validate.strict_mode)") + validateCmd.Flags().StringVar(&validateOutputFormat, "output-format", "text", "output format: text, json, sarif") + validateCmd.Flags().StringVar(&validateOutputFile, "output-file", "", "output file path (default: stdout)") } diff --git a/cmd/gosqlx/cmd/validator.go b/cmd/gosqlx/cmd/validator.go index 62fc8ec..0f8b7dd 100644 --- a/cmd/gosqlx/cmd/validator.go +++ b/cmd/gosqlx/cmd/validator.go @@ -9,6 +9,7 @@ import ( "time" "github.com/ajitpratap0/GoSQLX/cmd/gosqlx/internal/config" + "github.com/ajitpratap0/GoSQLX/cmd/gosqlx/internal/output" "github.com/ajitpratap0/GoSQLX/pkg/sql/ast" "github.com/ajitpratap0/GoSQLX/pkg/sql/parser" "github.com/ajitpratap0/GoSQLX/pkg/sql/tokenizer" @@ -32,24 +33,6 @@ type Validator struct { Opts ValidatorOptions } -// ValidationResult contains the results of a validation run -type ValidationResult struct { - TotalFiles int - ValidFiles int - InvalidFiles int - TotalBytes int64 - Duration time.Duration - Files []FileValidationResult -} - -// FileValidationResult contains the result for a single file -type 
FileValidationResult struct { - Path string - Valid bool - Size int64 - Error error -} - // NewValidator creates a new Validator with the given options func NewValidator(out, err io.Writer, opts ValidatorOptions) *Validator { return &Validator{ @@ -60,7 +43,7 @@ func NewValidator(out, err io.Writer, opts ValidatorOptions) *Validator { } // Validate validates the given SQL files or patterns -func (v *Validator) Validate(args []string) (*ValidationResult, error) { +func (v *Validator) Validate(args []string) (*output.ValidationResult, error) { startTime := time.Now() // Expand file arguments (glob patterns, directories, etc.) @@ -73,8 +56,8 @@ func (v *Validator) Validate(args []string) (*ValidationResult, error) { return nil, fmt.Errorf("no SQL files found matching the specified patterns") } - result := &ValidationResult{ - Files: make([]FileValidationResult, 0, len(files)), + result := &output.ValidationResult{ + Files: make([]output.FileValidationResult, 0, len(files)), } // Validate each file @@ -116,8 +99,8 @@ func (v *Validator) Validate(args []string) (*ValidationResult, error) { } // validateFile validates a single SQL file -func (v *Validator) validateFile(filename string) FileValidationResult { - result := FileValidationResult{ +func (v *Validator) validateFile(filename string) output.FileValidationResult { + result := output.FileValidationResult{ Path: filename, } @@ -235,7 +218,7 @@ func (v *Validator) isDirectory(path string) bool { } // displayStats displays validation statistics -func (v *Validator) displayStats(result *ValidationResult) { +func (v *Validator) displayStats(result *output.ValidationResult) { fmt.Fprintf(v.Out, "\n📊 Validation Statistics:\n") fmt.Fprintf(v.Out, " Files processed: %d\n", result.TotalFiles) fmt.Fprintf(v.Out, " Valid files: %d\n", result.ValidFiles) @@ -296,7 +279,8 @@ func ValidatorOptionsFromConfig(cfg *config.Config, flagsChanged map[string]bool if flagsChanged["pattern"] { opts.Pattern = flags.Pattern } - if 
flagsChanged["quiet"] { + // Always use quiet flag value (may be set programmatically for SARIF output) + if flagsChanged["quiet"] || flags.Quiet { opts.Quiet = flags.Quiet } if flagsChanged["stats"] { diff --git a/cmd/gosqlx/cmd/validator_test.go b/cmd/gosqlx/cmd/validator_test.go index 3c670f2..a6449c2 100644 --- a/cmd/gosqlx/cmd/validator_test.go +++ b/cmd/gosqlx/cmd/validator_test.go @@ -7,6 +7,8 @@ import ( "runtime" "strings" "testing" + + "github.com/ajitpratap0/GoSQLX/cmd/gosqlx/internal/output" ) // TestValidator_ValidateFile tests single file validation @@ -394,7 +396,7 @@ func TestValidator_DisplayStats(t *testing.T) { var buf bytes.Buffer validator := NewValidator(&buf, &buf, ValidatorOptions{}) - result := &ValidationResult{ + result := &output.ValidationResult{ TotalFiles: 10, ValidFiles: 8, InvalidFiles: 2, diff --git a/cmd/gosqlx/internal/output/pr_comment.go b/cmd/gosqlx/internal/output/pr_comment.go new file mode 100644 index 0000000..60717f6 --- /dev/null +++ b/cmd/gosqlx/internal/output/pr_comment.go @@ -0,0 +1,102 @@ +package output + +import ( + "fmt" + "strings" +) + +// FormatPRComment formats validation results as a GitHub PR comment with markdown +func FormatPRComment(result *ValidationResult) string { + var sb strings.Builder + + // Header + sb.WriteString("## 🔍 GoSQLX SQL Validation Results\n\n") + + // Summary section + if result.InvalidFiles == 0 { + sb.WriteString("### ✅ All SQL files are valid!\n\n") + sb.WriteString(fmt.Sprintf("**%d** file(s) validated successfully in **%v**\n\n", + result.ValidFiles, result.Duration)) + } else { + sb.WriteString(fmt.Sprintf("### ❌ Found issues in **%d** file(s)\n\n", result.InvalidFiles)) + } + + // Statistics table + sb.WriteString("| Metric | Value |\n") + sb.WriteString("|--------|-------|\n") + sb.WriteString(fmt.Sprintf("| Total Files | %d |\n", result.TotalFiles)) + sb.WriteString(fmt.Sprintf("| ✅ Valid | %d |\n", result.ValidFiles)) + sb.WriteString(fmt.Sprintf("| ❌ Invalid | %d |\n", 
result.InvalidFiles)) + sb.WriteString(fmt.Sprintf("| ⏱️ Duration | %v |\n", result.Duration)) + + if result.TotalFiles > 0 && result.Duration.Seconds() > 0 { + throughput := float64(result.TotalFiles) / result.Duration.Seconds() + sb.WriteString(fmt.Sprintf("| 🚀 Throughput | %.1f files/sec |\n", throughput)) + } + + sb.WriteString("\n") + + // Detailed errors section + if result.InvalidFiles > 0 { + sb.WriteString("### 📋 Validation Errors\n\n") + + for _, file := range result.Files { + if file.Error != nil { + // File header with error icon + sb.WriteString(fmt.Sprintf("#### ❌ `%s`\n\n", file.Path)) + + // Error details in a code block + sb.WriteString("```\n") + sb.WriteString(file.Error.Error()) + sb.WriteString("\n```\n\n") + } + } + } + + // Footer + sb.WriteString("---\n") + sb.WriteString("*Powered by [GoSQLX](https://github.com/ajitpratap0/GoSQLX) - ") + sb.WriteString("Ultra-fast SQL validation (100x faster than SQLFluff)*\n") + + return sb.String() +} + +// FormatPRCommentCompact formats validation results as a compact PR comment +// Useful for large validation runs to avoid overly long comments +func FormatPRCommentCompact(result *ValidationResult, maxErrors int) string { + var sb strings.Builder + + // Header with summary + if result.InvalidFiles == 0 { + sb.WriteString("## ✅ GoSQLX: All SQL files valid\n\n") + sb.WriteString(fmt.Sprintf("Validated **%d** file(s) in **%v**\n", + result.ValidFiles, result.Duration)) + } else { + sb.WriteString(fmt.Sprintf("## ❌ GoSQLX: Found issues in %d/%d files\n\n", + result.InvalidFiles, result.TotalFiles)) + + // Show limited errors + errorCount := 0 + for _, file := range result.Files { + if file.Error != nil && errorCount < maxErrors { + sb.WriteString(fmt.Sprintf("- ❌ `%s`: %s\n", file.Path, file.Error.Error())) + errorCount++ + } + } + + // Show truncation message if needed + if result.InvalidFiles > maxErrors { + remaining := result.InvalidFiles - maxErrors + sb.WriteString(fmt.Sprintf("\n*...and %d more 
error(s). Run locally for full details.*\n", remaining)) + } + } + + sb.WriteString("\n---\n") + sb.WriteString(fmt.Sprintf("⏱️ %v | ", result.Duration)) + if result.TotalFiles > 0 && result.Duration.Seconds() > 0 { + throughput := float64(result.TotalFiles) / result.Duration.Seconds() + sb.WriteString(fmt.Sprintf("🚀 %.1f files/sec", throughput)) + } + + return sb.String() +} diff --git a/cmd/gosqlx/internal/output/pr_comment_test.go b/cmd/gosqlx/internal/output/pr_comment_test.go new file mode 100644 index 0000000..b28328d --- /dev/null +++ b/cmd/gosqlx/internal/output/pr_comment_test.go @@ -0,0 +1,276 @@ +package output + +import ( + "errors" + "strings" + "testing" + "time" +) + +func TestFormatPRComment(t *testing.T) { + tests := []struct { + name string + result *ValidationResult + wantContains []string + wantNotContains []string + }{ + { + name: "all files valid", + result: &ValidationResult{ + TotalFiles: 5, + ValidFiles: 5, + InvalidFiles: 0, + Duration: 100 * time.Millisecond, + Files: []FileValidationResult{ + {Path: "query1.sql", Valid: true}, + {Path: "query2.sql", Valid: true}, + {Path: "query3.sql", Valid: true}, + {Path: "query4.sql", Valid: true}, + {Path: "query5.sql", Valid: true}, + }, + }, + wantContains: []string{ + "All SQL files are valid", + "**5** file(s) validated successfully", + "Total Files | 5", + "✅ Valid | 5", + "❌ Invalid | 0", + }, + wantNotContains: []string{ + "Validation Errors", + "❌ `", + }, + }, + { + name: "files with errors", + result: &ValidationResult{ + TotalFiles: 3, + ValidFiles: 1, + InvalidFiles: 2, + Duration: 50 * time.Millisecond, + Files: []FileValidationResult{ + {Path: "valid.sql", Valid: true}, + { + Path: "error1.sql", + Valid: false, + Error: errors.New("parsing failed: unexpected token"), + }, + { + Path: "error2.sql", + Valid: false, + Error: errors.New("tokenization failed: invalid character"), + }, + }, + }, + wantContains: []string{ + "Found issues in **2** file(s)", + "Total Files | 3", + "✅ Valid | 
1", + "❌ Invalid | 2", + "Validation Errors", + "❌ `error1.sql`", + "parsing failed: unexpected token", + "❌ `error2.sql`", + "tokenization failed: invalid character", + }, + }, + { + name: "single error", + result: &ValidationResult{ + TotalFiles: 1, + ValidFiles: 0, + InvalidFiles: 1, + Duration: 10 * time.Millisecond, + Files: []FileValidationResult{ + { + Path: "bad.sql", + Valid: false, + Error: errors.New("syntax error on line 5"), + }, + }, + }, + wantContains: []string{ + "Found issues in **1** file(s)", + "❌ `bad.sql`", + "syntax error on line 5", + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := FormatPRComment(tt.result) + + // Check for required content + for _, want := range tt.wantContains { + if !strings.Contains(result, want) { + t.Errorf("FormatPRComment() missing expected content: %q\nGot:\n%s", want, result) + } + } + + // Check for prohibited content + for _, notWant := range tt.wantNotContains { + if strings.Contains(result, notWant) { + t.Errorf("FormatPRComment() contains unexpected content: %q\nGot:\n%s", notWant, result) + } + } + + // Verify it's valid markdown-ish + if !strings.Contains(result, "##") { + t.Error("FormatPRComment() missing markdown headers") + } + + if !strings.Contains(result, "GoSQLX") { + t.Error("FormatPRComment() missing branding") + } + }) + } +} + +func TestFormatPRCommentCompact(t *testing.T) { + tests := []struct { + name string + result *ValidationResult + maxErrors int + wantContains []string + wantNotContains []string + }{ + { + name: "all files valid compact", + result: &ValidationResult{ + TotalFiles: 10, + ValidFiles: 10, + InvalidFiles: 0, + Duration: 200 * time.Millisecond, + Files: make([]FileValidationResult, 10), + }, + maxErrors: 5, + wantContains: []string{ + "All SQL files valid", + "Validated **10** file(s)", + }, + wantNotContains: []string{ + "Found issues", + }, + }, + { + name: "multiple errors with truncation", + result: &ValidationResult{ + 
TotalFiles: 10, + ValidFiles: 5, + InvalidFiles: 5, + Duration: 100 * time.Millisecond, + Files: []FileValidationResult{ + {Path: "valid1.sql", Valid: true}, + {Path: "valid2.sql", Valid: true}, + {Path: "valid3.sql", Valid: true}, + {Path: "valid4.sql", Valid: true}, + {Path: "valid5.sql", Valid: true}, + {Path: "error1.sql", Valid: false, Error: errors.New("error 1")}, + {Path: "error2.sql", Valid: false, Error: errors.New("error 2")}, + {Path: "error3.sql", Valid: false, Error: errors.New("error 3")}, + {Path: "error4.sql", Valid: false, Error: errors.New("error 4")}, + {Path: "error5.sql", Valid: false, Error: errors.New("error 5")}, + }, + }, + maxErrors: 3, + wantContains: []string{ + "Found issues in 5/10 files", + "❌ `error1.sql`: error 1", + "❌ `error2.sql`: error 2", + "❌ `error3.sql`: error 3", + "and 2 more error(s)", + }, + wantNotContains: []string{ + "error4.sql", + "error5.sql", + }, + }, + { + name: "errors within limit", + result: &ValidationResult{ + TotalFiles: 3, + ValidFiles: 1, + InvalidFiles: 2, + Duration: 50 * time.Millisecond, + Files: []FileValidationResult{ + {Path: "valid.sql", Valid: true}, + {Path: "error1.sql", Valid: false, Error: errors.New("error 1")}, + {Path: "error2.sql", Valid: false, Error: errors.New("error 2")}, + }, + }, + maxErrors: 5, + wantContains: []string{ + "Found issues in 2/3 files", + "❌ `error1.sql`: error 1", + "❌ `error2.sql`: error 2", + }, + wantNotContains: []string{ + "more error(s)", + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := FormatPRCommentCompact(tt.result, tt.maxErrors) + + // Check for required content + for _, want := range tt.wantContains { + if !strings.Contains(result, want) { + t.Errorf("FormatPRCommentCompact() missing expected content: %q\nGot:\n%s", want, result) + } + } + + // Check for prohibited content + for _, notWant := range tt.wantNotContains { + if strings.Contains(result, notWant) { + t.Errorf("FormatPRCommentCompact() contains 
unexpected content: %q\nGot:\n%s", notWant, result) + } + } + + // Verify it's more compact than full format + if strings.Count(result, "\n") > 20 { + t.Error("FormatPRCommentCompact() should be more compact") + } + }) + } +} + +func TestPRCommentMarkdownStructure(t *testing.T) { + result := &ValidationResult{ + TotalFiles: 2, + ValidFiles: 1, + InvalidFiles: 1, + Duration: 25 * time.Millisecond, + Files: []FileValidationResult{ + {Path: "valid.sql", Valid: true}, + {Path: "error.sql", Valid: false, Error: errors.New("test error")}, + }, + } + + comment := FormatPRComment(result) + + // Verify markdown structure + tests := []struct { + name string + pattern string + }{ + {"has level 2 header", "## "}, + {"has level 3 header", "### "}, + {"has level 4 header", "#### "}, + {"has table", "| Metric | Value |"}, + {"has table separator", "|--------|-------|"}, + {"has code block", "```"}, + {"has horizontal rule", "---"}, + {"has bold text", "**"}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if !strings.Contains(comment, tt.pattern) { + t.Errorf("FormatPRComment() missing markdown element: %s", tt.name) + } + }) + } +} diff --git a/cmd/gosqlx/internal/output/sarif.go b/cmd/gosqlx/internal/output/sarif.go new file mode 100644 index 0000000..f48b68b --- /dev/null +++ b/cmd/gosqlx/internal/output/sarif.go @@ -0,0 +1,269 @@ +package output + +import ( + "crypto/sha256" + "encoding/hex" + "encoding/json" + "fmt" + "path/filepath" + "strings" + "time" +) + +// ValidationResult contains the results of a validation run +type ValidationResult struct { + TotalFiles int + ValidFiles int + InvalidFiles int + TotalBytes int64 + Duration time.Duration + Files []FileValidationResult +} + +// FileValidationResult contains the result for a single file +type FileValidationResult struct { + Path string + Valid bool + Size int64 + Error error +} + +// SARIF represents a SARIF 2.1.0 document +type SARIF struct { + Schema string `json:"$schema"` + Version 
string `json:"version"` + Runs []SARIFRun `json:"runs"` +} + +// SARIFRun represents a single analysis run +type SARIFRun struct { + Tool SARIFTool `json:"tool"` + Results []SARIFResult `json:"results"` +} + +// SARIFTool describes the analysis tool +type SARIFTool struct { + Driver SARIFDriver `json:"driver"` +} + +// SARIFDriver contains tool information +type SARIFDriver struct { + Name string `json:"name"` + Version string `json:"version,omitempty"` + InformationURI string `json:"informationUri,omitempty"` + Rules []SARIFRule `json:"rules,omitempty"` + SemanticVersion string `json:"semanticVersion,omitempty"` +} + +// SARIFRule describes a validation rule +type SARIFRule struct { + ID string `json:"id"` + Name string `json:"name,omitempty"` + ShortDescription SARIFMessage `json:"shortDescription,omitempty"` + FullDescription SARIFMessage `json:"fullDescription,omitempty"` + Help SARIFMessage `json:"help,omitempty"` + DefaultLevel string `json:"defaultConfiguration,omitempty"` + Properties map[string]interface{} `json:"properties,omitempty"` +} + +// SARIFResult represents a single finding +type SARIFResult struct { + RuleID string `json:"ruleId"` + Level string `json:"level"` + Message SARIFMessage `json:"message"` + Locations []SARIFLocation `json:"locations"` + PartialFingerprints map[string]string `json:"partialFingerprints,omitempty"` +} + +// SARIFMessage contains text content +type SARIFMessage struct { + Text string `json:"text"` +} + +// SARIFLocation specifies where a result was found +type SARIFLocation struct { + PhysicalLocation SARIFPhysicalLocation `json:"physicalLocation"` +} + +// SARIFPhysicalLocation provides file and region information +type SARIFPhysicalLocation struct { + ArtifactLocation SARIFArtifactLocation `json:"artifactLocation"` + Region SARIFRegion `json:"region"` +} + +// SARIFArtifactLocation identifies the file +type SARIFArtifactLocation struct { + URI string `json:"uri"` + URIBaseID string `json:"uriBaseId,omitempty"` +} + +// 
SARIFRegion specifies the location within a file +type SARIFRegion struct { + StartLine int `json:"startLine"` + StartColumn int `json:"startColumn,omitempty"` + EndLine int `json:"endLine,omitempty"` + EndColumn int `json:"endColumn,omitempty"` +} + +// FormatSARIF converts validation results to SARIF 2.1.0 format +func FormatSARIF(result *ValidationResult, toolVersion string) ([]byte, error) { + // Create SARIF document + sarif := &SARIF{ + Schema: "https://json.schemastore.org/sarif-2.1.0.json", + Version: "2.1.0", + Runs: []SARIFRun{ + { + Tool: SARIFTool{ + Driver: SARIFDriver{ + Name: "GoSQLX", + Version: toolVersion, + SemanticVersion: toolVersion, + InformationURI: "https://github.com/ajitpratap0/GoSQLX", + Rules: []SARIFRule{ + { + ID: "sql-syntax-error", + Name: "SQL Syntax Error", + ShortDescription: SARIFMessage{ + Text: "SQL syntax validation failed", + }, + FullDescription: SARIFMessage{ + Text: "The SQL file contains syntax errors that prevent it from being parsed correctly. This may cause runtime errors or unexpected behavior when executed.", + }, + Help: SARIFMessage{ + Text: "Review the SQL syntax and fix any errors. Common issues include missing keywords, incorrect punctuation, or invalid SQL constructs.", + }, + Properties: map[string]interface{}{ + "category": "sql-validation", + "tags": []string{"sql", "syntax", "validation"}, + }, + }, + { + ID: "sql-parsing-error", + Name: "SQL Parsing Error", + ShortDescription: SARIFMessage{ + Text: "SQL parsing failed", + }, + FullDescription: SARIFMessage{ + Text: "The SQL file could not be parsed successfully. 
This indicates structural issues with the SQL statement.", + }, + Help: SARIFMessage{ + Text: "Verify the SQL structure is correct and follows the expected SQL dialect syntax.", + }, + Properties: map[string]interface{}{ + "category": "sql-validation", + "tags": []string{"sql", "parsing", "validation"}, + }, + }, + { + ID: "sql-tokenization-error", + Name: "SQL Tokenization Error", + ShortDescription: SARIFMessage{ + Text: "SQL tokenization failed", + }, + FullDescription: SARIFMessage{ + Text: "The SQL file could not be tokenized. This usually indicates invalid characters or malformed SQL syntax.", + }, + Help: SARIFMessage{ + Text: "Check for invalid characters, unmatched quotes, or other tokenization issues in the SQL file.", + }, + Properties: map[string]interface{}{ + "category": "sql-validation", + "tags": []string{"sql", "tokenization", "validation"}, + }, + }, + }, + }, + }, + Results: []SARIFResult{}, + }, + }, + } + + // Add results for invalid files + for _, fileResult := range result.Files { + if fileResult.Error != nil { + sarifResult := createSARIFResult(fileResult) + sarif.Runs[0].Results = append(sarif.Runs[0].Results, sarifResult) + } + } + + // Marshal to JSON with indentation for readability + data, err := json.MarshalIndent(sarif, "", " ") + if err != nil { + return nil, fmt.Errorf("failed to marshal SARIF: %w", err) + } + + return data, nil +} + +// createSARIFResult creates a SARIF result from a file validation result +func createSARIFResult(fileResult FileValidationResult) SARIFResult { + // Determine rule ID based on error message + ruleID := "sql-syntax-error" + errorMsg := fileResult.Error.Error() + + if strings.Contains(errorMsg, "tokenization") { + ruleID = "sql-tokenization-error" + } else if strings.Contains(errorMsg, "parsing") { + ruleID = "sql-parsing-error" + } + + // Normalize path to relative path + relPath := fileResult.Path + if filepath.IsAbs(relPath) { + // Try to make it relative to current directory + if rel, err := 
filepath.Rel(".", relPath); err == nil { + relPath = rel + } + } + + // Create partial fingerprint for deduplication + fingerprint := generateFingerprint(relPath, ruleID, errorMsg) + + return SARIFResult{ + RuleID: ruleID, + Level: "error", + Message: SARIFMessage{ + Text: errorMsg, + }, + Locations: []SARIFLocation{ + { + PhysicalLocation: SARIFPhysicalLocation{ + ArtifactLocation: SARIFArtifactLocation{ + URI: normalizeURI(relPath), + URIBaseID: "%SRCROOT%", + }, + Region: SARIFRegion{ + StartLine: 1, // Default to line 1 since we don't have line info yet + }, + }, + }, + }, + PartialFingerprints: map[string]string{ + "primaryLocationLineHash": fingerprint, + }, + } +} + +// generateFingerprint creates a unique fingerprint for result deduplication +func generateFingerprint(path, ruleID, message string) string { + // Create a hash from the combination of path, rule, and message + h := sha256.New() + h.Write([]byte(path)) + h.Write([]byte(ruleID)) + h.Write([]byte(message)) + hash := h.Sum(nil) + return hex.EncodeToString(hash[:8]) // Use first 8 bytes for shorter fingerprint +} + +// normalizeURI converts file paths to URI format with forward slashes +func normalizeURI(path string) string { + // Convert backslashes to forward slashes for Windows compatibility + // Note: filepath.ToSlash only converts on Windows, so we do it manually for consistency + normalized := strings.ReplaceAll(path, "\\", "/") + + // Remove leading ./ if present + normalized = strings.TrimPrefix(normalized, "./") + + return normalized +} diff --git a/cmd/gosqlx/internal/output/sarif_test.go b/cmd/gosqlx/internal/output/sarif_test.go new file mode 100644 index 0000000..e68791e --- /dev/null +++ b/cmd/gosqlx/internal/output/sarif_test.go @@ -0,0 +1,408 @@ +package output + +import ( + "encoding/json" + "errors" + "strings" + "testing" + "time" +) + +func TestFormatSARIF(t *testing.T) { + tests := []struct { + name string + result *ValidationResult + version string + wantSchema string + 
wantRules int + wantErrors int + }{ + { + name: "single syntax error", + result: &ValidationResult{ + TotalFiles: 1, + ValidFiles: 0, + InvalidFiles: 1, + Duration: time.Second, + Files: []FileValidationResult{ + { + Path: "test.sql", + Valid: false, + Error: errors.New("parsing failed: unexpected token"), + }, + }, + }, + version: "1.4.0", + wantSchema: "https://json.schemastore.org/sarif-2.1.0.json", + wantRules: 3, + wantErrors: 1, + }, + { + name: "tokenization error", + result: &ValidationResult{ + TotalFiles: 1, + ValidFiles: 0, + InvalidFiles: 1, + Duration: time.Second, + Files: []FileValidationResult{ + { + Path: "invalid.sql", + Valid: false, + Error: errors.New("tokenization failed: invalid character"), + }, + }, + }, + version: "1.4.0", + wantSchema: "https://json.schemastore.org/sarif-2.1.0.json", + wantRules: 3, + wantErrors: 1, + }, + { + name: "multiple errors", + result: &ValidationResult{ + TotalFiles: 3, + ValidFiles: 1, + InvalidFiles: 2, + Duration: time.Second, + Files: []FileValidationResult{ + { + Path: "valid.sql", + Valid: true, + Error: nil, + }, + { + Path: "error1.sql", + Valid: false, + Error: errors.New("parsing failed: missing FROM clause"), + }, + { + Path: "error2.sql", + Valid: false, + Error: errors.New("tokenization failed: unclosed string"), + }, + }, + }, + version: "1.4.0", + wantSchema: "https://json.schemastore.org/sarif-2.1.0.json", + wantRules: 3, + wantErrors: 2, + }, + { + name: "no errors", + result: &ValidationResult{ + TotalFiles: 2, + ValidFiles: 2, + InvalidFiles: 0, + Duration: time.Second, + Files: []FileValidationResult{ + { + Path: "valid1.sql", + Valid: true, + Error: nil, + }, + { + Path: "valid2.sql", + Valid: true, + Error: nil, + }, + }, + }, + version: "1.4.0", + wantSchema: "https://json.schemastore.org/sarif-2.1.0.json", + wantRules: 3, + wantErrors: 0, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + data, err := FormatSARIF(tt.result, tt.version) + if err != nil { + 
t.Fatalf("FormatSARIF() error = %v", err) + } + + // Parse the SARIF JSON + var sarif SARIF + if err := json.Unmarshal(data, &sarif); err != nil { + t.Fatalf("Failed to unmarshal SARIF: %v", err) + } + + // Verify schema + if sarif.Schema != tt.wantSchema { + t.Errorf("Schema = %v, want %v", sarif.Schema, tt.wantSchema) + } + + // Verify version + if sarif.Version != "2.1.0" { + t.Errorf("Version = %v, want 2.1.0", sarif.Version) + } + + // Verify runs exist + if len(sarif.Runs) != 1 { + t.Fatalf("Runs count = %d, want 1", len(sarif.Runs)) + } + + run := sarif.Runs[0] + + // Verify tool information + if run.Tool.Driver.Name != "GoSQLX" { + t.Errorf("Tool name = %v, want GoSQLX", run.Tool.Driver.Name) + } + if run.Tool.Driver.Version != tt.version { + t.Errorf("Tool version = %v, want %v", run.Tool.Driver.Version, tt.version) + } + + // Verify rules + if len(run.Tool.Driver.Rules) != tt.wantRules { + t.Errorf("Rules count = %d, want %d", len(run.Tool.Driver.Rules), tt.wantRules) + } + + // Verify results count + if len(run.Results) != tt.wantErrors { + t.Errorf("Results count = %d, want %d", len(run.Results), tt.wantErrors) + } + + // Verify each result has required fields + for i, result := range run.Results { + if result.RuleID == "" { + t.Errorf("Result[%d].RuleID is empty", i) + } + if result.Level != "error" { + t.Errorf("Result[%d].Level = %v, want error", i, result.Level) + } + if result.Message.Text == "" { + t.Errorf("Result[%d].Message.Text is empty", i) + } + if len(result.Locations) == 0 { + t.Errorf("Result[%d].Locations is empty", i) + } else { + loc := result.Locations[0] + if loc.PhysicalLocation.ArtifactLocation.URI == "" { + t.Errorf("Result[%d].Location URI is empty", i) + } + if loc.PhysicalLocation.Region.StartLine < 1 { + t.Errorf("Result[%d].Location StartLine = %d, want >= 1", + i, loc.PhysicalLocation.Region.StartLine) + } + } + // Verify fingerprint exists + if _, ok := result.PartialFingerprints["primaryLocationLineHash"]; !ok { + 
t.Errorf("Result[%d] missing primaryLocationLineHash fingerprint", i) + } + } + }) + } +} + +func TestCreateSARIFResult(t *testing.T) { + tests := []struct { + name string + fileResult FileValidationResult + wantRuleID string + }{ + { + name: "parsing error", + fileResult: FileValidationResult{ + Path: "test.sql", + Error: errors.New("parsing failed: unexpected token"), + }, + wantRuleID: "sql-parsing-error", + }, + { + name: "tokenization error", + fileResult: FileValidationResult{ + Path: "test.sql", + Error: errors.New("tokenization failed: invalid character"), + }, + wantRuleID: "sql-tokenization-error", + }, + { + name: "generic syntax error", + fileResult: FileValidationResult{ + Path: "test.sql", + Error: errors.New("syntax error on line 5"), + }, + wantRuleID: "sql-syntax-error", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := createSARIFResult(tt.fileResult) + + if result.RuleID != tt.wantRuleID { + t.Errorf("RuleID = %v, want %v", result.RuleID, tt.wantRuleID) + } + + if result.Level != "error" { + t.Errorf("Level = %v, want error", result.Level) + } + + if result.Message.Text == "" { + t.Error("Message.Text is empty") + } + + if len(result.Locations) != 1 { + t.Errorf("Locations count = %d, want 1", len(result.Locations)) + } + }) + } +} + +func TestGenerateFingerprint(t *testing.T) { + tests := []struct { + name string + path1 string + ruleID1 string + msg1 string + path2 string + ruleID2 string + msg2 string + wantSame bool + }{ + { + name: "identical inputs", + path1: "test.sql", + ruleID1: "sql-syntax-error", + msg1: "error message", + path2: "test.sql", + ruleID2: "sql-syntax-error", + msg2: "error message", + wantSame: true, + }, + { + name: "different paths", + path1: "test1.sql", + ruleID1: "sql-syntax-error", + msg1: "error message", + path2: "test2.sql", + ruleID2: "sql-syntax-error", + msg2: "error message", + wantSame: false, + }, + { + name: "different rule IDs", + path1: "test.sql", + ruleID1: 
"sql-syntax-error", + msg1: "error message", + path2: "test.sql", + ruleID2: "sql-parsing-error", + msg2: "error message", + wantSame: false, + }, + { + name: "different messages", + path1: "test.sql", + ruleID1: "sql-syntax-error", + msg1: "error message 1", + path2: "test.sql", + ruleID2: "sql-syntax-error", + msg2: "error message 2", + wantSame: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + fp1 := generateFingerprint(tt.path1, tt.ruleID1, tt.msg1) + fp2 := generateFingerprint(tt.path2, tt.ruleID2, tt.msg2) + + if tt.wantSame { + if fp1 != fp2 { + t.Errorf("Fingerprints should be same: %v != %v", fp1, fp2) + } + } else { + if fp1 == fp2 { + t.Errorf("Fingerprints should be different: %v == %v", fp1, fp2) + } + } + + // Verify fingerprint is a valid hex string + if len(fp1) != 16 { // 8 bytes = 16 hex chars + t.Errorf("Fingerprint length = %d, want 16", len(fp1)) + } + }) + } +} + +func TestNormalizeURI(t *testing.T) { + tests := []struct { + name string + input string + want string + }{ + { + name: "unix path", + input: "path/to/file.sql", + want: "path/to/file.sql", + }, + { + name: "windows path", + input: "path\\to\\file.sql", + want: "path/to/file.sql", + }, + { + name: "relative path with dot", + input: "./path/to/file.sql", + want: "path/to/file.sql", + }, + { + name: "simple filename", + input: "file.sql", + want: "file.sql", + }, + { + name: "mixed slashes", + input: "./path\\to/file.sql", + want: "path/to/file.sql", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := normalizeURI(tt.input) + if got != tt.want { + t.Errorf("normalizeURI() = %v, want %v", got, tt.want) + } + }) + } +} + +func TestSARIFJSONStructure(t *testing.T) { + // Test that the SARIF output is valid JSON and has the expected structure + result := &ValidationResult{ + TotalFiles: 1, + ValidFiles: 0, + InvalidFiles: 1, + Files: []FileValidationResult{ + { + Path: "test.sql", + Error: errors.New("parsing failed: 
test error"), + }, + }, + } + + data, err := FormatSARIF(result, "1.4.0") + if err != nil { + t.Fatalf("FormatSARIF() error = %v", err) + } + + // Verify it's valid JSON + var raw map[string]interface{} + if err := json.Unmarshal(data, &raw); err != nil { + t.Fatalf("Invalid JSON: %v", err) + } + + // Verify required top-level fields + requiredFields := []string{"$schema", "version", "runs"} + for _, field := range requiredFields { + if _, ok := raw[field]; !ok { + t.Errorf("Missing required field: %s", field) + } + } + + // Verify the JSON is properly formatted (has indentation) + if !strings.Contains(string(data), "\n") { + t.Error("SARIF JSON should be pretty-printed with indentation") + } +} diff --git a/pkg/gosqlx/context_test.go b/pkg/gosqlx/context_test.go index 87bcb74..171d8f6 100644 --- a/pkg/gosqlx/context_test.go +++ b/pkg/gosqlx/context_test.go @@ -2,6 +2,7 @@ package gosqlx import ( "context" + "errors" "strings" "testing" "time" @@ -68,7 +69,7 @@ func TestParseWithContext_CancelledContext(t *testing.T) { ast, err := ParseWithContext(ctx, sql) - if err != context.Canceled { + if !errors.Is(err, context.Canceled) { t.Errorf("Expected context.Canceled error, got: %v", err) } @@ -90,7 +91,7 @@ func TestParseWithContext_Timeout(t *testing.T) { ast, err := ParseWithContext(ctx, sql) - if err != context.DeadlineExceeded { + if !errors.Is(err, context.DeadlineExceeded) { t.Errorf("Expected context.DeadlineExceeded error, got: %v", err) } @@ -148,7 +149,7 @@ func TestParseWithTimeout_TimeoutExpires(t *testing.T) { ast, err := ParseWithTimeout(sql, timeout) // Should timeout or succeed quickly (race condition) - if err == context.DeadlineExceeded { + if errors.Is(err, context.DeadlineExceeded) { if ast != nil { t.Error("Expected nil AST when timeout expires") } @@ -338,7 +339,7 @@ func TestParseWithContext_ErrorHandling(t *testing.T) { t.Error("Expected error but got none") } - if err != nil && err == context.Canceled { + if err != nil && errors.Is(err, 
context.Canceled) { t.Error("Should not return context.Canceled for SQL errors") } @@ -430,7 +431,7 @@ func TestParseWithTimeout_ZeroTimeout(t *testing.T) { ast, err := ParseWithTimeout(sql, 0) // With zero timeout, context will be cancelled immediately or succeed - if err == context.DeadlineExceeded { + if errors.Is(err, context.DeadlineExceeded) { if ast != nil { t.Error("Expected nil AST when timeout expires") }