Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
73 changes: 73 additions & 0 deletions go/cmd/contract.go
Original file line number Diff line number Diff line change
Expand Up @@ -533,6 +533,79 @@ func runContractCreateCopilot(client *api.Client, dataset, outFile string, noWai
return nil
}

// runContractCreateCopilotBulk kicks off a single GenerateContract operation
// covering every entry in qualifiedNames, waits on the one shared status
// (skipped when noWait is set), then retrieves each generated contract and
// writes it to the file mapped in outFiles (falling back to datasetFileName
// when no mapping exists). A failure on one dataset is reported to stderr as
// a warning and does not stop the remaining datasets. The returned slice
// lists only the files that were written successfully.
func runContractCreateCopilotBulk(client *api.Client, qualifiedNames []string, outFiles map[string]string, noWait bool) ([]string, error) {
	total := len(qualifiedNames)
	if total == 0 {
		return nil, nil
	}

	opID, err := client.GenerateContract(api.GenerateContractRequest{
		DatasetQualifiedNames: qualifiedNames,
	})
	if err != nil {
		return nil, err
	}

	if noWait {
		// Fire-and-forget mode: point the user at Soda Cloud and return.
		fmt.Printf(" %s AI contract generation started for %d datasets.\n", output.Green.Render("✓"), total)
		fmt.Println(output.Dim.Render(" Running in background — contracts will appear in Soda Cloud when ready."))
		fmt.Println(output.Dim.Render(" Check results: sodacli results list"))
		return nil, nil
	}

	spin := output.NewSpinner(fmt.Sprintf("Generating AI contracts for %d datasets...", total))
	spin.Start()

	// Poll the single operation status every few seconds until it reaches a
	// terminal state, keeping the spinner message updated with elapsed time.
	const pollSeconds = 3
	waited := 0
poll:
	for {
		time.Sleep(pollSeconds * time.Second)
		waited += pollSeconds
		st, serr := client.GetGenerateStatus(opID)
		if serr != nil {
			spin.Stop()
			return nil, serr
		}
		switch st.State {
		case "completed":
			break poll
		case "failed", "canceled":
			spin.Stop()
			return nil, output.Errorf(2, "AI generation %s", st.State)
		default:
			spin.SetMessage(fmt.Sprintf("Generating AI contracts for %d datasets... (%ds)", total, waited))
		}
	}
	spin.Stop()

	// Fetch and persist each dataset's contract; warn-and-continue on failure.
	written := make([]string, 0, total)
	for _, qn := range qualifiedNames {
		contract, ferr := client.FindContractByDataset(qn)
		switch {
		case ferr != nil:
			fmt.Fprintf(os.Stderr, " %s [%s] could not fetch contract: %v\n", output.Yellow.Render("⚠"), qn, ferr)
			continue
		case contract == nil:
			// The operation finished but the backend has no contract to fetch.
			fmt.Fprintf(os.Stderr, " %s [%s] AI generation completed but contract was not persisted.\n", output.Yellow.Render("⚠"), qn)
			continue
		}
		target := outFiles[qn]
		if target == "" {
			target = datasetFileName(qn)
		}
		if werr := os.WriteFile(target, []byte(contract.Contents), 0644); werr != nil {
			fmt.Fprintf(os.Stderr, " %s [%s] could not write file: %v\n", output.Yellow.Render("⚠"), qn, werr)
			continue
		}
		written = append(written, target)
	}

	if len(written) > 0 {
		output.PrintSuccess(fmt.Sprintf("Wrote %d AI-generated contract(s).", len(written)), GCtx)
	}
	return written, nil
}

// ── contract copilot ──────────────────────────────────────────────────────────

var contractCopilotCmd = &cobra.Command{
Expand Down
32 changes: 28 additions & 4 deletions go/cmd/dataset.go
Original file line number Diff line number Diff line change
Expand Up @@ -523,6 +523,8 @@ var datasetDiagnosticsCmd = &cobra.Command{
noCollectResults, _ := cmd.Flags().GetBool("no-collect-results")
collectFailedRows, _ := cmd.Flags().GetBool("collect-failed-rows")
noCollectFailedRows, _ := cmd.Flags().GetBool("no-collect-failed-rows")
uniqueKeys, _ := cmd.Flags().GetStringSlice("unique-keys")
hasUniqueKeys := cmd.Flags().Changed("unique-keys")
// flags not yet in the public API — fail fast with a clear message
unsupportedFlags := []string{"schema", "table-prefix", "table-suffix", "failed-rows-description",
"expose-failed-rows-query", "no-expose-failed-rows-query", "failed-rows-cta", "no-failed-rows-cta"}
Expand All @@ -538,7 +540,7 @@ var datasetDiagnosticsCmd = &cobra.Command{
}

// no flags → show current settings
if !collectResults && !noCollectResults && !collectFailedRows && !noCollectFailedRows {
if !collectResults && !noCollectResults && !collectFailedRows && !noCollectFailedRows && !hasUniqueKeys {
result, err := client.GetDatasetDiagnostics(args[0])
if err != nil {
return err
Expand Down Expand Up @@ -569,6 +571,9 @@ var datasetDiagnosticsCmd = &cobra.Command{
if result.FailedRowsConfiguration.State != "" {
fmt.Printf(" %-28s %s\n", output.Bold.Render("State"), result.FailedRowsConfiguration.State)
}
if len(result.FailedRowsConfiguration.UniqueKeyColumnNames) > 0 {
fmt.Printf(" %-28s %s\n", output.Bold.Render("Unique key columns"), strings.Join(result.FailedRowsConfiguration.UniqueKeyColumnNames, ", "))
}
}
return nil
}
Expand All @@ -579,9 +584,27 @@ var datasetDiagnosticsCmd = &cobra.Command{
enabled := collectResults
cfg.ScanAndResultsConfiguration = &api.DiagnosticsScanConfig{Enabled: &enabled}
}
if collectFailedRows || noCollectFailedRows {
enabled := collectFailedRows
cfg.FailedRowsConfiguration = &api.DiagnosticsFailedRowsConfig{Enabled: &enabled}
if collectFailedRows || noCollectFailedRows || hasUniqueKeys {
// Seed from current state — the API replaces the whole
// failedRowsConfiguration object, so untouched fields would be reset.
current, err := client.GetDatasetDiagnostics(args[0])
if err != nil {
return err
}
fr := &api.DiagnosticsFailedRowsConfig{}
if current.FailedRowsConfiguration != nil {
enabled := current.FailedRowsConfiguration.Enabled
fr.Enabled = &enabled
fr.UniqueKeyColumnNames = current.FailedRowsConfiguration.UniqueKeyColumnNames
}
if collectFailedRows || noCollectFailedRows {
enabled := collectFailedRows
fr.Enabled = &enabled
}
if hasUniqueKeys {
fr.UniqueKeyColumnNames = uniqueKeys
}
cfg.FailedRowsConfiguration = fr
}

if _, err := client.UpdateDatasetDiagnostics(args[0], cfg); err != nil {
Expand Down Expand Up @@ -781,6 +804,7 @@ func init() {
datasetDiagnosticsCmd.Flags().Bool("no-collect-results", false, "Disable storing check results and scan history")
datasetDiagnosticsCmd.Flags().Bool("collect-failed-rows", false, "Store failed rows")
datasetDiagnosticsCmd.Flags().Bool("no-collect-failed-rows", false, "Disable storing failed rows")
datasetDiagnosticsCmd.Flags().StringSlice("unique-keys", nil, "Unique key columns for failed rows collection (comma-separated or repeated)")
datasetDiagnosticsCmd.Flags().String("table-prefix", "", "Prefix for diagnostic table names")
datasetDiagnosticsCmd.Flags().String("table-suffix", "", "Suffix for diagnostic table names")
datasetDiagnosticsCmd.Flags().String("failed-rows-description", "", "Description for failed rows storage context")
Expand Down
Loading