diff --git a/cmd/boostd/config.go b/cmd/boostd/config.go new file mode 100644 index 000000000..b2758077f --- /dev/null +++ b/cmd/boostd/config.go @@ -0,0 +1,77 @@ +package main + +import ( + "fmt" + "path/filepath" + + "github.com/filecoin-project/boost/node/config" + "github.com/mitchellh/go-homedir" + "github.com/urfave/cli/v2" +) + +var configCmd = &cli.Command{ + Name: "config", + Usage: "Display Boost config", + Subcommands: []*cli.Command{ + configDefaultCmd, + configUpdateCmd, + }, +} + +var configDefaultCmd = &cli.Command{ + Name: "default", + Usage: "Print config file defaults", + Flags: []cli.Flag{ + &cli.BoolFlag{ + Name: "no-comment", + Usage: "don't include comments in the output", + }, + }, + Action: func(cctx *cli.Context) error { + + cb, err := config.ConfigUpdate(config.DefaultBoost(), nil, !cctx.Bool("no-comment"), false) + if err != nil { + return err + } + + fmt.Println(string(cb)) + + return nil + }, +} + +var configUpdateCmd = &cli.Command{ + Name: "updated", + Usage: "Print config file with updates (changes from the default config file)", + Flags: []cli.Flag{ + &cli.BoolFlag{ + Name: "no-comment", + Usage: "don't include commented out default values in the output", + }, + &cli.BoolFlag{ + Name: "diff", + Usage: "only display values different from default", + }, + }, + Action: func(cctx *cli.Context) error { + path, err := homedir.Expand(cctx.String(FlagBoostRepo)) + if err != nil { + return err + } + + configPath := filepath.Join(path, "config.toml") + + cfgNode, err := config.FromFile(configPath, config.DefaultBoost()) + if err != nil { + return err + } + + output, err := config.ConfigUpdate(cfgNode, config.DefaultBoost(), !cctx.Bool("no-comment"), cctx.Bool("diff")) + if err != nil { + return err + } + + fmt.Print(string(output)) + return nil + }, +} diff --git a/cmd/boostd/main.go b/cmd/boostd/main.go index d79fe268d..852ee351c 100644 --- a/cmd/boostd/main.go +++ b/cmd/boostd/main.go @@ -41,6 +41,7 @@ func main() { migrateMarketsCmd, 
backupCmd, restoreCmd, + configCmd, dummydealCmd, dataTransfersCmd, retrievalDealsCmd, diff --git a/node/config/migrate_test.go b/node/config/migrate_test.go index c29e6a9f3..be175c6aa 100644 --- a/node/config/migrate_test.go +++ b/node/config/migrate_test.go @@ -21,6 +21,156 @@ ConfigVersion = 2 MyNewKey = "Hello" ` +const testConfig = ` +ConfigVersion = 4 +SealerApiInfo = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJBbGxvdyI6WyJyZWFkIiwid3JpdGUiLCJzaWduIiwiYWRtaW4iXX0.0LyVxqOde8UjLTcHPEo3VEBILtPQqCDNEHcoCbTRQ_Y:/ip4/127.0.0.1/tcp/2345/http" +SectorIndexApiInfo = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJBbGxvdyI6WyJyZWFkIiwid3JpdGUiLCJzaWduIiwiYWRtaW4iXX0.0LyVxqOde8UjLTcHPEo3VEBILtPQqCDNEHcoCbTRQ_Y:/ip4/127.0.0.1/tcp/2345/http" + +[API] + ListenAddress = "/ip4/127.0.0.1/tcp/1288/http" + RemoteListenAddress = "" + Timeout = "30s" + +[Backup] + DisableMetadataLog = false + +[Libp2p] + ListenAddresses = ["/ip4/0.0.0.1/tcp/50000", "/ip6/::/tcp/0"] + AnnounceAddresses = [] + NoAnnounceAddresses = [] + DisableNatPortMap = false + ConnMgrLow = 150 + ConnMgrHigh = 180 + ConnMgrGrace = "20s" + +[Pubsub] + Bootstrapper = false + RemoteTracer = "" + JsonTracer = "" + ElasticSearchTracer = "" + ElasticSearchIndex = "" + TracerSourceAuth = "" + +[Storage] + ParallelFetchLimit = 10 + StorageListRefreshDuration = "1h0m0s" + RedeclareOnStorageListRefresh = true + +[Dealmaking] + ConsiderOnlineStorageDeals = true + ConsiderOfflineStorageDeals = true + ConsiderOnlineRetrievalDeals = true + ConsiderOfflineRetrievalDeals = true + ConsiderVerifiedStorageDeals = true + ConsiderUnverifiedStorageDeals = true + PieceCidBlocklist = [] + ExpectedSealDuration = "24h0m0s" + MaxDealStartDelay = "336h0m0s" + MaxProviderCollateralMultiplier = 2 + MaxStagingDealsBytes = 2000000000 + MaxStagingDealsPercentPerHost = 0 + StartEpochSealingBuffer = 480 + DealProposalLogDuration = "24h0m0s" + RetrievalLogDuration = "24h0m0s" + StalledRetrievalTimeout = "30s" + Filter = "" + RetrievalFilter = "" + 
BlockstoreCacheMaxShards = 20 + BlockstoreCacheExpiry = "30s" + IsUnsealedCacheExpiry = "5m0s" + MaxTransferDuration = "24h0m0s" + RemoteCommp = false + MaxConcurrentLocalCommp = 1 + HTTPRetrievalMultiaddr = "" + HttpTransferMaxConcurrentDownloads = 20 + HttpTransferStallCheckPeriod = "30s" + HttpTransferStallTimeout = "5m0s" + BitswapPeerID = "" + BitswapPrivKeyFile = "" + DealLogDurationDays = 30 + SealingPipelineCacheTimeout = "30s" + FundsTaggingEnabled = true + [Dealmaking.RetrievalPricing] + Strategy = "default" + [Dealmaking.RetrievalPricing.Default] + VerifiedDealsFreeTransfer = false + [Dealmaking.RetrievalPricing.External] + Path = "" + +[Wallets] + Miner = "t01000" + PublishStorageDeals = "t3rjh3byfhhrlmdl6ofou544rmwuazp4bbqzuleahtn66ejnh73bsaonwyg54qrokvnbv5bmg37dfd5vu2bx4q" + DealCollateral = "t3qa5i54vprhl7gmdfgriubvjc5kdffx7cvbqvumopgvsy2osre5dcjdjhz5coruxp4co3chj3gbtxin7vtoia" + PledgeCollateral = "" + +[Graphql] + ListenAddress = "0.0.0.0" + Port = 8080 + +[Tracing] + Enabled = false + ServiceName = "boostd" + Endpoint = "" + +[ContractDeals] + Enabled = false + AllowlistContracts = [] + From = "0x0000000000000000000000000000000000000000" + +[LotusDealmaking] + ConsiderOnlineStorageDeals = true + ConsiderOfflineStorageDeals = true + ConsiderOnlineRetrievalDeals = true + ConsiderOfflineRetrievalDeals = true + ConsiderVerifiedStorageDeals = true + ConsiderUnverifiedStorageDeals = true + PieceCidBlocklist = [] + ExpectedSealDuration = "24h0m0s" + MaxDealStartDelay = "336h0m0s" + PublishMsgPeriod = "1h0m0s" + MaxDealsPerPublishMsg = 8 + MaxProviderCollateralMultiplier = 2 + MaxStagingDealsBytes = 0 + SimultaneousTransfersForStorage = 20 + SimultaneousTransfersForStoragePerClient = 0 + SimultaneousTransfersForRetrieval = 20 + StartEpochSealingBuffer = 480 + Filter = "" + RetrievalFilter = "" + [LotusDealmaking.RetrievalPricing] + Strategy = "default" + [LotusDealmaking.RetrievalPricing.Default] + VerifiedDealsFreeTransfer = true + 
[LotusDealmaking.RetrievalPricing.External] + Path = "" + +[LotusFees] + MaxPublishDealsFee = "0.05 FIL" + MaxMarketBalanceAddFee = "0.007 FIL" + +[DAGStore] + RootDir = "" + MaxConcurrentIndex = 5 + MaxConcurrentReadyFetches = 0 + MaxConcurrentUnseals = 0 + MaxConcurrencyStorageCalls = 100 + GCInterval = "1m0s" + +[IndexProvider] + Enable = true + EntriesCacheCapacity = 1024 + EntriesChunkSize = 16384 + TopicName = "" + PurgeCacheOnStart = false + [IndexProvider.Announce] + AnnounceOverHttp = false + DirectAnnounceURLs = ["https://cid.contact/ingest/announce", "http://localhost:3000"] + [IndexProvider.HttpPublisher] + Enabled = false + PublicHostname = "" + Port = 3104 +` + func TestMigrate(t *testing.T) { // Add a new mock migration so as to be able to test migrating up and down mockv1Tov2 := func(string) (string, error) { @@ -101,3 +251,22 @@ func TestMigrate(t *testing.T) { require.NoError(t, err) require.Equal(t, v1FileContents, string(bz)) } + +func TestConfigDiff(t *testing.T) { + repoDir := t.TempDir() + err := os.WriteFile(path.Join(repoDir, "config.toml"), []byte(testConfig), 0644) + require.NoError(t, err) + + cgf, err := FromFile(path.Join(repoDir, "config.toml"), DefaultBoost()) + require.NoError(t, err) + + s, err := ConfigUpdate(cgf, DefaultBoost(), false, true) + require.NoError(t, err) + + require.False(t, strings.Contains(string(s), `The connect string for the sealing RPC API`)) + + s, err = ConfigUpdate(cgf, DefaultBoost(), true, true) + require.NoError(t, err) + + require.True(t, strings.Contains(string(s), `The connect string for the sealing RPC API`)) +} diff --git a/node/config/update.go b/node/config/update.go new file mode 100644 index 000000000..5f1e63096 --- /dev/null +++ b/node/config/update.go @@ -0,0 +1,238 @@ +package config + +import ( + "bytes" + "fmt" + "reflect" + "regexp" + "strings" + "unicode" + + "github.com/BurntSushi/toml" +) + +// ConfigUpdate takes in a config and a default config and optionally comments out default values 
// ConfigUpdate serializes cfgCur to TOML and post-processes the text:
//
//   - comment=true emits "# ..."-style doc comments (looked up via findDoc)
//     above each field and comments out every line whose text matches a line
//     of the encoded default config cfgDef;
//   - diff=true keeps only lines that do NOT match the defaults (the changes),
//     then prunes now-empty sections via removeLines; combined with comment
//     it also emits doc comments for those changed fields.
//
// When cfgDef is non-nil the processed text is re-parsed and compared against
// cfgCur as a sanity check before being returned.
func ConfigUpdate(cfgCur, cfgDef interface{}, comment bool, diff bool) ([]byte, error) {
	var nodeStr, defStr string
	if cfgDef != nil {
		// Encode the default config so its lines can be matched below.
		buf := new(bytes.Buffer)
		e := toml.NewEncoder(buf)
		if err := e.Encode(cfgDef); err != nil {
			return nil, fmt.Errorf("encoding default config: %w", err)
		}

		defStr = buf.String()
	}

	{
		// Encode the current config; this is the text we transform.
		buf := new(bytes.Buffer)
		e := toml.NewEncoder(buf)
		if err := e.Encode(cfgCur); err != nil {
			return nil, fmt.Errorf("encoding node config: %w", err)
		}

		nodeStr = buf.String()
	}

	var outLines []string

	if comment || diff {
		{
			// create a map of default lines so we can comment those out later
			defLines := strings.Split(defStr, "\n")
			defaults := map[string]struct{}{}
			for i := range defLines {
				l := strings.TrimSpace(defLines[i])
				if len(l) == 0 {
					continue
				}
				// Section headers and comments never participate in matching.
				if l[0] == '#' || l[0] == '[' {
					continue
				}
				defaults[l] = struct{}{}
			}

			nodeLines := strings.Split(nodeStr, "\n")

			sectionRx := regexp.MustCompile(`\[(.+)]`)
			var section string

			for i, line := range nodeLines {
				// if this is a section, track it
				trimmed := strings.TrimSpace(line)
				if len(trimmed) > 0 {
					if trimmed[0] == '[' {
						m := sectionRx.FindSubmatch([]byte(trimmed))
						if len(m) != 2 {
							return nil, fmt.Errorf("section didn't match (line %d)", i)
						}
						section = string(m[1])

						// never comment sections
						outLines = append(outLines, line)
						continue
					}
				}

				// pad preserves the line's leading indent for emitted comments.
				// NOTE(review): assumes space indentation; tab-indented lines
				// would be mis-padded — TOML encoder output appears space-only.
				pad := strings.Repeat(" ", len(line)-len(strings.TrimLeftFunc(line, unicode.IsSpace)))
				lf := strings.Fields(line)

				if diff {
					// Diff mode: only keep "key = value" lines absent from the
					// defaults map (i.e. values the user changed).
					if len(lf) > 1 {
						if _, found := defaults[strings.TrimSpace(nodeLines[i])]; (cfgDef == nil || !found) && len(line) > 0 {
							if comment {
								doc := findDoc(cfgCur, section, lf[0])
								if doc != nil {
									// found docfield, emit doc comment
									if len(doc.Comment) > 0 {
										for _, docLine := range strings.Split(doc.Comment, "\n") {
											outLines = append(outLines, pad+"# "+docLine)
										}
										outLines = append(outLines, pad+"#")
									}

									outLines = append(outLines, pad+"# type: "+doc.Type)
								}
								outLines = append(outLines, line)
								if len(line) > 0 {
									outLines = append(outLines, "")
								}
							} else {
								outLines = append(outLines, line)
								if len(line) > 0 {
									outLines = append(outLines, "")
								}
							}

						}

					}

				} else {
					// Comment-only mode: emit docs for every field, then
					// comment out the lines that still match the defaults.
					if len(lf) > 1 {
						doc := findDoc(cfgCur, section, lf[0])

						if doc != nil {
							// found docfield, emit doc comment
							if len(doc.Comment) > 0 {
								for _, docLine := range strings.Split(doc.Comment, "\n") {
									outLines = append(outLines, pad+"# "+docLine)
								}
								outLines = append(outLines, pad+"#")
							}

							outLines = append(outLines, pad+"# type: "+doc.Type)
						}
					}
					// if there is the same line in the default config, comment it out it output
					if _, found := defaults[strings.TrimSpace(nodeLines[i])]; (cfgDef == nil || found) && len(line) > 0 {
						line = pad + "#" + line[len(pad):]
					}
					outLines = append(outLines, line)
					if len(line) > 0 {
						outLines = append(outLines, "")
					}
				}
			}
		}

		if diff {
			// Drop sections left with no key/value lines after filtering.
			outLines = removeLines(outLines)
		}

		nodeStr = strings.Join(outLines, "\n")
	}

	// sanity-check that the updated config parses the same way as the current one
	if cfgDef != nil {
		cfgUpdated, err := FromReader(strings.NewReader(nodeStr), cfgDef)
		if err != nil {
			return nil, fmt.Errorf("parsing updated config: %w", err)
		}

		if !reflect.DeepEqual(cfgCur, cfgUpdated) {
			return nil, fmt.Errorf("updated config didn't match current config\n%s\n%s", cfgCur, cfgUpdated)
		}
	}

	return []byte(nodeStr), nil
}

// removeLines scans the output lines, copies through everything preceding the
// first "[...]" section header, and for each section collects its lines and
// keeps the section only if it still contains at least one line that is not a
// header or blank (those survivors are then filtered via cleanupSection).
// Empty input lines outside of sections are dropped.
func removeLines(s []string) []string {
	var result []string
	var exit bool

	for i := 0; i < len(s); i++ {
		var savedLines []string
		if len(s[i]) > 0 {

			if string(s[i][0]) == "[" {
				// Collect this section's lines up to the next top-level header;
				// the loop index is rewound so the next header is revisited.
				savedLines = append(savedLines, s[i])

				for j := i + 1; j < len(s); j++ {
					if len(s[j]) > 0 && string(s[j][0]) == "[" {
						i = j - 1
						break
					}
					savedLines = append(savedLines, s[j])
				}
				// Section ran to the end of input: stop after processing it.
				if i+len(savedLines) >= len(s) {
					exit = true
				}
				if len(savedLines) < 2 {
					// Header with no body: drop it.
					savedLines = nil
				} else {
					// Count header/blank lines; if every line is one of those,
					// the section carries no values and is dropped.
					// NOTE(review): TrimSpace(v)[0] would panic on a
					// whitespace-only line; relies on the encoder never
					// emitting one — TODO confirm.
					counter := 0
					for _, v := range savedLines {
						if len(v) > 0 && (string(strings.TrimSpace(v)[0]) == "[" || strings.TrimSpace(v) == "") {
							counter += 1
						}
					}
					if counter < len(savedLines) {
						res := cleanupSection(savedLines)
						result = append(result, res...)
					}
				}
				if exit {
					break
				}
			} else {
				// Non-section, non-empty line before any header: keep as-is.
				result = append(result, s[i])
			}
		}
	}

	return result
}

// cleanupSection filters one collected section (sec[0] is its header).
// It keeps each sub-run of lines that starts at a non-blank line and extends
// to the next nested "[...]" header, but only when the run has at least two
// lines; a run of one line (e.g. a lone sub-header) is discarded. Returns nil
// when nothing besides the header survives.
func cleanupSection(sec []string) []string {
	var res []string
	res = append(res, sec[0])
	for i := 1; i < len(sec); i++ {
		var savedLines []string
		if len(sec[i]) > 0 && strings.TrimSpace(sec[i]) != "" {
			savedLines = append(savedLines, sec[i])
			for j := i + 1; j < len(sec); j++ {
				// Stop the run at the next (nested) section header and rewind
				// the outer index so that header starts a fresh run.
				if len(sec[j]) > 0 && string(strings.TrimSpace(sec[j])[0]) == "[" {
					i = j - 1
					break
				}
				savedLines = append(savedLines, sec[j])
			}
			if len(savedLines) < 2 {
				savedLines = nil
			} else {
				res = append(res, savedLines...)
				// Everything was kept: no further filtering possible.
				if len(res) == len(sec) {
					return res
				}
			}
		}
	}

	if len(res) > 1 {
		return res
	}

	return nil
}
fmt.Errorf("encoding default config: %w", err) - } - - defStr = buf.String() - } - - { - buf := new(bytes.Buffer) - e := toml.NewEncoder(buf) - if err := e.Encode(cfgCur); err != nil { - return nil, fmt.Errorf("encoding node config: %w", err) - } - - nodeStr = buf.String() - } - - if comment { - // create a map of default lines so we can comment those out later - defLines := strings.Split(defStr, "\n") - defaults := map[string]struct{}{} - for i := range defLines { - l := strings.TrimSpace(defLines[i]) - if len(l) == 0 { - continue - } - if l[0] == '#' || l[0] == '[' { - continue - } - defaults[l] = struct{}{} - } - - nodeLines := strings.Split(nodeStr, "\n") - var outLines []string - - sectionRx := regexp.MustCompile(`\[(.+)]`) - var section string - - for i, line := range nodeLines { - // if this is a section, track it - trimmed := strings.TrimSpace(line) - if len(trimmed) > 0 { - if trimmed[0] == '[' { - m := sectionRx.FindSubmatch([]byte(trimmed)) - if len(m) != 2 { - return nil, fmt.Errorf("section didn't match (line %d)", i) - } - section = string(m[1]) - - // never comment sections - outLines = append(outLines, line) - continue - } - } - - pad := strings.Repeat(" ", len(line)-len(strings.TrimLeftFunc(line, unicode.IsSpace))) - - // see if we have docs for this field - { - lf := strings.Fields(line) - if len(lf) > 1 { - doc := findDoc(cfgCur, section, lf[0]) - - if doc != nil { - // found docfield, emit doc comment - if len(doc.Comment) > 0 { - for _, docLine := range strings.Split(doc.Comment, "\n") { - outLines = append(outLines, pad+"# "+docLine) - } - outLines = append(outLines, pad+"#") - } - - outLines = append(outLines, pad+"# type: "+doc.Type) - } - - outLines = append(outLines, pad+"# env var: LOTUS_"+strings.ToUpper(strings.ReplaceAll(section, ".", "_"))+"_"+strings.ToUpper(lf[0])) - } - } - - // if there is the same line in the default config, comment it out it output - if _, found := defaults[strings.TrimSpace(nodeLines[i])]; (cfgDef == nil || 
found) && len(line) > 0 { - line = pad + "#" + line[len(pad):] - } - outLines = append(outLines, line) - if len(line) > 0 { - outLines = append(outLines, "") - } - } - - nodeStr = strings.Join(outLines, "\n") - } - - // sanity-check that the updated config parses the same way as the current one - if cfgDef != nil { - cfgUpdated, err := FromReader(strings.NewReader(nodeStr), cfgDef) - if err != nil { - return nil, fmt.Errorf("parsing updated config: %w", err) - } - - if !reflect.DeepEqual(cfgCur, cfgUpdated) { - return nil, fmt.Errorf("updated config didn't match current config") - } - } - - return []byte(nodeStr), nil -} diff --git a/node/config/v1_to_v2.go b/node/config/v1_to_v2.go index cdc5b363a..e282dbc96 100644 --- a/node/config/v1_to_v2.go +++ b/node/config/v1_to_v2.go @@ -22,7 +22,7 @@ func v1Tov2(cfgPath string) (string, error) { // Update the Boost config version boostCfg.ConfigVersion = 2 - bz, err := ConfigUpdate(boostCfg, DefaultBoost(), true) + bz, err := ConfigUpdate(boostCfg, DefaultBoost(), true, false) if err != nil { return "", fmt.Errorf("applying configuration: %w", err) } diff --git a/node/config/v2_to_v3.go b/node/config/v2_to_v3.go index dbc069ebc..c94a917a4 100644 --- a/node/config/v2_to_v3.go +++ b/node/config/v2_to_v3.go @@ -19,7 +19,7 @@ func v2Tov3(cfgPath string) (string, error) { // Update the Boost config version boostCfg.ConfigVersion = 3 - bz, err := ConfigUpdate(boostCfg, DefaultBoost(), true) + bz, err := ConfigUpdate(boostCfg, DefaultBoost(), true, false) if err != nil { return "", fmt.Errorf("applying configuration: %w", err) } diff --git a/node/config/v3_to_v4.go b/node/config/v3_to_v4.go index 0692f5925..d3cdb31d4 100644 --- a/node/config/v3_to_v4.go +++ b/node/config/v3_to_v4.go @@ -19,7 +19,7 @@ func v3Tov4(cfgPath string) (string, error) { // Update the Boost config version boostCfg.ConfigVersion = 4 - bz, err := ConfigUpdate(boostCfg, DefaultBoost(), true) + bz, err := ConfigUpdate(boostCfg, DefaultBoost(), true, false) if 
err != nil { return "", fmt.Errorf("applying configuration: %w", err) }