address PR comment
ganeshvanahalli committed Dec 10, 2024
Commit 296314b (1 parent: 3f7dd3e)
Showing 3 changed files with 3 additions and 3 deletions.
cmd/datool/datool.go: 1 addition, 1 deletion
@@ -105,7 +105,7 @@ func parseClientStoreConfig(args []string) (*ClientStoreConfig, error) {
f.String("signing-wallet-password", genericconf.PASSWORD_NOT_SET, "password to unlock the wallet, if not specified the user is prompted for the password")
f.Duration("das-retention-period", 24*time.Hour, "The period which DASes are requested to retain the stored batches.")
f.Int("max-store-chunk-body-size", 512*1024, "The maximum HTTP POST body size for a chunked store request")
f.Bool("enable-chunked-store", true, "force data to always be sent to DAS all at once instead of splitting into chunks")
f.Bool("enable-chunked-store", true, "enable data to be sent to DAS in chunks instead of all at once")

k, err := confighelpers.BeginCommonParse(f, args)
if err != nil {
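The corrected wording matches the flag's actual semantics: when enable-chunked-store is true the client splits data into chunks bounded by max-store-chunk-body-size, and when false it falls back to the legacy single-request store (see the dasRpcClient.go hunk below). A minimal, runnable sketch of that decision; the splitIntoChunks helper is hypothetical, standing in for nitro's real chunking code:

package main

import "fmt"

const maxStoreChunkBodySize = 512 * 1024 // same default as the flag above

// splitIntoChunks divides message into pieces of at most maxSize bytes.
// Hypothetical helper for illustration only.
func splitIntoChunks(message []byte, maxSize int) [][]byte {
	var chunks [][]byte
	for len(message) > 0 {
		n := maxSize
		if len(message) < n {
			n = len(message)
		}
		chunks = append(chunks, message[:n])
		message = message[n:]
	}
	return chunks
}

func main() {
	enableChunkedStore := true                         // the flag being documented
	message := make([]byte, maxStoreChunkBodySize+100) // just over one chunk's worth

	if enableChunkedStore {
		chunks := splitIntoChunks(message, maxStoreChunkBodySize)
		fmt.Printf("chunked store: sending %d chunks\n", len(chunks)) // prints 2
	} else {
		fmt.Println("legacy store: sending the whole message in one request")
	}
}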
das/aggregator.go: 1 addition, 1 deletion
@@ -58,7 +58,7 @@ func AggregatorConfigAddOptions(prefix string, f *flag.FlagSet) {
f.Int(prefix+".assumed-honest", DefaultAggregatorConfig.AssumedHonest, "Number of assumed honest backends (H). If there are N backends, K=N+1-H valid responses are required to consider an Store request to be successful.")
f.Var(&parsedBackendsConf, prefix+".backends", "JSON RPC backend configuration. This can be specified on the command line as a JSON array, eg: [{\"url\": \"...\", \"pubkey\": \"...\"},...], or as a JSON array in the config file.")
f.Int(prefix+".max-store-chunk-body-size", DefaultAggregatorConfig.MaxStoreChunkBodySize, "maximum HTTP POST body size to use for individual batch chunks, including JSON RPC overhead and an estimated overhead of 512B of headers")
f.Bool(prefix+".enable-chunked-store", DefaultAggregatorConfig.EnableChunkedStore, "force data to always be sent to DAS all at once instead of splitting into chunks")
f.Bool(prefix+".enable-chunked-store", DefaultAggregatorConfig.EnableChunkedStore, "enable data to be sent to DAS in chunks instead of all at once")
}

type Aggregator struct {
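The sibling flag max-store-chunk-body-size caps the entire POST body, including JSON RPC framing and an estimated 512B of headers, so the usable payload per chunk is smaller than the configured value. A rough illustration of that accounting; the JSON RPC overhead figure below is a made-up placeholder, not nitro's actual number:

package main

import "fmt"

func main() {
	const maxStoreChunkBodySize = 512 * 1024 // the flag: limit on the whole POST body
	const estimatedHeaderOverhead = 512      // the 512B of headers from the help text
	const jsonRPCOverhead = 1024             // hypothetical framing allowance

	usable := maxStoreChunkBodySize - estimatedHeaderOverhead - jsonRPCOverhead
	fmt.Printf("usable payload per chunk: %d bytes\n", usable)

	batch := 10 * 1024 * 1024                  // a 10 MiB batch, purely for illustration
	numChunks := (batch + usable - 1) / usable // ceiling division
	fmt.Printf("a %d-byte batch would need %d chunks\n", batch, numChunks)
}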
das/dasRpcClient.go: 1 addition, 1 deletion
@@ -90,7 +90,7 @@ func (c *DASRPCClient) Store(ctx context.Context, message []byte, timeout uint64
}()

if !c.enableChunkedStore {
-log.Info("Legacy store is being force-used by the DAS client", "url", c.url)
+log.Debug("Legacy store is being force-used by the DAS client", "url", c.url)
return c.legacyStore(ctx, message, timeout)
}

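The only change here is the log level. Chunked store is on by default, so anyone on the legacy path has opted into it deliberately, and logging the fallback at info on every Store call would be noise; demoting it to debug keeps the message available under verbose logging. The call style in the diff matches go-ethereum's key-value logger, which nitro imports; the level-gating idea itself can be shown with the standard library's log/slog (a sketch, not the project's actual logging setup):

package main

import (
	"log/slog"
	"os"
)

func main() {
	url := "http://localhost:9876" // hypothetical DAS endpoint

	// The default handler sits at Info level, so this Debug record is dropped.
	slog.Debug("Legacy store is being force-used by the DAS client", "url", url)

	// A handler configured for Debug emits it.
	verbose := slog.New(slog.NewTextHandler(os.Stderr, &slog.HandlerOptions{Level: slog.LevelDebug}))
	verbose.Debug("Legacy store is being force-used by the DAS client", "url", url)
}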
