
Commit

refractor: response
imstevez committed Sep 3, 2023
1 parent 187f740 commit ec4bb37
Showing 35 changed files with 581 additions and 757 deletions.
3 changes: 3 additions & 0 deletions cmd/btfs/daemon.go
@@ -717,6 +717,9 @@ If the user need to start multiple nodes on the same machine, the configuration
 functest(cfg.Services.OnlineServerDomain, cfg.Identity.PeerID, hValue)
 }

+// init s3 providers
+s3.InitProviders(statestore)
+
 // access-key init
 accesskey.InitService(s3.GetProviders())
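
Aside (not part of this commit): the wiring above suggests a package-level registry inside the s3 package — InitProviders stores shared dependencies once at startup, and GetProviders hands them to consumers such as the access-key service. A minimal sketch; only the two function names come from the diff, while the StateStorer interface and struct fields are assumptions:

package s3

import "sync"

// StateStorer stands in for the daemon's statestore dependency; the real
// type lives elsewhere in go-btfs, so this minimal interface is assumed.
type StateStorer interface {
    Put(key string, value interface{}) error
    Get(key string, value interface{}) error
}

// Providers bundles the shared dependencies the s3 subsystem hands out.
type Providers struct {
    StateStore StateStorer
}

var (
    providers *Providers
    once      sync.Once
)

// InitProviders builds the registry exactly once at daemon startup.
func InitProviders(statestore StateStorer) {
    once.Do(func() {
        providers = &Providers{StateStore: statestore}
    })
}

// GetProviders returns the registry built by InitProviders.
func GetProviders() *Providers {
    return providers
}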
2 changes: 1 addition & 1 deletion core/commands/files.go
@@ -735,7 +735,7 @@ stat' on the file or any of its ancestors.
 },
 Arguments: []cmds.Argument{
 cmds.StringArg("path", true, false, "Path to write to."),
-cmds.FileArg("data", true, false, "Data to write.").EnableStdin(),
+cmds.FileArg("data", true, false, "data to write.").EnableStdin(),
 },
 Options: []cmds.Option{
 cmds.Int64Option(filesOffsetOptionName, "o", "Byte offset to begin writing at."),
10 changes: 5 additions & 5 deletions core/commands/object/object.go
@@ -270,7 +270,7 @@ Supported values are:
 Type: Node{},
 Encoders: cmds.EncoderMap{
 cmds.Protobuf: cmds.MakeTypedEncoder(func(req *cmds.Request, w io.Writer, out *Node) error {
-// deserialize the Data field as text as this was the standard behaviour
+// deserialize the data field as text as this was the standard behaviour
 object, err := deserializeNode(out, "text")
 if err != nil {
 return nil
@@ -371,20 +371,20 @@ It reads from stdin, and the output is a base58 encoded multihash.
 'btfs object put' is a plumbing command for storing DAG nodes.
 It reads from stdin, and the output is a base58 encoded multihash.

-Data should be in the format specified by the --inputenc flag.
+data should be in the format specified by the --inputenc flag.
 --inputenc may be one of the following:
 * "protobuf"
 * "json" (default)

 Examples:

-$ echo '{ "Data": "abc" }' | btfs object put
+$ echo '{ "data": "abc" }' | btfs object put

 This creates a node with the data 'abc' and no links. For an object with
 links, create a file named 'node.json' with the contents:

 {
-"Data": "another",
+"data": "another",
 "Links": [ {
 "Name": "some link",
 "Hash": "QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V",
@@ -399,7 +399,7 @@ And then run:
 },

 Arguments: []cmds.Argument{
-cmds.FileArg("data", true, false, "Data to be stored as a DAG object.").EnableStdin(),
+cmds.FileArg("data", true, false, "data to be stored as a DAG object.").EnableStdin(),
 },
 Options: []cmds.Option{
 cmds.StringOption(inputencOptionName, "Encoding type of input data. One of: {\"protobuf\", \"json\"}.").WithDefault("json"),
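
Aside (not part of this commit): the node.json shape in the help text above maps onto a small pair of Go types. A sketch for orientation — both type definitions are illustrative, not the codebase's own, and the "data" tag follows the lowercased key this commit settles on:

package main

import (
    "encoding/json"
    "os"
)

// Link and Node mirror the JSON accepted by 'btfs object put' in the help
// text above; these types are assumptions made for illustration.
type Link struct {
    Name string `json:"Name"`
    Hash string `json:"Hash"`
}

type Node struct {
    Data  string `json:"data"`
    Links []Link `json:"Links"`
}

func main() {
    n := Node{
        Data: "another",
        Links: []Link{{
            Name: "some link",
            Hash: "QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V",
        }},
    }
    // Prints a node.json body equivalent to the example in the help text.
    enc := json.NewEncoder(os.Stdout)
    enc.SetIndent("", "  ")
    enc.Encode(&n)
}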
2 changes: 1 addition & 1 deletion core/commands/object/patch.go
@@ -46,7 +46,7 @@ the limit will not be respected by the network.
 },
 Arguments: []cmds.Argument{
 cmds.StringArg("root", true, false, "The hash of the node to modify."),
-cmds.FileArg("data", true, false, "Data to append.").EnableStdin(),
+cmds.FileArg("data", true, false, "data to append.").EnableStdin(),
 },
 Run: func(req *cmds.Request, res cmds.ResponseEmitter, env cmds.Environment) error {
 api, err := cmdenv.GetApi(env, req)
8 changes: 4 additions & 4 deletions fuse/ipns/ipns_test.go
@@ -66,10 +66,10 @@ func verifyFile(t *testing.T, path string, wantData []byte) {
 t.Fatal(err)
 }
 if len(isData) != len(wantData) {
-t.Fatal("Data not equal - length check failed")
+t.Fatal("data not equal - length check failed")
 }
 if !bytes.Equal(isData, wantData) {
-t.Fatal("Data not equal")
+t.Fatal("data not equal")
 }
 }

@@ -328,7 +328,7 @@ func TestAppendFile(t *testing.T) {
 t.Fatal(err)
 }
 if !bytes.Equal(rbuf, data) {
-t.Fatal("Data inconsistent!")
+t.Fatal("data inconsistent!")
 }
 }

@@ -458,7 +458,7 @@ func TestFSThrash(t *testing.T) {
 }

 if !bytes.Equal(data, out) {
-t.Errorf("Data didn't match in %s: expected %v, got %v", name, data, out)
+t.Errorf("data didn't match in %s: expected %v, got %v", name, data, out)
 }
 }
 }
2 changes: 1 addition & 1 deletion fuse/readonly/readonly_unix.go
@@ -272,7 +272,7 @@ func (s *Node) Read(ctx context.Context, req *fuse.ReadRequest, resp *fuse.ReadR
 if err != nil {
 return err
 }
-// Data has a capacity of Size
+// data has a capacity of Size
 buf := resp.Data[:int(req.Size)]
 n, err := io.ReadFull(r, buf)
 resp.Data = buf[:n]
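
Aside (not part of this commit): the pattern in this hunk — slice the pre-allocated response buffer to the requested size, io.ReadFull into it, then re-slice to the bytes actually read — is how the handler serves short reads near end of file. A self-contained sketch of just that pattern, with the reader stubbed out:

package main

import (
    "bytes"
    "fmt"
    "io"
)

func main() {
    src := bytes.NewReader([]byte("hello")) // stands in for the DAG reader r
    size := 8                               // requested size; exceeds what's left

    buf := make([]byte, size)
    n, err := io.ReadFull(src, buf)
    // io.ReadFull returns ErrUnexpectedEOF on a partial read and io.EOF on
    // an empty one; either way, buf[:n] is the payload to hand back.
    if err != nil && err != io.ErrUnexpectedEOF && err != io.EOF {
        fmt.Println("read failed:", err)
        return
    }
    fmt.Printf("served %d bytes: %q\n", n, buf[:n]) // served 5 bytes: "hello"
}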
9 changes: 5 additions & 4 deletions s3/consts/consts.go
@@ -28,10 +28,11 @@ const (
 AssumeRole = "AssumeRole"
 SignV4Algorithm = "AWS4-HMAC-SHA256"

-DefaultLocation = "us-east-1"
-DefaultBucketACL = s3.BucketCannedACLPublicRead
-DefaultObjectACL = ""
-AllUsersURI = "http://acs.amazonaws.com/groups/global/AllUsers"
+DefaultServerInfo = "BTFS"
+DefaultLocation = "us-east-1"
+DefaultBucketACL = s3.BucketCannedACLPublicRead
+DefaultObjectACL = ""
+AllUsersURI = "http://acs.amazonaws.com/groups/global/AllUsers"
 )

 var SupportedLocations = map[string]bool{
2 changes: 0 additions & 2 deletions s3/handlers/handlers.go
@@ -12,8 +12,6 @@ import (
 "strconv"
 )

-const lockPrefix = "s3:lock/"
-
 var _ Handlerser = (*Handlers)(nil)

 type Handlers struct {
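
Aside (not part of this commit): the surviving context line var _ Handlerser = (*Handlers)(nil) is Go's compile-time interface check — the blank-identifier assignment compiles only if *Handlers implements Handlerser. The idiom in isolation, with invented names:

package main

// Greeter plays the role of Handlerser; English plays *Handlers.
type Greeter interface {
    Greet() string
}

type English struct{}

func (English) Greet() string { return "hello" }

// Compiles only while English satisfies Greeter; removing the Greet method
// turns the missing implementation into a build error rather than a runtime one.
var _ Greeter = English{}

func main() {
    println(English{}.Greet())
}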
4 changes: 2 additions & 2 deletions s3/handlers/handlers_bucket.go
@@ -44,7 +44,7 @@ func (h *Handlers) CreateBucketHandler(w http.ResponseWriter, r *http.Request) {
 return
 }

-responses.WritePutBucketResponse(w, r)
+responses.WriteCreateBucketResponse(w, r)

 return
 }
@@ -154,7 +154,7 @@ func (h *Handlers) PutBucketAclHandler(w http.ResponseWriter, r *http.Request) {
 }()

 req, rerr := requests.ParsePutBucketAclRequest(r)
-if err != nil {
+if rerr != nil {
 err = rerr
 responses.WriteErrorResponse(w, r, rerr)
 return
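
Aside (not part of this commit): the second hunk is a genuine bug fix, not a rename — the handler parsed into rerr but tested the still-nil outer err, so a failed parse fell straight through. The pitfall distilled into a runnable sketch, all names invented:

package main

import (
    "errors"
    "fmt"
)

func parse(ok bool) (string, error) {
    if !ok {
        return "", errors.New("bad request")
    }
    return "payload", nil
}

func handle(ok bool) error {
    var err error // outer error, still nil at the check below

    req, rerr := parse(ok)
    // The buggy version tested `err != nil`: err is nil here, so a failed
    // parse slipped through and the handler kept going with an empty req.
    if rerr != nil { // fixed: test the error that parse actually returned
        err = rerr
        return err
    }
    fmt.Println("handling", req)
    return nil
}

func main() {
    fmt.Println(handle(false)) // bad request
    fmt.Println(handle(true))  // handling payload, then <nil>
}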
File renamed without changes.
20 changes: 0 additions & 20 deletions s3/requests/parsers.go
@@ -1,12 +1,9 @@
 package requests

 import (
-"errors"
-"fmt"
 "github.com/bittorrent/go-btfs/s3/cctx"
 "github.com/bittorrent/go-btfs/s3/responses"
 "net/http"
-"reflect"
 )

 // CreateBucketRequest .
@@ -17,23 +14,6 @@ type CreateBucketRequest struct {
 Region string
 }

-// todo: parse aws request use aws struct
-func ParseS3Request(r *http.Request, v interface{}) (err error) {
-rv := reflect.ValueOf(v)
-if rv.Kind() != reflect.Pointer || rv.IsNil() {
-err = errors.New("invalid value must be non nil pointer")
-return
-}
-
-rt := reflect.TypeOf(v).Elem()
-n := rt.NumField()
-for i := 0; i < n; i++ {
-f := rt.Field(i)
-fmt.Println(f)
-}
-return
-}
-
 func ParseCreateBucketRequest(r *http.Request) (req *CreateBucketRequest, rerr *responses.Error) {
 req = &CreateBucketRequest{}
 req.AccessKey = cctx.GetAccessKey(r)
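
Aside (not part of this commit): the deleted ParseS3Request was a TODO stub that only printed field descriptors. For context on where reflection-based request parsing usually heads, a sketch that fills string fields from query parameters — the s3 struct tag and every name below are invented:

package main

import (
    "errors"
    "fmt"
    "net/http"
    "reflect"
)

// parseQuery fills v's exported string fields from query parameters named
// by a hypothetical `s3` struct tag.
func parseQuery(r *http.Request, v interface{}) error {
    rv := reflect.ValueOf(v)
    if rv.Kind() != reflect.Pointer || rv.IsNil() {
        return errors.New("value must be a non-nil pointer")
    }
    ev := rv.Elem()
    et := ev.Type()
    for i := 0; i < et.NumField(); i++ {
        f := et.Field(i)
        tag := f.Tag.Get("s3")
        if tag == "" || f.Type.Kind() != reflect.String {
            continue
        }
        ev.Field(i).SetString(r.URL.Query().Get(tag))
    }
    return nil
}

type createBucketQuery struct {
    Bucket string `s3:"bucket"`
    Region string `s3:"region"`
}

func main() {
    r, _ := http.NewRequest(http.MethodGet, "http://example.test/?bucket=b1&region=us-east-1", nil)
    var q createBucketQuery
    if err := parseQuery(r, &q); err != nil {
        panic(err)
    }
    fmt.Printf("%+v\n", q) // {Bucket:b1 Region:us-east-1}
}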
3 changes: 2 additions & 1 deletion s3/requests/parsers_common.go
@@ -14,7 +14,8 @@ import (
 )

 func parseBucket(r *http.Request) (bucket string, rerr *responses.Error) {
-err := s3utils.CheckValidBucketNameStrict(mux.Vars(r)["bucket"])
+bucket = mux.Vars(r)["bucket"]
+err := s3utils.CheckValidBucketNameStrict(bucket)
 if err != nil {
 rerr = responses.ErrInvalidBucketName
 }
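
Aside (not part of this commit): another real fix — the old line validated mux.Vars(r)["bucket"] but never assigned the named return bucket, so callers always received "". The named-return pitfall in miniature, all names invented:

package main

import (
    "errors"
    "fmt"
)

func checkName(s string) error {
    if s == "" {
        return errors.New("empty name")
    }
    return nil
}

// buggy validates the input but never assigns the named return,
// so callers always get "".
func buggy(in string) (bucket string, err error) {
    err = checkName(in)
    return
}

// fixed captures the value into the named return before validating it.
func fixed(in string) (bucket string, err error) {
    bucket = in
    err = checkName(bucket)
    return
}

func main() {
    b, _ := buggy("photos")
    fmt.Printf("buggy: %q\n", b) // buggy: ""
    b, _ = fixed("photos")
    fmt.Printf("fixed: %q\n", b) // fixed: "photos"
}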
10 changes: 0 additions & 10 deletions s3/requests/types_common.go

This file was deleted.

64 changes: 0 additions & 64 deletions s3/responses/object_header.go

This file was deleted.
