diff --git a/src/internal/connector/exchange/attachment.go b/src/internal/connector/exchange/attachment.go
index 88952f1b7..07fb0e7dd 100644
--- a/src/internal/connector/exchange/attachment.go
+++ b/src/internal/connector/exchange/attachment.go
@@ -11,7 +11,6 @@ import (
 	"github.com/alcionai/corso/src/internal/common/ptr"
 	"github.com/alcionai/corso/src/internal/connector/graph"
 	"github.com/alcionai/corso/src/internal/connector/support"
-	"github.com/alcionai/corso/src/internal/connector/uploadsession"
 	"github.com/alcionai/corso/src/pkg/logger"
 )
 
@@ -106,7 +105,7 @@ func uploadLargeAttachment(
 	}
 
 	url := ptr.Val(session.GetUploadUrl())
-	aw := uploadsession.NewWriter(uploader.getItemID(), url, size)
+	aw := graph.NewLargeItemWriter(uploader.getItemID(), url, size)
 	logger.Ctx(ctx).Debugw("uploading large attachment", "attachment_url", graph.LoggableURL(url))
 
 	// Upload the stream data
diff --git a/src/internal/connector/uploadsession/uploadsession.go b/src/internal/connector/graph/uploadsession.go
similarity index 67%
rename from src/internal/connector/uploadsession/uploadsession.go
rename to src/internal/connector/graph/uploadsession.go
index 210abe018..77fefd5c8 100644
--- a/src/internal/connector/uploadsession/uploadsession.go
+++ b/src/internal/connector/graph/uploadsession.go
@@ -1,12 +1,12 @@
-package uploadsession
+package graph
 
 import (
 	"bytes"
 	"context"
 	"fmt"
+	"net/http"
 
 	"github.com/alcionai/clues"
-	"gopkg.in/resty.v1"
 
 	"github.com/alcionai/corso/src/pkg/logger"
 )
@@ -20,7 +20,7 @@ const (
 
 // Writer implements an io.Writer for a M365
 // UploadSession URL
-type writer struct {
+type largeItemWriter struct {
 	// Identifier
 	id string
 	// Upload URL for this item
@@ -29,18 +29,20 @@ type writer struct {
 	contentLength int64
 	// Last item offset that was written to
 	lastWrittenOffset int64
-	client            *resty.Client
+	client            httpWrapper
 }
 
-func NewWriter(id, url string, size int64) *writer {
-	return &writer{id: id, url: url, contentLength: size, client: resty.New()}
+func NewLargeItemWriter(id, url string, size int64) *largeItemWriter {
+	return &largeItemWriter{id: id, url: url, contentLength: size, client: *NewNoTimeoutHTTPWrapper()}
}
 
 // Write will upload the provided data to M365. It sets the `Content-Length` and `Content-Range` headers based on
 // https://docs.microsoft.com/en-us/graph/api/driveitem-createuploadsession
-func (iw *writer) Write(p []byte) (int, error) {
+func (iw *largeItemWriter) Write(p []byte) (int, error) {
 	rangeLength := len(p)
-	logger.Ctx(context.Background()).
+
+	ctx := context.Background()
+
+	logger.Ctx(ctx).
 		Debugf("WRITE for %s. Size:%d, Offset: %d, TotalSize: %d",
 			iw.id, rangeLength, iw.lastWrittenOffset, iw.contentLength)
 
@@ -48,17 +50,20 @@ func (iw *writer) Write(p []byte) (int, error) {
 
 	// PUT the request - set headers `Content-Range`to describe total size and `Content-Length` to describe size of
 	// data in the current request
-	_, err := iw.client.R().
-		SetHeaders(map[string]string{
-			contentRangeHeaderKey: fmt.Sprintf(
-				contentRangeHeaderValueFmt,
-				iw.lastWrittenOffset,
-				endOffset-1,
-				iw.contentLength),
-			contentLengthHeaderKey: fmt.Sprintf("%d", rangeLength),
-		}).
-		SetBody(bytes.NewReader(p)).
-		Put(iw.url)
+	headers := make(map[string]string)
+	headers[contentRangeHeaderKey] = fmt.Sprintf(
+		contentRangeHeaderValueFmt,
+		iw.lastWrittenOffset,
+		endOffset-1,
+		iw.contentLength)
+	headers[contentLengthHeaderKey] = fmt.Sprintf("%d", rangeLength)
+
+	_, err := iw.client.Request(
+		ctx,
+		http.MethodPut,
+		iw.url,
+		bytes.NewReader(p),
+		headers)
 	if err != nil {
 		return 0, clues.Wrap(err, "uploading item").With(
 			"upload_id", iw.id,
diff --git a/src/internal/connector/uploadsession/uploadsession_test.go b/src/internal/connector/graph/uploadsession_test.go
similarity index 97%
rename from src/internal/connector/uploadsession/uploadsession_test.go
rename to src/internal/connector/graph/uploadsession_test.go
index f080affa3..74d2c71c9 100644
--- a/src/internal/connector/uploadsession/uploadsession_test.go
+++ b/src/internal/connector/graph/uploadsession_test.go
@@ -1,4 +1,4 @@
-package uploadsession
+package graph
 
 import (
 	"bytes"
@@ -69,7 +69,7 @@ func (suite *UploadSessionSuite) TestWriter() {
 
 	defer ts.Close()
 
-	writer := NewWriter("item", ts.URL, writeSize)
+	writer := NewLargeItemWriter("item", ts.URL, writeSize)
 
 	// Using a 32 KB buffer for the copy allows us to validate the
 	// multi-part upload. `io.CopyBuffer` will only write 32 KB at
diff --git a/src/internal/connector/onedrive/item.go b/src/internal/connector/onedrive/item.go
index 9aec6ef74..6c90ba7a3 100644
--- a/src/internal/connector/onedrive/item.go
+++ b/src/internal/connector/onedrive/item.go
@@ -16,7 +16,6 @@ import (
 	"github.com/alcionai/corso/src/internal/connector/graph"
 	"github.com/alcionai/corso/src/internal/connector/onedrive/api"
 	"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
-	"github.com/alcionai/corso/src/internal/connector/uploadsession"
 	"github.com/alcionai/corso/src/pkg/backup/details"
 	"github.com/alcionai/corso/src/pkg/logger"
 )
@@ -360,7 +359,7 @@ func driveItemWriter(
 
 	url := ptr.Val(r.GetUploadUrl())
 
-	return uploadsession.NewWriter(itemID, url, itemSize), nil
+	return graph.NewLargeItemWriter(itemID, url, itemSize), nil
 }
 
 // constructWebURL helper function for recreating the webURL
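
For context on how the renamed writer gets used: graph.NewLargeItemWriter is driven exactly like the old uploadsession.NewWriter — callers hand it to io.Copy or io.CopyBuffer, and each Write becomes one ranged PUT against the Graph upload-session URL, with Content-Range and Content-Length describing the chunk (per the createUploadSession doc linked in the code). Below is a minimal, self-contained sketch of that flow in plain net/http; putChunk, uploadAll, the httptest stand-in server, and the direct use of http.Client are illustrative assumptions, not corso's internal httpWrapper API.

// Illustrative sketch only: a stripped-down version of what largeItemWriter.Write
// does per chunk, using net/http directly instead of corso's httpWrapper.
package main

import (
	"bytes"
	"fmt"
	"io"
	"net/http"
	"net/http/httptest"
)

// putChunk uploads one slice of the item to the upload-session URL.
// offset is the index of the first byte in p; total is the full item size.
func putChunk(client *http.Client, url string, p []byte, offset, total int64) error {
	req, err := http.NewRequest(http.MethodPut, url, bytes.NewReader(p))
	if err != nil {
		return err
	}

	// Content-Range follows the createUploadSession contract:
	// "bytes <first>-<last>/<total>", e.g. "bytes 0-32767/1048576".
	req.Header.Set("Content-Range",
		fmt.Sprintf("bytes %d-%d/%d", offset, offset+int64(len(p))-1, total))
	// Content-Length describes only the bytes carried by this request.
	req.ContentLength = int64(len(p))

	resp, err := client.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()

	// Intermediate chunks are accepted with 202; the final chunk returns 200/201.
	if resp.StatusCode >= 300 {
		return fmt.Errorf("chunk upload failed: %s", resp.Status)
	}

	return nil
}

// uploadAll walks the reader in fixed-size chunks while tracking the running
// offset, the same bookkeeping largeItemWriter does with lastWrittenOffset.
func uploadAll(client *http.Client, url string, r io.Reader, total int64) error {
	buf := make([]byte, 32*1024)

	var offset int64

	for {
		n, err := r.Read(buf)
		if n > 0 {
			if perr := putChunk(client, url, buf[:n], offset, total); perr != nil {
				return perr
			}

			offset += int64(n)
		}

		if err == io.EOF {
			return nil
		}

		if err != nil {
			return err
		}
	}
}

func main() {
	// Tiny stand-in server so the sketch runs end to end; a real caller would
	// use the uploadUrl returned by createUploadSession.
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		fmt.Println("received", r.Header.Get("Content-Range"))
		w.WriteHeader(http.StatusAccepted)
	}))
	defer ts.Close()

	content := bytes.Repeat([]byte("x"), 100000)
	if err := uploadAll(ts.Client(), ts.URL, bytes.NewReader(content), int64(len(content))); err != nil {
		fmt.Println(err)
	}
}

The 32 KB chunk size here simply mirrors what io.CopyBuffer hands the writer in uploadsession_test.go; the real writer keeps the same running offset in lastWrittenOffset.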