correct import aliasing (#2960)

Updates and corrects import aliasing according to
the following rules, in priority order (a short sketch follows the list):
1. If the package's own name is usable, import it without an alias.
2. Otherwise, alias the import to its package name.
3. If the package name itself is awkward, pick a sensible alias.
4. When names collide, alias the more distant imports.
5. Keep aliases consistent throughout the codebase.
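As an illustration, here is a hedged sketch of an import block that follows these rules. The import paths, aliases, and referenced identifiers are taken from the diff below; the `example` package name and the blank-identifier uses (added only so the snippet compiles on its own) plus the rule annotations are editorial, not text from the PR.

```go
package example

import (
	// Rule 1: the package name (diagnostics) is usable on its own, so the old `D` alias is dropped.
	"github.com/alcionai/corso/src/internal/diagnostics"

	// Rule 3: the module path ends in "kiota-serialization-json-go", which is not a usable
	// identifier, so a short, sensible alias is used consistently (rule 5).
	kjson "github.com/microsoft/kiota-serialization-json-go"

	// Rule 4: both of these packages are named "sites"; the more distant beta-SDK import
	// takes the alias so the stock msgraph package keeps its natural name.
	"github.com/microsoftgraph/msgraph-sdk-go/sites"

	betasites "github.com/alcionai/corso/src/internal/connector/graph/betasdk/sites"
)

// Blank references keep the example compilable without extra code; each identifier
// appears in the diff below.
var (
	_ = diagnostics.Span
	_ = kjson.NewJsonSerializationWriter
	_ sites.SitesRequestBuilderGetRequestConfiguration
	_ betasites.ItemPagesRequestBuilderGetRequestConfiguration
)
```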

---

#### Does this PR need a docs update or release note?

- [x]  No

#### Type of change

- [x] 🧹 Tech Debt/Cleanup

#### Issue(s)

* #1970

#### Test Plan

- [x]  Unit test
Authored by Keepers on 2023-03-28 14:48:22 -06:00; committed by GitHub.
parent 214206b75e
commit 940892fc07
28 changed files with 160 additions and 160 deletions

View File

@@ -13,7 +13,7 @@ import (
"github.com/alcionai/corso/src/internal/connector/sharepoint"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data"
- D "github.com/alcionai/corso/src/internal/diagnostics"
+ "github.com/alcionai/corso/src/internal/diagnostics"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control"
@@ -39,7 +39,7 @@ func (gc *GraphConnector) DataCollections(
ctrlOpts control.Options,
errs *fault.Bus,
) ([]data.BackupCollection, map[string]map[string]struct{}, error) {
- ctx, end := D.Span(ctx, "gc:dataCollections", D.Index("service", sels.Service.String()))
+ ctx, end := diagnostics.Span(ctx, "gc:dataCollections", diagnostics.Index("service", sels.Service.String()))
defer end()
err := verifyBackupInputs(sels, gc.GetSiteIDs())
@@ -201,7 +201,7 @@ func (gc *GraphConnector) RestoreDataCollections(
dcs []data.RestoreCollection,
errs *fault.Bus,
) (*details.Details, error) {
- ctx, end := D.Span(ctx, "connector:restore")
+ ctx, end := diagnostics.Span(ctx, "connector:restore")
defer end()
var (

View File

@@ -2,8 +2,8 @@ package api
import (
"github.com/alcionai/clues"
- absser "github.com/microsoft/kiota-abstractions-go/serialization"
+ "github.com/microsoft/kiota-abstractions-go/serialization"
- msgraphsdk "github.com/microsoftgraph/msgraph-sdk-go"
+ msgraphsdkgo "github.com/microsoftgraph/msgraph-sdk-go"
"github.com/alcionai/corso/src/internal/connector/graph/betasdk"
)
@@ -19,7 +19,7 @@ func (s BetaService) Client() *betasdk.BetaClient {
return s.client
}
- func NewBetaService(adpt *msgraphsdk.GraphRequestAdapter) *BetaService {
+ func NewBetaService(adpt *msgraphsdkgo.GraphRequestAdapter) *BetaService {
return &BetaService{
client: betasdk.NewBetaClient(adpt),
}
@@ -27,7 +27,7 @@ func NewBetaService(adpt *msgraphsdk.GraphRequestAdapter) *BetaService {
// Seraialize writes an M365 parsable object into a byte array using the built-in
// application/json writer within the adapter.
- func (s BetaService) Serialize(object absser.Parsable) ([]byte, error) {
+ func (s BetaService) Serialize(object serialization.Parsable) ([]byte, error) {
writer, err := s.client.Adapter().
GetSerializationWriterFactory().
GetSerializationWriter("application/json")

View File

@@ -5,7 +5,7 @@ import (
"fmt"
"github.com/alcionai/clues"
- absser "github.com/microsoft/kiota-abstractions-go"
+ abstractions "github.com/microsoft/kiota-abstractions-go"
msgraphgocore "github.com/microsoftgraph/msgraph-sdk-go-core"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/microsoftgraph/msgraph-sdk-go/users"
@@ -72,7 +72,7 @@ var userFilterNoGuests = "onPremisesSyncEnabled eq true OR userType ne 'Guest'"
var t = true
func userOptions(fs *string) *users.UsersRequestBuilderGetRequestConfiguration {
- headers := absser.NewRequestHeaders()
+ headers := abstractions.NewRequestHeaders()
headers.Add("ConsistencyLevel", "eventual")
return &users.UsersRequestBuilderGetRequestConfiguration{

View File

@@ -7,7 +7,7 @@ import (
"github.com/alcionai/clues"
"github.com/microsoft/kiota-abstractions-go/serialization"
- kioser "github.com/microsoft/kiota-serialization-json-go"
+ kjson "github.com/microsoft/kiota-serialization-json-go"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/microsoftgraph/msgraph-sdk-go/users"
@@ -286,7 +286,7 @@ func (c Contacts) Serialize(
var (
err error
- writer = kioser.NewJsonSerializationWriter()
+ writer = kjson.NewJsonSerializationWriter()
)
defer writer.Close()

View File

@@ -8,7 +8,7 @@ import (
"github.com/alcionai/clues"
"github.com/microsoft/kiota-abstractions-go/serialization"
- kioser "github.com/microsoft/kiota-serialization-json-go"
+ kjson "github.com/microsoft/kiota-serialization-json-go"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/microsoftgraph/msgraph-sdk-go/users"
@@ -321,7 +321,7 @@ func (c Events) Serialize(
var (
err error
- writer = kioser.NewJsonSerializationWriter()
+ writer = kjson.NewJsonSerializationWriter()
)
defer writer.Close()

View File

@@ -7,7 +7,7 @@ import (
"github.com/alcionai/clues"
"github.com/microsoft/kiota-abstractions-go/serialization"
- kioser "github.com/microsoft/kiota-serialization-json-go"
+ kjson "github.com/microsoft/kiota-serialization-json-go"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/microsoftgraph/msgraph-sdk-go/users"
@@ -331,7 +331,7 @@ func (c Mail) Serialize(
var (
err error
- writer = kioser.NewJsonSerializationWriter()
+ writer = kjson.NewJsonSerializationWriter()
)
defer writer.Close()

View File

@@ -4,7 +4,7 @@ import (
"context"
"github.com/microsoftgraph/msgraph-sdk-go/models"
- msusers "github.com/microsoftgraph/msgraph-sdk-go/users"
+ "github.com/microsoftgraph/msgraph-sdk-go/users"
"github.com/alcionai/corso/src/internal/connector/graph"
)
@@ -54,7 +54,7 @@ func (mau *mailAttachmentUploader) uploadSession(
attachmentName string,
attachmentSize int64,
) (models.UploadSessionable, error) {
- session := msusers.NewItemMailFoldersItemMessagesItemAttachmentsCreateUploadSessionPostRequestBody()
+ session := users.NewItemMailFoldersItemMessagesItemAttachmentsCreateUploadSessionPostRequestBody()
session.SetAttachmentItem(makeSessionAttachment(attachmentName, attachmentSize))
r, err := mau.
@@ -104,7 +104,7 @@ func (eau *eventAttachmentUploader) uploadSession(
attachmentName string,
attachmentSize int64,
) (models.UploadSessionable, error) {
- session := msusers.NewItemCalendarEventsItemAttachmentsCreateUploadSessionPostRequestBody()
+ session := users.NewItemCalendarEventsItemAttachmentsCreateUploadSessionPostRequestBody()
session.SetAttachmentItem(makeSessionAttachment(attachmentName, attachmentSize))
r, err := eau.service.Client().

View File

@@ -16,7 +16,7 @@ import (
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data"
- D "github.com/alcionai/corso/src/internal/diagnostics"
+ "github.com/alcionai/corso/src/internal/diagnostics"
"github.com/alcionai/corso/src/internal/observe"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/backup/details"
@@ -369,7 +369,7 @@ func restoreCollection(
deets *details.Builder,
errs *fault.Bus,
) (support.CollectionMetrics, bool) {
- ctx, end := D.Span(ctx, "gc:exchange:restoreCollection", D.Label("path", dc.FullPath()))
+ ctx, end := diagnostics.Span(ctx, "gc:exchange:restoreCollection", diagnostics.Label("path", dc.FullPath()))
defer end()
var (

View File

@@ -13,9 +13,9 @@ import (
"github.com/alcionai/clues"
backoff "github.com/cenkalti/backoff/v4"
"github.com/microsoft/kiota-abstractions-go/serialization"
- ka "github.com/microsoft/kiota-authentication-azure-go"
+ kauth "github.com/microsoft/kiota-authentication-azure-go"
khttp "github.com/microsoft/kiota-http-go"
- msgraphsdk "github.com/microsoftgraph/msgraph-sdk-go"
+ msgraphsdkgo "github.com/microsoftgraph/msgraph-sdk-go"
msgraphgocore "github.com/microsoftgraph/msgraph-sdk-go-core"
"golang.org/x/time/rate"
@@ -57,22 +57,22 @@ type QueryParams struct {
var _ Servicer = &Service{}
type Service struct {
- adapter *msgraphsdk.GraphRequestAdapter
+ adapter *msgraphsdkgo.GraphRequestAdapter
- client *msgraphsdk.GraphServiceClient
+ client *msgraphsdkgo.GraphServiceClient
}
- func NewService(adapter *msgraphsdk.GraphRequestAdapter) *Service {
+ func NewService(adapter *msgraphsdkgo.GraphRequestAdapter) *Service {
return &Service{
adapter: adapter,
- client: msgraphsdk.NewGraphServiceClient(adapter),
+ client: msgraphsdkgo.NewGraphServiceClient(adapter),
}
}
- func (s Service) Adapter() *msgraphsdk.GraphRequestAdapter {
+ func (s Service) Adapter() *msgraphsdkgo.GraphRequestAdapter {
return s.adapter
}
- func (s Service) Client() *msgraphsdk.GraphServiceClient {
+ func (s Service) Client() *msgraphsdkgo.GraphServiceClient {
return s.client
}
@@ -171,14 +171,14 @@ func MinimumBackoff(dur time.Duration) option {
func CreateAdapter(
tenant, client, secret string,
opts ...option,
- ) (*msgraphsdk.GraphRequestAdapter, error) {
+ ) (*msgraphsdkgo.GraphRequestAdapter, error) {
// Client Provider: Uses Secret for access to tenant-level data
cred, err := azidentity.NewClientSecretCredential(tenant, client, secret, nil)
if err != nil {
return nil, clues.Wrap(err, "creating m365 client identity")
}
- auth, err := ka.NewAzureIdentityAuthenticationProviderWithScopes(
+ auth, err := kauth.NewAzureIdentityAuthenticationProviderWithScopes(
cred,
[]string{"https://graph.microsoft.com/.default"},
)
@@ -188,7 +188,7 @@ func CreateAdapter(
httpClient := HTTPClient(opts...)
- return msgraphsdk.NewGraphRequestAdapterWithParseNodeFactoryAndSerializationWriterFactoryAndHttpClient(
+ return msgraphsdkgo.NewGraphRequestAdapterWithParseNodeFactoryAndSerializationWriterFactoryAndHttpClient(
auth,
nil, nil,
httpClient)
@@ -201,7 +201,7 @@ func CreateAdapter(
// to centralize this client to be passed downstream where api calls
// can utilize it on a per-download basis.
func HTTPClient(opts ...option) *http.Client {
- clientOptions := msgraphsdk.GetDefaultClientOptions()
+ clientOptions := msgraphsdkgo.GetDefaultClientOptions()
clientconfig := (&clientConfig{}).populate(opts...)
noOfRetries, minRetryDelay := clientconfig.applyMiddlewareConfig()
middlewares := GetKiotaMiddlewares(&clientOptions, noOfRetries, minRetryDelay)
@@ -258,10 +258,10 @@ func GetKiotaMiddlewares(
type Servicer interface {
// Client() returns msgraph Service client that can be used to process and execute
// the majority of the queries to the M365 Backstore
- Client() *msgraphsdk.GraphServiceClient
+ Client() *msgraphsdkgo.GraphServiceClient
// Adapter() returns GraphRequest adapter used to process large requests, create batches
// and page iterators
- Adapter() *msgraphsdk.GraphRequestAdapter
+ Adapter() *msgraphsdkgo.GraphRequestAdapter
}
// ---------------------------------------------------------------------------

View File

@@ -22,7 +22,7 @@ import (
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/sharepoint"
"github.com/alcionai/corso/src/internal/connector/support"
- D "github.com/alcionai/corso/src/internal/diagnostics"
+ "github.com/alcionai/corso/src/internal/diagnostics"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/filters"
@@ -119,7 +119,7 @@ func (gc *GraphConnector) createService() (*graph.Service, error) {
func (gc *GraphConnector) setTenantSites(ctx context.Context, errs *fault.Bus) error {
gc.Sites = map[string]string{}
- ctx, end := D.Span(ctx, "gc:setTenantSites")
+ ctx, end := diagnostics.Span(ctx, "gc:setTenantSites")
defer end()
sites, err := getResources(

View File

@@ -7,7 +7,7 @@ import (
"testing"
"github.com/alcionai/clues"
- kw "github.com/microsoft/kiota-serialization-json-go"
+ kjson "github.com/microsoft/kiota-serialization-json-go"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/stretchr/testify/require"
@@ -145,7 +145,7 @@ func GetMockListDefault(title string) models.Listable {
func GetMockListBytes(title string) ([]byte, error) {
list := GetMockListDefault(title)
- objectWriter := kw.NewJsonSerializationWriter()
+ objectWriter := kjson.NewJsonSerializationWriter()
defer objectWriter.Close()
err := objectWriter.WriteObjectValue("", list)

View File

@@ -6,8 +6,8 @@ import (
"testing"
"github.com/alcionai/clues"
- absser "github.com/microsoft/kiota-abstractions-go/serialization"
+ "github.com/microsoft/kiota-abstractions-go/serialization"
- js "github.com/microsoft/kiota-serialization-json-go"
+ kjson "github.com/microsoft/kiota-serialization-json-go"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/stretchr/testify/require"
@@ -715,7 +715,7 @@ func GetMockMessageWithNestedItemAttachmentContact(t *testing.T, nested []byte,
message, err := hydrateMessage(base)
require.NoError(t, err, clues.ToCore(err))
- parseNode, err := js.NewJsonParseNodeFactory().GetRootParseNode("application/json", nested)
+ parseNode, err := kjson.NewJsonParseNodeFactory().GetRootParseNode("application/json", nested)
require.NoError(t, err, clues.ToCore(err))
anObject, err := parseNode.GetObjectValue(models.CreateContactFromDiscriminatorValue)
@@ -733,8 +733,8 @@ func GetMockMessageWithNestedItemAttachmentContact(t *testing.T, nested []byte,
return serialize(t, message)
}
- func serialize(t *testing.T, item absser.Parsable) []byte {
+ func serialize(t *testing.T, item serialization.Parsable) []byte {
- wtr := js.NewJsonSerializationWriter()
+ wtr := kjson.NewJsonSerializationWriter()
err := wtr.WriteObjectValue("", item)
require.NoError(t, err, clues.ToCore(err))
@@ -745,7 +745,7 @@ func serialize(t *testing.T, item absser.Parsable) []byte {
}
func hydrateMessage(byteArray []byte) (models.Messageable, error) {
- parseNode, err := js.NewJsonParseNodeFactory().GetRootParseNode("application/json", byteArray)
+ parseNode, err := kjson.NewJsonParseNodeFactory().GetRootParseNode("application/json", byteArray)
if err != nil {
return nil, clues.Wrap(err, "deserializing bytes into base m365 object")
}

View File

@@ -8,10 +8,10 @@ import (
"github.com/alcionai/clues"
abstractions "github.com/microsoft/kiota-abstractions-go"
- msdrives "github.com/microsoftgraph/msgraph-sdk-go/drives"
+ "github.com/microsoftgraph/msgraph-sdk-go/drives"
"github.com/microsoftgraph/msgraph-sdk-go/models"
- mssites "github.com/microsoftgraph/msgraph-sdk-go/sites"
+ "github.com/microsoftgraph/msgraph-sdk-go/sites"
- msusers "github.com/microsoftgraph/msgraph-sdk-go/users"
+ "github.com/microsoftgraph/msgraph-sdk-go/users"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph"
@@ -34,8 +34,8 @@ const pageSize = int32(999)
type driveItemPager struct {
gs graph.Servicer
driveID string
- builder *msdrives.ItemRootDeltaRequestBuilder
+ builder *drives.ItemRootDeltaRequestBuilder
- options *msdrives.ItemRootDeltaRequestBuilderGetRequestConfiguration
+ options *drives.ItemRootDeltaRequestBuilderGetRequestConfiguration
}
func NewItemPager(
@@ -54,9 +54,9 @@ func NewItemPager(
}
headers.Add("Prefer", strings.Join(preferHeaderItems, ","))
- requestConfig := &msdrives.ItemRootDeltaRequestBuilderGetRequestConfiguration{
+ requestConfig := &drives.ItemRootDeltaRequestBuilderGetRequestConfiguration{
Headers: headers,
- QueryParameters: &msdrives.ItemRootDeltaRequestBuilderGetQueryParameters{
+ QueryParameters: &drives.ItemRootDeltaRequestBuilderGetQueryParameters{
Top: &pageCount,
Select: fields,
},
@@ -70,7 +70,7 @@ func NewItemPager(
}
if len(link) > 0 {
- res.builder = msdrives.NewItemRootDeltaRequestBuilder(link, gs.Adapter())
+ res.builder = drives.NewItemRootDeltaRequestBuilder(link, gs.Adapter())
}
return res
@@ -91,7 +91,7 @@ func (p *driveItemPager) GetPage(ctx context.Context) (api.DeltaPageLinker, erro
}
func (p *driveItemPager) SetNext(link string) {
- p.builder = msdrives.NewItemRootDeltaRequestBuilder(link, p.gs.Adapter())
+ p.builder = drives.NewItemRootDeltaRequestBuilder(link, p.gs.Adapter())
}
func (p *driveItemPager) Reset() {
@@ -104,8 +104,8 @@ func (p *driveItemPager) ValuesIn(l api.DeltaPageLinker) ([]models.DriveItemable
type userDrivePager struct {
gs graph.Servicer
- builder *msusers.ItemDrivesRequestBuilder
+ builder *users.ItemDrivesRequestBuilder
- options *msusers.ItemDrivesRequestBuilderGetRequestConfiguration
+ options *users.ItemDrivesRequestBuilderGetRequestConfiguration
}
func NewUserDrivePager(
@@ -113,8 +113,8 @@ func NewUserDrivePager(
userID string,
fields []string,
) *userDrivePager {
- requestConfig := &msusers.ItemDrivesRequestBuilderGetRequestConfiguration{
+ requestConfig := &users.ItemDrivesRequestBuilderGetRequestConfiguration{
- QueryParameters: &msusers.ItemDrivesRequestBuilderGetQueryParameters{
+ QueryParameters: &users.ItemDrivesRequestBuilderGetQueryParameters{
Select: fields,
},
}
@@ -143,7 +143,7 @@ func (p *userDrivePager) GetPage(ctx context.Context) (api.PageLinker, error) {
}
func (p *userDrivePager) SetNext(link string) {
- p.builder = msusers.NewItemDrivesRequestBuilder(link, p.gs.Adapter())
+ p.builder = users.NewItemDrivesRequestBuilder(link, p.gs.Adapter())
}
func (p *userDrivePager) ValuesIn(l api.PageLinker) ([]models.Driveable, error) {
@@ -152,8 +152,8 @@ func (p *userDrivePager) ValuesIn(l api.PageLinker) ([]models.Driveable, error)
type siteDrivePager struct {
gs graph.Servicer
- builder *mssites.ItemDrivesRequestBuilder
+ builder *sites.ItemDrivesRequestBuilder
- options *mssites.ItemDrivesRequestBuilderGetRequestConfiguration
+ options *sites.ItemDrivesRequestBuilderGetRequestConfiguration
}
// NewSiteDrivePager is a constructor for creating a siteDrivePager
@@ -166,8 +166,8 @@ func NewSiteDrivePager(
siteID string,
fields []string,
) *siteDrivePager {
- requestConfig := &mssites.ItemDrivesRequestBuilderGetRequestConfiguration{
+ requestConfig := &sites.ItemDrivesRequestBuilderGetRequestConfiguration{
- QueryParameters: &mssites.ItemDrivesRequestBuilderGetQueryParameters{
+ QueryParameters: &sites.ItemDrivesRequestBuilderGetQueryParameters{
Select: fields,
},
}
@@ -196,7 +196,7 @@ func (p *siteDrivePager) GetPage(ctx context.Context) (api.PageLinker, error) {
}
func (p *siteDrivePager) SetNext(link string) {
- p.builder = mssites.NewItemDrivesRequestBuilder(link, p.gs.Adapter())
+ p.builder = sites.NewItemDrivesRequestBuilder(link, p.gs.Adapter())
}
func (p *siteDrivePager) ValuesIn(l api.PageLinker) ([]models.Driveable, error) {
@@ -221,7 +221,7 @@ func GetAllDrives(
retry bool,
maxRetryCount int,
) ([]models.Driveable, error) {
- drives := []models.Driveable{}
+ ds := []models.Driveable{}
if !retry {
maxRetryCount = 0
@@ -261,7 +261,7 @@ func GetAllDrives(
return nil, graph.Wrap(ctx, err, "extracting drives from response")
}
- drives = append(drives, tmp...)
+ ds = append(ds, tmp...)
nextLink := ptr.Val(page.GetOdataNextLink())
if len(nextLink) == 0 {
@@ -271,7 +271,7 @@ func GetAllDrives(
pager.SetNext(nextLink)
}
- logger.Ctx(ctx).Debugf("retrieved %d valid drives", len(drives))
+ logger.Ctx(ctx).Debugf("retrieved %d valid drives", len(ds))
- return drives, nil
+ return ds, nil
}

View File

@@ -6,7 +6,7 @@ import (
"strings"
"github.com/alcionai/clues"
- msdrive "github.com/microsoftgraph/msgraph-sdk-go/drive"
+ "github.com/microsoftgraph/msgraph-sdk-go/drive"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"golang.org/x/exp/maps"
@@ -207,7 +207,7 @@ func getFolder(
// https://learn.microsoft.com/en-us/graph/onedrive-addressing-driveitems#path-based-addressing
// - which allows us to lookup an item by its path relative to the parent ID
rawURL := fmt.Sprintf(itemByPathRawURLFmt, driveID, parentFolderID, folderName)
- builder := msdrive.NewItemsDriveItemItemRequestBuilder(rawURL, service.Adapter())
+ builder := drive.NewItemsDriveItemItemRequestBuilder(rawURL, service.Adapter())
var (
foundItem models.DriveItemable
@@ -241,7 +241,7 @@ func CreateItem(
// Graph SDK doesn't yet provide a POST method for `/children` so we set the `rawUrl` ourselves as recommended
// here: https://github.com/microsoftgraph/msgraph-sdk-go/issues/155#issuecomment-1136254310
rawURL := fmt.Sprintf(itemChildrenRawURLFmt, driveID, parentFolderID)
- builder := msdrive.NewItemsRequestBuilder(rawURL, service.Adapter())
+ builder := drive.NewItemsRequestBuilder(rawURL, service.Adapter())
newItem, err := builder.Post(ctx, newItem, nil)
if err != nil {

View File

@@ -9,7 +9,7 @@ import (
"strings"
"github.com/alcionai/clues"
- msdrives "github.com/microsoftgraph/msgraph-sdk-go/drives"
+ "github.com/microsoftgraph/msgraph-sdk-go/drives"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/alcionai/corso/src/internal/common/ptr"
@@ -354,7 +354,7 @@ func driveItemWriter(
driveID, itemID string,
itemSize int64,
) (io.Writer, error) {
- session := msdrives.NewItemItemsItemCreateUploadSessionPostRequestBody()
+ session := drives.NewItemItemsItemCreateUploadSessionPostRequestBody()
ctx = clues.Add(ctx, "upload_item_id", itemID)
r, err := service.Client().DrivesById(driveID).ItemsById(itemID).CreateUploadSession().Post(ctx, session, nil)

View File

@@ -15,7 +15,7 @@ import (
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data"
- D "github.com/alcionai/corso/src/internal/diagnostics"
+ "github.com/alcionai/corso/src/internal/diagnostics"
"github.com/alcionai/corso/src/internal/observe"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/backup/details"
@@ -144,7 +144,7 @@ func RestoreCollection(
el = errs.Local()
)
- ctx, end := D.Span(ctx, "gc:oneDrive:restoreCollection", D.Label("path", directory))
+ ctx, end := diagnostics.Span(ctx, "gc:oneDrive:restoreCollection", diagnostics.Label("path", directory))
defer end()
drivePath, err := path.ToOneDrivePath(directory)
@@ -573,7 +573,7 @@ func restoreData(
copyBuffer []byte,
source driveSource,
) (string, details.ItemInfo, error) {
- ctx, end := D.Span(ctx, "gc:oneDrive:restoreItem", D.Label("item_uuid", itemData.UUID()))
+ ctx, end := diagnostics.Span(ctx, "gc:oneDrive:restoreItem", diagnostics.Label("item_uuid", itemData.UUID()))
defer end()
ctx = clues.Add(ctx, "item_name", itemData.UUID())

View File

@@ -7,17 +7,17 @@ import (
"sync"
"github.com/alcionai/clues"
- msmodels "github.com/microsoftgraph/msgraph-sdk-go/models"
+ "github.com/microsoftgraph/msgraph-sdk-go/models"
- mssites "github.com/microsoftgraph/msgraph-sdk-go/sites"
+ "github.com/microsoftgraph/msgraph-sdk-go/sites"
"github.com/alcionai/corso/src/internal/common/ptr"
- discover "github.com/alcionai/corso/src/internal/connector/discovery/api"
+ dapi "github.com/alcionai/corso/src/internal/connector/discovery/api"
"github.com/alcionai/corso/src/internal/connector/graph"
- "github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
+ betamodels "github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
- "github.com/alcionai/corso/src/internal/connector/graph/betasdk/sites"
+ betasites "github.com/alcionai/corso/src/internal/connector/graph/betasdk/sites"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data"
- D "github.com/alcionai/corso/src/internal/diagnostics"
+ "github.com/alcionai/corso/src/internal/diagnostics"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/fault"
)
@@ -26,13 +26,13 @@ import (
// Returns error if error experienced during the call
func GetSitePages(
ctx context.Context,
- serv *discover.BetaService,
+ serv *dapi.BetaService,
siteID string,
pages []string,
errs *fault.Bus,
- ) ([]models.SitePageable, error) {
+ ) ([]betamodels.SitePageable, error) {
var (
- col = make([]models.SitePageable, 0)
+ col = make([]betamodels.SitePageable, 0)
semaphoreCh = make(chan struct{}, fetchChannelSize)
opts = retrieveSitePageOptions()
el = errs.Local()
@@ -42,7 +42,7 @@ func GetSitePages(
defer close(semaphoreCh)
- updatePages := func(page models.SitePageable) {
+ updatePages := func(page betamodels.SitePageable) {
m.Lock()
defer m.Unlock()
@@ -63,7 +63,7 @@ func GetSitePages(
defer func() { <-semaphoreCh }()
var (
- page models.SitePageable
+ page betamodels.SitePageable
err error
)
@@ -83,10 +83,10 @@ func GetSitePages(
}
// GetSite returns a minimal Site with the SiteID and the WebURL
- func GetSite(ctx context.Context, gs graph.Servicer, siteID string) (msmodels.Siteable, error) {
+ func GetSite(ctx context.Context, gs graph.Servicer, siteID string) (models.Siteable, error) {
// resp *sites.SiteItemRequestBuilderresp *sites.SiteItemRequestBuilde
- options := &mssites.SiteItemRequestBuilderGetRequestConfiguration{
+ options := &sites.SiteItemRequestBuilderGetRequestConfiguration{
- QueryParameters: &mssites.SiteItemRequestBuilderGetQueryParameters{
+ QueryParameters: &sites.SiteItemRequestBuilderGetQueryParameters{
Select: []string{"webUrl"},
},
}
@@ -100,12 +100,12 @@ func GetSite(ctx context.Context, gs graph.Servicer, siteID string) (msmodels.Si
}
// fetchPages utility function to return the tuple of item
- func FetchPages(ctx context.Context, bs *discover.BetaService, siteID string) ([]NameID, error) {
+ func FetchPages(ctx context.Context, bs *dapi.BetaService, siteID string) ([]NameID, error) {
var (
builder = bs.Client().SitesById(siteID).Pages()
opts = fetchPageOptions()
pages = make([]NameID, 0)
- resp models.SitePageCollectionResponseable
+ resp betamodels.SitePageCollectionResponseable
err error
)
@@ -134,7 +134,7 @@ func FetchPages(ctx context.Context, bs *discover.BetaService, siteID string) ([
break
}
- builder = sites.NewItemPagesRequestBuilder(link, bs.Client().Adapter())
+ builder = betasites.NewItemPagesRequestBuilder(link, bs.Client().Adapter())
}
return pages, nil
@@ -142,10 +142,10 @@ func FetchPages(ctx context.Context, bs *discover.BetaService, siteID string) ([
// fetchPageOptions is used to return minimal information reltating to Site Pages
// Pages API: https://learn.microsoft.com/en-us/graph/api/resources/sitepage?view=graph-rest-beta
- func fetchPageOptions() *sites.ItemPagesRequestBuilderGetRequestConfiguration {
+ func fetchPageOptions() *betasites.ItemPagesRequestBuilderGetRequestConfiguration {
fields := []string{"id", "name"}
- options := &sites.ItemPagesRequestBuilderGetRequestConfiguration{
+ options := &betasites.ItemPagesRequestBuilderGetRequestConfiguration{
- QueryParameters: &sites.ItemPagesRequestBuilderGetQueryParameters{
+ QueryParameters: &betasites.ItemPagesRequestBuilderGetQueryParameters{
Select: fields,
},
}
@@ -157,7 +157,7 @@ func fetchPageOptions() *sites.ItemPagesRequestBuilderGetRequestConfiguration {
// https://learn.microsoft.com/en-us/graph/api/sitepage-delete?view=graph-rest-beta
func DeleteSitePage(
ctx context.Context,
- serv *discover.BetaService,
+ serv *dapi.BetaService,
siteID, pageID string,
) error {
err := serv.Client().SitesById(siteID).PagesById(pageID).Delete(ctx, nil)
@@ -169,10 +169,10 @@ func DeleteSitePage(
}
// retrievePageOptions returns options to expand
- func retrieveSitePageOptions() *sites.ItemPagesSitePageItemRequestBuilderGetRequestConfiguration {
+ func retrieveSitePageOptions() *betasites.ItemPagesSitePageItemRequestBuilderGetRequestConfiguration {
fields := []string{"canvasLayout"}
- options := &sites.ItemPagesSitePageItemRequestBuilderGetRequestConfiguration{
+ options := &betasites.ItemPagesSitePageItemRequestBuilderGetRequestConfiguration{
- QueryParameters: &sites.ItemPagesSitePageItemRequestBuilderGetQueryParameters{
+ QueryParameters: &betasites.ItemPagesSitePageItemRequestBuilderGetQueryParameters{
Expand: fields,
},
}
@@ -182,11 +182,11 @@ func retrieveSitePageOptions() *sites.ItemPagesSitePageItemRequestBuilderGetRequ
func RestoreSitePage(
ctx context.Context,
- service *discover.BetaService,
+ service *dapi.BetaService,
itemData data.Stream,
siteID, destName string,
) (details.ItemInfo, error) {
- ctx, end := D.Span(ctx, "gc:sharepoint:restorePage", D.Label("item_uuid", itemData.UUID()))
+ ctx, end := diagnostics.Span(ctx, "gc:sharepoint:restorePage", diagnostics.Label("item_uuid", itemData.UUID()))
defer end()
var (
@@ -254,7 +254,7 @@ func RestoreSitePage(
// Helpers
// ==============================
// PageInfo extracts useful metadata into struct for book keeping
- func PageInfo(page models.SitePageable, size int64) *details.SharePointInfo {
+ func PageInfo(page betamodels.SitePageable, size int64) *details.SharePointInfo {
var (
name = ptr.Val(page.GetTitle())
webURL = ptr.Val(page.GetWebUrl())

View File

@@ -7,13 +7,13 @@ import (
"time"
"github.com/alcionai/clues"
- absser "github.com/microsoft/kiota-abstractions-go/serialization"
+ "github.com/microsoft/kiota-abstractions-go/serialization"
- kw "github.com/microsoft/kiota-serialization-json-go"
+ kjson "github.com/microsoft/kiota-serialization-json-go"
"github.com/alcionai/corso/src/internal/common/ptr"
- "github.com/alcionai/corso/src/internal/connector/discovery/api"
+ dapi "github.com/alcionai/corso/src/internal/connector/discovery/api"
"github.com/alcionai/corso/src/internal/connector/graph"
- sapi "github.com/alcionai/corso/src/internal/connector/sharepoint/api"
+ "github.com/alcionai/corso/src/internal/connector/sharepoint/api"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/observe"
@@ -56,7 +56,7 @@ type Collection struct {
category DataCategory
service graph.Servicer
ctrl control.Options
- betaService *api.BetaService
+ betaService *dapi.BetaService
statusUpdater support.StatusUpdater
}
@@ -179,7 +179,7 @@ func (sc *Collection) runPopulate(ctx context.Context, errs *fault.Bus) (support
var (
err error
metrics support.CollectionMetrics
- writer = kw.NewJsonSerializationWriter()
+ writer = kjson.NewJsonSerializationWriter()
)
// TODO: Insert correct ID for CollectionProgress
@@ -208,7 +208,7 @@ func (sc *Collection) runPopulate(ctx context.Context, errs *fault.Bus) (support
// models.Listable objects based on M365 IDs from the jobs field.
func (sc *Collection) retrieveLists(
ctx context.Context,
- wtr *kw.JsonSerializationWriter,
+ wtr *kjson.JsonSerializationWriter,
progress chan<- struct{},
errs *fault.Bus,
) (support.CollectionMetrics, error) {
@@ -263,7 +263,7 @@ func (sc *Collection) retrieveLists(
func (sc *Collection) retrievePages(
ctx context.Context,
- wtr *kw.JsonSerializationWriter,
+ wtr *kjson.JsonSerializationWriter,
progress chan<- struct{},
errs *fault.Bus,
) (support.CollectionMetrics, error) {
@@ -277,14 +277,14 @@ func (sc *Collection) retrievePages(
return metrics, clues.New("beta service required").WithClues(ctx)
}
- parent, err := sapi.GetSite(ctx, sc.service, sc.fullPath.ResourceOwner())
+ parent, err := api.GetSite(ctx, sc.service, sc.fullPath.ResourceOwner())
if err != nil {
return metrics, err
}
root := ptr.Val(parent.GetWebUrl())
- pages, err := sapi.GetSitePages(ctx, betaService, sc.fullPath.ResourceOwner(), sc.jobs, errs)
+ pages, err := api.GetSitePages(ctx, betaService, sc.fullPath.ResourceOwner(), sc.jobs, errs)
if err != nil {
return metrics, err
}
@@ -325,8 +325,8 @@ func (sc *Collection) retrievePages(
func serializeContent(
ctx context.Context,
- writer *kw.JsonSerializationWriter,
+ writer *kjson.JsonSerializationWriter,
- obj absser.Parsable,
+ obj serialization.Parsable,
) ([]byte, error) {
defer writer.Close()

View File

@@ -6,10 +6,10 @@ import (
"github.com/alcionai/clues"
- "github.com/alcionai/corso/src/internal/connector/discovery/api"
+ dapi "github.com/alcionai/corso/src/internal/connector/discovery/api"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/onedrive"
- sapi "github.com/alcionai/corso/src/internal/connector/sharepoint/api"
+ "github.com/alcionai/corso/src/internal/connector/sharepoint/api"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/observe"
@@ -243,9 +243,9 @@ func collectPages(
return nil, clues.Wrap(err, "creating azure client adapter")
}
- betaService := api.NewBetaService(adpt)
+ betaService := dapi.NewBetaService(adpt)
- tuples, err := sapi.FetchPages(ctx, betaService, siteID)
+ tuples, err := api.FetchPages(ctx, betaService, siteID)
if err != nil {
return nil, err
}

View File

@@ -6,7 +6,7 @@ import (
"github.com/alcionai/clues"
"github.com/microsoftgraph/msgraph-sdk-go/models"
- mssite "github.com/microsoftgraph/msgraph-sdk-go/sites"
+ "github.com/microsoftgraph/msgraph-sdk-go/sites"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph"
@@ -18,12 +18,12 @@ type listTuple struct {
id string
}
- func preFetchListOptions() *mssite.ItemListsRequestBuilderGetRequestConfiguration {
+ func preFetchListOptions() *sites.ItemListsRequestBuilderGetRequestConfiguration {
selecting := []string{"id", "displayName"}
- queryOptions := mssite.ItemListsRequestBuilderGetQueryParameters{
+ queryOptions := sites.ItemListsRequestBuilderGetQueryParameters{
Select: selecting,
}
- options := &mssite.ItemListsRequestBuilderGetRequestConfiguration{
+ options := &sites.ItemListsRequestBuilderGetRequestConfiguration{
QueryParameters: &queryOptions,
}
@@ -66,7 +66,7 @@ func preFetchLists(
break
}
- builder = mssite.NewItemListsRequestBuilder(link, gs.Adapter())
+ builder = sites.NewItemListsRequestBuilder(link, gs.Adapter())
}
return listTuples, nil
@@ -234,7 +234,7 @@ func fetchListItems(
break
}
- builder = mssite.NewItemListsItemItemsRequestBuilder(link, gs.Adapter())
+ builder = sites.NewItemListsItemItemsRequestBuilder(link, gs.Adapter())
}
return itms, el.Failure()
@@ -267,7 +267,7 @@ func fetchColumns(
break
}
- builder = mssite.NewItemListsItemColumnsRequestBuilder(link, gs.Adapter())
+ builder = sites.NewItemListsItemColumnsRequestBuilder(link, gs.Adapter())
}
} else {
builder := gs.Client().SitesById(siteID).ListsById(listID).ContentTypesById(cTypeID).Columns()
@@ -285,7 +285,7 @@ func fetchColumns(
break
}
- builder = mssite.NewItemListsItemContentTypesItemColumnsRequestBuilder(link, gs.Adapter())
+ builder = sites.NewItemListsItemContentTypesItemColumnsRequestBuilder(link, gs.Adapter())
}
}
@@ -351,7 +351,7 @@ func fetchContentTypes(
break
}
- builder = mssite.NewItemListsItemContentTypesRequestBuilder(link, gs.Adapter())
+ builder = sites.NewItemListsItemContentTypesRequestBuilder(link, gs.Adapter())
}
return cTypes, el.Failure()
@@ -380,7 +380,7 @@ func fetchColumnLinks(
break
}
- builder = mssite.NewItemListsItemContentTypesItemColumnLinksRequestBuilder(
+ builder = sites.NewItemListsItemContentTypesItemColumnLinksRequestBuilder(
link,
gs.Adapter())
}

View File

@@ -3,8 +3,8 @@ package sharepoint
import (
"context"
- absser "github.com/microsoft/kiota-abstractions-go/serialization"
+ "github.com/microsoft/kiota-abstractions-go/serialization"
- mssite "github.com/microsoftgraph/msgraph-sdk-go/sites"
+ "github.com/microsoftgraph/msgraph-sdk-go/sites"
"github.com/alcionai/corso/src/internal/connector/graph"
)
@@ -12,17 +12,17 @@ import (
// GetAllSitesForTenant makes a GraphQuery request retrieving all sites in the tenant.
// Due to restrictions in filter capabilities for site queries, the returned iterable
// will contain all personal sites for all users in the org.
- func GetAllSitesForTenant(ctx context.Context, gs graph.Servicer) (absser.Parsable, error) {
+ func GetAllSitesForTenant(ctx context.Context, gs graph.Servicer) (serialization.Parsable, error) {
- options := &mssite.SitesRequestBuilderGetRequestConfiguration{
+ options := &sites.SitesRequestBuilderGetRequestConfiguration{
- QueryParameters: &mssite.SitesRequestBuilderGetQueryParameters{
+ QueryParameters: &sites.SitesRequestBuilderGetQueryParameters{
Select: []string{"id", "name", "weburl"},
},
}
- sites, err := gs.Client().Sites().Get(ctx, options)
+ ss, err := gs.Client().Sites().Get(ctx, options)
if err != nil {
return nil, graph.Wrap(ctx, err, "getting sites")
}
- return sites, nil
+ return ss, nil
}

View File

@@ -10,13 +10,13 @@ import (
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/alcionai/corso/src/internal/common/ptr"
- discover "github.com/alcionai/corso/src/internal/connector/discovery/api"
+ dapi "github.com/alcionai/corso/src/internal/connector/discovery/api"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/onedrive"
"github.com/alcionai/corso/src/internal/connector/sharepoint/api"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data"
- D "github.com/alcionai/corso/src/internal/diagnostics"
+ "github.com/alcionai/corso/src/internal/diagnostics"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control"
@@ -143,7 +143,7 @@ func restoreListItem(
itemData data.Stream,
siteID, destName string,
) (details.ItemInfo, error) {
- ctx, end := D.Span(ctx, "gc:sharepoint:restoreList", D.Label("item_uuid", itemData.UUID()))
+ ctx, end := diagnostics.Span(ctx, "gc:sharepoint:restoreList", diagnostics.Label("item_uuid", itemData.UUID()))
defer end()
ctx = clues.Add(ctx, "list_item_id", itemData.UUID())
@@ -215,7 +215,7 @@ func RestoreListCollection(
deets *details.Builder,
errs *fault.Bus,
) (support.CollectionMetrics, error) {
- ctx, end := D.Span(ctx, "gc:sharepoint:restoreListCollection", D.Label("path", dc.FullPath()))
+ ctx, end := diagnostics.Span(ctx, "gc:sharepoint:restoreListCollection", diagnostics.Label("path", dc.FullPath()))
defer end()
var (
@@ -300,7 +300,7 @@ func RestorePageCollection(
)
trace.Log(ctx, "gc:sharepoint:restorePageCollection", directory.String())
- ctx, end := D.Span(ctx, "gc:sharepoint:restorePageCollection", D.Label("path", dc.FullPath()))
+ ctx, end := diagnostics.Span(ctx, "gc:sharepoint:restorePageCollection", diagnostics.Label("path", dc.FullPath()))
defer end()
@@ -314,7 +314,7 @@ func RestorePageCollection(
var (
el = errs.Local()
- service = discover.NewBetaService(adpt)
+ service = dapi.NewBetaService(adpt)
items = dc.Items(ctx, errs)
)

View File

@@ -2,17 +2,17 @@ package support
import (
"github.com/alcionai/clues"
- absser "github.com/microsoft/kiota-abstractions-go/serialization"
+ "github.com/microsoft/kiota-abstractions-go/serialization"
- js "github.com/microsoft/kiota-serialization-json-go"
+ kjson "github.com/microsoft/kiota-serialization-json-go"
"github.com/microsoftgraph/msgraph-sdk-go/models"
- bmodels "github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
+ betamodels "github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
)
// CreateFromBytes helper function to initialize m365 object form bytes.
// @param bytes -> source, createFunc -> abstract function for initialization
- func CreateFromBytes(bytes []byte, createFunc absser.ParsableFactory) (absser.Parsable, error) {
+ func CreateFromBytes(bytes []byte, createFunc serialization.ParsableFactory) (serialization.Parsable, error) {
- parseNode, err := js.NewJsonParseNodeFactory().GetRootParseNode("application/json", bytes)
+ parseNode, err := kjson.NewJsonParseNodeFactory().GetRootParseNode("application/json", bytes)
if err != nil {
return nil, clues.Wrap(err, "deserializing bytes into base m365 object")
}
@@ -75,13 +75,13 @@ func CreateListFromBytes(bytes []byte) (models.Listable, error) {
}
// CreatePageFromBytes transforms given bytes in models.SitePageable object
- func CreatePageFromBytes(bytes []byte) (bmodels.SitePageable, error) {
+ func CreatePageFromBytes(bytes []byte) (betamodels.SitePageable, error) {
- parsable, err := CreateFromBytes(bytes, bmodels.CreateSitePageFromDiscriminatorValue)
+ parsable, err := CreateFromBytes(bytes, betamodels.CreateSitePageFromDiscriminatorValue)
if err != nil {
return nil, clues.Wrap(err, "deserializing bytes to sharepoint page")
}
- page := parsable.(bmodels.SitePageable)
+ page := parsable.(betamodels.SitePageable)
return page, nil
}

View File

@@ -24,7 +24,7 @@ import (
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/graph/metadata"
"github.com/alcionai/corso/src/internal/data"
- D "github.com/alcionai/corso/src/internal/diagnostics"
+ "github.com/alcionai/corso/src/internal/diagnostics"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger"
@@ -546,7 +546,7 @@ func getStreamItemFunc(
progress *corsoProgress,
) func(context.Context, func(context.Context, fs.Entry) error) error {
return func(ctx context.Context, cb func(context.Context, fs.Entry) error) error {
- ctx, end := D.Span(ctx, "kopia:getStreamItemFunc")
+ ctx, end := diagnostics.Span(ctx, "kopia:getStreamItemFunc")
defer end()
// Return static entries in this directory first.

View File

@@ -13,7 +13,7 @@ import (
"github.com/kopia/kopia/snapshot/snapshotfs"
"github.com/alcionai/corso/src/internal/data"
- D "github.com/alcionai/corso/src/internal/diagnostics"
+ "github.com/alcionai/corso/src/internal/diagnostics"
"github.com/alcionai/corso/src/internal/stats"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/fault"
@@ -143,7 +143,7 @@ func (w Wrapper) BackupCollections(
return nil, nil, nil, clues.Stack(errNotConnected).WithClues(ctx)
}
- ctx, end := D.Span(ctx, "kopia:backupCollections")
+ ctx, end := diagnostics.Span(ctx, "kopia:backupCollections")
defer end()
if len(collections) == 0 && len(globalExcludeSet) == 0 {
@@ -396,7 +396,7 @@ func (w Wrapper) RestoreMultipleItems(
bcounter ByteCounter,
errs *fault.Bus,
) ([]data.RestoreCollection, error) {
- ctx, end := D.Span(ctx, "kopia:restoreMultipleItems")
+ ctx, end := diagnostics.Span(ctx, "kopia:restoreMultipleItems")
defer end()
if len(paths) == 0 {

View File

@@ -12,7 +12,7 @@ import (
"github.com/alcionai/corso/src/internal/connector"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data"
- D "github.com/alcionai/corso/src/internal/diagnostics"
+ "github.com/alcionai/corso/src/internal/diagnostics"
"github.com/alcionai/corso/src/internal/events"
"github.com/alcionai/corso/src/internal/kopia"
"github.com/alcionai/corso/src/internal/model"
@@ -111,7 +111,7 @@ func (op *BackupOperation) Run(ctx context.Context) (err error) {
}
}()
- ctx, end := D.Span(ctx, "operations:backup:run")
+ ctx, end := diagnostics.Span(ctx, "operations:backup:run")
defer func() {
end()
// wait for the progress display to clean up

View File

@@ -13,7 +13,7 @@ import (
"github.com/alcionai/corso/src/internal/connector/onedrive"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data"
- D "github.com/alcionai/corso/src/internal/diagnostics"
+ "github.com/alcionai/corso/src/internal/diagnostics"
"github.com/alcionai/corso/src/internal/events"
"github.com/alcionai/corso/src/internal/kopia"
"github.com/alcionai/corso/src/internal/model"
@@ -125,7 +125,7 @@ func (op *RestoreOperation) Run(ctx context.Context) (restoreDetails *details.De
// Setup
// -----
- ctx, end := D.Span(ctx, "operations:restore:run")
+ ctx, end := diagnostics.Span(ctx, "operations:restore:run")
defer func() {
end()
// wait for the progress display to clean up

View File

@@ -6,7 +6,7 @@ import (
"github.com/alcionai/clues"
"golang.org/x/exp/maps"
- D "github.com/alcionai/corso/src/internal/diagnostics"
+ "github.com/alcionai/corso/src/internal/diagnostics"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/filters"
@@ -304,7 +304,7 @@ func reduce[T scopeT, C categoryT](
dataCategories map[path.CategoryType]C,
errs *fault.Bus,
) *details.Details {
- ctx, end := D.Span(ctx, "selectors:reduce")
+ ctx, end := diagnostics.Span(ctx, "selectors:reduce")
defer end()
if deets == nil {