Split services and collections for OneDrive & SharePoint (#4002)
Code movement only; no functional changes.
Moved services to `/internal/m365/service/{onedrive,sharepoint,exchange}`
Moved collections to `/internal/m365/collection/{drive,site}`
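
The moves show up downstream mostly as import-path rewrites (a few helpers are also exported along the way, e.g. `drive.NewLibraryBackupHandler` and `drive.FetchAndReadMetadata` in the diff below). A minimal sketch — not part of this PR — of the new package layout, using blank imports only so it compiles standalone inside the corso module; the paths are taken from the hunks below:

```go
// Sketch only: illustrates the package moves in this PR via blank imports,
// so the file compiles on its own. Real callers import these packages by
// name and use their exported symbols.
package main

import (
	_ "github.com/alcionai/corso/src/internal/m365/collection/drive"          // was internal/m365/onedrive
	_ "github.com/alcionai/corso/src/internal/m365/collection/drive/metadata" // was internal/m365/onedrive/metadata
	_ "github.com/alcionai/corso/src/internal/m365/collection/site"           // was internal/m365/sharepoint
	_ "github.com/alcionai/corso/src/internal/m365/service/onedrive"          // service-level OneDrive entry points
	_ "github.com/alcionai/corso/src/internal/m365/service/sharepoint"        // service-level SharePoint entry points
)

func main() {}
```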
---
#### Does this PR need a docs update or release note?
- [ ] ✅ Yes, it's included
- [ ] 🕐 Yes, but in a later PR
- [x] ⛔ No
#### Type of change
<!--- Please check the type of change your PR introduces: --->
- [ ] 🌻 Feature
- [ ] 🐛 Bugfix
- [ ] 🗺️ Documentation
- [ ] 🤖 Supportability/Tests
- [ ] 💻 CI/Deployment
- [x] 🧹 Tech Debt/Cleanup
#### Issue(s)
<!-- Can reference multiple issues. Use one of the following "magic words" - "closes, fixes" to auto-close the Github issue. -->
* #<issue>
#### Test Plan
<!-- How will this be tested prior to merging.-->
- [ ] 💪 Manual
- [ ] ⚡ Unit test
- [ ] 💚 E2E
Commit 8c939c0f0d (parent bffaebd351)
@@ -17,9 +17,9 @@ import (
 "github.com/alcionai/corso/src/internal/common/str"
 "github.com/alcionai/corso/src/internal/data"
 "github.com/alcionai/corso/src/internal/m365"
-exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock"
-odStub "github.com/alcionai/corso/src/internal/m365/onedrive/stub"
 "github.com/alcionai/corso/src/internal/m365/resource"
+exchMock "github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
+odStub "github.com/alcionai/corso/src/internal/m365/service/onedrive/stub"
 m365Stub "github.com/alcionai/corso/src/internal/m365/stub"
 "github.com/alcionai/corso/src/internal/operations/inject"
 "github.com/alcionai/corso/src/internal/tester"
@@ -5,8 +5,8 @@ import (

 . "github.com/alcionai/corso/src/cli/print"
 "github.com/alcionai/corso/src/cli/utils"
-exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock"
 "github.com/alcionai/corso/src/internal/m365/resource"
+exchMock "github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
 "github.com/alcionai/corso/src/pkg/control"
 "github.com/alcionai/corso/src/pkg/count"
 "github.com/alcionai/corso/src/pkg/fault"
@@ -14,7 +14,7 @@ import (
 "github.com/stretchr/testify/suite"

 "github.com/alcionai/corso/src/internal/data"
-exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock"
+exchMock "github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
 "github.com/alcionai/corso/src/internal/tester"
 "github.com/alcionai/corso/src/pkg/fault"
 "github.com/alcionai/corso/src/pkg/path"
@@ -13,7 +13,7 @@ import (
 "github.com/stretchr/testify/suite"

 "github.com/alcionai/corso/src/internal/data"
-"github.com/alcionai/corso/src/internal/m365/exchange/mock"
+"github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
 "github.com/alcionai/corso/src/internal/tester"
 "github.com/alcionai/corso/src/pkg/fault"
 "github.com/alcionai/corso/src/pkg/path"
@@ -21,7 +21,7 @@ import (

 pmMock "github.com/alcionai/corso/src/internal/common/prefixmatcher/mock"
 "github.com/alcionai/corso/src/internal/data"
-exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock"
+exchMock "github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
 "github.com/alcionai/corso/src/internal/tester"
 "github.com/alcionai/corso/src/pkg/backup/details"
 "github.com/alcionai/corso/src/pkg/backup/identity"
@@ -25,8 +25,8 @@ import (
 "github.com/alcionai/corso/src/internal/common/ptr"
 "github.com/alcionai/corso/src/internal/data"
 "github.com/alcionai/corso/src/internal/data/mock"
-exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock"
-"github.com/alcionai/corso/src/internal/m365/onedrive/metadata"
+"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
+exchMock "github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
 "github.com/alcionai/corso/src/internal/tester"
 "github.com/alcionai/corso/src/pkg/backup/details"
 "github.com/alcionai/corso/src/pkg/backup/identity"
@@ -8,10 +8,10 @@ import (
 "github.com/alcionai/corso/src/internal/common/prefixmatcher"
 "github.com/alcionai/corso/src/internal/data"
 "github.com/alcionai/corso/src/internal/diagnostics"
-"github.com/alcionai/corso/src/internal/m365/exchange"
 "github.com/alcionai/corso/src/internal/m365/graph"
-"github.com/alcionai/corso/src/internal/m365/onedrive"
-"github.com/alcionai/corso/src/internal/m365/sharepoint"
+"github.com/alcionai/corso/src/internal/m365/service/exchange"
+"github.com/alcionai/corso/src/internal/m365/service/onedrive"
+"github.com/alcionai/corso/src/internal/m365/service/sharepoint"
 "github.com/alcionai/corso/src/internal/operations/inject"
 "github.com/alcionai/corso/src/pkg/fault"
 "github.com/alcionai/corso/src/pkg/filters"
@@ -11,9 +11,9 @@ import (
 "github.com/stretchr/testify/suite"

 inMock "github.com/alcionai/corso/src/internal/common/idname/mock"
-"github.com/alcionai/corso/src/internal/m365/exchange"
 "github.com/alcionai/corso/src/internal/m365/resource"
-"github.com/alcionai/corso/src/internal/m365/sharepoint"
+"github.com/alcionai/corso/src/internal/m365/service/exchange"
+"github.com/alcionai/corso/src/internal/m365/service/sharepoint"
 "github.com/alcionai/corso/src/internal/operations/inject"
 "github.com/alcionai/corso/src/internal/tester"
 "github.com/alcionai/corso/src/internal/tester/tconfig"
@@ -1,5 +1,5 @@
-// Package onedrive provides support for retrieving M365 OneDrive objects
-package onedrive
+// Package drive provides support for retrieving M365 Drive objects
+package drive

 import (
 "context"
@@ -15,8 +15,8 @@ import (

 "github.com/alcionai/corso/src/internal/common/ptr"
 "github.com/alcionai/corso/src/internal/data"
+"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
 "github.com/alcionai/corso/src/internal/m365/graph"
-"github.com/alcionai/corso/src/internal/m365/onedrive/metadata"
 "github.com/alcionai/corso/src/internal/m365/support"
 "github.com/alcionai/corso/src/internal/observe"
 "github.com/alcionai/corso/src/pkg/backup/details"
@@ -1,4 +1,4 @@
-package onedrive
+package drive

 import (
 "bytes"
@@ -20,11 +20,11 @@ import (

 "github.com/alcionai/corso/src/internal/common/ptr"
 "github.com/alcionai/corso/src/internal/data"
+"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
+metaTD "github.com/alcionai/corso/src/internal/m365/collection/drive/metadata/testdata"
 "github.com/alcionai/corso/src/internal/m365/graph"
-"github.com/alcionai/corso/src/internal/m365/onedrive/metadata"
-metaTD "github.com/alcionai/corso/src/internal/m365/onedrive/metadata/testdata"
-"github.com/alcionai/corso/src/internal/m365/onedrive/mock"
-odTD "github.com/alcionai/corso/src/internal/m365/onedrive/testdata"
+"github.com/alcionai/corso/src/internal/m365/service/onedrive/mock"
+odTD "github.com/alcionai/corso/src/internal/m365/service/onedrive/testdata"
 "github.com/alcionai/corso/src/internal/m365/support"
 "github.com/alcionai/corso/src/internal/tester"
 "github.com/alcionai/corso/src/pkg/backup/details"
@@ -1,4 +1,4 @@
-package onedrive
+package drive

 import (
 "context"
@@ -14,9 +14,9 @@ import (
 "github.com/alcionai/corso/src/internal/common/prefixmatcher"
 "github.com/alcionai/corso/src/internal/common/ptr"
 "github.com/alcionai/corso/src/internal/data"
+"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
 "github.com/alcionai/corso/src/internal/m365/graph"
-odConsts "github.com/alcionai/corso/src/internal/m365/onedrive/consts"
-"github.com/alcionai/corso/src/internal/m365/onedrive/metadata"
+odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
 "github.com/alcionai/corso/src/internal/m365/support"
 "github.com/alcionai/corso/src/internal/observe"
 "github.com/alcionai/corso/src/pkg/control"
@@ -1,4 +1,4 @@
-package onedrive
+package drive

 import (
 "context"
@@ -17,10 +17,10 @@ import (
 "github.com/alcionai/corso/src/internal/common/prefixmatcher"
 pmMock "github.com/alcionai/corso/src/internal/common/prefixmatcher/mock"
 "github.com/alcionai/corso/src/internal/data"
+"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
 "github.com/alcionai/corso/src/internal/m365/graph"
-odConsts "github.com/alcionai/corso/src/internal/m365/onedrive/consts"
-"github.com/alcionai/corso/src/internal/m365/onedrive/metadata"
-"github.com/alcionai/corso/src/internal/m365/onedrive/mock"
+odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
+"github.com/alcionai/corso/src/internal/m365/service/onedrive/mock"
 "github.com/alcionai/corso/src/internal/m365/support"
 "github.com/alcionai/corso/src/internal/tester"
 "github.com/alcionai/corso/src/pkg/control"
@@ -1,4 +1,4 @@
-package onedrive
+package drive

 import (
 "github.com/microsoftgraph/msgraph-sdk-go/models"
@@ -1,4 +1,4 @@
-package onedrive
+package drive

 import (
 "context"
@@ -1,4 +1,4 @@
-package onedrive
+package drive

 import (
 "testing"
@@ -1,4 +1,4 @@
-package onedrive
+package drive

 import (
 "bytes"
@@ -13,8 +13,8 @@ import (
 "github.com/alcionai/corso/src/internal/common/ptr"
 "github.com/alcionai/corso/src/internal/common/readers"
 "github.com/alcionai/corso/src/internal/common/str"
+"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
 "github.com/alcionai/corso/src/internal/m365/graph"
-"github.com/alcionai/corso/src/internal/m365/onedrive/metadata"
 "github.com/alcionai/corso/src/pkg/services/m365/api"
 )

@@ -1,4 +1,4 @@
-package onedrive
+package drive

 import (
 "context"
@@ -1,4 +1,4 @@
-package onedrive
+package drive

 import (
 "context"
@@ -1,4 +1,4 @@
-package onedrive
+package drive

 import (
 "context"
@@ -10,7 +10,7 @@ import (
 "github.com/microsoftgraph/msgraph-sdk-go/models"

 "github.com/alcionai/corso/src/internal/common/ptr"
-odConsts "github.com/alcionai/corso/src/internal/m365/onedrive/consts"
+odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
 "github.com/alcionai/corso/src/pkg/backup/details"
 "github.com/alcionai/corso/src/pkg/control"
 "github.com/alcionai/corso/src/pkg/path"
@@ -29,6 +29,10 @@ type itemBackupHandler struct {
 scope selectors.OneDriveScope
 }

+func NewItemBackupHandler(ac api.Drives, scope selectors.OneDriveScope) *itemBackupHandler {
+return &itemBackupHandler{ac, scope}
+}
+
 func (h itemBackupHandler) Get(
 ctx context.Context,
 url string,
@@ -1,4 +1,4 @@
-package onedrive
+package drive

 import (
 "testing"
@@ -1,4 +1,4 @@
-package onedrive
+package drive

 import (
 "bytes"
@@ -1,4 +1,4 @@
-package sharepoint
+package drive

 import (
 "context"
@@ -9,8 +9,7 @@ import (
 "github.com/microsoftgraph/msgraph-sdk-go/models"

 "github.com/alcionai/corso/src/internal/common/ptr"
-"github.com/alcionai/corso/src/internal/m365/onedrive"
-odConsts "github.com/alcionai/corso/src/internal/m365/onedrive/consts"
+odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
 "github.com/alcionai/corso/src/pkg/backup/details"
 "github.com/alcionai/corso/src/pkg/control"
 "github.com/alcionai/corso/src/pkg/path"
@@ -18,13 +17,17 @@ import (
 "github.com/alcionai/corso/src/pkg/services/m365/api"
 )

-var _ onedrive.BackupHandler = &libraryBackupHandler{}
+var _ BackupHandler = &libraryBackupHandler{}

 type libraryBackupHandler struct {
 ac api.Drives
 scope selectors.SharePointScope
 }

+func NewLibraryBackupHandler(ac api.Drives, scope selectors.SharePointScope) libraryBackupHandler {
+return libraryBackupHandler{ac, scope}
+}
+
 func (h libraryBackupHandler) Get(
 ctx context.Context,
 url string,
@@ -78,7 +81,7 @@ func (h libraryBackupHandler) AugmentItemInfo(
 size int64,
 parentPath *path.Builder,
 ) details.ItemInfo {
-return augmentItemInfo(dii, item, size, parentPath)
+return augmentLibraryItemInfo(dii, item, size, parentPath)
 }

 // constructWebURL is a helper function for recreating the webURL
@@ -154,12 +157,16 @@ func (h libraryBackupHandler) IncludesDir(dir string) bool {
 // Restore
 // ---------------------------------------------------------------------------

-var _ onedrive.RestoreHandler = &libraryRestoreHandler{}
+var _ RestoreHandler = &libraryRestoreHandler{}

 type libraryRestoreHandler struct {
 ac api.Client
 }

+func NewLibraryRestoreHandler(ac api.Client) libraryRestoreHandler {
+return libraryRestoreHandler{ac}
+}
+
 func (h libraryRestoreHandler) PostDrive(
 ctx context.Context,
 siteID, driveName string,
@@ -167,10 +174,6 @@ func (h libraryRestoreHandler) PostDrive(
 return h.ac.Lists().PostDrive(ctx, siteID, driveName)
 }

-func NewRestoreHandler(ac api.Client) *libraryRestoreHandler {
-return &libraryRestoreHandler{ac}
-}
-
 func (h libraryRestoreHandler) NewDrivePager(
 resourceOwner string,
 fields []string,
@@ -184,7 +187,7 @@ func (h libraryRestoreHandler) AugmentItemInfo(
 size int64,
 parentPath *path.Builder,
 ) details.ItemInfo {
-return augmentItemInfo(dii, item, size, parentPath)
+return augmentLibraryItemInfo(dii, item, size, parentPath)
 }

 func (h libraryRestoreHandler) DeleteItem(
@@ -263,7 +266,7 @@ func (h libraryRestoreHandler) GetRootFolder(
 // Common
 // ---------------------------------------------------------------------------

-func augmentItemInfo(
+func augmentLibraryItemInfo(
 dii details.ItemInfo,
 item models.DriveItemable,
 size int64,
@@ -1,4 +1,4 @@
-package sharepoint
+package drive

 import (
 "testing"
@@ -6,7 +6,7 @@ import (
 "github.com/microsoftgraph/msgraph-sdk-go/models"
 "github.com/stretchr/testify/assert"

-"github.com/alcionai/corso/src/internal/m365/onedrive/metadata"
+"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
 )

 func AssertMetadataEqual(t *testing.T, expect, got metadata.Metadata) {
@@ -1,4 +1,4 @@
-package onedrive
+package drive

 import (
 "context"
@@ -11,7 +11,7 @@ import (

 "github.com/alcionai/corso/src/internal/common/ptr"
 "github.com/alcionai/corso/src/internal/data"
-"github.com/alcionai/corso/src/internal/m365/onedrive/metadata"
+"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
 "github.com/alcionai/corso/src/internal/version"
 "github.com/alcionai/corso/src/pkg/logger"
 "github.com/alcionai/corso/src/pkg/path"
@@ -76,7 +76,7 @@ func getCollectionMetadata(
 metaName = metadata.DirMetaFileSuffix
 }

-meta, err := fetchAndReadMetadata(ctx, dc, metaName)
+meta, err := FetchAndReadMetadata(ctx, dc, metaName)
 if err != nil {
 return metadata.Metadata{}, clues.Wrap(err, "collection metadata")
 }
@@ -1,4 +1,4 @@
-package onedrive
+package drive

 import (
 "strings"
@@ -9,8 +9,8 @@ import (
 "github.com/stretchr/testify/require"
 "github.com/stretchr/testify/suite"

-odConsts "github.com/alcionai/corso/src/internal/m365/onedrive/consts"
-"github.com/alcionai/corso/src/internal/m365/onedrive/metadata"
+"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
+odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
 "github.com/alcionai/corso/src/internal/tester"
 "github.com/alcionai/corso/src/pkg/path"
 )
@@ -1,4 +1,4 @@
-package onedrive
+package drive

 import (
 "context"
@@ -6,7 +6,6 @@ import (
 "fmt"
 "io"
 "runtime/trace"
-"sort"
 "strings"
 "sync"
 "sync/atomic"
@@ -15,12 +14,11 @@ import (
 "github.com/microsoftgraph/msgraph-sdk-go/models"
 "github.com/pkg/errors"

-"github.com/alcionai/corso/src/internal/common/idname"
 "github.com/alcionai/corso/src/internal/common/ptr"
 "github.com/alcionai/corso/src/internal/data"
 "github.com/alcionai/corso/src/internal/diagnostics"
+"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
 "github.com/alcionai/corso/src/internal/m365/graph"
-"github.com/alcionai/corso/src/internal/m365/onedrive/metadata"
 "github.com/alcionai/corso/src/internal/m365/support"
 "github.com/alcionai/corso/src/internal/observe"
 "github.com/alcionai/corso/src/internal/operations/inject"
@@ -39,81 +37,6 @@ const (
 maxUploadRetries = 3
 )

-// ConsumeRestoreCollections will restore the specified data collections into OneDrive
-func ConsumeRestoreCollections(
-ctx context.Context,
-rh RestoreHandler,
-rcc inject.RestoreConsumerConfig,
-backupDriveIDNames idname.Cacher,
-dcs []data.RestoreCollection,
-deets *details.Builder,
-errs *fault.Bus,
-ctr *count.Bus,
-) (*support.ControllerOperationStatus, error) {
-var (
-restoreMetrics support.CollectionMetrics
-el = errs.Local()
-caches = NewRestoreCaches(backupDriveIDNames)
-fallbackDriveName = rcc.RestoreConfig.Location
-)
-
-ctx = clues.Add(ctx, "backup_version", rcc.BackupVersion)
-
-err := caches.Populate(ctx, rh, rcc.ProtectedResource.ID())
-if err != nil {
-return nil, clues.Wrap(err, "initializing restore caches")
-}
-
-// Reorder collections so that the parents directories are created
-// before the child directories; a requirement for permissions.
-data.SortRestoreCollections(dcs)
-
-// Iterate through the data collections and restore the contents of each
-for _, dc := range dcs {
-if el.Failure() != nil {
-break
-}
-
-var (
-err error
-metrics support.CollectionMetrics
-ictx = clues.Add(
-ctx,
-"category", dc.FullPath().Category(),
-"full_path", dc.FullPath())
-)
-
-metrics, err = RestoreCollection(
-ictx,
-rh,
-rcc,
-dc,
-caches,
-deets,
-fallbackDriveName,
-errs,
-ctr.Local())
-if err != nil {
-el.AddRecoverable(ctx, err)
-}
-
-restoreMetrics = support.CombineMetrics(restoreMetrics, metrics)
-
-if errors.Is(err, context.Canceled) {
-break
-}
-}
-
-status := support.CreateStatus(
-ctx,
-support.Restore,
-len(dcs),
-restoreMetrics,
-rcc.RestoreConfig.Location)
-
-return status, el.Failure()
-}
-
 // RestoreCollection handles restoration of an individual collection.
 // returns:
 // - the collection's item and byte count metrics
@@ -518,7 +441,7 @@ func restoreV1File(
 // Fetch item permissions from the collection and restore them.
 metaName := trimmedName + metadata.MetaFileSuffix

-meta, err := fetchAndReadMetadata(ctx, fibn, metaName)
+meta, err := FetchAndReadMetadata(ctx, fibn, metaName)
 if err != nil {
 return details.ItemInfo{}, clues.Wrap(err, "restoring file")
 }
@@ -556,7 +479,7 @@ func restoreV6File(
 // Get metadata file so we can determine the file name.
 metaName := trimmedName + metadata.MetaFileSuffix

-meta, err := fetchAndReadMetadata(ctx, fibn, metaName)
+meta, err := FetchAndReadMetadata(ctx, fibn, metaName)
 if err != nil {
 return details.ItemInfo{}, clues.Wrap(err, "restoring file")
 }
@@ -932,7 +855,7 @@ func restoreFile(
 return ptr.Val(newItem.GetId()), dii, nil
 }

-func fetchAndReadMetadata(
+func FetchAndReadMetadata(
 ctx context.Context,
 fibn data.FetchItemByNamer,
 metaName string,
@@ -974,132 +897,6 @@ func getMetadata(metar io.ReadCloser) (metadata.Metadata, error) {
 return meta, nil
 }

-// Augment restore path to add extra files(meta) needed for restore as
-// well as do any other ordering operations on the paths
-//
-// Only accepts StoragePath/RestorePath pairs where the RestorePath is
-// at least as long as the StoragePath. If the RestorePath is longer than the
-// StoragePath then the first few (closest to the root) directories will use
-// default permissions during restore.
-func AugmentRestorePaths(
-backupVersion int,
-paths []path.RestorePaths,
-) ([]path.RestorePaths, error) {
-// Keyed by each value's StoragePath.String() which corresponds to the RepoRef
-// of the directory.
-colPaths := map[string]path.RestorePaths{}
-
-for _, p := range paths {
-first := true
-
-for {
-sp, err := p.StoragePath.Dir()
-if err != nil {
-return nil, err
-}
-
-drivePath, err := path.ToDrivePath(sp)
-if err != nil {
-return nil, err
-}
-
-if len(drivePath.Folders) == 0 {
-break
-}
-
-if len(p.RestorePath.Elements()) < len(sp.Elements()) {
-return nil, clues.New("restorePath shorter than storagePath").
-With("restore_path", p.RestorePath, "storage_path", sp)
-}
-
-rp := p.RestorePath
-
-// Make sure the RestorePath always points to the level of the current
-// collection. We need to track if it's the first iteration because the
-// RestorePath starts out at the collection level to begin with.
-if !first {
-rp, err = p.RestorePath.Dir()
-if err != nil {
-return nil, err
-}
-}
-
-paths := path.RestorePaths{
-StoragePath: sp,
-RestorePath: rp,
-}
-
-colPaths[sp.String()] = paths
-p = paths
-first = false
-}
-}
-
-// Adds dirmeta files as we need to make sure collections for all
-// directories involved are created and not just the final one. No
-// need to add `.meta` files (metadata for files) as they will
-// anyways be looked up automatically.
-// TODO: Stop populating .dirmeta for newer versions once we can
-// get files from parent directory via `Fetch` in a collection.
-// As of now look up metadata for parent directories from a
-// collection.
-for _, p := range colPaths {
-el := p.StoragePath.Elements()
-
-if backupVersion >= version.OneDrive6NameInMeta {
-mPath, err := p.StoragePath.AppendItem(".dirmeta")
-if err != nil {
-return nil, err
-}
-
-paths = append(
-paths,
-path.RestorePaths{StoragePath: mPath, RestorePath: p.RestorePath})
-} else if backupVersion >= version.OneDrive4DirIncludesPermissions {
-mPath, err := p.StoragePath.AppendItem(el.Last() + ".dirmeta")
-if err != nil {
-return nil, err
-}
-
-paths = append(
-paths,
-path.RestorePaths{StoragePath: mPath, RestorePath: p.RestorePath})
-} else if backupVersion >= version.OneDrive1DataAndMetaFiles {
-pp, err := p.StoragePath.Dir()
-if err != nil {
-return nil, err
-}
-
-mPath, err := pp.AppendItem(el.Last() + ".dirmeta")
-if err != nil {
-return nil, err
-}
-
-prp, err := p.RestorePath.Dir()
-if err != nil {
-return nil, err
-}
-
-paths = append(
-paths,
-path.RestorePaths{StoragePath: mPath, RestorePath: prp})
-}
-}
-
-// This sort is done primarily to order `.meta` files after `.data`
-// files. This is only a necessity for OneDrive as we are storing
-// metadata for files/folders in separate meta files and we the
-// data to be restored before we can restore the metadata.
-//
-// This sorting assumes stuff in the same StoragePath directory end up in the
-// same RestorePath collection.
-sort.Slice(paths, func(i, j int) bool {
-return paths[i].StoragePath.String() < paths[j].StoragePath.String()
-})
-
-return paths, nil
-}
-
 type PostDriveAndGetRootFolderer interface {
 PostDriver
 GetRootFolderer
@@ -1,4 +1,4 @@
-package onedrive
+package drive

 import (
 "context"
@@ -10,8 +10,8 @@ import (

 "github.com/alcionai/corso/src/internal/common/idname"
 "github.com/alcionai/corso/src/internal/common/ptr"
+"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
 "github.com/alcionai/corso/src/internal/m365/graph"
-"github.com/alcionai/corso/src/internal/m365/onedrive/metadata"
 "github.com/alcionai/corso/src/pkg/services/m365/api"
 )

@@ -1,4 +1,4 @@
-package onedrive
+package drive

 import (
 "context"
@@ -14,8 +14,8 @@ import (
 "github.com/alcionai/corso/src/internal/common/idname"
 "github.com/alcionai/corso/src/internal/common/ptr"
 "github.com/alcionai/corso/src/internal/m365/graph"
-odConsts "github.com/alcionai/corso/src/internal/m365/onedrive/consts"
-"github.com/alcionai/corso/src/internal/m365/onedrive/mock"
+odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
+"github.com/alcionai/corso/src/internal/m365/service/onedrive/mock"
 "github.com/alcionai/corso/src/internal/operations/inject"
 "github.com/alcionai/corso/src/internal/tester"
 "github.com/alcionai/corso/src/internal/version"
@@ -34,301 +34,6 @@ func TestRestoreUnitSuite(t *testing.T) {
 suite.Run(t, &RestoreUnitSuite{Suite: tester.NewUnitSuite(t)})
 }

-func (suite *RestoreUnitSuite) TestAugmentRestorePaths() {
-// Adding a simple test here so that we can be sure that this
-// function gets updated whenever we add a new version.
-require.LessOrEqual(suite.T(), version.Backup, version.All8MigrateUserPNToID, "unsupported backup version")
-
-table := []struct {
-name string
-version int
-input []string
-output []string
-}{
-{
-name: "no change v0",
-version: 0,
-input: []string{
-"file.txt.data",
-"file.txt", // v0 does not have `.data`
-},
-output: []string{
-"file.txt", // ordering artifact of sorting
-"file.txt.data",
-},
-},
-{
-name: "one folder v0",
-version: 0,
-input: []string{
-"folder/file.txt.data",
-"folder/file.txt",
-},
-output: []string{
-"folder/file.txt",
-"folder/file.txt.data",
-},
-},
-{
-name: "no change v1",
-version: version.OneDrive1DataAndMetaFiles,
-input: []string{
-"file.txt.data",
-},
-output: []string{
-"file.txt.data",
-},
-},
-{
-name: "one folder v1",
-version: version.OneDrive1DataAndMetaFiles,
-input: []string{
-"folder/file.txt.data",
-},
-output: []string{
-"folder.dirmeta",
-"folder/file.txt.data",
-},
-},
-{
-name: "nested folders v1",
-version: version.OneDrive1DataAndMetaFiles,
-input: []string{
-"folder/file.txt.data",
-"folder/folder2/file.txt.data",
-},
-output: []string{
-"folder.dirmeta",
-"folder/file.txt.data",
-"folder/folder2.dirmeta",
-"folder/folder2/file.txt.data",
-},
-},
-{
-name: "no change v4",
-version: version.OneDrive4DirIncludesPermissions,
-input: []string{
-"file.txt.data",
-},
-output: []string{
-"file.txt.data",
-},
-},
-{
-name: "one folder v4",
-version: version.OneDrive4DirIncludesPermissions,
-input: []string{
-"folder/file.txt.data",
-},
-output: []string{
-"folder/file.txt.data",
-"folder/folder.dirmeta",
-},
-},
-{
-name: "nested folders v4",
-version: version.OneDrive4DirIncludesPermissions,
-input: []string{
-"folder/file.txt.data",
-"folder/folder2/file.txt.data",
-},
-output: []string{
-"folder/file.txt.data",
-"folder/folder.dirmeta",
-"folder/folder2/file.txt.data",
-"folder/folder2/folder2.dirmeta",
-},
-},
-{
-name: "no change v6",
-version: version.OneDrive6NameInMeta,
-input: []string{
-"file.txt.data",
-},
-output: []string{
-"file.txt.data",
-},
-},
-{
-name: "one folder v6",
-version: version.OneDrive6NameInMeta,
-input: []string{
-"folder/file.txt.data",
-},
-output: []string{
-"folder/.dirmeta",
-"folder/file.txt.data",
-},
-},
-{
-name: "nested folders v6",
-version: version.OneDrive6NameInMeta,
-input: []string{
-"folder/file.txt.data",
-"folder/folder2/file.txt.data",
-},
-output: []string{
-"folder/.dirmeta",
-"folder/file.txt.data",
-"folder/folder2/.dirmeta",
-"folder/folder2/file.txt.data",
-},
-},
-}
-
-for _, test := range table {
-suite.Run(test.name, func() {
-t := suite.T()
-
-_, flush := tester.NewContext(t)
-defer flush()
-
-base := "id/onedrive/user/files/drives/driveID/root:/"
-
-inPaths := []path.RestorePaths{}
-for _, ps := range test.input {
-p, err := path.FromDataLayerPath(base+ps, true)
-require.NoError(t, err, "creating path", clues.ToCore(err))
-
-pd, err := p.Dir()
-require.NoError(t, err, "creating collection path", clues.ToCore(err))
-
-inPaths = append(
-inPaths,
-path.RestorePaths{StoragePath: p, RestorePath: pd})
-}
-
-outPaths := []path.RestorePaths{}
-for _, ps := range test.output {
-p, err := path.FromDataLayerPath(base+ps, true)
-require.NoError(t, err, "creating path", clues.ToCore(err))
-
-pd, err := p.Dir()
-require.NoError(t, err, "creating collection path", clues.ToCore(err))
-
-outPaths = append(
-outPaths,
-path.RestorePaths{StoragePath: p, RestorePath: pd})
-}
-
-actual, err := AugmentRestorePaths(test.version, inPaths)
-require.NoError(t, err, "augmenting paths", clues.ToCore(err))
-
-// Ordering of paths matter here as we need dirmeta files
-// to show up before file in dir
-assert.Equal(t, outPaths, actual, "augmented paths")
-})
-}
-}
-
-// TestAugmentRestorePaths_DifferentRestorePath tests that RestorePath
-// substitution works properly. Since it's only possible for future backup
-// versions to need restore path substitution (i.e. due to storing folders by
-// ID instead of name) this is only tested against the most recent backup
-// version at the moment.
-func (suite *RestoreUnitSuite) TestAugmentRestorePaths_DifferentRestorePath() {
-// Adding a simple test here so that we can be sure that this
-// function gets updated whenever we add a new version.
-require.LessOrEqual(suite.T(), version.Backup, version.All8MigrateUserPNToID, "unsupported backup version")
-
-type pathPair struct {
-storage string
-restore string
-}
-
-table := []struct {
-name string
-version int
-input []pathPair
-output []pathPair
-errCheck assert.ErrorAssertionFunc
-}{
-{
-name: "nested folders",
-version: version.Backup,
-input: []pathPair{
-{storage: "folder-id/file.txt.data", restore: "folder"},
-{storage: "folder-id/folder2-id/file.txt.data", restore: "folder/folder2"},
-},
-output: []pathPair{
-{storage: "folder-id/.dirmeta", restore: "folder"},
-{storage: "folder-id/file.txt.data", restore: "folder"},
-{storage: "folder-id/folder2-id/.dirmeta", restore: "folder/folder2"},
-{storage: "folder-id/folder2-id/file.txt.data", restore: "folder/folder2"},
-},
-errCheck: assert.NoError,
-},
-{
-name: "restore path longer one folder",
-version: version.Backup,
-input: []pathPair{
-{storage: "folder-id/file.txt.data", restore: "corso_restore/folder"},
-},
-output: []pathPair{
-{storage: "folder-id/.dirmeta", restore: "corso_restore/folder"},
-{storage: "folder-id/file.txt.data", restore: "corso_restore/folder"},
-},
-errCheck: assert.NoError,
-},
-{
-name: "restore path shorter one folder",
-version: version.Backup,
-input: []pathPair{
-{storage: "folder-id/file.txt.data", restore: ""},
-},
-errCheck: assert.Error,
-},
-}
-
-for _, test := range table {
-suite.Run(test.name, func() {
-t := suite.T()
-
-_, flush := tester.NewContext(t)
-defer flush()
-
-base := "id/onedrive/user/files/drives/driveID/root:/"
-
-inPaths := []path.RestorePaths{}
-for _, ps := range test.input {
-p, err := path.FromDataLayerPath(base+ps.storage, true)
-require.NoError(t, err, "creating path", clues.ToCore(err))
-
-r, err := path.FromDataLayerPath(base+ps.restore, false)
-require.NoError(t, err, "creating path", clues.ToCore(err))
-
-inPaths = append(
-inPaths,
-path.RestorePaths{StoragePath: p, RestorePath: r})
-}
-
-outPaths := []path.RestorePaths{}
-for _, ps := range test.output {
-p, err := path.FromDataLayerPath(base+ps.storage, true)
-require.NoError(t, err, "creating path", clues.ToCore(err))
-
-r, err := path.FromDataLayerPath(base+ps.restore, false)
-require.NoError(t, err, "creating path", clues.ToCore(err))
-
-outPaths = append(
-outPaths,
-path.RestorePaths{StoragePath: p, RestorePath: r})
-}
-
-actual, err := AugmentRestorePaths(test.version, inPaths)
-test.errCheck(t, err, "augmenting paths", clues.ToCore(err))
-
-if err != nil {
-return
-}
-
-// Ordering of paths matter here as we need dirmeta files
-// to show up before file in dir
-assert.Equal(t, outPaths, actual, "augmented paths")
-})
-}
-}
-
 func (suite *RestoreUnitSuite) TestRestoreItem_collisionHandling() {
 const mndiID = "mndi-id"

@@ -1,4 +1,4 @@
-package onedrive
+package drive

 import (
 "context"
@@ -1,4 +1,4 @@
-package onedrive
+package drive

 import (
 "context"
@@ -1,4 +1,4 @@
-package sharepoint
+package site

 import (
 "context"
@@ -7,11 +7,10 @@ import (

 "github.com/alcionai/corso/src/internal/common/prefixmatcher"
 "github.com/alcionai/corso/src/internal/data"
+"github.com/alcionai/corso/src/internal/m365/collection/drive"
 "github.com/alcionai/corso/src/internal/m365/graph"
-"github.com/alcionai/corso/src/internal/m365/onedrive"
-betaAPI "github.com/alcionai/corso/src/internal/m365/sharepoint/api"
+betaAPI "github.com/alcionai/corso/src/internal/m365/service/sharepoint/api"
 "github.com/alcionai/corso/src/internal/m365/support"
-"github.com/alcionai/corso/src/internal/observe"
 "github.com/alcionai/corso/src/internal/operations/inject"
 "github.com/alcionai/corso/src/pkg/account"
 "github.com/alcionai/corso/src/pkg/fault"
@@ -21,173 +20,9 @@ import (
 "github.com/alcionai/corso/src/pkg/services/m365/api"
 )

-func ProduceBackupCollections(
-ctx context.Context,
-bpc inject.BackupProducerConfig,
-ac api.Client,
-creds account.M365Config,
-su support.StatusUpdater,
-errs *fault.Bus,
-) ([]data.BackupCollection, *prefixmatcher.StringSetMatcher, bool, error) {
-b, err := bpc.Selector.ToSharePointBackup()
-if err != nil {
-return nil, nil, false, clues.Wrap(err, "sharePointDataCollection: parsing selector")
-}
-
-var (
-el = errs.Local()
-collections = []data.BackupCollection{}
-categories = map[path.CategoryType]struct{}{}
-ssmb = prefixmatcher.NewStringSetBuilder()
-canUsePreviousBackup bool
-)
-
-ctx = clues.Add(
-ctx,
-"site_id", clues.Hide(bpc.ProtectedResource.ID()),
-"site_url", clues.Hide(bpc.ProtectedResource.Name()))
-
-for _, scope := range b.Scopes() {
-if el.Failure() != nil {
-break
-}
-
-progressBar := observe.MessageWithCompletion(
-ctx,
-observe.Bulletf("%s", scope.Category().PathType()))
-defer close(progressBar)
-
-var spcs []data.BackupCollection
-
-switch scope.Category().PathType() {
-case path.ListsCategory:
-spcs, err = collectLists(
-ctx,
-bpc,
-ac,
-creds.AzureTenantID,
-su,
-errs)
-if err != nil {
-el.AddRecoverable(ctx, err)
-continue
-}
-
-// Lists don't make use of previous metadata
-// TODO: Revisit when we add support of lists
-canUsePreviousBackup = true
-
-case path.LibrariesCategory:
-spcs, canUsePreviousBackup, err = collectLibraries(
-ctx,
-bpc,
-ac.Drives(),
-creds.AzureTenantID,
-ssmb,
-scope,
-su,
-errs)
-if err != nil {
-el.AddRecoverable(ctx, err)
-continue
-}
-
-case path.PagesCategory:
-spcs, err = collectPages(
-ctx,
-bpc,
-creds,
-ac,
-su,
-errs)
-if err != nil {
-el.AddRecoverable(ctx, err)
-continue
-}
-
-// Lists don't make use of previous metadata
-// TODO: Revisit when we add support of pages
-canUsePreviousBackup = true
-}
-
-collections = append(collections, spcs...)
-
-categories[scope.Category().PathType()] = struct{}{}
-}
-
-if len(collections) > 0 {
-baseCols, err := graph.BaseCollections(
-ctx,
-collections,
-creds.AzureTenantID,
-bpc.ProtectedResource.ID(),
-path.SharePointService,
-categories,
-su,
-errs)
-if err != nil {
-return nil, nil, false, err
-}
-
-collections = append(collections, baseCols...)
-}
-
-return collections, ssmb.ToReader(), canUsePreviousBackup, el.Failure()
-}
-
-func collectLists(
-ctx context.Context,
-bpc inject.BackupProducerConfig,
-ac api.Client,
-tenantID string,
-su support.StatusUpdater,
-errs *fault.Bus,
-) ([]data.BackupCollection, error) {
-logger.Ctx(ctx).Debug("Creating SharePoint List Collections")
-
-var (
-el = errs.Local()
-spcs = make([]data.BackupCollection, 0)
-)
-
-lists, err := preFetchLists(ctx, ac.Stable, bpc.ProtectedResource.ID())
-if err != nil {
-return nil, err
-}
-
-for _, tuple := range lists {
-if el.Failure() != nil {
-break
-}
-
-dir, err := path.Build(
-tenantID,
-bpc.ProtectedResource.ID(),
-path.SharePointService,
-path.ListsCategory,
-false,
-tuple.name)
-if err != nil {
-el.AddRecoverable(ctx, clues.Wrap(err, "creating list collection path").WithClues(ctx))
-}
-
-collection := NewCollection(
-dir,
-ac,
-List,
-su,
-bpc.Options)
-collection.AddJob(tuple.id)
-
-spcs = append(spcs, collection)
-}
-
-return spcs, el.Failure()
-}
-
-// collectLibraries constructs a onedrive Collections struct and Get()s
+// CollectLibraries constructs a onedrive Collections struct and Get()s
 // all the drives associated with the site.
-func collectLibraries(
+func CollectLibraries(
 ctx context.Context,
 bpc inject.BackupProducerConfig,
 ad api.Drives,
@@ -201,8 +36,8 @@ func collectLibraries(

 var (
 collections = []data.BackupCollection{}
-colls = onedrive.NewCollections(
-&libraryBackupHandler{ad, scope},
+colls = drive.NewCollections(
+drive.NewLibraryBackupHandler(ad, scope),
 tenantID,
 bpc.ProtectedResource.ID(),
 su,
@@ -217,9 +52,9 @@ func collectLibraries(
 return append(collections, odcs...), canUsePreviousBackup, nil
 }

-// collectPages constructs a sharepoint Collections struct and Get()s the associated
+// CollectPages constructs a sharepoint Collections struct and Get()s the associated
 // M365 IDs for the associated Pages.
-func collectPages(
+func CollectPages(
 ctx context.Context,
 bpc inject.BackupProducerConfig,
 creds account.M365Config,
@@ -273,7 +108,57 @@ func collectPages(
 Pages,
 su,
 bpc.Options)
-collection.betaService = betaService
+collection.SetBetaService(betaService)
+collection.AddJob(tuple.ID)
+
+spcs = append(spcs, collection)
+}
+
+return spcs, el.Failure()
+}
+
+func CollectLists(
+ctx context.Context,
+bpc inject.BackupProducerConfig,
+ac api.Client,
+tenantID string,
+su support.StatusUpdater,
+errs *fault.Bus,
+) ([]data.BackupCollection, error) {
+logger.Ctx(ctx).Debug("Creating SharePoint List Collections")
+
+var (
+el = errs.Local()
+spcs = make([]data.BackupCollection, 0)
+)
+
+lists, err := PreFetchLists(ctx, ac.Stable, bpc.ProtectedResource.ID())
+if err != nil {
+return nil, err
+}
+
+for _, tuple := range lists {
+if el.Failure() != nil {
+break
+}
+
+dir, err := path.Build(
+tenantID,
+bpc.ProtectedResource.ID(),
+path.SharePointService,
+path.ListsCategory,
+false,
+tuple.Name)
+if err != nil {
+el.AddRecoverable(ctx, clues.Wrap(err, "creating list collection path").WithClues(ctx))
+}
+
+collection := NewCollection(
+dir,
+ac,
+List,
+su,
+bpc.Options)
 collection.AddJob(tuple.ID)

 spcs = append(spcs, collection)
src/internal/m365/collection/site/backup_test.go (new file, 73 lines)
@ -0,0 +1,73 @@
+package site
+
+import (
+    "testing"
+
+    "github.com/alcionai/clues"
+    "github.com/stretchr/testify/assert"
+    "github.com/stretchr/testify/require"
+    "github.com/stretchr/testify/suite"
+
+    "github.com/alcionai/corso/src/internal/common/idname/mock"
+    "github.com/alcionai/corso/src/internal/m365/graph"
+    "github.com/alcionai/corso/src/internal/operations/inject"
+    "github.com/alcionai/corso/src/internal/tester"
+    "github.com/alcionai/corso/src/internal/tester/tconfig"
+    "github.com/alcionai/corso/src/internal/version"
+    "github.com/alcionai/corso/src/pkg/control"
+    "github.com/alcionai/corso/src/pkg/fault"
+    "github.com/alcionai/corso/src/pkg/services/m365/api"
+)
+
+type SharePointPagesSuite struct {
+    tester.Suite
+}
+
+func TestSharePointPagesSuite(t *testing.T) {
+    suite.Run(t, &SharePointPagesSuite{
+        Suite: tester.NewIntegrationSuite(
+            t,
+            [][]string{tconfig.M365AcctCredEnvs}),
+    })
+}
+
+func (suite *SharePointPagesSuite) SetupSuite() {
+    ctx, flush := tester.NewContext(suite.T())
+    defer flush()
+
+    graph.InitializeConcurrencyLimiter(ctx, false, 4)
+}
+
+func (suite *SharePointPagesSuite) TestCollectPages() {
+    t := suite.T()
+
+    ctx, flush := tester.NewContext(t)
+    defer flush()
+
+    var (
+        siteID = tconfig.M365SiteID(t)
+        a      = tconfig.NewM365Account(t)
+    )
+
+    creds, err := a.M365Config()
+    require.NoError(t, err, clues.ToCore(err))
+
+    ac, err := api.NewClient(creds, control.DefaultOptions())
+    require.NoError(t, err, clues.ToCore(err))
+
+    bpc := inject.BackupProducerConfig{
+        LastBackupVersion: version.NoBackup,
+        Options:           control.DefaultOptions(),
+        ProtectedResource: mock.NewProvider(siteID, siteID),
+    }
+
+    col, err := CollectPages(
+        ctx,
+        bpc,
+        creds,
+        ac,
+        (&MockGraphService{}).UpdateStatus,
+        fault.New(true))
+    assert.NoError(t, err, clues.ToCore(err))
+    assert.NotEmpty(t, col)
+}
@ -1,4 +1,4 @@
-package sharepoint
+package site

 import (
     "bytes"
@ -13,7 +13,7 @@ import (
     "github.com/alcionai/corso/src/internal/common/ptr"
     "github.com/alcionai/corso/src/internal/data"
     "github.com/alcionai/corso/src/internal/m365/graph"
-    betaAPI "github.com/alcionai/corso/src/internal/m365/sharepoint/api"
+    betaAPI "github.com/alcionai/corso/src/internal/m365/service/sharepoint/api"
     "github.com/alcionai/corso/src/internal/m365/support"
     "github.com/alcionai/corso/src/internal/observe"
     "github.com/alcionai/corso/src/pkg/backup/details"
@ -81,6 +81,10 @@ func NewCollection(
     return c
 }

+func (sc *Collection) SetBetaService(betaService *betaAPI.BetaService) {
+    sc.betaService = betaService
+}
+
 // AddJob appends additional objectID to job field
 func (sc *Collection) AddJob(objID string) {
     sc.jobs = append(sc.jobs, objID)
@ -254,7 +258,7 @@ func (sc *Collection) retrieveLists(
     sc.data <- &Item{
         id:      ptr.Val(lst.GetId()),
         data:    io.NopCloser(bytes.NewReader(byteArray)),
-        info:    listToSPInfo(lst, size),
+        info:    ListToSPInfo(lst, size),
         modTime: t,
     }

@ -1,4 +1,4 @@
-package sharepoint
+package site

 import (
     "bytes"
@ -14,8 +14,8 @@ import (

     "github.com/alcionai/corso/src/internal/common/ptr"
     "github.com/alcionai/corso/src/internal/data"
-    betaAPI "github.com/alcionai/corso/src/internal/m365/sharepoint/api"
-    spMock "github.com/alcionai/corso/src/internal/m365/sharepoint/mock"
+    betaAPI "github.com/alcionai/corso/src/internal/m365/service/sharepoint/api"
+    spMock "github.com/alcionai/corso/src/internal/m365/service/sharepoint/mock"
     "github.com/alcionai/corso/src/internal/tester"
     "github.com/alcionai/corso/src/internal/tester/tconfig"
     "github.com/alcionai/corso/src/pkg/account"
@ -118,7 +118,7 @@ func (suite *SharePointCollectionSuite) TestCollection_Items() {
     data := &Item{
         id:   name,
         data: io.NopCloser(bytes.NewReader(byteArray)),
-        info: listToSPInfo(listing, int64(len(byteArray))),
+        info: ListToSPInfo(listing, int64(len(byteArray))),
     }

     return data
@ -207,7 +207,7 @@ func (suite *SharePointCollectionSuite) TestListCollection_Restore() {
     listData := &Item{
         id:   testName,
         data: io.NopCloser(bytes.NewReader(byteArray)),
-        info: listToSPInfo(listing, int64(len(byteArray))),
+        info: ListToSPInfo(listing, int64(len(byteArray))),
     }

     destName := testdata.DefaultRestoreConfig("").Location
@ -1,6 +1,6 @@
 // Code generated by "stringer -type=DataCategory"; DO NOT EDIT.

-package sharepoint
+package site

 import "strconv"

@ -1,4 +1,4 @@
-package sharepoint
+package site

 import (
     "testing"
@ -43,7 +43,7 @@ func (ms *MockGraphService) UpdateStatus(*support.ControllerOperationStatus) {
 }

 // ---------------------------------------------------------------------------
-// Helper Functions
+// Helper functions
 // ---------------------------------------------------------------------------

 func createTestService(t *testing.T, credentials account.M365Config) *graph.Service {
@ -1,4 +1,4 @@
-package sharepoint
+package site

 import (
     "context"
@ -14,9 +14,9 @@ import (
     "github.com/alcionai/corso/src/pkg/fault"
 )

-// listToSPInfo translates models.Listable metadata into searchable content
+// ListToSPInfo translates models.Listable metadata into searchable content
 // List Details: https://learn.microsoft.com/en-us/graph/api/resources/list?view=graph-rest-1.0
-func listToSPInfo(lst models.Listable, size int64) *details.SharePointInfo {
+func ListToSPInfo(lst models.Listable, size int64) *details.SharePointInfo {
     var (
         name   = ptr.Val(lst.GetDisplayName())
         webURL = ptr.Val(lst.GetWebUrl())
@ -34,9 +34,9 @@ func listToSPInfo(lst models.Listable, size int64) *details.SharePointInfo {
     }
 }

-type listTuple struct {
-    name string
-    id   string
+type ListTuple struct {
+    ID   string
+    Name string
 }

 func preFetchListOptions() *sites.ItemListsRequestBuilderGetRequestConfiguration {
@ -51,15 +51,15 @@ func preFetchListOptions() *sites.ItemListsRequestBuilderGetRequestConfiguration
     return options
 }

-func preFetchLists(
+func PreFetchLists(
     ctx context.Context,
     gs graph.Servicer,
     siteID string,
-) ([]listTuple, error) {
+) ([]ListTuple, error) {
     var (
         builder    = gs.Client().Sites().BySiteId(siteID).Lists()
         options    = preFetchListOptions()
-        listTuples = make([]listTuple, 0)
+        listTuples = make([]ListTuple, 0)
     )

     for {
@ -72,11 +72,11 @@ func preFetchLists(
         var (
             id   = ptr.Val(entry.GetId())
             name = ptr.Val(entry.GetDisplayName())
-            temp = listTuple{id: id, name: name}
+            temp = ListTuple{ID: id, Name: name}
         )

         if len(name) == 0 {
-            temp.name = id
+            temp.Name = id
         }

         listTuples = append(listTuples, temp)
@ -1,4 +1,4 @@
-package sharepoint
+package site

 import (
     "testing"
@ -9,6 +9,7 @@ import (
     "github.com/stretchr/testify/require"
     "github.com/stretchr/testify/suite"

+    "github.com/alcionai/corso/src/internal/m365/graph"
     "github.com/alcionai/corso/src/internal/tester"
     "github.com/alcionai/corso/src/internal/tester/tconfig"
     "github.com/alcionai/corso/src/pkg/account"
@ -28,6 +29,11 @@ func (suite *ListsUnitSuite) SetupSuite() {
     require.NoError(t, err, clues.ToCore(err))

     suite.creds = m365
+
+    ctx, flush := tester.NewContext(suite.T())
+    defer flush()
+
+    graph.InitializeConcurrencyLimiter(ctx, false, 4)
 }

 func TestListsUnitSuite(t *testing.T) {
@ -57,10 +63,10 @@ func (suite *ListsUnitSuite) TestLoadList() {
     defer flush()

     service := createTestService(t, suite.creds)
-    tuples, err := preFetchLists(ctx, service, "root")
+    tuples, err := PreFetchLists(ctx, service, "root")
     require.NoError(t, err, clues.ToCore(err))

-    job := []string{tuples[0].id}
+    job := []string{tuples[0].ID}
     lists, err := loadSiteLists(ctx, service, "root", job, fault.New(true))
     assert.NoError(t, err, clues.ToCore(err))
     assert.Greater(t, len(lists), 0)
@ -98,7 +104,7 @@ func (suite *ListsUnitSuite) TestSharePointInfo() {
     t := suite.T()

     list, expected := test.listAndDeets()
-    info := listToSPInfo(list, 10)
+    info := ListToSPInfo(list, 10)
     assert.Equal(t, expected.ItemType, info.ItemType)
     assert.Equal(t, expected.ItemName, info.ItemName)
     assert.Equal(t, expected.WebURL, info.WebURL)
@ -1,4 +1,4 @@
-package sharepoint
+package site

 import (
     "time"
@ -1,4 +1,4 @@
-package sharepoint
+package site

 import (
     "testing"
@ -1,4 +1,4 @@
-package sharepoint
+package site

 import (
     "context"
@ -15,9 +15,9 @@ import (
     "github.com/alcionai/corso/src/internal/common/ptr"
     "github.com/alcionai/corso/src/internal/data"
     "github.com/alcionai/corso/src/internal/diagnostics"
+    "github.com/alcionai/corso/src/internal/m365/collection/drive"
     "github.com/alcionai/corso/src/internal/m365/graph"
-    "github.com/alcionai/corso/src/internal/m365/onedrive"
-    betaAPI "github.com/alcionai/corso/src/internal/m365/sharepoint/api"
+    betaAPI "github.com/alcionai/corso/src/internal/m365/service/sharepoint/api"
     "github.com/alcionai/corso/src/internal/m365/support"
     "github.com/alcionai/corso/src/internal/operations/inject"
     "github.com/alcionai/corso/src/pkg/backup/details"
@ -41,9 +41,9 @@ func ConsumeRestoreCollections(
     ctr *count.Bus,
 ) (*support.ControllerOperationStatus, error) {
     var (
-        lrh            = libraryRestoreHandler{ac}
+        lrh            = drive.NewLibraryRestoreHandler(ac)
         restoreMetrics support.CollectionMetrics
-        caches         = onedrive.NewRestoreCaches(backupDriveIDNames)
+        caches         = drive.NewRestoreCaches(backupDriveIDNames)
         el             = errs.Local()
     )

@ -75,7 +75,7 @@ func ConsumeRestoreCollections(

     switch dc.FullPath().Category() {
     case path.LibrariesCategory:
-        metrics, err = onedrive.RestoreCollection(
+        metrics, err = drive.RestoreCollection(
             ictx,
             lrh,
             rcc,
@ -200,7 +200,7 @@ func restoreListItem(
         }
     }

-    dii.SharePoint = listToSPInfo(restoredList, int64(len(byteArray)))
+    dii.SharePoint = ListToSPInfo(restoredList, int64(len(byteArray)))

     return dii, nil
 }
@ -17,10 +17,10 @@ import (
     inMock "github.com/alcionai/corso/src/internal/common/idname/mock"
     "github.com/alcionai/corso/src/internal/data"
     dataMock "github.com/alcionai/corso/src/internal/data/mock"
-    exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock"
     "github.com/alcionai/corso/src/internal/m365/graph"
     "github.com/alcionai/corso/src/internal/m365/mock"
     "github.com/alcionai/corso/src/internal/m365/resource"
+    exchMock "github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
     "github.com/alcionai/corso/src/internal/m365/stub"
     "github.com/alcionai/corso/src/internal/m365/support"
     "github.com/alcionai/corso/src/internal/operations/inject"
@ -8,7 +8,7 @@ import (
     "github.com/alcionai/corso/src/internal/data"
     "github.com/alcionai/corso/src/internal/diagnostics"
     "github.com/alcionai/corso/src/internal/m365/graph"
-    "github.com/alcionai/corso/src/internal/m365/onedrive"
+    "github.com/alcionai/corso/src/internal/m365/service/onedrive"
     "github.com/alcionai/corso/src/internal/m365/support"
     "github.com/alcionai/corso/src/pkg/backup/details"
     "github.com/alcionai/corso/src/pkg/control"
@ -1,7 +1,7 @@
 package metadata

 import (
-    "github.com/alcionai/corso/src/internal/m365/onedrive/metadata"
+    "github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
     "github.com/alcionai/corso/src/pkg/path"
 )

@ -9,8 +9,8 @@ import (
     "github.com/stretchr/testify/require"
     "github.com/stretchr/testify/suite"

+    odmetadata "github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
     "github.com/alcionai/corso/src/internal/m365/graph/metadata"
-    odmetadata "github.com/alcionai/corso/src/internal/m365/onedrive/metadata"
     "github.com/alcionai/corso/src/internal/tester"
     "github.com/alcionai/corso/src/pkg/path"
 )
@ -17,10 +17,10 @@ import (

     "github.com/alcionai/corso/src/internal/common/ptr"
     "github.com/alcionai/corso/src/internal/data"
-    "github.com/alcionai/corso/src/internal/m365/onedrive"
-    "github.com/alcionai/corso/src/internal/m365/onedrive/metadata"
-    odStub "github.com/alcionai/corso/src/internal/m365/onedrive/stub"
+    "github.com/alcionai/corso/src/internal/m365/collection/drive"
+    "github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
     "github.com/alcionai/corso/src/internal/m365/resource"
+    odStub "github.com/alcionai/corso/src/internal/m365/service/onedrive/stub"
     m365Stub "github.com/alcionai/corso/src/internal/m365/stub"
     "github.com/alcionai/corso/src/internal/tester/tconfig"
     "github.com/alcionai/corso/src/pkg/control"
@ -737,7 +737,7 @@ func compareDriveItem(
     )

     if !isMeta {
-        oitem := item.(*onedrive.Item)
+        oitem := item.(*drive.Item)
         info := oitem.Info()

         if info.OneDrive != nil {
@ -14,11 +14,11 @@ import (

     "github.com/alcionai/corso/src/internal/common/dttm"
     "github.com/alcionai/corso/src/internal/common/ptr"
+    "github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
     "github.com/alcionai/corso/src/internal/m365/graph"
-    odConsts "github.com/alcionai/corso/src/internal/m365/onedrive/consts"
-    "github.com/alcionai/corso/src/internal/m365/onedrive/metadata"
-    "github.com/alcionai/corso/src/internal/m365/onedrive/stub"
     "github.com/alcionai/corso/src/internal/m365/resource"
+    odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
+    "github.com/alcionai/corso/src/internal/m365/service/onedrive/stub"
     "github.com/alcionai/corso/src/internal/tester"
     "github.com/alcionai/corso/src/internal/tester/tconfig"
     "github.com/alcionai/corso/src/internal/version"
@ -7,10 +7,11 @@ import (

     "github.com/alcionai/corso/src/internal/data"
     "github.com/alcionai/corso/src/internal/diagnostics"
-    "github.com/alcionai/corso/src/internal/m365/exchange"
+    "github.com/alcionai/corso/src/internal/m365/collection/drive"
     "github.com/alcionai/corso/src/internal/m365/graph"
-    "github.com/alcionai/corso/src/internal/m365/onedrive"
-    "github.com/alcionai/corso/src/internal/m365/sharepoint"
+    "github.com/alcionai/corso/src/internal/m365/service/exchange"
+    "github.com/alcionai/corso/src/internal/m365/service/onedrive"
+    "github.com/alcionai/corso/src/internal/m365/service/sharepoint"
     "github.com/alcionai/corso/src/internal/m365/support"
     "github.com/alcionai/corso/src/internal/operations/inject"
     "github.com/alcionai/corso/src/pkg/backup/details"
@ -71,7 +72,7 @@ func (ctrl *Controller) ConsumeRestoreCollections(
     case path.OneDriveService:
         status, err = onedrive.ConsumeRestoreCollections(
             ctx,
-            onedrive.NewRestoreHandler(ctrl.AC),
+            drive.NewRestoreHandler(ctrl.AC),
             rcc,
             ctrl.backupDriveIDNames,
             dcs,
@ -10,8 +10,8 @@ import (
     "github.com/stretchr/testify/require"
     "github.com/stretchr/testify/suite"

-    "github.com/alcionai/corso/src/internal/m365/exchange/mock"
     "github.com/alcionai/corso/src/internal/m365/graph"
+    "github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
     "github.com/alcionai/corso/src/internal/tester"
     "github.com/alcionai/corso/src/internal/tester/tconfig"
     "github.com/alcionai/corso/src/pkg/control"
@ -11,8 +11,8 @@ import (
     "github.com/stretchr/testify/require"
     "github.com/stretchr/testify/suite"

-    "github.com/alcionai/corso/src/internal/m365/exchange/mock"
     "github.com/alcionai/corso/src/internal/m365/graph"
+    "github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
     "github.com/alcionai/corso/src/internal/tester"
     "github.com/alcionai/corso/src/internal/tester/tconfig"
     "github.com/alcionai/corso/src/pkg/control"
@ -11,8 +11,8 @@ import (
     "github.com/stretchr/testify/require"
     "github.com/stretchr/testify/suite"

-    "github.com/alcionai/corso/src/internal/m365/exchange/mock"
     "github.com/alcionai/corso/src/internal/m365/graph"
+    "github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
     "github.com/alcionai/corso/src/internal/tester"
     "github.com/alcionai/corso/src/internal/tester/tconfig"
     "github.com/alcionai/corso/src/pkg/control"
@ -10,7 +10,7 @@ import (
     "github.com/stretchr/testify/suite"

     "github.com/alcionai/corso/src/internal/common/ptr"
-    exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock"
+    exchMock "github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
     "github.com/alcionai/corso/src/internal/tester"
     "github.com/alcionai/corso/src/internal/tester/tconfig"
     "github.com/alcionai/corso/src/pkg/account"
@ -7,8 +7,8 @@ import (
     "github.com/alcionai/clues"
     "github.com/stretchr/testify/require"

-    "github.com/alcionai/corso/src/internal/m365/exchange"
     "github.com/alcionai/corso/src/internal/m365/graph"
+    "github.com/alcionai/corso/src/internal/m365/service/exchange"
     "github.com/alcionai/corso/src/pkg/fault"
     "github.com/alcionai/corso/src/pkg/path"
     "github.com/alcionai/corso/src/pkg/services/m365/api"
@ -10,7 +10,7 @@ import (
     "github.com/stretchr/testify/suite"

     "github.com/alcionai/corso/src/internal/common/ptr"
-    exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock"
+    exchMock "github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
     "github.com/alcionai/corso/src/internal/tester"
     "github.com/alcionai/corso/src/pkg/services/m365/api"
 )
@ -7,6 +7,7 @@ import (

     "github.com/alcionai/corso/src/internal/common/prefixmatcher"
     "github.com/alcionai/corso/src/internal/data"
+    "github.com/alcionai/corso/src/internal/m365/collection/drive"
     "github.com/alcionai/corso/src/internal/m365/graph"
     "github.com/alcionai/corso/src/internal/m365/support"
     "github.com/alcionai/corso/src/internal/operations/inject"
@ -47,8 +48,8 @@ func ProduceBackupCollections(

     logger.Ctx(ctx).Debug("creating OneDrive collections")

-    nc := NewCollections(
-        &itemBackupHandler{ac.Drives(), scope},
+    nc := drive.NewCollections(
+        drive.NewItemBackupHandler(ac.Drives(), scope),
         tenant,
         bpc.ProtectedResource.ID(),
         su,
@ -7,7 +7,8 @@ import (
     "github.com/alcionai/clues"

     "github.com/alcionai/corso/src/internal/data"
-    "github.com/alcionai/corso/src/internal/m365/onedrive/metadata"
+    "github.com/alcionai/corso/src/internal/m365/collection/drive"
+    "github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
     "github.com/alcionai/corso/src/internal/version"
     "github.com/alcionai/corso/src/pkg/backup/details"
     "github.com/alcionai/corso/src/pkg/control"
@ -120,7 +121,7 @@ func getItemName(
     trimmedName := strings.TrimSuffix(id, metadata.DataFileSuffix)
     metaName := trimmedName + metadata.MetaFileSuffix

-    meta, err := fetchAndReadMetadata(ctx, fin, metaName)
+    meta, err := drive.FetchAndReadMetadata(ctx, fin, metaName)
     if err != nil {
         return "", clues.Wrap(err, "getting metadata").WithClues(ctx)
     }
@ -10,8 +10,8 @@ import (
     "github.com/stretchr/testify/suite"

     "github.com/alcionai/corso/src/internal/data"
-    odConsts "github.com/alcionai/corso/src/internal/m365/onedrive/consts"
-    "github.com/alcionai/corso/src/internal/m365/onedrive/metadata"
+    "github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
+    odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
     "github.com/alcionai/corso/src/internal/tester"
     "github.com/alcionai/corso/src/internal/version"
     "github.com/alcionai/corso/src/pkg/control"
@ -8,7 +8,7 @@ import (
     "github.com/microsoftgraph/msgraph-sdk-go/drives"
     "github.com/microsoftgraph/msgraph-sdk-go/models"

-    odConsts "github.com/alcionai/corso/src/internal/m365/onedrive/consts"
+    odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
     "github.com/alcionai/corso/src/pkg/backup/details"
     "github.com/alcionai/corso/src/pkg/control"
     "github.com/alcionai/corso/src/pkg/path"
src/internal/m365/service/onedrive/restore.go (new file, 221 lines)
@ -0,0 +1,221 @@
+package onedrive
+
+import (
+    "context"
+    "sort"
+
+    "github.com/alcionai/clues"
+    "github.com/pkg/errors"
+
+    "github.com/alcionai/corso/src/internal/common/idname"
+    "github.com/alcionai/corso/src/internal/data"
+    "github.com/alcionai/corso/src/internal/m365/collection/drive"
+    "github.com/alcionai/corso/src/internal/m365/support"
+    "github.com/alcionai/corso/src/internal/operations/inject"
+    "github.com/alcionai/corso/src/internal/version"
+    "github.com/alcionai/corso/src/pkg/backup/details"
+    "github.com/alcionai/corso/src/pkg/count"
+    "github.com/alcionai/corso/src/pkg/fault"
+    "github.com/alcionai/corso/src/pkg/path"
+)
+
+// ConsumeRestoreCollections will restore the specified data collections into OneDrive
+func ConsumeRestoreCollections(
+    ctx context.Context,
+    rh drive.RestoreHandler,
+    rcc inject.RestoreConsumerConfig,
+    backupDriveIDNames idname.Cacher,
+    dcs []data.RestoreCollection,
+    deets *details.Builder,
+    errs *fault.Bus,
+    ctr *count.Bus,
+) (*support.ControllerOperationStatus, error) {
+    var (
+        restoreMetrics    support.CollectionMetrics
+        el                = errs.Local()
+        caches            = drive.NewRestoreCaches(backupDriveIDNames)
+        fallbackDriveName = rcc.RestoreConfig.Location
+    )
+
+    ctx = clues.Add(ctx, "backup_version", rcc.BackupVersion)
+
+    err := caches.Populate(ctx, rh, rcc.ProtectedResource.ID())
+    if err != nil {
+        return nil, clues.Wrap(err, "initializing restore caches")
+    }
+
+    // Reorder collections so that the parents directories are created
+    // before the child directories; a requirement for permissions.
+    data.SortRestoreCollections(dcs)
+
+    // Iterate through the data collections and restore the contents of each
+    for _, dc := range dcs {
+        if el.Failure() != nil {
+            break
+        }
+
+        var (
+            err     error
+            metrics support.CollectionMetrics
+            ictx    = clues.Add(
+                ctx,
+                "category", dc.FullPath().Category(),
+                "full_path", dc.FullPath())
+        )
+
+        metrics, err = drive.RestoreCollection(
+            ictx,
+            rh,
+            rcc,
+            dc,
+            caches,
+            deets,
+            fallbackDriveName,
+            errs,
+            ctr.Local())
+        if err != nil {
+            el.AddRecoverable(ctx, err)
+        }
+
+        restoreMetrics = support.CombineMetrics(restoreMetrics, metrics)
+
+        if errors.Is(err, context.Canceled) {
+            break
+        }
+    }
+
+    status := support.CreateStatus(
+        ctx,
+        support.Restore,
+        len(dcs),
+        restoreMetrics,
+        rcc.RestoreConfig.Location)
+
+    return status, el.Failure()
+}
+
+// Augment restore path to add extra files(meta) needed for restore as
+// well as do any other ordering operations on the paths
+//
+// Only accepts StoragePath/RestorePath pairs where the RestorePath is
+// at least as long as the StoragePath. If the RestorePath is longer than the
+// StoragePath then the first few (closest to the root) directories will use
+// default permissions during restore.
+func AugmentRestorePaths(
+    backupVersion int,
+    paths []path.RestorePaths,
+) ([]path.RestorePaths, error) {
+    // Keyed by each value's StoragePath.String() which corresponds to the RepoRef
+    // of the directory.
+    colPaths := map[string]path.RestorePaths{}
+
+    for _, p := range paths {
+        first := true
+
+        for {
+            sp, err := p.StoragePath.Dir()
+            if err != nil {
+                return nil, err
+            }
+
+            drivePath, err := path.ToDrivePath(sp)
+            if err != nil {
+                return nil, err
+            }
+
+            if len(drivePath.Folders) == 0 {
+                break
+            }
+
+            if len(p.RestorePath.Elements()) < len(sp.Elements()) {
+                return nil, clues.New("restorePath shorter than storagePath").
+                    With("restore_path", p.RestorePath, "storage_path", sp)
+            }
+
+            rp := p.RestorePath
+
+            // Make sure the RestorePath always points to the level of the current
+            // collection. We need to track if it's the first iteration because the
+            // RestorePath starts out at the collection level to begin with.
+            if !first {
+                rp, err = p.RestorePath.Dir()
+                if err != nil {
+                    return nil, err
+                }
+            }
+
+            paths := path.RestorePaths{
+                StoragePath: sp,
+                RestorePath: rp,
+            }
+
+            colPaths[sp.String()] = paths
+            p = paths
+            first = false
+        }
+    }
+
+    // Adds dirmeta files as we need to make sure collections for all
+    // directories involved are created and not just the final one. No
+    // need to add `.meta` files (metadata for files) as they will
+    // anyways be looked up automatically.
+    // TODO: Stop populating .dirmeta for newer versions once we can
+    // get files from parent directory via `Fetch` in a collection.
+    // As of now look up metadata for parent directories from a
+    // collection.
+    for _, p := range colPaths {
+        el := p.StoragePath.Elements()
+
+        if backupVersion >= version.OneDrive6NameInMeta {
+            mPath, err := p.StoragePath.AppendItem(".dirmeta")
+            if err != nil {
+                return nil, err
+            }
+
+            paths = append(
+                paths,
+                path.RestorePaths{StoragePath: mPath, RestorePath: p.RestorePath})
+        } else if backupVersion >= version.OneDrive4DirIncludesPermissions {
+            mPath, err := p.StoragePath.AppendItem(el.Last() + ".dirmeta")
+            if err != nil {
+                return nil, err
+            }
+
+            paths = append(
+                paths,
+                path.RestorePaths{StoragePath: mPath, RestorePath: p.RestorePath})
+        } else if backupVersion >= version.OneDrive1DataAndMetaFiles {
+            pp, err := p.StoragePath.Dir()
+            if err != nil {
+                return nil, err
+            }
+
+            mPath, err := pp.AppendItem(el.Last() + ".dirmeta")
+            if err != nil {
+                return nil, err
+            }
+
+            prp, err := p.RestorePath.Dir()
+            if err != nil {
+                return nil, err
+            }
+
+            paths = append(
+                paths,
+                path.RestorePaths{StoragePath: mPath, RestorePath: prp})
+        }
+    }
+
+    // This sort is done primarily to order `.meta` files after `.data`
+    // files. This is only a necessity for OneDrive as we are storing
+    // metadata for files/folders in separate meta files and we the
+    // data to be restored before we can restore the metadata.
+    //
+    // This sorting assumes stuff in the same StoragePath directory end up in the
+    // same RestorePath collection.
+    sort.Slice(paths, func(i, j int) bool {
+        return paths[i].StoragePath.String() < paths[j].StoragePath.String()
+    })
+
+    return paths, nil
+}
src/internal/m365/service/onedrive/restore_test.go (new file, 317 lines)
@ -0,0 +1,317 @@
+package onedrive
+
+import (
+    "testing"
+
+    "github.com/alcionai/clues"
+    "github.com/stretchr/testify/assert"
+    "github.com/stretchr/testify/require"
+    "github.com/stretchr/testify/suite"
+
+    "github.com/alcionai/corso/src/internal/tester"
+    "github.com/alcionai/corso/src/internal/version"
+    "github.com/alcionai/corso/src/pkg/path"
+)
+
+type RestoreUnitSuite struct {
+    tester.Suite
+}
+
+func TestRestoreUnitSuite(t *testing.T) {
+    suite.Run(t, &RestoreUnitSuite{Suite: tester.NewUnitSuite(t)})
+}
+
+func (suite *RestoreUnitSuite) TestAugmentRestorePaths() {
+    // Adding a simple test here so that we can be sure that this
+    // function gets updated whenever we add a new version.
+    require.LessOrEqual(suite.T(), version.Backup, version.All8MigrateUserPNToID, "unsupported backup version")
+
+    table := []struct {
+        name    string
+        version int
+        input   []string
+        output  []string
+    }{
+        {
+            name:    "no change v0",
+            version: 0,
+            input: []string{
+                "file.txt.data",
+                "file.txt", // v0 does not have `.data`
+            },
+            output: []string{
+                "file.txt", // ordering artifact of sorting
+                "file.txt.data",
+            },
+        },
+        {
+            name:    "one folder v0",
+            version: 0,
+            input: []string{
+                "folder/file.txt.data",
+                "folder/file.txt",
+            },
+            output: []string{
+                "folder/file.txt",
+                "folder/file.txt.data",
+            },
+        },
+        {
+            name:    "no change v1",
+            version: version.OneDrive1DataAndMetaFiles,
+            input: []string{
+                "file.txt.data",
+            },
+            output: []string{
+                "file.txt.data",
+            },
+        },
+        {
+            name:    "one folder v1",
+            version: version.OneDrive1DataAndMetaFiles,
+            input: []string{
+                "folder/file.txt.data",
+            },
+            output: []string{
+                "folder.dirmeta",
+                "folder/file.txt.data",
+            },
+        },
+        {
+            name:    "nested folders v1",
+            version: version.OneDrive1DataAndMetaFiles,
+            input: []string{
+                "folder/file.txt.data",
+                "folder/folder2/file.txt.data",
+            },
+            output: []string{
+                "folder.dirmeta",
+                "folder/file.txt.data",
+                "folder/folder2.dirmeta",
+                "folder/folder2/file.txt.data",
+            },
+        },
+        {
+            name:    "no change v4",
+            version: version.OneDrive4DirIncludesPermissions,
+            input: []string{
+                "file.txt.data",
+            },
+            output: []string{
+                "file.txt.data",
+            },
+        },
+        {
+            name:    "one folder v4",
+            version: version.OneDrive4DirIncludesPermissions,
+            input: []string{
+                "folder/file.txt.data",
+            },
+            output: []string{
+                "folder/file.txt.data",
+                "folder/folder.dirmeta",
+            },
+        },
+        {
+            name:    "nested folders v4",
+            version: version.OneDrive4DirIncludesPermissions,
+            input: []string{
+                "folder/file.txt.data",
+                "folder/folder2/file.txt.data",
+            },
+            output: []string{
+                "folder/file.txt.data",
+                "folder/folder.dirmeta",
+                "folder/folder2/file.txt.data",
+                "folder/folder2/folder2.dirmeta",
+            },
+        },
+        {
+            name:    "no change v6",
+            version: version.OneDrive6NameInMeta,
+            input: []string{
+                "file.txt.data",
+            },
+            output: []string{
+                "file.txt.data",
+            },
+        },
+        {
+            name:    "one folder v6",
+            version: version.OneDrive6NameInMeta,
+            input: []string{
+                "folder/file.txt.data",
+            },
+            output: []string{
+                "folder/.dirmeta",
+                "folder/file.txt.data",
+            },
+        },
+        {
+            name:    "nested folders v6",
+            version: version.OneDrive6NameInMeta,
+            input: []string{
+                "folder/file.txt.data",
+                "folder/folder2/file.txt.data",
+            },
+            output: []string{
+                "folder/.dirmeta",
+                "folder/file.txt.data",
+                "folder/folder2/.dirmeta",
+                "folder/folder2/file.txt.data",
+            },
+        },
+    }
+
+    for _, test := range table {
+        suite.Run(test.name, func() {
+            t := suite.T()
+
+            _, flush := tester.NewContext(t)
+            defer flush()
+
+            base := "id/onedrive/user/files/drives/driveID/root:/"
+
+            inPaths := []path.RestorePaths{}
+            for _, ps := range test.input {
+                p, err := path.FromDataLayerPath(base+ps, true)
+                require.NoError(t, err, "creating path", clues.ToCore(err))
+
+                pd, err := p.Dir()
+                require.NoError(t, err, "creating collection path", clues.ToCore(err))
+
+                inPaths = append(
+                    inPaths,
+                    path.RestorePaths{StoragePath: p, RestorePath: pd})
+            }
+
+            outPaths := []path.RestorePaths{}
+            for _, ps := range test.output {
+                p, err := path.FromDataLayerPath(base+ps, true)
+                require.NoError(t, err, "creating path", clues.ToCore(err))
+
+                pd, err := p.Dir()
+                require.NoError(t, err, "creating collection path", clues.ToCore(err))
+
+                outPaths = append(
+                    outPaths,
+                    path.RestorePaths{StoragePath: p, RestorePath: pd})
+            }
+
+            actual, err := AugmentRestorePaths(test.version, inPaths)
+            require.NoError(t, err, "augmenting paths", clues.ToCore(err))
+
+            // Ordering of paths matter here as we need dirmeta files
+            // to show up before file in dir
+            assert.Equal(t, outPaths, actual, "augmented paths")
+        })
+    }
+}
+
+// TestAugmentRestorePaths_DifferentRestorePath tests that RestorePath
+// substitution works properly. Since it's only possible for future backup
+// versions to need restore path substitution (i.e. due to storing folders by
+// ID instead of name) this is only tested against the most recent backup
+// version at the moment.
+func (suite *RestoreUnitSuite) TestAugmentRestorePaths_DifferentRestorePath() {
+    // Adding a simple test here so that we can be sure that this
+    // function gets updated whenever we add a new version.
+    require.LessOrEqual(suite.T(), version.Backup, version.All8MigrateUserPNToID, "unsupported backup version")
+
+    type pathPair struct {
+        storage string
+        restore string
+    }
+
+    table := []struct {
+        name     string
+        version  int
+        input    []pathPair
+        output   []pathPair
+        errCheck assert.ErrorAssertionFunc
+    }{
+        {
+            name:    "nested folders",
+            version: version.Backup,
+            input: []pathPair{
+                {storage: "folder-id/file.txt.data", restore: "folder"},
+                {storage: "folder-id/folder2-id/file.txt.data", restore: "folder/folder2"},
+            },
+            output: []pathPair{
+                {storage: "folder-id/.dirmeta", restore: "folder"},
+                {storage: "folder-id/file.txt.data", restore: "folder"},
+                {storage: "folder-id/folder2-id/.dirmeta", restore: "folder/folder2"},
+                {storage: "folder-id/folder2-id/file.txt.data", restore: "folder/folder2"},
+            },
+            errCheck: assert.NoError,
+        },
+        {
+            name:    "restore path longer one folder",
+            version: version.Backup,
+            input: []pathPair{
+                {storage: "folder-id/file.txt.data", restore: "corso_restore/folder"},
+            },
+            output: []pathPair{
+                {storage: "folder-id/.dirmeta", restore: "corso_restore/folder"},
+                {storage: "folder-id/file.txt.data", restore: "corso_restore/folder"},
+            },
+            errCheck: assert.NoError,
+        },
+        {
+            name:    "restore path shorter one folder",
+            version: version.Backup,
+            input: []pathPair{
+                {storage: "folder-id/file.txt.data", restore: ""},
+            },
+            errCheck: assert.Error,
+        },
+    }
+
+    for _, test := range table {
+        suite.Run(test.name, func() {
+            t := suite.T()
+
+            _, flush := tester.NewContext(t)
+            defer flush()
+
+            base := "id/onedrive/user/files/drives/driveID/root:/"
+
+            inPaths := []path.RestorePaths{}
+            for _, ps := range test.input {
+                p, err := path.FromDataLayerPath(base+ps.storage, true)
+                require.NoError(t, err, "creating path", clues.ToCore(err))
+
+                r, err := path.FromDataLayerPath(base+ps.restore, false)
+                require.NoError(t, err, "creating path", clues.ToCore(err))
+
+                inPaths = append(
+                    inPaths,
+                    path.RestorePaths{StoragePath: p, RestorePath: r})
+            }
+
+            outPaths := []path.RestorePaths{}
+            for _, ps := range test.output {
+                p, err := path.FromDataLayerPath(base+ps.storage, true)
+                require.NoError(t, err, "creating path", clues.ToCore(err))
+
+                r, err := path.FromDataLayerPath(base+ps.restore, false)
+                require.NoError(t, err, "creating path", clues.ToCore(err))
+
+                outPaths = append(
+                    outPaths,
+                    path.RestorePaths{StoragePath: p, RestorePath: r})
+            }
+
+            actual, err := AugmentRestorePaths(test.version, inPaths)
+            test.errCheck(t, err, "augmenting paths", clues.ToCore(err))
+
+            if err != nil {
+                return
+            }
+
+            // Ordering of paths matter here as we need dirmeta files
+            // to show up before file in dir
+            assert.Equal(t, outPaths, actual, "augmented paths")
+        })
+    }
+}
Some files were not shown because too many files have changed in this diff.