add integration tests for missing drives (#3847)

#### Does this PR need a docs update or release note?

- [x] No

#### Type of change

- [x] 🤖 Supportability/Tests

#### Issue(s)

* #3652

#### Test Plan

- [x] Unit test
- [x] 💚 E2E
Keepers 2023-07-20 19:13:09 -06:00 committed by GitHub
parent 9359679f99
commit d555501093
6 changed files with 352 additions and 208 deletions

View File

@@ -19,7 +19,9 @@ inputs:
  site:
    description: Sharepoint site where data is to be purged.
  libraries:
-   description: List of library names within site where data is to be purged.
+   description: List of library names within the site where data is to be purged.
+ library-prefix:
+   description: List of library name prefixes within the site; matching libraries will be deleted entirely.
  folder-prefix:
    description: Name of the folder to be purged. If falsy, will purge the set of static, well known folders instead.
  older-than:
@@ -76,7 +78,10 @@ runs:
      M365_TENANT_ADMIN_USER: ${{ inputs.m365-admin-user }}
      M365_TENANT_ADMIN_PASSWORD: ${{ inputs.m365-admin-password }}
    run: |
-     ./onedrivePurge.ps1 -User ${{ inputs.user }} -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}
+     ./onedrivePurge.ps1 \
+       -User ${{ inputs.user }} \
+       -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") \
+       -PurgeBeforeTimestamp ${{ inputs.older-than }}
################################################################################################################
# Sharepoint
@@ -90,4 +95,8 @@ runs:
      M365_TENANT_ADMIN_USER: ${{ inputs.m365-admin-user }}
      M365_TENANT_ADMIN_PASSWORD: ${{ inputs.m365-admin-password }}
    run: |
-     ./onedrivePurge.ps1 -Site ${{ inputs.site }} -LibraryNameList "${{ inputs.libraries }}".split(",") -FolderPrefixPurgeList ${{ inputs.folder-prefix }} -PurgeBeforeTimestamp ${{ inputs.older-than }}
+     ./onedrivePurge.ps1 -Site ${{ inputs.site }} \
+       -LibraryNameList "${{ inputs.libraries }}".split(",") \
+       -FolderPrefixPurgeList ${{ inputs.folder-prefix }} \
+       -LibraryPrefixDeleteList ${{ inputs.library-prefix }} \
+       -PurgeBeforeTimestamp ${{ inputs.older-than }}
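
Both `run:` steps rely on PowerShell's `String.Split` to expand the comma-joined action inputs into arrays before they bind to the script's `[String[]]` parameters. A minimal sketch of that expansion, using illustrative prefix values rather than the real repository variables:

```powershell
# Illustrative input; CI supplies the real value via repository variables.
$folderPrefixInput = "Corso_Test_,Corso_CI_"

# .Split(",") produces a [String[]], which binds to -FolderPrefixPurgeList.
[String[]]$prefixes = $folderPrefixInput.Split(",")

foreach ($p in $prefixes) {
    Write-Host "would purge folders with prefix: $p"
}
```

The script also re-splits its `-FolderPrefixPurgeList` argument internally (see the `ForEach-Object { @($_.Split(',')) }` pass in MAIN below), so both the quoted, pre-split form and the raw pass-through end up as arrays.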

View File

@@ -62,6 +62,7 @@ jobs:
          site: ${{ vars[matrix.site] }}
          folder-prefix: ${{ vars.CORSO_M365_TEST_PREFIXES }}
          libraries: ${{ vars.CORSO_M365_TEST_SITE_LIBRARIES }}
+         library-prefix: ${{ vars.CORSO_M365_TEST_PREFIXES }}
          older-than: ${{ env.HALF_HOUR_AGO }}
          azure-client-id: ${{ secrets.CLIENT_ID }}
          azure-client-secret: ${{ secrets.CLIENT_SECRET }}

View File

@@ -19,14 +19,17 @@ Param (
    [datetime]$PurgeBeforeTimestamp,

    [Parameter(Mandatory = $True, HelpMessage = "Purge folders with this prefix")]
-   [String[]]$FolderPrefixPurgeList
+   [String[]]$FolderPrefixPurgeList,
+
+   [Parameter(Mandatory = $False, HelpMessage = "Delete document libraries with this prefix")]
+   [String[]]$LibraryPrefixDeleteList
)

Set-StrictMode -Version 2.0

# Attempt to set network timeout to 10min
[System.Net.ServicePointManager]::MaxServicePointIdleTime = 600000

-function Get-TimestampFromName {
+function Get-TimestampFromFolderName {
    param (
        [Parameter(Mandatory = $True, HelpMessage = "Folder ")]
        [Microsoft.SharePoint.Client.Folder]$folder

@@ -54,6 +57,36 @@ function Get-TimestampFromName {
    return $timestamp
}
function Get-TimestampFromListName {
    param (
        [Parameter(Mandatory = $True, HelpMessage = "List ")]
        [Microsoft.SharePoint.Client.List]$list
    )

    $name = $list.Title

    # fall back to the list's last-modified time when the name holds no timestamp
    [datetime]$timestamp = $list.LastItemUserModifiedDate

    try {
        # Assumes that the timestamp is at the end and starts with yyyy-mm-ddT and is ISO8601
        if ($name -imatch "(\d{4}-\d{2}-\d{2}T.*)") {
            $timestamp = [System.Convert]::ToDatetime($Matches.0)
        }

        # Assumes that the timestamp is at the end and starts with dd-MMM-yyyy_HH-MM-SS
        if ($name -imatch "(\d{2}-[a-zA-Z]{3}-\d{4}_\d{2}-\d{2}-\d{2})") {
            $timestamp = [datetime]::ParseExact($Matches.0, "dd-MMM-yyyy_HH-mm-ss", [CultureInfo]::InvariantCulture, "AssumeUniversal")
        }
    }
    catch {}

    Write-Verbose "List: $name, timestamp: $timestamp"
    return $timestamp
}
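
# Example of the intended parsing (list titles below are hypothetical):
#   "CorsoCI-2023-07-20T18:00:00Z" resolves through the ISO8601 branch, while
#   "CorsoCI-20-Jul-2023_18-00-00" resolves through ParseExact, i.e.:
#     [datetime]::ParseExact("20-Jul-2023_18-00-00", "dd-MMM-yyyy_HH-mm-ss", [CultureInfo]::InvariantCulture, "AssumeUniversal")
# Any other title falls back to LastItemUserModifiedDate.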
function Purge-Library {
    [CmdletBinding(SupportsShouldProcess)]
    Param (
@@ -77,7 +110,7 @@ function Purge-Library {
    foreach ($f in $folders) {
        $folderName = $f.Name
-       $createTime = Get-TimestampFromName -Folder $f
+       $createTime = Get-TimestampFromFolderName -Folder $f

        if ($PurgeBeforeTimestamp -gt $createTime) {
            foreach ($p in $FolderPrefixPurgeList) {
@@ -97,7 +130,7 @@ function Purge-Library {
    if ($f.ServerRelativeUrl -imatch "$SiteSuffix/{0,1}(.+?)/{0,1}$folderName$") {
        $siteRelativeParentPath = $Matches.1
    }

    if ($PSCmdlet.ShouldProcess("Name: " + $f.Name + " Parent: " + $siteRelativeParentPath, "Remove folder")) {
        Write-Host "Deleting folder: "$f.Name" with parent: $siteRelativeParentPath"
        try {
@@ -110,6 +143,54 @@ function Purge-Library {
    }
}
function Delete-LibraryByPrefix {
    [CmdletBinding(SupportsShouldProcess)]
    Param (
        [Parameter(Mandatory = $True, HelpMessage = "Document library name prefix")]
        [String]$LibraryNamePrefix,

        [Parameter(Mandatory = $True, HelpMessage = "Purge libraries before this date time (UTC)")]
        [datetime]$PurgeBeforeTimestamp,

        [Parameter(Mandatory = $True, HelpMessage = "Site suffix")]
        [String[]]$SiteSuffix
    )

    Write-Host "`nDeleting libraries with prefix: $LibraryNamePrefix"

    $listsToDelete = @()
    $lists = Get-PnPList

    foreach ($l in $lists) {
        $listName = $l.Title
        $createTime = Get-TimestampFromListName -List $l

        if ($PurgeBeforeTimestamp -gt $createTime) {
            if ($listName -like "$LibraryNamePrefix*") {
                $listsToDelete += $l
            }
        }
    }

    Write-Host "Found"$listsToDelete.count"lists to delete"

    foreach ($l in $listsToDelete) {
        if ($PSCmdlet.ShouldProcess("Name: " + $l.Title, "Remove list")) {
            Write-Host "Deleting list: "$l.Title

            try {
                Remove-PnPList -Identity $l.Id -Force
            }
            catch [System.Management.Automation.ItemNotFoundException] {
                Write-Host "List: "$l.Title" is already deleted. Skipping..."
            }
        }
    }
}
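
# Because Delete-LibraryByPrefix declares SupportsShouldProcess, a deletion
# pass can be previewed before running destructively (values are illustrative):
#   Delete-LibraryByPrefix -LibraryNamePrefix "Corso_Test_" `
#       -PurgeBeforeTimestamp "2023-07-20T00:00:00Z" `
#       -SiteSuffix "/sites/ci" -WhatIf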
######## MAIN #########

# Setup SharePointPnP
@@ -176,4 +257,8 @@ $FolderPrefixPurgeList = $FolderPrefixPurgeList | ForEach-Object { @($_.Split(',')) }
foreach ($library in $LibraryNameList) {
    Purge-Library -LibraryName $library -PurgeBeforeTimestamp $PurgeBeforeTimestamp -FolderPrefixPurgeList $FolderPrefixPurgeList -SiteSuffix $siteSuffix
}

+foreach ($libraryPfx in $LibraryPrefixDeleteList) {
+    Delete-LibraryByPrefix -LibraryNamePrefix $libraryPfx -PurgeBeforeTimestamp $PurgeBeforeTimestamp -SiteSuffix $siteSuffix
+}

View File

@@ -271,7 +271,9 @@ func Wrap(ctx context.Context, e error, msg string) *clues.Err {
        e = clues.Stack(e, clues.New(mainMsg))
    }

-   return setLabels(clues.Wrap(e, msg).WithClues(ctx).With(data...), innerMsg)
+   // WithTrace(1) skips this helper's frame so the recorded trace points at the caller.
+   ce := clues.Wrap(e, msg).WithClues(ctx).With(data...).WithTrace(1)
+
+   return setLabels(ce, innerMsg)
}
// Stack is a helper function that extracts ODataError metadata from
@@ -292,7 +294,9 @@ func Stack(ctx context.Context, e error) *clues.Err {
        e = clues.Stack(e, clues.New(mainMsg))
    }

-   return setLabels(clues.Stack(e).WithClues(ctx).With(data...), innerMsg)
+   ce := clues.Stack(e).WithClues(ctx).With(data...).WithTrace(1)
+
+   return setLabels(ce, innerMsg)
}

// stackReq is a helper function that extracts ODataError metadata from

View File

@@ -10,8 +10,6 @@ import (
    "github.com/stretchr/testify/require"
    "github.com/stretchr/testify/suite"

-   "github.com/alcionai/corso/src/internal/common/dttm"
-   inMock "github.com/alcionai/corso/src/internal/common/idname/mock"
    "github.com/alcionai/corso/src/internal/data"
    "github.com/alcionai/corso/src/internal/events"
    evmock "github.com/alcionai/corso/src/internal/events/mock"
@@ -21,7 +19,6 @@ import (
    "github.com/alcionai/corso/src/internal/m365/graph"
    "github.com/alcionai/corso/src/internal/m365/mock"
    "github.com/alcionai/corso/src/internal/m365/resource"
-   "github.com/alcionai/corso/src/internal/model"
    "github.com/alcionai/corso/src/internal/operations/inject"
    "github.com/alcionai/corso/src/internal/stats"
    "github.com/alcionai/corso/src/internal/tester"
@@ -32,7 +29,6 @@ import (
    "github.com/alcionai/corso/src/pkg/control/testdata"
    "github.com/alcionai/corso/src/pkg/count"
    "github.com/alcionai/corso/src/pkg/selectors"
-   "github.com/alcionai/corso/src/pkg/services/m365/api"
    storeTD "github.com/alcionai/corso/src/pkg/storage/testdata"
    "github.com/alcionai/corso/src/pkg/store"
)
@@ -143,13 +139,6 @@ func (suite *RestoreOpSuite) TestRestoreOperation_PersistResults() {
// integration
// ---------------------------------------------------------------------------

-type bupResults struct {
-   selectorResourceOwners []string
-   backupID               model.StableID
-   items                  int
-   ctrl                   *m365.Controller
-}

type RestoreOpIntegrationSuite struct {
    tester.Suite
@@ -267,192 +256,6 @@ func (suite *RestoreOpIntegrationSuite) TestNewRestoreOperation() {
    }
}
func setupExchangeBackup(
t *testing.T,
kw *kopia.Wrapper,
sw *store.Wrapper,
acct account.Account,
owner string,
) bupResults {
ctx, flush := tester.NewContext(t)
defer flush()
var (
users = []string{owner}
esel = selectors.NewExchangeBackup(users)
)
esel.DiscreteOwner = owner
esel.Include(
esel.MailFolders([]string{api.MailInbox}, selectors.PrefixMatch()),
esel.ContactFolders([]string{api.DefaultContacts}, selectors.PrefixMatch()),
esel.EventCalendars([]string{api.DefaultCalendar}, selectors.PrefixMatch()))
ctrl, sel := ControllerWithSelector(t, ctx, acct, resource.Users, esel.Selector, nil, nil)
bo, err := NewBackupOperation(
ctx,
control.Defaults(),
kw,
sw,
ctrl,
acct,
sel,
inMock.NewProvider(owner, owner),
evmock.NewBus())
require.NoError(t, err, clues.ToCore(err))
err = bo.Run(ctx)
require.NoError(t, err, clues.ToCore(err))
require.NotEmpty(t, bo.Results.BackupID)
return bupResults{
selectorResourceOwners: users,
backupID: bo.Results.BackupID,
// Discount metadata collection files (1 delta and one prev path for each category).
// These meta files are used to aid restore, but are not themselves
// restored (ie: counted as writes).
items: bo.Results.ItemsWritten - 6,
ctrl: ctrl,
}
}
func setupSharePointBackup(
t *testing.T,
kw *kopia.Wrapper,
sw *store.Wrapper,
acct account.Account,
owner string,
) bupResults {
ctx, flush := tester.NewContext(t)
defer flush()
var (
sites = []string{owner}
ssel = selectors.NewSharePointBackup(sites)
)
// assume a folder name "test" exists in the drive.
// this is brittle, and requires us to backfill anytime
// the site under test changes, but also prevents explosive
// growth from re-backup/restore of restored files.
ssel.Include(ssel.LibraryFolders([]string{"test"}, selectors.PrefixMatch()))
ssel.DiscreteOwner = owner
ctrl, sel := ControllerWithSelector(t, ctx, acct, resource.Sites, ssel.Selector, nil, nil)
bo, err := NewBackupOperation(
ctx,
control.Defaults(),
kw,
sw,
ctrl,
acct,
sel,
inMock.NewProvider(owner, owner),
evmock.NewBus())
require.NoError(t, err, clues.ToCore(err))
err = bo.Run(ctx)
require.NoError(t, err, clues.ToCore(err))
require.NotEmpty(t, bo.Results.BackupID)
return bupResults{
selectorResourceOwners: sites,
backupID: bo.Results.BackupID,
// Discount metadata files (2: 1 delta, 1 prev path)
// assume only one folder, and therefore 1 dirmeta per drive
// (2 drives: documents and more documents)
// assume only one file in each folder, and therefore 1 meta per drive
// (2 drives: documents and more documents)
// Meta files are used to aid restore, but are not themselves
// restored (ie: counted as writes).
items: bo.Results.ItemsWritten - 6,
ctrl: ctrl,
}
}
func (suite *RestoreOpIntegrationSuite) TestRestore_Run() {
tables := []struct {
name string
owner string
restoreCfg control.RestoreConfig
getSelector func(t *testing.T, owners []string) selectors.Selector
setup func(t *testing.T, kw *kopia.Wrapper, sw *store.Wrapper, acct account.Account, owner string) bupResults
}{
{
name: "Exchange_Restore",
owner: tconfig.M365UserID(suite.T()),
restoreCfg: testdata.DefaultRestoreConfig(""),
getSelector: func(t *testing.T, owners []string) selectors.Selector {
rsel := selectors.NewExchangeRestore(owners)
rsel.Include(rsel.AllData())
return rsel.Selector
},
setup: setupExchangeBackup,
},
{
name: "SharePoint_Restore",
owner: tconfig.M365SiteID(suite.T()),
restoreCfg: control.DefaultRestoreConfig(dttm.SafeForTesting),
getSelector: func(t *testing.T, owners []string) selectors.Selector {
rsel := selectors.NewSharePointRestore(owners)
rsel.Include(rsel.Library(tconfig.LibraryDocuments), rsel.Library(tconfig.LibraryMoreDocuments))
return rsel.Selector
},
setup: setupSharePointBackup,
},
}
for _, test := range tables {
suite.Run(test.name, func() {
var (
t = suite.T()
mb = evmock.NewBus()
bup = test.setup(t, suite.kw, suite.sw, suite.acct, test.owner)
)
ctx, flush := tester.NewContext(t)
defer flush()
require.NotZero(t, bup.items)
require.NotEmpty(t, bup.backupID)
ro, err := NewRestoreOperation(
ctx,
control.Options{FailureHandling: control.FailFast},
suite.kw,
suite.sw,
bup.ctrl,
tconfig.NewM365Account(t),
bup.backupID,
test.getSelector(t, bup.selectorResourceOwners),
test.restoreCfg,
mb,
count.New())
require.NoError(t, err, clues.ToCore(err))
ds, err := ro.Run(ctx)
require.NoError(t, err, "restoreOp.Run() %+v", clues.ToCore(err))
require.NotEmpty(t, ro.Results, "restoreOp results")
require.NotNil(t, ds, "restored details")
assert.Equal(t, ro.Status, Completed, "restoreOp status")
assert.Equal(t, ro.Results.ItemsWritten, len(ds.Items()), "item write count matches len details")
assert.Less(t, 0, ro.Results.ItemsRead, "restore items read")
assert.Less(t, int64(0), ro.Results.BytesRead, "bytes read")
assert.Equal(t, 1, ro.Results.ResourceOwners, "resource Owners")
assert.NoError(t, ro.Errors.Failure(), "non-recoverable error", clues.ToCore(ro.Errors.Failure()))
assert.Empty(t, ro.Errors.Recovered(), "recoverable errors")
assert.Equal(t, bup.items, ro.Results.ItemsWritten, "backup and restore wrote the same num of items")
assert.Equal(t, 1, mb.TimesCalled[events.RestoreStart], "restore-start events")
assert.Equal(t, 1, mb.TimesCalled[events.RestoreEnd], "restore-end events")
})
}
}
func (suite *RestoreOpIntegrationSuite) TestRestore_Run_errorNoBackup() {
    t := suite.T()

View File

@@ -5,6 +5,9 @@ import (
    "testing"

    "github.com/alcionai/clues"
+   "github.com/google/uuid"
+   "github.com/microsoftgraph/msgraph-sdk-go/models"
+   "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/require"
    "github.com/stretchr/testify/suite"
@@ -19,6 +22,8 @@ import (
    "github.com/alcionai/corso/src/internal/version"
    deeTD "github.com/alcionai/corso/src/pkg/backup/details/testdata"
    "github.com/alcionai/corso/src/pkg/control"
+   ctrlTD "github.com/alcionai/corso/src/pkg/control/testdata"
+   "github.com/alcionai/corso/src/pkg/count"
    "github.com/alcionai/corso/src/pkg/path"
    "github.com/alcionai/corso/src/pkg/selectors"
    selTD "github.com/alcionai/corso/src/pkg/selectors/testdata"
@@ -196,7 +201,7 @@ func (suite *SharePointRestoreIntgSuite) SetupSuite() {
}

func (suite *SharePointRestoreIntgSuite) TestRestore_Run_sharepointWithAdvancedOptions() {
-   sel := selectors.NewSharePointBackup([]string{suite.its.userID})
+   sel := selectors.NewSharePointBackup([]string{suite.its.siteID})
    sel.Include(selTD.SharePointBackupFolderScope(sel))
    sel.Filter(sel.Library("documents"))
    sel.DiscreteOwner = suite.its.siteID

@@ -209,3 +214,240 @@ func (suite *SharePointRestoreIntgSuite) TestRestore_Run_sharepointWithAdvancedOptions() {
        suite.its.siteDriveID,
        suite.its.siteDriveRootFolderID)
}
func (suite *SharePointRestoreIntgSuite) TestRestore_Run_sharepointDeletedDrives() {
t := suite.T()
// despite the client having a method for drive.Patch and drive.Delete, both only return
// the error code and message `invalidRequest`.
t.Skip("graph api doesn't allow patch or delete on drives, so we cannot run any conditions")
ctx, flush := tester.NewContext(t)
defer flush()
rc := ctrlTD.DefaultRestoreConfig("restore_deleted_drives")
rc.OnCollision = control.Copy
// create a new drive
md, err := suite.its.ac.Lists().PostDrive(ctx, suite.its.siteID, rc.Location)
require.NoError(t, err, clues.ToCore(err))
driveID := ptr.Val(md.GetId())
// get the root folder
mdi, err := suite.its.ac.Drives().GetRootFolder(ctx, driveID)
require.NoError(t, err, clues.ToCore(err))
rootFolderID := ptr.Val(mdi.GetId())
// add an item to it
itemName := uuid.NewString()
item := models.NewDriveItem()
item.SetName(ptr.To(itemName + ".txt"))
file := models.NewFile()
item.SetFile(file)
_, err = suite.its.ac.Drives().PostItemInContainer(
ctx,
driveID,
rootFolderID,
item,
control.Copy)
require.NoError(t, err, clues.ToCore(err))
// run a backup
var (
mb = evmock.NewBus()
opts = control.Defaults()
graphClient = suite.its.ac.Stable.Client()
)
bsel := selectors.NewSharePointBackup([]string{suite.its.siteID})
bsel.Include(selTD.SharePointBackupFolderScope(bsel))
bsel.Filter(bsel.Library(rc.Location))
bsel.DiscreteOwner = suite.its.siteID
bo, bod := prepNewTestBackupOp(t, ctx, mb, bsel.Selector, opts, version.Backup)
defer bod.close(t, ctx)
runAndCheckBackup(t, ctx, &bo, mb, false)
// test cases:
// first test, we take the current drive and rename it.
// the restore should find the drive by id and restore items
// into it like normal. Due to collision handling, this should
// create a copy of the current item.
suite.Run("renamed drive", func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
patchBody := models.NewDrive()
patchBody.SetName(ptr.To("some other name"))
md, err = graphClient.
Drives().
ByDriveId(driveID).
Patch(ctx, patchBody, nil)
require.NoError(t, err, clues.ToCore(graph.Stack(ctx, err)))
var (
mb = evmock.NewBus()
ctr = count.New()
)
ro, _ := prepNewTestRestoreOp(
t,
ctx,
bod.st,
bo.Results.BackupID,
mb,
ctr,
bod.sel,
opts,
rc)
runAndCheckRestore(t, ctx, &ro, mb, false)
assert.Equal(t, 1, ctr.Get(count.NewItemCreated), "restored an item")
resp, err := graphClient.
Drives().
ByDriveId(driveID).
Items().
ByDriveItemId(rootFolderID).
Children().
Get(ctx, nil)
require.NoError(t, err, clues.ToCore(graph.Stack(ctx, err)))
items := resp.GetValue()
assert.Len(t, items, 2)
for _, item := range items {
assert.Contains(t, ptr.Val(item.GetName()), itemName)
}
})
// second test, we delete the drive altogether. the restore should find
// no existing drives, but it should have the old drive's name and attempt
// to recreate that drive by name.
suite.Run("deleted drive", func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
err = graphClient.
Drives().
ByDriveId(driveID).
Delete(ctx, nil)
require.NoError(t, err, clues.ToCore(graph.Stack(ctx, err)))
var (
mb = evmock.NewBus()
ctr = count.New()
)
ro, _ := prepNewTestRestoreOp(
t,
ctx,
bod.st,
bo.Results.BackupID,
mb,
ctr,
bod.sel,
opts,
rc)
runAndCheckRestore(t, ctx, &ro, mb, false)
assert.Equal(t, 1, ctr.Get(count.NewItemCreated), "restored an item")
pgr := suite.its.ac.
Drives().
NewSiteDrivePager(suite.its.siteID, []string{"id", "name"})
drives, err := api.GetAllDrives(ctx, pgr, false, -1)
require.NoError(t, err, clues.ToCore(err))
var created models.Driveable

// the restore should have recreated the drive under the name recorded
// at backup time (rc.Location), with an ID different from the deleted drive
for _, drive := range drives {
    if ptr.Val(drive.GetName()) == rc.Location &&
        ptr.Val(drive.GetId()) != driveID {
        created = drive
        break
    }
}
require.NotNil(t, created, "found the restored drive by name")
md = created
driveID = ptr.Val(md.GetId())
mdi, err := suite.its.ac.Drives().GetRootFolder(ctx, driveID)
require.NoError(t, err, clues.ToCore(err))
rootFolderID = ptr.Val(mdi.GetId())
resp, err := graphClient.
Drives().
ByDriveId(driveID).
Items().
ByDriveItemId(rootFolderID).
Children().
Get(ctx, nil)
require.NoError(t, err, clues.ToCore(graph.Stack(ctx, err)))
items := resp.GetValue()
assert.Len(t, items, 1)
assert.Equal(t, ptr.Val(items[0].GetName()), itemName+".txt")
})
// final test, run a follow-up restore. This should match the
// drive we created in the prior test by name, but not by ID.
suite.Run("different drive - same name", func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
var (
mb = evmock.NewBus()
ctr = count.New()
)
ro, _ := prepNewTestRestoreOp(
t,
ctx,
bod.st,
bo.Results.BackupID,
mb,
ctr,
bod.sel,
opts,
rc)
runAndCheckRestore(t, ctx, &ro, mb, false)
assert.Equal(t, 1, ctr.Get(count.NewItemCreated), "restored an item")
resp, err := graphClient.
Drives().
ByDriveId(driveID).
Items().
ByDriveItemId(rootFolderID).
Children().
Get(ctx, nil)
require.NoError(t, err, clues.ToCore(graph.Stack(ctx, err)))
items := resp.GetValue()
assert.Len(t, items, 2)
for _, item := range items {
assert.Contains(t, ptr.Val(item.GetName()), itemName)
}
})
}