Export data from SharePoint (#3824)
This borrows a lot of the core logic from OneDrive, as the internal structure is mostly the same.

Prev: https://github.com/alcionai/corso/pull/3822

---

#### Does this PR need a docs update or release note?

- [x] ✅ Yes, it's included
- [ ] 🕐 Yes, but in a later PR
- [ ] ⛔ No

#### Type of change

- [x] 🌻 Feature
- [ ] 🐛 Bugfix
- [ ] 🗺️ Documentation
- [ ] 🤖 Supportability/Tests
- [ ] 💻 CI/Deployment
- [ ] 🧹 Tech Debt/Cleanup

#### Issue(s)

* fixes https://github.com/alcionai/corso/issues/3823

#### Test Plan

- [ ] 💪 Manual
- [x] ⚡ Unit test
- [ ] 💚 E2E
Commit c654dfba1b (parent d7443c2211)
**CHANGELOG.md**

```diff
@@ -15,7 +15,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - Restore requires the protected resource to have access to the service being restored.
 
 ### Added
 
-- Added option to export data from OneDrive backups as individual files or as a single zip file.
+- Added option to export data from OneDrive and SharePoint backups as individual files or as a single zip file.
 
 ## [v0.11.1] (beta) - 2023-07-20
```
**src/cli/export/export.go**

```diff
@@ -1,11 +1,26 @@
 package export
 
 import (
+	"context"
+	"errors"
+
+	"github.com/alcionai/clues"
 	"github.com/spf13/cobra"
+
+	. "github.com/alcionai/corso/src/cli/print"
+	"github.com/alcionai/corso/src/cli/repo"
+	"github.com/alcionai/corso/src/cli/utils"
+	"github.com/alcionai/corso/src/internal/common/dttm"
+	"github.com/alcionai/corso/src/internal/data"
+	"github.com/alcionai/corso/src/internal/observe"
+	"github.com/alcionai/corso/src/pkg/control"
+	"github.com/alcionai/corso/src/pkg/export"
+	"github.com/alcionai/corso/src/pkg/selectors"
 )
 
 var exportCommands = []func(cmd *cobra.Command) *cobra.Command{
 	addOneDriveCommands,
+	addSharePointCommands,
 }
 
 // AddCommands attaches all `corso export * *` commands to the parent.
@@ -37,3 +52,57 @@ func exportCmd() *cobra.Command {
 func handleExportCmd(cmd *cobra.Command, args []string) error {
 	return cmd.Help()
 }
+
+func runExport(
+	ctx context.Context,
+	cmd *cobra.Command,
+	args []string,
+	ueco utils.ExportCfgOpts,
+	sel selectors.Selector,
+	backupID, serviceName string,
+) error {
+	r, _, _, _, err := utils.GetAccountAndConnect(ctx, sel.PathService(), repo.S3Overrides(cmd))
+	if err != nil {
+		return Only(ctx, err)
+	}
+
+	defer utils.CloseRepo(ctx, r)
+
+	exportLocation := args[0]
+	if len(exportLocation) == 0 {
+		// This should not be possible, but adding it just in case.
+		exportLocation = control.DefaultRestoreLocation + dttm.FormatNow(dttm.HumanReadableDriveItem)
+	}
+
+	Infof(ctx, "Exporting to folder %s", exportLocation)
+
+	eo, err := r.NewExport(
+		ctx,
+		backupID,
+		sel,
+		utils.MakeExportConfig(ctx, ueco))
+	if err != nil {
+		return Only(ctx, clues.Wrap(err, "Failed to initialize "+serviceName+" export"))
+	}
+
+	expColl, err := eo.Run(ctx)
+	if err != nil {
+		if errors.Is(err, data.ErrNotFound) {
+			return Only(ctx, clues.New("Backup or backup details missing for id "+backupID))
+		}
+
+		return Only(ctx, clues.Wrap(err, "Failed to run "+serviceName+" export"))
+	}
+
+	// It would be better to show a progress bar than a spinner, but we
+	// don't have any way of knowing how many files are available as of now.
+	diskWriteComplete := observe.MessageWithCompletion(ctx, "Writing data to disk")
+	defer close(diskWriteComplete)
+
+	err = export.ConsumeExportCollections(ctx, exportLocation, expColl, eo.Errors)
+	if err != nil {
+		return Only(ctx, err)
+	}
+
+	return nil
+}
```
**src/cli/export/onedrive.go**

```diff
@@ -1,26 +1,12 @@
 package export
 
 import (
-	"context"
-	"io"
-	"os"
-	ospath "path"
-
-	"github.com/alcionai/clues"
 	"github.com/pkg/errors"
 	"github.com/spf13/cobra"
 	"github.com/spf13/pflag"
 
 	"github.com/alcionai/corso/src/cli/flags"
-	. "github.com/alcionai/corso/src/cli/print"
-	"github.com/alcionai/corso/src/cli/repo"
 	"github.com/alcionai/corso/src/cli/utils"
-	"github.com/alcionai/corso/src/internal/common/dttm"
-	"github.com/alcionai/corso/src/internal/data"
-	"github.com/alcionai/corso/src/internal/observe"
-	"github.com/alcionai/corso/src/pkg/control"
-	"github.com/alcionai/corso/src/pkg/export"
-	"github.com/alcionai/corso/src/pkg/path"
 )
 
 // called by export.go to map subcommands to provider-specific handling.
@@ -103,113 +89,8 @@ func exportOneDriveCmd(cmd *cobra.Command, args []string) error {
 		return err
 	}
 
-	r, _, _, _, err := utils.GetAccountAndConnect(ctx, path.OneDriveService, repo.S3Overrides(cmd))
-	if err != nil {
-		return Only(ctx, err)
-	}
-
-	defer utils.CloseRepo(ctx, r)
-
-	exportLocation := args[0]
-	if exportLocation == "" {
-		// This is unlikely, but adding it just in case.
-		exportLocation = control.DefaultRestoreLocation + dttm.FormatNow(dttm.HumanReadableDriveItem)
-	}
-
-	Infof(ctx, "Exporting to folder %s", exportLocation)
-
 	sel := utils.IncludeOneDriveRestoreDataSelectors(opts)
 	utils.FilterOneDriveRestoreInfoSelectors(sel, opts)
 
-	eo, err := r.NewExport(
-		ctx,
-		flags.BackupIDFV,
-		sel.Selector,
-		utils.MakeExportConfig(ctx, opts.ExportCfg),
-	)
-	if err != nil {
-		return Only(ctx, clues.Wrap(err, "Failed to initialize OneDrive export"))
-	}
-
-	expColl, err := eo.Run(ctx)
-	if err != nil {
-		if errors.Is(err, data.ErrNotFound) {
-			return Only(ctx, clues.New("Backup or backup details missing for id "+flags.BackupIDFV))
-		}
-
-		return Only(ctx, clues.Wrap(err, "Failed to run OneDrive export"))
-	}
-
-	// It would be better to give a progressbar than a spinner, but we
-	// have know way of knowing how many files are available as of now.
-	diskWriteComplete := observe.MessageWithCompletion(ctx, "Writing data to disk")
-	defer func() {
-		diskWriteComplete <- struct{}{}
-		close(diskWriteComplete)
-	}()
-
-	err = writeExportCollections(ctx, exportLocation, expColl)
-	if err != nil {
-		return err
-	}
-
-	return nil
-}
-
-func writeExportCollections(
-	ctx context.Context,
-	exportLocation string,
-	expColl []export.Collection,
-) error {
-	for _, col := range expColl {
-		folder := ospath.Join(exportLocation, col.BasePath())
-
-		for item := range col.Items(ctx) {
-			err := item.Error
-			if err != nil {
-				return Only(ctx, clues.Wrap(err, "getting item").With("dir_name", folder))
-			}
-
-			err = writeExportItem(ctx, item, folder)
-			if err != nil {
-				return err
-			}
-		}
-	}
-
-	return nil
-}
-
-// writeExportItem writes an ExportItem to disk in the specified folder.
-func writeExportItem(ctx context.Context, item export.Item, folder string) error {
-	name := item.Data.Name
-	fpath := ospath.Join(folder, name)
-
-	progReader, pclose := observe.ItemSpinner(
-		ctx,
-		item.Data.Body,
-		observe.ItemExportMsg,
-		clues.Hide(name))
-
-	defer item.Data.Body.Close()
-	defer pclose()
-
-	err := os.MkdirAll(folder, os.ModePerm)
-	if err != nil {
-		return Only(ctx, clues.Wrap(err, "creating directory").With("dir_name", folder))
-	}
-
-	// In case the user tries to restore to a non-clean
-	// directory, we might run into collisions an fail.
-	f, err := os.Create(fpath)
-	if err != nil {
-		return Only(ctx, clues.Wrap(err, "creating file").With("file_name", name, "file_dir", folder))
-	}
-
-	_, err = io.Copy(f, progReader)
-	if err != nil {
-		return Only(ctx, clues.Wrap(err, "writing file").With("file_name", name, "file_dir", folder))
-	}
-
-	return nil
-}
+	return runExport(ctx, cmd, args, opts.ExportCfg, sel.Selector, flags.BackupIDFV, "OneDrive")
 }
```
**src/cli/export/onedrive_test.go**

```diff
@@ -59,6 +59,7 @@ func (suite *OneDriveUnitSuite) TestAddOneDriveCommands() {
 
 			cmd.SetArgs([]string{
 				"onedrive",
+				testdata.RestoreDestination,
 				"--" + flags.RunModeFN, flags.RunModeFlagTest,
 				"--" + flags.BackupFN, testdata.BackupInput,
 				"--" + flags.FileFN, testdata.FlgInputs(testdata.FileNameInput),
@@ -68,15 +69,14 @@ func (suite *OneDriveUnitSuite) TestAddOneDriveCommands() {
 				"--" + flags.FileModifiedAfterFN, testdata.FileModifiedAfterInput,
 				"--" + flags.FileModifiedBeforeFN, testdata.FileModifiedBeforeInput,
 
-				"--" + flags.ArchiveFN,
-
 				"--" + flags.AWSAccessKeyFN, testdata.AWSAccessKeyID,
 				"--" + flags.AWSSecretAccessKeyFN, testdata.AWSSecretAccessKey,
 				"--" + flags.AWSSessionTokenFN, testdata.AWSSessionToken,
 
 				"--" + flags.CorsoPassphraseFN, testdata.CorsoPassphrase,
 
-				testdata.RestoreDestination,
+				// bool flags
+				"--" + flags.ArchiveFN,
 			})
 
 			cmd.SetOut(new(bytes.Buffer)) // drop output
```
**src/cli/export/sharepoint.go** (new file, 100 lines)

```go
package export

import (
	"github.com/pkg/errors"
	"github.com/spf13/cobra"
	"github.com/spf13/pflag"

	"github.com/alcionai/corso/src/cli/flags"
	"github.com/alcionai/corso/src/cli/utils"
)

// called by export.go to map subcommands to provider-specific handling.
func addSharePointCommands(cmd *cobra.Command) *cobra.Command {
	var (
		c  *cobra.Command
		fs *pflag.FlagSet
	)

	switch cmd.Use {
	case exportCommand:
		c, fs = utils.AddCommand(cmd, sharePointExportCmd())

		c.Use = c.Use + " " + sharePointServiceCommandUseSuffix

		// Flags addition ordering should follow the order we want them to appear in help and docs:
		// More generic (ex: --user) and more frequently used flags take precedence.
		fs.SortFlags = false

		flags.AddBackupIDFlag(c, true)
		flags.AddSharePointDetailsAndRestoreFlags(c)
		flags.AddExportConfigFlags(c)
		flags.AddFailFastFlag(c)
		flags.AddCorsoPassphaseFlags(c)
		flags.AddAWSCredsFlags(c)
	}

	return c
}

const (
	sharePointServiceCommand          = "sharepoint"
	sharePointServiceCommandUseSuffix = "--backup <backupId> <destination>"

	//nolint:lll
	sharePointServiceCommandExportExamples = `# Export file with ID 98765abcdef in Bob's latest backup (1234abcd...) to my-exports directory
corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd --file 98765abcdef my-exports

# Export files named "ServerRenderTemplate.xsl" in the folder "Display Templates/Style Sheets" as an archive to the current directory
corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
    --file "ServerRenderTemplate.xsl" --folder "Display Templates/Style Sheets" --archive .

# Export all files in the folder "Display Templates/Style Sheets" that were created before 2020 to my-exports directory
corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
    --file-created-before 2020-01-01T00:00:00 --folder "Display Templates/Style Sheets" my-exports

# Export all files in the "Documents" library to the current directory
corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
    --library Documents --folder "Display Templates/Style Sheets" .`
)

// `corso export sharepoint [<flag>...] <destination>`
func sharePointExportCmd() *cobra.Command {
	return &cobra.Command{
		Use:   sharePointServiceCommand,
		Short: "Export M365 SharePoint service data",
		RunE:  exportSharePointCmd,
		Args: func(cmd *cobra.Command, args []string) error {
			if len(args) != 1 {
				return errors.New("missing restore destination")
			}

			return nil
		},
		Example: sharePointServiceCommandExportExamples,
	}
}

// processes a SharePoint service export.
func exportSharePointCmd(cmd *cobra.Command, args []string) error {
	ctx := cmd.Context()

	if utils.HasNoFlagsAndShownHelp(cmd) {
		return nil
	}

	opts := utils.MakeSharePointOpts(cmd)

	if flags.RunModeFV == flags.RunModeFlagTest {
		return nil
	}

	if err := utils.ValidateSharePointRestoreFlags(flags.BackupIDFV, opts); err != nil {
		return err
	}

	sel := utils.IncludeSharePointRestoreDataSelectors(ctx, opts)
	utils.FilterSharePointRestoreInfoSelectors(sel, opts)

	return runExport(ctx, cmd, args, opts.ExportCfg, sel.Selector, flags.BackupIDFV, "SharePoint")
}
```
**src/cli/export/sharepoint_test.go** (new file, 118 lines)

```go
package export

import (
	"bytes"
	"testing"

	"github.com/alcionai/clues"
	"github.com/spf13/cobra"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
	"github.com/stretchr/testify/suite"

	"github.com/alcionai/corso/src/cli/flags"
	"github.com/alcionai/corso/src/cli/utils"
	"github.com/alcionai/corso/src/cli/utils/testdata"
	"github.com/alcionai/corso/src/internal/tester"
)

type SharePointUnitSuite struct {
	tester.Suite
}

func TestSharePointUnitSuite(t *testing.T) {
	suite.Run(t, &SharePointUnitSuite{Suite: tester.NewUnitSuite(t)})
}

func (suite *SharePointUnitSuite) TestAddSharePointCommands() {
	expectUse := sharePointServiceCommand + " " + sharePointServiceCommandUseSuffix

	table := []struct {
		name        string
		use         string
		expectUse   string
		expectShort string
		expectRunE  func(*cobra.Command, []string) error
	}{
		{"export sharepoint", exportCommand, expectUse, sharePointExportCmd().Short, exportSharePointCmd},
	}
	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()

			cmd := &cobra.Command{Use: test.use}

			// normally a persistent flag from the root.
			// required to ensure a dry run.
			flags.AddRunModeFlag(cmd, true)

			c := addSharePointCommands(cmd)
			require.NotNil(t, c)

			cmds := cmd.Commands()
			require.Len(t, cmds, 1)

			child := cmds[0]
			assert.Equal(t, test.expectUse, child.Use)
			assert.Equal(t, test.expectShort, child.Short)
			tester.AreSameFunc(t, test.expectRunE, child.RunE)

			cmd.SetArgs([]string{
				"sharepoint",
				testdata.RestoreDestination,
				"--" + flags.RunModeFN, flags.RunModeFlagTest,
				"--" + flags.BackupFN, testdata.BackupInput,
				"--" + flags.LibraryFN, testdata.LibraryInput,
				"--" + flags.FileFN, testdata.FlgInputs(testdata.FileNameInput),
				"--" + flags.FolderFN, testdata.FlgInputs(testdata.FolderPathInput),
				"--" + flags.FileCreatedAfterFN, testdata.FileCreatedAfterInput,
				"--" + flags.FileCreatedBeforeFN, testdata.FileCreatedBeforeInput,
				"--" + flags.FileModifiedAfterFN, testdata.FileModifiedAfterInput,
				"--" + flags.FileModifiedBeforeFN, testdata.FileModifiedBeforeInput,
				"--" + flags.ListItemFN, testdata.FlgInputs(testdata.ListItemInput),
				"--" + flags.ListFolderFN, testdata.FlgInputs(testdata.ListFolderInput),
				"--" + flags.PageFN, testdata.FlgInputs(testdata.PageInput),
				"--" + flags.PageFolderFN, testdata.FlgInputs(testdata.PageFolderInput),

				"--" + flags.AWSAccessKeyFN, testdata.AWSAccessKeyID,
				"--" + flags.AWSSecretAccessKeyFN, testdata.AWSSecretAccessKey,
				"--" + flags.AWSSessionTokenFN, testdata.AWSSessionToken,

				"--" + flags.CorsoPassphraseFN, testdata.CorsoPassphrase,

				// bool flags
				"--" + flags.ArchiveFN,
			})

			cmd.SetOut(new(bytes.Buffer)) // drop output
			cmd.SetErr(new(bytes.Buffer)) // drop output
			err := cmd.Execute()
			assert.NoError(t, err, clues.ToCore(err))

			opts := utils.MakeSharePointOpts(cmd)
			assert.Equal(t, testdata.BackupInput, flags.BackupIDFV)

			assert.Equal(t, testdata.LibraryInput, opts.Library)
			assert.ElementsMatch(t, testdata.FileNameInput, opts.FileName)
			assert.ElementsMatch(t, testdata.FolderPathInput, opts.FolderPath)
			assert.Equal(t, testdata.FileCreatedAfterInput, opts.FileCreatedAfter)
			assert.Equal(t, testdata.FileCreatedBeforeInput, opts.FileCreatedBefore)
			assert.Equal(t, testdata.FileModifiedAfterInput, opts.FileModifiedAfter)
			assert.Equal(t, testdata.FileModifiedBeforeInput, opts.FileModifiedBefore)

			assert.ElementsMatch(t, testdata.ListItemInput, opts.ListItem)
			assert.ElementsMatch(t, testdata.ListFolderInput, opts.ListFolder)

			assert.ElementsMatch(t, testdata.PageInput, opts.Page)
			assert.ElementsMatch(t, testdata.PageFolderInput, opts.PageFolder)

			assert.Equal(t, testdata.Archive, opts.ExportCfg.Archive)

			assert.Equal(t, testdata.AWSAccessKeyID, flags.AWSAccessKeyFV)
			assert.Equal(t, testdata.AWSSecretAccessKey, flags.AWSSecretAccessKeyFV)
			assert.Equal(t, testdata.AWSSessionToken, flags.AWSSessionTokenFV)

			assert.Equal(t, testdata.CorsoPassphrase, flags.CorsoPassphraseFV)
		})
	}
}
```
**src/cli/restore/sharepoint_test.go**

```diff
@@ -34,7 +34,7 @@ func (suite *SharePointUnitSuite) TestAddSharePointCommands() {
 		expectShort string
 		expectRunE  func(*cobra.Command, []string) error
 	}{
-		{"restore onedrive", restoreCommand, expectUse, sharePointRestoreCmd().Short, restoreSharePointCmd},
+		{"restore sharepoint", restoreCommand, expectUse, sharePointRestoreCmd().Short, restoreSharePointCmd},
 	}
 	for _, test := range table {
 		suite.Run(test.name, func() {
```
**src/cli/utils/sharepoint.go**

```diff
@@ -32,6 +32,7 @@ type SharePointOpts struct {
 	Page       []string
 
 	RestoreCfg RestoreCfgOpts
+	ExportCfg  ExportCfgOpts
 
 	Populated flags.PopulatedFlags
 }
@@ -56,6 +57,7 @@ func MakeSharePointOpts(cmd *cobra.Command) SharePointOpts {
 		PageFolder: flags.PageFolderFV,
 
 		RestoreCfg: makeRestoreCfgOpts(cmd),
+		ExportCfg:  makeExportCfgOpts(cmd),
 
 		// populated contains the list of flags that appear in the
 		// command, according to pflags. Use this to differentiate
```
**Controller.ProduceExportCollections** (service dispatch)

```diff
@@ -41,7 +41,8 @@ func (ctrl *Controller) ProduceExportCollections(
 	)
 
 	switch sels.Service {
-	case selectors.ServiceOneDrive:
+	case selectors.ServiceOneDrive, selectors.ServiceSharePoint:
+		// OneDrive and SharePoint can share the code to create collections
 		expCollections, err = onedrive.ProduceExportCollections(
 			ctx,
 			backupVersion,
```
**src/pkg/export/consume.go** (new file, 79 lines)

```go
package export

import (
	"context"
	"io"
	"os"
	"path/filepath"

	"github.com/alcionai/clues"

	"github.com/alcionai/corso/src/internal/observe"
	"github.com/alcionai/corso/src/pkg/fault"
)

func ConsumeExportCollections(
	ctx context.Context,
	exportLocation string,
	expColl []Collection,
	errs *fault.Bus,
) error {
	el := errs.Local()

	for _, col := range expColl {
		if el.Failure() != nil {
			break
		}

		folder := filepath.Join(exportLocation, col.BasePath())
		ictx := clues.Add(ctx, "dir_name", folder)

		for item := range col.Items(ctx) {
			if item.Error != nil {
				el.AddRecoverable(ictx, clues.Wrap(item.Error, "getting item").WithClues(ctx))
			}

			if err := writeItem(ictx, item, folder); err != nil {
				el.AddRecoverable(
					ictx,
					clues.Wrap(err, "writing item").With("file_name", item.Data.Name).WithClues(ctx))
			}
		}
	}

	return el.Failure()
}

// writeItem writes an ExportItem to disk in the specified folder.
func writeItem(ctx context.Context, item Item, folder string) error {
	name := item.Data.Name
	fpath := filepath.Join(folder, name)

	progReader, pclose := observe.ItemSpinner(
		ctx,
		item.Data.Body,
		observe.ItemExportMsg,
		clues.Hide(name))

	defer item.Data.Body.Close()
	defer pclose()

	err := os.MkdirAll(folder, os.ModePerm)
	if err != nil {
		return clues.Wrap(err, "creating directory")
	}

	// In case the user tries to restore to a non-clean
	// directory, we might run into collisions and fail.
	f, err := os.Create(fpath)
	if err != nil {
		return clues.Wrap(err, "creating file")
	}

	_, err = io.Copy(f, progReader)
	if err != nil {
		return clues.Wrap(err, "writing data")
	}

	return nil
}
```
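For orientation, here is a minimal sketch of how a caller outside this PR might drive `ConsumeExportCollections` with an in-memory collection. It mirrors the `mockExportCollection` used in the package tests below; `memCollection` and the literal paths are illustrative assumptions, not part of this change.

```go
package main

import (
	"bytes"
	"context"
	"io"

	"github.com/alcionai/corso/src/pkg/export"
	"github.com/alcionai/corso/src/pkg/fault"
)

// memCollection is a hypothetical in-memory Collection, shaped like the
// mockExportCollection in the package tests.
type memCollection struct {
	path  string
	items []export.Item
}

func (mc memCollection) BasePath() string { return mc.path }

func (mc memCollection) Items(context.Context) <-chan export.Item {
	ch := make(chan export.Item)

	go func() {
		defer close(ch)

		for _, item := range mc.items {
			ch <- item
		}
	}()

	return ch
}

func main() {
	ctx := context.Background()

	cols := []export.Collection{memCollection{
		path: "Documents",
		items: []export.Item{{
			Data: export.ItemData{
				Name: "hello.txt",
				Body: io.NopCloser(bytes.NewBufferString("hello world")),
			},
		}},
	}}

	// fault.New(true) makes the first recoverable error fatal (fail-fast).
	if err := export.ConsumeExportCollections(ctx, "my-exports", cols, fault.New(true)); err != nil {
		panic(err)
	}
}
```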
**pkg/export E2E tests (ExportE2ESuite)**

```diff
@@ -13,7 +13,7 @@ import (
 	"github.com/stretchr/testify/suite"
 
 	"github.com/alcionai/corso/src/internal/tester"
-	"github.com/alcionai/corso/src/pkg/export"
+	"github.com/alcionai/corso/src/pkg/fault"
 )
 
 type ExportE2ESuite struct {
@@ -31,12 +31,12 @@ func (suite *ExportE2ESuite) SetupSuite() {
 
 type mockExportCollection struct {
 	path  string
-	items []export.Item
+	items []Item
 }
 
 func (mec mockExportCollection) BasePath() string { return mec.path }
-func (mec mockExportCollection) Items(context.Context) <-chan export.Item {
-	ch := make(chan export.Item)
+func (mec mockExportCollection) Items(context.Context) <-chan Item {
+	ch := make(chan Item)
 
 	go func() {
 		defer close(ch)
@@ -49,7 +49,7 @@ func (mec mockExportCollection) Items(context.Context) <-chan export.Item {
 	return ch
 }
 
-func (suite *ExportE2ESuite) TestWriteExportCollection() {
+func (suite *ExportE2ESuite) TestConsumeExportCollection() {
 	type ei struct {
 		name string
 		body string
@@ -132,12 +132,12 @@ func (suite *ExportE2ESuite) TestWriteExportCollection() {
 			ctx, flush := tester.NewContext(t)
 			defer flush()
 
-			ecs := []export.Collection{}
+			ecs := []Collection{}
 			for _, col := range test.cols {
-				items := []export.Item{}
+				items := []Item{}
 				for _, item := range col.items {
-					items = append(items, export.Item{
-						Data: export.ItemData{
+					items = append(items, Item{
+						Data: ItemData{
 							Name: item.name,
 							Body: io.NopCloser((bytes.NewBufferString(item.body))),
 						},
@@ -154,7 +154,7 @@ func (suite *ExportE2ESuite) TestWriteExportCollection() {
 			require.NoError(t, err)
 			defer os.RemoveAll(dir)
 
-			err = writeExportCollections(ctx, dir, ecs)
+			err = ConsumeExportCollections(ctx, dir, ecs, fault.New(true))
 			require.NoError(t, err, "writing data")
 
 			for _, col := range test.cols {
```
**pkg/export Collection interface**

```diff
@@ -7,7 +7,9 @@ import (
 
 // Collection is the interface that is returned to the SDK consumer
 type Collection interface {
-	// BasePath gets the base path of the collection
+	// BasePath gets the base path of the collection. This is derived
+	// from FullPath, but trims out things like the drive ID or any other
+	// part that is not needed to show the path to the collection.
 	BasePath() string
 
 	// Items gets the items within the collection(folder)
```
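To make the consumer-side contract concrete, here is a hedged sketch (not code from this PR) of draining a single `Collection`. It assumes only the two methods shown above plus the `Error` and `Data` fields of `Item` used elsewhere in this package; `consumeOne` is an illustrative name.

```go
package main

import (
	"context"
	"io"

	"github.com/alcionai/corso/src/pkg/export"
)

// consumeOne drains one export.Collection, copying each item's body to w
// and closing it. A contract sketch; error handling kept minimal.
func consumeOne(ctx context.Context, col export.Collection, w io.Writer) error {
	for item := range col.Items(ctx) {
		// Per-item failures are surfaced through the Error field.
		if item.Error != nil {
			return item.Error
		}

		// item.Data.Name names the file; item.Data.Body streams its content.
		_, err := io.Copy(w, item.Data.Body)
		item.Data.Body.Close()

		if err != nil {
			return err
		}
	}

	return nil
}
```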