refactor drive restore & export to use sanitree (#4425)
Refactors the common drive sanity checks to use the sanitree data container. Also expands the sanitree in two ways:
1. adds leaves (individual items) to nodes for granular data comparison
2. adds multi-type support for comparing nodes of different types.

---

#### Does this PR need a docs update or release note?
- [x] ⛔ No

#### Type of change
- [x] 🤖 Supportability/Tests

#### Issue(s)
* #3988

#### Test Plan
- [x] 💪 Manual
- [x] 💚 E2E
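The sanitree idea in one self-contained sketch (simplified stand-in types, not the `Sanitree`/`Sanileaf` generics added below): each node keeps container children plus item leaves, and two trees built from different sources are walked together and compared.

```go
// Toy model of the sanitree comparison: container children plus item leaves,
// compared recursively. Types here are simplified placeholders.
package main

import "fmt"

type leaf struct {
	name string
	size int64
}

type node struct {
	name     string
	children map[string]*node
	leaves   map[string]leaf
}

// equal reports the first structural mismatch between two trees.
func equal(expect, result *node) error {
	if expect.name != result.name {
		return fmt.Errorf("name mismatch: %q vs %q", expect.name, result.name)
	}

	if len(expect.leaves) != len(result.leaves) {
		return fmt.Errorf("%s: leaf count %d vs %d", expect.name, len(expect.leaves), len(result.leaves))
	}

	for n, c := range expect.children {
		rc, ok := result.children[n]
		if !ok {
			return fmt.Errorf("%s: missing child %q", expect.name, n)
		}

		if err := equal(c, rc); err != nil {
			return err
		}
	}

	return nil
}

func main() {
	src := &node{name: "root", leaves: map[string]leaf{"a.txt": {"a.txt", 4}}, children: map[string]*node{}}
	dst := &node{name: "root", leaves: map[string]leaf{"a.txt": {"a.txt", 4}}, children: map[string]*node{}}
	fmt.Println(equal(src, dst)) // <nil>
}
```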
This commit is contained in:
parent 800170787f
commit 7419faab23

50  .github/actions/backup-restore-test/action.yml (vendored)
@@ -7,6 +7,9 @@ inputs:
   kind:
     description: Kind of test
     required: true
+  backup-id:
+    description: Backup to retrieve data out of
+    required: false
   backup-args:
     description: Arguments to pass for backup
     required: false
@@ -15,12 +18,9 @@ inputs:
     description: Arguments to pass for restore; restore is skipped when missing.
     required: false
     default: ""
-  test-folder:
+  restore-container:
     description: Folder to use for testing
     required: true
-  base-backup:
-    description: Base backup to use for testing
-    required: false
   log-dir:
     description: Folder to store test log files
     required: true
@@ -49,7 +49,7 @@ runs:
       echo Backup ${{ inputs.service }} ${{ inputs.kind }}
       echo "---------------------------"
       set -euo pipefail
-      CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-backup-${{ inputs.service }}-${{inputs.kind }}.log
+      CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}-backup-${{inputs.kind }}.log
       ./corso backup create '${{ inputs.service }}' \
       --no-stats --hide-progress --json \
       ${{ inputs.backup-args }} |
@@ -68,7 +68,7 @@ runs:
       echo Restore ${{ inputs.service }} ${{ inputs.kind }}
       echo "---------------------------"
       set -euo pipefail
-      CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-restore-${{ inputs.service }}-${{inputs.kind }}.log
+      CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}-restore-${{inputs.kind }}.log
       ./corso restore '${{ inputs.service }}' \
       --no-stats \
       --hide-progress \
@@ -88,16 +88,14 @@ runs:
     shell: bash
     working-directory: src
     env:
-      SANITY_TEST_KIND: restore
-      SANITY_TEST_FOLDER: ${{ steps.restore.outputs.result }}
-      SANITY_TEST_SERVICE: ${{ inputs.service }}
-      SANITY_TEST_DATA: ${{ inputs.test-folder }}
-      SANITY_BASE_BACKUP: ${{ inputs.base-backup }}
+      SANITY_TEST_RESTORE_CONTAINER: ${{ steps.restore.outputs.result }}
+      SANITY_TEST_SOURCE_CONTAINER: ${{ inputs.restore-container }}
+      SANITY_BACKUP_ID: ${{ inputs.backup-id }}
     run: |
       echo "---------------------------"
       echo Sanity Test Restore ${{ inputs.service }} ${{ inputs.kind }}
      echo "---------------------------"
-      CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-validate-${{ inputs.service }}-${{inputs.kind }}.log
+      CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}-validate-${{inputs.kind }}.log
       ./sanity-test restore ${{ inputs.service }}

   - name: Export ${{ inputs.service }} ${{ inputs.kind }}
@@ -110,7 +108,7 @@ runs:
       echo Export ${{ inputs.service }} ${{ inputs.kind }}
       echo "---------------------------"
       set -euo pipefail
-      CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-restore-${{ inputs.service }}-${{inputs.kind }}.log
+      CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}-restore-${{inputs.kind }}.log
       ./corso export '${{ inputs.service }}' \
       /tmp/export-${{ inputs.service }}-${{inputs.kind }} \
       --no-stats \
@@ -125,16 +123,14 @@ runs:
     shell: bash
     working-directory: src
     env:
-      SANITY_TEST_KIND: export
-      SANITY_TEST_FOLDER: /tmp/export-${{ inputs.service }}-${{inputs.kind }}
-      SANITY_TEST_SERVICE: ${{ inputs.service }}
-      SANITY_TEST_DATA: ${{ inputs.test-folder }}
-      SANITY_BASE_BACKUP: ${{ inputs.base-backup }}
+      SANITY_TEST_RESTORE_CONTAINER: /tmp/export-${{ inputs.service }}-${{inputs.kind }}
+      SANITY_TEST_SOURCE_CONTAINER: ${{ inputs.restore-container }}
+      SANITY_BACKUP_ID: ${{ inputs.backup-id }}
     run: |
       echo "---------------------------"
       echo Sanity-Test Export ${{ inputs.service }} ${{ inputs.kind }}
       echo "---------------------------"
-      CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-validate-${{ inputs.service }}-${{inputs.kind }}.log
+      CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}-validate-${{inputs.kind }}.log
       ./sanity-test export ${{ inputs.service }}

   - name: Export archive ${{ inputs.service }} ${{ inputs.kind }}
@@ -147,7 +143,7 @@ runs:
       echo Export Archive ${{ inputs.service }} ${{ inputs.kind }}
       echo "---------------------------"
       set -euo pipefail
-      CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-restore-${{ inputs.service }}-${{inputs.kind }}.log
+      CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}-restore-${{inputs.kind }}.log
       ./corso export '${{ inputs.service }}' \
       /tmp/export-${{ inputs.service }}-${{inputs.kind }}-archive \
       --no-stats \
@@ -165,16 +161,14 @@ runs:
     shell: bash
     working-directory: src
     env:
-      SANITY_TEST_KIND: export
-      SANITY_TEST_FOLDER: /tmp/export-${{ inputs.service }}-${{inputs.kind }}-unzipped
-      SANITY_TEST_SERVICE: ${{ inputs.service }}
-      SANITY_TEST_DATA: ${{ inputs.test-folder }}
-      SANITY_BASE_BACKUP: ${{ inputs.base-backup }}
+      SANITY_TEST_RESTORE_CONTAINER: /tmp/export-${{ inputs.service }}-${{inputs.kind }}-unzipped
+      SANITY_TEST_SOURCE_CONTAINER: ${{ inputs.restore-container }}
+      SANITY_BACKUP_ID: ${{ inputs.backup-id }}
     run: |
       echo "---------------------------"
       echo Sanity-Test Export Archive ${{ inputs.service }} ${{ inputs.kind }}
       echo "---------------------------"
-      CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-validate-${{ inputs.service }}-${{inputs.kind }}.log
+      CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}-validate-${{inputs.kind }}.log
       ./sanity-test export ${{ inputs.service }}

   - name: List ${{ inputs.service }} ${{ inputs.kind }}
@@ -185,7 +179,7 @@ runs:
       echo Backup list ${{ inputs.service }} ${{ inputs.kind }}
       echo "---------------------------"
       set -euo pipefail
-      CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-backup-list-${{ inputs.service }}-${{inputs.kind }}.log
+      CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-backup-${{ inputs.service }}-list-${{inputs.kind }}.log
       ./corso backup list ${{ inputs.service }} \
       --no-stats \
       --hide-progress \
@@ -206,7 +200,7 @@ runs:
       echo Backup List w/ Backup ${{ inputs.service }} ${{ inputs.kind }}
       echo "---------------------------"
       set -euo pipefail
-      CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-backup-list-single-${{ inputs.service }}-${{inputs.kind }}.log
+      CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-backup-list-${{ inputs.service }}-single-${{inputs.kind }}.log
       ./corso backup list ${{ inputs.service }} \
       --no-stats \
       --hide-progress \
4  .github/workflows/ci.yml (vendored)
@@ -172,7 +172,6 @@ jobs:
       CORSO_SECONDARY_M365_TEST_USER_ID: ${{ vars.CORSO_SECONDARY_M365_TEST_USER_ID }}
       CORSO_PASSPHRASE: ${{ secrets.INTEGRATION_TEST_CORSO_PASSPHRASE }}
       S3_BUCKET: ${{ secrets.CI_TESTS_S3_BUCKET }}
-      CORSO_ENABLE_GROUPS: true
     run: |
       set -euo pipefail
       go test \
@@ -243,7 +242,6 @@ jobs:
       CORSO_SECONDARY_M365_TEST_USER_ID: ${{ vars.CORSO_SECONDARY_M365_TEST_USER_ID }}
       CORSO_PASSPHRASE: ${{ secrets.INTEGRATION_TEST_CORSO_PASSPHRASE }}
       S3_BUCKET: ${{ secrets.CI_RETENTION_TESTS_S3_BUCKET }}
-      CORSO_ENABLE_GROUPS: true
     run: |
       set -euo pipefail
       go test \
@@ -277,7 +275,6 @@ jobs:
     env:
       CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-unit.log
       LOG_GRAPH_REQUESTS: true
-      CORSO_ENABLE_GROUPS: true
     steps:
       - uses: actions/checkout@v4

@@ -332,7 +329,6 @@ jobs:
     env:
       CORSO_LOG_FILE: ${{ github.workspace }}/testlog/run-fork.log
       LOG_GRAPH_REQUESTS: true
-      CORSO_ENABLE_GROUPS: true
     steps:
       - name: Fail check if not repository_dispatch
         if: github.event_name != 'repository_dispatch'
1  .github/workflows/nightly_test.yml (vendored)
@@ -59,7 +59,6 @@ jobs:
       AZURE_CLIENT_ID_NAME: ${{ needs.SetM365App.outputs.client_id_env }}
       AZURE_CLIENT_SECRET_NAME: ${{ needs.SetM365App.outputs.client_secret_env }}
       CLIENT_APP_SLOT: ${{ needs.SetM365App.outputs.client_app_slot }}
-      CORSO_ENABLE_GROUPS: true
     steps:
       - uses: actions/checkout@v4
68  .github/workflows/sanity-test.yaml (vendored)
@@ -39,7 +39,6 @@ jobs:
       CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-sanity.log
       RESTORE_DEST_PFX: Corso_Test_Sanity_
       TEST_USER: ${{ github.event.inputs.user != '' && github.event.inputs.user || vars.CORSO_M365_TEST_USER_ID }}
-      CORSO_ENABLE_GROUPS: true

     defaults:
       run:
@@ -184,7 +183,7 @@ jobs:
           kind: first-backup
           backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
           restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
-          test-folder: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
+          restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
           log-dir: ${{ env.CORSO_LOG_DIR }}

       - name: Exchange - Incremental backup
@@ -195,8 +194,8 @@ jobs:
           kind: incremental
           backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
           restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
-          test-folder: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
-          base-backup: ${{ steps.exchange-backup.outputs.backup-id }}
+          restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
+          backup-id: ${{ steps.exchange-backup.outputs.backup-id }}
           log-dir: ${{ env.CORSO_LOG_DIR }}

       - name: Exchange - Non delta backup
@@ -207,8 +206,8 @@ jobs:
           kind: non-delta
           backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email" --disable-delta'
           restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
-          test-folder: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
-          base-backup: ${{ steps.exchange-backup.outputs.backup-id }}
+          restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
+          backup-id: ${{ steps.exchange-backup.outputs.backup-id }}
           log-dir: ${{ env.CORSO_LOG_DIR }}

       - name: Exchange - Incremental backup after non-delta
@@ -219,8 +218,8 @@ jobs:
           kind: non-delta-incremental
           backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
           restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
-          test-folder: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
-          base-backup: ${{ steps.exchange-backup.outputs.backup-id }}
+          restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
+          backup-id: ${{ steps.exchange-backup.outputs.backup-id }}
           log-dir: ${{ env.CORSO_LOG_DIR }}

@@ -252,7 +251,7 @@ jobs:
           kind: first-backup
           backup-args: '--user "${{ env.TEST_USER }}"'
           restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
-          test-folder: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
+          restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
           log-dir: ${{ env.CORSO_LOG_DIR }}
           with-export: true

@@ -275,7 +274,7 @@ jobs:
           kind: incremental
           backup-args: '--user "${{ env.TEST_USER }}"'
           restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
-          test-folder: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
+          restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
           log-dir: ${{ env.CORSO_LOG_DIR }}
           with-export: true

@@ -308,7 +307,7 @@ jobs:
           kind: first-backup
           backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}"'
           restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
-          test-folder: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
+          restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
           log-dir: ${{ env.CORSO_LOG_DIR }}
           with-export: true

@@ -332,7 +331,7 @@ jobs:
           kind: incremental
           backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}"'
           restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
-          test-folder: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
+          restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
           log-dir: ${{ env.CORSO_LOG_DIR }}
           with-export: true

@@ -364,32 +363,33 @@ jobs:
           service: groups
           kind: first-backup
           backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}"'
-          test-folder: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}'
+          restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}'
           log-dir: ${{ env.CORSO_LOG_DIR }}
+          with-export: true

       # generate some more enteries for incremental check
-      # - name: Groups - Create new data (for incremental)
-      #   working-directory: ./src/cmd/factory
-      #   run: |
-      #     go run . sharepoint files \
-      #     --site ${{ secrets.CORSO_M365_TEST_GROUPS_SITE_URL }} \
-      #     --user ${{ env.TEST_USER }} \
-      #     --secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \
-      #     --tenant ${{ secrets.TENANT_ID }} \
-      #     --destination ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }} \
-      #     --count 4
+      - name: Groups - Create new data (for incremental)
+        working-directory: ./src/cmd/factory
+        run: |
+          go run . sharepoint files \
+          --site ${{ env.CORSO_M365_TEST_GROUPS_SITE_URL }} \
+          --user ${{ env.TEST_USER }} \
+          --secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \
+          --tenant ${{ secrets.TENANT_ID }} \
+          --destination ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }} \
+          --count 4

-      # - name: Groups - Incremental backup
-      #   id: groups-incremental
-      #   uses: ./.github/actions/backup-restore-test
-      #   with:
-      #     service: groups
-      #     kind: incremental
-      #     backup-args: '--site "${{ secrets.CORSO_M365_TEST_GROUPS_SITE_URL }}"'
-      #     restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}'
-      #     test-folder: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}'
-      #     log-dir: ${{ env.CORSO_LOG_DIR }}
-      #     with-export: true
+      - name: Groups - Incremental backup
+        id: groups-incremental
+        uses: ./.github/actions/backup-restore-test
+        with:
+          service: groups
+          kind: incremental
+          backup-args: '--site "${{ env.CORSO_M365_TEST_GROUPS_SITE_URL }}"'
+          restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}'
+          restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}'
+          log-dir: ${{ env.CORSO_LOG_DIR }}
+          with-export: true

       ##########################################################################################################################################
@@ -2,16 +2,13 @@ package common

 import (
     "context"
-    "fmt"
     "os"
     "strings"
-    "time"

     "github.com/alcionai/corso/src/internal/tester/tconfig"
     "github.com/alcionai/corso/src/pkg/account"
     "github.com/alcionai/corso/src/pkg/control"
     "github.com/alcionai/corso/src/pkg/credentials"
-    "github.com/alcionai/corso/src/pkg/logger"
     "github.com/alcionai/corso/src/pkg/services/m365/api"
 )

@@ -21,39 +18,38 @@ type PermissionInfo struct {
 }

 const (
-    sanityBaseBackup  = "SANITY_BASE_BACKUP"
-    sanityTestData    = "SANITY_TEST_DATA"
-    sanityTestFolder  = "SANITY_TEST_FOLDER"
-    sanityTestService = "SANITY_TEST_SERVICE"
+    sanityBackupID             = "SANITY_BACKUP_ID"
+    sanityTestSourceContainer  = "SANITY_TEST_SOURCE_CONTAINER"
+    sanityTestRestoreContainer = "SANITY_TEST_RESTORE_CONTAINER"
+    sanityTestUser             = "SANITY_TEST_USER"
 )

 type Envs struct {
-    BaseBackupFolder string
-    DataFolder       string
-    FolderName       string
-    Service          string
+    BackupID         string
+    SourceContainer  string
+    RestoreContainer string
+    GroupID          string
     SiteID           string
-    StartTime        time.Time
     UserID           string
 }

 func EnvVars(ctx context.Context) Envs {
-    folder := strings.TrimSpace(os.Getenv(sanityTestFolder))
-    startTime, _ := MustGetTimeFromName(ctx, folder)
+    folder := strings.TrimSpace(os.Getenv(sanityTestRestoreContainer))

     e := Envs{
-        BaseBackupFolder: os.Getenv(sanityBaseBackup),
-        DataFolder:       os.Getenv(sanityTestData),
-        FolderName:       folder,
+        BackupID:         os.Getenv(sanityBackupID),
+        SourceContainer:  os.Getenv(sanityTestSourceContainer),
+        RestoreContainer: folder,
+        GroupID:          tconfig.GetM365TeamID(ctx),
         SiteID:           tconfig.GetM365SiteID(ctx),
-        Service:          os.Getenv(sanityTestService),
-        StartTime:        startTime,
         UserID:           tconfig.GetM365UserID(ctx),
     }

-    fmt.Printf("\n-----\nenvs %+v\n-----\n", e)
+    if len(os.Getenv(sanityTestUser)) > 0 {
+        e.UserID = os.Getenv(sanityTestUser)
+    }

-    logger.Ctx(ctx).Info("envs", e)
+    Infof(ctx, "test env vars: %+v", e)

     return e
 }
@@ -1,38 +1,79 @@
 package common

 import (
+    "context"
+    "io/fs"
     "os"
     "path/filepath"
-    "time"

-    "github.com/alcionai/clues"
+    "github.com/alcionai/corso/src/pkg/path"
 )

-func FilepathWalker(
-    folderName string,
-    exportFileSizes map[string]int64,
-    startTime time.Time,
-) filepath.WalkFunc {
-    return func(path string, info os.FileInfo, err error) error {
+func BuildFilepathSanitree(
+    ctx context.Context,
+    rootDir string,
+) *Sanitree[fs.FileInfo, fs.FileInfo] {
+    var root *Sanitree[fs.FileInfo, fs.FileInfo]
+
+    walker := func(
+        p string,
+        info os.FileInfo,
+        err error,
+    ) error {
         if err != nil {
-            return clues.Stack(err)
+            Fatal(ctx, "error passed to filepath walker", err)
         }

+        relPath, err := filepath.Rel(rootDir, p)
+        if err != nil {
+            Fatal(ctx, "getting relative filepath", err)
+        }
+
+        if info != nil {
+            Debugf(ctx, "adding: %s", relPath)
+        }
+
+        if root == nil {
+            root = &Sanitree[fs.FileInfo, fs.FileInfo]{
+                Self:     info,
+                ID:       info.Name(),
+                Name:     info.Name(),
+                Leaves:   map[string]*Sanileaf[fs.FileInfo, fs.FileInfo]{},
+                Children: map[string]*Sanitree[fs.FileInfo, fs.FileInfo]{},
+            }
+
+            return nil
+        }
+
+        elems := path.Split(relPath)
+        node := root.NodeAt(ctx, elems[:len(elems)-1])
+
         if info.IsDir() {
+            node.Children[info.Name()] = &Sanitree[fs.FileInfo, fs.FileInfo]{
+                Parent:   node,
+                Self:     info,
+                ID:       info.Name(),
+                Name:     info.Name(),
+                Leaves:   map[string]*Sanileaf[fs.FileInfo, fs.FileInfo]{},
+                Children: map[string]*Sanitree[fs.FileInfo, fs.FileInfo]{},
+            }
+        } else {
+            node.Leaves[info.Name()] = &Sanileaf[fs.FileInfo, fs.FileInfo]{
+                Parent: node,
+                Self:   info,
+                ID:     info.Name(),
+                Name:   info.Name(),
+                Size:   info.Size(),
+            }
+        }
+
         return nil
     }

-    relPath, err := filepath.Rel(folderName, path)
+    err := filepath.Walk(rootDir, walker)
     if err != nil {
-        return clues.Stack(err)
+        Fatal(ctx, "walking filepath", err)
     }

-    exportFileSizes[relPath] = info.Size()
+    return root

-    if startTime.After(info.ModTime()) {
-        startTime = info.ModTime()
-    }
-
-    return nil
-    }
 }
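A minimal usage sketch of the new builder, assuming only the `common` package API shown in this hunk (`BuildFilepathSanitree`, `Children`, `Leaves`); the export path is a placeholder.

```go
package main

import (
	"context"
	"fmt"

	"github.com/alcionai/corso/src/cmd/sanity_test/common"
)

func main() {
	ctx := context.Background()

	// Placeholder path: point it at any exported or restored directory.
	root := common.BuildFilepathSanitree(ctx, "/tmp/export-onedrive-first-backup")

	for name, child := range root.Children {
		fmt.Printf("container %s: %d files\n", name, len(child.Leaves))

		for _, leaf := range child.Leaves {
			fmt.Printf("  %s (%d bytes)\n", leaf.Name, leaf.Size)
		}
	}
}
```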
@@ -3,67 +3,267 @@ package common

Old version (removed):

import (
    "context"

    "golang.org/x/exp/maps"
)

// Sanitree is used to build out a hierarchical tree of items
// for comparison against each other. Primarily so that a restore
// can compare two subtrees easily.
type Sanitree[T any] struct {
    Container     T
    ContainerID   string
    ContainerName string
    // non-containers only
    ContainsItems int
    // name -> node
    Children map[string]*Sanitree[T]
}

func AssertEqualTrees[T any](
    ctx context.Context,
    expect, other *Sanitree[T],
) {
    if expect == nil && other == nil {
        return
    }

    Assert(
        ctx,
        func() bool { return expect != nil && other != nil },
        "non nil nodes",
        expect,
        other)

    Assert(
        ctx,
        func() bool { return expect.ContainerName == other.ContainerName },
        "container names match",
        expect.ContainerName,
        other.ContainerName)

    Assert(
        ctx,
        func() bool { return expect.ContainsItems == other.ContainsItems },
        "count of items in container matches",
        expect.ContainsItems,
        other.ContainsItems)

    Assert(
        ctx,
        func() bool { return len(expect.Children) == len(other.Children) },
        "count of child containers matches",
        len(expect.Children),
        len(other.Children))

    for name, s := range expect.Children {
        ch, ok := other.Children[name]
        Assert(
            ctx,
            func() bool { return ok },
            "found matching child container",
            name,
            maps.Keys(other.Children))

        AssertEqualTrees(ctx, s, ch)
    }
}

New version:

import (
    "context"

    "github.com/alcionai/clues"
    "golang.org/x/exp/maps"

    "github.com/alcionai/corso/src/pkg/path"
)

type Sanileaf[T, L any] struct {
    Parent *Sanitree[T, L]
    Self   L
    ID     string
    Name   string
    Size   int64

    // Expand is an arbitrary k:v map of any data that is
    // uniquely scrutinized by a given service.
    Expand map[string]any
}

// Sanitree is used to build out a hierarchical tree of items
// for comparison against each other. Primarily so that a restore
// can compare two subtrees easily.
type Sanitree[T, L any] struct {
    Parent *Sanitree[T, L]

    Self T
    ID   string
    Name string

    // CountLeaves is the number of non-container child items.
    // Used for services that don't need full item metadata, and
    // just want a count of children.
    CountLeaves int
    // leaves are non-container child items. Used by services
    // that need more than just a count of items.
    // name (or equivalent) -> leaf
    Leaves map[string]*Sanileaf[T, L]
    // Children holds all child containers
    // name -> node
    Children map[string]*Sanitree[T, L]

    // Expand is an arbitrary k:v map of any data that is
    // uniquely scrutinized by a given service.
    Expand map[string]any
}

func (s *Sanitree[T, L]) Path() path.Elements {
    if s.Parent == nil {
        return path.NewElements(s.Name)
    }

    fp := s.Parent.Path()

    return append(fp, s.Name)
}

func (s *Sanitree[T, L]) NodeAt(
    ctx context.Context,
    elems []string,
) *Sanitree[T, L] {
    node := s

    for _, e := range elems {
        child, ok := node.Children[e]

        Assert(
            ctx,
            func() bool { return ok },
            "tree node should contain next child",
            s.Path(),
            maps.Keys(s.Children))

        node = child
    }

    return node
}

// ---------------------------------------------------------------------------
// Comparing trees
// ---------------------------------------------------------------------------

type (
    ContainerComparatorFn[ET, EL, RT, RL any] func(
        ctx context.Context,
        expect *Sanitree[ET, EL],
        result *Sanitree[RT, RL])
    LeafComparatorFn[ET, EL, RT, RL any] func(
        ctx context.Context,
        expect *Sanileaf[ET, EL],
        result *Sanileaf[RT, RL])
)

func AssertEqualTrees[ET, EL, RT, RL any](
    ctx context.Context,
    expect *Sanitree[ET, EL],
    result *Sanitree[RT, RL],
    customContainerCheck ContainerComparatorFn[ET, EL, RT, RL],
    customLeafCheck LeafComparatorFn[ET, EL, RT, RL],
) {
    if expect == nil && result == nil {
        return
    }

    Debugf(ctx, "comparing trees at path: %+v", expect.Path())

    checkChildrenAndLeaves(ctx, expect, result)
    ctx = clues.Add(ctx, "container_name", expect.Name)

    if customContainerCheck != nil {
        customContainerCheck(ctx, expect, result)
    }

    CompareLeaves[ET, EL, RT, RL](
        ctx,
        expect.Leaves,
        result.Leaves,
        customLeafCheck)

    // recurse
    for name, s := range expect.Children {
        r, ok := result.Children[name]
        Assert(
            ctx,
            func() bool { return ok },
            "found matching child container",
            name,
            maps.Keys(result.Children))

        AssertEqualTrees(ctx, s, r, customContainerCheck, customLeafCheck)
    }
}

// ---------------------------------------------------------------------------
// Comparing differently typed trees.
// ---------------------------------------------------------------------------

type NodeComparator[ET, EL, RT, RL any] func(
    ctx context.Context,
    expect *Sanitree[ET, EL],
    result *Sanitree[RT, RL],
)

// CompareDiffTrees recursively compares two sanitrees that have
// different data types. The two trees are expected to represent
// a common hierarchy.
//
// Additional comparisons besides the tre hierarchy are optionally
// left to the caller by population of the NodeComparator func.
func CompareDiffTrees[ET, EL, RT, RL any](
    ctx context.Context,
    expect *Sanitree[ET, EL],
    result *Sanitree[RT, RL],
    comparator NodeComparator[ET, EL, RT, RL],
) {
    if expect == nil && result == nil {
        return
    }

    Debugf(ctx, "comparing tree at path: %+v", expect.Path())

    checkChildrenAndLeaves(ctx, expect, result)
    ctx = clues.Add(ctx, "container_name", expect.Name)

    if comparator != nil {
        comparator(ctx, expect, result)
    }

    // recurse
    for name, s := range expect.Children {
        r, ok := result.Children[name]
        Assert(
            ctx,
            func() bool { return ok },
            "found matching child container",
            name,
            maps.Keys(result.Children))

        CompareDiffTrees(ctx, s, r, comparator)
    }
}

// ---------------------------------------------------------------------------
// Checking hierarchy likeness
// ---------------------------------------------------------------------------

func checkChildrenAndLeaves[ET, EL, RT, RL any](
    ctx context.Context,
    expect *Sanitree[ET, EL],
    result *Sanitree[RT, RL],
) {
    Assert(
        ctx,
        func() bool { return expect != nil },
        "expected stree is nil",
        "not nil",
        expect)

    Assert(
        ctx,
        func() bool { return result != nil },
        "result stree is nil",
        "not nil",
        result)

    ctx = clues.Add(ctx, "container_name", expect.Name)

    Assert(
        ctx,
        func() bool { return expect.Name == result.Name },
        "container names match",
        expect.Name,
        result.Name)

    Assert(
        ctx,
        func() bool { return expect.CountLeaves == result.CountLeaves },
        "count of leaves in container matches",
        expect.CountLeaves,
        result.CountLeaves)

    Assert(
        ctx,
        func() bool { return len(expect.Leaves) == len(result.Leaves) },
        "len of leaves in container matches",
        len(expect.Leaves),
        len(result.Leaves))

    Assert(
        ctx,
        func() bool { return len(expect.Children) == len(result.Children) },
        "count of child containers matches",
        len(expect.Children),
        len(result.Children))
}

func CompareLeaves[ET, EL, RT, RL any](
    ctx context.Context,
    expect map[string]*Sanileaf[ET, EL],
    result map[string]*Sanileaf[RT, RL],
    customLeafCheck LeafComparatorFn[ET, EL, RT, RL],
) {
    for name, l := range expect {
        ictx := clues.Add(ctx, "leaf_name", l.Name)

        r, ok := result[name]
        Assert(
            ictx,
            func() bool { return ok },
            "found matching leaf item",
            name,
            maps.Keys(result))

        Assert(
            ictx,
            func() bool { return l.Size == r.Size },
            "leaf sizes match",
            l.Size,
            r.Size)

        if customLeafCheck != nil {
            customLeafCheck(ictx, l, r)
        }
    }
}
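To make the generics concrete, here is a toy comparison of two differently typed trees in the style of the export checks. The `string`/`int` payload types and the leaf data are illustrative only, and a mismatch would end the process via `Assert`.

```go
package main

import (
	"context"
	"fmt"

	"github.com/alcionai/corso/src/cmd/sanity_test/common"
)

func main() {
	ctx := context.Background()

	expect := &common.Sanitree[string, string]{
		ID:   "src",
		Name: "root",
		Leaves: map[string]*common.Sanileaf[string, string]{
			"a.txt": {Name: "a.txt", Size: 4},
		},
		Children: map[string]*common.Sanitree[string, string]{},
	}

	result := &common.Sanitree[int, int]{
		ID:   "fs",
		Name: "root",
		Leaves: map[string]*common.Sanileaf[int, int]{
			"a.txt": {Name: "a.txt", Size: 4},
		},
		Children: map[string]*common.Sanitree[int, int]{},
	}

	// The comparator adds per-leaf checks (name and size) on top of the
	// structural checks that CompareDiffTrees already performs.
	comparator := func(
		ctx context.Context,
		e *common.Sanitree[string, string],
		r *common.Sanitree[int, int],
	) {
		common.CompareLeaves(ctx, e.Leaves, r.Leaves, nil)
	}

	common.CompareDiffTrees(ctx, expect, result, comparator)
	fmt.Println("trees match")
}
```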
@@ -8,6 +8,8 @@ import (
     "strings"
     "time"

+    "github.com/alcionai/clues"
+
     "github.com/alcionai/corso/src/internal/common/dttm"
     "github.com/alcionai/corso/src/pkg/logger"
 )
@@ -16,7 +18,7 @@ func Assert(
     ctx context.Context,
     passes func() bool,
     header string,
-    expect, current any,
+    expect, have any,
 ) {
     if passes() {
         return
@@ -24,20 +26,25 @@ func Assert(

     header = "TEST FAILURE: " + header
     expected := fmt.Sprintf("* Expected: %+v", expect)
-    got := fmt.Sprintf("* Current: %+v", current)
+    got := fmt.Sprintf("* Have: %+v", have)

     logger.Ctx(ctx).Info(strings.Join([]string{header, expected, got}, " "))

+    fmt.Println("=========================")
     fmt.Println(header)
     fmt.Println(expected)
     fmt.Println(got)
+    fmt.Println("=========================")

     os.Exit(1)
 }

 func Fatal(ctx context.Context, msg string, err error) {
     logger.CtxErr(ctx, err).Error("test failure: " + msg)
+    fmt.Println("=========================")
     fmt.Println("TEST FAILURE: "+msg+": ", err)
+    fmt.Println(clues.ToCore(err))
+    fmt.Println("=========================")
     os.Exit(1)
 }
@@ -50,7 +57,11 @@ func MustGetTimeFromName(ctx context.Context, name string) (time.Time, bool) {
     return t, !errors.Is(err, dttm.ErrNoTimeString)
 }

-func IsWithinTimeBound(ctx context.Context, bound, check time.Time, hasTime bool) bool {
+func IsWithinTimeBound(
+    ctx context.Context,
+    bound, check time.Time,
+    hasTime bool,
+) bool {
     if hasTime {
         if bound.Before(check) {
             logger.Ctx(ctx).
@@ -76,7 +87,34 @@ func FilterSlice(sl []string, remove string) []string {
     return r
 }

-func LogAndPrint(ctx context.Context, tmpl string, vs ...any) {
+func Infof(ctx context.Context, tmpl string, vs ...any) {
+    logger.Ctx(ctx).Infof(tmpl, vs...)
+    fmt.Printf(tmpl+"\n", vs...)
+}
+
+type debugKey string
+
+const ctxDebugKey debugKey = "ctx_debug"
+
+func SetDebug(ctx context.Context) context.Context {
+    if len(os.Getenv("SANITY_TEST_DEBUG")) == 0 {
+        return ctx
+    }
+
+    return context.WithValue(ctx, ctxDebugKey, true)
+}
+
+func isDebug(ctx context.Context) bool {
+    cdk := ctx.Value(ctxDebugKey)
+
+    return cdk != nil && cdk.(bool)
+}
+
+func Debugf(ctx context.Context, tmpl string, vs ...any) {
+    if !isDebug(ctx) {
+        return
+    }
+
     logger.Ctx(ctx).Infof(tmpl, vs...)
     fmt.Printf(tmpl+"\n", vs...)
 }
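A short sketch of the new debug output, assuming the `SetDebug`/`Debugf`/`Infof` helpers from the hunk above: `Debugf` stays silent unless `SANITY_TEST_DEBUG` is set in the environment and the context has been stamped.

```go
package main

import (
	"context"

	"github.com/alcionai/corso/src/cmd/sanity_test/common"
)

func main() {
	// SetDebug only marks the context when SANITY_TEST_DEBUG is set.
	ctx := common.SetDebug(context.Background())

	common.Infof(ctx, "always logged and printed: %s", "hello")
	common.Debugf(ctx, "printed only in debug mode: %s", "hello")
}
```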
107  src/cmd/sanity_test/driveish/driveish.go (new file)
@@ -0,0 +1,107 @@
package driveish

import (
    "context"

    "github.com/microsoftgraph/msgraph-sdk-go/models"

    "github.com/alcionai/corso/src/cmd/sanity_test/common"
    "github.com/alcionai/corso/src/internal/common/ptr"
    "github.com/alcionai/corso/src/pkg/services/m365/api"
)

const (
    expandPermissions = "expand_permissions"
    owner             = "owner"
)

func populateSanitree(
    ctx context.Context,
    ac api.Client,
    driveID string,
) *common.Sanitree[models.DriveItemable, models.DriveItemable] {
    common.Infof(ctx, "building sanitree for drive: %s", driveID)

    root, err := ac.Drives().GetRootFolder(ctx, driveID)
    if err != nil {
        common.Fatal(ctx, "getting drive root folder", err)
    }

    stree := &common.Sanitree[models.DriveItemable, models.DriveItemable]{
        Self:     root,
        ID:       ptr.Val(root.GetId()),
        Name:     ptr.Val(root.GetName()),
        Leaves:   map[string]*common.Sanileaf[models.DriveItemable, models.DriveItemable]{},
        Children: map[string]*common.Sanitree[models.DriveItemable, models.DriveItemable]{},
    }

    recursivelyBuildTree(
        ctx,
        ac,
        driveID,
        stree.Name+"/",
        stree)

    return stree
}

func recursivelyBuildTree(
    ctx context.Context,
    ac api.Client,
    driveID, location string,
    stree *common.Sanitree[models.DriveItemable, models.DriveItemable],
) {
    common.Debugf(ctx, "adding: %s", location)

    children, err := ac.Drives().GetFolderChildren(ctx, driveID, stree.ID)
    if err != nil {
        common.Fatal(ctx, "getting drive children by id", err)
    }

    for _, driveItem := range children {
        var (
            itemID   = ptr.Val(driveItem.GetId())
            itemName = ptr.Val(driveItem.GetName())
        )

        if driveItem.GetFolder() != nil {
            // currently we don't restore blank folders.
            // skip permission check for empty folders
            if ptr.Val(driveItem.GetFolder().GetChildCount()) == 0 {
                common.Infof(ctx, "skipped empty folder: %s/%s", location, itemName)
                continue
            }

            branch := &common.Sanitree[models.DriveItemable, models.DriveItemable]{
                Parent: stree,
                Self:   driveItem,
                ID:     itemID,
                Name:   itemName,
                Expand: map[string]any{
                    expandPermissions: permissionIn(ctx, ac, driveID, itemID),
                },
                Leaves:   map[string]*common.Sanileaf[models.DriveItemable, models.DriveItemable]{},
                Children: map[string]*common.Sanitree[models.DriveItemable, models.DriveItemable]{},
            }

            stree.Children[itemName] = branch

            recursivelyBuildTree(
                ctx,
                ac,
                driveID,
                location+branch.Name+"/",
                branch)
        }

        if driveItem.GetFile() != nil {
            stree.Leaves[itemName] = &common.Sanileaf[models.DriveItemable, models.DriveItemable]{
                Parent: stree,
                Self:   driveItem,
                ID:     itemID,
                Name:   itemName,
                Size:   ptr.Val(driveItem.GetSize()),
            }
        }
    }
}
61  src/cmd/sanity_test/driveish/export.go (new file)
@@ -0,0 +1,61 @@
package driveish

import (
    "context"
    "io/fs"

    "github.com/alcionai/clues"
    "github.com/microsoftgraph/msgraph-sdk-go/models"

    "github.com/alcionai/corso/src/cmd/sanity_test/common"
    "github.com/alcionai/corso/src/internal/common/ptr"
    "github.com/alcionai/corso/src/pkg/services/m365/api"
)

func CheckExport(
    ctx context.Context,
    ac api.Client,
    drive models.Driveable,
    envs common.Envs,
) {
    var (
        driveID   = ptr.Val(drive.GetId())
        driveName = ptr.Val(drive.GetName())
    )

    ctx = clues.Add(
        ctx,
        "drive_id", driveID,
        "drive_name", driveName)

    root := populateSanitree(
        ctx,
        ac,
        driveID)

    sourceTree, ok := root.Children[envs.SourceContainer]
    common.Assert(
        ctx,
        func() bool { return ok },
        "should find root-level source data folder",
        envs.SourceContainer,
        "not found")

    fpTree := common.BuildFilepathSanitree(ctx, envs.RestoreContainer)

    comparator := func(
        ctx context.Context,
        expect *common.Sanitree[models.DriveItemable, models.DriveItemable],
        result *common.Sanitree[fs.FileInfo, fs.FileInfo],
    ) {
        common.CompareLeaves(ctx, expect.Leaves, result.Leaves, nil)
    }

    common.CompareDiffTrees(
        ctx,
        sourceTree,
        fpTree.Children[envs.SourceContainer],
        comparator)

    common.Infof(ctx, "Success")
}
201  src/cmd/sanity_test/driveish/restore.go (new file)
@@ -0,0 +1,201 @@
package driveish

import (
    "context"
    "strings"

    "github.com/alcionai/clues"
    "github.com/microsoftgraph/msgraph-sdk-go/models"
    "golang.org/x/exp/slices"

    "github.com/alcionai/corso/src/cmd/sanity_test/common"
    "github.com/alcionai/corso/src/internal/common/ptr"
    "github.com/alcionai/corso/src/internal/common/tform"
    "github.com/alcionai/corso/src/pkg/services/m365/api"
)

func ComparatorEqualPerms(expect, result []common.PermissionInfo) func() bool {
    return func() bool {
        return len(expect) == len(result)
    }
}

// was getting used by sharepoint, but sharepoint was also skipping permissions
// tests. Keeping here for reference.
// func ComparatorExpectFewerPerms(expect, result []common.PermissionInfo) func() bool {
// 	return func() bool {
// 		return len(expect) <= len(result)
// 	}
// }

func CheckRestoration(
    ctx context.Context,
    ac api.Client,
    drive models.Driveable,
    envs common.Envs,
    permissionsComparator func(expect, result []common.PermissionInfo) func() bool,
) {
    var (
        driveID   = ptr.Val(drive.GetId())
        driveName = ptr.Val(drive.GetName())
    )

    ctx = clues.Add(
        ctx,
        "drive_id", driveID,
        "drive_name", driveName)

    root := populateSanitree(ctx, ac, driveID)

    sourceTree, ok := root.Children[envs.SourceContainer]
    common.Assert(
        ctx,
        func() bool { return ok },
        "should find root-level source data folder",
        envs.SourceContainer,
        "not found")

    restoreTree, ok := root.Children[envs.RestoreContainer]
    common.Assert(
        ctx,
        func() bool { return ok },
        "should find root-level restore folder",
        envs.RestoreContainer,
        "not found")

    var permissionCheck common.ContainerComparatorFn[
        models.DriveItemable, models.DriveItemable,
        models.DriveItemable, models.DriveItemable]

    if permissionsComparator != nil {
        permissionCheck = checkRestoredDriveItemPermissions(permissionsComparator)
    }

    common.AssertEqualTrees[models.DriveItemable](
        ctx,
        sourceTree,
        restoreTree.Children[envs.SourceContainer],
        permissionCheck,
        nil)

    common.Infof(ctx, "Success")
}

func permissionIn(
    ctx context.Context,
    ac api.Client,
    driveID, itemID string,
) []common.PermissionInfo {
    pi := []common.PermissionInfo{}

    pcr, err := ac.Drives().GetItemPermission(ctx, driveID, itemID)
    if err != nil {
        common.Fatal(ctx, "getting permission", err)
    }

    for _, perm := range pcr.GetValue() {
        if perm.GetGrantedToV2() == nil {
            continue
        }

        var (
            gv2      = perm.GetGrantedToV2()
            permInfo = common.PermissionInfo{}
            entityID string
        )

        // TODO: replace with filterUserPermissions in onedrive item.go
        if gv2.GetUser() != nil {
            entityID = ptr.Val(gv2.GetUser().GetId())
        } else if gv2.GetGroup() != nil {
            entityID = ptr.Val(gv2.GetGroup().GetId())
        }

        roles := common.FilterSlice(perm.GetRoles(), owner)
        for _, role := range roles {
            permInfo.EntityID = entityID
            permInfo.Roles = append(permInfo.Roles, role)
        }

        if len(roles) > 0 {
            slices.Sort[[]string, string](permInfo.Roles)

            pi = append(pi, permInfo)
        }
    }

    return pi
}

/*
TODO: replace this check with testElementsMatch
from internal/connecter/graph_connector_helper_test.go
*/
func checkRestoredDriveItemPermissions(
    comparator func(expect, result []common.PermissionInfo) func() bool,
) common.ContainerComparatorFn[
    models.DriveItemable, models.DriveItemable,
    models.DriveItemable, models.DriveItemable,
] {
    return func(
        ctx context.Context,
        expect, result *common.Sanitree[models.DriveItemable, models.DriveItemable],
    ) {
        expectPerms, err := tform.AnyValueToT[[]common.PermissionInfo](
            expandPermissions,
            expect.Expand)
        common.Assert(
            ctx,
            func() bool { return err == nil },
            "should find permissions in 'expect' node Expand data",
            expect.Name,
            err)

        resultPerms, err := tform.AnyValueToT[[]common.PermissionInfo](
            expandPermissions,
            result.Expand)
        common.Assert(
            ctx,
            func() bool { return err == nil },
            "should find permissions in 'result' node Expand data",
            result.Name,
            err)

        if len(expectPerms) == 0 {
            common.Infof(ctx, "no permissions found in folder: %s", expect.Name)
            return
        }

        common.Assert(
            ctx,
            comparator(expectPerms, resultPerms),
            "wrong number of restored permissions",
            expectPerms,
            resultPerms)

        for _, perm := range expectPerms {
            eqID := func(pi common.PermissionInfo) bool {
                return strings.EqualFold(pi.EntityID, perm.EntityID)
            }

            i := slices.IndexFunc(resultPerms, eqID)

            common.Assert(
                ctx,
                func() bool { return i >= 0 },
                "restore is missing permission",
                perm.EntityID,
                resultPerms)

            // permissions should be sorted, so a by-index comparison works
            restored := resultPerms[i]

            common.Assert(
                ctx,
                func() bool { return slices.Equal(perm.Roles, restored.Roles) },
                "different roles restored",
                perm.Roles,
                restored.Roles)
        }
    }
}
||||||
@@ -2,8 +2,14 @@ package export
 
 import (
 	"context"
+	"io/fs"
+
+	"github.com/microsoftgraph/msgraph-sdk-go/models"
 
 	"github.com/alcionai/corso/src/cmd/sanity_test/common"
+	"github.com/alcionai/corso/src/cmd/sanity_test/driveish"
+	"github.com/alcionai/corso/src/internal/common/ptr"
+	"github.com/alcionai/corso/src/pkg/path"
 	"github.com/alcionai/corso/src/pkg/services/m365/api"
 )
 
@@ -12,5 +18,109 @@ func CheckGroupsExport(
 	ac api.Client,
 	envs common.Envs,
 ) {
-	// TODO
+	// assumes we only need to sanity check the default site.
+	// should we expand this to check all sites in the group?
+	// are we backing up / restoring more than the default site?
+	drive, err := ac.Sites().GetDefaultDrive(ctx, envs.SiteID)
+	if err != nil {
+		common.Fatal(ctx, "getting the drive:", err)
+	}
+
+	driveish.CheckExport(
+		ctx,
+		ac,
+		drive,
+		envs)
+
+	checkChannelMessagesExport(
+		ctx,
+		ac,
+		envs)
+}
+
+func checkChannelMessagesExport(
+	ctx context.Context,
+	ac api.Client,
+	envs common.Envs,
+) {
+	sourceTree := populateMessagesSanitree(
+		ctx,
+		ac,
+		envs.GroupID)
+
+	fpTree := common.BuildFilepathSanitree(ctx, envs.RestoreContainer)
+
+	comparator := func(
+		ctx context.Context,
+		expect *common.Sanitree[models.Channelable, models.ChatMessageable],
+		result *common.Sanitree[fs.FileInfo, fs.FileInfo],
+	) {
+		common.CompareLeaves(ctx, expect.Leaves, result.Leaves, nil)
+	}
+
+	common.CompareDiffTrees(
+		ctx,
+		sourceTree,
+		fpTree.Children["Messages"],
+		comparator)
+
+	common.Infof(ctx, "Success")
+}
+
+func populateMessagesSanitree(
+	ctx context.Context,
+	ac api.Client,
+	groupID string,
+) *common.Sanitree[models.Channelable, models.ChatMessageable] {
+	root := &common.Sanitree[models.Channelable, models.ChatMessageable]{
+		ID:   groupID,
+		Name: path.ChannelMessagesCategory.HumanString(),
+		// group should not have leaves
+		Children: map[string]*common.Sanitree[models.Channelable, models.ChatMessageable]{},
+	}
+
+	channels, err := ac.Channels().GetChannels(ctx, groupID)
+	if err != nil {
+		common.Fatal(ctx, "getting channels", err)
+	}
+
+	for _, ch := range channels {
+		child := &common.Sanitree[
+			models.Channelable, models.ChatMessageable,
+		]{
+			Parent: root,
+			ID:     ptr.Val(ch.GetId()),
+			Name:   ptr.Val(ch.GetDisplayName()),
+			Leaves: map[string]*common.Sanileaf[models.Channelable, models.ChatMessageable]{},
+			// no children in channels
+		}
+
+		msgs, err := ac.Channels().GetChannelMessages(
+			ctx,
+			groupID,
+			ptr.Val(ch.GetId()),
+			api.CallConfig{
+				// include all nessage replies in each message
+				Expand: []string{"replies"},
+			})
+		if err != nil {
+			common.Fatal(ctx, "getting channel messages", err)
+		}
+
+		for _, msg := range msgs {
+			child.Leaves[ptr.Val(msg.GetId())] = &common.Sanileaf[
+				models.Channelable,
+				models.ChatMessageable,
+			]{
+				Self: msg,
+				ID:   ptr.Val(msg.GetId()),
+				Name: ptr.Val(msg.GetId()),          // channel messages have no display name
+				Size: int64(len(msg.GetReplies())), // size is the count of replies
+			}
+		}
+
+		root.Children[ptr.Val(ch.GetDisplayName())] = child
+	}
+
+	return root
 }
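The channel/message population above is the first use of the expanded sanitree: containers become child nodes keyed by name, individual items become leaves keyed by ID, and a leaf's Size can carry whatever count matters for the comparison. A rough sketch of that shape with simplified local types (not the actual common.Sanitree/common.Sanileaf generics):

// Illustrative node/leaf layout: channels are children of the group root,
// messages are leaves of a channel, and Size records the reply count.
package main

import "fmt"

type leaf struct {
	ID   string
	Size int64
}

type node struct {
	ID       string
	Name     string
	Children map[string]*node
	Leaves   map[string]*leaf
}

func main() {
	root := &node{
		ID:       "group-id",
		Name:     "ChannelMessages",
		Children: map[string]*node{},
	}

	channel := &node{
		ID:     "channel-id",
		Name:   "General",
		Leaves: map[string]*leaf{},
	}

	// each message becomes a leaf; Size tracks the reply count, mirroring the
	// Sanileaf usage in populateMessagesSanitree
	channel.Leaves["msg-1"] = &leaf{ID: "msg-1", Size: 2}
	root.Children[channel.Name] = channel

	fmt.Println(len(root.Children["General"].Leaves)) // 1
}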
@@ -2,13 +2,9 @@ package export
 
 import (
 	"context"
-	"fmt"
-	"path/filepath"
-	"time"
 
 	"github.com/alcionai/corso/src/cmd/sanity_test/common"
-	"github.com/alcionai/corso/src/cmd/sanity_test/restore"
-	"github.com/alcionai/corso/src/internal/common/ptr"
+	"github.com/alcionai/corso/src/cmd/sanity_test/driveish"
 	"github.com/alcionai/corso/src/pkg/services/m365/api"
 )
 
@@ -22,42 +18,9 @@ func CheckOneDriveExport(
 		common.Fatal(ctx, "getting the drive:", err)
 	}
 
-	// map itemID -> item size
-	var (
-		fileSizes       = make(map[string]int64)
-		exportFileSizes = make(map[string]int64)
-		startTime       = time.Now()
-	)
-
-	err = filepath.Walk(
-		envs.FolderName,
-		common.FilepathWalker(envs.FolderName, exportFileSizes, startTime))
-	if err != nil {
-		fmt.Println("Error walking the path:", err)
-	}
-
-	_ = restore.PopulateDriveDetails(
+	driveish.CheckExport(
 		ctx,
 		ac,
-		ptr.Val(drive.GetId()),
-		envs.FolderName,
-		envs.DataFolder,
-		fileSizes,
-		map[string][]common.PermissionInfo{},
-		startTime)
-
-	for fileName, expected := range fileSizes {
-		common.LogAndPrint(ctx, "checking for file: %s", fileName)
-
-		got := exportFileSizes[fileName]
-
-		common.Assert(
-			ctx,
-			func() bool { return expected == got },
-			fmt.Sprintf("different file size: %s", fileName),
-			expected,
-			got)
-	}
-
-	fmt.Println("Success")
+		drive,
+		envs)
 }
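The export checks above now delegate the directory comparison to the shared driveish/common helpers instead of hand-rolling filepath.Walk per service. A stdlib-only sketch of the raw input such a filepath-based tree needs (relative path mapped to file size); the exportedFileSizes name is an assumption for illustration, not the common.BuildFilepathSanitree implementation:

// Walk an exported folder and record each file's size keyed by its path
// relative to the export root.
package main

import (
	"fmt"
	"io/fs"
	"path/filepath"
)

func exportedFileSizes(root string) (map[string]int64, error) {
	sizes := map[string]int64{}

	err := filepath.WalkDir(root, func(p string, d fs.DirEntry, err error) error {
		if err != nil || d.IsDir() {
			return err
		}

		info, err := d.Info()
		if err != nil {
			return err
		}

		rel, err := filepath.Rel(root, p)
		if err != nil {
			return err
		}

		sizes[rel] = info.Size()

		return nil
	})

	return sizes, err
}

func main() {
	sizes, err := exportedFileSizes(".")
	fmt.Println(len(sizes), err)
}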
@@ -2,13 +2,9 @@ package export
 
 import (
 	"context"
-	"fmt"
-	"path/filepath"
-	"time"
 
 	"github.com/alcionai/corso/src/cmd/sanity_test/common"
-	"github.com/alcionai/corso/src/cmd/sanity_test/restore"
-	"github.com/alcionai/corso/src/internal/common/ptr"
+	"github.com/alcionai/corso/src/cmd/sanity_test/driveish"
 	"github.com/alcionai/corso/src/pkg/services/m365/api"
 )
 
@@ -22,42 +18,9 @@ func CheckSharePointExport(
 		common.Fatal(ctx, "getting the drive:", err)
 	}
 
-	// map itemID -> item size
-	var (
-		fileSizes       = make(map[string]int64)
-		exportFileSizes = make(map[string]int64)
-		startTime       = time.Now()
-	)
-
-	err = filepath.Walk(
-		envs.FolderName,
-		common.FilepathWalker(envs.FolderName, exportFileSizes, startTime))
-	if err != nil {
-		fmt.Println("Error walking the path:", err)
-	}
-
-	_ = restore.PopulateDriveDetails(
+	driveish.CheckExport(
 		ctx,
 		ac,
-		ptr.Val(drive.GetId()),
-		envs.FolderName,
-		envs.DataFolder,
-		fileSizes,
-		map[string][]common.PermissionInfo{},
-		startTime)
-
-	for fileName, expected := range fileSizes {
-		common.LogAndPrint(ctx, "checking for file: %s", fileName)
-
-		got := exportFileSizes[fileName]
-
-		common.Assert(
-			ctx,
-			func() bool { return expected == got },
-			fmt.Sprintf("different file size: %s", fileName),
-			expected,
-			got)
-	}
-
-	fmt.Println("Success")
+		drive,
+		envs)
 }
@@ -19,47 +19,31 @@ func CheckEmailRestoration(
 	ac api.Client,
 	envs common.Envs,
 ) {
-	var (
-		folderNameToItemCount        = make(map[string]int32)
-		folderNameToRestoreItemCount = make(map[string]int32)
-	)
-
-	restoredTree := buildSanitree(ctx, ac, envs.UserID, envs.FolderName)
-	dataTree := buildSanitree(ctx, ac, envs.UserID, envs.DataFolder)
+	restoredTree := buildSanitree(ctx, ac, envs.UserID, envs.RestoreContainer)
+	sourceTree := buildSanitree(ctx, ac, envs.UserID, envs.SourceContainer)
 
 	ctx = clues.Add(
 		ctx,
-		"restore_folder_id", restoredTree.ContainerID,
-		"restore_folder_name", restoredTree.ContainerName,
-		"original_folder_id", dataTree.ContainerID,
-		"original_folder_name", dataTree.ContainerName)
+		"restore_container_id", restoredTree.ID,
+		"restore_container_name", restoredTree.Name,
+		"source_container_id", sourceTree.ID,
+		"source_container_name", sourceTree.Name)
 
-	verifyEmailData(ctx, folderNameToRestoreItemCount, folderNameToItemCount)
-
-	common.AssertEqualTrees[models.MailFolderable](
+	common.AssertEqualTrees[models.MailFolderable, any](
 		ctx,
-		dataTree,
-		restoredTree.Children[envs.DataFolder])
-}
+		sourceTree,
+		restoredTree.Children[envs.SourceContainer],
+		nil,
+		nil)
 
-func verifyEmailData(ctx context.Context, restoreMessageCount, messageCount map[string]int32) {
-	for fldName, expected := range messageCount {
-		got := restoreMessageCount[fldName]
-
-		common.Assert(
-			ctx,
-			func() bool { return expected == got },
-			fmt.Sprintf("Restore item counts do not match: %s", fldName),
-			expected,
-			got)
-	}
+	common.Infof(ctx, "Success")
 }
 
 func buildSanitree(
 	ctx context.Context,
 	ac api.Client,
 	userID, folderName string,
-) *common.Sanitree[models.MailFolderable] {
+) *common.Sanitree[models.MailFolderable, any] {
 	gcc, err := ac.Mail().GetContainerByName(
 		ctx,
 		userID,
@@ -80,46 +64,47 @@ func buildSanitree(
 		clues.New("casting "+*gcc.GetDisplayName()+" to models.MailFolderable"))
 	}
 
-	root := &common.Sanitree[models.MailFolderable]{
-		Container:     mmf,
-		ContainerID:   ptr.Val(mmf.GetId()),
-		ContainerName: ptr.Val(mmf.GetDisplayName()),
-		ContainsItems: int(ptr.Val(mmf.GetTotalItemCount())),
-		Children:      map[string]*common.Sanitree[models.MailFolderable]{},
+	root := &common.Sanitree[models.MailFolderable, any]{
+		Self:        mmf,
+		ID:          ptr.Val(mmf.GetId()),
+		Name:        ptr.Val(mmf.GetDisplayName()),
+		CountLeaves: int(ptr.Val(mmf.GetTotalItemCount())),
+		Children:    map[string]*common.Sanitree[models.MailFolderable, any]{},
 	}
 
-	recurseSubfolders(ctx, ac, root, userID)
+	recursivelyBuildTree(ctx, ac, root, userID, root.Name+"/")
 
 	return root
 }
 
-func recurseSubfolders(
+func recursivelyBuildTree(
 	ctx context.Context,
 	ac api.Client,
-	parent *common.Sanitree[models.MailFolderable],
-	userID string,
+	stree *common.Sanitree[models.MailFolderable, any],
+	userID, location string,
 ) {
+	common.Debugf(ctx, "adding: %s", location)
+
 	childFolders, err := ac.Mail().GetContainerChildren(
 		ctx,
 		userID,
-		parent.ContainerID)
+		stree.ID)
 	if err != nil {
-		common.Fatal(ctx, "getting subfolders", err)
+		common.Fatal(ctx, "getting child containers", err)
 	}
 
 	for _, child := range childFolders {
-		c := &common.Sanitree[models.MailFolderable]{
-			Container:     child,
-			ContainerID:   ptr.Val(child.GetId()),
-			ContainerName: ptr.Val(child.GetDisplayName()),
-			ContainsItems: int(ptr.Val(child.GetTotalItemCount())),
-			Children:      map[string]*common.Sanitree[models.MailFolderable]{},
+		c := &common.Sanitree[models.MailFolderable, any]{
+			Parent:      stree,
+			Self:        child,
+			ID:          ptr.Val(child.GetId()),
+			Name:        ptr.Val(child.GetDisplayName()),
+			CountLeaves: int(ptr.Val(child.GetTotalItemCount())),
+			Children:    map[string]*common.Sanitree[models.MailFolderable, any]{},
 		}
 
-		parent.Children[c.ContainerName] = c
+		stree.Children[c.Name] = c
 
-		if ptr.Val(child.GetChildFolderCount()) > 0 {
-			recurseSubfolders(ctx, ac, c, userID)
-		}
+		recursivelyBuildTree(ctx, ac, c, userID, location+c.Name+"/")
 	}
 }
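The exchange restore check now builds both containers into generic sanitrees and recurses with a human-readable location string for debug logging. A small sketch of that recursion pattern with a local node type and an in-memory folder listing standing in for the Mail API (both stand-ins are assumptions for illustration only):

// Recursively build a tree of folders, extending the location string at each
// level the same way recursivelyBuildTree extends it with name+"/".
package main

import "fmt"

type mailNode struct {
	ID       string
	Name     string
	Children map[string]*mailNode
}

// folders maps a parent folder name to its child folder names.
var folders = map[string][]string{
	"root":  {"Inbox", "Archive"},
	"Inbox": {"Receipts"},
}

func buildTree(id, name, location string) *mailNode {
	fmt.Println("adding:", location)

	n := &mailNode{ID: id, Name: name, Children: map[string]*mailNode{}}

	for _, child := range folders[id] {
		n.Children[child] = buildTree(child, child, location+child+"/")
	}

	return n
}

func main() {
	root := buildTree("root", "root", "root/")
	fmt.Println(len(root.Children)) // 2
}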
@@ -4,13 +4,27 @@ import (
 	"context"
 
 	"github.com/alcionai/corso/src/cmd/sanity_test/common"
+	"github.com/alcionai/corso/src/cmd/sanity_test/driveish"
 	"github.com/alcionai/corso/src/pkg/services/m365/api"
 )
 
+// only checks drive restoration. channel messages are not
+// supported for restore actions.
 func CheckGroupsRestoration(
 	ctx context.Context,
 	ac api.Client,
 	envs common.Envs,
 ) {
-	// TODO
+	drive, err := ac.Sites().GetDefaultDrive(ctx, envs.SiteID)
+	if err != nil {
+		common.Fatal(ctx, "getting site's default drive:", err)
+	}
+
+	driveish.CheckRestoration(
+		ctx,
+		ac,
+		drive,
+		envs,
+		// skip permissions tests
+		nil)
 }
@@ -2,23 +2,12 @@ package restore
 
 import (
 	"context"
-	"fmt"
-	"strings"
-	"time"
-
-	"github.com/alcionai/clues"
-	"golang.org/x/exp/slices"
 
 	"github.com/alcionai/corso/src/cmd/sanity_test/common"
-	"github.com/alcionai/corso/src/internal/common/ptr"
-	"github.com/alcionai/corso/src/pkg/path"
+	"github.com/alcionai/corso/src/cmd/sanity_test/driveish"
 	"github.com/alcionai/corso/src/pkg/services/m365/api"
 )
 
-const (
-	owner = "owner"
-)
-
 func CheckOneDriveRestoration(
 	ctx context.Context,
 	ac api.Client,
@@ -26,353 +15,13 @@ func CheckOneDriveRestoration(
 ) {
 	drive, err := ac.Users().GetDefaultDrive(ctx, envs.UserID)
 	if err != nil {
-		common.Fatal(ctx, "getting the drive:", err)
+		common.Fatal(ctx, "getting user's default drive:", err)
 	}
 
-	checkDriveRestoration(
+	driveish.CheckRestoration(
 		ctx,
 		ac,
-		path.OneDriveService,
-		envs.FolderName,
-		ptr.Val(drive.GetId()),
-		ptr.Val(drive.GetName()),
-		envs.DataFolder,
-		envs.StartTime,
-		false)
-}
-
-func checkDriveRestoration(
-	ctx context.Context,
-	ac api.Client,
-	service path.ServiceType,
-	folderName,
-	driveID,
-	driveName,
-	dataFolder string,
-	startTime time.Time,
-	skipPermissionTest bool,
-) {
-	var (
-		// map itemID -> item size
-		fileSizes = make(map[string]int64)
-		// map itemID -> permission id -> []permission roles
-		folderPermissions         = make(map[string][]common.PermissionInfo)
-		restoreFile               = make(map[string]int64)
-		restoredFolderPermissions = make(map[string][]common.PermissionInfo)
-	)
-
-	ctx = clues.Add(ctx, "drive_id", driveID, "drive_name", driveName)
-
-	restoreFolderID := PopulateDriveDetails(
-		ctx,
-		ac,
-		driveID,
-		folderName,
-		dataFolder,
-		fileSizes,
-		folderPermissions,
-		startTime)
-
-	getRestoredDrive(
-		ctx,
-		ac,
-		driveID,
-		restoreFolderID,
-		restoreFile,
-		restoredFolderPermissions,
-		startTime)
-
-	checkRestoredDriveItemPermissions(
-		ctx,
-		service,
-		skipPermissionTest,
-		folderPermissions,
-		restoredFolderPermissions)
-
-	for fileName, expected := range fileSizes {
-		common.LogAndPrint(ctx, "checking for file: %s", fileName)
-
-		got := restoreFile[fileName]
-
-		common.Assert(
-			ctx,
-			func() bool { return expected == got },
-			fmt.Sprintf("different file size: %s", fileName),
-			expected,
-			got)
-	}
-
-	fmt.Println("Success")
-}
-
-func PopulateDriveDetails(
-	ctx context.Context,
-	ac api.Client,
-	driveID, folderName, dataFolder string,
-	fileSizes map[string]int64,
-	folderPermissions map[string][]common.PermissionInfo,
-	startTime time.Time,
-) string {
-	var restoreFolderID string
-
-	children, err := ac.Drives().GetFolderChildren(ctx, driveID, "root")
-	if err != nil {
-		common.Fatal(ctx, "getting drive by id", err)
-	}
-
-	for _, driveItem := range children {
-		var (
-			itemID   = ptr.Val(driveItem.GetId())
-			itemName = ptr.Val(driveItem.GetName())
-		)
-
-		if itemName == folderName {
-			restoreFolderID = itemID
-			continue
-		}
-
-		if itemName != dataFolder {
-			common.LogAndPrint(ctx, "test data for folder: %s", dataFolder)
-			continue
-		}
-
-		// if it's a file check the size
-		if driveItem.GetFile() != nil {
-			fileSizes[itemName] = ptr.Val(driveItem.GetSize())
-		}
-
-		if driveItem.GetFolder() == nil && driveItem.GetPackageEscaped() == nil {
-			continue
-		}
-
-		// currently we don't restore blank folders.
-		// skip permission check for empty folders
-		if ptr.Val(driveItem.GetFolder().GetChildCount()) == 0 {
-			common.LogAndPrint(ctx, "skipped empty folder: %s", itemName)
-			continue
-		}
-
-		folderPermissions[itemName] = permissionIn(ctx, ac, driveID, itemID)
-
-		getOneDriveChildFolder(
-			ctx,
-			ac,
-			driveID,
-			itemID,
-			itemName,
-			fileSizes,
-			folderPermissions,
-			startTime)
-	}
-
-	return restoreFolderID
-}
-
-func checkRestoredDriveItemPermissions(
-	ctx context.Context,
-	service path.ServiceType,
-	skip bool,
-	folderPermissions map[string][]common.PermissionInfo,
-	restoredFolderPermissions map[string][]common.PermissionInfo,
-) {
-	if skip {
-		return
-	}
-
-	/**
-	TODO: replace this check with testElementsMatch
-	from internal/connecter/graph_connector_helper_test.go
-	**/
-
-	for folderName, permissions := range folderPermissions {
-		common.LogAndPrint(ctx, "checking for folder: %s", folderName)
-
-		restoreFolderPerm := restoredFolderPermissions[folderName]
-
-		if len(permissions) < 1 {
-			common.LogAndPrint(ctx, "no permissions found in: %s", folderName)
-			continue
-		}
-
-		permCheck := func() bool { return len(permissions) == len(restoreFolderPerm) }
-
-		if service == path.SharePointService {
-			permCheck = func() bool { return len(permissions) <= len(restoreFolderPerm) }
-		}
-
-		common.Assert(
-			ctx,
-			permCheck,
-			fmt.Sprintf("wrong number of restored permissions: %s", folderName),
-			permissions,
-			restoreFolderPerm)
-
-		for _, perm := range permissions {
-			eqID := func(pi common.PermissionInfo) bool { return strings.EqualFold(pi.EntityID, perm.EntityID) }
-			i := slices.IndexFunc(restoreFolderPerm, eqID)
-
-			common.Assert(
-				ctx,
-				func() bool { return i >= 0 },
-				fmt.Sprintf("permission was restored in: %s", folderName),
-				perm.EntityID,
-				restoreFolderPerm)
-
-			// permissions should be sorted, so a by-index comparison works
-			restored := restoreFolderPerm[i]
-
-			common.Assert(
-				ctx,
-				func() bool { return slices.Equal(perm.Roles, restored.Roles) },
-				fmt.Sprintf("different roles restored: %s", folderName),
-				perm.Roles,
-				restored.Roles)
-		}
-	}
-}
-
-func getOneDriveChildFolder(
-	ctx context.Context,
-	ac api.Client,
-	driveID, itemID, parentName string,
-	fileSizes map[string]int64,
-	folderPermission map[string][]common.PermissionInfo,
-	startTime time.Time,
-) {
-	children, err := ac.Drives().GetFolderChildren(ctx, driveID, itemID)
-	if err != nil {
-		common.Fatal(ctx, "getting child folder", err)
-	}
-
-	for _, driveItem := range children {
-		var (
-			itemID   = ptr.Val(driveItem.GetId())
-			itemName = ptr.Val(driveItem.GetName())
-			fullName = parentName + "/" + itemName
-		)
-
-		folderTime, hasTime := common.MustGetTimeFromName(ctx, itemName)
-		if !common.IsWithinTimeBound(ctx, startTime, folderTime, hasTime) {
-			continue
-		}
-
-		// if it's a file check the size
-		if driveItem.GetFile() != nil {
-			fileSizes[fullName] = ptr.Val(driveItem.GetSize())
-		}
-
-		if driveItem.GetFolder() == nil && driveItem.GetPackageEscaped() == nil {
-			continue
-		}
-
-		// currently we don't restore blank folders.
-		// skip permission check for empty folders
-		if ptr.Val(driveItem.GetFolder().GetChildCount()) == 0 {
-			common.LogAndPrint(ctx, "skipped empty folder: %s", fullName)
-
-			continue
-		}
-
-		folderPermission[fullName] = permissionIn(ctx, ac, driveID, itemID)
-		getOneDriveChildFolder(
-			ctx,
-			ac,
-			driveID,
-			itemID,
-			fullName,
-			fileSizes,
-			folderPermission,
-			startTime)
-	}
-}
-
-func getRestoredDrive(
-	ctx context.Context,
-	ac api.Client,
-	driveID, restoreFolderID string,
-	restoreFile map[string]int64,
-	restoreFolder map[string][]common.PermissionInfo,
-	startTime time.Time,
-) {
-	children, err := ac.Drives().GetFolderChildren(ctx, driveID, restoreFolderID)
-	if err != nil {
-		common.Fatal(ctx, "getting child folder", err)
-	}
-
-	for _, item := range children {
-		var (
-			itemID   = ptr.Val(item.GetId())
-			itemName = ptr.Val(item.GetName())
-			itemSize = ptr.Val(item.GetSize())
-		)
-
-		if item.GetFile() != nil {
-			restoreFile[itemName] = itemSize
-			continue
-		}
-
-		if item.GetFolder() == nil && item.GetPackageEscaped() == nil {
-			continue
-		}
-
-		restoreFolder[itemName] = permissionIn(ctx, ac, driveID, itemID)
-		getOneDriveChildFolder(
-			ctx,
-			ac,
-			driveID,
-			itemID,
-			itemName,
-			restoreFile,
-			restoreFolder,
-			startTime)
-	}
-}
-
-// ---------------------------------------------------------------------------
-// permission helpers
-// ---------------------------------------------------------------------------
-
-func permissionIn(
-	ctx context.Context,
-	ac api.Client,
-	driveID, itemID string,
-) []common.PermissionInfo {
-	pi := []common.PermissionInfo{}
-
-	pcr, err := ac.Drives().GetItemPermission(ctx, driveID, itemID)
-	if err != nil {
-		common.Fatal(ctx, "getting permission", err)
-	}
-
-	for _, perm := range pcr.GetValue() {
-		if perm.GetGrantedToV2() == nil {
-			continue
-		}
-
-		var (
-			gv2      = perm.GetGrantedToV2()
-			permInfo = common.PermissionInfo{}
-			entityID string
-		)
-
-		// TODO: replace with filterUserPermissions in onedrive item.go
-		if gv2.GetUser() != nil {
-			entityID = ptr.Val(gv2.GetUser().GetId())
-		} else if gv2.GetGroup() != nil {
-			entityID = ptr.Val(gv2.GetGroup().GetId())
-		}
-
-		roles := common.FilterSlice(perm.GetRoles(), owner)
-		for _, role := range roles {
-			permInfo.EntityID = entityID
-			permInfo.Roles = append(permInfo.Roles, role)
-		}
-
-		if len(roles) > 0 {
-			slices.Sort(permInfo.Roles)
-			pi = append(pi, permInfo)
-		}
-	}
-
-	return pi
+		drive,
+		envs,
+		driveish.ComparatorEqualPerms)
 }
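With the shared driveish.CheckRestoration, the per-service difference collapses into the comparator argument: OneDrive passes a strict permission comparator, while SharePoint and Groups pass nil to skip the permission check entirely. A minimal sketch of that nil-able comparator pattern, with a simplified signature rather than the driveish package's actual type:

// Pass a comparison function to enable the permission check, or nil to skip it.
package main

import "fmt"

type permComparator func(expect, result []string) bool

func checkRestoration(expect, result []string, cmp permComparator) {
	if cmp == nil {
		fmt.Println("skipping permission comparison")
		return
	}

	fmt.Println("permissions match:", cmp(expect, result))
}

// equalCount is a stand-in for a strict comparator, e.g. "restored permission
// count must equal the source count".
func equalCount(expect, result []string) bool {
	return len(expect) == len(result)
}

func main() {
	checkRestoration([]string{"write"}, []string{"write"}, equalCount)
	checkRestoration([]string{"write"}, nil, nil) // permissions intentionally skipped
}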
@@ -4,8 +4,7 @@ import (
 	"context"
 
 	"github.com/alcionai/corso/src/cmd/sanity_test/common"
-	"github.com/alcionai/corso/src/internal/common/ptr"
-	"github.com/alcionai/corso/src/pkg/path"
+	"github.com/alcionai/corso/src/cmd/sanity_test/driveish"
 	"github.com/alcionai/corso/src/pkg/services/m365/api"
 )
 
@@ -16,17 +15,14 @@ func CheckSharePointRestoration(
 ) {
 	drive, err := ac.Sites().GetDefaultDrive(ctx, envs.SiteID)
 	if err != nil {
-		common.Fatal(ctx, "getting the drive:", err)
+		common.Fatal(ctx, "getting site's default drive:", err)
 	}
 
-	checkDriveRestoration(
+	driveish.CheckRestoration(
 		ctx,
 		ac,
-		path.SharePointService,
-		envs.FolderName,
-		ptr.Val(drive.GetId()),
-		ptr.Val(drive.GetName()),
-		envs.DataFolder,
-		envs.StartTime,
-		true)
+		drive,
+		envs,
+		// skip permissions tests
+		nil)
 }
@@ -79,7 +79,7 @@ func main() {
 
 func exportCMD() *cobra.Command {
 	return &cobra.Command{
-		Use:               "restore",
+		Use:               "export",
 		Short:             "run the post-export sanity tests",
 		DisableAutoGenTag: true,
 		RunE:              sanityTestExport,
@@ -117,7 +117,7 @@ func exportGroupsCMD() *cobra.Command {
 }
 
 func sanityTestExportGroups(cmd *cobra.Command, args []string) error {
-	ctx := cmd.Context()
+	ctx := common.SetDebug(cmd.Context())
 	envs := common.EnvVars(ctx)
 
 	ac, err := common.GetAC()
@@ -140,7 +140,7 @@ func exportOneDriveCMD() *cobra.Command {
 }
 
 func sanityTestExportOneDrive(cmd *cobra.Command, args []string) error {
-	ctx := cmd.Context()
+	ctx := common.SetDebug(cmd.Context())
 	envs := common.EnvVars(ctx)
 
 	ac, err := common.GetAC()
@@ -163,7 +163,7 @@ func exportSharePointCMD() *cobra.Command {
 }
 
 func sanityTestExportSharePoint(cmd *cobra.Command, args []string) error {
-	ctx := cmd.Context()
+	ctx := common.SetDebug(cmd.Context())
 	envs := common.EnvVars(ctx)
 
 	ac, err := common.GetAC()
@@ -190,7 +190,7 @@ func restoreExchangeCMD() *cobra.Command {
 }
 
 func sanityTestRestoreExchange(cmd *cobra.Command, args []string) error {
-	ctx := cmd.Context()
+	ctx := common.SetDebug(cmd.Context())
 	envs := common.EnvVars(ctx)
 
 	ac, err := common.GetAC()
@@ -213,7 +213,7 @@ func restoreOneDriveCMD() *cobra.Command {
 }
 
 func sanityTestRestoreOneDrive(cmd *cobra.Command, args []string) error {
-	ctx := cmd.Context()
+	ctx := common.SetDebug(cmd.Context())
 	envs := common.EnvVars(ctx)
 
 	ac, err := common.GetAC()
@@ -236,7 +236,7 @@ func restoreSharePointCMD() *cobra.Command {
 }
 
 func sanityTestRestoreSharePoint(cmd *cobra.Command, args []string) error {
-	ctx := cmd.Context()
+	ctx := common.SetDebug(cmd.Context())
 	envs := common.EnvVars(ctx)
 
 	ac, err := common.GetAC()
@@ -259,7 +259,7 @@ func restoreGroupsCMD() *cobra.Command {
 }
 
 func sanityTestRestoreGroups(cmd *cobra.Command, args []string) error {
-	ctx := cmd.Context()
+	ctx := common.SetDebug(cmd.Context())
 	envs := common.EnvVars(ctx)
 
 	ac, err := common.GetAC()
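One small fix above: exportCMD's Use field had been copy-pasted as "restore", so the export subcommand registered under the wrong verb. A tiny, hypothetical cobra example (not the sanity-test main.go itself) showing how Use determines the invocation name:

// The Use field is the literal subcommand name that cobra resolves on the CLI.
package main

import (
	"fmt"

	"github.com/spf13/cobra"
)

func main() {
	root := &cobra.Command{Use: "sanity-test"}

	export := &cobra.Command{
		Use:   "export", // must be "export", or `sanity-test export` won't resolve
		Short: "run the post-export sanity tests",
		RunE: func(cmd *cobra.Command, args []string) error {
			fmt.Println("running export checks")
			return nil
		},
	}

	root.AddCommand(export)

	if err := root.Execute(); err != nil {
		fmt.Println(err)
	}
}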
@@ -185,14 +185,14 @@ func M365SiteURL(t *testing.T) string {
 	return strings.ToLower(cfg[TestCfgSiteURL])
 }
 
-// GetM365SiteID returns a siteID string representing the m365SitteID described
+// GetM365SiteID returns a siteID string representing the m365SiteID described
 // by either the env var CORSO_M365_TEST_SITE_ID, the corso_test.toml config
 // file or the default value (in that order of priority). The default is a
 // last-attempt fallback that will only work on alcion's testing org.
 func GetM365SiteID(ctx context.Context) string {
 	cfg, err := ReadTestConfig()
 	if err != nil {
-		logger.Ctx(ctx).Error(err, "retrieving m365 user id from test configuration")
+		logger.Ctx(ctx).Error(err, "retrieving m365 site id from test configuration")
 	}
 
 	return strings.ToLower(cfg[TestCfgSiteID])
@@ -209,6 +209,19 @@ func SecondaryM365SiteID(t *testing.T) string {
 	return strings.ToLower(cfg[TestCfgSecondarySiteID])
 }
 
+// GetM365TeamID returns a groupID string representing the m365TeamID described
+// by either the env var CORSO_M365_TEST_TEAM_ID, the corso_test.toml config
+// file or the default value (in that order of priority). The default is a
+// last-attempt fallback that will only work on alcion's testing org.
+func GetM365TeamID(ctx context.Context) string {
+	cfg, err := ReadTestConfig()
+	if err != nil {
+		logger.Ctx(ctx).Error(err, "retrieving m365 team id from test configuration")
+	}
+
+	return strings.ToLower(cfg[TestCfgTeamID])
+}
+
 // UnlicensedM365UserID returns an userID string representing the m365UserID
 // described by either the env var CORSO_M365_TEST_UNLICENSED_USER, the
 // corso_test.toml config file or the default value (in that order of priority).
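GetM365TeamID follows the same priority chain as its siblings: env var, then corso_test.toml, then a last-resort default. A stdlib-only sketch of that lookup order; the config key and default value here are illustrative assumptions, not the test-config package's actual values:

// Resolve a test team ID from env var, then a parsed config map, then a default.
package main

import (
	"fmt"
	"os"
	"strings"
)

func lookupTeamID(cfg map[string]string) string {
	// 1. an explicit env var wins
	if v, ok := os.LookupEnv("CORSO_M365_TEST_TEAM_ID"); ok && v != "" {
		return strings.ToLower(v)
	}

	// 2. fall back to the parsed config file
	if v := cfg["team_id"]; v != "" {
		return strings.ToLower(v)
	}

	// 3. last-resort default (only meaningful inside the testing org)
	return "example-team-id"
}

func main() {
	fmt.Println(lookupTeamID(map[string]string{"team_id": "Example-Team"}))
}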
@@ -42,7 +42,7 @@ func (p *channelMessagePageCtrl) ValidModTimes() bool {
 
 func (c Channels) NewChannelMessagePager(
 	teamID, channelID string,
-	selectProps ...string,
+	cc CallConfig,
 ) *channelMessagePageCtrl {
 	builder := c.Stable.
 		Client().
@@ -57,8 +57,12 @@ func (c Channels) NewChannelMessagePager(
 		Headers: newPreferHeaders(preferPageSize(maxNonDeltaPageSize)),
 	}
 
-	if len(selectProps) > 0 {
-		options.QueryParameters.Select = selectProps
+	if len(cc.Props) > 0 {
+		options.QueryParameters.Select = cc.Props
+	}
+
+	if len(cc.Expand) > 0 {
+		options.QueryParameters.Expand = cc.Expand
 	}
 
 	return &channelMessagePageCtrl{
@@ -70,6 +74,20 @@ func (c Channels) NewChannelMessagePager(
 	}
 }
 
+// GetChannelMessages fetches a delta of all messages in the channel.
+// returns two maps: addedItems, deletedItems
+func (c Channels) GetChannelMessages(
+	ctx context.Context,
+	teamID, channelID string,
+	cc CallConfig,
+) ([]models.ChatMessageable, error) {
+	ctx = clues.Add(ctx, "channel_id", channelID)
+	pager := c.NewChannelMessagePager(teamID, channelID, cc)
+	items, err := enumerateItems[models.ChatMessageable](ctx, pager)
+
+	return items, graph.Stack(ctx, err).OrNil()
+}
+
 // ---------------------------------------------------------------------------
 // channel message delta pager
 // ---------------------------------------------------------------------------
@@ -163,7 +181,7 @@ func FilterOutSystemMessages(cm models.ChatMessageable) bool {
 		content == channelMessageSystemMessageContent)
 }
 
-// GetChannelMessageIDsDelta fetches a delta of all messages in the channel.
+// GetChannelMessageIDs fetches a delta of all messages in the channel.
 // returns two maps: addedItems, deletedItems
 func (c Channels) GetChannelMessageIDs(
 	ctx context.Context,
@@ -172,7 +190,7 @@ func (c Channels) GetChannelMessageIDs(
 ) (map[string]time.Time, bool, []string, DeltaUpdate, error) {
 	added, validModTimes, removed, du, err := getAddedAndRemovedItemIDs[models.ChatMessageable](
 		ctx,
-		c.NewChannelMessagePager(teamID, channelID),
+		c.NewChannelMessagePager(teamID, channelID, CallConfig{}),
 		c.NewChannelMessageDeltaPager(teamID, channelID, prevDeltaLink),
 		prevDeltaLink,
 		canMakeDeltaQueries,
@@ -136,6 +136,7 @@ func (c Client) Post(
 
 type CallConfig struct {
 	Expand []string
+	Props  []string
 }
 
 // ---------------------------------------------------------------------------
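CallConfig now carries Props alongside Expand, and the pager applies each query option only when it is non-empty. A sketch of that conditional-option pattern, using url.Values as a stand-in for the Graph SDK's typed query parameters:

// Translate a CallConfig-style struct into optional OData-style query params.
package main

import (
	"fmt"
	"net/url"
	"strings"
)

type callConfig struct {
	Expand []string
	Props  []string
}

func buildQuery(cc callConfig) url.Values {
	q := url.Values{}

	// only add $select when specific properties were requested
	if len(cc.Props) > 0 {
		q.Set("$select", strings.Join(cc.Props, ","))
	}

	// only add $expand when related data (e.g. message replies) is wanted
	if len(cc.Expand) > 0 {
		q.Set("$expand", strings.Join(cc.Expand, ","))
	}

	return q
}

func main() {
	q := buildQuery(callConfig{Expand: []string{"replies"}})
	fmt.Println(q.Encode()) // %24expand=replies
}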