Compare commits: main...dependabot (1 commit)

| Author | SHA1 | Date |
|---|---|---|
| | 076a0488df | |
@@ -1,5 +1,4 @@
name: Backup Restore Test
description: Run various backup/restore/export tests for a service.

inputs:
service:
.github/actions/go-setup-cache/action.yml (vendored, 1 change)
@@ -1,5 +1,4 @@
name: Setup and Cache Golang
description: Build golang binaries for later use in CI.

# clone of: https://github.com/magnetikonline/action-golang-cache/blob/main/action.yaml
#
.github/actions/publish-binary/action.yml (vendored, 1 change)
@@ -1,5 +1,4 @@
name: Publish Binary
description: Publish binary artifacts.

inputs:
version:
.github/actions/publish-website/action.yml (vendored, 1 change)
@@ -1,5 +1,4 @@
name: Publish Website
description: Publish website artifacts.

inputs:
aws-iam-role:
.github/actions/purge-m365-data/action.yml (vendored, 20 changes)
@@ -1,5 +1,4 @@
name: Purge M365 User Data
description: Deletes M365 data generated during CI tests.

# Hard deletion of an m365 user's data. Our CI processes create a lot
# of data churn (creation and immediate deletion) of files, the likes
@@ -31,19 +30,12 @@ inputs:
description: Secret value of for AZURE_CLIENT_ID
azure-client-secret:
description: Secret value of for AZURE_CLIENT_SECRET
azure-pnp-client-id:
description: Secret value of AZURE_PNP_CLIENT_ID
azure-pnp-client-cert:
description: Base64 encoded private certificate for the azure-pnp-client-id (Secret value of AZURE_PNP_CLIENT_CERT)
azure-tenant-id:
description: Secret value of AZURE_TENANT_ID
description: Secret value of for AZURE_TENANT_ID
m365-admin-user:
description: Secret value of for M365_TENANT_ADMIN_USER
m365-admin-password:
description: Secret value of for M365_TENANT_ADMIN_PASSWORD
tenant-domain:
description: The domain of the tenant (ex. 10rqc2.onmicrosft.com)
required: true

runs:
using: composite
@@ -88,9 +80,8 @@ runs:
shell: pwsh
working-directory: ./src/cmd/purge/scripts
env:
AZURE_CLIENT_ID: ${{ inputs.azure-pnp-client-id }}
AZURE_APP_CERT: ${{ inputs.azure-pnp-client-cert }}
TENANT_DOMAIN: ${{ inputs.tenant-domain }}
M365_TENANT_ADMIN_USER: ${{ inputs.m365-admin-user }}
M365_TENANT_ADMIN_PASSWORD: ${{ inputs.m365-admin-password }}
run: |
for ($ATTEMPT_NUM = 1; $ATTEMPT_NUM -le 3; $ATTEMPT_NUM++)
{
@@ -108,9 +99,8 @@ runs:
shell: pwsh
working-directory: ./src/cmd/purge/scripts
env:
AZURE_CLIENT_ID: ${{ inputs.azure-pnp-client-id }}
AZURE_APP_CERT: ${{ inputs.azure-pnp-client-cert }}
TENANT_DOMAIN: ${{ inputs.tenant-domain }}
M365_TENANT_ADMIN_USER: ${{ inputs.m365-admin-user }}
M365_TENANT_ADMIN_PASSWORD: ${{ inputs.m365-admin-password }}
run: |
for ($ATTEMPT_NUM = 1; $ATTEMPT_NUM -le 3; $ATTEMPT_NUM++)
{
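Both purge steps above wrap their script invocation in a three-attempt retry loop; the hunks are cut off just after the loop opens. Below is a minimal PowerShell sketch of that retry pattern. The script name, its argument, and the break-on-success handling are illustrative assumptions, not the action's actual loop body.

for ($ATTEMPT_NUM = 1; $ATTEMPT_NUM -le 3; $ATTEMPT_NUM++)
{
    Write-Host "Purge attempt $ATTEMPT_NUM of 3"
    try {
        # Hypothetical script call; the real steps run scripts from
        # ./src/cmd/purge/scripts using the credentials exported in the env block.
        ./Purge-M365Data.ps1 -TenantDomain $ENV:TENANT_DOMAIN
        break  # stop retrying once an attempt succeeds
    }
    catch {
        Write-Host "Attempt $ATTEMPT_NUM failed: $_"
        if ($ATTEMPT_NUM -eq 3) { throw }
    }
}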
.github/actions/teams-message/action.yml (vendored, 1 change)
@@ -1,5 +1,4 @@
name: Send a message to Teams
description: Send messages to communication apps.

inputs:
msg:
.github/actions/website-linting/action.yml (vendored, 1 change)
@@ -1,5 +1,4 @@
name: Lint Website
description: Lint website content.

inputs:
version:
.github/workflows/binary-publish.yml (vendored, 2 changes)
@@ -40,5 +40,5 @@ jobs:
if: failure()
uses: ./.github/actions/teams-message
with:
msg: "[CORSO FAILED] Publishing Binary"
msg: "[FAILED] Publishing Binary"
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
.github/workflows/ci_test_cleanup.yml (vendored, 14 changes)
@@ -12,7 +12,7 @@ jobs:
continue-on-error: true
strategy:
matrix:
user: [CORSO_M365_TEST_USER_ID, CORSO_SECONDARY_M365_TEST_USER_ID, ""]
user: [ CORSO_M365_TEST_USER_ID, CORSO_SECONDARY_M365_TEST_USER_ID, '' ]

steps:
- uses: actions/checkout@v4
@@ -33,15 +33,12 @@ jobs:
azure-tenant-id: ${{ secrets.TENANT_ID }}
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
tenant-domain: ${{ vars.TENANT_DOMAIN }}

- name: Notify failure in teams
if: failure()
uses: ./.github/actions/teams-message
with:
msg: "[CORSO FAILED] ${{ vars[matrix.user] }} CI Cleanup"
msg: "[FAILED] ${{ vars[matrix.user] }} CI Cleanup"
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}

Test-Site-Data-Cleanup:
@@ -50,7 +47,7 @@ jobs:
continue-on-error: true
strategy:
matrix:
site: [CORSO_M365_TEST_SITE_URL, CORSO_M365_TEST_GROUPS_SITE_URL]
site: [ CORSO_M365_TEST_SITE_URL, CORSO_M365_TEST_GROUPS_SITE_URL ]

steps:
- uses: actions/checkout@v4
@@ -73,13 +70,10 @@ jobs:
azure-tenant-id: ${{ secrets.TENANT_ID }}
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
tenant-domain: ${{ vars.TENANT_DOMAIN }}

- name: Notify failure in teams
if: failure()
uses: ./.github/actions/teams-message
with:
msg: "[CORSO FAILED] ${{ vars[matrix.site] }} CI Cleanup"
msg: "[FAILED] ${{ vars[matrix.site] }} CI Cleanup"
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
.github/workflows/load_test.yml (vendored, 3 changes)
@@ -155,6 +155,3 @@ jobs:
azure-tenant-id: ${{ secrets.TENANT_ID }}
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
tenant-domain: ${{ vars.TENANT_DOMAIN }}
.github/workflows/longevity_test.yml (vendored, 16 changes)
@@ -6,7 +6,7 @@ on:
workflow_dispatch:
inputs:
user:
description: "User to run longevity test on"
description: 'User to run longevity test on'

permissions:
# required to retrieve AWS credentials
@@ -23,7 +23,7 @@ jobs:
uses: alcionai/corso/.github/workflows/accSelector.yaml@main

Longevity-Tests:
needs: [SetM365App]
needs: [ SetM365App ]
environment: Testing
runs-on: ubuntu-latest
env:
@@ -37,7 +37,7 @@ jobs:
CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-longevity.log
RESTORE_DEST_PFX: Corso_Test_Longevity_
TEST_USER: ${{ github.event.inputs.user != '' && github.event.inputs.user || vars.CORSO_M365_TEST_USER_ID }}
PREFIX: "longevity"
PREFIX: 'longevity'

# Options for retention.
RETENTION_MODE: GOVERNANCE
@@ -46,7 +46,7 @@ jobs:
defaults:
run:
working-directory: src

############################################################################
# setup
steps:
@@ -78,7 +78,7 @@ jobs:

- run: go build -o corso
timeout-minutes: 10

- run: mkdir ${CORSO_LOG_DIR}

# Use shorter-lived credentials obtained from assume-role since these
@@ -163,7 +163,7 @@ jobs:

data=$( echo $resultjson | jq -r '.[0] | .id' )
echo result=$data >> $GITHUB_OUTPUT

##########################################################################
# Onedrive

@@ -328,7 +328,7 @@ jobs:
--hide-progress \
--force \
--json \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/maintenance_metadata.txt
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/maintenance_metadata.txt

- name: Maintenance test Weekly
id: maintenance-test-weekly
@@ -392,5 +392,5 @@ jobs:
if: failure()
uses: ./.github/actions/teams-message
with:
msg: "[CORSO FAILED] Longevity Test"
msg: "[FAILED] Longevity Test"
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
.github/workflows/nightly_test.yml (vendored, 8 changes)
@@ -48,7 +48,7 @@ jobs:
# ----------------------------------------------------------------------------------------------------

Test-Suite-Trusted:
needs: [Checkout, SetM365App]
needs: [ Checkout, SetM365App]
environment: Testing
runs-on: ubuntu-latest
defaults:
@@ -100,9 +100,9 @@ jobs:
-timeout 2h \
./... 2>&1 | tee ./testlog/gotest-nightly.log | gotestfmt -hide successful-tests

##########################################################################################################################################
##########################################################################################################################################

# Logging & Notifications
# Logging & Notifications

# Upload the original go test output as an artifact for later review.
- name: Upload test log
@@ -118,5 +118,5 @@ jobs:
if: failure()
uses: ./.github/actions/teams-message
with:
msg: "[COROS FAILED] Nightly Checks"
msg: "[FAILED] Nightly Checks"
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
.github/workflows/sanity-test.yaml (vendored, 96 changes)
@@ -6,7 +6,7 @@ on:
workflow_dispatch:
inputs:
user:
description: "User to run sanity test on"
description: 'User to run sanity test on'

permissions:
# required to retrieve AWS credentials
@@ -23,7 +23,7 @@ jobs:
uses: alcionai/corso/.github/workflows/accSelector.yaml@main

Sanity-Tests:
needs: [SetM365App]
needs: [ SetM365App ]
environment: Testing
runs-on: ubuntu-latest
env:
@@ -43,11 +43,12 @@ jobs:
defaults:
run:
working-directory: src

##########################################################################################################################################

##########################################################################################################################################

# setup
# setup
steps:

- uses: actions/checkout@v4

- name: Setup Golang with cache
@@ -63,9 +64,9 @@ jobs:

- run: mkdir ${CORSO_LOG_DIR}

##########################################################################################################################################
##########################################################################################################################################

# Pre-Run cleanup
# Pre-Run cleanup

# unlike CI tests, sanity tests are not expected to run concurrently.
# however, the sanity yaml concurrency is set to a maximum of 1 run, preferring
@@ -90,9 +91,6 @@ jobs:
azure-tenant-id: ${{ secrets.TENANT_ID }}
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
tenant-domain: ${{ vars.TENANT_DOMAIN }}

- name: Purge CI-Produced Folders for Sites
timeout-minutes: 30
@@ -101,20 +99,17 @@ jobs:
with:
site: ${{ vars.CORSO_M365_TEST_SITE_URL }}
folder-prefix: ${{ env.RESTORE_DEST_PFX }}
libraries: ${{ vars.CORSO_M365_TEST_SITE_LIBRARIES }}
libraries: ${{ vars.CORSO_M365_TEST_SITE_LIBRARIES }}
older-than: ${{ env.NOW }}
azure-client-id: ${{ secrets[needs.SetM365App.outputs.client_id_env] }}
azure-client-secret: ${{ secrets[needs.SetM365App.outputs.client_secret_env] }}
azure-tenant-id: ${{ secrets.TENANT_ID }}
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
tenant-domain: ${{ vars.TENANT_DOMAIN }}

##########################################################################################################################################
##########################################################################################################################################

# Repository commands
# Repository commands

- name: Version Test
timeout-minutes: 10
@@ -174,9 +169,9 @@ jobs:
--mode complete \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/gotest-repo-maintenance.log

##########################################################################################################################################
##########################################################################################################################################

# Exchange
# Exchange

# generate new entries to roll into the next load test
# only runs if the test was successful
@@ -198,8 +193,8 @@ jobs:
service: exchange
kind: first-backup
backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true

@@ -211,8 +206,8 @@ jobs:
service: exchange
kind: incremental
backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
backup-id: ${{ steps.exchange-backup.outputs.backup-id }}
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
@@ -225,8 +220,8 @@ jobs:
service: exchange
kind: non-delta
backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email" --disable-delta'
restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
backup-id: ${{ steps.exchange-backup.outputs.backup-id }}
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
@@ -239,15 +234,16 @@ jobs:
service: exchange
kind: non-delta-incremental
backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
backup-id: ${{ steps.exchange-backup.outputs.backup-id }}
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true

##########################################################################################################################################

# Onedrive
##########################################################################################################################################

# Onedrive

# generate new entries for test
- name: OneDrive - Create new data
@@ -274,8 +270,8 @@ jobs:
service: onedrive
kind: first-backup
backup-args: '--user "${{ env.TEST_USER }}"'
restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true

@@ -299,14 +295,14 @@ jobs:
service: onedrive
kind: incremental
backup-args: '--user "${{ env.TEST_USER }}"'
restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true

##########################################################################################################################################
##########################################################################################################################################

# Sharepoint Library
# Sharepoint Library

# generate new entries for test
- name: SharePoint - Create new data
@@ -334,8 +330,8 @@ jobs:
service: sharepoint
kind: first-backup
backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" --data libraries'
restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
category: libraries
@@ -361,15 +357,15 @@ jobs:
service: sharepoint
kind: incremental
backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" --data libraries'
restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
category: libraries

##########################################################################################################################################
##########################################################################################################################################

# Sharepoint Lists
# Sharepoint Lists

# generate new entries for test
# The `awk | tr | sed` command chain is used to get a comma separated list of SharePoint list names.
@@ -422,7 +418,7 @@ jobs:
working-directory: ./src/cmd/factory
run: |
suffix=$(date +"%Y-%m-%d_%H-%M-%S")

go run . sharepoint lists \
--site ${{ vars.CORSO_M365_TEST_SITE_URL }} \
--user ${{ env.TEST_USER }} \
@@ -458,9 +454,9 @@ jobs:
category: lists
on-collision: copy

##########################################################################################################################################
##########################################################################################################################################

# Groups and Teams
# Groups and Teams

# generate new entries for test
- name: Groups - Create new data
@@ -487,8 +483,8 @@ jobs:
with:
service: groups
kind: first-backup
backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}" --data messages,libraries'
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}"
backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}"'
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}'
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true

@@ -512,15 +508,15 @@ jobs:
with:
service: groups
kind: incremental
backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}" --data messages,libraries'
backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}"'
restore-args: '--site "${{ vars.CORSO_M365_TEST_GROUPS_SITE_URL }}" --folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}'
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}"
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}'
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true

##########################################################################################################################################
##########################################################################################################################################

# Logging & Notifications
# Logging & Notifications

# Upload the original go test output as an artifact for later review.
- name: Upload test log
@@ -536,5 +532,5 @@ jobs:
if: failure()
uses: ./.github/actions/teams-message
with:
msg: "[CORSO FAILED] Sanity Tests"
msg: "[FAILED] Sanity Tests"
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
@@ -1,6 +1,3 @@
> [!NOTE]
> **The Corso project is no longer actively maintained and has been archived**.

<p align="center">
<img src="https://github.com/alcionai/corso/blob/main/website/static/img/corso_logo.svg?raw=true" alt="Corso Logo" width="100" />
</p>
@@ -114,8 +114,6 @@ func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_channelMessages() {
}

func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_conversations() {
// skip
suite.T().Skip("CorsoCITeam group mailbox backup is broken")
runGroupsBackupCategoryTest(suite, flags.DataConversations)
}

@@ -219,9 +217,6 @@ func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_badAzureClientIDFlag()
}

func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_fromConfigFile() {
// Skip
suite.T().Skip("CorsoCITeam group mailbox backup is broken")

t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
@@ -305,10 +300,7 @@ func (suite *PreparedBackupGroupsE2ESuite) SetupSuite() {
ins = idname.NewCache(map[string]string{suite.m365.Group.ID: suite.m365.Group.ID})
cats = []path.CategoryType{
path.ChannelMessagesCategory,
// TODO(pandeyabs): CorsoCITeam group mailbox backup is currently broken because of invalid
// odata.NextLink which causes an infinite loop during paging. Disabling conversations tests while
// we go fix the group mailbox.
// path.ConversationPostsCategory,
path.ConversationPostsCategory,
path.LibrariesCategory,
}
)
@@ -462,8 +454,6 @@ func (suite *PreparedBackupGroupsE2ESuite) TestGroupsDetailsCmd_channelMessages(
}

func (suite *PreparedBackupGroupsE2ESuite) TestGroupsDetailsCmd_conversations() {
// skip
suite.T().Skip("CorsoCITeam group mailbox backup is broken")
runGroupsDetailsCmdTest(suite, path.ConversationPostsCategory)
}
@@ -6,6 +6,12 @@ Param (
[Parameter(Mandatory = $False, HelpMessage = "Site for which to delete folders in SharePoint")]
[String]$Site,

[Parameter(Mandatory = $False, HelpMessage = "Exchange Admin email")]
[String]$AdminUser = $ENV:M365_TENANT_ADMIN_USER,

[Parameter(Mandatory = $False, HelpMessage = "Exchange Admin password")]
[String]$AdminPwd = $ENV:M365_TENANT_ADMIN_PASSWORD,

[Parameter(Mandatory = $False, HelpMessage = "Document library root. Can add multiple comma-separated values")]
[String[]]$LibraryNameList = @(),

@@ -16,16 +22,7 @@ Param (
[String[]]$FolderPrefixPurgeList,

[Parameter(Mandatory = $False, HelpMessage = "Delete document libraries with this prefix")]
[String[]]$LibraryPrefixDeleteList = @(),

[Parameter(Mandatory = $False, HelpMessage = "Tenant domain")]
[String]$TenantDomain = $ENV:TENANT_DOMAIN,

[Parameter(Mandatory = $False, HelpMessage = "Azure ClientId")]
[String]$ClientId = $ENV:AZURE_CLIENT_ID,

[Parameter(Mandatory = $False, HelpMessage = "Azure AppCert")]
[String]$AppCert = $ENV:AZURE_APP_CERT
[String[]]$LibraryPrefixDeleteList = @()
)

Set-StrictMode -Version 2.0
@@ -40,7 +37,7 @@ function Get-TimestampFromFolderName {

$name = $folder.Name

#fallback on folder create time
#fallback on folder create time
[datetime]$timestamp = $folder.TimeCreated

try {
@@ -69,7 +66,7 @@ function Get-TimestampFromListName {

$name = $list.Title

#fallback on list create time
#fallback on list create time
[datetime]$timestamp = $list.LastItemUserModifiedDate

try {
@@ -109,9 +106,8 @@ function Purge-Library {
Write-Host "`nPurging library: $LibraryName"

$foldersToPurge = @()
$folders = Get-PnPFolderItem -FolderSiteRelativeUrl $LibraryName -ItemType Folder
$folders = Get-PnPFolderItem -FolderSiteRelativeUrl $LibraryName -ItemType Folder

Write-Host "`nFolders: $folders"
foreach ($f in $folders) {
$folderName = $f.Name
$createTime = Get-TimestampFromFolderName -Folder $f
@@ -163,7 +159,7 @@ function Delete-LibraryByPrefix {
Write-Host "`nDeleting library: $LibraryNamePrefix"

$listsToDelete = @()
$lists = Get-PnPList
$lists = Get-PnPList

foreach ($l in $lists) {
$listName = $l.Title
@@ -187,7 +183,7 @@ function Delete-LibraryByPrefix {
Write-Host "Deleting list: "$l.Title
try {
$listInfo = Get-PnPList -Identity $l.Id | Select-Object -Property Hidden

# Check if the 'hidden' property is true
if ($listInfo.Hidden) {
Write-Host "List: $($l.Title) is hidden. Skipping..."
@@ -213,8 +209,8 @@ if (-not (Get-Module -ListAvailable -Name PnP.PowerShell)) {
}

if ([string]::IsNullOrEmpty($ClientId) -or [string]::IsNullOrEmpty($AppCert)) {
Write-Host "ClientId and AppCert required as arguments or environment variables."
if ([string]::IsNullOrEmpty($AdminUser) -or [string]::IsNullOrEmpty($AdminPwd)) {
Write-Host "Admin user name and password required as arguments or environment variables."
Exit
}

@@ -255,8 +251,12 @@ else {
Exit
}

$password = convertto-securestring -String $AdminPwd -AsPlainText -Force
$cred = New-Object -TypeName System.Management.Automation.PSCredential -ArgumentList $AdminUser, $password

Write-Host "`nAuthenticating and connecting to $SiteUrl"
Connect-PnPOnline -Url $siteUrl -ClientId $ClientId -CertificateBase64Encoded $AppCert -Tenant $TenantDomain
Connect-PnPOnline -Url $siteUrl -Credential $cred
Write-Host "Connected to $siteUrl`n"

# ensure that there are no unexpanded entries in the list of parameters
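The hunks above switch the script from certificate-based app authentication (ClientId and AppCert) to the tenant admin credentials that the CI actions already export. A minimal standalone sketch of that credential flow, assuming the same environment variables are set, looks like this; the Connect-PnPOnline call is left commented because it simply mirrors the changed line above and requires the PnP.PowerShell module and a site URL.

# Read the admin credentials from the environment variables the CI actions export.
$AdminUser = $ENV:M365_TENANT_ADMIN_USER
$AdminPwd = $ENV:M365_TENANT_ADMIN_PASSWORD

# Build the PSCredential object the updated script now passes to Connect-PnPOnline.
$password = ConvertTo-SecureString -String $AdminPwd -AsPlainText -Force
$cred = New-Object -TypeName System.Management.Automation.PSCredential -ArgumentList $AdminUser, $password

# Connect-PnPOnline -Url $siteUrl -Credential $cred   # as in the updated script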
@@ -151,7 +151,7 @@ require (
golang.org/x/sys v0.16.0 // indirect
golang.org/x/text v0.14.0 // indirect
google.golang.org/grpc v1.60.1 // indirect
google.golang.org/protobuf v1.32.0 // indirect
google.golang.org/protobuf v1.33.0 // indirect
gopkg.in/ini.v1 v1.67.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
)
@@ -416,8 +416,8 @@ google.golang.org/grpc v1.60.1 h1:26+wFr+cNqSGFcOXcabYC0lUVJVRa2Sb2ortSK7VrEU=
google.golang.org/grpc v1.60.1/go.mod h1:OlCHIeLYqSSsLi6i49B5QGdzaMZK9+M7LXN2FKz4eGM=
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
google.golang.org/protobuf v1.32.0 h1:pPC6BG5ex8PDFnkbrGU3EixyhKcQ2aDuBS36lqK/C7I=
google.golang.org/protobuf v1.32.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI=
google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
@@ -305,10 +305,6 @@ func RunIncrementalDriveishBackupTest(
itemsRead int
itemsWritten int
nonMetaItemsWritten int

// TODO: Temporary mechanism to skip permissions
// related tests. Remove once we figure out the issue.
skipChecks bool
}{
{
name: "clean incremental, no changes",
@@ -357,7 +353,6 @@ func RunIncrementalDriveishBackupTest(
itemsRead: 1, // .data file for newitem
itemsWritten: 3, // .meta for newitem, .dirmeta for parent (.data is not written as it is not updated)
nonMetaItemsWritten: 0, // none because the file is considered cached instead of written.
skipChecks: true,
},
{
name: "remove permission from new file",
@@ -377,7 +372,6 @@ func RunIncrementalDriveishBackupTest(
itemsRead: 1, // .data file for newitem
itemsWritten: 3, // .meta for newitem, .dirmeta for parent (.data is not written as it is not updated)
nonMetaItemsWritten: 0, // none because the file is considered cached instead of written.
skipChecks: true,
},
{
name: "add permission to container",
@@ -398,7 +392,6 @@ func RunIncrementalDriveishBackupTest(
itemsRead: 0,
itemsWritten: 2, // .dirmeta for collection
nonMetaItemsWritten: 0, // no files updated as update on container
skipChecks: true,
},
{
name: "remove permission from container",
@@ -419,7 +412,6 @@ func RunIncrementalDriveishBackupTest(
itemsRead: 0,
itemsWritten: 2, // .dirmeta for collection
nonMetaItemsWritten: 0, // no files updated
skipChecks: true,
},
{
name: "update contents of a file",
@@ -749,11 +741,9 @@ func RunIncrementalDriveishBackupTest(
assertReadWrite = assert.LessOrEqual
}

if !test.skipChecks {
assertReadWrite(t, expectWrites, incBO.Results.ItemsWritten, "incremental items written")
assertReadWrite(t, expectNonMetaWrites, incBO.Results.NonMetaItemsWritten, "incremental non-meta items written")
assertReadWrite(t, expectReads, incBO.Results.ItemsRead, "incremental items read")
}
assertReadWrite(t, expectWrites, incBO.Results.ItemsWritten, "incremental items written")
assertReadWrite(t, expectNonMetaWrites, incBO.Results.NonMetaItemsWritten, "incremental non-meta items written")
assertReadWrite(t, expectReads, incBO.Results.ItemsRead, "incremental items read")

assert.NoError(t, incBO.Errors.Failure(), "incremental non-recoverable error", clues.ToCore(incBO.Errors.Failure()))
assert.Empty(t, incBO.Errors.Recovered(), "incremental recoverable/iteration errors")
@@ -175,7 +175,7 @@ func runGroupsIncrementalBackupTests(
suite,
opts,
m365.Group.ID,
m365.SecondaryGroup.ID, // more reliable than user
m365.User.ID,
path.GroupsService,
path.LibrariesCategory,
ic,
@@ -201,12 +201,8 @@ func (suite *GroupsBackupIntgSuite) TestBackup_Run_groupsBasic() {

sel.Include(
selTD.GroupsBackupLibraryFolderScope(sel),
selTD.GroupsBackupChannelScope(sel))

// TODO(pandeyabs): CorsoCITeam group mailbox backup is currently broken because of invalid
// odata.NextLink which causes an infinite loop during paging. Disabling conversations tests while
// we go fix the group mailbox.
// selTD.GroupsBackupConversationScope(sel))
selTD.GroupsBackupChannelScope(sel),
selTD.GroupsBackupConversationScope(sel))

bo, bod := PrepNewTestBackupOp(t, ctx, mb, sel.Selector, opts, version.Backup, counter)
defer bod.Close(t, ctx)
@@ -330,12 +326,8 @@ func (suite *GroupsBackupNightlyIntgSuite) TestBackup_Run_groupsVersion9MergeBas
sel := selectors.NewGroupsBackup([]string{suite.m365.Group.ID})
sel.Include(
selTD.GroupsBackupLibraryFolderScope(sel),
selTD.GroupsBackupChannelScope(sel))

// TODO(pandeyabs): CorsoCITeam group mailbox backup is currently broken because of invalid
// odata.NextLink which causes an infinite loop during paging. Disabling conv backups while
// we go fix the group mailbox.
// selTD.GroupsBackupConversationScope(sel))
selTD.GroupsBackupChannelScope(sel),
selTD.GroupsBackupConversationScope(sel))

RunMergeBaseGroupsUpdate(suite, sel.Selector, false)
}
@@ -344,12 +336,8 @@ func (suite *GroupsBackupNightlyIntgSuite) TestBackup_Run_groupsVersion9AssistBa
sel := selectors.NewGroupsBackup([]string{suite.m365.Group.ID})
sel.Include(
selTD.GroupsBackupLibraryFolderScope(sel),
selTD.GroupsBackupChannelScope(sel))

// TODO(pandeyabs): CorsoCITeam group mailbox backup is currently broken because of invalid
// odata.NextLink which causes an infinite loop during paging. Disabling conv backups while
// we go fix the group mailbox.
// selTD.GroupsBackupConversationScope(sel))
selTD.GroupsBackupChannelScope(sel),
selTD.GroupsBackupConversationScope(sel))

RunDriveAssistBaseGroupsUpdate(suite, sel.Selector, false)
}
@@ -32,9 +32,6 @@ func (suite *ConversationsPagerIntgSuite) SetupSuite() {
}

func (suite *ConversationsPagerIntgSuite) TestEnumerateConversations_withThreadsAndPosts() {
// Skip
suite.T().Skip("CorsoCITeam group mailbox backup is broken")

var (
t = suite.T()
ac = suite.its.ac.Conversations()
@@ -701,48 +701,10 @@ func (ode oDataErr) errMessageMatchesAllFilters(err error, fs ...filters.Filter)
// ---------------------------------------------------------------------------
// other helpers
// ---------------------------------------------------------------------------
const (
// JWTQueryParam is a query param embed in graph download URLs which holds
// JWT token.
JWTQueryParam = "tempauth"
// base64 encoded json header. Contains {"alg":"HS256","typ":"JWT"}
//
// Hardcoding this instead of generating it every time on the fly.
// The algorithm doesn't matter as we are not verifying the token.
jwtHeader = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9"
)

func sanitizeToken(rawToken string) string {
segments := strings.Split(rawToken, ".")

// Check if the token has the old format, in which it has 3 segments and
// conforms to jwt spec. Format is seg1.seg2.seg3.
if len(segments) == 3 {
return rawToken
}

// Check if it is a msft proprietary token in which it has 4 segments and
// doesn't meet jwt spec. Format is v1.seg1.seg2.seg3. Return a token which
// meets jwt spec.
//
// In this proprietary token, there is no jwt header segment. Also, the claims
// section is split into first and segments. The first segment contains the
// `exp` claim that we are interested in.
//
// The second segment contains the rest of the claims, but likely encrypted.
// We don't need it so discard it. The last segment contains the signature which
// we don't care about either, as we are not verifying the token. So append it as is.
//
// It's okay if the sanitized token still doesn't meet jwt spec. It'll fail decoding
// later and we have fallbacks for that.
if len(segments) == 4 && segments[0] == "v1" {
return jwtHeader + "." + segments[1] + "." + segments[3]
}

// If MSFT change the token format again on us, just return empty string and let caller
// handle it as an error.
return ""
}
// JWTQueryParam is a query param embed in graph download URLs which holds
// JWT token.
const JWTQueryParam = "tempauth"

// IsURLExpired inspects the jwt token embed in the item download url
// and returns true if it is expired.
@@ -753,20 +715,12 @@ func IsURLExpired(
expiredErr error,
err error,
) {
ctx = clues.Add(ctx, "checked_url", urlStr)

// Extract the raw JWT string from the download url.
rawJWT, err := common.GetQueryParamFromURL(urlStr, JWTQueryParam)
if err != nil {
return nil, clues.WrapWC(ctx, err, "jwt query param not found")
}

// Token may have a proprietary format. Try to sanitize it to jwt format.
rawJWT = sanitizeToken(rawJWT)
if len(rawJWT) == 0 {
return nil, clues.WrapWC(ctx, err, "sanitizing jwt")
}

expired, err := jwt.IsJWTExpired(rawJWT)
if err != nil {
return nil, clues.WrapWC(ctx, err, "checking jwt expiry")
@@ -21,8 +21,7 @@ application to connect to your *M365 tenant* and transfer data during backup and
## Corso concepts {#corso-concepts}

* **Repository** refers to the storage location where Corso securely and efficiently stores encrypted *backups* of your
*M365 Service*'s data. See [Repositories](../repos) for more information.
*M365 Services* data. See [Repositories](../repos) for more information.

* **Backup** is a copy of a resource of your *M365 Service*'s data to be used for restores in case of deletion, loss,
or corruption of the original data. Corso performs backups incrementally, and each backup only captures data that has
changed between backup iterations.
* **Backup** is a copy of your *M365 Services* data to be used for restores in case of deletion, loss, or corruption of the
original data. Corso performs backups incrementally, and each backup only captures data that has changed between backup iterations.
website/package-lock.json (generated, 1395 changes)
File diff suppressed because it is too large.
@@ -15,7 +15,7 @@
},
"dependencies": {
"@docusaurus/core": "3.1.1",
"@docusaurus/plugin-google-gtag": "^3.5.1",
"@docusaurus/plugin-google-gtag": "^3.2.0",
"@docusaurus/preset-classic": "3.1.1",
"@loadable/component": "^5.16.3",
"@mdx-js/react": "^3.0.0",
@@ -29,14 +29,14 @@
"mermaid": "^10.9.0",
"prism-react-renderer": "^2.1.0",
"react": "^18.2.0",
"react-dom": "^18.3.0",
"sass": "^1.79.1",
"react-dom": "^18.2.0",
"sass": "^1.74.1",
"tiny-slider": "^2.9.4",
"tw-elements": "1.0.0-alpha13",
"wow.js": "^1.2.2"
},
"devDependencies": {
"@docusaurus/module-type-aliases": "3.5.1",
"@docusaurus/module-type-aliases": "3.2.0",
"@iconify/react": "^4.1.1",
"autoprefixer": "^10.4.17",
"postcss": "^8.4.33",