Compare commits: main...export_mem

1 commit

| Author | SHA1 | Date |
|---|---|---|
|  | 029cda5cdb |  |
@@ -1,5 +1,4 @@
 name: Backup Restore Test
-description: Run various backup/restore/export tests for a service.

 inputs:
   service:
.github/actions/go-setup-cache/action.yml (vendored, 1 changed line)
@@ -1,5 +1,4 @@
 name: Setup and Cache Golang
-description: Build golang binaries for later use in CI.

 # clone of: https://github.com/magnetikonline/action-golang-cache/blob/main/action.yaml
 #
.github/actions/publish-binary/action.yml (vendored, 1 changed line)
@@ -1,5 +1,4 @@
 name: Publish Binary
-description: Publish binary artifacts.

 inputs:
   version:
.github/actions/publish-website/action.yml (vendored, 1 changed line)
@@ -1,5 +1,4 @@
 name: Publish Website
-description: Publish website artifacts.

 inputs:
   aws-iam-role:
.github/actions/purge-m365-data/action.yml (vendored, 44 changed lines)
@@ -1,5 +1,4 @@
 name: Purge M365 User Data
-description: Deletes M365 data generated during CI tests.

 # Hard deletion of an m365 user's data. Our CI processes create a lot
 # of data churn (creation and immediate deletion) of files, the likes
@@ -31,19 +30,12 @@ inputs:
     description: Secret value of for AZURE_CLIENT_ID
   azure-client-secret:
     description: Secret value of for AZURE_CLIENT_SECRET
-  azure-pnp-client-id:
-    description: Secret value of AZURE_PNP_CLIENT_ID
-  azure-pnp-client-cert:
-    description: Base64 encoded private certificate for the azure-pnp-client-id (Secret value of AZURE_PNP_CLIENT_CERT)
   azure-tenant-id:
-    description: Secret value of AZURE_TENANT_ID
+    description: Secret value of for AZURE_TENANT_ID
   m365-admin-user:
     description: Secret value of for M365_TENANT_ADMIN_USER
   m365-admin-password:
     description: Secret value of for M365_TENANT_ADMIN_PASSWORD
-  tenant-domain:
-    description: The domain of the tenant (ex. 10rqc2.onmicrosft.com)
-    required: true

 runs:
   using: composite
@@ -61,13 +53,7 @@ runs:
        AZURE_CLIENT_ID: ${{ inputs.azure-client-id }}
        AZURE_CLIENT_SECRET: ${{ inputs.azure-client-secret }}
        AZURE_TENANT_ID: ${{ inputs.azure-tenant-id }}
-      run: |
-        for ($ATTEMPT_NUM = 1; $ATTEMPT_NUM -le 3; $ATTEMPT_NUM++)
-        {
-          if (./exchangePurge.ps1 -User ${{ inputs.user }} -FolderNamePurgeList PersonMetadata -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}) {
-            break
-          }
-        }
+      run: ./exchangePurge.ps1 -User ${{ inputs.user }} -FolderNamePurgeList PersonMetadata -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}

    # TODO(ashmrtn): Re-enable when we figure out errors we're seeing with Get-Mailbox call.
    #- name: Reset retention for all mailboxes to 0
@@ -88,16 +74,10 @@ runs:
      shell: pwsh
      working-directory: ./src/cmd/purge/scripts
      env:
-        AZURE_CLIENT_ID: ${{ inputs.azure-pnp-client-id }}
-        AZURE_APP_CERT: ${{ inputs.azure-pnp-client-cert }}
-        TENANT_DOMAIN: ${{ inputs.tenant-domain }}
+        M365_TENANT_ADMIN_USER: ${{ inputs.m365-admin-user }}
+        M365_TENANT_ADMIN_PASSWORD: ${{ inputs.m365-admin-password }}
      run: |
-        for ($ATTEMPT_NUM = 1; $ATTEMPT_NUM -le 3; $ATTEMPT_NUM++)
-        {
-          if (./onedrivePurge.ps1 -User ${{ inputs.user }} -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}) {
-            break
-          }
-        }
+        ./onedrivePurge.ps1 -User ${{ inputs.user }} -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}

    ################################################################################################################
    # Sharepoint
@@ -108,14 +88,6 @@ runs:
      shell: pwsh
      working-directory: ./src/cmd/purge/scripts
      env:
-        AZURE_CLIENT_ID: ${{ inputs.azure-pnp-client-id }}
-        AZURE_APP_CERT: ${{ inputs.azure-pnp-client-cert }}
-        TENANT_DOMAIN: ${{ inputs.tenant-domain }}
-      run: |
-        for ($ATTEMPT_NUM = 1; $ATTEMPT_NUM -le 3; $ATTEMPT_NUM++)
-        {
-          if (./onedrivePurge.ps1 -Site ${{ inputs.site }} -LibraryNameList "${{ inputs.libraries }}".split(",") -FolderPrefixPurgeList ${{ inputs.folder-prefix }} -LibraryPrefixDeleteList ${{ inputs.library-prefix && inputs.library-prefix || '[]' }} -PurgeBeforeTimestamp ${{ inputs.older-than }}) {
-            break
-          }
-        }
+        M365_TENANT_ADMIN_USER: ${{ inputs.m365-admin-user }}
+        M365_TENANT_ADMIN_PASSWORD: ${{ inputs.m365-admin-password }}
+      run: ./onedrivePurge.ps1 -Site ${{ inputs.site }} -LibraryNameList "${{ inputs.libraries }}".split(",") -FolderPrefixPurgeList ${{ inputs.folder-prefix }} -LibraryPrefixDeleteList ${{ inputs.library-prefix && inputs.library-prefix || '[]' }} -PurgeBeforeTimestamp ${{ inputs.older-than }}
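The run-step changes in the purge action above are the behavioral core of this file's diff: on the main side each purge script invocation is wrapped in a three-attempt retry loop that stops on the first successful call, while on the export_mem side the script runs exactly once. A minimal, hedged sketch of that retry pattern in isolation; the script name and argument values below are placeholders, not the real exchangePurge.ps1/onedrivePurge.ps1 invocations:

```powershell
# Sketch only: retry a purge script up to 3 times, stopping on the first
# invocation that returns a truthy result. Placeholder script and values.
$user = "ci-test-user@example.onmicrosoft.com"
$cutoff = (Get-Date).AddDays(-1).ToUniversalTime().ToString("o")

for ($attempt = 1; $attempt -le 3; $attempt++) {
    if (./somePurge.ps1 -User $user -PurgeBeforeTimestamp $cutoff) {
        break
    }
}
```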
.github/actions/teams-message/action.yml (vendored, 1 changed line)
@@ -1,5 +1,4 @@
 name: Send a message to Teams
-description: Send messages to communication apps.

 inputs:
   msg:
.github/actions/website-linting/action.yml (vendored, 1 changed line)
@@ -1,5 +1,4 @@
 name: Lint Website
-description: Lint website content.

 inputs:
   version:
.github/workflows/binary-publish.yml (vendored, 2 changed lines)
@@ -40,5 +40,5 @@ jobs:
        if: failure()
        uses: ./.github/actions/teams-message
        with:
-          msg: "[CORSO FAILED] Publishing Binary"
+          msg: "[FAILED] Publishing Binary"
          teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
.github/workflows/ci_test_cleanup.yml (vendored, 14 changed lines)
@@ -12,7 +12,7 @@ jobs:
    continue-on-error: true
    strategy:
      matrix:
-        user: [CORSO_M365_TEST_USER_ID, CORSO_SECONDARY_M365_TEST_USER_ID, ""]
+        user: [ CORSO_M365_TEST_USER_ID, CORSO_SECONDARY_M365_TEST_USER_ID, '' ]

    steps:
      - uses: actions/checkout@v4
@@ -33,15 +33,12 @@ jobs:
          azure-tenant-id: ${{ secrets.TENANT_ID }}
          m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
          m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
-          azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
-          azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
-          tenant-domain: ${{ vars.TENANT_DOMAIN }}

      - name: Notify failure in teams
        if: failure()
        uses: ./.github/actions/teams-message
        with:
-          msg: "[CORSO FAILED] ${{ vars[matrix.user] }} CI Cleanup"
+          msg: "[FAILED] ${{ vars[matrix.user] }} CI Cleanup"
          teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}

  Test-Site-Data-Cleanup:
@@ -50,7 +47,7 @@ jobs:
    continue-on-error: true
    strategy:
      matrix:
-        site: [CORSO_M365_TEST_SITE_URL, CORSO_M365_TEST_GROUPS_SITE_URL]
+        site: [ CORSO_M365_TEST_SITE_URL, CORSO_M365_TEST_GROUPS_SITE_URL ]

    steps:
      - uses: actions/checkout@v4
@@ -73,13 +70,10 @@ jobs:
          azure-tenant-id: ${{ secrets.TENANT_ID }}
          m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
          m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
-          azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
-          azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
-          tenant-domain: ${{ vars.TENANT_DOMAIN }}

      - name: Notify failure in teams
        if: failure()
        uses: ./.github/actions/teams-message
        with:
-          msg: "[CORSO FAILED] ${{ vars[matrix.site] }} CI Cleanup"
+          msg: "[FAILED] ${{ vars[matrix.site] }} CI Cleanup"
          teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
.github/workflows/load_test.yml (vendored, 3 changed lines)
@@ -155,6 +155,3 @@ jobs:
          azure-tenant-id: ${{ secrets.TENANT_ID }}
          m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
          m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
-          azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
-          azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
-          tenant-domain: ${{ vars.TENANT_DOMAIN }}
.github/workflows/longevity_test.yml (vendored, 8 changed lines)
@@ -6,7 +6,7 @@ on:
  workflow_dispatch:
    inputs:
      user:
-        description: "User to run longevity test on"
+        description: 'User to run longevity test on'

permissions:
  # required to retrieve AWS credentials
@@ -23,7 +23,7 @@ jobs:
    uses: alcionai/corso/.github/workflows/accSelector.yaml@main

  Longevity-Tests:
-    needs: [SetM365App]
+    needs: [ SetM365App ]
    environment: Testing
    runs-on: ubuntu-latest
    env:
@@ -37,7 +37,7 @@ jobs:
      CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-longevity.log
      RESTORE_DEST_PFX: Corso_Test_Longevity_
      TEST_USER: ${{ github.event.inputs.user != '' && github.event.inputs.user || vars.CORSO_M365_TEST_USER_ID }}
-      PREFIX: "longevity"
+      PREFIX: 'longevity'

      # Options for retention.
      RETENTION_MODE: GOVERNANCE
@@ -392,5 +392,5 @@ jobs:
        if: failure()
        uses: ./.github/actions/teams-message
        with:
-          msg: "[CORSO FAILED] Longevity Test"
+          msg: "[FAILED] Longevity Test"
          teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
.github/workflows/nightly_test.yml (vendored, 8 changed lines)
@@ -48,7 +48,7 @@ jobs:
  # ----------------------------------------------------------------------------------------------------

  Test-Suite-Trusted:
-    needs: [Checkout, SetM365App]
+    needs: [ Checkout, SetM365App]
    environment: Testing
    runs-on: ubuntu-latest
    defaults:
@@ -100,9 +100,9 @@ jobs:
            -timeout 2h \
            ./... 2>&1 | tee ./testlog/gotest-nightly.log | gotestfmt -hide successful-tests

      ##########################################################################################################################################

      # Logging & Notifications

      # Upload the original go test output as an artifact for later review.
      - name: Upload test log
@@ -118,5 +118,5 @@ jobs:
        if: failure()
        uses: ./.github/actions/teams-message
        with:
-          msg: "[COROS FAILED] Nightly Checks"
+          msg: "[FAILED] Nightly Checks"
          teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
.github/workflows/sanity-test.yaml (vendored, 90 changed lines)
@@ -6,7 +6,7 @@ on:
  workflow_dispatch:
    inputs:
      user:
-        description: "User to run sanity test on"
+        description: 'User to run sanity test on'

permissions:
  # required to retrieve AWS credentials
@@ -23,7 +23,7 @@ jobs:
    uses: alcionai/corso/.github/workflows/accSelector.yaml@main

  Sanity-Tests:
-    needs: [SetM365App]
+    needs: [ SetM365App ]
    environment: Testing
    runs-on: ubuntu-latest
    env:
@@ -44,10 +44,11 @@ jobs:
      run:
        working-directory: src

    ##########################################################################################################################################

    # setup
    steps:

      - uses: actions/checkout@v4

      - name: Setup Golang with cache
@@ -63,9 +64,9 @@ jobs:

      - run: mkdir ${CORSO_LOG_DIR}

    ##########################################################################################################################################

    # Pre-Run cleanup

      # unlike CI tests, sanity tests are not expected to run concurrently.
      # however, the sanity yaml concurrency is set to a maximum of 1 run, preferring
@@ -90,9 +91,6 @@ jobs:
          azure-tenant-id: ${{ secrets.TENANT_ID }}
          m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
          m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
-          azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
-          azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
-          tenant-domain: ${{ vars.TENANT_DOMAIN }}

      - name: Purge CI-Produced Folders for Sites
        timeout-minutes: 30
@@ -108,13 +106,10 @@ jobs:
          azure-tenant-id: ${{ secrets.TENANT_ID }}
          m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
          m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
-          azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
-          azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
-          tenant-domain: ${{ vars.TENANT_DOMAIN }}

    ##########################################################################################################################################

    # Repository commands

      - name: Version Test
        timeout-minutes: 10
@@ -174,9 +169,9 @@ jobs:
            --mode complete \
            2>&1 | tee ${{ env.CORSO_LOG_DIR }}/gotest-repo-maintenance.log

    ##########################################################################################################################################

    # Exchange

      # generate new entries to roll into the next load test
      # only runs if the test was successful
@@ -198,8 +193,8 @@ jobs:
          service: exchange
          kind: first-backup
          backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
-          restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
-          restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
+          restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
+          restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
          log-dir: ${{ env.CORSO_LOG_DIR }}
          with-export: true

@@ -211,8 +206,8 @@ jobs:
          service: exchange
          kind: incremental
          backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
-          restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
-          restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
+          restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
+          restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
          backup-id: ${{ steps.exchange-backup.outputs.backup-id }}
          log-dir: ${{ env.CORSO_LOG_DIR }}
          with-export: true
@@ -225,8 +220,8 @@ jobs:
          service: exchange
          kind: non-delta
          backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email" --disable-delta'
-          restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
-          restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
+          restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
+          restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
          backup-id: ${{ steps.exchange-backup.outputs.backup-id }}
          log-dir: ${{ env.CORSO_LOG_DIR }}
          with-export: true
@@ -239,15 +234,16 @@ jobs:
          service: exchange
          kind: non-delta-incremental
          backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
-          restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
-          restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
+          restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
+          restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
          backup-id: ${{ steps.exchange-backup.outputs.backup-id }}
          log-dir: ${{ env.CORSO_LOG_DIR }}
          with-export: true

+
    ##########################################################################################################################################

    # Onedrive

      # generate new entries for test
      - name: OneDrive - Create new data
@@ -274,8 +270,8 @@ jobs:
          service: onedrive
          kind: first-backup
          backup-args: '--user "${{ env.TEST_USER }}"'
-          restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
-          restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
+          restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
+          restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
          log-dir: ${{ env.CORSO_LOG_DIR }}
          with-export: true

@@ -299,14 +295,14 @@ jobs:
          service: onedrive
          kind: incremental
          backup-args: '--user "${{ env.TEST_USER }}"'
-          restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
-          restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
+          restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
+          restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
          log-dir: ${{ env.CORSO_LOG_DIR }}
          with-export: true

    ##########################################################################################################################################

    # Sharepoint Library

      # generate new entries for test
      - name: SharePoint - Create new data
@@ -334,8 +330,8 @@ jobs:
          service: sharepoint
          kind: first-backup
          backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" --data libraries'
-          restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
-          restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
+          restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
+          restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
          log-dir: ${{ env.CORSO_LOG_DIR }}
          with-export: true
          category: libraries
@@ -361,15 +357,15 @@ jobs:
          service: sharepoint
          kind: incremental
          backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" --data libraries'
-          restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
-          restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
+          restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
+          restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
          log-dir: ${{ env.CORSO_LOG_DIR }}
          with-export: true
          category: libraries

    ##########################################################################################################################################

    # Sharepoint Lists

      # generate new entries for test
      # The `awk | tr | sed` command chain is used to get a comma separated list of SharePoint list names.
@@ -458,9 +454,9 @@ jobs:
          category: lists
          on-collision: copy

    ##########################################################################################################################################

    # Groups and Teams

      # generate new entries for test
      - name: Groups - Create new data
@@ -487,8 +483,8 @@ jobs:
        with:
          service: groups
          kind: first-backup
-          backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}" --data messages,libraries'
-          restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}"
+          backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}"'
+          restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}'
          log-dir: ${{ env.CORSO_LOG_DIR }}
          with-export: true

@@ -512,15 +508,15 @@ jobs:
        with:
          service: groups
          kind: incremental
-          backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}" --data messages,libraries'
+          backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}"'
          restore-args: '--site "${{ vars.CORSO_M365_TEST_GROUPS_SITE_URL }}" --folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}'
-          restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}"
+          restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}'
          log-dir: ${{ env.CORSO_LOG_DIR }}
          with-export: true

    ##########################################################################################################################################

    # Logging & Notifications

      # Upload the original go test output as an artifact for later review.
      - name: Upload test log
@@ -536,5 +532,5 @@ jobs:
        if: failure()
        uses: ./.github/actions/teams-message
        with:
-          msg: "[CORSO FAILED] Sanity Tests"
+          msg: "[FAILED] Sanity Tests"
          teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
@@ -11,7 +11,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - Emails attached within other emails are now correctly exported
 - Gracefully handle email and post attachments without name when exporting to eml
 - Use correct timezone for event start and end times in Exchange exports (helps fix issues in relative recurrence patterns)
-- Fixed an issue causing exports dealing with calendar data to have high memory usage

 ## [v0.19.0] (beta) - 2024-02-06

@@ -1,6 +1,3 @@
-> [!NOTE]
-> **The Corso project is no longer actively maintained and has been archived**.
-
 <p align="center">
   <img src="https://github.com/alcionai/corso/blob/main/website/static/img/corso_logo.svg?raw=true" alt="Corso Logo" width="100" />
 </p>
@@ -114,8 +114,6 @@ func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_channelMessages() {
 }

 func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_conversations() {
-    // skip
-    suite.T().Skip("CorsoCITeam group mailbox backup is broken")
     runGroupsBackupCategoryTest(suite, flags.DataConversations)
 }

@@ -219,9 +217,6 @@ func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_badAzureClientIDFlag()
 }

 func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_fromConfigFile() {
-    // Skip
-    suite.T().Skip("CorsoCITeam group mailbox backup is broken")
-
     t := suite.T()
     ctx, flush := tester.NewContext(t)
     ctx = config.SetViper(ctx, suite.dpnd.vpr)
@@ -305,10 +300,7 @@ func (suite *PreparedBackupGroupsE2ESuite) SetupSuite() {
     ins  = idname.NewCache(map[string]string{suite.m365.Group.ID: suite.m365.Group.ID})
     cats = []path.CategoryType{
         path.ChannelMessagesCategory,
-        // TODO(pandeyabs): CorsoCITeam group mailbox backup is currently broken because of invalid
-        // odata.NextLink which causes an infinite loop during paging. Disabling conversations tests while
-        // we go fix the group mailbox.
-        // path.ConversationPostsCategory,
+        path.ConversationPostsCategory,
         path.LibrariesCategory,
     }
 )
@@ -462,8 +454,6 @@ func (suite *PreparedBackupGroupsE2ESuite) TestGroupsDetailsCmd_channelMessages(
 }

 func (suite *PreparedBackupGroupsE2ESuite) TestGroupsDetailsCmd_conversations() {
-    // skip
-    suite.T().Skip("CorsoCITeam group mailbox backup is broken")
     runGroupsDetailsCmdTest(suite, path.ConversationPostsCategory)
 }
src/cli/debug/profile.go (new file, 90 added lines)
@@ -0,0 +1,90 @@
+package debug
+
+import (
+    "context"
+    "log"
+    "os"
+    "runtime"
+    "runtime/pprof"
+    "strconv"
+    "time"
+
+    "github.com/pkg/profile"
+
+    "github.com/alcionai/corso/src/pkg/logger"
+)
+
+var (
+    profileTicker    = time.NewTicker(1 * time.Second)
+    timeSinceRefresh = time.Now()
+    printTicker      = time.NewTicker(1 * time.Second)
+    profileCounter   = 0
+)
+
+func SetupMemoryProfile() {
+    defer profile.Start(profile.MemProfile).Stop()
+
+    // debug.SetMemoryLimit(0.5 * 1024 * 1024 * 1024)
+
+    go func() {
+        //nolint:gosimple
+        for {
+            select {
+            case <-profileTicker.C:
+                var m runtime.MemStats
+
+                runtime.ReadMemStats(&m)
+
+                // If it's been 90 secs since last pprof capture, take another one.
+                if time.Since(timeSinceRefresh) > 90*time.Second {
+                    filename := "mem." + strconv.Itoa(profileCounter) + ".pprof"
+
+                    f, _ := os.Create(filename)
+                    if err := pprof.WriteHeapProfile(f); err != nil {
+                        log.Fatal("could not write memory profile: ", err)
+                    }
+
+                    f.Close()
+
+                    profileCounter++
+
+                    timeSinceRefresh = time.Now()
+                }
+            }
+        }
+    }()
+
+    go func() {
+        //nolint:gosimple
+        for {
+            select {
+            case <-printTicker.C:
+                PrintMemUsage()
+            }
+        }
+    }()
+}
+
+// PrintMemUsage outputs the current, total and OS memory being used. As well as the number
+// of garage collection cycles completed.
+func PrintMemUsage() {
+    ctx := context.Background()
+
+    var m runtime.MemStats
+
+    runtime.ReadMemStats(&m)
+    // For info on each, see: https://golang.org/pkg/runtime/#MemStats
+    logger.Ctx(ctx).Info("HeapAlloc = ", bToMb(m.HeapAlloc), " MB") // same as Alloc
+
+    logger.Ctx(ctx).Info("HeapReleased = ", bToMb(m.HeapReleased), " MB")
+    logger.Ctx(ctx).Info("HeapObjects = ", bToMb(m.HeapObjects), " MB")
+    logger.Ctx(ctx).Info("HeapSys = ", bToMb(m.HeapSys), " MB")
+    logger.Ctx(ctx).Info("HeapIdle = ", bToMb(m.HeapIdle), " MB")
+    logger.Ctx(ctx).Info("HeapInuse = ", bToMb(m.HeapInuse), " MB")
+
+    logger.Ctx(ctx).Info("NumGC = ", m.NumGC)
+}
+
+func bToMb(b uint64) uint64 {
+    return b / 1024 / 1024
+}
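profile.go above writes heap snapshots named mem.0.pprof, mem.1.pprof, and so on into the working directory, roughly every 90 seconds while the process runs. A hedged sketch of inspecting those snapshots afterwards; the file names follow the pattern in the code above, everything else is generic go tool pprof usage:

```sh
# Top heap allocators in the first snapshot.
go tool pprof -top mem.0.pprof

# Diff two snapshots to see what grew between captures.
go tool pprof -top -base=mem.0.pprof mem.3.pprof

# Interactive web UI (flame graph, source view) on a later snapshot.
go tool pprof -http=:8080 mem.3.pprof
```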
@@ -6,6 +6,12 @@ Param (
     [Parameter(Mandatory = $False, HelpMessage = "Site for which to delete folders in SharePoint")]
     [String]$Site,

+    [Parameter(Mandatory = $False, HelpMessage = "Exchange Admin email")]
+    [String]$AdminUser = $ENV:M365_TENANT_ADMIN_USER,
+
+    [Parameter(Mandatory = $False, HelpMessage = "Exchange Admin password")]
+    [String]$AdminPwd = $ENV:M365_TENANT_ADMIN_PASSWORD,
+
     [Parameter(Mandatory = $False, HelpMessage = "Document library root. Can add multiple comma-separated values")]
     [String[]]$LibraryNameList = @(),

@@ -16,16 +22,7 @@ Param (
     [String[]]$FolderPrefixPurgeList,

     [Parameter(Mandatory = $False, HelpMessage = "Delete document libraries with this prefix")]
-    [String[]]$LibraryPrefixDeleteList = @(),
-
-    [Parameter(Mandatory = $False, HelpMessage = "Tenant domain")]
-    [String]$TenantDomain = $ENV:TENANT_DOMAIN,
-
-    [Parameter(Mandatory = $False, HelpMessage = "Azure ClientId")]
-    [String]$ClientId = $ENV:AZURE_CLIENT_ID,
-
-    [Parameter(Mandatory = $False, HelpMessage = "Azure AppCert")]
-    [String]$AppCert = $ENV:AZURE_APP_CERT
+    [String[]]$LibraryPrefixDeleteList = @()
 )

 Set-StrictMode -Version 2.0
@@ -111,7 +108,6 @@ function Purge-Library {
     $foldersToPurge = @()
     $folders = Get-PnPFolderItem -FolderSiteRelativeUrl $LibraryName -ItemType Folder

-    Write-Host "`nFolders: $folders"
     foreach ($f in $folders) {
         $folderName = $f.Name
         $createTime = Get-TimestampFromFolderName -Folder $f
@@ -213,8 +209,8 @@ if (-not (Get-Module -ListAvailable -Name PnP.PowerShell)) {
 }


-if ([string]::IsNullOrEmpty($ClientId) -or [string]::IsNullOrEmpty($AppCert)) {
-    Write-Host "ClientId and AppCert required as arguments or environment variables."
+if ([string]::IsNullOrEmpty($AdminUser) -or [string]::IsNullOrEmpty($AdminPwd)) {
+    Write-Host "Admin user name and password required as arguments or environment variables."
     Exit
 }

@@ -255,8 +251,12 @@ else {
     Exit
 }

+
+$password = convertto-securestring -String $AdminPwd -AsPlainText -Force
+$cred = New-Object -TypeName System.Management.Automation.PSCredential -ArgumentList $AdminUser, $password
+
 Write-Host "`nAuthenticating and connecting to $SiteUrl"
-Connect-PnPOnline -Url $siteUrl -ClientId $ClientId -CertificateBase64Encoded $AppCert -Tenant $TenantDomain
+Connect-PnPOnline -Url $siteUrl -Credential $cred
 Write-Host "Connected to $siteUrl`n"

 # ensure that there are no unexpanded entries in the list of parameters
@@ -2,8 +2,12 @@ package main

 import (
     "github.com/alcionai/corso/src/cli"
+    "github.com/alcionai/corso/src/cli/debug"
 )

 func main() {
+    // pprof and memstats hooks
+    debug.SetupMemoryProfile()
+
     cli.Handle()
 }
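For comparison with the wiring above: github.com/pkg/profile is normally driven straight from main, with Stop deferred for the lifetime of the process, whereas SetupMemoryProfile defers the Stop inside its own call. A minimal, hedged sketch of the conventional pattern; the environment variable and run helper below are illustrative placeholders, not part of the diff:

```go
package main

import (
	"os"

	"github.com/pkg/profile"
)

func main() {
	// Optionally write a heap profile covering the whole process lifetime.
	// MEM_PROFILE_DIR is an illustrative env var, not something corso reads.
	if dir := os.Getenv("MEM_PROFILE_DIR"); dir != "" {
		defer profile.Start(
			profile.MemProfile,
			profile.ProfilePath(dir), // controls where mem.pprof lands
		).Stop()
	}

	run() // placeholder for the real CLI entrypoint, e.g. cli.Handle()
}

func run() {}
```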
@@ -3,7 +3,7 @@ module github.com/alcionai/corso/src
 go 1.21

 replace (
-    github.com/kopia/kopia => github.com/alcionai/kopia v0.12.2-0.20240322180947-41471159a0a4
+    github.com/kopia/kopia => github.com/alcionai/kopia v0.12.2-0.20240116215733-ec3d100029fe

     // Alcion fork removes the validation of email addresses as we might get incomplete email addresses
     github.com/xhit/go-simple-mail/v2 v2.16.0 => github.com/alcionai/go-simple-mail/v2 v2.0.0-20231220071811-c70ebcd9a41a
@@ -63,10 +63,12 @@ require (
     github.com/andybalholm/brotli v1.0.6 // indirect
     github.com/aws/aws-sdk-go v1.48.6 // indirect
     github.com/cention-sany/utf7 v0.0.0-20170124080048-26cad61bd60a // indirect
+    github.com/felixge/fgprof v0.9.3 // indirect
     github.com/fsnotify/fsnotify v1.7.0 // indirect
     github.com/gofrs/flock v0.8.1 // indirect
     github.com/gogs/chardet v0.0.0-20211120154057-b7413eaefb8f // indirect
     github.com/google/go-cmp v0.6.0 // indirect
+    github.com/google/pprof v0.0.0-20230602150820-91b7bce49751 // indirect
     github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542 // indirect
     github.com/hashicorp/cronexpr v1.1.2 // indirect
     github.com/hashicorp/go-immutable-radix v1.3.1 // indirect
@@ -128,6 +130,7 @@ require (
     github.com/natefinch/atomic v1.0.1 // indirect
     github.com/pierrec/lz4 v2.6.1+incompatible // indirect
     github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect
+    github.com/pkg/profile v1.7.0
     github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
     github.com/prometheus/client_golang v1.18.0 // indirect
     github.com/prometheus/client_model v0.5.0 // indirect
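The new requirements above (github.com/pkg/profile plus the indirect felixge/fgprof and google/pprof entries) are the kind of change that normally falls out of adding the import and re-tidying the module. A hedged sketch of the usual commands, not a record of what was actually run; the version matches the one in the diff:

```sh
# From the module root (src/).
go get github.com/pkg/profile@v1.7.0
go mod tidy
```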
16
src/go.sum
16
src/go.sum
@ -23,8 +23,8 @@ github.com/alcionai/clues v0.0.0-20240125221452-9fc7746dd20c h1:QtARFaqYKtGjmEej
|
|||||||
github.com/alcionai/clues v0.0.0-20240125221452-9fc7746dd20c/go.mod h1:1YJwJy3W6GGsC2UiDAEWABUjgvT8OZHjKs8OoaXeKbw=
|
github.com/alcionai/clues v0.0.0-20240125221452-9fc7746dd20c/go.mod h1:1YJwJy3W6GGsC2UiDAEWABUjgvT8OZHjKs8OoaXeKbw=
|
||||||
github.com/alcionai/go-simple-mail/v2 v2.0.0-20231220071811-c70ebcd9a41a h1:4nhM0NM1qtUT1s55rQ+D0Xw1Re5mIU9/crjEl6KdE+k=
|
github.com/alcionai/go-simple-mail/v2 v2.0.0-20231220071811-c70ebcd9a41a h1:4nhM0NM1qtUT1s55rQ+D0Xw1Re5mIU9/crjEl6KdE+k=
|
||||||
github.com/alcionai/go-simple-mail/v2 v2.0.0-20231220071811-c70ebcd9a41a/go.mod h1:b7P5ygho6SYE+VIqpxA6QkYfv4teeyG4MKqB3utRu98=
|
github.com/alcionai/go-simple-mail/v2 v2.0.0-20231220071811-c70ebcd9a41a/go.mod h1:b7P5ygho6SYE+VIqpxA6QkYfv4teeyG4MKqB3utRu98=
|
||||||
github.com/alcionai/kopia v0.12.2-0.20240322180947-41471159a0a4 h1:3YZ70H3mkUgwiHLiNvukrqh2awRgfl1RAkbV0IoUqqk=
|
github.com/alcionai/kopia v0.12.2-0.20240116215733-ec3d100029fe h1:nLS5pxhm04Jz4+qeipNlxdyPGxqNWpBu8UGkRYpWoIw=
|
||||||
github.com/alcionai/kopia v0.12.2-0.20240322180947-41471159a0a4/go.mod h1:QFRSOUQzZfKE3hKVBHP7hxOn5WyrEmdBtfN5wkib/eA=
|
github.com/alcionai/kopia v0.12.2-0.20240116215733-ec3d100029fe/go.mod h1:QFRSOUQzZfKE3hKVBHP7hxOn5WyrEmdBtfN5wkib/eA=
|
||||||
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
|
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
|
||||||
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
|
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
|
||||||
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
|
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
|
||||||
@ -56,6 +56,9 @@ github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj
|
|||||||
github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
|
github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
|
||||||
github.com/chmduquesne/rollinghash v4.0.0+incompatible h1:hnREQO+DXjqIw3rUTzWN7/+Dpw+N5Um8zpKV0JOEgbo=
|
github.com/chmduquesne/rollinghash v4.0.0+incompatible h1:hnREQO+DXjqIw3rUTzWN7/+Dpw+N5Um8zpKV0JOEgbo=
|
||||||
github.com/chmduquesne/rollinghash v4.0.0+incompatible/go.mod h1:Uc2I36RRfTAf7Dge82bi3RU0OQUmXT9iweIcPqvr8A0=
|
github.com/chmduquesne/rollinghash v4.0.0+incompatible/go.mod h1:Uc2I36RRfTAf7Dge82bi3RU0OQUmXT9iweIcPqvr8A0=
|
||||||
|
github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=
|
||||||
|
github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=
|
||||||
|
github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
|
||||||
github.com/circonus-labs/circonus-gometrics v2.3.1+incompatible/go.mod h1:nmEj6Dob7S7YxXgwXpfOuvO54S+tGdZdw9fuRZt25Ag=
|
github.com/circonus-labs/circonus-gometrics v2.3.1+incompatible/go.mod h1:nmEj6Dob7S7YxXgwXpfOuvO54S+tGdZdw9fuRZt25Ag=
|
||||||
github.com/circonus-labs/circonusllhist v0.1.3/go.mod h1:kMXHVDlOchFAehlya5ePtbp5jckzBHf4XRpQvBOLI+I=
|
github.com/circonus-labs/circonusllhist v0.1.3/go.mod h1:kMXHVDlOchFAehlya5ePtbp5jckzBHf4XRpQvBOLI+I=
|
||||||
github.com/cjlapao/common-go v0.0.39 h1:bAAUrj2B9v0kMzbAOhzjSmiyDy+rd56r2sy7oEiQLlA=
|
github.com/cjlapao/common-go v0.0.39 h1:bAAUrj2B9v0kMzbAOhzjSmiyDy+rd56r2sy7oEiQLlA=
|
||||||
@ -78,6 +81,8 @@ github.com/emersion/go-vcard v0.0.0-20230815062825-8fda7d206ec9 h1:ATgqloALX6cHC
|
|||||||
github.com/emersion/go-vcard v0.0.0-20230815062825-8fda7d206ec9/go.mod h1:HMJKR5wlh/ziNp+sHEDV2ltblO4JD2+IdDOWtGcQBTM=
|
github.com/emersion/go-vcard v0.0.0-20230815062825-8fda7d206ec9/go.mod h1:HMJKR5wlh/ziNp+sHEDV2ltblO4JD2+IdDOWtGcQBTM=
|
||||||
github.com/fatih/color v1.16.0 h1:zmkK9Ngbjj+K0yRhTVONQh1p/HknKYSlNT+vZCzyokM=
|
github.com/fatih/color v1.16.0 h1:zmkK9Ngbjj+K0yRhTVONQh1p/HknKYSlNT+vZCzyokM=
|
||||||
github.com/fatih/color v1.16.0/go.mod h1:fL2Sau1YI5c0pdGEVCbKQbLXB6edEj1ZgiY4NijnWvE=
|
github.com/fatih/color v1.16.0/go.mod h1:fL2Sau1YI5c0pdGEVCbKQbLXB6edEj1ZgiY4NijnWvE=
|
||||||
|
github.com/felixge/fgprof v0.9.3 h1:VvyZxILNuCiUCSXtPtYmmtGvb65nqXh2QFWc0Wpf2/g=
|
||||||
|
github.com/felixge/fgprof v0.9.3/go.mod h1:RdbpDgzqYVh/T9fPELJyV7EYJuHB55UTEULNun8eiPw=
|
||||||
github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8=
|
github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8=
|
||||||
github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
|
github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
|
||||||
github.com/fsnotify/fsnotify v1.7.0 h1:8JEhPFa5W2WU7YfeZzPNqzMP6Lwt7L2715Ggo0nosvA=
|
github.com/fsnotify/fsnotify v1.7.0 h1:8JEhPFa5W2WU7YfeZzPNqzMP6Lwt7L2715Ggo0nosvA=
|
||||||
@ -117,6 +122,9 @@ github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
|
|||||||
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
|
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
|
||||||
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||||
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
|
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
|
||||||
|
github.com/google/pprof v0.0.0-20211214055906-6f57359322fd/go.mod h1:KgnwoLYCZ8IQu3XUZ8Nc/bM9CCZFOyjUNOSygVozoDg=
|
||||||
|
github.com/google/pprof v0.0.0-20230602150820-91b7bce49751 h1:hR7/MlvK23p6+lIw9SN1TigNLn9ZnF3W4SYRKq2gAHs=
|
||||||
|
github.com/google/pprof v0.0.0-20230602150820-91b7bce49751/go.mod h1:Jh3hGz2jkYak8qXPD19ryItVnUgpgeqzdkY/D0EaeuA=
|
||||||
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
|
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
|
||||||
 github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
 github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY=
@@ -143,6 +151,7 @@ github.com/hashicorp/golang-lru v1.0.2 h1:dV3g9Z/unq5DpblPpw+Oqcv4dU/1omnb4Ok8iP
 github.com/hashicorp/golang-lru v1.0.2/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4=
 github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4=
 github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=
+github.com/ianlancetaylor/demangle v0.0.0-20210905161508-09a460cdf81d/go.mod h1:aYm2/VgdVmcIU8iMfdMvDMsRAQjcfZSKFby6HOFvi/w=
 github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
 github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
 github.com/jaytaylor/html2text v0.0.0-20230321000545-74c2419ad056 h1:iCHtR9CQyktQ5+f3dMVZfwD2KWJUgm7M0gdL9NGr8KA=
@@ -252,6 +261,8 @@ github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINE
 github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
 github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
 github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
+github.com/pkg/profile v1.7.0 h1:hnbDkaNWPCLMO9wGLdBFTIZvzDrDfBM2072E1S9gJkA=
+github.com/pkg/profile v1.7.0/go.mod h1:8Uer0jas47ZQMJ7VD+OHknK4YDY07LPUC6dEvqDjvNo=
 github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
 github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
 github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
@@ -394,6 +405,7 @@ golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7w
 golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
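The `+` lines above add `github.com/pkg/profile` (plus `go.mod` entries for its transitive dependencies) to `go.sum`. Assuming the branch uses it to capture memory profiles of export runs — the branch name hints at that, but this hunk only shows the dependency — typical usage of the library is a single deferred call:

```go
package main

import "github.com/pkg/profile"

func main() {
	// Capture a heap profile for the life of the process and write
	// mem.pprof to the working directory; inspect it with `go tool pprof`.
	defer profile.Start(profile.MemProfile, profile.ProfilePath(".")).Stop()

	runExport() // hypothetical stand-in for the allocation-heavy work
}

// runExport is a placeholder; the real code path under study is not shown in this diff.
func runExport() {}
```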
@@ -484,14 +484,7 @@ func updateEventProperties(ctx context.Context, event models.Eventable, iCalEven
 		desc := replacer.Replace(description)
 		iCalEvent.AddProperty("X-ALT-DESC", desc, ics.WithFmtType("text/html"))
 	} else {
-		// Disable auto wrap, causes huge memory spikes
-		// https://github.com/jaytaylor/html2text/issues/48
-		prettyTablesOptions := html2text.NewPrettyTablesOptions()
-		prettyTablesOptions.AutoWrapText = false
-
-		stripped, err := html2text.FromString(
-			description,
-			html2text.Options{PrettyTables: true, PrettyTablesOptions: prettyTablesOptions})
+		stripped, err := html2text.FromString(description, html2text.Options{PrettyTables: true})
 		if err != nil {
 			return clues.Wrap(err, "converting html to text").
 				With("description_length", len(description))
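The `-` side of this hunk avoids large memory spikes in html2text by disabling table auto-wrapping (jaytaylor/html2text#48, linked in the removed comment). A minimal standalone sketch of that conversion path, using only the options visible in the hunk; the sample HTML is made up:

```go
package main

import (
	"fmt"
	"log"

	"github.com/jaytaylor/html2text"
)

func main() {
	// Hypothetical event description; in Corso this is the HTML body being
	// flattened for the ICS export.
	description := "<table><tr><td>a very long cell that would otherwise be re-wrapped</td></tr></table>"

	// Turning off auto-wrap in the pretty-table renderer sidesteps the large
	// allocations reported for wide table cells.
	pretty := html2text.NewPrettyTablesOptions()
	pretty.AutoWrapText = false

	stripped, err := html2text.FromString(
		description,
		html2text.Options{PrettyTables: true, PrettyTablesOptions: pretty})
	if err != nil {
		log.Fatal(err)
	}

	fmt.Println(stripped)
}
```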
@@ -305,10 +305,6 @@ func RunIncrementalDriveishBackupTest(
 		itemsRead int
 		itemsWritten int
 		nonMetaItemsWritten int
-
-		// TODO: Temporary mechanism to skip permissions
-		// related tests. Remove once we figure out the issue.
-		skipChecks bool
 	}{
 		{
 			name: "clean incremental, no changes",
@@ -357,7 +353,6 @@ func RunIncrementalDriveishBackupTest(
 			itemsRead: 1, // .data file for newitem
 			itemsWritten: 3, // .meta for newitem, .dirmeta for parent (.data is not written as it is not updated)
 			nonMetaItemsWritten: 0, // none because the file is considered cached instead of written.
-			skipChecks: true,
 		},
 		{
 			name: "remove permission from new file",
@@ -377,7 +372,6 @@ func RunIncrementalDriveishBackupTest(
 			itemsRead: 1, // .data file for newitem
 			itemsWritten: 3, // .meta for newitem, .dirmeta for parent (.data is not written as it is not updated)
 			nonMetaItemsWritten: 0, // none because the file is considered cached instead of written.
-			skipChecks: true,
 		},
 		{
 			name: "add permission to container",
@@ -398,7 +392,6 @@ func RunIncrementalDriveishBackupTest(
 			itemsRead: 0,
 			itemsWritten: 2, // .dirmeta for collection
 			nonMetaItemsWritten: 0, // no files updated as update on container
-			skipChecks: true,
 		},
 		{
 			name: "remove permission from container",
@@ -419,7 +412,6 @@ func RunIncrementalDriveishBackupTest(
 			itemsRead: 0,
 			itemsWritten: 2, // .dirmeta for collection
 			nonMetaItemsWritten: 0, // no files updated
-			skipChecks: true,
 		},
 		{
 			name: "update contents of a file",
@@ -749,11 +741,9 @@ func RunIncrementalDriveishBackupTest(
 			assertReadWrite = assert.LessOrEqual
 		}
 
-		if !test.skipChecks {
-			assertReadWrite(t, expectWrites, incBO.Results.ItemsWritten, "incremental items written")
-			assertReadWrite(t, expectNonMetaWrites, incBO.Results.NonMetaItemsWritten, "incremental non-meta items written")
-			assertReadWrite(t, expectReads, incBO.Results.ItemsRead, "incremental items read")
-		}
+		assertReadWrite(t, expectWrites, incBO.Results.ItemsWritten, "incremental items written")
+		assertReadWrite(t, expectNonMetaWrites, incBO.Results.NonMetaItemsWritten, "incremental non-meta items written")
+		assertReadWrite(t, expectReads, incBO.Results.ItemsRead, "incremental items read")
 
 		assert.NoError(t, incBO.Errors.Failure(), "incremental non-recoverable error", clues.ToCore(incBO.Errors.Failure()))
 		assert.Empty(t, incBO.Errors.Recovered(), "incremental recoverable/iteration errors")
@@ -175,7 +175,7 @@ func runGroupsIncrementalBackupTests(
 		suite,
 		opts,
 		m365.Group.ID,
-		m365.SecondaryGroup.ID, // more reliable than user
+		m365.User.ID,
 		path.GroupsService,
 		path.LibrariesCategory,
 		ic,
@@ -201,12 +201,8 @@ func (suite *GroupsBackupIntgSuite) TestBackup_Run_groupsBasic() {
 
 	sel.Include(
 		selTD.GroupsBackupLibraryFolderScope(sel),
-		selTD.GroupsBackupChannelScope(sel))
-
-	// TODO(pandeyabs): CorsoCITeam group mailbox backup is currently broken because of invalid
-	// odata.NextLink which causes an infinite loop during paging. Disabling conversations tests while
-	// we go fix the group mailbox.
-	// selTD.GroupsBackupConversationScope(sel))
+		selTD.GroupsBackupChannelScope(sel),
+		selTD.GroupsBackupConversationScope(sel))
 
 	bo, bod := PrepNewTestBackupOp(t, ctx, mb, sel.Selector, opts, version.Backup, counter)
 	defer bod.Close(t, ctx)
@@ -330,12 +326,8 @@ func (suite *GroupsBackupNightlyIntgSuite) TestBackup_Run_groupsVersion9MergeBas
 	sel := selectors.NewGroupsBackup([]string{suite.m365.Group.ID})
 	sel.Include(
 		selTD.GroupsBackupLibraryFolderScope(sel),
-		selTD.GroupsBackupChannelScope(sel))
-
-	// TODO(pandeyabs): CorsoCITeam group mailbox backup is currently broken because of invalid
-	// odata.NextLink which causes an infinite loop during paging. Disabling conv backups while
-	// we go fix the group mailbox.
-	// selTD.GroupsBackupConversationScope(sel))
+		selTD.GroupsBackupChannelScope(sel),
+		selTD.GroupsBackupConversationScope(sel))
 
 	RunMergeBaseGroupsUpdate(suite, sel.Selector, false)
 }
@@ -344,12 +336,8 @@ func (suite *GroupsBackupNightlyIntgSuite) TestBackup_Run_groupsVersion9AssistBa
 	sel := selectors.NewGroupsBackup([]string{suite.m365.Group.ID})
 	sel.Include(
 		selTD.GroupsBackupLibraryFolderScope(sel),
-		selTD.GroupsBackupChannelScope(sel))
-
-	// TODO(pandeyabs): CorsoCITeam group mailbox backup is currently broken because of invalid
-	// odata.NextLink which causes an infinite loop during paging. Disabling conv backups while
-	// we go fix the group mailbox.
-	// selTD.GroupsBackupConversationScope(sel))
+		selTD.GroupsBackupChannelScope(sel),
+		selTD.GroupsBackupConversationScope(sel))
 
 	RunDriveAssistBaseGroupsUpdate(suite, sel.Selector, false)
 }
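The removed TODO above points at an invalid `odata.NextLink` that makes paging loop forever. A common defensive pattern, shown here as a sketch independent of Corso's pager interfaces (every name below is illustrative, not project API), is to refuse to revisit a link that has already been fetched:

```go
package pagerguard

import (
	"errors"
	"fmt"
)

// page is a stand-in for one Graph response: items plus the @odata.nextLink
// of the following page (empty when enumeration is complete).
type page struct {
	items    []string
	nextLink string
}

// fetch is a stand-in for the call that retrieves a single page.
type fetch func(link string) (page, error)

// collectAll walks nextLinks but treats a repeated link as an error,
// turning a self-referencing nextLink into a failure instead of a hang.
func collectAll(first string, get fetch) ([]string, error) {
	var (
		items []string
		seen  = map[string]struct{}{}
		link  = first
	)

	for link != "" {
		if _, ok := seen[link]; ok {
			return nil, errors.New("nextLink loop detected: " + link)
		}

		seen[link] = struct{}{}

		p, err := get(link)
		if err != nil {
			return nil, fmt.Errorf("fetching page: %w", err)
		}

		items = append(items, p.items...)
		link = p.nextLink
	}

	return items, nil
}
```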
@@ -32,9 +32,6 @@ func (suite *ConversationsPagerIntgSuite) SetupSuite() {
 }
 
 func (suite *ConversationsPagerIntgSuite) TestEnumerateConversations_withThreadsAndPosts() {
-	// Skip
-	suite.T().Skip("CorsoCITeam group mailbox backup is broken")
-
 	var (
 		t = suite.T()
 		ac = suite.its.ac.Conversations()
@@ -701,48 +701,10 @@ func (ode oDataErr) errMessageMatchesAllFilters(err error, fs ...filters.Filter)
 // ---------------------------------------------------------------------------
 // other helpers
 // ---------------------------------------------------------------------------
-const (
-	// JWTQueryParam is a query param embed in graph download URLs which holds
-	// JWT token.
-	JWTQueryParam = "tempauth"
-	// base64 encoded json header. Contains {"alg":"HS256","typ":"JWT"}
-	//
-	// Hardcoding this instead of generating it every time on the fly.
-	// The algorithm doesn't matter as we are not verifying the token.
-	jwtHeader = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9"
-)
 
-func sanitizeToken(rawToken string) string {
-	segments := strings.Split(rawToken, ".")
-
-	// Check if the token has the old format, in which it has 3 segments and
-	// conforms to jwt spec. Format is seg1.seg2.seg3.
-	if len(segments) == 3 {
-		return rawToken
-	}
-
-	// Check if it is a msft proprietary token in which it has 4 segments and
-	// doesn't meet jwt spec. Format is v1.seg1.seg2.seg3. Return a token which
-	// meets jwt spec.
-	//
-	// In this proprietary token, there is no jwt header segment. Also, the claims
-	// section is split into first and segments. The first segment contains the
-	// `exp` claim that we are interested in.
-	//
-	// The second segment contains the rest of the claims, but likely encrypted.
-	// We don't need it so discard it. The last segment contains the signature which
-	// we don't care about either, as we are not verifying the token. So append it as is.
-	//
-	// It's okay if the sanitized token still doesn't meet jwt spec. It'll fail decoding
-	// later and we have fallbacks for that.
-	if len(segments) == 4 && segments[0] == "v1" {
-		return jwtHeader + "." + segments[1] + "." + segments[3]
-	}
-
-	// If MSFT change the token format again on us, just return empty string and let caller
-	// handle it as an error.
-	return ""
-}
-
+// JWTQueryParam is a query param embed in graph download URLs which holds
+// JWT token.
+const JWTQueryParam = "tempauth"
+
 // IsURLExpired inspects the jwt token embed in the item download url
 // and returns true if it is expired.
@@ -753,20 +715,12 @@ func IsURLExpired(
 	expiredErr error,
 	err error,
 ) {
-	ctx = clues.Add(ctx, "checked_url", urlStr)
-
 	// Extract the raw JWT string from the download url.
 	rawJWT, err := common.GetQueryParamFromURL(urlStr, JWTQueryParam)
 	if err != nil {
 		return nil, clues.WrapWC(ctx, err, "jwt query param not found")
 	}
 
-	// Token may have a proprietary format. Try to sanitize it to jwt format.
-	rawJWT = sanitizeToken(rawJWT)
-	if len(rawJWT) == 0 {
-		return nil, clues.WrapWC(ctx, err, "sanitizing jwt")
-	}
-
 	expired, err := jwt.IsJWTExpired(rawJWT)
 	if err != nil {
 		return nil, clues.WrapWC(ctx, err, "checking jwt expiry")
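The `sanitizeToken` helper on the `-` side of the hunk above rebuilds Microsoft's proprietary 4-segment `v1.` download tokens into a JWT-shaped string so the `exp` claim can be decoded without verification. The same logic as a standalone sketch (the example token is made up):

```go
package main

import (
	"fmt"
	"strings"
)

// Base64url of {"alg":"HS256","typ":"JWT"}. The algorithm is irrelevant here
// because the token is only decoded for its exp claim, never verified.
const jwtHeader = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9"

// sanitizeToken mirrors the removed helper: 3-segment tokens already look
// like a JWT; 4-segment "v1.<claims>.<rest>.<sig>" tokens are rebuilt with a
// synthetic header; anything else is returned empty for the caller to handle.
func sanitizeToken(rawToken string) string {
	segments := strings.Split(rawToken, ".")

	if len(segments) == 3 {
		return rawToken
	}

	if len(segments) == 4 && segments[0] == "v1" {
		return jwtHeader + "." + segments[1] + "." + segments[3]
	}

	return ""
}

func main() {
	// Hypothetical proprietary-format token; segment contents are fabricated.
	fmt.Println(sanitizeToken("v1.eyJleHAiOjB9.opaque.sig"))
}
```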
@@ -76,11 +76,20 @@ func (suite *siteIntegrationSuite) TestSites_GetByID() {
 	ctx, flush := tester.NewContext(t)
 	defer flush()
 
-	site, err := suite.cli.SiteByID(ctx, suite.m365.Site.ID)
-	require.NoError(t, err, clues.ToCore(err))
-	assert.NotEmpty(t, site.WebURL)
-	assert.NotEmpty(t, site.ID)
-	assert.NotEmpty(t, site.OwnerType)
+	sites, err := suite.cli.Sites(ctx, fault.New(true))
+	assert.NoError(t, err, clues.ToCore(err))
+	assert.NotEmpty(t, sites)
+
+	for _, s := range sites {
+		suite.Run("site_"+s.ID, func() {
+			t := suite.T()
+			site, err := suite.cli.SiteByID(ctx, s.ID)
+			require.NoError(t, err, clues.ToCore(err))
+			assert.NotEmpty(t, site.WebURL)
+			assert.NotEmpty(t, site.ID)
+			assert.NotEmpty(t, site.OwnerType)
+		})
+	}
 }
 
 // ---------------------------------------------------------------------------
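The `+` side of this hunk swaps a single `SiteByID` check for one named subtest per enumerated site. As a general illustration of that fan-out pattern with testify — Corso's client and types are not used here, only the idea from the hunk — a sketch (placed in a `_test.go` file):

```go
package example

import (
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/suite"
)

type sitesSuite struct {
	suite.Suite
	siteIDs []string // stand-in for IDs returned by an enumeration call
}

func (s *sitesSuite) TestEachSite() {
	for _, id := range s.siteIDs {
		// Each site gets its own named pass/fail entry in the test output.
		s.Run("site_"+id, func() {
			t := s.T()
			assert.NotEmpty(t, id)
		})
	}
}

func TestSitesSuite(t *testing.T) {
	suite.Run(t, &sitesSuite{siteIDs: []string{"site-a", "site-b"}})
}
```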
@@ -21,8 +21,7 @@ application to connect to your *M365 tenant* and transfer data during backup and
 ## Corso concepts {#corso-concepts}
 
 * **Repository** refers to the storage location where Corso securely and efficiently stores encrypted *backups* of your
-  *M365 Service*'s data. See [Repositories](../repos) for more information.
+  *M365 Services* data. See [Repositories](../repos) for more information.
 
-* **Backup** is a copy of a resource of your *M365 Service*'s data to be used for restores in case of deletion, loss,
-  or corruption of the original data. Corso performs backups incrementally, and each backup only captures data that has
-  changed between backup iterations.
+* **Backup** is a copy of your *M365 Services* data to be used for restores in case of deletion, loss, or corruption of the
+  original data. Corso performs backups incrementally, and each backup only captures data that has changed between backup iterations.
website/package-lock.json (generated): 1811 lines changed
File diff suppressed because it is too large
@@ -15,7 +15,7 @@
   },
   "dependencies": {
     "@docusaurus/core": "3.1.1",
-    "@docusaurus/plugin-google-gtag": "^3.5.1",
+    "@docusaurus/plugin-google-gtag": "^3.1.1",
     "@docusaurus/preset-classic": "3.1.1",
     "@loadable/component": "^5.16.3",
     "@mdx-js/react": "^3.0.0",
@@ -26,17 +26,17 @@
     "feather-icons": "^4.29.1",
     "jarallax": "^2.2.0",
     "mdx-mermaid": "^2.0.0",
-    "mermaid": "^10.9.0",
+    "mermaid": "^10.8.0",
     "prism-react-renderer": "^2.1.0",
     "react": "^18.2.0",
-    "react-dom": "^18.3.0",
-    "sass": "^1.79.1",
+    "react-dom": "^18.2.0",
+    "sass": "^1.71.0",
     "tiny-slider": "^2.9.4",
     "tw-elements": "1.0.0-alpha13",
     "wow.js": "^1.2.2"
   },
   "devDependencies": {
-    "@docusaurus/module-type-aliases": "3.5.1",
+    "@docusaurus/module-type-aliases": "3.1.1",
     "@iconify/react": "^4.1.1",
     "autoprefixer": "^10.4.17",
     "postcss": "^8.4.33",