Merge branch 'main' into refactor_get_item

Danny 2022-12-02 16:41:33 -05:00 committed by GitHub
commit aac443056f
293 changed files with 3083 additions and 40184 deletions


@ -59,5 +59,16 @@ runs:
./foldersAndItems.ps1 -WellKnownRoot recoverableitemsroot -User ${{ inputs.user }} -FolderNamePurge Purges
./foldersAndItems.ps1 -WellKnownRoot recoverableitemsroot -User ${{ inputs.user }} -FolderNamePurge Deletions
# possible future extensions
# ./foldersAndItems.ps1 -WellKnownRoot recoverableitemsroot -User ${{ inputs.user }} -FolderNamePurge "Calendar Logging"
- name: Run the old purge script to clear out onedrive buildup
working-directory: ./src
if: ${{ inputs.folder-prefix != '' }}
shell: sh
env:
AZURE_CLIENT_ID: ${{ inputs.azure-client-id }}
AZURE_CLIENT_SECRET: ${{ inputs.azure-client-secret }}
AZURE_TENANT_ID: ${{ inputs.azure-tenant-id }}
run: >
go run ./cmd/purge/purge.go onedrive
--user ${{ inputs.user }}
--prefix ${{ inputs.folder-prefix }}
--before ${{ inputs.older-than }}


@ -24,18 +24,6 @@ updates:
open-pull-requests-limit: 50 open-pull-requests-limit: 50
rebase-strategy: "disabled" rebase-strategy: "disabled"
# Maintain dependencies for npm - docs
- package-ecosystem: "npm"
directory: "docs/"
schedule:
interval: "daily"
reviewers:
- "alcionai/corso-maintainers"
- "ntolia"
- "gmatev"
open-pull-requests-limit: 50
rebase-strategy: "disabled"
# Maintain dependencies for go - src # Maintain dependencies for go - src
- package-ecosystem: "gomod" - package-ecosystem: "gomod"
directory: "src/" directory: "src/"


@ -6,9 +6,6 @@ on:
srcfileschanged: srcfileschanged:
description: "'true' if src/** or .github/workflows/** files have changed in the branch" description: "'true' if src/** or .github/workflows/** files have changed in the branch"
value: ${{ jobs.file-change-check.outputs.srcfileschanged }} value: ${{ jobs.file-change-check.outputs.srcfileschanged }}
docfileschanged:
description: "'true' if docs/** or src/** or .github/workflows/** files have changed in the branch"
value: ${{ jobs.file-change-check.outputs.docfileschanged }}
websitefileschanged: websitefileschanged:
description: "'true' if websites/** or .github/workflows/** files have changed in the branch" description: "'true' if websites/** or .github/workflows/** files have changed in the branch"
value: ${{ jobs.file-change-check.outputs.websitefileschanged }} value: ${{ jobs.file-change-check.outputs.websitefileschanged }}
@ -21,7 +18,6 @@ jobs:
pull-requests: read pull-requests: read
outputs: outputs:
srcfileschanged: ${{ steps.srcchecker.outputs.srcfileschanged }} srcfileschanged: ${{ steps.srcchecker.outputs.srcfileschanged }}
docfileschanged: ${{ steps.docchecker.outputs.docfileschanged }}
websitefileschanged: ${{ steps.websitechecker.outputs.websitefileschanged }} websitefileschanged: ${{ steps.websitechecker.outputs.websitefileschanged }}
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v3
@ -35,8 +31,6 @@ jobs:
filters: | filters: |
src: src:
- 'src/**' - 'src/**'
docs:
- 'docs/**'
website: website:
- 'website/**' - 'website/**'
actions: actions:
@ -50,16 +44,9 @@ jobs:
echo "src or workflow file changes occurred" echo "src or workflow file changes occurred"
echo ::set-output name=srcfileschanged::true echo ::set-output name=srcfileschanged::true
- name: Check dorny for changes in docs related filepaths
id: docchecker
if: steps.dornycheck.outputs.src == 'true' || steps.dornycheck.outputs.docs == 'true' || steps.dornycheck.outputs.actions == 'true'
run: |
echo "docs, src or workflow file changes occurred"
echo ::set-output name=docfileschanged::true
- name: Check dorny for changes in website related filepaths
id: websitechecker
if: steps.dornycheck.outputs.website == 'true' || steps.dornycheck.outputs.actions == 'true'
if: steps.dornycheck.outputs.src == 'true' || steps.dornycheck.outputs.website == 'true' || steps.dornycheck.outputs.actions == 'true'
run: |
echo "website or workflow file changes occurred"
echo ::set-output name=websitefileschanged::true


@ -44,7 +44,7 @@ jobs:
# the results will cascade onto both testing and linting. # the results will cascade onto both testing and linting.
- name: Setup Golang with cache - name: Setup Golang with cache
uses: ./.github/actions/go-setup-cache uses: ./.github/actions/go-setup-cache
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main' || needs.precheck.outputs.docfileschanged == 'true'
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main' || needs.precheck.outputs.websitefileschanged == 'true'
with: with:
go-version-file: src/go.mod go-version-file: src/go.mod
@ -54,6 +54,8 @@ jobs:
outputs: outputs:
environment: ${{ steps.environment.outputs.environment }} environment: ${{ steps.environment.outputs.environment }}
version: ${{ steps.version.outputs.version }} version: ${{ steps.version.outputs.version }}
website-bucket: ${{ steps.website-bucket.output.website-bucket }}
website-cfid: ${{ steps.website-cfid.output.website-cfid }}
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v3
@ -79,15 +81,37 @@ jobs:
echo "::set-output name=version::$(echo unreleased-$(git rev-parse --short HEAD))" echo "::set-output name=version::$(echo unreleased-$(git rev-parse --short HEAD))"
fi fi
- name: Get bucket name for website
id: website-bucket
run: |
if ${{ startsWith(github.ref, 'refs/tags/') }}; then
echo "set-output name=website-bucket::corsobackup.io"
echo "::set-output name=website-bucket::corsobackup.io"
else
echo "set-output name=website-bucket::test-corso-docs"
echo "::set-output name=website-bucket::test-corso-docs"
fi
- name: Get cfid for website
id: website-cfid
run: |
if ${{ startsWith(github.ref, 'refs/tags/') }}; then
echo "set-output name=website-cfid::E1W9NGI9YTVZ1A"
echo "::set-output name=website-cfid::E1W9NGI9YTVZ1A"
else
echo "set-output name=website-cfid::ESFTEIYTIP7Y3"
echo "::set-output name=website-cfid::ESFTEIYTIP7Y3"
fi
# ----------------------------------------------------------------------------------------------------
# --- Docs Linting -----------------------------------------------------------------------------------
# --- Website Linting --------------------------------------------------------------------------------
# ----------------------------------------------------------------------------------------------------
Docs-Linting:
Website-Linting:
needs: [Precheck, Checkout, SetEnv]
environment: Testing
runs-on: ubuntu-latest
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main' || needs.precheck.outputs.docfileschanged == 'true' # docsfileschanged also includes srcfileschanged
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main' || needs.precheck.outputs.websitefileschanged == 'true' # websitefileschanged also includes srcfileschanged
steps:
- uses: actions/checkout@v3
@ -102,39 +126,37 @@ jobs:
run: | run: |
go run ./cmd/mdgen/mdgen.go generate go run ./cmd/mdgen/mdgen.go generate
# migrate generated md files into /docs/docs/cli # migrate generated md files into /website/docs/cli
- name: Move CLI .md to Docs - name: Move CLI .md to Docs
run: | run: |
mkdir -p ./docs/docs/cli mkdir -p ./website/docs/cli
mv ./src/cmd/mdgen/cli_markdown/* ./docs/docs/cli/ mv ./src/cmd/mdgen/cli_markdown/* ./website/docs/cli/
rm -R ./src/cmd/mdgen/cli_markdown/ rm -R ./src/cmd/mdgen/cli_markdown/
- name: Install dependencies for docs lint - name: Install dependencies for website lint
run: | run: |
wget https://github.com/errata-ai/vale/releases/download/v2.20.2/vale_2.20.2_Linux_64-bit.tar.gz # NOTE: update in Dockerfile when updating wget https://github.com/errata-ai/vale/releases/download/v2.20.2/vale_2.20.2_Linux_64-bit.tar.gz # NOTE: update in Dockerfile when updating
mkdir bin && tar -xvzf vale_2.20.2_Linux_64-bit.tar.gz -C bin mkdir bin && tar -xvzf vale_2.20.2_Linux_64-bit.tar.gz -C bin
echo "$PWD/bin" >> $GITHUB_PATH echo "$PWD/bin" >> $GITHUB_PATH
npm i -g markdownlint-cli@0.32.2 # NOTE: update in Dockerfile when updating npm i -g markdownlint-cli@0.32.2 # NOTE: update in Dockerfile when updating
- name: Run docs lint - name: Run website lint
env:
CORSO_USE_DOCKER: -1 # prevent using docker inside makefile
run: | run: |
cd docs && make -o genclidocs localcheck cd website && make -o genclidocs localcheck
- name: Build docs - name: Build website
env: env:
CORSO_VERSION: ${{ needs.SetEnv.outputs.version }} CORSO_VERSION: ${{ needs.SetEnv.outputs.version }}
run: | run: |
cd docs && cd website &&
npm ci && npm ci &&
npm run build npm run build
- uses: actions/upload-artifact@master - uses: actions/upload-artifact@master
name: Upload docs as artifacts name: Upload website as artifacts
with: with:
name: docs name: website
path: docs/build path: website/build
# ---------------------------------------------------------------------------------------------------- # ----------------------------------------------------------------------------------------------------
# --- Integration and Unit Testing ------------------------------------------------------------------- # --- Integration and Unit Testing -------------------------------------------------------------------
@ -272,7 +294,7 @@ jobs:
retention-days: 14 retention-days: 14
# Update check run called "Test-Suite-Fork" # Update check run called "Test-Suite-Fork"
- uses: actions/github-script@v5
- uses: actions/github-script@v6
id: update-check-run id: update-check-run
if: ${{ always() }} if: ${{ always() }}
env: env:
@ -338,12 +360,16 @@ jobs:
- name: Run go-licenses - name: Run go-licenses
run: go-licenses check github.com/alcionai/corso/src --ignore github.com/alcionai/corso/src run: go-licenses check github.com/alcionai/corso/src --ignore github.com/alcionai/corso/src
- name: Run staticcheck
uses: dominikh/staticcheck-action@v1.2.0
# ---------------------------------------------------------------------------------------------------- # ----------------------------------------------------------------------------------------------------
# --- Publish steps ---------------------------------------------------------------------------------- # --- Publish steps ----------------------------------------------------------------------------------
# ---------------------------------------------------------------------------------------------------- # ----------------------------------------------------------------------------------------------------
Publish-Binary: Publish-Binary:
needs: [Test-Suite-Trusted, Linting, Docs-Linting, SetEnv]
needs: [Test-Suite-Trusted, Linting, Website-Linting, SetEnv]
environment: ${{ needs.SetEnv.outputs.environment }} environment: ${{ needs.SetEnv.outputs.environment }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main' if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main'
@ -379,23 +405,23 @@ jobs:
name: corso name: corso
path: src/dist/* path: src/dist/*
Publish-Docs:
Publish-Website:
needs: [Test-Suite-Trusted, Linting, Docs-Linting, SetEnv]
needs: [Test-Suite-Trusted, Linting, Website-Linting, SetEnv]
environment: ${{ needs.SetEnv.outputs.environment }}
runs-on: ubuntu-latest
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main'
defaults:
run:
working-directory: docs
working-directory: website
steps:
- uses: actions/checkout@v3
- uses: actions/download-artifact@master
name: Download docs from build step
name: Download website from build step
with:
name: docs
name: website
path: docs/build
path: website/build
- name: Configure AWS credentials from Test account - name: Configure AWS credentials from Test account
uses: aws-actions/configure-aws-credentials@v1 uses: aws-actions/configure-aws-credentials@v1
@ -409,16 +435,16 @@ jobs:
run: | run: |
printf 'User-agent: *\nDisallow: /' > build/robots.txt printf 'User-agent: *\nDisallow: /' > build/robots.txt
- name: Push docs
- name: Push website
run: |
aws s3 sync build "s3://${{ secrets.DOCS_S3_BUCKET }}"
aws s3 sync build "s3://${{ needs.SetEnv.outputs.website-bucket }}" --delete
- name: Invalidate cloudfront
run: |
aws cloudfront create-invalidation --distribution-id ${{ secrets.DOCS_CF_DISTRIBUTION }} --paths "/*"
aws cloudfront create-invalidation --distribution-id ${{ needs.SetEnv.outputs.website-cfid }} --paths "/*"
Publish-Image:
needs: [Test-Suite-Trusted, Linting, Docs-Linting, SetEnv]
needs: [Test-Suite-Trusted, Linting, Website-Linting, SetEnv]
environment: ${{ needs.SetEnv.outputs.environment }} environment: ${{ needs.SetEnv.outputs.environment }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main' if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main'


@ -1,34 +0,0 @@
name: CI for Website
on:
workflow_dispatch:
pull_request:
push:
branches: [main]
# cancel currently running jobs if a new version of the branch is pushed
concurrency:
group: website-ci-${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
Precheck:
uses: alcionai/corso/.github/workflows/_filechange_checker.yml@main
Website-Build:
needs: [Precheck]
environment: Testing
runs-on: ubuntu-latest
if: github.ref == 'refs/heads/main' || needs.precheck.outputs.websitefileschanged == 'true'
defaults:
run:
working-directory: website
steps:
- uses: actions/checkout@v3
- name: Build website image
run: |
make buildimage
- name: Build website
run: |
make build


@ -9,27 +9,110 @@ permissions:
packages: write packages: write
pull-requests: read pull-requests: read
# cancel currently running jobs if a new version of the branch is pushed
concurrency:
group: push-website-${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs: jobs:
SetEnv:
environment: Testing
runs-on: ubuntu-latest
outputs:
version: ${{ steps.version.outputs.version }}
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0 # needed to get latest tag
- name: Get version string
id: version
run: |
echo "set-output name=version::$(git describe --tags --abbrev=0)"
echo "::set-output name=version::$(git describe --tags --abbrev=0)"
# ----------------------------------------------------------------------------------------------------
# --- Website Linting -----------------------------------------------------------------------------------
# ----------------------------------------------------------------------------------------------------
Website-Linting:
needs: [SetEnv]
environment: Testing
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Setup Golang with cache
uses: magnetikonline/action-golang-cache@v3
with:
go-version-file: src/go.mod
- name: Generate CLI Docs
working-directory: ./src
run: |
go run ./cmd/mdgen/mdgen.go generate
# migrate generated md files into /website/docs/cli
- name: Move CLI .md to Docs
run: |
mkdir -p ./website/docs/cli
mv ./src/cmd/mdgen/cli_markdown/* ./website/docs/cli/
rm -R ./src/cmd/mdgen/cli_markdown/
- name: Install dependencies for website lint
run: |
wget https://github.com/errata-ai/vale/releases/download/v2.20.2/vale_2.20.2_Linux_64-bit.tar.gz # NOTE: update in Dockerfile when updating
mkdir bin && tar -xvzf vale_2.20.2_Linux_64-bit.tar.gz -C bin
echo "$PWD/bin" >> $GITHUB_PATH
npm i -g markdownlint-cli@0.32.2 # NOTE: update in Dockerfile when updating
- name: Run website lint
run: |
cd website && make -o genclidocs localcheck
- name: Build website
env:
CORSO_VERSION: ${{ needs.SetEnv.outputs.version }}
run: |
cd website &&
npm ci &&
npm run build
- uses: actions/upload-artifact@master
name: Upload website as artifacts
with:
name: website
path: website/build
Publish-Website: Publish-Website:
needs: [Website-Linting]
environment: Production environment: Production
runs-on: ubuntu-latest runs-on: ubuntu-latest
defaults: defaults:
run: run:
working-directory: website working-directory: website
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v3
- name: Configure AWS credentials
- uses: actions/download-artifact@master
name: Download website from build step
with:
name: website
path: website/build
- name: Configure AWS credentials from Test account
uses: aws-actions/configure-aws-credentials@v1 uses: aws-actions/configure-aws-credentials@v1
with: with:
role-to-assume: ${{ secrets.AWS_IAM_ROLE }} role-to-assume: ${{ secrets.AWS_IAM_ROLE }}
role-session-name: integration-testing role-session-name: integration-testing
aws-region: us-east-1 aws-region: us-east-1
- name: Build docs image
- name: Push website
run: |
make buildimage
aws s3 sync build "s3://corsobackup.io" --delete
- name: Build & Publish docs
- name: Invalidate cloudfront
run: |
make publish
aws cloudfront create-invalidation --distribution-id E1W9NGI9YTVZ1A --paths "/*"


@ -1,28 +0,0 @@
FROM ubuntu:22.04
LABEL MAINTAINER="Niraj Tolia"
ARG DEBIAN_FRONTEND=noninteractive
# NOTE for lines 13,15: update in CI when updating
RUN apt-get -y update && apt-get -y install gpg emacs curl zip git make \
&& curl -fsSL https://deb.nodesource.com/setup_current.x | bash - \
&& apt-get -y install nodejs \
&& apt-get autoclean \
&& node --version \
&& npm --version \
&& cd /tmp && curl -O -L https://github.com/errata-ai/vale/releases/download/v2.20.1/vale_2.20.1_Linux_64-bit.tar.gz \
&& tar -xvzf vale_2.20.1_Linux_64-bit.tar.gz -C /usr/bin vale \
&& npm install -g markdownlint-cli@0.32.2 \
&& curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" \
&& unzip awscliv2.zip && /bin/bash aws/install && rm -rf awscliv2.zip aws
WORKDIR /usr/src
COPY package.json package-lock.json* ./
RUN npm ci \
&& npm cache clean --force \
&& rm -f package.json package-lock.json*
ENV PATH /usr/src/node_modules/.bin:$PATH
WORKDIR /usr/src/docs
CMD ["npm", "start", "--", "--host", "0.0.0.0"]


@ -1,72 +0,0 @@
.PHONY: buildimage build serve dev shell check genclidocs _validatemdgen publish sync
CORSO_BUILD_DIR := /tmp/.corsobuild
CORSO_BUILD_CACHE := ${CORSO_BUILD_DIR}/cache
CORSO_BUILD_MOD := ${CORSO_BUILD_DIR}/mod
CORSO_BUILD_BIN := ${CORSO_BUILD_DIR}/bin
CORSO_REPO := /go/src/github.com/alcionai/corso
CORSO_LOCAL_PATH := $(shell git rev-parse --show-toplevel)
GIT_SHA := $(shell git rev-parse --short HEAD)
DOCSC := docker run --rm -it -p 3000:3000 -v ${PWD}:/usr/src/docs --env CORSO_VERSION=unreleased-${GIT_SHA} corso/docs
CBASE := docker run --rm -it \
-v ${CORSO_LOCAL_PATH}:${CORSO_REPO} -v ${CORSO_BUILD_DIR}:${CORSO_BUILD_DIR} \
--env GOCACHE=${CORSO_BUILD_CACHE} --env GOMODCACHE=${CORSO_BUILD_MOD} --env GOTMPDIR=${CORSO_BUILD_DIR} \
--workdir ${CORSO_REPO}/src
GOC := ${CBASE} golang:1.18
GOBASHC := ${CBASE} --entrypoint bash golang:1.18
MDGEN_SRC := ${CORSO_REPO}/src/cmd/mdgen/mdgen.go
MDGEN_BINARY := ${CORSO_BUILD_BIN}/mdgen
CLI_DOCS := ${CORSO_REPO}/docs/docs/cli
buildimage:
docker build -t "corso/docs:latest" .
dev: genclidocs
$(DOCSC) npm start -- --host 0.0.0.0
VALE_TARGET ?= docs README.md
check: genclidocs
$(DOCSC) vale $(VALE_TARGET)
$(DOCSC) markdownlint '**/*.md' --ignore styles/ --ignore src/ --ignore node_modules/
localcheck: genclidocs
vale $(VALE_TARGET)
markdownlint '**/*.md' --ignore styles/ --ignore src/ --ignore node_modules/
dockershell:
$(DOCSC) bash
build: genclidocs
$(DOCSC) npm run build
serve:
$(DOCSC) npm run serve
genclidocs: _validatemdgen ${MDGEN_BINARY}
@echo 'Auto-generating Corso CLI docs...'
$(DOCSC) rm -rf docs/cli
$(GOC) ${MDGEN_BINARY} --cli-folder ${CLI_DOCS}
_validatemdgen: # in case we have a different architecture
@echo 'Verifying dependencies...'
$(GOBASHC) -c "${MDGEN_BINARY} --help >/dev/null || rm -rf ${MDGEN_BINARY}"
${MDGEN_BINARY}: $(shell find ${CORSO_LOCAL_PATH}/src -type f -name *.go) $(shell find ${CORSO_LOCAL_PATH}/src -type d )
@echo 'Re-building Corso CLI docs auto-gen tooling...'
$(GOC) go mod download
$(GOC) go build -o ${MDGEN_BINARY} ${MDGEN_SRC}
clean:
$(DOCSC) rm -rf docs/cli build node_modules
$(GOC) rm -rf ${CORSO_BUILD_DIR}/*
publish: clean build
docker run -e AWS_ACCESS_KEY_ID -e AWS_SECRET_ACCESS_KEY \
-e AWS_SESSION_TOKEN -e AWS_REGION \
--rm -v ${PWD}:/usr/src/docs corso/docs:latest \
make sync
sync:
aws s3 sync /usr/src/docs/build/ s3://corsobackup.io/ --exclude ".git/*" --delete
aws cloudfront create-invalidation --distribution-id E1W9NGI9YTVZ1A --paths "/*"


@ -1,71 +0,0 @@
# Corso documentation
Corso documentation uses [Docusaurus 2](https://docusaurus.io/), a modern static website generator.
[Mermaid](https://mermaid-js.github.io/mermaid/) provides support for native diagrams in Markdown.
## Requirements
Developing documentation for Corso requires the following tools on your machine:
- `make`
- Docker
## Installation
```bash
make buildimage
```
## Live documentation development
```bash
make dev
```
This command starts a local development server inside the Docker container and exposes the docs at [http://localhost:3000](http://localhost:3000).
## Generating Corso CLI docs
```bash
make genclidocs
```
Corso's CLI docs are auto-generated. This command explicitly triggers their generation; the step also runs automatically for the other commands where it's relevant.
## Building static documentation
```bash
make build
```
This command generates static content into the `build` directory for integration with any static content hosting service.
## Serving static documentation
```bash
make serve
```
This command will serve the static content generated with `make build` at [http://localhost:3000](http://localhost:3000).
## Style and linting
```bash
# Lint all docs
make check
# Lint specific files and/or folders
make check VALE_TARGET="README.md docs/concepts"
```
This command lints all Markdown files and checks them for style issues using the Docker container.
## Documentation platform development
```bash
make shell
```
Use this command to interactively (and temporarily!) change the contents or
configuration of the live documentation container image (for example, when
experimenting with new plugins).


@ -1,56 +0,0 @@
{
"name": "docs",
"version": "0.1.0",
"private": true,
"scripts": {
"docusaurus": "docusaurus",
"start": "docusaurus start",
"build": "docusaurus build",
"swizzle": "docusaurus swizzle",
"deploy": "docusaurus deploy",
"clear": "docusaurus clear",
"serve": "docusaurus serve",
"write-translations": "docusaurus write-translations",
"write-heading-ids": "docusaurus write-heading-ids"
},
"dependencies": {
"@docusaurus/core": "2.2.0",
"@docusaurus/plugin-google-gtag": "^2.2.0",
"@docusaurus/preset-classic": "2.2.0",
"@loadable/component": "^5.15.2",
"@mdx-js/react": "^1.6.22",
"animate.css": "^4.1.1",
"clsx": "^1.2.1",
"docusaurus-plugin-image-zoom": "^0.1.1",
"docusaurus-plugin-sass": "^0.2.2",
"feather-icons": "^4.29.0",
"jarallax": "^2.0.4",
"mdx-mermaid": "^1.3.2",
"mermaid": "^9.2.2",
"prism-react-renderer": "^1.3.5",
"react": "^17.0.2",
"react-dom": "^17.0.2",
"sass": "^1.56.1",
"tw-elements": "^1.0.0-alpha12",
"wowjs": "^1.1.3"
},
"devDependencies": {
"@docusaurus/module-type-aliases": "2.2.0",
"@iconify/react": "^4.0.0",
"autoprefixer": "^10.4.13",
"postcss": "^8.4.19",
"tailwindcss": "^3.2.4"
},
"browserslist": {
"production": [
">0.5%",
"not dead",
"not op_mini all"
],
"development": [
"last 1 chrome version",
"last 1 firefox version",
"last 1 safari version"
]
}
}


@ -1,65 +0,0 @@
/** @type {import('tailwindcss').Config} */
module.exports = {
content: ["./src/**/*.{js,jsx,ts,tsx}"],
darkMode: ['class', '[data-theme="dark"]'],
theme: {
screens: {
xs: "540px",
sm: "640px",
md: "768px",
lg: "1024px",
xl: "1280px",
"2xl": "1536px",
},
fontFamily: {
nunito: ['"Nunito", sans-serif'],
},
container: {
center: true,
padding: {
DEFAULT: "12px",
sm: "1rem",
lg: "45px",
xl: "5rem",
"2xl": "13rem",
},
},
extend: {
colors: {
dark: "#3c4858",
black: "#161c2d",
"dark-footer": "#192132",
},
boxShadow: {
sm: "0 2px 4px 0 rgb(60 72 88 / 0.15)",
DEFAULT: "0 0 3px rgb(60 72 88 / 0.15)",
md: "0 5px 13px rgb(60 72 88 / 0.20)",
lg: "0 10px 25px -3px rgb(60 72 88 / 0.15)",
xl: "0 20px 25px -5px rgb(60 72 88 / 0.1), 0 8px 10px -6px rgb(60 72 88 / 0.1)",
"2xl": "0 25px 50px -12px rgb(60 72 88 / 0.25)",
inner: "inset 0 2px 4px 0 rgb(60 72 88 / 0.05)",
testi: "2px 2px 2px -1px rgb(60 72 88 / 0.15)",
},
spacing: {
0.75: "0.1875rem",
3.25: "0.8125rem",
},
maxWidth: ({ theme, breakpoints }) => ({
1200: "71.25rem",
992: "60rem",
768: "45rem",
}),
zIndex: {
1: "1",
2: "2",
3: "3",
999: "999",
},
},
},
plugins: [require("tw-elements/dist/plugin")],
};


@ -9,8 +9,7 @@ BAD_LINT_MSG := "Missing golangci-lint version $(WANTED_LINT_VERSION). Visit $(I
.PHONY: check-lint check-lint-version lint load-test .PHONY: check-lint check-lint-version lint load-test
build: build:
go build -o corso -ldflags \
"-X 'github.com/alcionai/corso/src/cli/version.Version=$(shell git describe --exact-match --tags $(git rev-parse HEAD) 2>/dev/null || echo unreleased)-$(shell git rev-parse --short HEAD)'"
go build -o corso
lint: check-lint-version lint: check-lint-version
golangci-lint run golangci-lint run


@ -24,7 +24,7 @@ require (
github.com/tidwall/pretty v1.2.1 github.com/tidwall/pretty v1.2.1
github.com/tomlazar/table v0.1.2 github.com/tomlazar/table v0.1.2
github.com/vbauerster/mpb/v8 v8.1.4 github.com/vbauerster/mpb/v8 v8.1.4
go.uber.org/zap v1.23.0
go.uber.org/zap v1.24.0
golang.org/x/tools v0.3.0 golang.org/x/tools v0.3.0
gopkg.in/resty.v1 v1.12.0 gopkg.in/resty.v1 v1.12.0
) )


@ -420,8 +420,8 @@ go.uber.org/atomic v1.10.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0
go.uber.org/goleak v1.1.11 h1:wy28qYRKZgnJTxGxvye5/wgWr1EKjmUDGYox5mGlRlI= go.uber.org/goleak v1.1.11 h1:wy28qYRKZgnJTxGxvye5/wgWr1EKjmUDGYox5mGlRlI=
go.uber.org/multierr v1.8.0 h1:dg6GjLku4EH+249NNmoIciG9N/jURbDG+pFlTkhzIC8= go.uber.org/multierr v1.8.0 h1:dg6GjLku4EH+249NNmoIciG9N/jURbDG+pFlTkhzIC8=
go.uber.org/multierr v1.8.0/go.mod h1:7EAYxJLBy9rStEaz58O2t4Uvip6FSURkq8/ppBp95ak= go.uber.org/multierr v1.8.0/go.mod h1:7EAYxJLBy9rStEaz58O2t4Uvip6FSURkq8/ppBp95ak=
go.uber.org/zap v1.23.0 h1:OjGQ5KQDEUawVHxNwQgPpiypGHOxo2mNZsOqTak4fFY=
go.uber.org/zap v1.23.0/go.mod h1:D+nX8jyLsMHMYrln8A0rJjFt/T/9/bGgIhAqxv5URuY=
go.uber.org/zap v1.24.0 h1:FiJd5l1UOLj0wCgbSE0rwwXHzEdAZS6hiiSnxJN/D60=
go.uber.org/zap v1.24.0/go.mod h1:2kMP+WWQ8aoFoedH3T2sq6iJ2yDWpHbP0f6MQbS9Gkg=
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=


@ -18,7 +18,6 @@ const (
// The following functions are based off the code in v0.41.0 of msgraph-sdk-go // The following functions are based off the code in v0.41.0 of msgraph-sdk-go
// for sending delta requests with query parameters. // for sending delta requests with query parameters.
//nolint:unused
func createGetRequestInformationWithRequestConfiguration( func createGetRequestInformationWithRequestConfiguration(
baseRequestInfoFunc func() (*abs.RequestInformation, error), baseRequestInfoFunc func() (*abs.RequestInformation, error),
requestConfig *DeltaRequestBuilderGetRequestConfiguration, requestConfig *DeltaRequestBuilderGetRequestConfiguration,
@ -43,7 +42,6 @@ func createGetRequestInformationWithRequestConfiguration(
return requestInfo, nil return requestInfo, nil
} }
//nolint:unused
func sendMessagesDeltaGet( func sendMessagesDeltaGet(
ctx context.Context, ctx context.Context,
m *msmaildelta.DeltaRequestBuilder, m *msmaildelta.DeltaRequestBuilder,
@ -83,7 +81,6 @@ func sendMessagesDeltaGet(
return res.(msmaildelta.DeltaResponseable), nil return res.(msmaildelta.DeltaResponseable), nil
} }
//nolint:unused
func sendContactsDeltaGet( func sendContactsDeltaGet(
ctx context.Context, ctx context.Context,
m *mscontactdelta.DeltaRequestBuilder, m *mscontactdelta.DeltaRequestBuilder,


@ -8,6 +8,9 @@ import (
"context" "context"
"fmt" "fmt"
"io" "io"
"sync"
"sync/atomic"
"time"
absser "github.com/microsoft/kiota-abstractions-go/serialization" absser "github.com/microsoft/kiota-abstractions-go/serialization"
kw "github.com/microsoft/kiota-serialization-json-go" kw "github.com/microsoft/kiota-serialization-json-go"
@ -33,6 +36,10 @@ var (
const ( const (
collectionChannelBufferSize = 1000 collectionChannelBufferSize = 1000
numberOfRetries = 4 numberOfRetries = 4
// Outlook expects at most 4 concurrent requests
// https://learn.microsoft.com/en-us/graph/throttling-limits#outlook-service-limits
urlPrefetchChannelBufferSize = 4
) )
// Collection implements the interface from data.Collection // Collection implements the interface from data.Collection
@ -115,11 +122,11 @@ func (col *Collection) populateByOptionIdentifier(
) { ) {
var ( var (
errs error errs error
success int
success int64
totalBytes int64 totalBytes int64
wg sync.WaitGroup
user = col.user user = col.user
objectWriter = kw.NewJsonSerializationWriter()
) )
colProgress, closer := observe.CollectionProgress(user, col.fullPath.Category().String(), col.fullPath.Folder()) colProgress, closer := observe.CollectionProgress(user, col.fullPath.Category().String(), col.fullPath.Folder())
@ -127,7 +134,7 @@ func (col *Collection) populateByOptionIdentifier(
defer func() { defer func() {
close(colProgress) close(colProgress)
col.finishPopulation(ctx, success, totalBytes, errs)
col.finishPopulation(ctx, int(success), totalBytes, errs)
}() }()
// get QueryBasedonIdentifier // get QueryBasedonIdentifier
@ -139,34 +146,61 @@ func (col *Collection) populateByOptionIdentifier(
return return
} }
// Limit the max number of active requests to GC
semaphoreCh := make(chan struct{}, urlPrefetchChannelBufferSize)
defer close(semaphoreCh)
errUpdater := func(user string, err error) {
errs = support.WrapAndAppend(user, err, errs)
}
for _, identifier := range col.jobs { for _, identifier := range col.jobs {
response, err := query(ctx, col.service, user, identifier) if col.service.ErrPolicy() && errs != nil {
if err != nil {
errs = support.WrapAndAppendf(user, err, errs)
if col.service.ErrPolicy() {
break break
} }
semaphoreCh <- struct{}{}
continue wg.Add(1)
}
byteCount, err := serializeFunc(ctx, col.service.Client(), objectWriter, col.data, response, user) go func(identifier string) {
if err != nil { defer wg.Done()
errs = support.WrapAndAppendf(user, err, errs) defer func() { <-semaphoreCh }()
if col.service.ErrPolicy() { var (
response absser.Parsable
err error
)
for i := 1; i <= numberOfRetries; i++ {
response, err = query(ctx, col.service, user, identifier)
if err == nil {
break break
} }
// TODO: Tweak sleep times
continue if i < numberOfRetries {
time.Sleep(time.Duration(3*(i+1)) * time.Second)
}
} }
success++ if err != nil {
errUpdater(user, err)
return
}
byteCount, err := serializeFunc(ctx, col.service.Client(), kw.NewJsonSerializationWriter(), col.data, response, user)
if err != nil {
errUpdater(user, err)
return
}
atomic.AddInt64(&success, 1)
atomic.AddInt64(&totalBytes, int64(byteCount))
totalBytes += int64(byteCount)
colProgress <- struct{}{} colProgress <- struct{}{}
}(identifier)
} }
wg.Wait()
} }
// terminatePopulateSequence is a utility function used to close a Collection's data channel // terminatePopulateSequence is a utility function used to close a Collection's data channel
@ -313,14 +347,6 @@ func messageToDataCollection(
return 0, fmt.Errorf("expected Messageable, got %T", parsable) return 0, fmt.Errorf("expected Messageable, got %T", parsable)
} }
adtl := aMessage.GetAdditionalData()
if len(adtl) > 2 {
aMessage, err = support.ConvertFromMessageable(adtl, aMessage)
if err != nil {
return 0, err
}
}
if *aMessage.GetHasAttachments() { if *aMessage.GetHasAttachments() {
// getting all the attachments might take a couple attempts due to filesize // getting all the attachments might take a couple attempts due to filesize
var retriesErr error var retriesErr error
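The Exchange collection changes in this file swap a sequential per-item fetch for a bounded-concurrency pattern: a buffered channel acts as a semaphore capping in-flight Graph requests at four, a WaitGroup tracks the worker goroutines, counters are updated atomically, and each request is retried a few times with a growing sleep. A minimal, self-contained sketch of that pattern follows; `fetchItem` is a hypothetical stand-in for the real Graph query.

```go
package main

import (
	"errors"
	"fmt"
	"sync"
	"sync/atomic"
	"time"
)

// fetchItem is a hypothetical stand-in for the per-item Graph query.
func fetchItem(id string) error {
	if id == "" {
		return errors.New("empty id")
	}
	return nil
}

func main() {
	const (
		maxConcurrency = 4 // Outlook throttling guidance: at most 4 concurrent requests
		maxAttempts    = 4
	)

	var (
		success   int64
		wg        sync.WaitGroup
		semaphore = make(chan struct{}, maxConcurrency)
		ids       = []string{"a", "b", "c", "d", "e"}
	)

	for _, id := range ids {
		semaphore <- struct{}{} // block until a worker slot is free
		wg.Add(1)

		go func(id string) {
			defer wg.Done()
			defer func() { <-semaphore }() // always release the slot

			var err error

			for i := 1; i <= maxAttempts; i++ {
				if err = fetchItem(id); err == nil {
					break
				}
				if i < maxAttempts {
					// same linear backoff shape as the diff
					time.Sleep(time.Duration(3*(i+1)) * time.Second)
				}
			}

			if err == nil {
				atomic.AddInt64(&success, 1)
			}
		}(id)
	}

	wg.Wait()
	fmt.Println("fetched:", success)
}
```

Releasing the semaphore slot in a deferred function guarantees a slot is freed even when the fetch fails.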


@ -6,15 +6,14 @@ import (
abs "github.com/microsoft/kiota-abstractions-go" abs "github.com/microsoft/kiota-abstractions-go"
msuser "github.com/microsoftgraph/msgraph-sdk-go/users" msuser "github.com/microsoftgraph/msgraph-sdk-go/users"
mscalendars "github.com/microsoftgraph/msgraph-sdk-go/users/item/calendars" mscalendars "github.com/microsoftgraph/msgraph-sdk-go/users/item/calendars"
mscevents "github.com/microsoftgraph/msgraph-sdk-go/users/item/calendars/item/events"
mscontactfolder "github.com/microsoftgraph/msgraph-sdk-go/users/item/contactfolders" mscontactfolder "github.com/microsoftgraph/msgraph-sdk-go/users/item/contactfolders"
mscontactfolderitem "github.com/microsoftgraph/msgraph-sdk-go/users/item/contactfolders/item" mscontactfolderitem "github.com/microsoftgraph/msgraph-sdk-go/users/item/contactfolders/item"
mscontactfolderchild "github.com/microsoftgraph/msgraph-sdk-go/users/item/contactfolders/item/childfolders" mscontactfolderchild "github.com/microsoftgraph/msgraph-sdk-go/users/item/contactfolders/item/childfolders"
mscontactfolderitemcontact "github.com/microsoftgraph/msgraph-sdk-go/users/item/contactfolders/item/contacts"
mscontacts "github.com/microsoftgraph/msgraph-sdk-go/users/item/contacts" mscontacts "github.com/microsoftgraph/msgraph-sdk-go/users/item/contacts"
msevents "github.com/microsoftgraph/msgraph-sdk-go/users/item/events" msevents "github.com/microsoftgraph/msgraph-sdk-go/users/item/events"
msfolder "github.com/microsoftgraph/msgraph-sdk-go/users/item/mailfolders" msfolder "github.com/microsoftgraph/msgraph-sdk-go/users/item/mailfolders"
msfolderitem "github.com/microsoftgraph/msgraph-sdk-go/users/item/mailfolders/item" msfolderitem "github.com/microsoftgraph/msgraph-sdk-go/users/item/mailfolders/item"
msmfmessage "github.com/microsoftgraph/msgraph-sdk-go/users/item/mailfolders/item/messages"
msmessage "github.com/microsoftgraph/msgraph-sdk-go/users/item/messages" msmessage "github.com/microsoftgraph/msgraph-sdk-go/users/item/messages"
msitem "github.com/microsoftgraph/msgraph-sdk-go/users/item/messages/item" msitem "github.com/microsoftgraph/msgraph-sdk-go/users/item/messages/item"
"github.com/pkg/errors" "github.com/pkg/errors"
@ -144,16 +143,16 @@ type DeltaRequestBuilderGetRequestConfiguration struct {
QueryParameters *DeltaRequestBuilderGetQueryParameters QueryParameters *DeltaRequestBuilderGetQueryParameters
} }
func optionsForFolderMessages(moreOps []string) (*msmfmessage.MessagesRequestBuilderGetRequestConfiguration, error) { func optionsForFolderMessages(moreOps []string) (*DeltaRequestBuilderGetRequestConfiguration, error) {
selecting, err := buildOptions(moreOps, messages) selecting, err := buildOptions(moreOps, messages)
if err != nil { if err != nil {
return nil, err return nil, err
} }
requestParameters := &msmfmessage.MessagesRequestBuilderGetQueryParameters{ requestParameters := &DeltaRequestBuilderGetQueryParameters{
Select: selecting, Select: selecting,
} }
options := &msmfmessage.MessagesRequestBuilderGetRequestConfiguration{ options := &DeltaRequestBuilderGetRequestConfiguration{
QueryParameters: requestParameters, QueryParameters: requestParameters,
} }
@ -301,19 +300,36 @@ func optionsForMailFoldersItem(
} }
// optionsForContactFoldersItem is the same as optionsForContacts. // optionsForContactFoldersItem is the same as optionsForContacts.
// TODO: Remove after Issue #828; requires updating msgraph to v0.34
func optionsForContactFoldersItem( func optionsForContactFoldersItem(
moreOps []string, moreOps []string,
) (*mscontactfolderitemcontact.ContactsRequestBuilderGetRequestConfiguration, error) { ) (*DeltaRequestBuilderGetRequestConfiguration, error) {
selecting, err := buildOptions(moreOps, contacts) selecting, err := buildOptions(moreOps, contacts)
if err != nil { if err != nil {
return nil, err return nil, err
} }
requestParameters := &mscontactfolderitemcontact.ContactsRequestBuilderGetQueryParameters{ requestParameters := &DeltaRequestBuilderGetQueryParameters{
Select: selecting, Select: selecting,
} }
options := &mscontactfolderitemcontact.ContactsRequestBuilderGetRequestConfiguration{ options := &DeltaRequestBuilderGetRequestConfiguration{
QueryParameters: requestParameters,
}
return options, nil
}
// optionsForEvents ensures valid option inputs for exchange.Events
// @return is first call in Events().GetWithRequestConfigurationAndResponseHandler(options, handler)
func optionsForCalendarEvents(moreOps []string) (*mscevents.EventsRequestBuilderGetRequestConfiguration, error) {
selecting, err := buildOptions(moreOps, events)
if err != nil {
return nil, err
}
requestParameters := &mscevents.EventsRequestBuilderGetQueryParameters{
Select: selecting,
}
options := &mscevents.EventsRequestBuilderGetRequestConfiguration{
QueryParameters: requestParameters, QueryParameters: requestParameters,
} }


@ -6,8 +6,10 @@ import (
"strings" "strings"
multierror "github.com/hashicorp/go-multierror" multierror "github.com/hashicorp/go-multierror"
msgraphgocore "github.com/microsoftgraph/msgraph-sdk-go-core"
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
msevents "github.com/microsoftgraph/msgraph-sdk-go/users/item/calendars/item/events"
cdelta "github.com/microsoftgraph/msgraph-sdk-go/users/item/contactfolders/item/contacts/delta"
mdelta "github.com/microsoftgraph/msgraph-sdk-go/users/item/mailfolders/item/messages/delta"
"github.com/pkg/errors" "github.com/pkg/errors"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
@ -16,6 +18,8 @@ import (
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
) )
const nextLinkKey = "@odata.nextLink"
// FilterContainersAndFillCollections is a utility function // FilterContainersAndFillCollections is a utility function
// that places the M365 object ids belonging to specific directories // that places the M365 object ids belonging to specific directories
// into a Collection. Messages outside of those directories are omitted. // into a Collection. Messages outside of those directories are omitted.
@ -162,50 +166,47 @@ func FetchEventIDsFromCalendar(
gs graph.Service, gs graph.Service,
user, calendarID string, user, calendarID string,
) ([]string, error) { ) ([]string, error) {
ids := []string{} var (
errs *multierror.Error
ids []string
)
response, err := gs.Client(). options, err := optionsForCalendarEvents([]string{"id"})
if err != nil {
return nil, err
}
builder := gs.Client().
UsersById(user). UsersById(user).
CalendarsById(calendarID). CalendarsById(calendarID).
Events().Get(ctx, nil) Events()
for {
resp, err := builder.Get(ctx, options)
if err != nil { if err != nil {
return nil, errors.Wrap(err, support.ConnectorStackErrorTrace(err)) return nil, errors.Wrap(err, support.ConnectorStackErrorTrace(err))
} }
pageIterator, err := msgraphgocore.NewPageIterator( for _, item := range resp.GetValue() {
response, if item.GetId() == nil {
gs.Adapter(), errs = multierror.Append(
models.CreateEventCollectionResponseFromDiscriminatorValue, errs,
errors.Errorf("event with nil ID in calendar %s", calendarID),
) )
if err != nil {
return nil, errors.Wrap(err, "iterator creation failure during fetchEventIDs") // TODO(ashmrtn): Handle fail-fast.
continue
} }
var errs *multierror.Error ids = append(ids, *item.GetId())
err = pageIterator.Iterate(ctx, func(pageItem any) bool {
entry, ok := pageItem.(graph.Idable)
if !ok {
errs = multierror.Append(errs, errors.New("item without GetId() call"))
return true
} }
if entry.GetId() == nil { nextLink := resp.GetOdataNextLink()
errs = multierror.Append(errs, errors.New("item with nil ID")) if nextLink == nil || len(*nextLink) == 0 {
return true break
} }
ids = append(ids, *entry.GetId()) builder = msevents.NewEventsRequestBuilder(*nextLink, gs.Adapter())
return true
})
if err != nil {
return nil, errors.Wrap(
err,
support.ConnectorStackErrorTrace(err)+
" :fetching events from calendar "+calendarID,
)
} }
return ids, errs.ErrorOrNil() return ids, errs.ErrorOrNil()
@ -214,61 +215,54 @@ func FetchEventIDsFromCalendar(
// FetchContactIDsFromDirectory function that returns a list of all the m365IDs of the contacts // FetchContactIDsFromDirectory function that returns a list of all the m365IDs of the contacts
// of the targeted directory // of the targeted directory
func FetchContactIDsFromDirectory(ctx context.Context, gs graph.Service, user, directoryID string) ([]string, error) { func FetchContactIDsFromDirectory(ctx context.Context, gs graph.Service, user, directoryID string) ([]string, error) {
var (
errs *multierror.Error
ids []string
)
options, err := optionsForContactFoldersItem([]string{"parentFolderId"}) options, err := optionsForContactFoldersItem([]string{"parentFolderId"})
if err != nil { if err != nil {
return nil, err return nil, errors.Wrap(err, "getting query options")
} }
ids := []string{} builder := gs.Client().
response, err := gs.Client().
UsersById(user). UsersById(user).
ContactFoldersById(directoryID). ContactFoldersById(directoryID).
Contacts(). Contacts().
Get(ctx, options) Delta()
for {
// TODO(ashmrtn): Update to pass options once graph SDK dependency is updated.
resp, err := sendContactsDeltaGet(ctx, builder, options, gs.Adapter())
if err != nil { if err != nil {
return nil, errors.Wrap(err, support.ConnectorStackErrorTrace(err)) return nil, errors.Wrap(err, support.ConnectorStackErrorTrace(err))
} }
pageIterator, err := msgraphgocore.NewPageIterator( for _, item := range resp.GetValue() {
response, if item.GetId() == nil {
gs.Adapter(),
models.CreateContactCollectionResponseFromDiscriminatorValue,
)
if err != nil {
return nil, errors.Wrap(err, "iterator creation during FetchContactIDs")
}
var errs *multierror.Error
err = pageIterator.Iterate(ctx, func(pageItem any) bool {
entry, ok := pageItem.(graph.Idable)
if !ok {
errs = multierror.Append( errs = multierror.Append(
errs, errs,
errors.New("casting pageItem to models.Contactable"), errors.Errorf("contact with nil ID in folder %s", directoryID),
) )
return true // TODO(ashmrtn): Handle fail-fast.
continue
} }
if entry.GetId() == nil { ids = append(ids, *item.GetId())
errs = multierror.Append(errs, errors.New("item with nil ID"))
return true
} }
ids = append(ids, *entry.GetId()) nextLinkIface := resp.GetAdditionalData()[nextLinkKey]
if nextLinkIface == nil {
break
}
return true nextLink := nextLinkIface.(*string)
}) if len(*nextLink) == 0 {
break
}
if err != nil { builder = cdelta.NewDeltaRequestBuilder(*nextLink, gs.Adapter())
return nil,
errors.Wrap(
err,
support.ConnectorStackErrorTrace(err)+
" :fetching contactIDs from directory "+directoryID,
)
} }
return ids, errs.ErrorOrNil() return ids, errs.ErrorOrNil()
@ -281,57 +275,54 @@ func FetchMessageIDsFromDirectory(
gs graph.Service, gs graph.Service,
user, directoryID string, user, directoryID string,
) ([]string, error) { ) ([]string, error) {
ids := []string{} var (
errs *multierror.Error
ids []string
)
options, err := optionsForFolderMessages([]string{"id"}) options, err := optionsForFolderMessages([]string{"id"})
if err != nil { if err != nil {
return nil, errors.Wrap(err, "getting query options") return nil, errors.Wrap(err, "getting query options")
} }
response, err := gs.Client(). builder := gs.Client().
UsersById(user). UsersById(user).
MailFoldersById(directoryID). MailFoldersById(directoryID).
Messages(). Messages().
Get(ctx, options) Delta()
for {
// TODO(ashmrtn): Update to pass options once graph SDK dependency is updated.
resp, err := sendMessagesDeltaGet(ctx, builder, options, gs.Adapter())
if err != nil { if err != nil {
return nil, return nil, errors.Wrap(err, support.ConnectorStackErrorTrace(err))
errors.Wrap(err, support.ConnectorStackErrorTrace(err))
}
pageIter, err := msgraphgocore.NewPageIterator(
response,
gs.Adapter(),
models.CreateMessageCollectionResponseFromDiscriminatorValue,
)
if err != nil {
return nil, errors.Wrap(err, "creating graph iterator")
}
var errs *multierror.Error
err = pageIter.Iterate(ctx, func(pageItem any) bool {
item, ok := pageItem.(graph.Idable)
if !ok {
errs = multierror.Append(errs, errors.New("item without ID function"))
return true
} }
for _, item := range resp.GetValue() {
if item.GetId() == nil { if item.GetId() == nil {
errs = multierror.Append(errs, errors.New("item with nil ID")) errs = multierror.Append(
return true errs,
errors.Errorf("item with nil ID in folder %s", directoryID),
)
// TODO(ashmrtn): Handle fail-fast.
continue
} }
ids = append(ids, *item.GetId()) ids = append(ids, *item.GetId())
}
return true nextLinkIface := resp.GetAdditionalData()[nextLinkKey]
}) if nextLinkIface == nil {
break
}
if err != nil { nextLink := nextLinkIface.(*string)
return nil, errors.Wrap( if len(*nextLink) == 0 {
err, break
support.ConnectorStackErrorTrace(err)+ }
" :fetching messages from directory "+directoryID,
) builder = mdelta.NewDeltaRequestBuilder(*nextLink, gs.Adapter())
} }
return ids, errs.ErrorOrNil() return ids, errs.ErrorOrNil()
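The rewritten Fetch*IDs functions all follow the same shape: request a page, collect the IDs it contains, then follow the response's @odata.nextLink until no link remains. A hedged, standalone sketch of that paging loop is below; `page`, `getPage`, and `collectIDs` are hypothetical stand-ins for the Graph response type and request plumbing.

```go
package main

import "fmt"

// page is a hypothetical stand-in for a Graph delta response: a batch of IDs
// plus an optional link to the next page.
type page struct {
	ids      []string
	nextLink string
}

// getPage is a hypothetical fetch function keyed by request URL.
func getPage(url string, pages map[string]page) (page, bool) {
	p, ok := pages[url]
	return p, ok
}

// collectIDs walks pages by following each response's next link, mirroring
// the loop structure the diff adopts.
func collectIDs(start string, pages map[string]page) []string {
	var ids []string

	url := start
	for {
		p, ok := getPage(url, pages)
		if !ok {
			break
		}

		ids = append(ids, p.ids...)

		if p.nextLink == "" { // no more pages
			break
		}
		url = p.nextLink
	}

	return ids
}

func main() {
	pages := map[string]page{
		"delta":        {ids: []string{"m1", "m2"}, nextLink: "delta?page=2"},
		"delta?page=2": {ids: []string{"m3"}},
	}
	fmt.Println(collectIDs("delta", pages))
}
```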


@ -19,6 +19,7 @@ type MockExchangeDataCollection struct {
messageCount int messageCount int
Data [][]byte Data [][]byte
Names []string Names []string
ModTimes []time.Time
} }
var ( var (
@ -36,12 +37,15 @@ func NewMockExchangeCollection(pathRepresentation path.Path, numMessagesToReturn
messageCount: numMessagesToReturn, messageCount: numMessagesToReturn,
Data: [][]byte{}, Data: [][]byte{},
Names: []string{}, Names: []string{},
ModTimes: []time.Time{},
} }
baseTime := time.Now()
for i := 0; i < c.messageCount; i++ { for i := 0; i < c.messageCount; i++ {
// We can plug in whatever data we want here (can be an io.Reader to a test data file if needed) // We can plug in whatever data we want here (can be an io.Reader to a test data file if needed)
c.Data = append(c.Data, GetMockMessageBytes("From: NewMockExchangeCollection")) c.Data = append(c.Data, GetMockMessageBytes("From: NewMockExchangeCollection"))
c.Names = append(c.Names, uuid.NewString()) c.Names = append(c.Names, uuid.NewString())
c.ModTimes = append(c.ModTimes, baseTime.Add(1*time.Hour))
} }
return c return c
@ -100,6 +104,7 @@ func (medc *MockExchangeDataCollection) Items() <-chan data.Stream {
ID: medc.Names[i], ID: medc.Names[i],
Reader: io.NopCloser(bytes.NewReader(medc.Data[i])), Reader: io.NopCloser(bytes.NewReader(medc.Data[i])),
size: int64(len(medc.Data[i])), size: int64(len(medc.Data[i])),
modifiedTime: medc.ModTimes[i],
} }
} }
}() }()
@ -113,6 +118,7 @@ type MockExchangeData struct {
Reader io.ReadCloser Reader io.ReadCloser
ReadErr error ReadErr error
size int64 size int64
modifiedTime time.Time
} }
func (med *MockExchangeData) UUID() string { func (med *MockExchangeData) UUID() string {
@ -141,6 +147,10 @@ func (med *MockExchangeData) Size() int64 {
return med.size return med.size
} }
func (med *MockExchangeData) ModTime() time.Time {
return med.modifiedTime
}
type errReader struct { type errReader struct {
readErr error readErr error
} }


@ -4,6 +4,9 @@ package onedrive
import ( import (
"context" "context"
"io" "io"
"sync"
"sync/atomic"
"time"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/connector/support"
@ -16,7 +19,15 @@ import (
const ( const (
// TODO: This number needs to be tuned // TODO: This number needs to be tuned
// Consider the max open file limit (`ulimit -n`, usually 1024) when setting this value
collectionChannelBufferSize = 50 collectionChannelBufferSize = 50
// TODO: Tune this later along with collectionChannelBufferSize
urlPrefetchChannelBufferSize = 25
// Max number of retries to fetch a doc from M365.
// Requests seem to time out at times when many run concurrently.
maxRetries = 4 // 1 + 3 retries
) )
var ( var (
@ -110,7 +121,9 @@ func (oc *Collection) populateItems(ctx context.Context) {
var ( var (
errs error errs error
byteCount int64 byteCount int64
itemsRead = 0 itemsRead int64
wg sync.WaitGroup
m sync.Mutex
) )
// Retrieve the OneDrive folder path to set later in // Retrieve the OneDrive folder path to set later in
@ -129,22 +142,57 @@ func (oc *Collection) populateItems(ctx context.Context) {
defer colCloser() defer colCloser()
defer close(folderProgress) defer close(folderProgress)
for _, itemID := range oc.driveItemIDs { semaphoreCh := make(chan struct{}, urlPrefetchChannelBufferSize)
// Read the item defer close(semaphoreCh)
itemInfo, itemData, err := oc.itemReader(ctx, oc.service, oc.driveID, itemID)
if err != nil {
errs = support.WrapAndAppendf(itemID, err, errs)
if oc.service.ErrPolicy() { errUpdater := func(id string, err error) {
m.Lock()
errs = support.WrapAndAppend(id, err, errs)
m.Unlock()
}
for _, itemID := range oc.driveItemIDs {
if oc.service.ErrPolicy() && errs != nil {
break break
} }
continue semaphoreCh <- struct{}{}
wg.Add(1)
go func(itemID string) {
defer wg.Done()
defer func() { <-semaphoreCh }()
// Read the item
var (
itemInfo *details.OneDriveInfo
itemData io.ReadCloser
err error
)
// Retry, since we were hitting timeouts when issuing multiple concurrent requests
// https://github.com/microsoftgraph/msgraph-sdk-go/issues/302
for i := 1; i <= maxRetries; i++ {
itemInfo, itemData, err = oc.itemReader(ctx, oc.service, oc.driveID, itemID)
if err == nil {
break
} }
// TODO: Tweak sleep times
if i < maxRetries {
time.Sleep(time.Duration(3*(i+1)) * time.Second)
}
}
if err != nil {
errUpdater(itemID, err)
return
}
// Item read successfully, add to collection // Item read successfully, add to collection
itemsRead++ atomic.AddInt64(&itemsRead, 1)
// byteCount iteration // byteCount iteration
byteCount += itemInfo.Size atomic.AddInt64(&byteCount, itemInfo.Size)
itemInfo.ParentPath = parentPathString itemInfo.ParentPath = parentPathString
progReader, closer := observe.ItemProgress(itemData, observe.ItemBackupMsg, itemInfo.ItemName, itemInfo.Size) progReader, closer := observe.ItemProgress(itemData, observe.ItemBackupMsg, itemInfo.ItemName, itemInfo.Size)
@ -157,9 +205,12 @@ func (oc *Collection) populateItems(ctx context.Context) {
info: itemInfo, info: itemInfo,
} }
folderProgress <- struct{}{} folderProgress <- struct{}{}
}(itemID)
} }
oc.reportAsCompleted(ctx, itemsRead, byteCount, errs) wg.Wait()
oc.reportAsCompleted(ctx, int(itemsRead), byteCount, errs)
} }
func (oc *Collection) reportAsCompleted(ctx context.Context, itemsRead int, byteCount int64, errs error) { func (oc *Collection) reportAsCompleted(ctx context.Context, itemsRead int, byteCount int64, errs error) {
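Unlike the Exchange version, the OneDrive collection guards its shared error value with a mutex, since several download goroutines may report failures at once. A small illustrative sketch of that error-aggregation approach follows; `wrap` is a hypothetical stand-in for support.WrapAndAppend.

```go
package main

import (
	"fmt"
	"sync"
)

// wrap is a hypothetical stand-in for support.WrapAndAppend: it chains a new
// error onto the accumulated error value.
func wrap(id string, err, errs error) error {
	if errs == nil {
		return fmt.Errorf("%s: %w", id, err)
	}
	return fmt.Errorf("%s: %v; %w", id, err, errs)
}

func main() {
	var (
		errs error
		mu   sync.Mutex
		wg   sync.WaitGroup
	)

	// errUpdater serializes writes to the shared errs value, as in the diff.
	errUpdater := func(id string, err error) {
		mu.Lock()
		defer mu.Unlock()
		errs = wrap(id, err, errs)
	}

	for _, id := range []string{"item1", "item2"} {
		wg.Add(1)
		go func(id string) {
			defer wg.Done()
			errUpdater(id, fmt.Errorf("download failed"))
		}(id)
	}

	wg.Wait()
	fmt.Println(errs)
}
```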


@ -275,7 +275,7 @@ func GetAllFolders(
return nil, errors.Wrap(err, "getting OneDrive folders") return nil, errors.Wrap(err, "getting OneDrive folders")
} }
res := []*Displayable{}
folders := map[string]*Displayable{}
for _, d := range drives { for _, d := range drives {
err = collectItems( err = collectItems(
@ -294,13 +294,18 @@ func GetAllFolders(
continue continue
} }
if item.GetId() == nil || len(*item.GetId()) == 0 {
logger.Ctx(ctx).Warn("folder without ID")
continue
}
if !strings.HasPrefix(*item.GetName(), prefix) { if !strings.HasPrefix(*item.GetName(), prefix) {
continue continue
} }
// Add the item instead of the folder because the item has more // Add the item instead of the folder because the item has more
// functionality. // functionality.
res = append(res, &Displayable{item})
folders[*item.GetId()] = &Displayable{item}
} }
return nil return nil
@ -311,6 +316,12 @@ func GetAllFolders(
} }
} }
res := make([]*Displayable, 0, len(folders))
for _, f := range folders {
res = append(res, f)
}
return res, nil return res, nil
} }
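The GetAllFolders change collects folders into a map keyed by item ID (skipping entries without an ID) and only then flattens the map into a slice, which drops duplicates seen across drives. A minimal sketch of that dedup-by-ID pattern, using a hypothetical folder type:

```go
package main

import "fmt"

type folder struct {
	id   string
	name string
}

// dedupeByID keeps one folder per ID, then flattens the map back into a
// slice, mirroring the shape of the GetAllFolders change.
func dedupeByID(in []folder) []folder {
	byID := map[string]folder{}
	for _, f := range in {
		if f.id == "" { // skip entries without an ID, as the diff does
			continue
		}
		byID[f.id] = f
	}

	out := make([]folder, 0, len(byID))
	for _, f := range byID {
		out = append(out, f)
	}

	return out
}

func main() {
	fmt.Println(dedupeByID([]folder{{"1", "a"}, {"1", "a"}, {"2", "b"}}))
}
```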


@ -10,6 +10,7 @@ import (
"github.com/alcionai/corso/src/internal/common" "github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
) )
@ -49,15 +50,20 @@ func (suite *OneDriveSuite) TestCreateGetDeleteFolder() {
driveID := *drives[0].GetId() driveID := *drives[0].GetId()
defer func() {
for _, id := range folderIDs {
err := DeleteItem(ctx, gs, driveID, id)
if err != nil {
logger.Ctx(ctx).Warnw("deleting folder", "id", id, "error", err)
}
}
}()
folderID, err := createRestoreFolders(ctx, gs, driveID, folderElements) folderID, err := createRestoreFolders(ctx, gs, driveID, folderElements)
require.NoError(t, err) require.NoError(t, err)
folderIDs = append(folderIDs, folderID) folderIDs = append(folderIDs, folderID)
defer func() {
assert.NoError(t, DeleteItem(ctx, gs, driveID, folderIDs[0]))
}()
folderName2 := "Corso_Folder_Test_" + common.FormatNow(common.SimpleTimeTesting) folderName2 := "Corso_Folder_Test_" + common.FormatNow(common.SimpleTimeTesting)
folderElements = append(folderElements, folderName2) folderElements = append(folderElements, folderName2)


@ -1,12 +1,16 @@
package sharepoint package sharepoint
import ( import (
"bytes"
"context" "context"
"io" "io"
kw "github.com/microsoft/kiota-serialization-json-go"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/observe"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
@ -63,6 +67,7 @@ func (sc *Collection) FullPath() path.Path {
} }
func (sc *Collection) Items() <-chan data.Stream { func (sc *Collection) Items() <-chan data.Stream {
go sc.populate(context.TODO())
return sc.data return sc.data
} }
@ -100,3 +105,62 @@ func (sc *Collection) finishPopulation(ctx context.Context, success int, totalBy
sc.fullPath.Folder()) sc.fullPath.Folder())
logger.Ctx(ctx).Debug(status.String()) logger.Ctx(ctx).Debug(status.String())
} }
// populate is a utility function that retrieves data from the backing store for a given collection
func (sc *Collection) populate(ctx context.Context) {
var (
success int
totalBytes, arrayLength int64
errs error
writer = kw.NewJsonSerializationWriter()
)
// TODO: Insert correct ID for CollectionProgress
colProgress, closer := observe.CollectionProgress("name", sc.fullPath.Category().String(), sc.fullPath.Folder())
go closer()
defer func() {
close(colProgress)
sc.finishPopulation(ctx, success, totalBytes, errs)
}()
// sc.jobs contains query = all of the site IDs.
for _, id := range sc.jobs {
// Retrieve list data from M365
lists, err := loadLists(ctx, sc.service, id)
if err != nil {
errs = support.WrapAndAppend(id, err, errs)
}
// Write Data and Send
for _, lst := range lists {
err = writer.WriteObjectValue("", lst)
if err != nil {
errs = support.WrapAndAppend(*lst.GetId(), err, errs)
continue
}
byteArray, err := writer.GetSerializedContent()
if err != nil {
errs = support.WrapAndAppend(*lst.GetId(), err, errs)
continue
}
writer.Close()
arrayLength = int64(len(byteArray))
if arrayLength > 0 {
totalBytes += arrayLength
success++
sc.data <- &Item{
id: *lst.GetId(),
data: io.NopCloser(bytes.NewReader(byteArray)),
info: sharePointListInfo(lst, arrayLength),
}
colProgress <- struct{}{}
}
}
}
}
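// Illustrative consumption sketch (not part of this file): Items() kicks off
// populate in the background and streams each serialized list to the caller:
//
//   for item := range col.Items() {
//       _ = item.UUID()      // the list ID
//       r := item.ToReader() // the serialized list content
//       _ = r
//   }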

View File

@ -12,7 +12,6 @@ import (
"github.com/alcionai/corso/src/internal/connector/mockconnector" "github.com/alcionai/corso/src/internal/connector/mockconnector"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
) )
@ -43,9 +42,6 @@ func (suite *SharePointCollectionSuite) TestSharePointDataReader_Valid() {
// SharePoint collection and to use the data stream channel. // SharePoint collection and to use the data stream channel.
func (suite *SharePointCollectionSuite) TestSharePointListCollection() { func (suite *SharePointCollectionSuite) TestSharePointListCollection() {
t := suite.T() t := suite.T()
ctx, flush := tester.NewContext()
defer flush()
ow := kw.NewJsonSerializationWriter() ow := kw.NewJsonSerializationWriter()
listing := mockconnector.GetMockList("Mock List") listing := mockconnector.GetMockList("Mock List")
@ -73,7 +69,6 @@ func (suite *SharePointCollectionSuite) TestSharePointListCollection() {
data: io.NopCloser(bytes.NewReader(byteArray)), data: io.NopCloser(bytes.NewReader(byteArray)),
info: sharePointListInfo(listing, int64(len(byteArray))), info: sharePointListInfo(listing, int64(len(byteArray))),
} }
col.finishPopulation(ctx, 0, 0, nil)
readItems := []data.Stream{} readItems := []data.Stream{}
for item := range col.Items() { for item := range col.Items() {

View File

@ -57,6 +57,12 @@ func DataCollections(
defer close(foldersComplete) defer close(foldersComplete)
switch scope.Category().PathType() { switch scope.Category().PathType() {
// TODO (path.ListsCategory): collect Lists in a follow-up PR
case path.ListsCategory:
return nil, fmt.Errorf("sharePoint list collections not supported")
case path.LibrariesCategory: case path.LibrariesCategory:
spcs, err := collectLibraries( spcs, err := collectLibraries(
ctx, ctx,

View File

@ -0,0 +1,52 @@
package sharepoint
import (
msgraphsdk "github.com/microsoftgraph/msgraph-sdk-go"
"github.com/pkg/errors"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/pkg/account"
)
type testService struct {
client msgraphsdk.GraphServiceClient
adapter msgraphsdk.GraphRequestAdapter
credentials account.M365Config
}
//------------------------------------------------------------
// Functions to comply with graph.Service Interface
//------------------------------------------------------------
func (ts *testService) Client() *msgraphsdk.GraphServiceClient {
return &ts.client
}
func (ts *testService) Adapter() *msgraphsdk.GraphRequestAdapter {
return &ts.adapter
}
func (ts *testService) ErrPolicy() bool {
return false
}
func createTestService(credentials account.M365Config) (*testService, error) {
adapter, err := graph.CreateAdapter(
credentials.AzureTenantID,
credentials.AzureClientID,
credentials.AzureClientSecret,
)
if err != nil {
return nil, errors.Wrap(err, "creating microsoft graph service for exchange")
}
service := testService{
adapter: *adapter,
client: *msgraphsdk.NewGraphServiceClient(adapter),
credentials: credentials,
}
return &service, nil
}

View File

@ -0,0 +1,284 @@
package sharepoint
import (
"context"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/microsoftgraph/msgraph-sdk-go/sites/item/lists"
"github.com/microsoftgraph/msgraph-sdk-go/sites/item/lists/item/columns"
"github.com/microsoftgraph/msgraph-sdk-go/sites/item/lists/item/contenttypes"
"github.com/microsoftgraph/msgraph-sdk-go/sites/item/lists/item/contenttypes/item/columnlinks"
tc "github.com/microsoftgraph/msgraph-sdk-go/sites/item/lists/item/contenttypes/item/columns"
"github.com/microsoftgraph/msgraph-sdk-go/sites/item/lists/item/items"
"github.com/pkg/errors"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/support"
)
// list.go contains additional functions to help retrieve SharePoint List data from M365.
// SharePoint lists represent lists on a site. They inherit additional properties from
// baseItem: https://learn.microsoft.com/en-us/graph/api/resources/baseitem?view=graph-rest-1.0
// The full details concerning SharePoint Lists can
// be found at: https://learn.microsoft.com/en-us/graph/api/resources/list?view=graph-rest-1.0
// Note that additional calls are required for relationships that exist outside of the object properties.
// loadLists is a utility function to populate List objects for a site.
// @param siteID the M365 ID that represents the SharePoint Site
// Makes additional calls to retrieve the following relationships:
// - Columns
// - ContentTypes
// - List Items
func loadLists(
ctx context.Context,
gs graph.Service,
siteID string,
) ([]models.Listable, error) {
var (
prefix = gs.Client().SitesById(siteID)
builder = prefix.Lists()
results = make([]models.Listable, 0)
errs error
)
for {
resp, err := builder.Get(ctx, nil)
if err != nil {
return nil, support.WrapAndAppend(support.ConnectorStackErrorTrace(err), err, errs)
}
for _, entry := range resp.GetValue() {
id := *entry.GetId()
cols, err := fetchColumns(ctx, gs, siteID, id, "")
if err != nil {
errs = support.WrapAndAppend(siteID, err, errs)
continue
}
entry.SetColumns(cols)
cTypes, err := fetchContentTypes(ctx, gs, siteID, id)
if err != nil {
errs = support.WrapAndAppend(siteID, err, errs)
continue
}
entry.SetContentTypes(cTypes)
lItems, err := fetchListItems(ctx, gs, siteID, id)
if err != nil {
errs = support.WrapAndAppend(siteID, err, errs)
continue
}
entry.SetItems(lItems)
results = append(results, entry)
}
if resp.GetOdataNextLink() == nil {
break
}
builder = lists.NewListsRequestBuilder(*resp.GetOdataNextLink(), gs.Adapter())
}
if errs != nil {
return nil, errs
}
return results, nil
}
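// Minimal usage sketch (assumes a caller that already holds a graph.Service gs
// and a siteID; not part of the package API):
//
//   lists, err := loadLists(ctx, gs, siteID)
//   if err != nil {
//       // aggregate or surface the error, e.g. via support.WrapAndAppend
//   }
//   for _, lst := range lists {
//       // each Listable arrives with its Columns, ContentTypes, and Items populated
//       _ = lst.GetId()
//   }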
// fetchListItems utility for retrieving ListItem data and the associated relationship
// data. Additional calls append data to the tracked items, and do not create additional collections.
// Additional Call:
// * Fields
func fetchListItems(
ctx context.Context,
gs graph.Service,
siteID, listID string,
) ([]models.ListItemable, error) {
var (
prefix = gs.Client().SitesById(siteID).ListsById(listID)
builder = prefix.Items()
itms = make([]models.ListItemable, 0)
errs error
)
for {
resp, err := builder.Get(ctx, nil)
if err != nil {
return nil, errors.Wrap(err, support.ConnectorStackErrorTrace(err))
}
for _, itm := range resp.GetValue() {
newPrefix := prefix.ItemsById(*itm.GetId())
fields, err := newPrefix.Fields().Get(ctx, nil)
if err != nil {
errs = errors.Wrap(err, support.ConnectorStackErrorTrace(err))
}
itm.SetFields(fields)
itms = append(itms, itm)
}
if resp.GetOdataNextLink() == nil {
break
}
builder = items.NewItemsRequestBuilder(*resp.GetOdataNextLink(), gs.Adapter())
}
if errs != nil {
return nil, errors.Wrap(errs, "fetchListItem unsuccessful")
}
return itms, nil
}
// fetchColumns utility function to return columns from a site.
// An additional call is required to check for details concerning the SourceColumn.
// For additional details: https://learn.microsoft.com/en-us/graph/api/resources/columndefinition?view=graph-rest-1.0
// TODO: Refactor on if/else (dadams39)
func fetchColumns(
ctx context.Context,
gs graph.Service,
siteID, listID, cTypeID string,
) ([]models.ColumnDefinitionable, error) {
cs := make([]models.ColumnDefinitionable, 0)
if len(cTypeID) == 0 {
builder := gs.Client().SitesById(siteID).ListsById(listID).Columns()
for {
resp, err := builder.Get(ctx, nil)
if err != nil {
return nil, support.WrapAndAppend(support.ConnectorStackErrorTrace(err), err, nil)
}
cs = append(cs, resp.GetValue()...)
if resp.GetOdataNextLink() == nil {
break
}
builder = columns.NewColumnsRequestBuilder(*resp.GetOdataNextLink(), gs.Adapter())
}
} else {
builder := gs.Client().SitesById(siteID).ListsById(listID).ContentTypesById(cTypeID).Columns()
for {
resp, err := builder.Get(ctx, nil)
if err != nil {
return nil, errors.Wrap(err, support.ConnectorStackErrorTrace(err))
}
cs = append(cs, resp.GetValue()...)
if resp.GetOdataNextLink() == nil {
break
}
builder = tc.NewColumnsRequestBuilder(*resp.GetOdataNextLink(), gs.Adapter())
}
}
return cs, nil
}
// fetchContentTypes retrieves all data for a content type. Additional queries are required
// for the following relationships:
// - ColumnLinks
// - Columns
// The following two are not included:
// - ColumnPositions
// - BaseTypes
// These relationships are excluded because they return the following error from the API:
// itemNotFound: Item not found: error status code received from the API
// Current as of github.com/microsoftgraph/msgraph-sdk-go v0.40.0
// TODO: Verify functionality after version upgrade, or remove (dadams39). Check stubs.
func fetchContentTypes(
ctx context.Context,
gs graph.Service,
siteID, listID string,
) ([]models.ContentTypeable, error) {
var (
cTypes = make([]models.ContentTypeable, 0)
builder = gs.Client().SitesById(siteID).ListsById(listID).ContentTypes()
errs error
)
for {
resp, err := builder.Get(ctx, nil)
if err != nil {
return nil, support.WrapAndAppend(support.ConnectorStackErrorTrace(err), err, errs)
}
for _, cont := range resp.GetValue() {
id := *cont.GetId()
links, err := fetchColumnLinks(ctx, gs, siteID, listID, id)
if err != nil {
errs = support.WrapAndAppend("unable to add column links to list", err, errs)
break
}
cont.SetColumnLinks(links)
// TODO: stub for columnPositions
cs, err := fetchColumns(ctx, gs, siteID, listID, id)
if err != nil {
errs = support.WrapAndAppend("unable to populate columns for contentType", err, errs)
}
cont.SetColumns(cs)
// TODO: stub for BaseTypes
cTypes = append(cTypes, cont)
}
if resp.GetOdataNextLink() == nil {
break
}
builder = contenttypes.NewContentTypesRequestBuilder(*resp.GetOdataNextLink(), gs.Adapter())
}
if errs != nil {
return nil, errs
}
return cTypes, nil
}
func fetchColumnLinks(
ctx context.Context,
gs graph.Service,
siteID, listID, cTypeID string,
) ([]models.ColumnLinkable, error) {
var (
builder = gs.Client().SitesById(siteID).ListsById(listID).ContentTypesById(cTypeID).ColumnLinks()
links = make([]models.ColumnLinkable, 0)
)
for {
resp, err := builder.Get(ctx, nil)
if err != nil {
return nil, errors.Wrap(err, support.ConnectorStackErrorTrace(err))
}
links = append(links, resp.GetValue()...)
if resp.GetOdataNextLink() == nil {
break
}
builder = columnlinks.NewColumnLinksRequestBuilder(*resp.GetOdataNextLink(), gs.Adapter())
}
return links, nil
}

View File

@ -0,0 +1,61 @@
package sharepoint
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account"
)
type SharePointSuite struct {
suite.Suite
creds account.M365Config
}
func (suite *SharePointSuite) SetupSuite() {
t := suite.T()
a := tester.NewM365Account(t)
m365, err := a.M365Config()
require.NoError(t, err)
suite.creds = m365
}
func TestSharePointSuite(t *testing.T) {
if err := tester.RunOnAny(
tester.CorsoCITests,
); err != nil {
t.Skip(err)
}
suite.Run(t, new(SharePointSuite))
}
// Test LoadList --> Retrieves all data from the backing store
// Functions tested:
// - fetchListItems()
// - fetchColumns()
// - fetchContentColumns()
// - fetchContentTypes()
// - fetchColumnLinks()
// TODO: upgrade past github.com/microsoftgraph/msgraph-sdk-go v0.40.0
// to verify whether these 2 calls are valid
// - fetchContentBaseTypes
// - fetchColumnPositions
func (suite *SharePointSuite) TestLoadList() {
ctx, flush := tester.NewContext()
defer flush()
t := suite.T()
service, err := createTestService(suite.creds)
require.NoError(t, err)
lists, err := loadLists(ctx, service, "root")
assert.NoError(t, err)
assert.Greater(t, len(lists), 0)
t.Logf("Length: %d\n", len(lists))
}

View File

@ -1,18 +1,9 @@
package support package support
import ( import (
"fmt"
"strconv"
"strings" "strings"
kw "github.com/microsoft/kiota-serialization-json-go"
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/pkg/errors"
)
var (
eventResponsableFields = []string{"responseType"}
eventRequestableFields = []string{"allowNewTimeProposals", "meetingRequestType", "responseRequested"}
) )
// CloneMessageableFields places data from original data into new message object. // CloneMessageableFields places data from original data into new message object.
@ -64,346 +55,6 @@ func ToMessage(orig models.Messageable) models.Messageable {
return aMessage return aMessage
} }
func SetEventMessageRequest(orig models.Messageable, adtl map[string]any) (models.EventMessageRequestable, error) {
aMessage := models.NewEventMessageRequest()
temp := CloneMessageableFields(orig, aMessage)
message, ok := temp.(models.EventMessageRequestable)
if !ok {
return nil, errors.New(*orig.GetId() + " failed to convert to eventMessageRequestable")
}
newMessage, err := SetAdditionalDataToEventMessage(adtl, message)
if err != nil {
return nil, errors.Wrap(err, *orig.GetId()+" eventMessageRequest could not set additional data")
}
additional, err := buildMapFromAdditional(eventRequestableFields, adtl)
if err != nil {
return nil, errors.Wrap(err, *orig.GetId()+" eventMessageRequest failed on method buildMapFromAdditional")
}
message, ok = newMessage.(models.EventMessageRequestable)
if !ok {
return nil, errors.New(*orig.GetId() + " failed to convert to eventMessageRequestable")
}
eventMessage, err := setEventRequestableFields(message, additional)
if err != nil {
return nil, errors.Wrap(err, "unable to set all fields for eventMessageRequestable")
}
return eventMessage, nil
}
func SetEventMessageResponse(orig models.Messageable, adtl map[string]any) (models.EventMessageResponseable, error) {
aMessage := models.NewEventMessageResponse()
temp := CloneMessageableFields(orig, aMessage)
message, ok := temp.(models.EventMessageResponseable)
if !ok {
return nil, errors.New(*orig.GetId() + " failed to convert to eventMessageRequestable")
}
newMessage, err := SetAdditionalDataToEventMessage(adtl, message)
if err != nil {
return nil, errors.Wrap(err, *orig.GetId()+" eventMessageResponse could not set additional data")
}
message, ok = newMessage.(models.EventMessageResponseable)
if !ok {
return nil, errors.New("unable to create event message responseable from " + *orig.GetId())
}
additional, err := buildMapFromAdditional(eventResponsableFields, adtl)
if err != nil {
return nil, errors.Wrap(err, *orig.GetId()+" eventMessageResponse failed on method buildMapFromAdditional")
}
for key, val := range additional {
switch key {
case "responseType":
temp, err := models.ParseResponseType(*val)
if err != nil {
return nil, errors.Wrap(err, *orig.GetId()+"failure to parse response type")
}
rType, ok := temp.(*models.ResponseType)
if !ok {
return nil, fmt.Errorf(
"%s : responseType not returned from models.ParseResponseType: %v\t%T",
*orig.GetId(),
temp,
temp,
)
}
message.SetResponseType(rType)
default:
return nil, errors.New(key + " not supported for setEventMessageResponse")
}
}
return message, nil
}
// ConvertFromMessageable temporary function. Converts incorrect cast of messageable object to known
// type until upstream can make the appropriate changes
func ConvertFromMessageable(adtl map[string]any, orig models.Messageable) (models.EventMessageable, error) {
var aType string
aPointer, ok := adtl["@odata.type"]
if !ok {
return nil, errors.New("unknown data type: no @odata.type field")
}
ptr, ok := aPointer.(*string)
if !ok {
return nil, errors.New("unknown map type encountered")
}
aType = *ptr
if aType == "#microsoft.graph.eventMessageRequest" {
eventRequest, err := SetEventMessageRequest(orig, adtl)
if err != nil {
return nil, err
}
eventRequest.SetId(orig.GetId())
return eventRequest, err
}
if aType == "#microsoft.graph.eventMessageResponse" {
eventMessage, err := SetEventMessageResponse(orig, adtl)
if err != nil {
return nil, err
}
eventMessage.SetId(orig.GetId())
return eventMessage, nil
}
return nil, errors.New("unknown data type: " + aType)
}
// buildMapFromAdditional returns a submap of map[string]*string from map[string]any
func buildMapFromAdditional(list []string, adtl map[string]any) (map[string]*string, error) {
returnMap := make(map[string]*string)
for _, entry := range list {
ptr, ok := adtl[entry]
if !ok {
continue
}
value, ok := ptr.(*string)
if !ok {
boolConvert, ok := ptr.(*bool)
if !ok {
return nil, errors.New("unsupported value type: key: " + entry + fmt.Sprintf(" with type: %T", ptr))
}
aBool := *boolConvert
boolString := strconv.FormatBool(aBool)
returnMap[entry] = &boolString
continue
}
returnMap[entry] = value
}
return returnMap, nil
}
func setEventRequestableFields(
em models.EventMessageRequestable,
adtl map[string]*string,
) (models.EventMessageRequestable, error) {
for key, value := range adtl {
switch key {
case "meetingRequestType":
temp, err := models.ParseMeetingRequestType(*value)
if err != nil {
return nil, errors.Wrap(err, *em.GetId()+": failed on models.ParseMeetingRequestType")
}
rType, ok := temp.(*models.MeetingRequestType)
if !ok {
return nil, errors.New(*em.GetId() + ": failed to set meeting request type")
}
em.SetMeetingRequestType(rType)
case "responseRequested":
boolValue, err := strconv.ParseBool(*value)
if err != nil {
return nil, errors.Wrap(err, *em.GetId()+": failed to set responseRequested")
}
em.SetResponseRequested(&boolValue)
case "allowNewTimeProposals":
boolValue, err := strconv.ParseBool(*value)
if err != nil {
return nil, errors.Wrap(err, *em.GetId()+": failed to set allowNewTimeProposals")
}
em.SetAllowNewTimeProposals(&boolValue)
}
}
return em, nil
}
// SetAdditionalDataToEventMessage sets shared fields for 2 types of EventMessage: Response and Request
func SetAdditionalDataToEventMessage(
adtl map[string]any,
newMessage models.EventMessageable,
) (models.EventMessageable, error) {
for key, entry := range adtl {
if key == "endDateTime" {
dateTime := models.NewDateTimeTimeZone()
mapped, ok := entry.(map[string]*kw.JsonParseNode)
if ok {
for key, val := range mapped {
node := *val
value, err := node.GetStringValue()
if err != nil {
return nil, errors.Wrapf(err, "could not parse string value for %s", key)
}
switch key {
case "dateTime":
dateTime.SetDateTime(value)
case "timeZone":
dateTime.SetTimeZone(value)
default:
return nil, errors.New("key not supported DateTime")
}
newMessage.SetEndDateTime(dateTime)
}
continue
}
}
if key == "startDateTime" {
dateTime := models.NewDateTimeTimeZone()
mapped, ok := entry.(map[string]*kw.JsonParseNode)
if ok {
for key, val := range mapped {
node := *val
value, err := node.GetStringValue()
if err != nil {
return nil, errors.Wrapf(err, "could not parse string value for %s", key)
}
switch key {
case "dateTime":
dateTime.SetDateTime(value)
case "timeZone":
dateTime.SetTimeZone(value)
default:
return nil, errors.New("key not supported DateTime")
}
newMessage.SetStartDateTime(dateTime)
}
continue
}
}
if key == "location" {
aLocation := models.NewLocation()
mapped, ok := entry.(map[string]*kw.JsonParseNode)
if ok {
for key, val := range mapped {
node := *val
value, err := node.GetStringValue()
if err != nil {
return nil, errors.Wrapf(err, "could not parse string value for key %s", key)
}
switch key {
case "displayName":
aLocation.SetDisplayName(value)
case "locationType":
temp, err := models.ParseLocationType(*value)
if err != nil {
return nil, errors.New("location type parse failure")
}
lType, ok := temp.(*models.LocationType)
if !ok {
return nil, errors.New("location type interface failure")
}
aLocation.SetLocationType(lType)
}
}
}
newMessage.SetLocation(aLocation)
}
value, ok := entry.(*string)
if ok {
switch key {
case "isAllDay":
boolValue, err := strconv.ParseBool(*value)
if err != nil {
return nil, err
}
newMessage.SetIsAllDay(&boolValue)
case "isDelegated":
boolValue, err := strconv.ParseBool(*value)
if err != nil {
return nil, err
}
newMessage.SetIsDelegated(&boolValue)
case "isOutOfDate":
boolValue, err := strconv.ParseBool(*value)
if err != nil {
return nil, err
}
newMessage.SetIsOutOfDate(&boolValue)
case "meetingMessageType":
temp, err := models.ParseMeetingMessageType(*value)
if err != nil {
return nil, err
}
mType, ok := temp.(*models.MeetingMessageType)
if !ok {
return nil, errors.New("failed to create meeting message type")
}
newMessage.SetMeetingMessageType(mType)
}
}
}
return newMessage, nil
}
// ToEventSimplified transforms an event to simplified restore format // ToEventSimplified transforms an event to simplified restore format
// To overcome some of the MS Graph API challenges, the event object is modified in the following ways: // To overcome some of the MS Graph API challenges, the event object is modified in the following ways:
// - Instead of adding attendees and generating spurious notifications, // - Instead of adding attendees and generating spurious notifications,

View File

@ -2,6 +2,7 @@ package data
import ( import (
"io" "io"
"time"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
@ -47,6 +48,11 @@ type StreamSize interface {
Size() int64 Size() int64
} }
// StreamModTime is used to provide the modified time of the stream's data.
type StreamModTime interface {
ModTime() time.Time
}
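// Illustrative sketch (hypothetical type, not part of this package): an item
// opts in to modification-time reporting by also implementing StreamModTime,
// and consumers discover it with a type assertion:
//
//   type timestampedItem struct {
//       modTime time.Time
//       // ... fields satisfying Stream ...
//   }
//
//   func (i timestampedItem) ModTime() time.Time { return i.modTime }
//
//   if smt, ok := item.(StreamModTime); ok {
//       _ = smt.ModTime()
//   }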
// ------------------------------------------------------------------------------------------------ // ------------------------------------------------------------------------------------------------
// functionality // functionality
// ------------------------------------------------------------------------------------------------ // ------------------------------------------------------------------------------------------------

View File

@ -19,6 +19,7 @@ import (
const ( const (
corsoVersion = "corso_version" corsoVersion = "corso_version"
repoID = "repo_id" repoID = "repo_id"
tenantID = "m365_tenant_hash"
// Event Keys // Event Keys
RepoInit = "repo_init" RepoInit = "repo_init"
@ -53,6 +54,7 @@ type Bus struct {
client analytics.Client client analytics.Client
repoID string // one-way hash that uniquely identifies the repo. repoID string // one-way hash that uniquely identifies the repo.
tenant string // one-way hash that uniquely identifies the tenant.
version string // the Corso release version version string // the Corso release version
} }
@ -66,8 +68,6 @@ func NewBus(ctx context.Context, s storage.Storage, tenID string, opts control.O
return Bus{}, nil return Bus{}, nil
} }
hash := repoHash(s, tenID)
envWK := os.Getenv("RUDDERSTACK_CORSO_WRITE_KEY") envWK := os.Getenv("RUDDERSTACK_CORSO_WRITE_KEY")
if len(envWK) > 0 { if len(envWK) > 0 {
RudderStackWriteKey = envWK RudderStackWriteKey = envWK
@ -96,7 +96,8 @@ func NewBus(ctx context.Context, s storage.Storage, tenID string, opts control.O
return Bus{ return Bus{
client: client, client: client,
repoID: hash, repoID: repoHash(s),
tenant: tenantHash(tenID),
version: "vTODO", // TODO: corso versioning implementation version: "vTODO", // TODO: corso versioning implementation
}, nil }, nil
} }
@ -117,6 +118,7 @@ func (b Bus) Event(ctx context.Context, key string, data map[string]any) {
props := analytics. props := analytics.
NewProperties(). NewProperties().
Set(repoID, b.repoID). Set(repoID, b.repoID).
Set(tenantID, b.tenant).
Set(corsoVersion, b.version) Set(corsoVersion, b.version)
for k, v := range data { for k, v := range data {
@ -128,7 +130,8 @@ func (b Bus) Event(ctx context.Context, key string, data map[string]any) {
err := b.client.Enqueue(analytics.Identify{ err := b.client.Enqueue(analytics.Identify{
UserId: b.repoID, UserId: b.repoID,
Traits: analytics.NewTraits(). Traits: analytics.NewTraits().
SetName(b.repoID), SetName(b.tenant).
Set(tenantID, b.tenant),
}) })
if err != nil { if err != nil {
logger.Ctx(ctx).Debugw("analytics event failure", "err", err) logger.Ctx(ctx).Debugw("analytics event failure", "err", err)
@ -162,9 +165,17 @@ func storageID(s storage.Storage) string {
return id return id
} }
func repoHash(s storage.Storage, tenID string) string { func repoHash(s storage.Storage) string {
return md5HashOf(storageID(s))
}
func tenantHash(tenID string) string {
return md5HashOf(tenID)
}
func md5HashOf(s string) string {
sum := md5.Sum( sum := md5.Sum(
[]byte(storageID(s) + tenID), []byte(s),
) )
return fmt.Sprintf("%x", sum) return fmt.Sprintf("%x", sum)
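// Illustrative example (hypothetical values): with storageID(s) == "prefix_bucket"
// and tenID == "tenant-123", the bus now reports
//
//   repoID: md5HashOf("prefix_bucket")
//   tenant: md5HashOf("tenant-123")
//
// rather than a single hash of the concatenated pair as before.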

View File

@ -10,6 +10,7 @@ import (
"github.com/kopia/kopia/repo/blob" "github.com/kopia/kopia/repo/blob"
"github.com/kopia/kopia/repo/compression" "github.com/kopia/kopia/repo/compression"
"github.com/kopia/kopia/repo/content" "github.com/kopia/kopia/repo/content"
"github.com/kopia/kopia/repo/manifest"
"github.com/kopia/kopia/snapshot" "github.com/kopia/kopia/snapshot"
"github.com/kopia/kopia/snapshot/policy" "github.com/kopia/kopia/snapshot/policy"
"github.com/pkg/errors" "github.com/pkg/errors"
@ -59,6 +60,8 @@ func IsRepoAlreadyExistsError(e error) bool {
return errors.As(e, &erae) return errors.As(e, &erae)
} }
var _ snapshotManager = &conn{}
type conn struct { type conn struct {
storage storage.Storage storage storage.Storage
repo.Repository repo.Repository
@ -379,3 +382,10 @@ func checkCompressor(compressor compression.Name) error {
return errors.Errorf("unknown compressor type %s", compressor) return errors.Errorf("unknown compressor type %s", compressor)
} }
func (w *conn) LoadSnapshots(
ctx context.Context,
ids []manifest.ID,
) ([]*snapshot.Manifest, error) {
return snapshot.LoadSnapshots(ctx, w.Repository, ids)
}

View File

@ -0,0 +1,239 @@
package kopia
import (
"context"
"sort"
"github.com/kopia/kopia/repo/manifest"
"github.com/kopia/kopia/snapshot"
"github.com/pkg/errors"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"
)
const (
// Kopia does not do comparisons properly for empty tags right now, so add a
// placeholder value to them.
defaultTagValue = "0"
// Kopia CLI prefixes all user tags with "tag:"[1]. Maintaining this will
// ensure we don't accidentally take reserved tags and that tags can be
// displayed with kopia CLI.
// (permalinks)
// [1] https://github.com/kopia/kopia/blob/05e729a7858a6e86cb48ba29fb53cb6045efce2b/cli/command_snapshot_create.go#L169
userTagPrefix = "tag:"
)
type snapshotManager interface {
FindManifests(
ctx context.Context,
tags map[string]string,
) ([]*manifest.EntryMetadata, error)
LoadSnapshots(ctx context.Context, ids []manifest.ID) ([]*snapshot.Manifest, error)
}
type ownersCats struct {
resourceOwners map[string]struct{}
serviceCats map[string]struct{}
}
func serviceCatTag(p path.Path) string {
return p.Service().String() + p.Category().String()
}
func makeTagKV(k string) (string, string) {
return userTagPrefix + k, defaultTagValue
}
// tagsFromStrings returns a map[string]string with tags for all ownersCats
// passed in. Currently uses placeholder values for each tag because there can
// be multiple instances of resource owners and categories in a single snapshot.
func tagsFromStrings(oc *ownersCats) map[string]string {
res := make(map[string]string, len(oc.serviceCats)+len(oc.resourceOwners))
for k := range oc.serviceCats {
tk, tv := makeTagKV(k)
res[tk] = tv
}
for k := range oc.resourceOwners {
tk, tv := makeTagKV(k)
res[tk] = tv
}
return res
}
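// Worked example (values assumed for illustration): given
//
//   oc := &ownersCats{
//       resourceOwners: map[string]struct{}{"user1": {}},
//       serviceCats:    map[string]struct{}{"exchangeemail": {}}, // normally serviceCatTag(p)
//   }
//
// tagsFromStrings(oc) yields
//
//   map[string]string{"tag:user1": "0", "tag:exchangeemail": "0"}
//
// i.e. every key gains the userTagPrefix and maps to defaultTagValue.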
// getLastIdx searches for manifests contained in both foundMans and metas
// and returns the index of the most recent complete manifest. If no complete
// manifest is in both lists, it returns -1.
func getLastIdx(
foundMans map[manifest.ID]*snapshot.Manifest,
metas []*manifest.EntryMetadata,
) int {
// Minor optimization: the current code seems to return the entries from
// earliest timestamp to latest (this is undocumented). Sort in the same
// fashion so that we don't incur a bunch of swaps.
sort.Slice(metas, func(i, j int) bool {
return metas[i].ModTime.Before(metas[j].ModTime)
})
// Search newest to oldest.
for i := len(metas) - 1; i >= 0; i-- {
m := foundMans[metas[i].ID]
if m == nil || len(m.IncompleteReason) > 0 {
continue
}
return i
}
return -1
}
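// Worked example (hypothetical data): with metas sorted oldest to newest as
// [m1, m2, m3], where foundMans holds complete manifests for m1 and m2 but has
// no entry for m3, getLastIdx returns 1: the newest entry that is both already
// loaded and complete. If no entry qualifies it returns -1.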
// manifestsSinceLastComplete searches through mans and returns the most recent
// complete manifest (if one exists) and possibly the most recent incomplete
// manifest. If the newest incomplete manifest is more recent than the newest
// complete manifest it is added to the returned list; otherwise no incomplete
// manifest is returned. Returns nil if mans contains no complete or incomplete
// manifests.
func manifestsSinceLastComplete(
mans []*snapshot.Manifest,
) []*snapshot.Manifest {
var (
res []*snapshot.Manifest
foundIncomplete = false
)
// Manifests should maintain the sort order of the original IDs that were used
// to fetch the data, but just in case sort oldest to newest.
mans = snapshot.SortByTime(mans, false)
for i := len(mans) - 1; i >= 0; i-- {
m := mans[i]
if len(m.IncompleteReason) > 0 {
if !foundIncomplete {
foundIncomplete = true
res = append(res, m)
}
continue
}
// Once we find a complete snapshot we're done, even if we haven't
// found an incomplete one yet.
res = append(res, m)
break
}
return res
}
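// Worked example (hypothetical data): for mans sorted oldest to newest as
// [complete@t1, incomplete@t2], the scan records the incomplete manifest at t2
// and then stops at the complete manifest at t1, returning both. For
// [incomplete@t1, complete@t2] only the complete manifest at t2 is returned,
// since the incomplete one is older.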
// fetchPrevManifests returns the most recent, as-yet unfound complete and
// (maybe) incomplete manifests in metas. If the most recent incomplete manifest
// is older than the most recent complete manifest, no incomplete manifest is
// returned. If only incomplete manifests exist, returns the most recent one.
// Returns no manifests if an error occurs.
func fetchPrevManifests(
ctx context.Context,
sm snapshotManager,
foundMans map[manifest.ID]*snapshot.Manifest,
tags map[string]string,
) ([]*snapshot.Manifest, error) {
metas, err := sm.FindManifests(ctx, tags)
if err != nil {
return nil, errors.Wrap(err, "fetching manifest metas by tag")
}
if len(metas) == 0 {
return nil, nil
}
lastCompleteIdx := getLastIdx(foundMans, metas)
// We have a complete cached snapshot and it's the most recent. No need
// to do anything else.
if lastCompleteIdx == len(metas)-1 {
return nil, nil
}
// TODO(ashmrtn): Remainder of the function can be simplified if we can inject
// different tags to the snapshot checkpoints than the complete snapshot.
// Fetch all manifests newer than the oldest complete snapshot. A little
// wasteful as we may also re-fetch the most recent incomplete manifest, but
// it reduces the complexity of returning the most recent incomplete manifest
// if it is newer than the most recent complete manifest.
ids := make([]manifest.ID, 0, len(metas)-(lastCompleteIdx+1))
for i := lastCompleteIdx + 1; i < len(metas); i++ {
ids = append(ids, metas[i].ID)
}
mans, err := sm.LoadSnapshots(ctx, ids)
if err != nil {
return nil, errors.Wrap(err, "fetching previous manifests")
}
return manifestsSinceLastComplete(mans), nil
}
// fetchPrevSnapshotManifests returns a set of manifests for complete and maybe
// incomplete snapshots for the given (resource owner, service, category)
// tuples. Up to two manifests can be returned per tuple: one complete and one
// incomplete. An incomplete manifest may be returned if it is newer than the
// newest complete manifest for the tuple. Manifests are deduped such that if
// multiple tuples match the same manifest it will only be returned once.
func fetchPrevSnapshotManifests(
ctx context.Context,
sm snapshotManager,
oc *ownersCats,
) []*snapshot.Manifest {
mans := map[manifest.ID]*snapshot.Manifest{}
// For each serviceCat/resource owner pair that we will be backing up, see if
// there's a previous incomplete snapshot and/or a previous complete snapshot
// we can pass in. Can be expanded to return more than the most recent
// snapshots, but may require more memory at runtime.
for serviceCat := range oc.serviceCats {
serviceTagKey, serviceTagValue := makeTagKV(serviceCat)
for resourceOwner := range oc.resourceOwners {
resourceOwnerTagKey, resourceOwnerTagValue := makeTagKV(resourceOwner)
tags := map[string]string{
serviceTagKey: serviceTagValue,
resourceOwnerTagKey: resourceOwnerTagValue,
}
found, err := fetchPrevManifests(ctx, sm, mans, tags)
if err != nil {
logger.Ctx(ctx).Warnw(
"fetching previous snapshot manifests for service/category/resource owner",
"error",
err,
"service/category",
serviceCat,
)
// Snapshot can still complete fine, just not as efficient.
continue
}
// If we found more recent snapshots then add them.
for _, m := range found {
mans[m.ID] = m
}
}
}
res := make([]*snapshot.Manifest, 0, len(mans))
for _, m := range mans {
res = append(res, m)
}
return res
}
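// Minimal usage sketch (caller names are assumptions): the backup wrapper builds
// an ownersCats from the collections being backed up and feeds the result to
// kopia's uploader as base snapshots:
//
//   oc := &ownersCats{
//       resourceOwners: map[string]struct{}{"user1": {}},
//       serviceCats:    map[string]struct{}{serviceCatTag(somePath): {}},
//   }
//   prevSnaps := fetchPrevSnapshotManifests(ctx, sm, oc)
//   // man, err = u.Upload(ctx, root, policyTree, sourceInfo, prevSnaps...)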

View File

@ -0,0 +1,540 @@
package kopia
import (
"context"
"testing"
"time"
"github.com/kopia/kopia/fs"
"github.com/kopia/kopia/repo/manifest"
"github.com/kopia/kopia/snapshot"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/path"
)
const (
testCompleteMan = false
testIncompleteMan = !testCompleteMan
)
var (
testT1 = time.Now()
testT2 = testT1.Add(1 * time.Hour)
testT3 = testT2.Add(1 * time.Hour)
testID1 = manifest.ID("snap1")
testID2 = manifest.ID("snap2")
testID3 = manifest.ID("snap3")
testMail = path.ExchangeService.String() + path.EmailCategory.String()
testEvents = path.ExchangeService.String() + path.EventsCategory.String()
testUser1 = "user1"
testUser2 = "user2"
testUser3 = "user3"
testAllUsersAllCats = &ownersCats{
resourceOwners: map[string]struct{}{
testUser1: {},
testUser2: {},
testUser3: {},
},
serviceCats: map[string]struct{}{
testMail: {},
testEvents: {},
},
}
testAllUsersMail = &ownersCats{
resourceOwners: map[string]struct{}{
testUser1: {},
testUser2: {},
testUser3: {},
},
serviceCats: map[string]struct{}{
testMail: {},
},
}
)
type manifestInfo struct {
// We don't currently use the values in the tags.
tags map[string]struct{}
metadata *manifest.EntryMetadata
man *snapshot.Manifest
}
func newManifestInfo(
id manifest.ID,
modTime time.Time,
incomplete bool,
tags ...string,
) manifestInfo {
incompleteStr := ""
if incomplete {
incompleteStr = "checkpoint"
}
structTags := make(map[string]struct{}, len(tags))
for _, t := range tags {
tk, _ := makeTagKV(t)
structTags[tk] = struct{}{}
}
return manifestInfo{
tags: structTags,
metadata: &manifest.EntryMetadata{
ID: id,
ModTime: modTime,
},
man: &snapshot.Manifest{
ID: id,
StartTime: fs.UTCTimestamp(modTime.UnixNano()),
IncompleteReason: incompleteStr,
},
}
}
type mockSnapshotManager struct {
data []manifestInfo
loadCallback func(ids []manifest.ID)
}
func matchesTags(mi manifestInfo, tags map[string]string) bool {
for k := range tags {
if _, ok := mi.tags[k]; !ok {
return false
}
}
return true
}
func (msm *mockSnapshotManager) FindManifests(
ctx context.Context,
tags map[string]string,
) ([]*manifest.EntryMetadata, error) {
if msm == nil {
return nil, assert.AnError
}
res := []*manifest.EntryMetadata{}
for _, mi := range msm.data {
if matchesTags(mi, tags) {
res = append(res, mi.metadata)
}
}
return res, nil
}
func (msm *mockSnapshotManager) LoadSnapshots(
ctx context.Context,
ids []manifest.ID,
) ([]*snapshot.Manifest, error) {
if msm == nil {
return nil, assert.AnError
}
// Allow checking set of IDs passed in.
if msm.loadCallback != nil {
msm.loadCallback(ids)
}
res := []*snapshot.Manifest{}
for _, id := range ids {
for _, mi := range msm.data {
if mi.man.ID == id {
res = append(res, mi.man)
}
}
}
return res, nil
}
type SnapshotFetchUnitSuite struct {
suite.Suite
}
func TestSnapshotFetchUnitSuite(t *testing.T) {
suite.Run(t, new(SnapshotFetchUnitSuite))
}
func (suite *SnapshotFetchUnitSuite) TestFetchPrevSnapshots() {
table := []struct {
name string
input *ownersCats
data []manifestInfo
// Use this to denote which manifests in data should be expected. Allows
// defining data in a table while not repeating things between data and
// expected.
expectedIdxs []int
// Expected number of times a manifest should try to be loaded from kopia.
// Used to check that caching is functioning properly.
expectedLoadCounts map[manifest.ID]int
}{
{
name: "AllOneSnapshot",
input: testAllUsersAllCats,
data: []manifestInfo{
newManifestInfo(
testID1,
testT1,
testCompleteMan,
testMail,
testEvents,
testUser1,
testUser2,
testUser3,
),
},
expectedIdxs: []int{0},
expectedLoadCounts: map[manifest.ID]int{
testID1: 1,
},
},
{
name: "SplitByCategory",
input: testAllUsersAllCats,
data: []manifestInfo{
newManifestInfo(
testID1,
testT1,
testCompleteMan,
testMail,
testUser1,
testUser2,
testUser3,
),
newManifestInfo(
testID2,
testT2,
testCompleteMan,
testEvents,
testUser1,
testUser2,
testUser3,
),
},
expectedIdxs: []int{0, 1},
expectedLoadCounts: map[manifest.ID]int{
testID1: 1,
testID2: 1,
},
},
{
name: "IncompleteNewerThanComplete",
input: testAllUsersMail,
data: []manifestInfo{
newManifestInfo(
testID1,
testT1,
testCompleteMan,
testMail,
testUser1,
testUser2,
testUser3,
),
newManifestInfo(
testID2,
testT2,
testIncompleteMan,
testMail,
testUser1,
testUser2,
testUser3,
),
},
expectedIdxs: []int{0, 1},
expectedLoadCounts: map[manifest.ID]int{
testID1: 1,
testID2: 3,
},
},
{
name: "IncompleteOlderThanComplete",
input: testAllUsersMail,
data: []manifestInfo{
newManifestInfo(
testID1,
testT1,
testIncompleteMan,
testMail,
testUser1,
testUser2,
testUser3,
),
newManifestInfo(
testID2,
testT2,
testCompleteMan,
testMail,
testUser1,
testUser2,
testUser3,
),
},
expectedIdxs: []int{1},
expectedLoadCounts: map[manifest.ID]int{
testID1: 1,
testID2: 1,
},
},
{
name: "OnlyIncomplete",
input: testAllUsersMail,
data: []manifestInfo{
newManifestInfo(
testID1,
testT1,
testIncompleteMan,
testMail,
testUser1,
testUser2,
testUser3,
),
},
expectedIdxs: []int{0},
expectedLoadCounts: map[manifest.ID]int{
testID1: 3,
},
},
{
name: "NewestComplete",
input: testAllUsersMail,
data: []manifestInfo{
newManifestInfo(
testID1,
testT1,
testCompleteMan,
testMail,
testUser1,
testUser2,
testUser3,
),
newManifestInfo(
testID2,
testT2,
testCompleteMan,
testMail,
testUser1,
testUser2,
testUser3,
),
},
expectedIdxs: []int{1},
expectedLoadCounts: map[manifest.ID]int{
testID1: 1,
testID2: 1,
},
},
{
name: "NewestIncomplete",
input: testAllUsersMail,
data: []manifestInfo{
newManifestInfo(
testID1,
testT1,
testIncompleteMan,
testMail,
testUser1,
testUser2,
testUser3,
),
newManifestInfo(
testID2,
testT2,
testIncompleteMan,
testMail,
testUser1,
testUser2,
testUser3,
),
},
expectedIdxs: []int{1},
expectedLoadCounts: map[manifest.ID]int{
testID1: 3,
testID2: 3,
},
},
{
name: "SomeCachedSomeNewer",
input: testAllUsersMail,
data: []manifestInfo{
newManifestInfo(
testID1,
testT1,
testCompleteMan,
testMail,
testUser1,
testUser2,
testUser3,
),
newManifestInfo(
testID2,
testT2,
testCompleteMan,
testMail,
testUser3,
),
},
expectedIdxs: []int{0, 1},
expectedLoadCounts: map[manifest.ID]int{
testID1: 2,
testID2: 1,
},
},
{
name: "SomeCachedSomeNewerIncomplete",
input: testAllUsersMail,
data: []manifestInfo{
newManifestInfo(
testID1,
testT1,
testCompleteMan,
testMail,
testUser1,
testUser2,
testUser3,
),
newManifestInfo(
testID2,
testT2,
testIncompleteMan,
testMail,
testUser3,
),
},
expectedIdxs: []int{0, 1},
expectedLoadCounts: map[manifest.ID]int{
testID1: 1,
testID2: 1,
},
},
{
name: "NoMatches",
input: testAllUsersMail,
data: nil,
expectedIdxs: nil,
// Stop failure for nil-map comparison.
expectedLoadCounts: map[manifest.ID]int{},
},
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
ctx, flush := tester.NewContext()
defer flush()
msm := &mockSnapshotManager{
data: test.data,
}
loadCounts := map[manifest.ID]int{}
msm.loadCallback = func(ids []manifest.ID) {
for _, id := range ids {
loadCounts[id]++
}
}
snaps := fetchPrevSnapshotManifests(ctx, msm, test.input)
expected := make([]*snapshot.Manifest, 0, len(test.expectedIdxs))
for _, i := range test.expectedIdxs {
expected = append(expected, test.data[i].man)
}
assert.ElementsMatch(t, expected, snaps)
// Need to manually check because we don't know the order the
// user/service/category labels will be iterated over. For some tests this
// could cause more loads than the ideal case.
assert.Len(t, loadCounts, len(test.expectedLoadCounts))
for id, count := range loadCounts {
assert.GreaterOrEqual(t, test.expectedLoadCounts[id], count)
}
})
}
}
// mockErrorSnapshotManager returns an error the first time LoadSnapshots and
// FindManifests are called. After that it passes the calls through to the
// contained snapshotManager.
type mockErrorSnapshotManager struct {
retFindErr bool
retLoadErr bool
sm snapshotManager
}
func (msm *mockErrorSnapshotManager) FindManifests(
ctx context.Context,
tags map[string]string,
) ([]*manifest.EntryMetadata, error) {
if !msm.retFindErr {
msm.retFindErr = true
return nil, assert.AnError
}
return msm.sm.FindManifests(ctx, tags)
}
func (msm *mockErrorSnapshotManager) LoadSnapshots(
ctx context.Context,
ids []manifest.ID,
) ([]*snapshot.Manifest, error) {
if !msm.retLoadErr {
msm.retLoadErr = true
return nil, assert.AnError
}
return msm.sm.LoadSnapshots(ctx, ids)
}
func (suite *SnapshotFetchUnitSuite) TestFetchPrevSnapshotsWorksWithErrors() {
ctx, flush := tester.NewContext()
defer flush()
t := suite.T()
input := testAllUsersMail
mockData := []manifestInfo{
newManifestInfo(
testID1,
testT1,
testCompleteMan,
testMail,
testUser1,
),
newManifestInfo(
testID2,
testT2,
testCompleteMan,
testMail,
testUser2,
),
newManifestInfo(
testID3,
testT3,
testCompleteMan,
testMail,
testUser3,
),
}
msm := &mockErrorSnapshotManager{
sm: &mockSnapshotManager{
data: mockData,
},
}
snaps := fetchPrevSnapshotManifests(ctx, msm, input)
// Only 1 snapshot should be chosen because the other two attempts fail.
// However, which one is returned is non-deterministic because maps are used.
assert.Len(t, snaps, 1)
}

View File

@ -7,6 +7,7 @@ import (
"runtime/trace" "runtime/trace"
"sync" "sync"
"sync/atomic" "sync/atomic"
"time"
"unsafe" "unsafe"
"github.com/hashicorp/go-multierror" "github.com/hashicorp/go-multierror"
@ -127,6 +128,8 @@ type BackupStats struct {
TotalUploadedBytes int64 TotalUploadedBytes int64
TotalFileCount int TotalFileCount int
CachedFileCount int
UncachedFileCount int
TotalDirectoryCount int TotalDirectoryCount int
IgnoredErrorCount int IgnoredErrorCount int
ErrorCount int ErrorCount int
@ -147,6 +150,8 @@ func manifestToStats(
TotalUploadedBytes: uploadCount.NumBytes, TotalUploadedBytes: uploadCount.NumBytes,
TotalFileCount: int(man.Stats.TotalFileCount), TotalFileCount: int(man.Stats.TotalFileCount),
CachedFileCount: int(man.Stats.CachedFiles),
UncachedFileCount: int(man.Stats.NonCachedFiles),
TotalDirectoryCount: int(man.Stats.TotalDirectoryCount), TotalDirectoryCount: int(man.Stats.TotalDirectoryCount),
IgnoredErrorCount: int(man.Stats.IgnoredErrorCount), IgnoredErrorCount: int(man.Stats.IgnoredErrorCount),
ErrorCount: int(man.Stats.ErrorCount), ErrorCount: int(man.Stats.ErrorCount),
@ -340,8 +345,14 @@ func getStreamItemFunc(
d := &itemDetails{info: ei.Info(), repoPath: itemPath} d := &itemDetails{info: ei.Info(), repoPath: itemPath}
progress.put(encodeAsPath(itemPath.PopFront().Elements()...), d) progress.put(encodeAsPath(itemPath.PopFront().Elements()...), d)
entry := virtualfs.StreamingFileFromReader( modTime := time.Now()
if smt, ok := e.(data.StreamModTime); ok {
modTime = smt.ModTime()
}
entry := virtualfs.StreamingFileWithModTimeFromReader(
encodeAsPath(e.UUID()), encodeAsPath(e.UUID()),
modTime,
&backupStreamReader{ &backupStreamReader{
version: serializationVersion, version: serializationVersion,
ReadCloser: e.ToReader(), ReadCloser: e.ToReader(),
@ -522,32 +533,6 @@ func (w Wrapper) BackupCollections(
return s, progress.deets, nil return s, progress.deets, nil
} }
type ownersCats struct {
resourceOwners map[string]struct{}
serviceCats map[string]struct{}
}
func serviceCatTag(p path.Path) string {
return p.Service().String() + p.Category().String()
}
// tagsFromStrings returns a map[string]string with the union of both maps
// passed in. Currently uses empty values for each tag because there can be
// multiple instances of resource owners and categories in a single snapshot.
func tagsFromStrings(oc *ownersCats) map[string]string {
res := make(map[string]string, len(oc.serviceCats)+len(oc.resourceOwners))
for k := range oc.serviceCats {
res[k] = ""
}
for k := range oc.resourceOwners {
res[k] = ""
}
return res
}
func (w Wrapper) makeSnapshotWithRoot( func (w Wrapper) makeSnapshotWithRoot(
ctx context.Context, ctx context.Context,
root fs.Directory, root fs.Directory,
@ -556,6 +541,8 @@ func (w Wrapper) makeSnapshotWithRoot(
) (*BackupStats, error) { ) (*BackupStats, error) {
var man *snapshot.Manifest var man *snapshot.Manifest
prevSnaps := fetchPrevSnapshotManifests(ctx, w.c, oc)
bc := &stats.ByteCounter{} bc := &stats.ByteCounter{}
err := repo.WriteSession( err := repo.WriteSession(
@ -595,7 +582,7 @@ func (w Wrapper) makeSnapshotWithRoot(
progress.UploadProgress = u.Progress progress.UploadProgress = u.Progress
u.Progress = progress u.Progress = progress
man, err = u.Upload(innerCtx, root, policyTree, si) man, err = u.Upload(innerCtx, root, policyTree, si, prevSnaps...)
if err != nil { if err != nil {
err = errors.Wrap(err, "uploading data") err = errors.Wrap(err, "uploading data")
logger.Ctx(innerCtx).Errorw("kopia backup", err) logger.Ctx(innerCtx).Errorw("kopia backup", err)

View File

@ -839,8 +839,6 @@ func (suite *KopiaIntegrationSuite) TearDownTest() {
} }
func (suite *KopiaIntegrationSuite) TestBackupCollections() { func (suite *KopiaIntegrationSuite) TestBackupCollections() {
t := suite.T()
collections := []data.Collection{ collections := []data.Collection{
mockconnector.NewMockExchangeCollection( mockconnector.NewMockExchangeCollection(
suite.testPath1, suite.testPath1,
@ -851,23 +849,56 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() {
42, 42,
), ),
} }
expectedTags := map[string]string{
serviceCatTag(suite.testPath1): "", baseTagKeys := []string{
suite.testPath1.ResourceOwner(): "", serviceCatTag(suite.testPath1),
serviceCatTag(suite.testPath2): "", suite.testPath1.ResourceOwner(),
suite.testPath2.ResourceOwner(): "", serviceCatTag(suite.testPath2),
suite.testPath2.ResourceOwner(),
}
expectedTags := map[string]string{}
for _, k := range baseTagKeys {
tk, tv := makeTagKV(k)
expectedTags[tk] = tv
} }
table := []struct {
name string
expectedUploadedFiles int
expectedCachedFiles int
}{
{
name: "Uncached",
expectedUploadedFiles: 47,
expectedCachedFiles: 0,
},
{
name: "Cached",
expectedUploadedFiles: 0,
expectedCachedFiles: 47,
},
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
stats, deets, err := suite.w.BackupCollections(suite.ctx, collections, path.ExchangeService) stats, deets, err := suite.w.BackupCollections(suite.ctx, collections, path.ExchangeService)
assert.NoError(t, err) assert.NoError(t, err)
assert.Equal(t, stats.TotalFileCount, 47)
assert.Equal(t, stats.TotalDirectoryCount, 6) assert.Equal(t, test.expectedUploadedFiles, stats.TotalFileCount, "total files")
assert.Equal(t, stats.IgnoredErrorCount, 0) assert.Equal(t, test.expectedUploadedFiles, stats.UncachedFileCount, "uncached files")
assert.Equal(t, stats.ErrorCount, 0) assert.Equal(t, test.expectedCachedFiles, stats.CachedFileCount, "cached files")
assert.Equal(t, 6, stats.TotalDirectoryCount)
assert.Equal(t, 0, stats.IgnoredErrorCount)
assert.Equal(t, 0, stats.ErrorCount)
assert.False(t, stats.Incomplete) assert.False(t, stats.Incomplete)
assert.Equal(t, path.ExchangeService.String(), deets.Tags[model.ServiceTag]) assert.Equal(t, path.ExchangeService.String(), deets.Tags[model.ServiceTag])
// 47 file and 6 folder entries. // 47 file and 6 folder entries.
assert.Len(t, deets.Entries, 47+6) assert.Len(
t,
deets.Entries,
test.expectedUploadedFiles+test.expectedCachedFiles+6,
)
checkSnapshotTags( checkSnapshotTags(
t, t,
@ -876,6 +907,8 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() {
expectedTags, expectedTags,
stats.SnapshotID, stats.SnapshotID,
) )
})
}
} }
func (suite *KopiaIntegrationSuite) TestRestoreAfterCompressionChange() { func (suite *KopiaIntegrationSuite) TestRestoreAfterCompressionChange() {

View File

@ -21,6 +21,7 @@ import (
"github.com/alcionai/corso/src/pkg/backup" "github.com/alcionai/corso/src/pkg/backup"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/store" "github.com/alcionai/corso/src/pkg/store"
) )
@ -172,6 +173,11 @@ func (op *BackupOperation) Run(ctx context.Context) (err error) {
} }
backupCh <- struct{}{} backupCh <- struct{}{}
logger.Ctx(ctx).Debugf(
"Backed up %d directories and %d files",
opStats.k.TotalDirectoryCount, opStats.k.TotalFileCount,
)
opStats.started = true opStats.started = true
opStats.gc = gc.AwaitStatus() opStats.gc = gc.AwaitStatus()

View File

@ -95,6 +95,8 @@ type Printable struct {
Status string `json:"status"` Status string `json:"status"`
Version string `json:"version"` Version string `json:"version"`
Selectors selectors.Printable `json:"selectors"` Selectors selectors.Printable `json:"selectors"`
BytesRead int64 `json:"bytesRead"`
BytesUploaded int64 `json:"bytesUploaded"`
} }
// MinimumPrintable reduces the Backup to its minimally printable details. // MinimumPrintable reduces the Backup to its minimally printable details.
@ -106,6 +108,8 @@ func (b Backup) MinimumPrintable() any {
Status: b.Status, Status: b.Status,
Version: "0", Version: "0",
Selectors: b.Selectors.ToPrintable(), Selectors: b.Selectors.ToPrintable(),
BytesRead: b.BytesRead,
BytesUploaded: b.BytesUploaded,
} }
} }

View File

@ -45,6 +45,8 @@ func stubBackup(t time.Time) backup.Backup {
WriteErrors: errors.New("1"), WriteErrors: errors.New("1"),
}, },
ReadWrites: stats.ReadWrites{ ReadWrites: stats.ReadWrites{
BytesRead: 301,
BytesUploaded: 301,
ItemsRead: 1, ItemsRead: 1,
ItemsWritten: 1, ItemsWritten: 1,
}, },
@ -98,4 +100,7 @@ func (suite *BackupSuite) TestBackup_MinimumPrintable() {
bselp := b.Selectors.ToPrintable() bselp := b.Selectors.ToPrintable()
assert.Equal(t, bselp, result.Selectors, "selectors") assert.Equal(t, bselp, result.Selectors, "selectors")
assert.Equal(t, bselp.Resources(), result.Selectors.Resources(), "selector resources") assert.Equal(t, bselp.Resources(), result.Selectors.Resources(), "selector resources")
assert.Equal(t, b.BytesRead, result.BytesRead, "size")
assert.Equal(t, b.BytesUploaded, result.BytesUploaded, "stored size")
} }

View File

@ -28,10 +28,14 @@ const (
IdentityValue IdentityValue
// "foo" is a prefix of "foobarbaz" // "foo" is a prefix of "foobarbaz"
TargetPrefixes TargetPrefixes
// "baz" is a suffix of "foobarbaz"
TargetSuffixes
// "foo" equals any complete element prefix of "foo/bar/baz" // "foo" equals any complete element prefix of "foo/bar/baz"
TargetPathPrefix TargetPathPrefix
// "foo" equals any complete element in "foo/bar/baz" // "foo" equals any complete element in "foo/bar/baz"
TargetPathContains TargetPathContains
// "baz" equals any complete element suffix of "foo/bar/baz"
TargetPathSuffix
) )
func norm(s string) string { func norm(s string) string {
@ -161,6 +165,18 @@ func NotPrefix(target string) Filter {
return newFilter(TargetPrefixes, target, true) return newFilter(TargetPrefixes, target, true)
} }
// Suffix creates a filter where Compare(v) is true if
// target.Suffix(v)
func Suffix(target string) Filter {
return newFilter(TargetSuffixes, target, false)
}
// NotSuffix creates a filter where Compare(v) is true if
// !target.Suffix(v)
func NotSuffix(target string) Filter {
return newFilter(TargetSuffixes, target, true)
}
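// Illustrative usage (mirrors the unit tests): suffix filters are
// case-insensitive checks against the end of the raw input string.
//
//   f := Suffix("folderB")
//   f.Compare("folderA/folderB") // true
//   f.Compare("folderB/folder1") // false
//   NotSuffix("folderB").Compare("folderA/folderB") // false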
// PathPrefix creates a filter where Compare(v) is true if // PathPrefix creates a filter where Compare(v) is true if
// target.Prefix(v) && // target.Prefix(v) &&
// split(target)[i].Equals(split(v)[i]) for _all_ i in 0..len(target)-1 // split(target)[i].Equals(split(v)[i]) for _all_ i in 0..len(target)-1
@ -241,6 +257,44 @@ func NotPathContains(targets []string) Filter {
return newSliceFilter(TargetPathContains, targets, tgts, true) return newSliceFilter(TargetPathContains, targets, tgts, true)
} }
// PathSuffix creates a filter where Compare(v) is true if
// target.Suffix(v) &&
// split(target)[i].Equals(split(v)[i]) for _all_ i in 0..len(target)-1
// ex: target "/bar/baz" returns true for input "/foo/bar/baz",
// but false for "/foobar/baz"
//
// Unlike single-target filters, this filter accepts a
// slice of targets, will compare an input against each target
// independently, and returns true if one or more of the
// comparisons succeed.
func PathSuffix(targets []string) Filter {
tgts := make([]string, len(targets))
for i := range targets {
tgts[i] = normPathElem(targets[i])
}
return newSliceFilter(TargetPathSuffix, targets, tgts, false)
}
// NotPathSuffix creates a filter where Compare(v) is true if
// !target.Suffix(v) ||
// !split(target)[i].Equals(split(v)[i]) for _any_ i in 0..len(target)-1
// ex: target "/bar/baz" returns false for input "/foo/bar/baz",
// but true for "/foobar/baz"
//
// Unlike single-target filters, this filter accepts a
// slice of targets, will compare an input against each target
// independently, and returns true if one or more of the
// comparisons succeed.
func NotPathSuffix(targets []string) Filter {
tgts := make([]string, len(targets))
for i := range targets {
tgts[i] = normPathElem(targets[i])
}
return newSliceFilter(TargetPathSuffix, targets, tgts, true)
}
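// Illustrative usage (mirrors the unit tests): path suffix filters only match
// complete trailing path elements, and accept a slice of targets where any
// single match is sufficient.
//
//   f := PathSuffix([]string{"fB"})
//   f.Compare("/fA/fB")  // true: "fB" is a complete trailing element
//   f.Compare("/fA/ffB") // false: partial element matches are rejected
//   PathSuffix([]string{"foo", "fb"}).Compare("/fA/fB") // true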
// newFilter is the standard filter constructor. // newFilter is the standard filter constructor.
func newFilter(c comparator, target string, negate bool) Filter { func newFilter(c comparator, target string, negate bool) Filter {
return Filter{ return Filter{
@ -302,12 +356,17 @@ func (f Filter) Compare(input string) bool {
cmp = in cmp = in
case TargetPrefixes: case TargetPrefixes:
cmp = prefixed cmp = prefixed
case TargetSuffixes:
cmp = suffixed
case TargetPathPrefix: case TargetPathPrefix:
cmp = pathPrefix cmp = pathPrefix
hasSlice = true hasSlice = true
case TargetPathContains: case TargetPathContains:
cmp = pathContains cmp = pathContains
hasSlice = true hasSlice = true
case TargetPathSuffix:
cmp = pathSuffix
hasSlice = true
case Passes: case Passes:
return true return true
case Fails: case Fails:
@ -364,6 +423,11 @@ func prefixed(target, input string) bool {
return strings.HasPrefix(input, target) return strings.HasPrefix(input, target)
} }
// true if the input ends with the target (i.e. target is a suffix of input).
func suffixed(target, input string) bool {
return strings.HasSuffix(input, target)
}
// true if target is an _element complete_ prefix match // true if target is an _element complete_ prefix match
// on the input. Element complete means we do not // on the input. Element complete means we do not
// succeed on partial element matches (ex: "/foo" does // succeed on partial element matches (ex: "/foo" does
@ -393,6 +457,20 @@ func pathContains(target, input string) bool {
return strings.Contains(normPathElem(input), target) return strings.Contains(normPathElem(input), target)
} }
// true if target is an _element complete_ suffix match
// on the input. Element complete means we do not
// succeed on partial element matches (ex: "/bar" does
// not match "/foobar").
//
// As a precondition, assumes the target value has been
// passed through normPathElem().
//
// The input is assumed to be the complete path that may
// have the target as a suffix.
func pathSuffix(target, input string) bool {
return strings.HasSuffix(normPathElem(input), target)
}
// ---------------------------------------------------------------------------------------------------- // ----------------------------------------------------------------------------------------------------
// Helpers // Helpers
// ---------------------------------------------------------------------------------------------------- // ----------------------------------------------------------------------------------------------------
@ -405,8 +483,10 @@ var prefixString = map[comparator]string{
TargetContains: "cont:", TargetContains: "cont:",
TargetIn: "in:", TargetIn: "in:",
TargetPrefixes: "pfx:", TargetPrefixes: "pfx:",
TargetSuffixes: "sfx:",
TargetPathPrefix: "pathPfx:", TargetPathPrefix: "pathPfx:",
TargetPathContains: "pathCont:", TargetPathContains: "pathCont:",
TargetPathSuffix: "pathSfx:",
} }
func (f Filter) String() string { func (f Filter) String() string {

View File

@ -206,6 +206,31 @@ func (suite *FiltersSuite) TestPrefixes() {
} }
} }
func (suite *FiltersSuite) TestSuffixes() {
target := "folderB"
f := filters.Suffix(target)
nf := filters.NotSuffix(target)
table := []struct {
name string
input string
expectF assert.BoolAssertionFunc
expectNF assert.BoolAssertionFunc
}{
{"Exact match - same case", "folderB", assert.True, assert.False},
{"Exact match - different case", "Folderb", assert.True, assert.False},
{"Suffix match - same case", "folderA/folderB", assert.True, assert.False},
{"Suffix match - different case", "Foldera/folderb", assert.True, assert.False},
{"Should not match substring", "folderB/folder1", assert.False, assert.True},
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
test.expectF(t, f.Compare(test.input), "filter")
test.expectNF(t, nf.Compare(test.input), "negated filter")
})
}
}
func (suite *FiltersSuite) TestPathPrefix() { func (suite *FiltersSuite) TestPathPrefix() {
table := []struct { table := []struct {
name string name string
@ -360,3 +385,79 @@ func (suite *FiltersSuite) TestPathContains_NormalizedTargets() {
}) })
} }
} }
func (suite *FiltersSuite) TestPathSuffix() {
table := []struct {
name string
targets []string
input string
expectF assert.BoolAssertionFunc
expectNF assert.BoolAssertionFunc
}{
{"Exact - same case", []string{"fA"}, "/fA", assert.True, assert.False},
{"Exact - different case", []string{"fa"}, "/fA", assert.True, assert.False},
{"Suffix - same case", []string{"fB"}, "/fA/fB", assert.True, assert.False},
{"Suffix - different case", []string{"fb"}, "/fA/fB", assert.True, assert.False},
{"Exact - multiple folders", []string{"fA/fB"}, "/fA/fB", assert.True, assert.False},
{"Suffix - single folder partial", []string{"f"}, "/fA/fB", assert.False, assert.True},
{"Suffix - multi folder partial", []string{"A/fB"}, "/fA/fB", assert.False, assert.True},
{"Target Longer - single folder", []string{"fA"}, "/f", assert.False, assert.True},
{"Target Longer - multi folder", []string{"fA/fB"}, "/fA/f", assert.False, assert.True},
{"Not suffix - single folder", []string{"fA"}, "/af", assert.False, assert.True},
{"Not suffix - multi folder", []string{"fA/fB"}, "/Af/fB", assert.False, assert.True},
{"Exact - target variations - none", []string{"fA"}, "/fA", assert.True, assert.False},
{"Exact - target variations - prefix", []string{"/fA"}, "/fA", assert.True, assert.False},
{"Exact - target variations - suffix", []string{"fA/"}, "/fA", assert.True, assert.False},
{"Exact - target variations - both", []string{"/fA/"}, "/fA", assert.True, assert.False},
{"Exact - input variations - none", []string{"fA"}, "fA", assert.True, assert.False},
{"Exact - input variations - prefix", []string{"fA"}, "/fA", assert.True, assert.False},
{"Exact - input variations - suffix", []string{"fA"}, "fA/", assert.True, assert.False},
{"Exact - input variations - both", []string{"fA"}, "/fA/", assert.True, assert.False},
{"Suffix - target variations - none", []string{"fb"}, "/fA/fb", assert.True, assert.False},
{"Suffix - target variations - prefix", []string{"/fb"}, "/fA/fb", assert.True, assert.False},
{"Suffix - target variations - suffix", []string{"fb/"}, "/fA/fb", assert.True, assert.False},
{"Suffix - target variations - both", []string{"/fb/"}, "/fA/fb", assert.True, assert.False},
{"Suffix - input variations - none", []string{"fb"}, "fA/fb", assert.True, assert.False},
{"Suffix - input variations - prefix", []string{"fb"}, "/fA/fb", assert.True, assert.False},
{"Suffix - input variations - suffix", []string{"fb"}, "fA/fb/", assert.True, assert.False},
{"Suffix - input variations - both", []string{"fb"}, "/fA/fb/", assert.True, assert.False},
{"Slice - one matches", []string{"foo", "fa/f", "fb"}, "/fA/fb", assert.True, assert.True},
{"Slice - none match", []string{"foo", "fa/f", "f"}, "/fA/fb", assert.False, assert.True},
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
f := filters.PathSuffix(test.targets)
nf := filters.NotPathSuffix(test.targets)
test.expectF(t, f.Compare(test.input), "filter")
test.expectNF(t, nf.Compare(test.input), "negated filter")
})
}
}
func (suite *FiltersSuite) TestPathSuffix_NormalizedTargets() {
table := []struct {
name string
targets []string
expect []string
}{
{"Single - no slash", []string{"fA"}, []string{"/fA/"}},
{"Single - pre slash", []string{"/fA"}, []string{"/fA/"}},
{"Single - suff slash", []string{"fA/"}, []string{"/fA/"}},
{"Single - both slashes", []string{"/fA/"}, []string{"/fA/"}},
{"Multipath - no slash", []string{"fA/fB"}, []string{"/fA/fB/"}},
{"Multipath - pre slash", []string{"/fA/fB"}, []string{"/fA/fB/"}},
{"Multipath - suff slash", []string{"fA/fB/"}, []string{"/fA/fB/"}},
{"Multipath - both slashes", []string{"/fA/fB/"}, []string{"/fA/fB/"}},
{"Multi input - no slash", []string{"fA", "fB"}, []string{"/fA/", "/fB/"}},
{"Multi input - pre slash", []string{"/fA", "/fB"}, []string{"/fA/", "/fB/"}},
{"Multi input - suff slash", []string{"fA/", "fB/"}, []string{"/fA/", "/fB/"}},
{"Multi input - both slashes", []string{"/fA/", "/fB/"}, []string{"/fA/", "/fB/"}},
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
f := filters.PathSuffix(test.targets)
assert.Equal(t, test.expect, f.NormalizedTargets)
})
}
}

View File

@ -16,6 +16,8 @@ var (
// logging level flag // logging level flag
// TODO: infer default based on environment. // TODO: infer default based on environment.
llFlag = "info" llFlag = "info"
readableOutput bool
) )
type logLevel int type logLevel int
@ -28,7 +30,10 @@ const (
Disabled Disabled
) )
const logLevelFN = "log-level" const (
logLevelFN = "log-level"
readableLogsFN = "readable-logs"
)
// adds the persistent flag --log-level to the provided command. // adds the persistent flag --log-level to the provided command.
// defaults to "info". // defaults to "info".
@ -37,6 +42,12 @@ const logLevelFN = "log-level"
func AddLogLevelFlag(parent *cobra.Command) { func AddLogLevelFlag(parent *cobra.Command) {
fs := parent.PersistentFlags() fs := parent.PersistentFlags()
fs.StringVar(&llFlag, logLevelFN, "info", "set the log level to debug|info|warn|error") fs.StringVar(&llFlag, logLevelFN, "info", "set the log level to debug|info|warn|error")
fs.Bool(
readableLogsFN, false,
"minimizes log output for console readability: removes the file and date, colors the level")
//nolint:errcheck
fs.MarkHidden(readableLogsFN)
} }
// Due to races between the lazy evaluation of flags in cobra and the need to init logging // Due to races between the lazy evaluation of flags in cobra and the need to init logging
@ -46,6 +57,7 @@ func PreloadLogLevel() string {
fs := pflag.NewFlagSet("seed-logger", pflag.ContinueOnError) fs := pflag.NewFlagSet("seed-logger", pflag.ContinueOnError)
fs.ParseErrorsWhitelist.UnknownFlags = true fs.ParseErrorsWhitelist.UnknownFlags = true
fs.String(logLevelFN, "info", "set the log level to debug|info|warn|error") fs.String(logLevelFN, "info", "set the log level to debug|info|warn|error")
fs.BoolVar(&readableOutput, readableLogsFN, false, "minimizes log output: removes the file and date, colors the level")
// prevents overriding the corso/cobra help processor // prevents overriding the corso/cobra help processor
fs.BoolP("help", "h", false, "") fs.BoolP("help", "h", false, "")
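The comment above describes why the log flags get a pre-pass before cobra parses anything; a minimal standalone sketch of that seed-flagset trick follows (the helper name is illustrative, not from this change):

```go
package main

import (
	"fmt"
	"os"

	"github.com/spf13/pflag"
)

// preloadLogFlags is a hypothetical helper mirroring PreloadLogLevel above:
// it parses only the logging flags out of the raw args, ignoring every flag
// it doesn't recognize, so logging can be configured before cobra runs.
func preloadLogFlags(args []string) (level string, readable bool) {
	fs := pflag.NewFlagSet("seed-logger", pflag.ContinueOnError)
	fs.ParseErrorsWhitelist.UnknownFlags = true

	fs.StringVar(&level, "log-level", "info", "")
	fs.BoolVar(&readable, "readable-logs", false, "")
	fs.BoolP("help", "h", false, "") // keep -h from short-circuiting the pre-pass

	// Parse errors are deliberately ignored; cobra reports real flag
	// problems during its own parse later on.
	_ = fs.Parse(args)

	return level, readable
}

func main() {
	level, readable := preloadLogFlags(os.Args[1:])
	fmt.Println("log level:", level, "readable:", readable)
}
```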
@ -111,7 +123,15 @@ func genLogger(level logLevel) (*zapcore.Core, *zap.SugaredLogger) {
cfg.Level = zap.NewAtomicLevelAt(zapcore.FatalLevel) cfg.Level = zap.NewAtomicLevelAt(zapcore.FatalLevel)
} }
lgr, err = cfg.Build() opts := []zap.Option{}
if readableOutput {
opts = append(opts, zap.WithCaller(false), zap.AddStacktrace(zapcore.DPanicLevel))
cfg.EncoderConfig.EncodeTime = zapcore.TimeEncoderOfLayout("15:04:05.00")
cfg.EncoderConfig.EncodeLevel = zapcore.CapitalColorLevelEncoder
}
lgr, err = cfg.Build(opts...)
} else { } else {
lgr, err = zap.NewProduction() lgr, err = zap.NewProduction()
} }
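As a rough illustration of what the readable-output branch above produces, a standalone zap configuration along the same lines, assuming a development-style base config:

```go
package main

import (
	"go.uber.org/zap"
	"go.uber.org/zap/zapcore"
)

// newReadableLogger sketches the --readable-logs behavior: no caller
// annotations, stack traces only at DPanic and above, a short timestamp,
// and colored level names for console output.
func newReadableLogger() (*zap.SugaredLogger, error) {
	cfg := zap.NewDevelopmentConfig()
	cfg.EncoderConfig.EncodeTime = zapcore.TimeEncoderOfLayout("15:04:05.00")
	cfg.EncoderConfig.EncodeLevel = zapcore.CapitalColorLevelEncoder

	lgr, err := cfg.Build(
		zap.WithCaller(false),
		zap.AddStacktrace(zapcore.DPanicLevel),
	)
	if err != nil {
		return nil, err
	}

	return lgr.Sugar(), nil
}

func main() {
	log, err := newReadableLogger()
	if err != nil {
		panic(err)
	}
	defer func() { _ = log.Sync() }()

	log.Infow("readable console logging enabled", "level", "info")
}
```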

View File

@ -3,28 +3,26 @@ LABEL MAINTAINER="Niraj Tolia"
ARG DEBIAN_FRONTEND=noninteractive ARG DEBIAN_FRONTEND=noninteractive
WORKDIR /usr/src/ # NOTE for lines 13,15: update in CI when updating
RUN apt-get -y update && apt-get -y install gpg emacs curl zip git make \
RUN apt-get -y update && apt-get -y install curl zip make git emacs \
&& curl -fsSL https://deb.nodesource.com/setup_current.x | bash - \ && curl -fsSL https://deb.nodesource.com/setup_current.x | bash - \
&& apt-get -y install nodejs \ && apt-get -y install nodejs \
&& apt-get autoclean \ && apt-get autoclean \
&& npm install -g gulp \
&& corepack enable \
&& node --version \ && node --version \
&& npm --version \ && npm --version \
&& yarn --version \ && cd /tmp && curl -O -L https://github.com/errata-ai/vale/releases/download/v2.20.1/vale_2.20.1_Linux_64-bit.tar.gz \
&& gulp --version && tar -xvzf vale_2.20.1_Linux_64-bit.tar.gz -C /usr/bin vale \
&& npm install -g markdownlint-cli@0.32.2 \
RUN curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" && \ && curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" \
unzip awscliv2.zip && /bin/bash /usr/src/aws/install && unzip awscliv2.zip && /bin/bash aws/install && rm -rf awscliv2.zip aws
WORKDIR /usr/src WORKDIR /usr/src
COPY package.json yarn.lock ./ COPY package.json package-lock.json* ./
RUN yarn install \ RUN npm ci \
&& yarn cache clean \ && npm cache clean --force \
&& rm -f package.json yarn.lock && rm -f package.json package-lock.json*
ENV PATH /usr/src/node_modules/.bin:$PATH ENV PATH /usr/src/node_modules/.bin:$PATH
ENV NODE_MODULES_PATH ../
WORKDIR /usr/src/website WORKDIR /usr/src/website
CMD ["npm", "start", "--", "--host", "0.0.0.0"]

View File

@ -1,34 +1,69 @@
.PHONY: buildimage dev shell build publish sync clean .PHONY: buildimage build dev shell check genclidocs _validatemdgen publish sync
WEBC := docker run --rm -it --init -p 5050:3000 -p 5051:3001 \ CORSO_BUILD_DIR := /tmp/.corsobuild
-v ${PWD}:/usr/src/website corso/website:latest CORSO_BUILD_CACHE := ${CORSO_BUILD_DIR}/cache
ifeq (${CI},true) CORSO_BUILD_MOD := ${CORSO_BUILD_DIR}/mod
# Cannot run in interactive/tty mode in CI CORSO_BUILD_BIN := ${CORSO_BUILD_DIR}/bin
WEBC := docker run --rm --init -p 5050:3000 -p 5051:3001 \ CORSO_REPO := /go/src/github.com/alcionai/corso
-v ${PWD}:/usr/src/website corso/website:latest CORSO_LOCAL_PATH := $(shell git rev-parse --show-toplevel)
endif GIT_SHA := $(shell git rev-parse --short HEAD)
WEBC := docker run --rm -it -p 3000:3000 -v ${PWD}:/usr/src/website --env CORSO_VERSION=unreleased-${GIT_SHA} corso/website
CBASE := docker run --rm -it \
-v ${CORSO_LOCAL_PATH}:${CORSO_REPO} -v ${CORSO_BUILD_DIR}:${CORSO_BUILD_DIR} \
--env GOCACHE=${CORSO_BUILD_CACHE} --env GOMODCACHE=${CORSO_BUILD_MOD} --env GOTMPDIR=${CORSO_BUILD_DIR} \
--workdir ${CORSO_REPO}/src
GOC := ${CBASE} golang:1.18
GOBASHC := ${CBASE} --entrypoint bash golang:1.18
MDGEN_SRC := ${CORSO_REPO}/src/cmd/mdgen/mdgen.go
MDGEN_BINARY := ${CORSO_BUILD_BIN}/mdgen
CLI_DOCS := ${CORSO_REPO}/website/docs/cli
buildimage: clean buildimage:
docker build -t "corso/website:latest" . docker build -t "corso/website:latest" .
dev: dev: genclidocs
$(WEBC) gulp $(WEBC) npm start -- --host 0.0.0.0
shell: VALE_TARGET ?= docs README.md
$(WEBC) /bin/bash
build: check: genclidocs
$(WEBC) gulp build $(WEBC) vale $(VALE_TARGET)
$(WEBC) markdownlint '**/*.md' --ignore styles/ --ignore src/ --ignore node_modules/
publish: build localcheck: genclidocs
vale $(VALE_TARGET)
markdownlint '**/*.md' --ignore styles/ --ignore src/ --ignore node_modules/
dockershell:
$(WEBC) bash
build: genclidocs
$(WEBC) npm run build
genclidocs: _validatemdgen ${MDGEN_BINARY}
@echo 'Auto-generating Corso CLI docs...'
$(WEBC) rm -rf docs/cli
$(GOC) ${MDGEN_BINARY} --cli-folder ${CLI_DOCS}
_validatemdgen: # in case we have a different architecture
@echo 'Verifying dependencies...'
$(GOBASHC) -c "${MDGEN_BINARY} --help >/dev/null || rm -rf ${MDGEN_BINARY}"
${MDGEN_BINARY}: $(shell find ${CORSO_LOCAL_PATH}/src -type f -name '*.go') $(shell find ${CORSO_LOCAL_PATH}/src -type d)
@echo 'Re-building Corso CLI docs auto-gen tooling...'
$(GOC) go mod download
$(GOC) go build -o ${MDGEN_BINARY} ${MDGEN_SRC}
clean:
$(WEBC) rm -rf docs/cli build node_modules
$(GOC) rm -rf ${CORSO_BUILD_DIR}/*
publish: clean build
docker run -e AWS_ACCESS_KEY_ID -e AWS_SECRET_ACCESS_KEY \ docker run -e AWS_ACCESS_KEY_ID -e AWS_SECRET_ACCESS_KEY \
-e AWS_SESSION_TOKEN -e AWS_REGION \ -e AWS_SESSION_TOKEN -e AWS_REGION \
--rm -v ${PWD}:/usr/src/website corso/website:latest \ --rm -v ${PWD}:/usr/src/website corso/website:latest \
make sync make sync
sync: sync:
aws s3 sync /usr/src/website/dist/ s3://corsobackup.io/ --exclude ".git/*" --delete aws s3 sync /usr/src/website/build/ s3://corsobackup.io/ --delete
aws cloudfront create-invalidation --distribution-id E1W9NGI9YTVZ1A --paths "/*" aws cloudfront create-invalidation --distribution-id E1W9NGI9YTVZ1A --paths "/*"
clean:
rm -rf node_modules

View File

@ -1,8 +1,11 @@
# Corso website documentation # Corso documentation
Corso documentation uses [Docusaurus 2](https://docusaurus.io/), a modern static website generator.
[Mermaid](https://mermaid-js.github.io/mermaid/) provides support for native diagrams in Markdown.
## Requirements ## Requirements
Building the Corso website requires the following tools on your machine: Developing documentation for Corso requires the following tools on your machine:
- `make` - `make`
- Docker - Docker
@ -19,22 +22,50 @@ make buildimage
make dev make dev
``` ```
This command starts a local development server within the Docker container and will expose a live website preview at [http://localhost:5050](http://localhost:5050). This command starts a local development server within the Docker container and will expose docs at [http://localhost:3000](http://localhost:3000).
## Building a static website ## Generating Corso CLI docs
```bash
make genclidocs
```
Corso's CLI docs are auto-generated. This command explicitly triggers that generation; the same step also runs automatically for the other commands where it's relevant.
## Building static documentation
```bash ```bash
make build make build
``` ```
This command generates static content into the `dist` directory for integration with any static contents hosting service. If you are using AWS S3 + CloudFront, you can run `make publish` to upload to the configured S3 bucket. This command generates static content into the `build` directory for integration with any static contents hosting service.
## Website platform development ## Serving static documentation
```bash
make serve
```
This command will serve the static content generated with `make build` at [http://localhost:3000](http://localhost:3000).
## Style and linting
```bash
# Lint all docs
make check
# Lint specific files and/or folders
make check VALE_TARGET="README.md docs/concepts"
```
This command lints all Markdown files and checks them for style issues from within the Docker container.
## Documentation platform development
```bash ```bash
make shell make shell
``` ```
Use this command to interactively (and temporarily!) change the contents or Use this command to interactively (and temporarily!) change the contents or
configuration of the live website container image (for example, when configuration of the live documentation container image (for example, when
experimenting with new packages). experimenting with new plugins).

View File

@ -1,248 +0,0 @@
const browsersync = require('browser-sync').create();
const cached = require('gulp-cached');
const cleanCSS = require('clean-css');
const cssnano = require('gulp-cssnano');
const del = require('del');
const fileinclude = require('gulp-file-include');
const gulp = require('gulp');
const gulpif = require('gulp-if');
const npmdist = require('gulp-npm-dist');
const replace = require('gulp-replace');
const uglify = require('gulp-uglify');
const useref = require('gulp-useref-plus');
const rename = require('gulp-rename');
const sass = require('gulp-sass')(require('sass'));
const sourcemaps = require("gulp-sourcemaps");
const postcss = require('gulp-postcss');
const autoprefixer = require("autoprefixer");
const tailwindcss = require('tailwindcss');
const paths = {
config: {
tailwind: "./tailwind.config.js",
},
base: {
base: {
dir: './'
},
node: {
dir: '../node_modules'
},
packageLock: {
files: './package-lock.json'
}
},
dist: {
base: {
dir: './dist',
files: './dist/**/*'
},
libs: {
dir: './dist/assets/libs'
},
css: {
dir: './dist/assets/css',
},
js: {
dir: './dist/assets/js',
files: './dist/assets/js/pages',
},
},
src: {
base: {
dir: './src',
files: './src/**/*'
},
css: {
dir: './src/assets/css',
files: './src/assets/css/**/*'
},
html: {
dir: './src',
files: './src/**/*.html',
},
img: {
dir: './src/assets/images',
files: './src/assets/images/**/*',
},
js: {
dir: './src/assets/js',
pages: './src/assets/js/pages',
files: './src/assets/js/pages/*.js',
main: './src/assets/js/*.js',
},
partials: {
dir: './src/partials',
files: './src/partials/**/*'
},
scss: {
dir: './src/assets/scss',
files: './src/assets/scss/**/*',
main: './src/assets/scss/*.scss',
icon: './src/assets/scss/icons.scss'
}
}
};
gulp.task('browsersync', function (callback) {
browsersync.init({
server: {
baseDir: [paths.dist.base.dir, paths.src.base.dir, paths.base.base.dir]
},
open: false,
});
callback();
});
gulp.task('browsersyncReload', function (callback) {
browsersync.reload();
callback();
});
gulp.task('watch', function () {
gulp.watch([paths.src.scss.files, '!' + paths.src.scss.icon], gulp.series('scss', 'browsersyncReload'));
gulp.watch(paths.src.scss.icon, gulp.series('icons', 'browsersyncReload'));
gulp.watch([paths.src.js.dir], gulp.series('js', 'browsersyncReload'));
// gulp.watch([paths.src.js.pages], gulp.series('jsPages', 'browsersyncReload'));
gulp.watch([paths.src.html.files, paths.src.partials.files], gulp.series(['fileinclude', 'scss'], 'browsersyncReload'));
});
gulp.task('js', function () {
return gulp
.src(paths.src.js.main)
// .pipe(uglify())
.pipe(gulp.dest(paths.dist.js.dir));
});
// gulp.task('jsPages', function () {
// return gulp
// .src(paths.src.js.files)
// // .pipe(uglify())
// .pipe(gulp.dest(paths.dist.js.files));
// });
const cssOptions = {
compatibility: "*", // (default) - Internet Explorer 10+ compatibility mode
inline: ["all"], // enables all inlining, same as ['local', 'remote']
level: 2, // Optimization levels. The level option can be either 0, 1 (default), or 2, e.g.
};
gulp.task('scss', function () {
// generate tailwind
return gulp
.src([paths.src.scss.main, '!' + paths.src.scss.icon])
.pipe(sourcemaps.init())
.pipe(sass().on('error', sass.logError))
.pipe(postcss([
tailwindcss(paths.config.tailwind),
autoprefixer()
]))
.pipe(gulp.dest(paths.dist.css.dir))
// .pipe(cssnano({ svgo: false }))
.on("data", function (file) {
const buferFile = new cleanCSS(cssOptions).minify(file.contents);
return (file.contents = Buffer.from(buferFile.styles));
})
.pipe(
rename({
suffix: ".min"
})
)
.pipe(sourcemaps.write("./"))
.pipe(gulp.dest(paths.dist.css.dir));
});
gulp.task('icons', function () {
return gulp
.src(paths.src.scss.icon)
.pipe(sass().on('error', sass.logError))
.pipe(gulp.dest(paths.dist.css.dir))
.on("data", function (file) {
const buferFile = new cleanCSS(cssOptions).minify(file.contents);
return (file.contents = Buffer.from(buferFile.styles));
})
.pipe(
rename({
suffix: ".min"
})
)
.pipe(gulp.dest(paths.dist.css.dir));
});
gulp.task('fileinclude', function () {
return gulp
.src([
paths.src.html.files,
'!' + paths.dist.base.files,
'!' + paths.src.partials.files
])
.pipe(fileinclude({
prefix: '@@',
basepath: '@file',
indent: true,
}))
.pipe(cached())
.pipe(gulp.dest(paths.dist.base.dir));
});
gulp.task('clean:packageLock', function (callback) {
del.sync(paths.base.packageLock.files);
callback();
});
gulp.task('clean:dist', function (callback) {
del.sync(paths.dist.base.dir);
callback();
});
gulp.task('copy:all', function () {
return gulp
.src([
paths.src.base.files,
'!' + paths.src.partials.dir, '!' + paths.src.partials.files,
'!' + paths.src.scss.dir, '!' + paths.src.scss.files,
'!' + paths.src.js.dir, '!' + paths.src.js.files, '!' + paths.src.js.main,
'!' + paths.src.html.files,
])
.pipe(gulp.dest(paths.dist.base.dir));
});
gulp.task('copy:libs', function () {
return gulp
.src(npmdist({ nodeModulesPath: process.env.NODE_MODULES_PATH || './' }), { base: paths.base.node.dir })
.pipe(rename(function (path) {
path.dirname = path.dirname.replace(/\/dist/, '').replace(/\\dist/, '');
}))
.pipe(gulp.dest(paths.dist.libs.dir));
});
gulp.task('html', function () {
return gulp
.src([
paths.src.html.files,
'!' + paths.dist.base.files,
'!' + paths.src.partials.files
])
.pipe(fileinclude({
prefix: '@@',
basepath: '@file',
indent: true,
}))
.pipe(replace(/href="(.{0,10})node_modules/g, 'href="$1assets/libs'))
.pipe(replace(/src="(.{0,10})node_modules/g, 'src="$1assets/libs'))
.pipe(useref())
.pipe(cached())
.pipe(gulpif('*.js', uglify()))
.pipe(gulpif('*.css', cssnano({ svgo: false })))
.pipe(gulp.dest(paths.dist.base.dir));
});
// Default(Producation) Task
gulp.task('default', gulp.series(gulp.parallel('clean:packageLock', 'clean:dist', 'copy:all', 'copy:libs', 'fileinclude', 'scss', 'icons', 'js', 'html'), gulp.parallel('browsersync', 'watch')));
// Build(Development) Task
gulp.task('build', gulp.series('clean:packageLock', 'clean:dist', 'copy:all', 'copy:libs', 'fileinclude', 'scss', 'icons', 'js', 'html'));

View File

@ -30,7 +30,7 @@
}, },
"devDependencies": { "devDependencies": {
"@docusaurus/module-type-aliases": "2.2.0", "@docusaurus/module-type-aliases": "2.2.0",
"@iconify/react": "^4.0.0", "@iconify/react": "^4.0.1",
"autoprefixer": "^10.4.13", "autoprefixer": "^10.4.13",
"postcss": "^8.4.19", "postcss": "^8.4.19",
"tailwindcss": "^3.2.4" "tailwindcss": "^3.2.4"
@ -2741,15 +2741,15 @@
} }
}, },
"node_modules/@iconify/react": { "node_modules/@iconify/react": {
"version": "4.0.0", "version": "4.0.1",
"resolved": "https://registry.npmjs.org/@iconify/react/-/react-4.0.0.tgz", "resolved": "https://registry.npmjs.org/@iconify/react/-/react-4.0.1.tgz",
"integrity": "sha512-YE+z5PCegYyuJtLR6KBbJNlssmuVgRyFyRxcgxSPVkN3Gsm7npSzrUh+gWlBxrNb5g4Q6Nnb1gczPBNcwtUpng==", "integrity": "sha512-/DBJqh5K7W4f+d4kpvyJa/OTpVa3GfgrE9bZFAKP0vIWDr0cvVU9MVvbbkek216w9nLQhpJY/FeJtc6izB1PHw==",
"dev": true, "dev": true,
"dependencies": { "dependencies": {
"@iconify/types": "^2.0.0" "@iconify/types": "^2.0.0"
}, },
"funding": { "funding": {
"url": "http://github.com/sponsors/cyberalien" "url": "https://github.com/sponsors/cyberalien"
}, },
"peerDependencies": { "peerDependencies": {
"react": ">=16" "react": ">=16"
@ -11829,23 +11829,14 @@
} }
}, },
"node_modules/recursive-readdir": { "node_modules/recursive-readdir": {
"version": "2.2.2", "version": "2.2.3",
"license": "MIT", "resolved": "https://registry.npmjs.org/recursive-readdir/-/recursive-readdir-2.2.3.tgz",
"integrity": "sha512-8HrF5ZsXk5FAH9dgsx3BlUer73nIhuj+9OrQwEbLTPOBzGkL1lsFCR01am+v+0m2Cmbs1nP12hLDl5FA7EszKA==",
"dependencies": { "dependencies": {
"minimatch": "3.0.4" "minimatch": "^3.0.5"
}, },
"engines": { "engines": {
"node": ">=0.10.0" "node": ">=6.0.0"
}
},
"node_modules/recursive-readdir/node_modules/minimatch": {
"version": "3.0.4",
"license": "ISC",
"dependencies": {
"brace-expansion": "^1.1.7"
},
"engines": {
"node": "*"
} }
}, },
"node_modules/regenerate": { "node_modules/regenerate": {
@ -12609,29 +12600,20 @@
} }
}, },
"node_modules/serve-handler": { "node_modules/serve-handler": {
"version": "6.1.3", "version": "6.1.5",
"license": "MIT", "resolved": "https://registry.npmjs.org/serve-handler/-/serve-handler-6.1.5.tgz",
"integrity": "sha512-ijPFle6Hwe8zfmBxJdE+5fta53fdIY0lHISJvuikXB3VYFafRjMRpOffSPvCYsbKyBA7pvy9oYr/BT1O3EArlg==",
"dependencies": { "dependencies": {
"bytes": "3.0.0", "bytes": "3.0.0",
"content-disposition": "0.5.2", "content-disposition": "0.5.2",
"fast-url-parser": "1.1.3", "fast-url-parser": "1.1.3",
"mime-types": "2.1.18", "mime-types": "2.1.18",
"minimatch": "3.0.4", "minimatch": "3.1.2",
"path-is-inside": "1.0.2", "path-is-inside": "1.0.2",
"path-to-regexp": "2.2.1", "path-to-regexp": "2.2.1",
"range-parser": "1.2.0" "range-parser": "1.2.0"
} }
}, },
"node_modules/serve-handler/node_modules/minimatch": {
"version": "3.0.4",
"license": "ISC",
"dependencies": {
"brace-expansion": "^1.1.7"
},
"engines": {
"node": "*"
}
},
"node_modules/serve-handler/node_modules/path-to-regexp": { "node_modules/serve-handler/node_modules/path-to-regexp": {
"version": "2.2.1", "version": "2.2.1",
"resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-2.2.1.tgz", "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-2.2.1.tgz",
@ -16792,9 +16774,9 @@
} }
}, },
"@iconify/react": { "@iconify/react": {
"version": "4.0.0", "version": "4.0.1",
"resolved": "https://registry.npmjs.org/@iconify/react/-/react-4.0.0.tgz", "resolved": "https://registry.npmjs.org/@iconify/react/-/react-4.0.1.tgz",
"integrity": "sha512-YE+z5PCegYyuJtLR6KBbJNlssmuVgRyFyRxcgxSPVkN3Gsm7npSzrUh+gWlBxrNb5g4Q6Nnb1gczPBNcwtUpng==", "integrity": "sha512-/DBJqh5K7W4f+d4kpvyJa/OTpVa3GfgrE9bZFAKP0vIWDr0cvVU9MVvbbkek216w9nLQhpJY/FeJtc6izB1PHw==",
"dev": true, "dev": true,
"requires": { "requires": {
"@iconify/types": "^2.0.0" "@iconify/types": "^2.0.0"
@ -22828,17 +22810,11 @@
} }
}, },
"recursive-readdir": { "recursive-readdir": {
"version": "2.2.2", "version": "2.2.3",
"resolved": "https://registry.npmjs.org/recursive-readdir/-/recursive-readdir-2.2.3.tgz",
"integrity": "sha512-8HrF5ZsXk5FAH9dgsx3BlUer73nIhuj+9OrQwEbLTPOBzGkL1lsFCR01am+v+0m2Cmbs1nP12hLDl5FA7EszKA==",
"requires": { "requires": {
"minimatch": "3.0.4" "minimatch": "^3.0.5"
},
"dependencies": {
"minimatch": {
"version": "3.0.4",
"requires": {
"brace-expansion": "^1.1.7"
}
}
} }
}, },
"regenerate": { "regenerate": {
@ -23355,24 +23331,20 @@
} }
}, },
"serve-handler": { "serve-handler": {
"version": "6.1.3", "version": "6.1.5",
"resolved": "https://registry.npmjs.org/serve-handler/-/serve-handler-6.1.5.tgz",
"integrity": "sha512-ijPFle6Hwe8zfmBxJdE+5fta53fdIY0lHISJvuikXB3VYFafRjMRpOffSPvCYsbKyBA7pvy9oYr/BT1O3EArlg==",
"requires": { "requires": {
"bytes": "3.0.0", "bytes": "3.0.0",
"content-disposition": "0.5.2", "content-disposition": "0.5.2",
"fast-url-parser": "1.1.3", "fast-url-parser": "1.1.3",
"mime-types": "2.1.18", "mime-types": "2.1.18",
"minimatch": "3.0.4", "minimatch": "3.1.2",
"path-is-inside": "1.0.2", "path-is-inside": "1.0.2",
"path-to-regexp": "2.2.1", "path-to-regexp": "2.2.1",
"range-parser": "1.2.0" "range-parser": "1.2.0"
}, },
"dependencies": { "dependencies": {
"minimatch": {
"version": "3.0.4",
"requires": {
"brace-expansion": "^1.1.7"
}
},
"path-to-regexp": { "path-to-regexp": {
"version": "2.2.1", "version": "2.2.1",
"resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-2.2.1.tgz", "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-2.2.1.tgz",

View File

@ -1,52 +1,56 @@
{ {
"name": "docs",
"version": "0.1.0",
"private": true, "private": true,
"name": "techwind",
"version": "1.4.0",
"description": "Tailwind CSS Saas & Software Multipurpose Template",
"main": "gulpfile.js",
"author": "ShreeThemes",
"browserslist": [
"last 2 version",
"> 2%"
],
"scripts": { "scripts": {
"test": "echo \"Error: no test specified\" && exit 1" "docusaurus": "docusaurus",
}, "start": "docusaurus start",
"devDependencies": { "build": "docusaurus build",
"autoprefixer": "^10.4.13", "swizzle": "docusaurus swizzle",
"browser-sync": "^2.26.7", "deploy": "docusaurus deploy",
"clean-css": "^5.3.1", "clear": "docusaurus clear",
"del": "4.1.1", "serve": "docusaurus serve",
"gulp": "^4.0.2", "write-translations": "docusaurus write-translations",
"gulp-cached": "1.1.1", "write-heading-ids": "docusaurus write-heading-ids"
"gulp-cssnano": "2.1.3",
"gulp-file-include": "2.3.0",
"gulp-if": "^3.0.0",
"gulp-npm-dist": "^1.0.3",
"gulp-postcss": "^9.0.1",
"gulp-rename": "^2.0.0",
"gulp-replace": "1.1.3",
"gulp-sass": "^5.1.0",
"gulp-sourcemaps": "^3.0.0",
"gulp-uglify": "3.0.2",
"gulp-useref-plus": "0.0.8",
"postcss": "^8.4.19",
"sass": "1.56.1",
"tailwindcss": "^3.2.4"
}, },
"dependencies": { "dependencies": {
"@iconscout/unicons": "^4.0.1", "@docusaurus/core": "2.2.0",
"@midzer/tobii": "^2.3.3", "@docusaurus/plugin-google-gtag": "^2.2.0",
"@docusaurus/preset-classic": "2.2.0",
"@loadable/component": "^5.15.2",
"@mdx-js/react": "^1.6.22",
"animate.css": "^4.1.1", "animate.css": "^4.1.1",
"choices.js": "^10.1.0", "clsx": "^1.2.1",
"feather-icons": "^4.28.0", "docusaurus-plugin-image-zoom": "^0.1.1",
"gumshoejs": "^5.1.2", "docusaurus-plugin-sass": "^0.2.2",
"feather-icons": "^4.29.0",
"jarallax": "^2.0.4", "jarallax": "^2.0.4",
"js-datepicker": "^5.18.1", "mdx-mermaid": "^1.3.2",
"shufflejs": "^6.1.0", "mermaid": "^9.2.2",
"swiper": "8.4.5", "prism-react-renderer": "^1.3.5",
"tiny-slider": "^2.9.4", "react": "^17.0.2",
"tobii": "^2.0.0-alpha", "react-dom": "^17.0.2",
"wow.js": "^1.2.2" "sass": "^1.56.1",
"tw-elements": "^1.0.0-alpha12",
"wowjs": "^1.1.3"
},
"devDependencies": {
"@docusaurus/module-type-aliases": "2.2.0",
"@iconify/react": "^4.0.1",
"autoprefixer": "^10.4.13",
"postcss": "^8.4.19",
"tailwindcss": "^3.2.4"
},
"browserslist": {
"production": [
">0.5%",
"not dead",
"not op_mini all"
],
"development": [
"last 1 chrome version",
"last 1 firefox version",
"last 1 safari version"
]
} }
} }

Binary image files removed; diffs not shown. Before sizes: 4.1 KiB, 1.0 MiB, 252 KiB, 384 KiB, 145 KiB, 138 KiB (diff suppressed because one or more lines are too long), 80 KiB.

View File

@ -1 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?><svg id="Layer_1" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 1350 1350"><defs><style>.cls-1{fill:#205eab;}</style></defs><path class="cls-1" d="M733.67,1251.47c-75.58,0-146.63-29.43-200.08-82.88L40,675,533.6,181.42c53.43-53.45,124.49-82.89,200.08-82.89s146.63,29.43,200.07,82.89l293.52,293.51c110.32,110.33,110.32,289.83,0,400.14l-293.52,293.52c-53.43,53.44-124.49,82.88-200.07,82.88ZM145.48,675l440.85,440.86c81.26,81.24,213.44,81.22,294.68,0l293.52-293.52c81.24-81.24,81.24-213.43,0-294.68L881.01,234.15c-39.36-39.36-91.68-61.04-147.33-61.04s-107.99,21.68-147.34,61.04L145.48,675Z"/><g><path class="cls-1" d="M1079.78,673.48l-14.74-92.16c-2.24-15.96-14.18-24.79-32.94-24.79l-109.95-.31c-17.69,0-30.7-8.84-32.94-24.79l-.38-2.62c-3.57-25.34-21.36-46.39-45.77-54.13l-147.73-54.08c-3.68-1.19-7.09-3.07-10.03-5.56-1.06-.9-1.78-2.14-2.17-3.48l-27.63-94.9c-.28-.96-.79-1.85-1.56-2.48-8.12-6.68-20.56-4.33-25.58,5.17l-54.49,122.05c-1.98,3.74-3.12,7.88-3.33,12.12l6.69,55.63c-.31,5.88-2.41,11.57-6,16.24l-108.67,108.07c-11.57,11.57-35.77,35.77-35.77,35.77l304.33,304.36,27.13-111.43c2.33-13.05,13.15-22.89,26.34-23.96l57.58-4.64c179.88-7.69,181.76-59.61,233.29-140.3,3-4.72,4.6-10.19,4.6-15.74,0-1.33-.1-2.69-.29-4.02Zm-236.75-89.74c-13.26,3.33-50.23,1.19-53.56-12.07-3.33-13.24,28.27-32.6,41.51-35.91,13.26-3.33,26.7,4.72,30.03,17.98,3.31,13.24-4.74,26.7-17.98,30.01Z"/><path class="cls-1" d="M781.88,435.5l-29.01-108.92c-.32-1.19-1.04-2.24-2.04-2.96-6.43-4.6-15.71-2.63-19.55,4.64l-36.4,75.39,87,31.84Z"/><path class="cls-1" d="M726.53,992.3l-1.88,7.71c-4.28,17.56-26.12,23.68-38.91,10.9l-274.22-274.22-26.1-26.1,29.7-29.7,26.1,26.1,285.31,285.31Z"/></g></svg>

Before size: 1.7 KiB. Two further binary image files removed, diffs not shown (before sizes: 419 KiB and 415 KiB).

View File

@ -1 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?><svg id="Layer_1" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 1920 523"><defs><style>.cls-1{fill:#205eab;}</style></defs><g><path class="cls-1" d="M134.51,129.94c28.63,0,54.6,7.95,75.81,22.79,11.67,7.95,14.31,23.33,6.36,36.58-7.42,12.19-25.98,12.73-37.64,5.83-12.73-7.42-28.63-12.19-44.53-12.19-41.35,0-77.93,30.22-77.93,76.34s36.58,75.81,77.93,75.81c15.91,0,31.81-4.77,44.53-12.19,11.66-6.89,30.22-6.36,37.64,5.83,7.95,13.25,5.3,28.63-6.36,36.58-21.21,14.84-47.18,22.8-75.81,22.8C63.47,388.12,2.5,337.76,2.5,259.29S63.47,129.94,134.51,129.94Z"/><path class="cls-1" d="M261.22,258.23c0-78.46,58.85-128.3,128.83-128.3s129.88,49.83,129.88,128.3-59.37,129.89-129.88,129.89-128.83-51.43-128.83-129.89Zm204.64,0c0-45.59-34.46-75.28-75.81-75.28s-74.75,29.69-74.75,75.28,33.93,76.87,74.75,76.87,75.81-30.22,75.81-76.87Z"/><path class="cls-1" d="M633.91,293.75v64.15c0,14.84-12.19,27.57-28.1,27.57-14.84,0-26.51-12.72-26.51-27.57V160.15c0-14.84,11.67-27.57,26.51-27.57,15.91,0,28.1,12.72,28.1,27.57v31.81c12.73-44,37.11-62.03,67.86-62.03,7.95,0,15.91,.53,23.33,2.12,13.79,3.18,22.8,16.97,19.62,31.28-4.77,23.86-28.63,18.03-44.53,18.03-46.65,0-66.27,46.65-66.27,112.39Z"/><path class="cls-1" d="M788.19,302.24c13.25-5.3,23.33,1.59,27.57,10.6,10.08,19.09,29.16,29.69,53.55,29.69s42.94-11.13,42.94-29.69c0-15.9-15.38-22.79-33.4-27.03l-33.4-7.95c-52.48-14.32-71.57-42.94-68.39-82.7,3.18-36.58,42.94-65.21,88.53-65.21,32.87,0,63.09,10.6,79.53,36.58,7.42,12.72,3.71,25.44-4.77,31.81-9.01,7.42-20.15,6.89-31.81-3.18-13.78-12.19-29.69-16.97-42.41-16.97-13.79,0-29.16,4.77-34.46,13.25-4.24,6.89-4.77,13.78-2.12,21.21,3.18,9.54,18.02,14.31,31.28,18.02l38.17,9.54c53.54,13.25,64.68,48.24,64.68,73.16,0,47.71-41.88,74.75-98.61,74.75-38.17,0-76.87-20.15-90.13-56.2-4.24-13.25,1.59-25.44,13.25-29.68Z"/><path class="cls-1" d="M1006.61,258.23c0-78.46,58.85-128.3,128.83-128.3s129.88,49.83,129.88,128.3-59.37,129.89-129.88,129.89-128.83-51.43-128.83-129.89Zm204.64,0c0-45.59-34.46-75.28-75.81-75.28s-74.75,29.69-74.75,75.28,33.93,76.87,74.75,76.87,75.81-30.22,75.81-76.87Z"/></g><path class="cls-1" d="M1658.37,520.7c-33.98,0-65.93-13.23-89.96-37.26l-221.94-221.93,221.94-221.93c24.03-24.03,55.98-37.27,89.96-37.27s65.93,13.23,89.96,37.27l131.98,131.97c49.6,49.61,49.6,130.31,0,179.92l-131.98,131.98c-24.03,24.03-55.98,37.26-89.96,37.26Zm-264.47-259.2l198.22,198.22c36.53,36.53,95.97,36.52,132.5,0l131.98-131.98c36.53-36.53,36.53-95.97,0-132.5l-131.98-131.97c-17.7-17.7-41.22-27.44-66.25-27.44s-48.55,9.75-66.25,27.44l-198.22,198.22Z"/><g><path class="cls-1" d="M1813.99,260.82l-6.63-41.44c-1.01-7.17-6.37-11.15-14.81-11.15l-49.44-.14c-7.95,0-13.8-3.97-14.81-11.15l-.17-1.18c-1.61-11.39-9.61-20.86-20.58-24.34l-66.42-24.32c-1.66-.53-3.19-1.38-4.51-2.5-.48-.41-.8-.96-.97-1.57l-12.42-42.67c-.13-.43-.36-.83-.7-1.12-3.65-3-9.25-1.95-11.5,2.32l-24.5,54.88c-.89,1.68-1.4,3.54-1.5,5.45l3.01,25.01c-.14,2.64-1.08,5.2-2.7,7.3l-48.86,48.59c-5.2,5.2-16.08,16.08-16.08,16.08l136.84,136.85,12.2-50.1c1.05-5.87,5.91-10.29,11.84-10.77l25.89-2.09c80.88-3.46,81.72-26.8,104.9-63.08,1.35-2.12,2.07-4.58,2.07-7.08,0-.6-.04-1.21-.13-1.81Zm-106.45-40.35c-5.96,1.5-22.58,.54-24.08-5.43-1.5-5.95,12.71-14.66,18.66-16.15,5.96-1.5,12,2.12,13.5,8.08,1.49,5.95-2.13,12-8.08,13.49Z"/><path class="cls-1" d="M1680.04,153.81l-13.04-48.97c-.14-.53-.47-1.01-.92-1.33-2.89-2.07-7.06-1.18-8.79,2.09l-16.37,33.9,39.12,14.32Z"/><path class="cls-1" 
d="M1655.16,404.17l-.85,3.47c-1.93,7.9-11.75,10.65-17.49,4.9l-123.3-123.3-11.74-11.74,13.35-13.35,11.74,11.74,128.28,128.28Z"/></g></svg>

Before size: 3.5 KiB.

View File

@ -1 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?><svg id="Layer_1" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 1920 523"><defs><style>.cls-1{fill:#fff;}</style></defs><g><path class="cls-1" d="M134.51,129.94c28.63,0,54.6,7.95,75.81,22.79,11.67,7.95,14.31,23.33,6.36,36.58-7.42,12.19-25.98,12.73-37.64,5.83-12.73-7.42-28.63-12.19-44.53-12.19-41.35,0-77.93,30.22-77.93,76.34s36.58,75.81,77.93,75.81c15.91,0,31.81-4.77,44.53-12.19,11.66-6.89,30.22-6.36,37.64,5.83,7.95,13.25,5.3,28.63-6.36,36.58-21.21,14.84-47.18,22.8-75.81,22.8C63.47,388.12,2.5,337.76,2.5,259.29S63.47,129.94,134.51,129.94Z"/><path class="cls-1" d="M261.22,258.23c0-78.46,58.85-128.3,128.83-128.3s129.88,49.83,129.88,128.3-59.37,129.89-129.88,129.89-128.83-51.43-128.83-129.89Zm204.64,0c0-45.59-34.46-75.28-75.81-75.28s-74.75,29.69-74.75,75.28,33.93,76.87,74.75,76.87,75.81-30.22,75.81-76.87Z"/><path class="cls-1" d="M633.91,293.75v64.15c0,14.84-12.19,27.57-28.1,27.57-14.84,0-26.51-12.72-26.51-27.57V160.15c0-14.84,11.67-27.57,26.51-27.57,15.91,0,28.1,12.72,28.1,27.57v31.81c12.73-44,37.11-62.03,67.86-62.03,7.95,0,15.91,.53,23.33,2.12,13.79,3.18,22.8,16.97,19.62,31.28-4.77,23.86-28.63,18.03-44.53,18.03-46.65,0-66.27,46.65-66.27,112.39Z"/><path class="cls-1" d="M788.19,302.24c13.25-5.3,23.33,1.59,27.57,10.6,10.08,19.09,29.16,29.69,53.55,29.69s42.94-11.13,42.94-29.69c0-15.9-15.38-22.79-33.4-27.03l-33.4-7.95c-52.48-14.32-71.57-42.94-68.39-82.7,3.18-36.58,42.94-65.21,88.53-65.21,32.87,0,63.09,10.6,79.53,36.58,7.42,12.72,3.71,25.44-4.77,31.81-9.01,7.42-20.15,6.89-31.81-3.18-13.78-12.19-29.69-16.97-42.41-16.97-13.79,0-29.16,4.77-34.46,13.25-4.24,6.89-4.77,13.78-2.12,21.21,3.18,9.54,18.02,14.31,31.28,18.02l38.17,9.54c53.54,13.25,64.68,48.24,64.68,73.16,0,47.71-41.88,74.75-98.61,74.75-38.17,0-76.87-20.15-90.13-56.2-4.24-13.25,1.59-25.44,13.25-29.68Z"/><path class="cls-1" d="M1006.61,258.23c0-78.46,58.85-128.3,128.83-128.3s129.88,49.83,129.88,128.3-59.37,129.89-129.88,129.89-128.83-51.43-128.83-129.89Zm204.64,0c0-45.59-34.46-75.28-75.81-75.28s-74.75,29.69-74.75,75.28,33.93,76.87,74.75,76.87,75.81-30.22,75.81-76.87Z"/></g><path class="cls-1" d="M1658.37,520.7c-33.98,0-65.93-13.23-89.96-37.26l-221.94-221.93,221.94-221.93c24.03-24.03,55.98-37.27,89.96-37.27s65.93,13.23,89.96,37.27l131.98,131.97c49.6,49.61,49.6,130.31,0,179.92l-131.98,131.98c-24.03,24.03-55.98,37.26-89.96,37.26Zm-264.47-259.2l198.22,198.22c36.53,36.53,95.97,36.52,132.5,0l131.98-131.98c36.53-36.53,36.53-95.97,0-132.5l-131.98-131.97c-17.7-17.7-41.22-27.44-66.25-27.44s-48.55,9.75-66.25,27.44l-198.22,198.22Z"/><g><path class="cls-1" d="M1813.99,260.82l-6.63-41.44c-1.01-7.17-6.37-11.15-14.81-11.15l-49.44-.14c-7.95,0-13.8-3.97-14.81-11.15l-.17-1.18c-1.61-11.39-9.61-20.86-20.58-24.34l-66.42-24.32c-1.66-.53-3.19-1.38-4.51-2.5-.48-.41-.8-.96-.97-1.57l-12.42-42.67c-.13-.43-.36-.83-.7-1.12-3.65-3-9.25-1.95-11.5,2.32l-24.5,54.88c-.89,1.68-1.4,3.54-1.5,5.45l3.01,25.01c-.14,2.64-1.08,5.2-2.7,7.3l-48.86,48.59c-5.2,5.2-16.08,16.08-16.08,16.08l136.84,136.85,12.2-50.1c1.05-5.87,5.91-10.29,11.84-10.77l25.89-2.09c80.88-3.46,81.72-26.8,104.9-63.08,1.35-2.12,2.07-4.58,2.07-7.08,0-.6-.04-1.21-.13-1.81Zm-106.45-40.35c-5.96,1.5-22.58,.54-24.08-5.43-1.5-5.95,12.71-14.66,18.66-16.15,5.96-1.5,12,2.12,13.5,8.08,1.49,5.95-2.13,12-8.08,13.49Z"/><path class="cls-1" d="M1680.04,153.81l-13.04-48.97c-.14-.53-.47-1.01-.92-1.33-2.89-2.07-7.06-1.18-8.79,2.09l-16.37,33.9,39.12,14.32Z"/><path class="cls-1" 
d="M1655.16,404.17l-.85,3.47c-1.93,7.9-11.75,10.65-17.49,4.9l-123.3-123.3-11.74-11.74,13.35-13.35,11.74,11.74,128.28,128.28Z"/></g></svg>

Before size: 3.5 KiB.

View File

@ -1 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?><svg id="Layer_1" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 1350 1350"><defs><style>.cls-1{fill:#205eab;}</style></defs><path class="cls-1" d="M733.67,1251.47c-75.58,0-146.63-29.43-200.08-82.88L40,675,533.6,181.42c53.43-53.45,124.49-82.89,200.08-82.89s146.63,29.43,200.07,82.89l293.52,293.51c110.32,110.33,110.32,289.83,0,400.14l-293.52,293.52c-53.43,53.44-124.49,82.88-200.07,82.88ZM145.48,675l440.85,440.86c81.26,81.24,213.44,81.22,294.68,0l293.52-293.52c81.24-81.24,81.24-213.43,0-294.68L881.01,234.15c-39.36-39.36-91.68-61.04-147.33-61.04s-107.99,21.68-147.34,61.04L145.48,675Z"/><g><path class="cls-1" d="M1079.78,673.48l-14.74-92.16c-2.24-15.96-14.18-24.79-32.94-24.79l-109.95-.31c-17.69,0-30.7-8.84-32.94-24.79l-.38-2.62c-3.57-25.34-21.36-46.39-45.77-54.13l-147.73-54.08c-3.68-1.19-7.09-3.07-10.03-5.56-1.06-.9-1.78-2.14-2.17-3.48l-27.63-94.9c-.28-.96-.79-1.85-1.56-2.48-8.12-6.68-20.56-4.33-25.58,5.17l-54.49,122.05c-1.98,3.74-3.12,7.88-3.33,12.12l6.69,55.63c-.31,5.88-2.41,11.57-6,16.24l-108.67,108.07c-11.57,11.57-35.77,35.77-35.77,35.77l304.33,304.36,27.13-111.43c2.33-13.05,13.15-22.89,26.34-23.96l57.58-4.64c179.88-7.69,181.76-59.61,233.29-140.3,3-4.72,4.6-10.19,4.6-15.74,0-1.33-.1-2.69-.29-4.02Zm-236.75-89.74c-13.26,3.33-50.23,1.19-53.56-12.07-3.33-13.24,28.27-32.6,41.51-35.91,13.26-3.33,26.7,4.72,30.03,17.98,3.31,13.24-4.74,26.7-17.98,30.01Z"/><path class="cls-1" d="M781.88,435.5l-29.01-108.92c-.32-1.19-1.04-2.24-2.04-2.96-6.43-4.6-15.71-2.63-19.55,4.64l-36.4,75.39,87,31.84Z"/><path class="cls-1" d="M726.53,992.3l-1.88,7.71c-4.28,17.56-26.12,23.68-38.91,10.9l-274.22-274.22-26.1-26.1,29.7-29.7,26.1,26.1,285.31,285.31Z"/></g></svg>

Before size: 1.7 KiB.

View File

@ -1 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?><svg id="Layer_1" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 1350 1350"><defs><style>.cls-1{fill:#fff;}</style></defs><path class="cls-1" d="M733.67,1251.47c-75.58,0-146.63-29.43-200.08-82.88L40,675,533.6,181.42c53.43-53.45,124.49-82.89,200.08-82.89s146.63,29.43,200.07,82.89l293.52,293.51c110.32,110.33,110.32,289.83,0,400.14l-293.52,293.52c-53.43,53.44-124.49,82.88-200.07,82.88ZM145.48,675l440.85,440.86c81.26,81.24,213.44,81.22,294.68,0l293.52-293.52c81.24-81.24,81.24-213.43,0-294.68L881.01,234.15c-39.36-39.36-91.68-61.04-147.33-61.04s-107.99,21.68-147.34,61.04L145.48,675Z"/><g><path class="cls-1" d="M1079.78,673.48l-14.74-92.16c-2.24-15.96-14.18-24.79-32.94-24.79l-109.95-.31c-17.69,0-30.7-8.84-32.94-24.79l-.38-2.62c-3.57-25.34-21.36-46.39-45.77-54.13l-147.73-54.08c-3.68-1.19-7.09-3.07-10.03-5.56-1.06-.9-1.78-2.14-2.17-3.48l-27.63-94.9c-.28-.96-.79-1.85-1.56-2.48-8.12-6.68-20.56-4.33-25.58,5.17l-54.49,122.05c-1.98,3.74-3.12,7.88-3.33,12.12l6.69,55.63c-.31,5.88-2.41,11.57-6,16.24l-108.67,108.07c-11.57,11.57-35.77,35.77-35.77,35.77l304.33,304.36,27.13-111.43c2.33-13.05,13.15-22.89,26.34-23.96l57.58-4.64c179.88-7.69,181.76-59.61,233.29-140.3,3-4.72,4.6-10.19,4.6-15.74,0-1.33-.1-2.69-.29-4.02Zm-236.75-89.74c-13.26,3.33-50.23,1.19-53.56-12.07-3.33-13.24,28.27-32.6,41.51-35.91,13.26-3.33,26.7,4.72,30.03,17.98,3.31,13.24-4.74,26.7-17.98,30.01Z"/><path class="cls-1" d="M781.88,435.5l-29.01-108.92c-.32-1.19-1.04-2.24-2.04-2.96-6.43-4.6-15.71-2.63-19.55,4.64l-36.4,75.39,87,31.84Z"/><path class="cls-1" d="M726.53,992.3l-1.88,7.71c-4.28,17.56-26.12,23.68-38.91,10.9l-274.22-274.22-26.1-26.1,29.7-29.7,26.1,26.1,285.31,285.31Z"/></g></svg>

Before size: 1.7 KiB.

Two binary files removed, not shown; one image had a before size of 88 KiB.

View File

@ -1,29 +0,0 @@
<?xml version="1.0" encoding="iso-8859-1"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg version="1.1" id="PowerShell" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
width="204.691px" height="154.521px" viewBox="0 0 204.691 154.521" style="enable-background:new 0 0 204.691 154.521;"
xml:space="preserve">
<g>
<path style="display:none;fill-rule:evenodd;clip-rule:evenodd;fill:#2671BE;" d="M-47.547,226.872
c0-97.129,0.094-194.259-0.195-291.387c-0.021-6.982,1.404-8.411,8.388-8.389c94.397,0.292,188.798,0.292,283.195,0
c6.984-0.022,8.41,1.407,8.389,8.389c-0.289,97.128-0.195,194.258-0.195,291.387c-3.238,2.008-6.837,1.129-10.268,1.131
c-93.015,0.049-186.031,0.049-279.047,0C-40.711,228.001-44.31,228.88-47.547,226.872z"/>
<path style="fill-rule:evenodd;clip-rule:evenodd;fill:#E0EAF5;" d="M120.14,0.032c23.011-0.008,46.023-0.078,69.034,0.019
c13.68,0.056,17.537,4.627,14.588,18.137c-8.636,39.566-17.466,79.092-26.415,118.589c-2.83,12.484-9.332,17.598-22.465,17.637
c-46.023,0.137-92.046,0.152-138.068-0.006c-15.043-0.053-19-5.148-15.759-19.404C9.849,96.287,18.69,57.582,27.602,18.892
C30.997,4.148,36.099,0.1,51.104,0.057C74.116-0.008,97.128,0.04,120.14,0.032z"/>
<path style="fill-rule:evenodd;clip-rule:evenodd;fill:#2671BE;" d="M85.365,149.813c-23.014-0.008-46.029,0.098-69.042-0.053
c-11.67-0.076-13.792-2.83-11.165-14.244c8.906-38.71,18.099-77.355,26.807-116.109C34.3,9.013,39.337,4.419,50.473,4.522
c46.024,0.427,92.056,0.137,138.083,0.184c11.543,0.011,13.481,2.48,10.89,14.187c-8.413,38.007-16.879,76.003-25.494,113.965
c-3.224,14.207-6.938,16.918-21.885,16.951C129.833,149.856,107.598,149.821,85.365,149.813z"/>
<path style="fill-rule:evenodd;clip-rule:evenodd;fill:#FDFDFE;" d="M104.948,73.951c-1.543-1.81-3.237-3.894-5.031-5.886
c-10.173-11.3-20.256-22.684-30.61-33.815c-4.738-5.094-6.248-10.041-0.558-15.069c5.623-4.97,11.148-4.53,16.306,1.188
c14.365,15.919,28.713,31.856,43.316,47.556c5.452,5.864,4.182,9.851-1.823,14.196c-23.049,16.683-45.968,33.547-68.862,50.443
c-5.146,3.799-10.052,4.75-14.209-0.861c-4.586-6.189-0.343-9.871,4.414-13.335c17.013-12.392,33.993-24.83,50.9-37.366
C101.146,79.256,104.527,78.238,104.948,73.951z"/>
<path style="fill-rule:evenodd;clip-rule:evenodd;fill:#FCFDFD;" d="M112.235,133.819c-6.196,0-12.401,0.213-18.583-0.068
c-4.932-0.223-7.9-2.979-7.838-8.174c0.06-4.912,2.536-8.605,7.463-8.738c13.542-0.363,27.104-0.285,40.651-0.02
c4.305,0.084,7.483,2.889,7.457,7.375c-0.031,5.146-2.739,9.133-8.25,9.465c-6.944,0.42-13.931,0.104-20.899,0.104
C112.235,133.78,112.235,133.8,112.235,133.819z"/>
</g>
</svg>

Before size: 2.6 KiB.

File diff suppressed because one or more lines are too long. Before size: 24 KiB.

File diff suppressed because one or more lines are too long. Before size: 38 KiB.

File diff suppressed because one or more lines are too long. Before size: 115 KiB.

File diff suppressed because one or more lines are too long. Before size: 24 KiB.

View File

@ -1,329 +0,0 @@
/* Template Name: Techwind - Multipurpose Tailwind CSS Landing Page Template
Author: Shreethemes
Email: support@shreethemes.in
Website: https://shreethemes.in
Version: 1.4.0
Created: May 2022
File Description: Main JS file of the template
*/
/*********************************/
/* INDEX */
/*================================
* 01. Loader *
* 02. Toggle Menus *
* 03. Active Menu *
* 04. Clickable Menu *
* 05. Back to top *
* 06. Feather icon *
* 06. DD Menu *
* 06. Active Sidebar Menu *
* 07. Contact us *
* 08. Wow Animation JS *
================================*/
window.addEventListener('load', fn, false)
// window.onload = function loader() {
function fn() {
// Preloader
if (document.getElementById('preloader')) {
setTimeout(() => {
document.getElementById('preloader').style.visibility = 'hidden';
document.getElementById('preloader').style.opacity = '0';
}, 350);
}
// Menus
activateMenu();
}
//Menu
/*********************/
/* Toggle Menu */
/*********************/
function toggleMenu() {
document.getElementById('isToggle').classList.toggle('open');
var isOpen = document.getElementById('navigation')
if (isOpen.style.display === "block") {
isOpen.style.display = "none";
} else {
isOpen.style.display = "block";
}
};
/*********************/
/* Menu Active */
/*********************/
function getClosest(elem, selector) {
// Element.matches() polyfill
if (!Element.prototype.matches) {
Element.prototype.matches =
Element.prototype.matchesSelector ||
Element.prototype.mozMatchesSelector ||
Element.prototype.msMatchesSelector ||
Element.prototype.oMatchesSelector ||
Element.prototype.webkitMatchesSelector ||
function (s) {
var matches = (this.document || this.ownerDocument).querySelectorAll(s),
i = matches.length;
while (--i >= 0 && matches.item(i) !== this) {}
return i > -1;
};
}
// Get the closest matching element
for (; elem && elem !== document; elem = elem.parentNode) {
if (elem.matches(selector)) return elem;
}
return null;
};
function activateMenu() {
var menuItems = document.getElementsByClassName("sub-menu-item");
if (menuItems) {
var matchingMenuItem = null;
for (var idx = 0; idx < menuItems.length; idx++) {
if (menuItems[idx].href === window.location.href) {
matchingMenuItem = menuItems[idx];
}
}
if (matchingMenuItem) {
matchingMenuItem.classList.add('active');
var immediateParent = getClosest(matchingMenuItem, 'li');
if (immediateParent) {
immediateParent.classList.add('active');
}
var parent = getClosest(immediateParent, '.child-menu-item');
if(parent){
parent.classList.add('active');
}
var parent = getClosest(parent || immediateParent , '.parent-menu-item');
if (parent) {
parent.classList.add('active');
var parentMenuitem = parent.querySelector('.menu-item');
if (parentMenuitem) {
parentMenuitem.classList.add('active');
}
var parentOfParent = getClosest(parent, '.parent-parent-menu-item');
if (parentOfParent) {
parentOfParent.classList.add('active');
}
} else {
var parentOfParent = getClosest(matchingMenuItem, '.parent-parent-menu-item');
if (parentOfParent) {
parentOfParent.classList.add('active');
}
}
}
}
}
/*********************/
/* Clickable manu */
/*********************/
if (document.getElementById("navigation")) {
var elements = document.getElementById("navigation").getElementsByTagName("a");
for (var i = 0, len = elements.length; i < len; i++) {
elements[i].onclick = function (elem) {
if (elem.target.getAttribute("href") === "javascript:void(0)") {
var submenu = elem.target.nextElementSibling.nextElementSibling;
submenu.classList.toggle('open');
}
}
}
}
/*********************/
/* Menu Sticky */
/*********************/
function windowScroll() {
const navbar = document.getElementById("topnav");
if (navbar != null) {
if (
document.body.scrollTop >= 50 ||
document.documentElement.scrollTop >= 50
) {
navbar.classList.add("nav-sticky");
} else {
navbar.classList.remove("nav-sticky");
}
}
}
window.addEventListener('scroll', (ev) => {
ev.preventDefault();
windowScroll();
})
/*********************/
/* Back To TOp */
/*********************/
window.onscroll = function () {
scrollFunction();
};
function scrollFunction() {
var mybutton = document.getElementById("back-to-top");
if(mybutton!=null){
if (document.body.scrollTop > 500 || document.documentElement.scrollTop > 500) {
mybutton.classList.add("block");
mybutton.classList.remove("hidden");
} else {
mybutton.classList.add("hidden");
mybutton.classList.remove("block");
}
}
}
function topFunction() {
document.body.scrollTop = 0;
document.documentElement.scrollTop = 0;
}
/*********************/
/* Active Sidebar */
/*********************/
(function () {
var current = location.pathname.substring(location.pathname.lastIndexOf('/') + 1);;
if (current === "") return;
var menuItems = document.querySelectorAll('.sidebar-nav a');
for (var i = 0, len = menuItems.length; i < len; i++) {
if (menuItems[i].getAttribute("href").indexOf(current) !== -1) {
menuItems[i].parentElement.className += " active";
}
}
})();
/*********************/
/* Feather Icons */
/*********************/
feather.replace();
/*********************/
/* DD Menu */
/*********************/
var ddmenu = document.getElementsByClassName("dd-menu");
for (var i = 0, len = ddmenu.length; i < len; i++) {
ddmenu[i].onclick = function (elem) {
elem.stopPropagation();
}
}
/*********************/
/* Small Menu */
/*********************/
try {
var spy = new Gumshoe('#navmenu-nav a');
} catch (err) {
}
/*********************/
/* Dark & Light Mode */
/*********************/
try {
function changeTheme(e){
e.preventDefault()
const htmlTag = document.getElementsByTagName("html")[0]
if (htmlTag.className.includes("dark")) {
htmlTag.className = 'light'
} else {
htmlTag.className = 'dark'
}
}
const switcher = document.getElementById("theme-mode")
switcher?.addEventListener("click" ,changeTheme )
const chk = document.getElementById('chk');
chk.addEventListener('change',changeTheme);
} catch (err) {
}
/*********************/
/* WoW Js */
/*********************/
try {
new WOW().init();
} catch (error) {
}
/*************************/
/* Contact Js */
/*************************/
try {
function validateForm() {
var name = document.forms["myForm"]["name"].value;
var email = document.forms["myForm"]["email"].value;
var subject = document.forms["myForm"]["subject"].value;
var comments = document.forms["myForm"]["comments"].value;
document.getElementById("error-msg").style.opacity = 0;
document.getElementById('error-msg').innerHTML = "";
if (name == "" || name == null) {
document.getElementById('error-msg').innerHTML = "<div class='alert alert-warning error_message'>*Please enter a Name*</div>";
fadeIn();
return false;
}
if (email == "" || email == null) {
document.getElementById('error-msg').innerHTML = "<div class='alert alert-warning error_message'>*Please enter a Email*</div>";
fadeIn();
return false;
}
if (subject == "" || subject == null) {
document.getElementById('error-msg').innerHTML = "<div class='alert alert-warning error_message'>*Please enter a Subject*</div>";
fadeIn();
return false;
}
if (comments == "" || comments == null) {
document.getElementById('error-msg').innerHTML = "<div class='alert alert-warning error_message'>*Please enter a Comments*</div>";
fadeIn();
return false;
}
var xhttp = new XMLHttpRequest();
xhttp.onreadystatechange = function () {
if (this.readyState == 4 && this.status == 200) {
document.getElementById("simple-msg").innerHTML = this.responseText;
document.forms["myForm"]["name"].value = "";
document.forms["myForm"]["email"].value = "";
document.forms["myForm"]["subject"].value = "";
document.forms["myForm"]["comments"].value = "";
}
};
xhttp.open("POST", "php/contact.php", true);
xhttp.setRequestHeader("Content-type", "application/x-www-form-urlencoded");
xhttp.send("name=" + name + "&email=" + email + "&subject=" + subject + "&comments=" + comments);
return false;
}
function fadeIn() {
var fade = document.getElementById("error-msg");
var opacity = 0;
var intervalID = setInterval(function () {
if (opacity < 1) {
opacity = opacity + 0.5
fade.style.opacity = opacity;
} else {
clearInterval(intervalID);
}
}, 200);
}
} catch (error) {
}

File diff suppressed because it is too large.

File diff suppressed because one or more lines are too long

Some files were not shown because too many files have changed in this diff.