first commit
Some checks failed
Backend Tests / Static Checks (push) Has been cancelled
Backend Tests / Tests (other) (push) Has been cancelled
Backend Tests / Tests (plugin) (push) Has been cancelled
Backend Tests / Tests (server) (push) Has been cancelled
Backend Tests / Tests (store) (push) Has been cancelled
Build Canary Image / build-frontend (push) Has been cancelled
Build Canary Image / build-push (linux/amd64) (push) Has been cancelled
Build Canary Image / build-push (linux/arm64) (push) Has been cancelled
Build Canary Image / merge (push) Has been cancelled
Frontend Tests / Lint (push) Has been cancelled
Frontend Tests / Build (push) Has been cancelled
Proto Linter / Lint Protos (push) Has been cancelled

This commit is contained in:
2026-03-04 06:30:47 +00:00
commit bb402d4ccc
777 changed files with 135661 additions and 0 deletions

24
.air.toml Normal file
View File

@@ -0,0 +1,24 @@
# Air live-reload configuration for local backend development
# (https://github.com/air-verse/air): rebuilds and reruns the server on change.
root = "."
tmp_dir = "tmp"
[build]
# NOTE(review): `entrypoint` is not a classic air [build] key (`bin`/`full_bin`
# are the documented ones) — confirm the pinned air version supports it.
entrypoint = ["./tmp/main", "--port", "8081"]
cmd = "go build -o ./tmp/main ./cmd/memos"
# Debounce: wait 2000 ms after a change before rebuilding.
delay = 2000
# Skip generated, vendored, and frontend trees to avoid rebuild loops.
exclude_dir = ["assets", "tmp", "vendor", "web", "proto/gen", "arms_cache", "store/migration"]
include_ext = ["go", "yaml", "yml", "toml"]
# Do not rebuild when only test files change.
exclude_regex = ["_test.go"]
[log]
# Prefix log lines with timestamps.
time = true
# Per-stream terminal colors.
[color]
app = "cyan"
build = "yellow"
main = "magenta"
runner = "green"
watcher = "blue"
[screen]
clear_on_rebuild = true
keep_scroll = true

13
.dockerignore Normal file
View File

@@ -0,0 +1,13 @@
# Paths excluded from the Docker build context to keep builds small and cache-friendly.
# Frontend dependencies/output are rebuilt (or injected as artifacts) inside the image build.
web/node_modules
web/dist
.git
.github
build/
tmp/
# Locally built server binary.
memos
*.md
.gitignore
.golangci.yaml
.dockerignore
docs/
# macOS Finder metadata.
.DS_Store

1
.github/FUNDING.yml vendored Normal file
View File

@@ -0,0 +1 @@
github: usememos

82
.github/ISSUE_TEMPLATE/bug_report.yml vendored Normal file
View File

@@ -0,0 +1,82 @@
# GitHub issue form for bug reports: pre-submission checklist, where the bug was
# seen, exact version, description, repro steps, expected behavior, and context.
# NOTE(review): indentation was flattened in this capture — the original file is
# nested YAML (body > elements > attributes/options/validations); restore before use.
name: Bug Report
description: If something isn't working as expected
labels: [bug]
body:
- type: markdown
attributes:
value: |
Thank you for taking the time to report a bug! Please complete the form below to help us understand and fix the issue.
- type: checkboxes
id: pre-check
attributes:
label: Pre-submission Checklist
description: Please confirm you have completed the following steps before submitting
options:
- label: I have searched the existing issues and this bug has not been reported yet
required: true
- label: I have tested this issue on the [demo site](https://demo.usememos.com) or the latest version
required: true
- type: dropdown
id: issue-location
attributes:
label: Where did you encounter this bug?
description: Select where you tested and confirmed this issue
options:
- Latest stable version (self-hosted)
- Latest development version (self-hosted)
- Demo site (demo.usememos.com)
- Older version (please specify below)
# default: 0 preselects the first option ("Latest stable version").
default: 0
validations:
required: true
- type: input
id: version
attributes:
label: Memos Version
description: Provide the exact version (e.g., `v0.25.2`). Find this in Settings → About or via `--version` flag
placeholder: v0.25.2
validations:
required: true
- type: textarea
id: bug-description
attributes:
label: Bug Description
description: A clear and concise description of what the bug is
placeholder: When I try to..., the application...
validations:
required: true
- type: textarea
id: reproduction-steps
attributes:
label: Steps to Reproduce
description: Detailed steps to reproduce the behavior
placeholder: |
1. Go to '...'
2. Click on '...'
3. Scroll down to '...'
4. See error
validations:
required: true
# The remaining sections are optional (no `validations: required`).
- type: textarea
id: expected-behavior
attributes:
label: Expected Behavior
description: What did you expect to happen?
placeholder: I expected...
- type: textarea
id: additional-context
attributes:
label: Screenshots & Additional Context
description: Add screenshots, browser/OS info, deployment method (Docker/binary), or any other relevant details
placeholder: |
- Browser: Chrome 120
- OS: macOS 14
- Deployment: Docker
- Database: SQLite

1
.github/ISSUE_TEMPLATE/config.yml vendored Normal file
View File

@@ -0,0 +1 @@
blank_issues_enabled: false

76
.github/ISSUE_TEMPLATE/feature_request.yml vendored Normal file
View File

@@ -0,0 +1,76 @@
# GitHub issue form for feature requests: duplicate check, feature category,
# problem statement, proposed solution, alternatives, and extra context.
# NOTE(review): indentation was flattened in this capture — the original file is
# nested YAML (body > elements > attributes/options/validations); restore before use.
name: Feature Request
description: If you have a suggestion for a new feature
labels: [enhancement]
body:
- type: markdown
attributes:
value: |
Thank you for suggesting a new feature! Please complete the form below to help us understand your idea.
- type: checkboxes
id: pre-check
attributes:
label: Pre-submission Checklist
description: Please confirm you have completed the following steps before submitting
options:
- label: I have searched the existing issues and this feature has not been requested yet
required: true
- type: dropdown
id: feature-type
attributes:
label: Type of Feature
description: What type of feature is this?
options:
- User Interface (UI)
- User Experience (UX)
- API / Backend
- Documentation
- Integrations / Plugins
- Security / Privacy
- Performance
- Other
# default: 0 preselects "User Interface (UI)".
default: 0
validations:
required: true
- type: textarea
id: problem-statement
attributes:
label: Problem or Use Case
description: What problem does this feature solve? What are you trying to accomplish?
placeholder: |
I often need to... but currently there's no way to...
This would help me/users to...
validations:
required: true
- type: textarea
id: proposed-solution
attributes:
label: Proposed Solution
description: A clear and concise description of what you want to happen
placeholder: |
It would be great if Memos could...
For example, a button/feature that...
validations:
required: true
# Optional sections below (no `validations: required`).
- type: textarea
id: alternatives
attributes:
label: Alternatives Considered
description: Have you considered any alternative solutions or workarounds?
placeholder: |
I've tried... but it doesn't work well because...
An alternative could be...
- type: textarea
id: additional-context
attributes:
label: Additional Context
description: Add any other context, mockups, screenshots, or examples about the feature request
placeholder: |
- Similar feature in other apps: ...
- Mockups or screenshots: ...
- Related discussions: ...

91
.github/workflows/backend-tests.yml vendored Normal file
View File

@@ -0,0 +1,91 @@
# Backend CI: static checks (go.mod tidiness + golangci-lint) and a 4-way test matrix.
# NOTE(review): indentation reconstructed from a flattened capture — verify nesting
# against the original file. In particular, `paths` is assumed to belong to
# `pull_request` only, matching the flat line order.
name: Backend Tests
on:
  push:
    branches: [main]
  pull_request:
    branches: [main]
    paths:
      - "go.mod"
      - "go.sum"
      - "**.go"
# Cancel superseded runs for the same ref.
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true
env:
  GO_VERSION: "1.25.7"
jobs:
  static-checks:
    name: Static Checks
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v6
      - name: Setup Go
        uses: actions/setup-go@v6
        with:
          go-version: ${{ env.GO_VERSION }}
          cache: true
          cache-dependency-path: go.sum
      - name: Verify go.mod is tidy
        run: |
          go mod tidy -go=${{ env.GO_VERSION }}
          git diff --exit-code
      - name: Run golangci-lint
        uses: golangci/golangci-lint-action@v9
        with:
          version: v2.4.0
          args: --timeout=3m
  tests:
    name: Tests (${{ matrix.test-group }})
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        test-group: [store, server, plugin, other]
    steps:
      - name: Checkout code
        uses: actions/checkout@v6
      - name: Setup Go
        uses: actions/setup-go@v6
        with:
          go-version: ${{ env.GO_VERSION }}
          cache: true
          cache-dependency-path: go.sum
      - name: Run tests
        run: |
          case "${{ matrix.test-group }}" in
            store)
              # Run store tests for all drivers (sqlite, mysql, postgres).
              # Note: unlike the other groups, this one does not pass -race.
              go test -v -coverprofile=coverage.out -covermode=atomic ./store/...
              ;;
            server)
              go test -v -race -coverprofile=coverage.out -covermode=atomic ./server/...
              ;;
            plugin)
              go test -v -race -coverprofile=coverage.out -covermode=atomic ./plugin/...
              ;;
            other)
              go test -v -race -coverprofile=coverage.out -covermode=atomic \
                ./cmd/... ./internal/... ./proto/...
              ;;
          esac
        env:
          # BUG FIX: the original expression
          #   ${{ matrix.test-group == 'store' && '' || 'sqlite' }}
          # always yields 'sqlite', because '' is falsy in GitHub Actions
          # expressions: `true && ''` evaluates to '', which then falls through
          # `|| 'sqlite'`. Inverting the comparison makes DRIVER empty for the
          # store group (presumably letting it exercise every driver — confirm
          # against the store test harness) and 'sqlite' for all other groups.
          DRIVER: ${{ matrix.test-group != 'store' && 'sqlite' || '' }}
      - name: Upload coverage
        # Only upload from post-merge runs on main.
        if: github.event_name == 'push' && github.ref == 'refs/heads/main'
        uses: codecov/codecov-action@v5
        with:
          files: ./coverage.out
          flags: ${{ matrix.test-group }}
          fail_ci_if_error: false

231
.github/workflows/build-binaries.yml vendored Normal file
View File

@@ -0,0 +1,231 @@
# Release pipeline: version extraction -> frontend build -> cross-compiled Go
# binaries (Linux/macOS/Windows) -> optional upload to the GitHub Release.
# NOTE(review): indentation was flattened in this capture — the original file is
# nested YAML; restore before use.
name: Build Binaries
# Build multi-platform binaries on release or manual trigger
# Produces distributable packages for Linux, macOS, and Windows
on:
release:
types: [published]
workflow_dispatch:
# Environment variables for build configuration
env:
GO_VERSION: "1.25.7"
NODE_VERSION: "22"
PNPM_VERSION: "10"
ARTIFACT_RETENTION_DAYS: 60
# Artifact naming: {ARTIFACT_PREFIX}_{version}_{os}_{arch}.tar.gz|zip
ARTIFACT_PREFIX: memos
jobs:
# Job 1: Extract version information
# - For git tags: use tag version (e.g., v0.28.1 -> 0.28.1)
# - For branches: use branch-name-shortSHA format
prepare:
name: Extract Version
runs-on: ubuntu-latest
outputs:
version: ${{ steps.version.outputs.version }}
steps:
- name: Checkout code
uses: actions/checkout@v6
with:
fetch-depth: 0 # Full history for git describe
- name: Extract version
id: version
run: |
# Try to get version from git tag
TAG=$(git describe --tags --exact-match 2>/dev/null || echo "")
if [ -n "$TAG" ]; then
echo "version=${TAG#v}" >> $GITHUB_OUTPUT
echo "Version from tag: ${TAG#v}"
else
# Use branch name + short SHA
BRANCH="${GITHUB_REF_NAME//\//-}"
SHORT_SHA="${GITHUB_SHA:0:7}"
echo "version=${BRANCH}-${SHORT_SHA}" >> $GITHUB_OUTPUT
echo "Version from branch: ${BRANCH}-${SHORT_SHA}"
fi
# Job 2: Build frontend assets
# - Builds React frontend with Vite
# - Produces static files that will be embedded in Go binary
# - Shared across all platform builds
build-frontend:
name: Build Frontend
needs: prepare
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v6
- name: Setup pnpm
uses: pnpm/action-setup@v4.2.0
with:
version: ${{ env.PNPM_VERSION }}
- name: Setup Node.js
uses: actions/setup-node@v6
with:
node-version: ${{ env.NODE_VERSION }}
cache: pnpm
cache-dependency-path: web/pnpm-lock.yaml
- name: Get pnpm store directory
id: pnpm-cache
shell: bash
run: echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT
- name: Setup pnpm cache
uses: actions/cache@v5
with:
path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('web/pnpm-lock.yaml') }}
restore-keys: ${{ runner.os }}-pnpm-store-
- name: Install dependencies
working-directory: web
run: pnpm install --frozen-lockfile
- name: Build frontend
working-directory: web
run: pnpm release
- name: Upload frontend artifacts
uses: actions/upload-artifact@v6
with:
name: frontend-dist
path: server/router/frontend/dist
retention-days: ${{ env.ARTIFACT_RETENTION_DAYS }}
# Job 3: Build Go binaries for multiple platforms
# - Cross-compiles using native Go toolchain
# - Embeds frontend assets built in previous job
# - Produces static binaries with no external dependencies
# - Packages as tar.gz (Unix) or zip (Windows)
build-binaries:
name: Build ${{ matrix.goos }}-${{ matrix.goarch }}${{ matrix.goarm && format('v{0}', matrix.goarm) || '' }}
needs: [prepare, build-frontend]
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
include:
# Linux targets
- goos: linux
goarch: amd64
- goos: linux
goarch: arm64
- goos: linux
goarch: arm
goarm: "7"
# macOS targets
- goos: darwin
goarch: amd64 # Intel Macs
- goos: darwin
goarch: arm64 # Apple Silicon
# Windows targets
- goos: windows
goarch: amd64
steps:
- name: Checkout code
uses: actions/checkout@v6
- name: Setup Go
uses: actions/setup-go@v6
with:
go-version: ${{ env.GO_VERSION }}
cache: true
- name: Download frontend artifacts
uses: actions/download-artifact@v7
with:
name: frontend-dist
path: server/router/frontend/dist
- name: Build binary
env:
GOOS: ${{ matrix.goos }}
GOARCH: ${{ matrix.goarch }}
# GOARM is empty for non-arm targets; the Go toolchain ignores it then.
GOARM: ${{ matrix.goarm }}
CGO_ENABLED: "0"
run: |
# Determine output binary name
OUTPUT_NAME="memos"
if [ "$GOOS" = "windows" ]; then
OUTPUT_NAME="memos.exe"
fi
mkdir -p build
# Build static binary with optimizations
go build \
-trimpath \
-ldflags="-s -w -extldflags '-static'" \
-tags netgo,osusergo \
-o "build/${OUTPUT_NAME}" \
./cmd/memos
echo "✓ Built: build/${OUTPUT_NAME}"
ls -lh build/
- name: Package binary
id: package
env:
VERSION: ${{ needs.prepare.outputs.version }}
GOOS: ${{ matrix.goos }}
GOARCH: ${{ matrix.goarch }}
GOARM: ${{ matrix.goarm }}
run: |
cd build
# Construct package name: {prefix}_{version}_{os}_{arch}[v{arm_version}]
PACKAGE_NAME="${ARTIFACT_PREFIX}_${VERSION}_${GOOS}_${GOARCH}"
if [ -n "$GOARM" ]; then
PACKAGE_NAME="${PACKAGE_NAME}v${GOARM}"
fi
# Package based on platform
if [ "$GOOS" = "windows" ]; then
ARTIFACT_NAME="${PACKAGE_NAME}.zip"
zip -q "${ARTIFACT_NAME}" memos.exe
else
ARTIFACT_NAME="${PACKAGE_NAME}.tar.gz"
tar czf "${ARTIFACT_NAME}" memos
fi
# Output for next step (exported via GITHUB_ENV, read as env.ARTIFACT_NAME below)
echo "ARTIFACT_NAME=${ARTIFACT_NAME}" >> $GITHUB_ENV
echo "✓ Package created: ${ARTIFACT_NAME} ($(du -h "${ARTIFACT_NAME}" | cut -f1))"
- name: Upload binary artifact
uses: actions/upload-artifact@v6
with:
name: ${{ env.ARTIFACT_NAME }}
path: build/${{ env.ARTIFACT_NAME }}
retention-days: ${{ env.ARTIFACT_RETENTION_DAYS }}
# Job 4: Upload artifacts to GitHub Release
# - Only runs when triggered by a release publish event
# - Downloads all built artifacts and attaches them to the release
release:
name: Upload Release Assets
needs: build-binaries
if: github.event_name == 'release'
runs-on: ubuntu-latest
permissions:
contents: write
steps:
- name: Download all artifacts
uses: actions/download-artifact@v7
with:
path: artifacts
# Matches the per-platform package artifacts uploaded by Job 3.
pattern: ${{ env.ARTIFACT_PREFIX }}_*
merge-multiple: true
- name: Upload to GitHub Release
uses: softprops/action-gh-release@v2
with:
files: artifacts/*

166
.github/workflows/build-canary-image.yml vendored Normal file
View File

@@ -0,0 +1,166 @@
# Canary image pipeline on every push to main: build frontend once, build/push
# per-platform images by digest, then merge digests into a multi-arch `canary`
# manifest on Docker Hub and GHCR.
# NOTE(review): indentation was flattened in this capture — the original file is
# nested YAML; restore before use.
name: Build Canary Image
on:
push:
branches: [main]
# One canary build at a time per repository; newer pushes cancel older runs.
concurrency:
group: ${{ github.workflow }}-${{ github.repository }}
cancel-in-progress: true
jobs:
build-frontend:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v6
- uses: pnpm/action-setup@v4.2.0
with:
version: 10
- uses: actions/setup-node@v6
with:
node-version: "22"
cache: pnpm
cache-dependency-path: "web/pnpm-lock.yaml"
- name: Get pnpm store directory
id: pnpm-cache
shell: bash
run: echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT
- name: Setup pnpm cache
uses: actions/cache@v5
with:
path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('web/pnpm-lock.yaml') }}
restore-keys: ${{ runner.os }}-pnpm-store-
- run: pnpm install --frozen-lockfile
working-directory: web
- name: Run frontend build
run: pnpm release
working-directory: web
- name: Upload frontend artifacts
uses: actions/upload-artifact@v6
with:
name: frontend-dist
path: server/router/frontend/dist
retention-days: 1
build-push:
needs: build-frontend
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
strategy:
fail-fast: false
matrix:
platform:
- linux/amd64
- linux/arm64
steps:
- uses: actions/checkout@v6
- name: Download frontend artifacts
uses: actions/download-artifact@v7
with:
name: frontend-dist
path: server/router/frontend/dist
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_HUB_USERNAME }}
password: ${{ secrets.DOCKER_HUB_TOKEN }}
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ github.token }}
# Push by digest only; tags are assembled in the `merge` job below.
- name: Build and push by digest
id: build
uses: docker/build-push-action@v6
with:
context: .
file: ./scripts/Dockerfile
platforms: ${{ matrix.platform }}
cache-from: type=gha,scope=build-${{ matrix.platform }}
cache-to: type=gha,mode=max,scope=build-${{ matrix.platform }}
outputs: type=image,name=neosmemo/memos,push-by-digest=true,name-canonical=true,push=true
- name: Export digest
run: |
mkdir -p /tmp/digests
digest="${{ steps.build.outputs.digest }}"
touch "/tmp/digests/${digest#sha256:}"
- name: Upload digest
uses: actions/upload-artifact@v6
with:
name: digests-${{ strategy.job-index }}
path: /tmp/digests/*
if-no-files-found: error
retention-days: 1
merge:
needs: build-push
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
steps:
- name: Download digests
uses: actions/download-artifact@v7
with:
pattern: digests-*
merge-multiple: true
path: /tmp/digests
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
images: |
neosmemo/memos
ghcr.io/usememos/memos
flavor: |
latest=false
tags: |
type=raw,value=canary
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_HUB_USERNAME }}
password: ${{ secrets.DOCKER_HUB_TOKEN }}
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ github.token }}
# Combine per-platform digests into one multi-arch manifest per tag.
- name: Create manifest list and push
working-directory: /tmp/digests
run: |
docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf 'neosmemo/memos@sha256:%s ' *)
env:
DOCKER_METADATA_OUTPUT_JSON: ${{ steps.meta.outputs.json }}
- name: Inspect images
run: |
docker buildx imagetools inspect neosmemo/memos:canary
docker buildx imagetools inspect ghcr.io/usememos/memos:canary

184
.github/workflows/build-stable-image.yml vendored Normal file
View File

@@ -0,0 +1,184 @@
# Stable image pipeline on release branches and version tags: extract version,
# build frontend, build/push per-platform images by digest, then merge into
# multi-arch `x.y.z`, `x.y`, and `stable` manifests on Docker Hub and GHCR.
# NOTE(review): indentation was flattened in this capture — the original file is
# nested YAML; restore before use.
name: Build Stable Image
on:
push:
branches:
- "release/**"
tags:
- "v*.*.*"
jobs:
prepare:
runs-on: ubuntu-latest
outputs:
version: ${{ steps.version.outputs.version }}
steps:
# Version is "1.2.3" from tag v1.2.3, or the suffix of a release/** branch.
- name: Extract version
id: version
run: |
if [[ "$GITHUB_REF_TYPE" == "tag" ]]; then
echo "version=${GITHUB_REF_NAME#v}" >> $GITHUB_OUTPUT
else
echo "version=${GITHUB_REF_NAME#release/}" >> $GITHUB_OUTPUT
fi
build-frontend:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v6
- uses: pnpm/action-setup@v4.2.0
with:
version: 10
- uses: actions/setup-node@v6
with:
node-version: "22"
cache: pnpm
cache-dependency-path: "web/pnpm-lock.yaml"
- name: Get pnpm store directory
id: pnpm-cache
shell: bash
run: echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT
- name: Setup pnpm cache
uses: actions/cache@v5
with:
path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('web/pnpm-lock.yaml') }}
restore-keys: ${{ runner.os }}-pnpm-store-
- run: pnpm install --frozen-lockfile
working-directory: web
- name: Run frontend build
run: pnpm release
working-directory: web
- name: Upload frontend artifacts
uses: actions/upload-artifact@v6
with:
name: frontend-dist
path: server/router/frontend/dist
retention-days: 1
build-push:
needs: [prepare, build-frontend]
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
strategy:
fail-fast: false
matrix:
# Note: stable builds add linux/arm/v7 compared to the canary workflow.
platform:
- linux/amd64
- linux/arm/v7
- linux/arm64
steps:
- uses: actions/checkout@v6
- name: Download frontend artifacts
uses: actions/download-artifact@v7
with:
name: frontend-dist
path: server/router/frontend/dist
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_HUB_USERNAME }}
password: ${{ secrets.DOCKER_HUB_TOKEN }}
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ github.token }}
# Push by digest only; tags are assembled in the `merge` job below.
- name: Build and push by digest
id: build
uses: docker/build-push-action@v6
with:
context: .
file: ./scripts/Dockerfile
platforms: ${{ matrix.platform }}
cache-from: type=gha,scope=build-${{ matrix.platform }}
cache-to: type=gha,mode=max,scope=build-${{ matrix.platform }}
outputs: type=image,name=neosmemo/memos,push-by-digest=true,name-canonical=true,push=true
- name: Export digest
run: |
mkdir -p /tmp/digests
digest="${{ steps.build.outputs.digest }}"
touch "/tmp/digests/${digest#sha256:}"
- name: Upload digest
uses: actions/upload-artifact@v6
with:
name: digests-${{ strategy.job-index }}
path: /tmp/digests/*
if-no-files-found: error
retention-days: 1
merge:
needs: [prepare, build-push]
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
steps:
- name: Download digests
uses: actions/download-artifact@v7
with:
pattern: digests-*
merge-multiple: true
path: /tmp/digests
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
images: |
neosmemo/memos
ghcr.io/usememos/memos
tags: |
type=semver,pattern={{version}},value=${{ needs.prepare.outputs.version }}
type=semver,pattern={{major}}.{{minor}},value=${{ needs.prepare.outputs.version }}
type=raw,value=stable
flavor: |
latest=false
labels: |
org.opencontainers.image.version=${{ needs.prepare.outputs.version }}
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_HUB_USERNAME }}
password: ${{ secrets.DOCKER_HUB_TOKEN }}
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ github.token }}
# Combine per-platform digests into one multi-arch manifest per tag.
- name: Create manifest list and push
working-directory: /tmp/digests
run: |
docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf 'neosmemo/memos@sha256:%s ' *)
env:
DOCKER_METADATA_OUTPUT_JSON: ${{ steps.meta.outputs.json }}
- name: Inspect images
run: |
docker buildx imagetools inspect neosmemo/memos:stable
docker buildx imagetools inspect ghcr.io/usememos/memos:stable

17
.github/workflows/demo-deploy.yml vendored Normal file
View File

@@ -0,0 +1,17 @@
# Manually-triggered deploy of the demo site: fires the Render deploy hook.
# Requires the RENDER_DEPLOY_HOOK repository secret (a Render deploy-hook URL).
# NOTE(review): indentation was flattened in this capture — restore before use.
name: Demo Deploy
on:
workflow_dispatch:
jobs:
deploy-demo:
runs-on: ubuntu-latest
steps:
- name: Trigger Render Deploy
run: |
curl -X POST "${{ secrets.RENDER_DEPLOY_HOOK }}" \
-H "Content-Type: application/json" \
-d '{"trigger": "github_action"}'
# NOTE(review): curl's exit code is the only failure signal here; the hook's
# HTTP status is not checked (consider curl -f) — confirm this is intentional.
- name: Deployment Status
run: echo "Demo deployment triggered successfully on Render"

72
.github/workflows/frontend-tests.yml vendored Normal file
View File

@@ -0,0 +1,72 @@
# Frontend CI: lint and production-build checks for the web/ package.
# NOTE(review): indentation was flattened in this capture — the original file is
# nested YAML; `paths` is assumed to belong to `pull_request` only, matching the
# flat line order. Restore before use.
name: Frontend Tests
on:
push:
branches: [main]
pull_request:
branches: [main]
paths:
- "web/**"
# Cancel superseded runs for the same ref.
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
NODE_VERSION: "22"
PNPM_VERSION: "10"
jobs:
lint:
name: Lint
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v6
- name: Setup pnpm
uses: pnpm/action-setup@v4.2.0
with:
version: ${{ env.PNPM_VERSION }}
- name: Setup Node.js
uses: actions/setup-node@v6
with:
node-version: ${{ env.NODE_VERSION }}
cache: pnpm
cache-dependency-path: web/pnpm-lock.yaml
- name: Install dependencies
working-directory: web
run: pnpm install --frozen-lockfile
- name: Run lint
working-directory: web
run: pnpm lint
build:
name: Build
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v6
- name: Setup pnpm
uses: pnpm/action-setup@v4.2.0
with:
version: ${{ env.PNPM_VERSION }}
- name: Setup Node.js
uses: actions/setup-node@v6
with:
node-version: ${{ env.NODE_VERSION }}
cache: pnpm
cache-dependency-path: web/pnpm-lock.yaml
- name: Install dependencies
working-directory: web
run: pnpm install --frozen-lockfile
- name: Build frontend
working-directory: web
run: pnpm build

40
.github/workflows/proto-linter.yml vendored Normal file
View File

@@ -0,0 +1,40 @@
# Proto CI: buf lint plus a formatting check (fails if `buf format -w` would change files).
# NOTE(review): indentation was flattened in this capture — the original file is
# nested YAML; `paths` is assumed to belong to `pull_request` only, matching the
# flat line order. Restore before use.
name: Proto Linter
on:
push:
branches: [main]
pull_request:
branches: [main]
paths:
- "proto/**"
# Cancel superseded runs for the same ref.
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
lint:
name: Lint Protos
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v6
with:
fetch-depth: 0
- name: Setup buf
uses: bufbuild/buf-setup-action@v1
with:
github_token: ${{ github.token }}
- name: Run buf lint
uses: bufbuild/buf-lint-action@v1
with:
input: proto
# `buf format -d` prints a diff when files are unformatted; any output fails the job.
- name: Check buf format
run: |
if [[ $(buf format -d) ]]; then
echo "❌ Proto files are not formatted. Run 'buf format -w' to fix."
exit 1
fi

24
.github/workflows/stale.yml vendored Normal file
View File

@@ -0,0 +1,24 @@
# Housekeeping: marks issues/PRs stale after 14 days of inactivity and closes
# them 3 days later. Runs every 8 hours.
# NOTE(review): indentation was flattened in this capture — restore before use.
name: Close Stale
on:
schedule:
- cron: "0 */8 * * *" # Every 8 hours
jobs:
close-stale:
name: Close Stale Issues and PRs
runs-on: ubuntu-latest
permissions:
issues: write
pull-requests: write
steps:
- name: Mark and close stale issues and PRs
uses: actions/stale@v10.1.1
with:
# Issues: mark stale after 14 days of inactivity, close after 3 more days
days-before-issue-stale: 14
days-before-issue-close: 3
# Pull requests: mark stale after 14 days of inactivity, close after 3 more days
days-before-pr-stale: 14
days-before-pr-close: 3

29
.gitignore vendored Normal file
View File

@@ -0,0 +1,29 @@
# Repository-wide ignore rules: build output, local tooling state, and secrets.
# temp folder
tmp
# Frontend asset
web/dist
# Build artifacts
build/
bin/
# Locally built server binary.
memos
# Plan/design documents
docs/plans/
# macOS Finder metadata.
.DS_Store
# Jetbrains
.idea
# Docker Compose Environment File
.env
dist
# VSCode settings
.vscode
# Git worktrees
.worktrees/

101
.golangci.yaml Normal file
View File

@@ -0,0 +1,101 @@
# golangci-lint v2 configuration: enables a curated linter set, disables errcheck,
# relaxes staticcheck/revive, and forbids fmt.Errorf / ioutil.ReadDir.
# NOTE(review): indentation was flattened in this capture — the original file is
# nested YAML; restore before use.
version: "2"
linters:
enable:
- revive
- govet
- staticcheck
- misspell
- gocritic
- sqlclosecheck
- rowserrcheck
- nilerr
- godot
- forbidigo
- mirror
- bodyclose
disable:
- errcheck
settings:
# NOTE(review): `exhaustive` has settings here but is not in the enable list
# above — confirm whether it is meant to be enabled.
exhaustive:
explicit-exhaustive-switch: false
staticcheck:
checks:
- all
- -ST1000
- -ST1003
- -ST1021
- -QF1003
revive:
# Default to run all linters so that new rules in the future could automatically be added to the static check.
enable-all-rules: true
rules:
# The following rules are too strict and make coding harder. We do not enable them for now.
- name: file-header
disabled: true
- name: line-length-limit
disabled: true
- name: function-length
disabled: true
- name: max-public-structs
disabled: true
- name: function-result-limit
disabled: true
- name: banned-characters
disabled: true
- name: argument-limit
disabled: true
- name: cognitive-complexity
disabled: true
- name: cyclomatic
disabled: true
- name: confusing-results
disabled: true
- name: add-constant
disabled: true
- name: flag-parameter
disabled: true
- name: nested-structs
disabled: true
- name: import-shadowing
disabled: true
- name: early-return
disabled: true
- name: use-any
disabled: true
- name: exported
disabled: true
- name: unhandled-error
disabled: true
- name: if-return
disabled: true
- name: max-control-nesting
disabled: true
- name: redefines-builtin-id
disabled: true
- name: package-comments
disabled: true
gocritic:
disabled-checks:
- ifElseChain
govet:
settings:
printf: # The name of the analyzer, run `go tool vet help` to see the list of all analyzers
funcs: # Run `go tool vet help printf` to see the full configuration of `printf`.
- common.Errorf
enable-all: true
disable:
- fieldalignment
- shadow
forbidigo:
forbid:
# Enforce the project's errors package over fmt.Errorf, and os.ReadDir over ioutil.
- pattern: 'fmt\.Errorf(# Please use errors\.Wrap\|Wrapf\|Errorf instead)?'
- pattern: 'ioutil\.ReadDir(# Please use os\.ReadDir)?'
formatters:
enable:
- goimports
settings:
goimports:
local-prefixes:
- github.com/usememos/memos

BIN
.thumbnail_cache/1.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 19 KiB

572
AGENTS.md Normal file
View File

@@ -0,0 +1,572 @@
# Memos Codebase Guide for AI Agents
This document provides comprehensive guidance for AI agents working with the Memos codebase. It covers architecture, workflows, conventions, and key patterns.
## Project Overview
Memos is a self-hosted knowledge management platform built with:
- **Backend:** Go 1.25 with gRPC + Connect RPC
- **Frontend:** React 18.3 + TypeScript + Vite 7
- **Databases:** SQLite (default), MySQL, PostgreSQL
- **Protocol:** Protocol Buffers (v2) with buf for code generation
- **API Layer:** Dual protocol - Connect RPC (browsers) + gRPC-Gateway (REST)
## Architecture
### Backend Architecture
```
cmd/memos/ # Entry point
└── main.go # Cobra CLI, profile setup, server initialization
server/
├── server.go # Echo HTTP server, healthz, background runners
├── auth/ # Authentication (JWT, PAT, session)
├── router/
│ ├── api/v1/ # gRPC service implementations
│ │ ├── v1.go # Service registration, gateway & Connect setup
│ │ ├── acl_config.go # Public endpoints whitelist
│ │ ├── connect_services.go # Connect RPC handlers
│ │ ├── connect_interceptors.go # Auth, logging, recovery
│ │ └── *_service.go # Individual services (memo, user, etc.)
│ ├── frontend/ # Static file serving (SPA)
│ ├── fileserver/ # Native HTTP file serving for media
│ └── rss/ # RSS feed generation
└── runner/
├── memopayload/ # Memo payload processing (tags, links, tasks)
└── s3presign/ # S3 presigned URL management
store/ # Data layer with caching
├── driver.go # Driver interface (database operations)
├── store.go # Store wrapper with cache layer
├── cache.go # In-memory caching (instance settings, users)
├── migrator.go # Database migrations
├── db/
│ ├── db.go # Driver factory
│ ├── sqlite/ # SQLite implementation
│ ├── mysql/ # MySQL implementation
│ └── postgres/ # PostgreSQL implementation
└── migration/ # SQL migration files (embedded)
proto/ # Protocol Buffer definitions
├── api/v1/ # API v1 service definitions
└── gen/ # Generated Go & TypeScript code
```
### Frontend Architecture
```
web/
├── src/
│ ├── components/ # React components
│ ├── contexts/ # React Context (client state)
│ │ ├── AuthContext.tsx # Current user, auth state
│ │ ├── ViewContext.tsx # Layout, sort order
│ │ └── MemoFilterContext.tsx # Filters, shortcuts
│ ├── hooks/ # React Query hooks (server state)
│ │ ├── useMemoQueries.ts # Memo CRUD, pagination
│ │ ├── useUserQueries.ts # User operations
│ │ ├── useAttachmentQueries.ts # Attachment operations
│ │ └── ...
│ ├── lib/ # Utilities
│ │ ├── query-client.ts # React Query v5 client
│ │ └── connect.ts # Connect RPC client setup
│ ├── pages/ # Page components
│ └── types/proto/ # Generated TypeScript from .proto
├── package.json # Dependencies
└── vite.config.mts # Vite config with dev proxy
plugin/ # Backend plugins
├── scheduler/ # Cron jobs
├── email/ # Email delivery
├── filter/ # CEL filter expressions
├── webhook/ # Webhook dispatch
├── markdown/ # Markdown parsing & rendering
├── httpgetter/ # HTTP fetching (metadata, images)
└── storage/s3/ # S3 storage backend
```
## Key Architectural Patterns
### 1. API Layer: Dual Protocol
**Connect RPC (Browser Clients):**
- Protocol: `connectrpc.com/connect`
- Base path: `/memos.api.v1.*`
- Interceptor chain: Metadata → Logging → Recovery → Auth
- Returns type-safe responses to React frontend
- See: `server/router/api/v1/connect_interceptors.go:177-227`
**gRPC-Gateway (REST API):**
- Protocol: Standard HTTP/JSON
- Base path: `/api/v1/*`
- Uses same service implementations as Connect
- Useful for external tools, CLI clients
- See: `server/router/api/v1/v1.go:52-96`
**Authentication:**
- JWT Access Tokens (V2): Stateless, 15-min expiration, verified via `AuthenticateByAccessTokenV2`
- Personal Access Tokens (PAT): Stateful, long-lived, validated against database
- Both use `Authorization: Bearer <token>` header
- See: `server/auth/authenticator.go:17-166`
### 2. Store Layer: Interface Pattern
All database operations go through the `Driver` interface:
```go
type Driver interface {
GetDB() *sql.DB
Close() error
IsInitialized(ctx context.Context) (bool, error)
CreateMemo(ctx context.Context, create *Memo) (*Memo, error)
ListMemos(ctx context.Context, find *FindMemo) ([]*Memo, error)
UpdateMemo(ctx context.Context, update *UpdateMemo) error
DeleteMemo(ctx context.Context, delete *DeleteMemo) error
// ... similar methods for all resources
}
```
**Three Implementations:**
- `store/db/sqlite/` - SQLite (modernc.org/sqlite)
- `store/db/mysql/` - MySQL (go-sql-driver/mysql)
- `store/db/postgres/` - PostgreSQL (lib/pq)
**Caching Strategy:**
- Store wrapper maintains in-memory caches for:
- Instance settings (`instanceSettingCache`)
- Users (`userCache`)
- User settings (`userSettingCache`)
- Config: Default TTL 10 min, cleanup interval 5 min, max 1000 items
- See: `store/store.go:10-57`
### 3. Frontend State Management
**React Query v5 (Server State):**
- All API calls go through custom hooks in `web/src/hooks/`
- Query keys organized by resource: `memoKeys`, `userKeys`, `attachmentKeys`
- Default staleTime: 30s, gcTime: 5min
- Automatic refetch on window focus, reconnect
- See: `web/src/lib/query-client.ts`
**React Context (Client State):**
- `AuthContext`: Current user, auth initialization, logout
- `ViewContext`: Layout mode (LIST/MASONRY), sort order
- `MemoFilterContext`: Active filters, shortcut selection, URL sync
### 4. Database Migration System
**Migration Flow:**
1. `preMigrate`: Check if DB exists. If not, apply `LATEST.sql`
2. `checkMinimumUpgradeVersion`: Reject pre-0.22 installations
3. `applyMigrations`: Apply incremental migrations in single transaction
4. Demo mode: Seed with demo data
**Schema Versioning:**
- Stored in `system_setting` table
- Format: `major.minor.patch`
- Migration files: `store/migration/{driver}/{version}/NN__description.sql`
- See: `store/migrator.go:21-414`
### 5. Protocol Buffer Code Generation
**Definition Location:** `proto/api/v1/*.proto`
**Regeneration:**
```bash
cd proto && buf generate
```
**Generated Outputs:**
- Go: `proto/gen/api/v1/` (used by backend services)
- TypeScript: `web/src/types/proto/api/v1/` (used by frontend)
**Linting:** `proto/buf.yaml` - BASIC lint rules, FILE breaking changes
## Development Commands
### Backend
```bash
# Start dev server
go run ./cmd/memos --port 8081
# Run all tests
go test ./...
# Run tests for specific package
go test ./store/...
go test ./server/router/api/v1/test/...
# Lint (golangci-lint)
golangci-lint run
# Format imports
goimports -w .
# Run with MySQL/Postgres
DRIVER=mysql go run ./cmd/memos
DRIVER=postgres go run ./cmd/memos
```
### Frontend
```bash
# Install dependencies
cd web && pnpm install
# Start dev server (proxies API to localhost:8081)
pnpm dev
# Type checking
pnpm lint
# Auto-fix lint issues
pnpm lint:fix
# Format code
pnpm format
# Build for production
pnpm build
# Build and copy to backend
pnpm release
```
### Protocol Buffers
```bash
# Regenerate Go and TypeScript from .proto files
cd proto && buf generate
# Lint proto files
cd proto && buf lint
# Check for breaking changes
cd proto && buf breaking --against .git#main
```
## Key Workflows
### Adding a New API Endpoint
1. **Define in Protocol Buffer:**
- Edit `proto/api/v1/*_service.proto`
- Add request/response messages
- Add RPC method to service
2. **Regenerate Code:**
```bash
cd proto && buf generate
```
3. **Implement Service (Backend):**
- Add method to `server/router/api/v1/*_service.go`
- Follow existing patterns: fetch user, validate, call store
- Add Connect wrapper to `server/router/api/v1/connect_services.go` (optional, same implementation)
4. **If Public Endpoint:**
- Add to `server/router/api/v1/acl_config.go:11-34`
5. **Create Frontend Hook (if needed):**
- Add query/mutation to `web/src/hooks/use*Queries.ts`
- Use existing query key factories
### Database Schema Changes
1. **Create Migration Files:**
```
store/migration/sqlite/0.28/1__add_new_column.sql
store/migration/mysql/0.28/1__add_new_column.sql
store/migration/postgres/0.28/1__add_new_column.sql
```
2. **Update LATEST.sql:**
- Add change to `store/migration/{driver}/LATEST.sql`
3. **Update Store Interface (if new table/model):**
- Add methods to `store/driver.go:8-71`
- Implement in `store/db/{driver}/*.go`
4. **Test Migration:**
- Run `go test ./store/test/...` to verify
### Adding a New Frontend Page
1. **Create Page Component:**
- Add to `web/src/pages/NewPage.tsx`
- Use existing hooks for data fetching
2. **Add Route:**
- Edit `web/src/App.tsx` (or router configuration)
3. **Use React Query:**
```typescript
import { useMemos } from "@/hooks/useMemoQueries";
const { data, isLoading } = useMemos({ filter: "..." });
```
4. **Use Context for Client State:**
```typescript
import { useView } from "@/contexts/ViewContext";
const { layout, toggleSortOrder } = useView();
```
## Testing
### Backend Tests
**Test Pattern:**
```go
func TestMemoCreation(t *testing.T) {
ctx := context.Background()
store := test.NewTestingStore(ctx, t)
// Create test user
user, _ := createTestUser(ctx, store, t)
// Execute operation
memo, err := store.CreateMemo(ctx, &store.Memo{
CreatorID: user.ID,
Content: "Test memo",
// ...
})
require.NoError(t, err)
assert.NotNil(t, memo)
}
```
**Test Utilities:**
- `store/test/store.go:22-35` - `NewTestingStore()` creates isolated DB
- `store/test/store.go:37-77` - `resetTestingDB()` cleans tables
- Test DB determined by `DRIVER` env var (default: sqlite)
**Running Tests:**
```bash
# All tests
go test ./...
# Specific package
go test ./store/...
go test ./server/router/api/v1/test/...
# With coverage
go test -cover ./...
```
### Frontend Testing
**TypeScript Checking:**
```bash
cd web && pnpm lint
```
**No Automated Tests:**
- Frontend relies on TypeScript checking and manual validation
- React Query DevTools available in dev mode (bottom-left)
## Code Conventions
### Go
**Error Handling:**
- Use `github.com/pkg/errors` for wrapping: `errors.Wrap(err, "context")`
- Return structured gRPC errors: `status.Errorf(codes.NotFound, "message")`
**Naming:**
- Package names: lowercase, single word (e.g., `store`, `server`)
- Interfaces: `Driver`, `Store`, `Service`
- Methods: PascalCase for exported, camelCase for internal
**Comments:**
- Public exported functions must have comments (godot enforces)
- Use `//` for single-line, `/* */` for multi-line
**Imports:**
- Grouped: stdlib, third-party, local
- Sorted alphabetically within groups
- Use `goimports -w .` to format
### TypeScript/React
**Components:**
- Functional components with hooks
- Use `useMemo`, `useCallback` for optimization
- Props interfaces: `interface Props { ... }`
**State Management:**
- Server state: React Query hooks
- Client state: React Context
- Avoid direct useState for server data
**Styling:**
- Tailwind CSS v4 via `@tailwindcss/vite`
- Use `clsx` and `tailwind-merge` for conditional classes
**Imports:**
- Absolute imports with `@/` alias
- Group: React, third-party, local
- Auto-organized by Biome
## Important Files Reference
### Backend Entry Points
| File | Purpose |
|------|---------|
| `cmd/memos/main.go` | Server entry point, CLI setup |
| `server/server.go` | Echo server initialization, background runners |
| `store/store.go` | Store wrapper with caching |
| `store/driver.go` | Database driver interface |
### API Layer
| File | Purpose |
|------|---------|
| `server/router/api/v1/v1.go` | Service registration, gateway setup |
| `server/router/api/v1/acl_config.go` | Public endpoints whitelist |
| `server/router/api/v1/connect_interceptors.go` | Connect interceptors |
| `server/auth/authenticator.go` | Authentication logic |
### Frontend Core
| File | Purpose |
|------|---------|
| `web/src/lib/query-client.ts` | React Query client configuration |
| `web/src/contexts/AuthContext.tsx` | User authentication state |
| `web/src/contexts/ViewContext.tsx` | UI preferences |
| `web/src/contexts/MemoFilterContext.tsx` | Filter state |
| `web/src/hooks/useMemoQueries.ts` | Memo queries/mutations |
### Data Layer
| File | Purpose |
|------|---------|
| `store/memo.go` | Memo model definitions, store methods |
| `store/user.go` | User model definitions |
| `store/attachment.go` | Attachment model definitions |
| `store/migrator.go` | Migration logic |
| `store/db/db.go` | Driver factory |
| `store/db/sqlite/sqlite.go` | SQLite driver implementation |
## Configuration
### Backend Environment Variables
| Variable | Default | Description |
|----------|----------|-------------|
| `MEMOS_DEMO` | `false` | Enable demo mode |
| `MEMOS_PORT` | `8081` | HTTP port |
| `MEMOS_ADDR` | `` | Bind address (empty = all) |
| `MEMOS_DATA` | `~/.memos` | Data directory |
| `MEMOS_DRIVER` | `sqlite` | Database: `sqlite`, `mysql`, `postgres` |
| `MEMOS_DSN` | `` | Database connection string |
| `MEMOS_INSTANCE_URL` | `` | Instance base URL |
### Frontend Environment Variables
| Variable | Default | Description |
|----------|----------|-------------|
| `DEV_PROXY_SERVER` | `http://localhost:8081` | Backend proxy target |
## CI/CD
### GitHub Workflows
**Backend Tests** (`.github/workflows/backend-tests.yml`):
- Runs on `go.mod`, `go.sum`, `**.go` changes
- Steps: verify `go mod tidy`, golangci-lint, all tests
**Frontend Tests** (`.github/workflows/frontend-tests.yml`):
- Runs on `web/**` changes
- Steps: pnpm install, lint, build
**Proto Lint** (`.github/workflows/proto-linter.yml`):
- Runs on `.proto` changes
- Steps: buf lint, buf breaking check
### Linting Configuration
**Go** (`.golangci.yaml`):
- Linters: revive, govet, staticcheck, misspell, gocritic, etc.
- Formatter: goimports
- Forbidden: `fmt.Errorf`, `ioutil.ReadDir`
**TypeScript** (`web/biome.json`):
- Linting: Biome (ESLint replacement)
- Formatting: Biome (Prettier replacement)
- Line width: 140 characters
- Semicolons: always
## Common Tasks
### Debugging API Issues
1. Check Connect interceptor logs: `server/router/api/v1/connect_interceptors.go:79-105`
2. Verify endpoint is in `acl_config.go` if public
3. Check authentication via `server/auth/authenticator.go:133-165`
4. Test with curl: `curl -H "Authorization: Bearer <token>" http://localhost:8081/api/v1/...`
### Debugging Frontend State
1. Open React Query DevTools (bottom-left in dev)
2. Inspect query cache, mutations, refetch behavior
3. Check Context state via React DevTools
4. Verify filter state in MemoFilterContext
### Running Tests Against Multiple Databases
```bash
# SQLite (default)
DRIVER=sqlite go test ./...
# MySQL (requires running MySQL server)
DRIVER=mysql DSN="user:pass@tcp(localhost:3306)/memos" go test ./...
# PostgreSQL (requires running PostgreSQL server)
DRIVER=postgres DSN="postgres://user:pass@localhost:5432/memos" go test ./...
```
## Plugin System
Backend supports pluggable components in `plugin/`:
| Plugin | Purpose |
|--------|----------|
| `scheduler` | Cron-based job scheduling |
| `email` | SMTP email delivery |
| `filter` | CEL expression filtering |
| `webhook` | HTTP webhook dispatch |
| `markdown` | Markdown parsing (goldmark) |
| `httpgetter` | HTTP content fetching |
| `storage/s3` | S3-compatible storage |
Each plugin has its own README with usage examples.
## Performance Considerations
### Backend
- Database queries use pagination (`limit`, `offset`)
- In-memory caching reduces DB hits for frequently accessed data
- WAL journal mode for SQLite (reduces locking)
- Thumbnail generation limited to 3 concurrent operations
### Frontend
- React Query reduces redundant API calls
- Infinite queries for large lists (pagination)
- Manual chunks: `utils-vendor`, `mermaid-vendor`, `leaflet-vendor`
- Lazy loading for heavy components
## Security Notes
- JWT secrets must be kept secret (generated on first run in production mode)
- Personal Access Tokens stored as SHA-256 hashes in database
- CSRF protection via SameSite cookies
- CORS enabled for all origins (configure for production)
- Input validation at service layer
- SQL injection prevention via parameterized queries

2
CODEOWNERS Normal file
View File

@@ -0,0 +1,2 @@
# These owners will be the default owners for everything in the repo.
* @boojack @johnnyjoygh

21
LICENSE Normal file
View File

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2025 Memos
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

111
README.md Normal file
View File

@@ -0,0 +1,111 @@
# Memos
<img align="right" height="96px" src="https://raw.githubusercontent.com/usememos/.github/refs/heads/main/assets/logo-rounded.png" alt="Memos" />
An open-source, self-hosted note-taking service. Your thoughts, your data, your control — no tracking, no ads, no subscription fees.
[![Home](https://img.shields.io/badge/🏠-usememos.com-blue?style=flat-square)](https://usememos.com)
[![Live Demo](https://img.shields.io/badge/✨-Try%20Demo-orange?style=flat-square)](https://demo.usememos.com/)
[![Docs](https://img.shields.io/badge/📚-Documentation-green?style=flat-square)](https://usememos.com/docs)
[![Discord](https://img.shields.io/badge/💬-Discord-5865f2?style=flat-square&logo=discord&logoColor=white)](https://discord.gg/tfPJa4UmAv)
[![Docker Pulls](https://img.shields.io/docker/pulls/neosmemo/memos?style=flat-square&logo=docker)](https://hub.docker.com/r/neosmemo/memos)
<img src="https://raw.githubusercontent.com/usememos/.github/refs/heads/main/assets/demo.png" alt="Memos Demo Screenshot" height="512" />
### 💎 Featured Sponsors
[**Warp** — The AI-powered terminal built for speed and collaboration](https://go.warp.dev/memos)
<a href="https://go.warp.dev/memos" target="_blank" rel="noopener">
<img src="https://raw.githubusercontent.com/warpdotdev/brand-assets/main/Github/Sponsor/Warp-Github-LG-02.png" alt="Warp - The AI-powered terminal built for speed and collaboration" width="512" />
</a>
<p></p>
[**TestMu AI** - The world's first full-stack Agentic AI Quality Engineering platform](https://www.testmuai.com/?utm_medium=sponsor&utm_source=memos)
<a href="https://www.testmuai.com/?utm_medium=sponsor&utm_source=memos" target="_blank" rel="noopener">
<img src="https://usememos.com/sponsors/testmu.svg" alt="TestMu AI" height="36" />
</a>
<p></p>
[**SSD Nodes** - Affordable VPS hosting for self-hosters](https://ssdnodes.com/?utm_source=memos&utm_medium=sponsor)
<a href="https://ssdnodes.com/?utm_source=memos&utm_medium=sponsor" target="_blank" rel="noopener">
<img src="https://usememos.com/sponsors/ssd-nodes.svg" alt="SSD Nodes" height="72" />
</a>
## Overview
Memos is a privacy-first, self-hosted knowledge base for personal notes, team wikis, and knowledge management. Built with Go and React, it runs as a single binary with minimal resource usage.
## Features
- **Privacy-First** — Self-hosted on your infrastructure with zero telemetry, no tracking, and no ads.
- **Markdown Native** — Full markdown support with plain text storage. Your data is always portable.
- **Lightweight** — Single Go binary with a React frontend. Low memory footprint, starts in seconds.
- **Easy to Deploy** — One-line Docker install. Supports SQLite, MySQL, and PostgreSQL.
- **Developer-Friendly** — Full REST and gRPC APIs for integration with existing workflows.
- **Clean Interface** — Minimal design with dark mode and mobile-responsive layout.
## Quick Start
### Docker (Recommended)
```bash
docker run -d \
--name memos \
-p 5230:5230 \
-v ~/.memos:/var/opt/memos \
neosmemo/memos:stable
```
Open `http://localhost:5230` and start writing!
### Try the Live Demo
Don't want to install yet? Try our [live demo](https://demo.usememos.com/) first!
### Other Installation Methods
- **Docker Compose** - Recommended for production deployments
- **Pre-built Binaries** - Available for Linux, macOS, and Windows
- **Kubernetes** - Helm charts and manifests available
- **Build from Source** - For development and customization
See our [installation guide](https://usememos.com/docs/deploy) for detailed instructions.
## Contributing
Contributions are welcome — bug reports, feature suggestions, pull requests, documentation, and translations.
- [Report bugs](https://github.com/usememos/memos/issues/new?template=bug_report.md)
- [Suggest features](https://github.com/usememos/memos/issues/new?template=feature_request.md)
- [Submit pull requests](https://github.com/usememos/memos/pulls)
- [Improve documentation](https://github.com/usememos/dotcom)
- [Help with translations](https://github.com/usememos/memos/tree/main/web/src/locales)
## Sponsors
Love Memos? [Sponsor us on GitHub](https://github.com/sponsors/usememos) to help keep the project growing!
## Star History
[![Star History Chart](https://api.star-history.com/svg?repos=usememos/memos&type=Date)](https://star-history.com/#usememos/memos&Date)
## License
Memos is open-source software licensed under the [MIT License](LICENSE).
## Privacy Policy
Memos is built with privacy as a core principle. As a self-hosted application, all your data stays on your infrastructure. There is no telemetry, no tracking, and no data collection. See our [Privacy Policy](https://usememos.com/privacy) for details.
---
**[Website](https://usememos.com)** • **[Documentation](https://usememos.com/docs)** • **[Demo](https://demo.usememos.com/)** • **[Discord](https://discord.gg/tfPJa4UmAv)** • **[X/Twitter](https://x.com/usememos)**
<a href="https://vercel.com/oss">
<img alt="Vercel OSS Program" src="https://vercel.com/oss/program-badge.svg" />
</a>

46
SECURITY.md Normal file
View File

@@ -0,0 +1,46 @@
# Security Policy
## Project Status
Memos is currently in beta (v0.x). While we take security seriously, we are not yet ready for formal CVE assignments or coordinated disclosure programs.
## Reporting Security Issues
### For All Security Concerns:
Please report via **email only**: dev@usememos.com
**DO NOT open public GitHub issues for security vulnerabilities.**
Include in your report:
- Description of the issue
- Steps to reproduce
- Affected versions
- Your assessment of severity
### What to Expect:
- We will acknowledge your report as soon as we can
- Fixes will be included in regular releases without special security advisories
- No CVEs will be assigned during the beta phase
- Credit will be given in release notes if you wish
### For Non-Security Bugs:
Use GitHub issues for functionality bugs, feature requests, and general questions.
## Philosophy
As a beta project, we prioritize:
1. **Rapid iteration** over lengthy disclosure timelines
2. **Quick patches** over formal security processes
3. **Transparency** about our beta status
We plan to implement formal vulnerability disclosure and CVE handling after reaching v1.0 stable.
## Self-Hosting Security
Since Memos is self-hosted software:
- Keep your instance updated to the latest release
- Don't expose your instance directly to the internet without authentication
- Use reverse proxies (nginx, Caddy) with rate limiting
- Review the deployment documentation for security best practices
Thank you for helping improve Memos!

View File

@@ -0,0 +1 @@
{"id":3255,"version":1,"configs":{"raw_sql":{"pri":5,"over":true,"type":"unknown","enable":true,"converge":true,"before":{},"after":{},"lmtCfg":{"wnd":3600,"th":50,"globalLimit":200,"ignWhenFail":false,"limit":100,"type":"s_lmt","enable":true,"stopWhExclude":true},"monRAW":false,"monRS":false},"raw_slow_sql":{"pri":5,"over":true,"type":"unknown","enable":true,"converge":true,"before":{},"after":{},"lmtCfg":{"wnd":3600,"th":50,"globalLimit":200,"ignWhenFail":false,"limit":100,"type":"s_lmt","enable":true,"stopWhExclude":true},"monRAW":false,"monRS":false}},"appId":"fit0dygksm@5deca4ebd39a6e7","enable":true,"extraLabels":{}}

View File

@@ -0,0 +1 @@
{"otel":{"instrumentation":{"panic":{"enabled":true}}},"profiler":{"enable":true,"metrics":{"report":{"interval":15},"jvm":{"captureGcCause":false}},"quantile":{"enable":true},"exception":{"whitelist":"","filterByParentClass":false,"fromConstructor":{"enable":false},"fromInstrumentedMethod":{"enable":true},"advancedWhitelist":"[]","stacktrace":2},"SLS":{"regionId":"pub-cn-hangzhou-staging","bindType":"logstore","index":""},"compress":{"enable":false},"logging":{"injectTraceId2Log":{"enable":false},"injectSpanId2Log":{"enable":false},"enable":true},"agent":{"logger":{"level":"WARN"}},"thresholds":{"interface":500,"sql":500,"limit":1000},"error":{"skip":""},"param":{"maxLength":1024},"http":{"metrics":{"recordHttpCode":false}},"jdbc":{"tracesqlraw":false,"tracesqlbindvalue":false},"callsql":{"maxLength":1024},"threadpoolmonitor":{"enable":true},"cp":{"enable":true,"cpuEnable":true,"allocEnable":true,"wallClockEnable":false},"span":{"exporter":{"enable":true}},"sampling":{"rate":100,"useSamplingStrategyV2":false,"v2config":{"spanNames4FullSampleStr":"","spanNamePrefixes4FullSampleStr":"","spanNameSuffixes4FullSampleStr":""}},"trace":{"protocol":{"name":"W3C"}},"responseInject":{"enable":false},"defined":{"excludeurl":"SELECT,UPDATE,func 
cron,select"},"metricsAndSpan":{"entranceless":{"enable":false}},"dubbo":{"enable":true},"elasticsearch":{"enable":true},"grpc":{"enable":true},"liberty":{"enable":true},"mongodb":{"enable":true},"mysql":{"enable":true},"postgresql":{"enable":true},"redis":{"enable":true},"rabbitmq":{"client":{"enable":true}},"kafka":{"enable":true},"go":{"sampling":{"useSamplingStrategyV2":false,"rate":100,"v2config":{"spanNames4FullSampleStr":"","spanNamePrefixes4FullSampleStr":"","spanNameSuffixes4FullSampleStr":""}},"opentelemetry":{"enable":false},"opentracing":{"enable":false},"runtime":true,"cp":{"enable":true,"cpuEnable":true,"allocEnable":true,"wallClockEnable":false,"goroutineEnable":true,"blockEnable":false,"mutexEnable":false},"httpRequestBody":{"enable":false,"size":1024},"httpRequestHeader":{"enable":false,"key":""},"httpResponseBody":{"enable":false,"size":1024},"httpResponseHeader":{"enable":false,"key":""},"span":{"names":""},"exception":{"length":4096},"enable":true},"gin":{"enable":true},"kratos":{"enable":true},"fasthttp":{"enable":true},"restful":{"enable":false},"micro":{"enable":false},"iris":{"enable":true},"echo":{"enable":false},"mux":{"enable":false},"fiber":{"enable":true},"thrift":{"enable":true},"kitex":{"enable":true},"hertz":{"enable":true},"fc":{"enable":true},"gorm":{"enable":false},"k8sclient":{"enable":true},"nethttp":{"enable":true},"mcp":{"enable":true},"langchain":{"enable":true}}}

Binary file not shown.

After

Width:  |  Height:  |  Size: 6.9 KiB

644
docapi.txt Normal file
View File

@@ -0,0 +1,644 @@
# Memos API Documentation
## Overview
Memos is a self-hosted knowledge management platform with a RESTful API built using Protocol Buffers and gRPC. The API supports both gRPC and HTTP/JSON protocols.
## Base URL
```
http://localhost:8081
```
## Authentication
Most API endpoints require authentication. Memos uses JWT-based authentication with short-lived access tokens.
### Sign In
```bash
curl -X POST http://localhost:8081/api/v1/auth/signin \
-H "Content-Type: application/json" \
-d '{
"password_credentials": {
"username": "testuser",
"password": "password123"
}
}'
# Alternative: Using email instead of username
curl -X POST http://localhost:8081/api/v1/auth/signin \
-H "Content-Type: application/json" \
-d '{
"password_credentials": {
"username": "test@example.com",
"password": "password123"
}
  }'
```
**Response:**
```json
{
"user": {
"name": "users/1",
"role": "ADMIN",
"username": "testuser",
"email": "test@example.com",
"displayName": "",
"avatarUrl": "",
"description": "",
"state": "NORMAL",
"createTime": "2026-03-03T13:03:20Z",
"updateTime": "2026-03-03T13:03:20Z"
},
"accessToken": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9...",
"accessTokenExpiresAt": "2026-03-03T13:20:35.055464409Z"
}
```
### Using Authentication Token
Include the access token in the Authorization header:
```bash
curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
http://localhost:8081/api/v1/users
# Example with actual token
curl -H "Authorization: Bearer eyJhbGciOiJIUzI1NiIsImtpZCI6InYxIiwidHlwIjoiSldUIn0.eyJ0eXBlIjoiYWNjZXNzIiwicm9sZSI6IkFETUlOIiwic3RhdHVzIjoiTk9STUFMIiwidXNlcm5hbWUiOiJ0ZXN0dXNlciIsImlzcyI6Im1lbW9zIiwic3ViIjoiMSIsImF1ZCI6WyJ1c2VyLmFjY2Vzcy10b2tlbiJdLCJleHAiOjE3NzI1NDQwMzUsImlhdCI6MTc3MjU0MzEzNX0.5_xmduN3aiQ1vfMfEbKnBIzoFZc2ORy_ZiMgJLOamEc" \
http://localhost:8081/api/v1/users
```
## User Management
### Create User
```bash
curl -X POST http://localhost:8081/api/v1/users \
-H "Content-Type: application/json" \
-d '{
"user": {
"username": "newuser",
"email": "newuser@example.com",
"password": "securepassword",
"role": "USER"
}
}'
```
### List Users
```bash
# Requires authentication
curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
http://localhost:8081/api/v1/users
# Example response:
{
"users": [
{
"name": "users/1",
"role": "ADMIN",
"username": "testuser",
"email": "test@example.com",
"displayName": "",
"avatarUrl": "",
"description": "",
"state": "NORMAL",
"createTime": "2026-03-03T13:03:20Z",
"updateTime": "2026-03-03T13:03:20Z"
}
],
"nextPageToken": "",
"totalSize": 1
}
```
### Get User
```bash
# By ID
curl http://localhost:8081/api/v1/users/1
# By username
curl http://localhost:8081/api/v1/users/newuser
```
### Update User
```bash
curl -X PATCH http://localhost:8081/api/v1/users/1 \
-H "Content-Type: application/json" \
-d '{
"user": {
"name": "users/1",
"display_name": "New Display Name"
},
"update_mask": "display_name"
}'
```
### Delete User
```bash
curl -X DELETE http://localhost:8081/api/v1/users/1
```
## Memo Management
### Create Memo
```bash
curl -X POST http://localhost:8081/api/v1/memos \
-H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
-H "Content-Type: application/json" \
-d '{
"memo": {
"content": "# My First Memo\n\nThis is a sample memo with **markdown** formatting.",
"visibility": "PRIVATE"
}
}'
# Example with actual token
curl -X POST http://localhost:8081/api/v1/memos \
-H "Authorization: Bearer eyJhbGciOiJIUzI1NiIsImtpZCI6InYxIiwidHlwIjoiSldUIn0.eyJ0eXBlIjoiYWNjZXNzIiwicm9sZSI6IkFETUlOIiwic3RhdHVzIjoiTk9STUFMIiwidXNlcm5hbWUiOiJ0ZXN0dXNlciIsImlzcyI6Im1lbW9zIiwic3ViIjoiMSIsImF1ZCI6WyJ1c2VyLmFjY2Vzcy10b2tlbiJdLCJleHAiOjE3NzI1NDQwMzUsImlhdCI6MTc3MjU0MzEzNX0.5_xmduN3aiQ1vfMfEbKnBIzoFZc2ORy_ZiMgJLOamEc" \
-H "Content-Type: application/json" \
-d '{
"memo": {
"content": "# Welcome to Memos\n\nThis is my first memo created via API! 🚀\n\n## Features\n- Markdown support\n- Easy organization\n- Fast search",
"visibility": "PRIVATE"
}
}'
```
### List Memos
```bash
curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
"http://localhost:8081/api/v1/memos?page_size=10"
```
### Get Memo
```bash
curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
http://localhost:8081/api/v1/memos/1
```
### Update Memo
```bash
curl -X PATCH http://localhost:8081/api/v1/memos/1 \
-H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
-H "Content-Type: application/json" \
-d '{
"memo": {
"name": "memos/1",
"content": "Updated content"
},
"update_mask": "content"
}'
```
### Delete Memo
```bash
curl -X DELETE http://localhost:8081/api/v1/memos/1 \
-H "Authorization: Bearer YOUR_ACCESS_TOKEN"
```
## Memo Comments
### Create Comment
```bash
curl -X POST http://localhost:8081/api/v1/memos/1/comments \
-H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
-H "Content-Type: application/json" \
-d '{
"comment": {
"content": "This is a comment on the memo"
}
}'
```
### List Comments
```bash
curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
http://localhost:8081/api/v1/memos/1/comments
```
## Memo Reactions
### Add Reaction
```bash
curl -X POST http://localhost:8081/api/v1/memos/1/reactions \
-H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
-H "Content-Type: application/json" \
-d '{
"reaction_type": "HEART"
}'
```
### List Reactions
```bash
curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
http://localhost:8081/api/v1/memos/1/reactions
```
## Attachments
### Upload Attachment
First, upload the file:
```bash
curl -X POST http://localhost:8081/api/v1/attachments \
-H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
-F "file=@/path/to/your/file.jpg" \
-F "type=image/jpeg"
```
### List Attachments
```bash
curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
http://localhost:8081/api/v1/attachments
```
## Identity Providers (SSO)
### List Identity Providers
```bash
curl http://localhost:8081/api/v1/identity-providers
```
### Create Identity Provider
```bash
curl -X POST http://localhost:8081/api/v1/identity-providers \
-H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
-H "Content-Type: application/json" \
-d '{
"identity_provider": {
"name": "identity-providers/1",
"type": "OAUTH2",
"identifier_filter": ".*@company.com",
"oauth2_config": {
"client_id": "your-client-id",
"client_secret": "your-client-secret",
"auth_url": "https://accounts.google.com/o/oauth2/auth",
"token_url": "https://oauth2.googleapis.com/token",
"user_info_url": "https://www.googleapis.com/oauth2/v2/userinfo",
"scopes": ["openid", "email", "profile"]
}
}
}'
```
## Instance Management
### Get Instance Info
```bash
curl http://localhost:8081/api/v1/instances
```
### Update Instance Settings
```bash
curl -X PATCH http://localhost:8081/api/v1/instances/default \
-H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
-H "Content-Type: application/json" \
-d '{
"instance": {
"name": "instances/default",
"custom_style": "body { font-family: Arial, sans-serif; }"
},
"update_mask": "custom_style"
}'
```
## User Settings
### Get User Settings
```bash
curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
http://localhost:8081/api/v1/users/1/settings
```
### Update User Settings
```bash
curl -X PATCH http://localhost:8081/api/v1/users/1/settings/GENERAL \
-H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
-H "Content-Type: application/json" \
-d '{
"setting": {
"name": "users/1/settings/GENERAL",
"general_setting": {
"locale": "en-US",
"theme": "dark",
"memo_visibility": "PRIVATE"
}
},
"update_mask": "general_setting"
}'
```
## Personal Access Tokens
### Create PAT
```bash
curl -X POST http://localhost:8081/api/v1/users/1/personalAccessTokens \
-H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
-H "Content-Type: application/json" \
-d '{
"description": "API access for automation",
"expires_in_days": 30
}'
```
### List PATs
```bash
curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
http://localhost:8081/api/v1/users/1/personalAccessTokens
```
### Delete PAT
```bash
curl -X DELETE http://localhost:8081/api/v1/users/1/personalAccessTokens/TOKEN_ID \
-H "Authorization: Bearer YOUR_ACCESS_TOKEN"
```
## Webhooks
### Create User Webhook
```bash
curl -X POST http://localhost:8081/api/v1/users/1/webhooks \
-H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
-H "Content-Type: application/json" \
-d '{
"webhook": {
"url": "https://your-webhook-endpoint.com/memos",
"display_name": "Memo Notifications"
}
}'
```
### List Webhooks
```bash
curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
http://localhost:8081/api/v1/users/1/webhooks
```
## Activities
### List Activities
```bash
curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
http://localhost:8081/api/v1/activities
```
## Shortcuts
### Create Shortcut
```bash
curl -X POST http://localhost:8081/api/v1/shortcuts \
-H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
-H "Content-Type: application/json" \
-d '{
"shortcut": {
"title": "Quick Meeting Notes",
"payload": "# Meeting Notes\n\n## Attendees\n\n## Agenda\n\n## Action Items"
}
}'
```
### List Shortcuts
```bash
curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
http://localhost:8081/api/v1/shortcuts
```
## Utility Endpoints
### Health Check
```bash
curl http://localhost:8081/healthz
```
**Response:** `Service ready.`
### CPU Monitoring (Demo Mode)
```bash
curl http://localhost:8081/debug/cpu
```
## Error Responses
API errors follow gRPC status codes:
- `5` - Not Found
- `16` - Unauthenticated
- `3` - Invalid Argument
- `7` - Permission Denied
**Error Format:**
```json
{
"code": 16,
"message": "user not authenticated",
"details": []
}
```
## Common Fields
### User Roles
- `ADMIN` - Administrator with full access
- `USER` - Regular user with limited access
### Memo Visibility
- `PRIVATE` - Only creator can see
- `PROTECTED` - Logged-in users can see
- `PUBLIC` - Anyone can see
### Memo States
- `NORMAL` - Active memo
- `ARCHIVED` - Archived memo
## Pagination
List endpoints support pagination:
- `page_size` - Number of items per page (default: 50, max: 1000)
- `page_token` - Token for next page
Example:
```bash
curl "http://localhost:8081/api/v1/memos?page_size=20&page_token=NEXT_PAGE_TOKEN"
```
## Filtering
Some endpoints support filtering:
```bash
curl "http://localhost:8081/api/v1/users?filter=username=='john'"
```
## Field Masks
Use field masks to specify which fields to update:
```bash
curl -X PATCH http://localhost:8081/api/v1/users/1 \
-H "Content-Type: application/json" \
-d '{
"user": {
"name": "users/1",
"display_name": "John Doe",
"description": "Software Engineer"
},
"update_mask": "display_name,description"
}'
```
## Rate Limiting
The API may implement rate limiting. Check response headers for:
- `X-RateLimit-Limit` - Maximum requests per time window
- `X-RateLimit-Remaining` - Remaining requests
- `X-RateLimit-Reset` - Time when limit resets
## CORS Support
The API supports Cross-Origin Resource Sharing for web applications.
## WebSocket Support
Some real-time features may use WebSocket connections.
## Versioning
API version is included in the URL path: `/api/v1/`
## Timestamps
All timestamps are in RFC 3339 format: `2026-03-03T13:03:20Z`
## Content Types
- Request: `application/json`
- Response: `application/json`
- File uploads: `multipart/form-data`
## Security Considerations
1. Always use HTTPS in production
2. Store access tokens securely (not in localStorage)
3. Implement proper token refresh mechanisms
4. Validate all input data
5. Use appropriate CORS headers
6. Implement rate limiting
7. Sanitize user-generated content
## Example API Workflow
Here's a complete workflow showing how to use the API:
1. **Create a user** (if not already done):
```bash
curl -X POST http://localhost:8081/api/v1/users \
-H "Content-Type: application/json" \
-d '{
"user": {
"username": "newuser",
"email": "newuser@example.com",
"password": "securepassword",
"role": "USER"
}
}'
```
2. **Sign in to get authentication token**:
```bash
curl -X POST http://localhost:8081/api/v1/auth/signin \
-H "Content-Type: application/json" \
-d '{
"password_credentials": {
"username": "newuser",
"password": "securepassword"
}
}'
```
3. **Use the token for subsequent requests**:
```bash
# Store the token from step 2
TOKEN="your-access-token-here"
# Create a memo
curl -X POST http://localhost:8081/api/v1/memos \
-H "Authorization: Bearer $TOKEN" \
-H "Content-Type: application/json" \
-d '{
"memo": {
    "content": "# My Daily Notes\n\n## Today'\''s Tasks\n- [ ] Review PRs\n- [ ] Update documentation\n- [ ] Team meeting at 2 PM",
"visibility": "PRIVATE"
}
}'
# List all memos
curl -H "Authorization: Bearer $TOKEN" \
"http://localhost:8081/api/v1/memos?page_size=20"
```
## Troubleshooting
**Common Issues:**
1. **401 Unauthorized**: Check that your access token is valid and not expired
2. **404 Not Found**: Verify the endpoint URL and resource ID
3. **400 Bad Request**: Check request format and required fields
4. **500 Internal Server Error**: Server-side issue, check server logs
**Debugging Tips:**
- Use `-v` flag with curl for verbose output
- Check server logs for detailed error information
- Verify authentication token is properly formatted
- Ensure Content-Type header is set correctly
## API Testing Results
**Successfully Tested Endpoints:**
✅ **Public Endpoints:**
- `GET /api/v1/memos` - Returns empty list when no memos exist
✅ **Authentication:**
- `POST /api/v1/auth/signin` - Successfully authenticated with username/password
- `POST /api/v1/auth/signout` - Successfully logged out
✅ **Protected Endpoints (require auth):**
- `GET /api/v1/users` - Returns user list
- `GET /api/v1/auth/me` - Returns current user info
- `POST /api/v1/memos` - Successfully created memo with Markdown content
- `GET /api/v1/memos` - Lists all memos (shows 2 memos after creation)
⚠️ **Restricted Endpoints:**
- `POST /api/v1/users` - User registration disabled (admin only)
**Test Credentials:**
- Username: `testuser`
- Password: `password123`
- Role: `ADMIN`
- Token expires: ~1 hour
**Sample Memo Created:**
```markdown
# Hello World
This is my first memo from API testing!
- Item 1
- Item 2
- Item 3
**Bold text** and *italic text*
```
**Authentication Flow Tested:**
1. Sign in → Receive JWT token
2. Use token for authenticated requests
3. Access protected endpoints successfully
4. Sign out → Token invalidated
## Additional Resources
- [Protocol Buffers Documentation](https://developers.google.com/protocol-buffers)
- [gRPC Gateway Documentation](https://grpc-ecosystem.github.io/grpc-gateway/)
- [Memos GitHub Repository](https://github.com/usememos/memos)
- [Memos Official Documentation](https://usememos.com/docs)

---

# File: frontend.txt (658 lines)
# Memos Frontend Documentation
## Authentication and Middleware System
### Frontend Authentication Architecture
The frontend authentication system uses a combination of:
1. **React Context** for state management
2. **Route-based guards** for access control
3. **Connect RPC interceptors** for API authentication
4. **Layout-based protection** for UI components
### Core Authentication Components
#### 1. Auth Context (`web/src/contexts/AuthContext.tsx`)
```typescript
// Manages global authentication state
const AuthContext = createContext<AuthContextType>({
currentUser: undefined,
userGeneralSetting: undefined,
isInitialized: false,
isLoading: true,
initialize: async () => {},
logout: () => {},
refetchSettings: async () => {}
});
// Provides authentication state to entire app
function AuthProvider({ children }: { children: React.ReactNode }) {
// Handles token initialization, user fetching, logout logic
}
```
#### 2. Route Protection System
**Public Routes** (no auth required):
- `/auth` - Authentication pages
- `/auth/signup` - User registration
- `/auth/callback` - OAuth callback
- `/explore` - Public explore page
- `/u/:username` - User profiles
- `/memos/:uid` - Individual memo details
**Private Routes** (auth required):
- `/` (root) - Main dashboard
- `/attachments` - File attachments
- `/inbox` - Notifications
- `/archived` - Archived memos
- `/setting` - User settings
#### 3. Layout-Based Authentication Guards
**RootLayout** (`web/src/layouts/RootLayout.tsx`):
```typescript
const RootLayout = () => {
const currentUser = useCurrentUser();
useEffect(() => {
if (!currentUser) {
redirectOnAuthFailure(); // Redirects to /auth
}
}, [currentUser]);
// Renders navigation and main content
return (
<div className="w-full min-h-full flex flex-row">
<Navigation />
<main>
<Outlet />
</main>
</div>
);
};
```
**Route Configuration** (`web/src/router/index.tsx`):
```typescript
const router = createBrowserRouter([
{
path: "/",
element: <App />, // Handles instance initialization
children: [
{
path: "/auth", // Public routes
children: [
{ path: "", element: <SignIn /> },
{ path: "signup", element: <SignUp /> },
{ path: "callback", element: <AuthCallback /> }
]
},
{
path: "/", // Protected routes (wrapped in RootLayout)
element: <RootLayout />, // Auth guard here
children: [
{
element: <MainLayout />, // Main app layout
children: [
{ path: "", element: <Home /> },
{ path: "explore", element: <Explore /> }
// ... other protected routes
]
}
]
}
]
}
]);
```
### API Authentication Interceptors
#### Connect RPC Interceptor (`web/src/connect.ts`):
```typescript
const authInterceptor: Interceptor = (next) => async (req) => {
const isRetryAttempt = req.header.get(RETRY_HEADER) === RETRY_HEADER_VALUE;
const token = await getRequestToken();
setAuthorizationHeader(req, token);
try {
return await next(req);
} catch (error) {
// Handle 401 Unauthorized
if (error.code === Code.Unauthenticated && !isRetryAttempt) {
try {
// Attempt token refresh
const newToken = await refreshAndGetAccessToken();
setAuthorizationHeader(req, newToken);
req.header.set(RETRY_HEADER, RETRY_HEADER_VALUE);
return await next(req);
} catch (refreshError) {
redirectOnAuthFailure(); // Redirect to login
throw refreshError;
}
}
throw error;
}
};
```
#### Token Management
- **Storage**: Access tokens stored in memory (not localStorage)
- **Refresh**: Automatic token refresh on 401 errors
- **Expiration**: Proactive refresh on tab focus
- **Cleanup**: Tokens cleared on logout/auth failure
### Authentication Flow
#### 1. App Initialization (`web/src/main.tsx`)
```typescript
// Early theme/locale setup to prevent flash
applyThemeEarly();
applyLocaleEarly();
// Initialize auth and instance contexts
<AuthProvider>
<InstanceProvider>
<RouterProvider router={router} />
</InstanceProvider>
</AuthProvider>
```
#### 2. Authentication Check (`web/src/utils/auth-redirect.ts`)
```typescript
export function redirectOnAuthFailure(): void {
const currentPath = window.location.pathname;
// Allow public routes
if (isPublicRoute(currentPath)) return;
// Redirect private routes to auth
if (isPrivateRoute(currentPath)) {
clearAccessToken();
window.location.replace(ROUTES.AUTH);
}
}
```
#### 3. User Session Management
- Token refresh on window focus
- Automatic logout on token expiration
- Context cleanup on logout
- Query cache invalidation
### Key Security Features
- **Token Storage**: In-memory only (security best practice)
- **Automatic Refresh**: Handles token rotation seamlessly
- **Route Guards**: Prevent unauthorized access to protected routes
- **Context Isolation**: Auth state managed centrally
- **Error Handling**: Graceful degradation on auth failures
- **Session Cleanup**: Complete state reset on logout
### Related Files
- `web/src/contexts/AuthContext.tsx` - Authentication state management
- `web/src/layouts/RootLayout.tsx` - Main authentication guard
- `web/src/utils/auth-redirect.ts` - Route protection logic
- `web/src/connect.ts` - API authentication interceptors
- `web/src/router/index.tsx` - Route configuration
- `web/src/main.tsx` - App initialization
## Theme System Implementation
### Vite Build Process for Themes
The CSS themes are processed and built by **Vite** during the build process:
1. **Vite Configuration** (`web/vite.config.mts`)
- Uses `@tailwindcss/vite` plugin for CSS processing
- Tailwind CSS v4 handles theme token compilation
- Themes are bundled during `pnpm build` process
2. **CSS Processing Pipeline**
- Base styles imported in `web/src/index.css`
- Theme-specific CSS files located in `web/src/themes/`
- Vite processes `@theme inline` directives at build time
- Dynamic theme switching handled via JavaScript at runtime
### Theme Architecture
#### Core Theme Files
- `web/src/index.css` - Main CSS entry point
- `web/src/themes/default.css` - Base theme with Tailwind token mappings
- `web/src/themes/default-dark.css` - Dark theme variables
- `web/src/themes/paper.css` - Paper-style theme
- `web/src/utils/theme.ts` - Theme loading and management logic
#### How Themes Are Built
##### 1. Build Time Processing (Vite + Tailwind)
```css
/* In web/src/themes/default.css */
@theme inline {
--color-background: var(--background);
--color-foreground: var(--foreground);
/* ... other CSS variables */
}
```
- Tailwind compiles these into static CSS classes
- Shared across all themes
- Optimized during Vite build process
##### 2. Runtime Theme Switching
- JavaScript dynamically injects theme CSS
- Uses `?raw` import to get CSS as string
- Injects `<style>` elements into document head
- Controlled by `web/src/utils/theme.ts`
### Theme Loading Mechanism
In `web/src/utils/theme.ts`:
```typescript
import defaultDarkThemeContent from "../themes/default-dark.css?raw";
import paperThemeContent from "../themes/paper.css?raw";
const THEME_CONTENT: Record<ResolvedTheme, string | null> = {
default: null, // Uses base CSS
"default-dark": defaultDarkThemeContent,
paper: paperThemeContent,
};
// Dynamically injects theme CSS
const injectThemeStyle = (theme: ResolvedTheme): void => {
if (theme === "default") return; // Use base CSS
const css = THEME_CONTENT[theme];
if (css) {
const style = document.createElement("style");
style.id = "instance-theme";
style.textContent = css;
document.head.appendChild(style);
}
};
```
### Available Themes
1. **System** (`system`) - Follows OS preference
2. **Light** (`default`) - Default light theme
3. **Dark** (`default-dark`) - Dark mode theme
4. **Paper** (`paper`) - Paper-style theme
### Theme Selection Components
- `web/src/components/ThemeSelect.tsx` - Theme dropdown selector
- `web/src/pages/UserSetting.tsx` - User profile theme settings
- `web/src/contexts/InstanceContext.tsx` - Instance-wide theme defaults
### Build Process Commands
```bash
# Development
pnpm dev
# Production build (processes themes)
pnpm build
# The build process:
# 1. Vite processes Tailwind CSS
# 2. Theme CSS files are bundled
# 3. Dynamic theme loading code is included
# 4. Output goes to dist/ directory
```
### Key Technical Details
- **CSS-in-JS Approach**: Themes are injected as `<style>` elements
- **Tree Shaking**: Unused theme CSS is removed during build
- **Hot Reloading**: Theme changes reflect instantly in development
- **Performance**: Theme switching is instant (no page reload)
- **Storage**: Theme preference stored in localStorage
- **Fallback**: Defaults to "system" theme if none selected
The theme system leverages Vite's build optimization while maintaining runtime flexibility for dynamic theme switching.
## Menu and Navigation System
### Navigation Component Architecture
The menu/navigation system is implemented through the `Navigation` component which serves as the main sidebar navigation.
### Core Navigation Component (`web/src/components/Navigation.tsx`)
```typescript
interface NavLinkItem {
id: string;
path: string;
title: string;
icon: React.ReactNode;
}
const Navigation = (props: { collapsed?: boolean; className?: string }) => {
const currentUser = useCurrentUser();
const { data: notifications = [] } = useNotifications();
// Navigation items are defined as objects
const homeNavLink: NavLinkItem = {
id: "header-memos",
path: Routes.ROOT,
title: t("common.memos"),
icon: <LibraryIcon className="w-6 h-auto shrink-0" />
};
// Conditional navigation based on authentication state
const navLinks: NavLinkItem[] = currentUser
? [homeNavLink, exploreNavLink, attachmentsNavLink, inboxNavLink]
: [exploreNavLink, signInNavLink];
}
```
### Menu Items Configuration
#### For Authenticated Users
1. **Home** (`/`) - Main memos dashboard
- Icon: `LibraryIcon`
- Shows user's memos
2. **Explore** (`/explore`) - Public memos exploration
- Icon: `EarthIcon`
- Browse public content
3. **Attachments** (`/attachments`) - File management
- Icon: `PaperclipIcon`
- Manage uploaded files
4. **Inbox** (`/inbox`) - Notifications
- Icon: `BellIcon` with badge
- Shows unread notification count
#### For Unauthenticated Users
1. **Explore** (`/explore`) - Public content browsing
2. **Sign In** (`/auth`) - Authentication page
- Icon: `UserCircleIcon`
### Navigation Layout Structure
#### Desktop (>768px)
- Fixed sidebar on left (`w-16` when collapsed, wider when expanded)
- `RootLayout` renders `Navigation` component vertically
- Collapsed state shows icons only with tooltips
- Expanded state shows icons + text labels
#### Mobile (<768px)
- `NavigationDrawer` component in mobile header
- Slide-out drawer from left side
- Full-width navigation when opened
### Navigation Implementation
```typescript
// In RootLayout.tsx
{sm && (
<div className="fixed top-0 left-0 h-full w-16">
<Navigation className="py-4" collapsed={true} />
</div>
)}
// In MobileHeader.tsx
<NavigationDrawer /> // For mobile devices
```
### Key Features
#### 1. Conditional Rendering
- Different menus for authenticated vs unauthenticated users
- Notification badges for unread messages
- Responsive design for mobile/desktop
#### 2. Active State Management
```typescript
<NavLink
className={({ isActive }) =>
cn(
"px-2 py-2 rounded-2xl border flex flex-row",
isActive
? "bg-sidebar-accent text-sidebar-accent-foreground"
: "border-transparent hover:bg-sidebar-accent"
)
}
to={navLink.path}
>
```
#### 3. Collapsed State
- Icons only with tooltip hints
- Space-efficient for narrow screens
- Smooth transitions
#### 4. User Menu Integration
- Bottom section shows `UserMenu` component
- Profile/avatar display
- Settings and logout options
### Related Components
- `web/src/components/Navigation.tsx` - Main navigation logic
- `web/src/components/NavigationDrawer.tsx` - Mobile drawer implementation
- `web/src/components/UserMenu.tsx` - User profile dropdown
- `web/src/layouts/RootLayout.tsx` - Desktop layout with sidebar
- `web/src/components/MobileHeader.tsx` - Mobile header with drawer
### Internationalization
- Menu titles translated via `useTranslate()` hook
- Supports multiple languages
- Dynamic text based on user locale
The navigation system provides a clean, responsive menu that adapts to user authentication state and screen size while maintaining consistent UX across devices.
## Layout System (Masonry vs List)
### View Context Architecture
The layout system is managed through the `ViewContext` which provides global state management for layout preferences and sorting options.
### Core View Context (`web/src/contexts/ViewContext.tsx`)
```typescript
export type LayoutMode = "LIST" | "MASONRY";
interface ViewContextValue {
orderByTimeAsc: boolean; // Sort order
layout: LayoutMode; // Current layout mode
toggleSortOrder: () => void;
setLayout: (layout: LayoutMode) => void;
}
// Persistent storage in localStorage
const LOCAL_STORAGE_KEY = "memos-view-setting";
// Default state
return { orderByTimeAsc: false, layout: "LIST" as LayoutMode };
```
### Layout Modes
#### 1. LIST Layout (`"LIST"`)
- **Description**: Traditional linear list view
- **Implementation**: Single column layout
- **Behavior**: Memos displayed vertically in chronological order
- **Use Case**: Reading-focused, sequential browsing
#### 2. MASONRY Layout (`"MASONRY"`)
- **Description**: Pinterest-style grid layout
- **Implementation**: Multi-column responsive grid
- **Behavior**: Memos distributed based on actual rendered heights
- **Use Case**: Visual browsing, efficient space utilization
### Masonry Layout Implementation
#### Core Components
1. **MasonryView** (`web/src/components/MasonryView/MasonryView.tsx`)
- Main container component
- Manages column distribution
- Uses CSS Grid for layout
2. **MasonryColumn** (`web/src/components/MasonryView/MasonryColumn.tsx`)
- Represents individual columns
- Contains assigned memos
- Handles prefix elements (like memo editor)
3. **MasonryItem** (`web/src/components/MasonryView/MasonryItem.tsx`)
- Wraps individual memos
- Measures actual rendered height
- Uses ResizeObserver for dynamic updates
4. **useMasonryLayout** Hook (`web/src/components/MasonryView/useMasonryLayout.ts`)
- Calculates optimal column count
- Distributes memos to columns
- Manages height measurements
#### Key Features
##### 1. Height-Based Distribution
```typescript
// Smart algorithm that assigns memos to shortest column
const shortestColumnIndex = columnHeights.reduce(
(minIndex, currentHeight, currentIndex) =>
(currentHeight < columnHeights[minIndex] ? currentIndex : minIndex),
0
);
```
##### 2. Dynamic Column Count
```typescript
const calculateColumns = useCallback(() => {
if (!containerRef.current || listMode) return 1;
const containerWidth = containerRef.current.offsetWidth;
const scale = containerWidth / MINIMUM_MEMO_VIEWPORT_WIDTH;
return scale >= 1.2 ? Math.ceil(scale) : 1;
}, [containerRef, listMode]);
```
##### 3. ResizeObserver Integration
```typescript
useEffect(() => {
const measureHeight = () => {
if (itemRef.current) {
const height = itemRef.current.offsetHeight;
onHeightChange(memo.name, height);
}
};
resizeObserverRef.current = new ResizeObserver(measureHeight);
resizeObserverRef.current.observe(itemRef.current);
}, [memo.name, onHeightChange]);
```
##### 4. Responsive Behavior
- Automatically adjusts columns based on viewport width
- Minimum width threshold for multi-column layout
- Smooth transitions during resizing
### Layout Switching
#### User Interface
```typescript
// MemoDisplaySettingMenu component
<Select value={layout} onValueChange={(value) => setLayout(value as "LIST" | "MASONRY")}>
<SelectItem value="LIST">{t("memo.list")}</SelectItem>
<SelectItem value="MASONRY">{t("memo.masonry")}</SelectItem>
</Select>
```
#### State Management
```typescript
const { layout, setLayout } = useView();
// Toggle between layouts
setLayout("MASONRY"); // or "LIST"
// Persistence
localStorage.setItem("memos-view-setting", JSON.stringify({ layout, orderByTimeAsc }));
```
### Integration with Memo Display
#### PagedMemoList Component
```typescript
const PagedMemoList = (props: Props) => {
const { layout } = useView(); // Get current layout preference
return (
<div className="flex flex-col justify-start items-start w-full max-w-full">
{layout === "MASONRY" ? (
<MasonryView
memoList={sortedMemoList}
renderer={props.renderer}
prefixElement={<MemoEditor />}
/>
) : (
// Traditional list view implementation
<ListView memoList={sortedMemoList} />
)}
</div>
);
};
```
### Performance Optimizations
1. **Debounced Redistribution**: Prevents excessive re-layout during rapid changes
2. **Memoized Calculations**: Optimized height calculations and distribution algorithms
3. **Efficient State Updates**: Only re-render when necessary
4. **ResizeObserver Cleanup**: Proper memory management
### Related Files
- `web/src/contexts/ViewContext.tsx` - Global layout state management
- `web/src/components/MasonryView/` - Masonry layout implementation
- `web/src/components/MemoDisplaySettingMenu.tsx` - Layout selection UI
- `web/src/components/PagedMemoList/PagedMemoList.tsx` - Memo list container
- `web/src/components/MasonryView/README.md` - Detailed technical documentation
### User Experience Benefits
- **Choice**: Users can choose preferred browsing style
- **Persistence**: Layout preference saved across sessions
- **Responsiveness**: Adapts to different screen sizes
- **Performance**: Optimized rendering for both modes
- **Consistency**: Same memo content, different presentation
The layout system provides flexible viewing options that cater to different user preferences and use cases while maintaining optimal performance and responsive design.

---

# File: go.mod (142 lines)
module github.com/usememos/memos
go 1.25.7
require (
connectrpc.com/connect v1.19.1
github.com/aws/aws-sdk-go-v2 v1.39.2
github.com/aws/aws-sdk-go-v2/config v1.31.12
github.com/aws/aws-sdk-go-v2/credentials v1.18.16
github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.19.4
github.com/aws/aws-sdk-go-v2/service/s3 v1.87.3
github.com/docker/docker v28.5.1+incompatible
github.com/go-sql-driver/mysql v1.9.3
github.com/google/cel-go v0.26.1
github.com/google/uuid v1.6.0
github.com/gorilla/feeds v1.2.0
github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.2
github.com/joho/godotenv v1.5.1
github.com/labstack/echo/v5 v5.0.3
github.com/lib/pq v1.10.9
github.com/lithammer/shortuuid/v4 v4.2.0
github.com/mark3labs/mcp-go v0.44.0
github.com/pkg/errors v0.9.1
github.com/spf13/cobra v1.10.1
github.com/spf13/viper v1.20.1
github.com/stretchr/testify v1.11.1
github.com/testcontainers/testcontainers-go v0.40.0
github.com/testcontainers/testcontainers-go/modules/mysql v0.40.0
github.com/testcontainers/testcontainers-go/modules/postgres v0.40.0
github.com/yuin/goldmark v1.7.13
golang.org/x/crypto v0.47.0
golang.org/x/mod v0.31.0
golang.org/x/net v0.49.0
golang.org/x/oauth2 v0.30.0
golang.org/x/sync v0.19.0
google.golang.org/genproto/googleapis/api v0.0.0-20250826171959-ef028d996bc1
google.golang.org/grpc v1.75.1
modernc.org/sqlite v1.38.2
)
require (
cel.dev/expr v0.24.0 // indirect
dario.cat/mergo v1.0.2 // indirect
filippo.io/edwards25519 v1.1.0 // indirect
github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 // indirect
github.com/Microsoft/go-winio v0.6.2 // indirect
github.com/antlr4-go/antlr/v4 v4.13.1 // indirect
github.com/bahlo/generic-list-go v0.2.0 // indirect
github.com/buger/jsonparser v1.1.1 // indirect
github.com/cenkalti/backoff/v4 v4.3.0 // indirect
github.com/containerd/errdefs v1.0.0 // indirect
github.com/containerd/errdefs/pkg v0.3.0 // indirect
github.com/containerd/log v0.1.0 // indirect
github.com/containerd/platforms v0.2.1 // indirect
github.com/cpuguy83/dockercfg v0.3.2 // indirect
github.com/distribution/reference v0.6.0 // indirect
github.com/docker/go-connections v0.6.0 // indirect
github.com/docker/go-units v0.5.0 // indirect
github.com/dustin/go-humanize v1.0.1 // indirect
github.com/ebitengine/purego v0.8.4 // indirect
github.com/felixge/httpsnoop v1.0.4 // indirect
github.com/fsnotify/fsnotify v1.8.0 // indirect
github.com/go-logr/logr v1.4.3 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
github.com/go-ole/go-ole v1.2.6 // indirect
github.com/go-viper/mapstructure/v2 v2.2.1 // indirect
github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/invopop/jsonschema v0.13.0 // indirect
github.com/klauspost/compress v1.18.0 // indirect
github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 // indirect
github.com/magiconair/properties v1.8.10 // indirect
github.com/mailru/easyjson v0.7.7 // indirect
github.com/moby/docker-image-spec v1.3.1 // indirect
github.com/moby/go-archive v0.1.0 // indirect
github.com/moby/patternmatcher v0.6.0 // indirect
github.com/moby/sys/sequential v0.6.0 // indirect
github.com/moby/sys/user v0.4.0 // indirect
github.com/moby/sys/userns v0.1.0 // indirect
github.com/moby/term v0.5.0 // indirect
github.com/morikuni/aec v1.0.0 // indirect
github.com/ncruces/go-strftime v0.1.9 // indirect
github.com/opencontainers/go-digest v1.0.0 // indirect
github.com/opencontainers/image-spec v1.1.1 // indirect
github.com/pelletier/go-toml/v2 v2.2.3 // indirect
github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c // indirect
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
github.com/sagikazarmark/locafero v0.7.0 // indirect
github.com/sashabaranov/go-openai v1.41.2 // indirect
github.com/shirou/gopsutil/v4 v4.25.6 // indirect
github.com/sirupsen/logrus v1.9.3 // indirect
github.com/sourcegraph/conc v0.3.0 // indirect
github.com/spf13/afero v1.12.0 // indirect
github.com/spf13/cast v1.7.1 // indirect
github.com/spf13/pflag v1.0.9 // indirect
github.com/stoewer/go-strcase v1.3.1 // indirect
github.com/subosito/gotenv v1.6.0 // indirect
github.com/tklauser/go-sysconf v0.3.12 // indirect
github.com/tklauser/numcpus v0.6.1 // indirect
github.com/wk8/go-ordered-map/v2 v2.1.8 // indirect
github.com/yosida95/uritemplate/v3 v3.0.2 // indirect
github.com/yusufpapurcu/wmi v1.2.4 // indirect
go.opentelemetry.io/auto/sdk v1.1.0 // indirect
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.54.0 // indirect
go.opentelemetry.io/otel v1.37.0 // indirect
go.opentelemetry.io/otel/metric v1.37.0 // indirect
go.opentelemetry.io/otel/trace v1.37.0 // indirect
go.uber.org/atomic v1.9.0 // indirect
go.uber.org/multierr v1.9.0 // indirect
golang.org/x/exp v0.0.0-20250819193227-8b4c13bb791b // indirect
golang.org/x/image v0.30.0 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20250826171959-ef028d996bc1 // indirect
modernc.org/libc v1.66.8 // indirect
modernc.org/mathutil v1.7.1 // indirect
modernc.org/memory v1.11.0 // indirect
)
require (
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.1 // indirect
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.9 // indirect
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.9 // indirect
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.9 // indirect
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3 // indirect
github.com/aws/aws-sdk-go-v2/internal/v4a v1.4.6 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.1 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.8.6 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.9 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.6 // indirect
github.com/aws/aws-sdk-go-v2/service/sso v1.29.6 // indirect
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.1 // indirect
github.com/aws/aws-sdk-go-v2/service/sts v1.38.6 // indirect
github.com/aws/smithy-go v1.23.0 // indirect
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
github.com/disintegration/imaging v1.6.2
github.com/golang-jwt/jwt/v5 v5.3.0
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
golang.org/x/sys v0.40.0 // indirect
golang.org/x/text v0.33.0
golang.org/x/time v0.14.0 // indirect
google.golang.org/protobuf v1.36.9
gopkg.in/yaml.v3 v3.0.1 // indirect
)

---

# File: go.sum (358 lines)
cel.dev/expr v0.24.0 h1:56OvJKSH3hDGL0ml5uSxZmz3/3Pq4tJ+fb1unVLAFcY=
cel.dev/expr v0.24.0/go.mod h1:hLPLo1W4QUmuYdA72RBX06QTs6MXw941piREPl3Yfiw=
connectrpc.com/connect v1.19.1 h1:R5M57z05+90EfEvCY1b7hBxDVOUl45PrtXtAV2fOC14=
connectrpc.com/connect v1.19.1/go.mod h1:tN20fjdGlewnSFeZxLKb0xwIZ6ozc3OQs2hTXy4du9w=
dario.cat/mergo v1.0.2 h1:85+piFYR1tMbRrLcDwR18y4UKJ3aH1Tbzi24VRW1TK8=
dario.cat/mergo v1.0.2/go.mod h1:E/hbnu0NxMFBjpMIE34DRGLWqDy0g5FuKDhCb31ngxA=
filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA=
filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4=
github.com/AdaLogics/go-fuzz-headers v0.0.0-20240806141605-e8a1dd7889d6 h1:He8afgbRMd7mFxO99hRNu+6tazq8nFF9lIwo9JFroBk=
github.com/AdaLogics/go-fuzz-headers v0.0.0-20240806141605-e8a1dd7889d6/go.mod h1:8o94RPi1/7XTJvwPpRSzSUedZrtlirdB3r9Z20bi2f8=
github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOElx5B5HZ4hJQsoJ/PvUvKRhJHDQXO8P8=
github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E=
github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY=
github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU=
github.com/antlr4-go/antlr/v4 v4.13.1 h1:SqQKkuVZ+zWkMMNkjy5FZe5mr5WURWnlpmOuzYWrPrQ=
github.com/antlr4-go/antlr/v4 v4.13.1/go.mod h1:GKmUxMtwp6ZgGwZSva4eWPC5mS6vUAmOABFgjdkM7Nw=
github.com/aws/aws-sdk-go-v2 v1.39.2 h1:EJLg8IdbzgeD7xgvZ+I8M1e0fL0ptn/M47lianzth0I=
github.com/aws/aws-sdk-go-v2 v1.39.2/go.mod h1:sDioUELIUO9Znk23YVmIk86/9DOpkbyyVb1i/gUNFXY=
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.1 h1:i8p8P4diljCr60PpJp6qZXNlgX4m2yQFpYk+9ZT+J4E=
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.1/go.mod h1:ddqbooRZYNoJ2dsTwOty16rM+/Aqmk/GOXrK8cg7V00=
github.com/aws/aws-sdk-go-v2/config v1.31.12 h1:pYM1Qgy0dKZLHX2cXslNacbcEFMkDMl+Bcj5ROuS6p8=
github.com/aws/aws-sdk-go-v2/config v1.31.12/go.mod h1:/MM0dyD7KSDPR+39p9ZNVKaHDLb9qnfDurvVS2KAhN8=
github.com/aws/aws-sdk-go-v2/credentials v1.18.16 h1:4JHirI4zp958zC026Sm+V4pSDwW4pwLefKrc0bF2lwI=
github.com/aws/aws-sdk-go-v2/credentials v1.18.16/go.mod h1:qQMtGx9OSw7ty1yLclzLxXCRbrkjWAM7JnObZjmCB7I=
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.9 h1:Mv4Bc0mWmv6oDuSWTKnk+wgeqPL5DRFu5bQL9BGPQ8Y=
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.9/go.mod h1:IKlKfRppK2a1y0gy1yH6zD+yX5uplJ6UuPlgd48dJiQ=
github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.19.4 h1:BTl+TXrpnrpPWb/J3527GsJ/lMkn7z3GO12j6OlsbRg=
github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.19.4/go.mod h1:cG2tenc/fscpChiZE29a2crG9uo2t6nQGflFllFL8M8=
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.9 h1:se2vOWGD3dWQUtfn4wEjRQJb1HK1XsNIt825gskZ970=
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.9/go.mod h1:hijCGH2VfbZQxqCDN7bwz/4dzxV+hkyhjawAtdPWKZA=
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.9 h1:6RBnKZLkJM4hQ+kN6E7yWFveOTg8NLPHAkqrs4ZPlTU=
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.9/go.mod h1:V9rQKRmK7AWuEsOMnHzKj8WyrIir1yUJbZxDuZLFvXI=
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3 h1:bIqFDwgGXXN1Kpp99pDOdKMTTb5d2KyU5X/BZxjOkRo=
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3/go.mod h1:H5O/EsxDWyU+LP/V8i5sm8cxoZgc2fdNR9bxlOFrQTo=
github.com/aws/aws-sdk-go-v2/internal/v4a v1.4.6 h1:R0tNFJqfjHL3900cqhXuwQ+1K4G0xc9Yf8EDbFXCKEw=
github.com/aws/aws-sdk-go-v2/internal/v4a v1.4.6/go.mod h1:y/7sDdu+aJvPtGXr4xYosdpq9a6T9Z0jkXfugmti0rI=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.1 h1:oegbebPEMA/1Jny7kvwejowCaHz1FWZAQ94WXFNCyTM=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.1/go.mod h1:kemo5Myr9ac0U9JfSjMo9yHLtw+pECEHsFtJ9tqCEI8=
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.8.6 h1:hncKj/4gR+TPauZgTAsxOxNcvBayhUlYZ6LO/BYiQ30=
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.8.6/go.mod h1:OiIh45tp6HdJDDJGnja0mw8ihQGz3VGrUflLqSL0SmM=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.9 h1:5r34CgVOD4WZudeEKZ9/iKpiT6cM1JyEROpXjOcdWv8=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.9/go.mod h1:dB12CEbNWPbzO2uC6QSWHteqOg4JfBVJOojbAoAUb5I=
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.6 h1:nEXUSAwyUfLTgnc9cxlDWy637qsq4UWwp3sNAfl0Z3Y=
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.6/go.mod h1:HGzIULx4Ge3Do2V0FaiYKcyKzOqwrhUZgCI77NisswQ=
github.com/aws/aws-sdk-go-v2/service/s3 v1.87.3 h1:ETkfWcXP2KNPLecaDa++5bsQhCRa5M5sLUJa5DWYIIg=
github.com/aws/aws-sdk-go-v2/service/s3 v1.87.3/go.mod h1:+/3ZTqoYb3Ur7DObD00tarKMLMuKg8iqz5CHEanqTnw=
github.com/aws/aws-sdk-go-v2/service/sso v1.29.6 h1:A1oRkiSQOWstGh61y4Wc/yQ04sqrQZr1Si/oAXj20/s=
github.com/aws/aws-sdk-go-v2/service/sso v1.29.6/go.mod h1:5PfYspyCU5Vw1wNPsxi15LZovOnULudOQuVxphSflQA=
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.1 h1:5fm5RTONng73/QA73LhCNR7UT9RpFH3hR6HWL6bIgVY=
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.1/go.mod h1:xBEjWD13h+6nq+z4AkqSfSvqRKFgDIQeaMguAJndOWo=
github.com/aws/aws-sdk-go-v2/service/sts v1.38.6 h1:p3jIvqYwUZgu/XYeI48bJxOhvm47hZb5HUQ0tn6Q9kA=
github.com/aws/aws-sdk-go-v2/service/sts v1.38.6/go.mod h1:WtKK+ppze5yKPkZ0XwqIVWD4beCwv056ZbPQNoeHqM8=
github.com/aws/smithy-go v1.23.0 h1:8n6I3gXzWJB2DxBDnfxgBaSX6oe0d/t10qGz7OKqMCE=
github.com/aws/smithy-go v1.23.0/go.mod h1:t1ufH5HMublsJYulve2RKmHDC15xu1f26kHCp/HgceI=
github.com/bahlo/generic-list-go v0.2.0 h1:5sz/EEAK+ls5wF+NeqDpk5+iNdMDXrh3z3nPnH1Wvgk=
github.com/bahlo/generic-list-go v0.2.0/go.mod h1:2KvAjgMlE5NNynlg/5iLrrCCZ2+5xWbdbCW3pNTGyYg=
github.com/buger/jsonparser v1.1.1 h1:2PnMjfWD7wBILjqQbt530v576A/cAbQvEW9gGIpYMUs=
github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0=
github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8=
github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE=
github.com/containerd/errdefs v1.0.0 h1:tg5yIfIlQIrxYtu9ajqY42W3lpS19XqdxRQeEwYG8PI=
github.com/containerd/errdefs v1.0.0/go.mod h1:+YBYIdtsnF4Iw6nWZhJcqGSg/dwvV7tyJ/kCkyJ2k+M=
github.com/containerd/errdefs/pkg v0.3.0 h1:9IKJ06FvyNlexW690DXuQNx2KA2cUJXx151Xdx3ZPPE=
github.com/containerd/errdefs/pkg v0.3.0/go.mod h1:NJw6s9HwNuRhnjJhM7pylWwMyAkmCQvQ4GpJHEqRLVk=
github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I=
github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo=
github.com/containerd/platforms v0.2.1 h1:zvwtM3rz2YHPQsF2CHYM8+KtB5dvhISiXh5ZpSBQv6A=
github.com/containerd/platforms v0.2.1/go.mod h1:XHCb+2/hzowdiut9rkudds9bE5yJ7npe7dG/wG+uFPw=
github.com/cpuguy83/dockercfg v0.3.2 h1:DlJTyZGBDlXqUZ2Dk2Q3xHs/FtnooJJVaad2S9GKorA=
github.com/cpuguy83/dockercfg v0.3.2/go.mod h1:sugsbF4//dDlL/i+S+rtpIWp+5h0BHJHfjj5/jFyUJc=
github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
github.com/creack/pty v1.1.18 h1:n56/Zwd5o6whRC5PMGretI4IdRLlmBXYNjScPaBgsbY=
github.com/creack/pty v1.1.18/go.mod h1:MOBLtS5ELjhRRrroQr9kyvTxUAFNvYEK993ew/Vr4O4=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/disintegration/imaging v1.6.2 h1:w1LecBlG2Lnp8B3jk5zSuNqd7b4DXhcjwek1ei82L+c=
github.com/disintegration/imaging v1.6.2/go.mod h1:44/5580QXChDfwIclfc/PCwrr44amcmDAg8hxG0Ewe4=
github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk=
github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E=
github.com/docker/docker v28.5.1+incompatible h1:Bm8DchhSD2J6PsFzxC35TZo4TLGR2PdW/E69rU45NhM=
github.com/docker/docker v28.5.1+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/docker/go-connections v0.6.0 h1:LlMG9azAe1TqfR7sO+NJttz1gy6KO7VJBh+pMmjSD94=
github.com/docker/go-connections v0.6.0/go.mod h1:AahvXYshr6JgfUJGdDCs2b5EZG/vmaMAntpSFH5BFKE=
github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4=
github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
github.com/ebitengine/purego v0.8.4 h1:CF7LEKg5FFOsASUj0+QwaXf8Ht6TlFxg09+S9wz0omw=
github.com/ebitengine/purego v0.8.4/go.mod h1:iIjxzd6CiRiOG0UyXP+V1+jWqUXVjPKLAI0mRfJZTmQ=
github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg=
github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8=
github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
github.com/fsnotify/fsnotify v1.8.0 h1:dAwr6QBTBZIkG8roQaJjGof0pp0EeF+tNV7YBP3F/8M=
github.com/fsnotify/fsnotify v1.8.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0=
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI=
github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY=
github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0=
github.com/go-sql-driver/mysql v1.9.3 h1:U/N249h2WzJ3Ukj8SowVFjdtZKfu9vlLZxjPXV1aweo=
github.com/go-sql-driver/mysql v1.9.3/go.mod h1:qn46aNg1333BRMNU69Lq93t8du/dwxI64Gl8i5p1WMU=
github.com/go-viper/mapstructure/v2 v2.2.1 h1:ZAaOCxANMuZx5RCeg0mBdEZk7DZasvvZIxtHqx8aGss=
github.com/go-viper/mapstructure/v2 v2.2.1/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
github.com/golang-jwt/jwt/v5 v5.3.0 h1:pv4AsKCKKZuqlgs5sUmn4x8UlGa0kEVt/puTpKx9vvo=
github.com/golang-jwt/jwt/v5 v5.3.0/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE=
github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
github.com/google/cel-go v0.26.1 h1:iPbVVEdkhTX++hpe3lzSk7D3G3QSYqLGoHOcEio+UXQ=
github.com/google/cel-go v0.26.1/go.mod h1:A9O8OU9rdvrK5MQyrqfIxo1a0u4g3sF8KB6PUIaryMM=
github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e h1:ijClszYn+mADRFY17kjQEVQ1XRhq2/JR1M3sGqeJoxs=
github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e/go.mod h1:boTsfXsheKC2y+lKOCMpSfarhxDeIzfZG1jqGcPl3cA=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/gorilla/feeds v1.2.0 h1:O6pBiXJ5JHhPvqy53NsjKOThq+dNFm8+DFrxBEdzSCc=
github.com/gorilla/feeds v1.2.0/go.mod h1:WMib8uJP3BbY+X8Szd1rA5Pzhdfh+HCCAYT2z7Fza6Y=
github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.2 h1:8Tjv8EJ+pM1xP8mK6egEbD1OgnVTyacbefKhmbLhIhU=
github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.2/go.mod h1:pkJQ2tZHJ0aFOVEEot6oZmaVEZcRme73eIFmhiVuRWs=
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
github.com/invopop/jsonschema v0.13.0 h1:KvpoAJWEjR3uD9Kbm2HWJmqsEaHt8lBUpd0qHcIi21E=
github.com/invopop/jsonschema v0.13.0/go.mod h1:ffZ5Km5SWWRAIN6wbDXItl95euhFz2uON45H2qjYt+0=
github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM=
github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg=
github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a h1:bbPeKD0xmW/Y25WS6cokEszi5g+S0QxI/d45PkRi7Nk=
github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM=
github.com/jackc/pgx/v5 v5.5.4 h1:Xp2aQS8uXButQdnCMWNmvx6UysWQQC+u1EoizjguY+8=
github.com/jackc/pgx/v5 v5.5.4/go.mod h1:ez9gk+OAat140fv9ErkZDYFWmXLfV+++K0uAOiwgm1A=
github.com/jackc/puddle/v2 v2.2.1 h1:RhxXJtFG022u4ibrCSMSiu5aOq1i77R3OHKNJj77OAk=
github.com/jackc/puddle/v2 v2.2.1/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4=
github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0=
github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4=
github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo=
github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ=
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/labstack/echo/v5 v5.0.3 h1:Jql8sDtCYXrhh2Mbs6jKwjR6r7X8FSQQmch+w6QS7kc=
github.com/labstack/echo/v5 v5.0.3/go.mod h1:SyvlSdObGjRXeQfCCXW/sybkZdOOQZBmpKF0bvALaeo=
github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw=
github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
github.com/lithammer/shortuuid/v4 v4.2.0 h1:LMFOzVB3996a7b8aBuEXxqOBflbfPQAiVzkIcHO0h8c=
github.com/lithammer/shortuuid/v4 v4.2.0/go.mod h1:D5noHZ2oFw/YaKCfGy0YxyE7M0wMbezmMjPdhyEFe6Y=
github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4=
github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I=
github.com/magiconair/properties v1.8.10 h1:s31yESBquKXCV9a/ScB3ESkOjUYYv+X0rg8SYxI99mE=
github.com/magiconair/properties v1.8.10/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0=
github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0=
github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
github.com/mark3labs/mcp-go v0.44.0 h1:OlYfcVviAnwNN40QZUrrzU0QZjq3En7rCU5X09a/B7I=
github.com/mark3labs/mcp-go v0.44.0/go.mod h1:YnJfOL382MIWDx1kMY+2zsRHU/q78dBg9aFb8W6Thdw=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mdelapenya/tlscert v0.2.0 h1:7H81W6Z/4weDvZBNOfQte5GpIMo0lGYEeWbkGp5LJHI=
github.com/mdelapenya/tlscert v0.2.0/go.mod h1:O4njj3ELLnJjGdkN7M/vIVCpZ+Cf0L6muqOG4tLSl8o=
github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0=
github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo=
github.com/moby/go-archive v0.1.0 h1:Kk/5rdW/g+H8NHdJW2gsXyZ7UnzvJNOy6VKJqueWdcQ=
github.com/moby/go-archive v0.1.0/go.mod h1:G9B+YoujNohJmrIYFBpSd54GTUB4lt9S+xVQvsJyFuo=
github.com/moby/patternmatcher v0.6.0 h1:GmP9lR19aU5GqSSFko+5pRqHi+Ohk1O69aFiKkVGiPk=
github.com/moby/patternmatcher v0.6.0/go.mod h1:hDPoyOpDY7OrrMDLaYoY3hf52gNCR/YOUYxkhApJIxc=
github.com/moby/sys/atomicwriter v0.1.0 h1:kw5D/EqkBwsBFi0ss9v1VG3wIkVhzGvLklJ+w3A14Sw=
github.com/moby/sys/atomicwriter v0.1.0/go.mod h1:Ul8oqv2ZMNHOceF643P6FKPXeCmYtlQMvpizfsSoaWs=
github.com/moby/sys/sequential v0.6.0 h1:qrx7XFUd/5DxtqcoH1h438hF5TmOvzC/lspjy7zgvCU=
github.com/moby/sys/sequential v0.6.0/go.mod h1:uyv8EUTrca5PnDsdMGXhZe6CCe8U/UiTWd+lL+7b/Ko=
github.com/moby/sys/user v0.4.0 h1:jhcMKit7SA80hivmFJcbB1vqmw//wU61Zdui2eQXuMs=
github.com/moby/sys/user v0.4.0/go.mod h1:bG+tYYYJgaMtRKgEmuueC0hJEAZWwtIbZTB+85uoHjs=
github.com/moby/sys/userns v0.1.0 h1:tVLXkFOxVu9A64/yh59slHVv9ahO9UIev4JZusOLG/g=
github.com/moby/sys/userns v0.1.0/go.mod h1:IHUYgu/kao6N8YZlp9Cf444ySSvCmDlmzUcYfDHOl28=
github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0=
github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y=
github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A=
github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc=
github.com/ncruces/go-strftime v0.1.9 h1:bY0MQC28UADQmHmaF5dgpLmImcShSi2kHU9XLdhx/f4=
github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U=
github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM=
github.com/opencontainers/image-spec v1.1.1 h1:y0fUlFfIZhPF1W537XOLg0/fcx6zcHCJwooC2xJA040=
github.com/opencontainers/image-spec v1.1.1/go.mod h1:qpqAh3Dmcf36wStyyWU+kCeDgrGnAve2nCC8+7h8Q0M=
github.com/pelletier/go-toml/v2 v2.2.3 h1:YmeHyLY8mFWbdkNWwpr+qIL2bEqT0o95WSdkNHvL12M=
github.com/pelletier/go-toml/v2 v2.2.3/go.mod h1:MfCQTFTvCcUyyvvwm1+G6H/jORL20Xlb6rzQu9GuUkc=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c h1:ncq/mPwQF4JjgDlrVEn3C11VoGHZN7m8qihwgMEtzYw=
github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII=
github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/sagikazarmark/locafero v0.7.0 h1:5MqpDsTGNDhY8sGp0Aowyf0qKsPrhewaLSsFaodPcyo=
github.com/sagikazarmark/locafero v0.7.0/go.mod h1:2za3Cg5rMaTMoG/2Ulr9AwtFaIppKXTRYnozin4aB5k=
github.com/sashabaranov/go-openai v1.41.2 h1:vfPRBZNMpnqu8ELsclWcAvF19lDNgh1t6TVfFFOPiSM=
github.com/sashabaranov/go-openai v1.41.2/go.mod h1:lj5b/K+zjTSFxVLijLSTDZuP7adOgerWeFyZLUhAKRg=
github.com/shirou/gopsutil/v4 v4.25.6 h1:kLysI2JsKorfaFPcYmcJqbzROzsBWEOAtw6A7dIfqXs=
github.com/shirou/gopsutil/v4 v4.25.6/go.mod h1:PfybzyydfZcN+JMMjkF6Zb8Mq1A/VcogFFg7hj50W9c=
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo=
github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0=
github.com/spf13/afero v1.12.0 h1:UcOPyRBYczmFn6yvphxkn9ZEOY65cpwGKb5mL36mrqs=
github.com/spf13/afero v1.12.0/go.mod h1:ZTlWwG4/ahT8W7T0WQ5uYmjI9duaLQGy3Q2OAl4sk/4=
github.com/spf13/cast v1.7.1 h1:cuNEagBQEHWN1FnbGEjCXL2szYEXqfJPbP2HNUaca9Y=
github.com/spf13/cast v1.7.1/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo=
github.com/spf13/cobra v1.10.1 h1:lJeBwCfmrnXthfAupyUTzJ/J4Nc1RsHC/mSRU2dll/s=
github.com/spf13/cobra v1.10.1/go.mod h1:7SmJGaTHFVBY0jW4NXGluQoLvhqFQM+6XSKD+P4XaB0=
github.com/spf13/pflag v1.0.9 h1:9exaQaMOCwffKiiiYk6/BndUBv+iRViNW+4lEMi0PvY=
github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/viper v1.20.1 h1:ZMi+z/lvLyPSCoNtFCpqjy0S4kPbirhpTMwl8BkW9X4=
github.com/spf13/viper v1.20.1/go.mod h1:P9Mdzt1zoHIG8m2eZQinpiBjo6kCmZSKBClNNqjJvu4=
github.com/stoewer/go-strcase v1.3.1 h1:iS0MdW+kVTxgMoE1LAZyMiYJFKlOzLooE4MxjirtkAs=
github.com/stoewer/go-strcase v1.3.1/go.mod h1:fAH5hQ5pehh+j3nZfvwdk2RgEgQjAoM8wodgtPmh1xo=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY=
github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8=
github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU=
github.com/testcontainers/testcontainers-go v0.40.0 h1:pSdJYLOVgLE8YdUY2FHQ1Fxu+aMnb6JfVz1mxk7OeMU=
github.com/testcontainers/testcontainers-go v0.40.0/go.mod h1:FSXV5KQtX2HAMlm7U3APNyLkkap35zNLxukw9oBi/MY=
github.com/testcontainers/testcontainers-go/modules/mysql v0.40.0 h1:P9Txfy5Jothx2wFdcus0QoSmX/PKSIXZxrTbZPVJswA=
github.com/testcontainers/testcontainers-go/modules/mysql v0.40.0/go.mod h1:oZPHHqJqXG7FD8OB/yWH7gLnDvZUlFHAVJNrGftL+eg=
github.com/testcontainers/testcontainers-go/modules/postgres v0.40.0 h1:s2bIayFXlbDFexo96y+htn7FzuhpXLYJNnIuglNKqOk=
github.com/testcontainers/testcontainers-go/modules/postgres v0.40.0/go.mod h1:h+u/2KoREGTnTl9UwrQ/g+XhasAT8E6dClclAADeXoQ=
github.com/tklauser/go-sysconf v0.3.12 h1:0QaGUFOdQaIVdPgfITYzaTegZvdCjmYO52cSFAEVmqU=
github.com/tklauser/go-sysconf v0.3.12/go.mod h1:Ho14jnntGE1fpdOqQEEaiKRpvIavV0hSfmBq8nJbHYI=
github.com/tklauser/numcpus v0.6.1 h1:ng9scYS7az0Bk4OZLvrNXNSAO2Pxr1XXRAPyjhIx+Fk=
github.com/tklauser/numcpus v0.6.1/go.mod h1:1XfjsgE2zo8GVw7POkMbHENHzVg3GzmoZ9fESEdAacY=
github.com/wk8/go-ordered-map/v2 v2.1.8 h1:5h/BUHu93oj4gIdvHHHGsScSTMijfx5PeYkE/fJgbpc=
github.com/wk8/go-ordered-map/v2 v2.1.8/go.mod h1:5nJHM5DyteebpVlHnWMV0rPz6Zp7+xBAnxjb1X5vnTw=
github.com/yosida95/uritemplate/v3 v3.0.2 h1:Ed3Oyj9yrmi9087+NczuL5BwkIc4wvTb5zIM+UJPGz4=
github.com/yosida95/uritemplate/v3 v3.0.2/go.mod h1:ILOh0sOhIJR3+L/8afwt/kE++YT040gmv5BQTMR2HP4=
github.com/yuin/goldmark v1.7.13 h1:GPddIs617DnBLFFVJFgpo1aBfe/4xcvMc3SB5t/D0pA=
github.com/yuin/goldmark v1.7.13/go.mod h1:ip/1k0VRfGynBgxOz0yCqHrbZXhcjxyuS66Brc7iBKg=
github.com/yusufpapurcu/wmi v1.2.4 h1:zFUKzehAFReQwLys1b/iSMl+JQGSCSjtVqQn9bBrPo0=
github.com/yusufpapurcu/wmi v1.2.4/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0=
go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA=
go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.54.0 h1:TT4fX+nBOA/+LUkobKGW1ydGcn+G3vRw9+g5HwCphpk=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.54.0/go.mod h1:L7UH0GbB0p47T4Rri3uHjbpCFYrVrwc1I25QhNPiGK8=
go.opentelemetry.io/otel v1.37.0 h1:9zhNfelUvx0KBfu/gb+ZgeAfAgtWrfHJZcAqFC228wQ=
go.opentelemetry.io/otel v1.37.0/go.mod h1:ehE/umFRLnuLa/vSccNq9oS1ErUlkkK71gMcN34UG8I=
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.19.0 h1:Mne5On7VWdx7omSrSSZvM4Kw7cS7NQkOOmLcgscI51U=
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.19.0/go.mod h1:IPtUMKL4O3tH5y+iXVyAXqpAwMuzC1IrxVS81rummfE=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.19.0 h1:IeMeyr1aBvBiPVYihXIaeIZba6b8E1bYp7lbdxK8CQg=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.19.0/go.mod h1:oVdCUtjq9MK9BlS7TtucsQwUcXcymNiEDjgDD2jMtZU=
go.opentelemetry.io/otel/metric v1.37.0 h1:mvwbQS5m0tbmqML4NqK+e3aDiO02vsf/WgbsdpcPoZE=
go.opentelemetry.io/otel/metric v1.37.0/go.mod h1:04wGrZurHYKOc+RKeye86GwKiTb9FKm1WHtO+4EVr2E=
go.opentelemetry.io/otel/sdk v1.37.0 h1:ItB0QUqnjesGRvNcmAcU0LyvkVyGJ2xftD29bWdDvKI=
go.opentelemetry.io/otel/sdk v1.37.0/go.mod h1:VredYzxUvuo2q3WRcDnKDjbdvmO0sCzOvVAiY+yUkAg=
go.opentelemetry.io/otel/sdk/metric v1.37.0 h1:90lI228XrB9jCMuSdA0673aubgRobVZFhbjxHHspCPc=
go.opentelemetry.io/otel/sdk/metric v1.37.0/go.mod h1:cNen4ZWfiD37l5NhS+Keb5RXVWZWpRE+9WyVCpbo5ps=
go.opentelemetry.io/otel/trace v1.37.0 h1:HLdcFNbRQBE2imdSEgm/kwqmQj1Or1l/7bW6mxVK7z4=
go.opentelemetry.io/otel/trace v1.37.0/go.mod h1:TlgrlQ+PtQO5XFerSPUYG0JSgGyryXewPGyayAWSBS0=
go.opentelemetry.io/proto/otlp v1.0.0 h1:T0TX0tmXU8a3CbNXzEKGeU5mIVOdf0oykP+u2lIVU/I=
go.opentelemetry.io/proto/otlp v1.0.0/go.mod h1:Sy6pihPLfYHkr3NkUbEhGHFhINUSI/v80hjKIs5JXpM=
go.uber.org/atomic v1.9.0 h1:ECmE8Bn/WFTYwEW/bpKD3M8VtR/zQVbavAoalC1PYyE=
go.uber.org/atomic v1.9.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
go.uber.org/multierr v1.9.0 h1:7fIwc/ZtS0q++VgcfqFDxSBZVv/Xo49/SYnDFupUwlI=
go.uber.org/multierr v1.9.0/go.mod h1:X2jQV1h+kxSjClGpnseKVIxpmcjrj7MNnI0bnlfKTVQ=
golang.org/x/crypto v0.47.0 h1:V6e3FRj+n4dbpw86FJ8Fv7XVOql7TEwpHapKoMJ/GO8=
golang.org/x/crypto v0.47.0/go.mod h1:ff3Y9VzzKbwSSEzWqJsJVBnWmRwRSHt/6Op5n9bQc4A=
golang.org/x/exp v0.0.0-20250819193227-8b4c13bb791b h1:DXr+pvt3nC887026GRP39Ej11UATqWDmWuS99x26cD0=
golang.org/x/exp v0.0.0-20250819193227-8b4c13bb791b/go.mod h1:4QTo5u+SEIbbKW1RacMZq1YEfOBqeXa19JeshGi+zc4=
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/image v0.30.0 h1:jD5RhkmVAnjqaCUXfbGBrn3lpxbknfN9w2UhHHU+5B4=
golang.org/x/image v0.30.0/go.mod h1:SAEUTxCCMWSrJcCy/4HwavEsfZZJlYxeHLc6tTiAe/c=
golang.org/x/mod v0.31.0 h1:HaW9xtz0+kOcWKwli0ZXy79Ix+UW/vOfmWI5QVd2tgI=
golang.org/x/mod v0.31.0/go.mod h1:43JraMp9cGx1Rx3AqioxrbrhNsLl2l/iNAvuBkrezpg=
golang.org/x/net v0.49.0 h1:eeHFmOGUTtaaPSGNmjBKpbng9MulQsJURQUAfUwY++o=
golang.org/x/net v0.49.0/go.mod h1:/ysNB2EvaqvesRkuLAyjI1ycPZlQHM3q01F02UY/MV8=
golang.org/x/oauth2 v0.30.0 h1:dnDm7JmhM45NNpd8FDDeLhK6FwqbOf4MLCM9zb1BOHI=
golang.org/x/oauth2 v0.30.0/go.mod h1:B++QgG3ZKulg6sRPGD/mqlHQs5rB3Ml9erfeDY7xKlU=
golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4=
golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201204225414-ed752295db88/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.40.0 h1:DBZZqJ2Rkml6QMQsZywtnjnnGvHza6BTfYFWY9kjEWQ=
golang.org/x/sys v0.40.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/term v0.39.0 h1:RclSuaJf32jOqZz74CkPA9qFuVTX7vhLlpfj/IGWlqY=
golang.org/x/term v0.39.0/go.mod h1:yxzUCTP/U+FzoxfdKmLaA0RV1WgE0VY7hXBwKtY/4ww=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.33.0 h1:B3njUFyqtHDUI5jMn1YIr5B0IE2U0qck04r6d4KPAxE=
golang.org/x/text v0.33.0/go.mod h1:LuMebE6+rBincTi9+xWTY8TztLzKHc/9C1uBCG27+q8=
golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI=
golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4=
golang.org/x/tools v0.40.0 h1:yLkxfA+Qnul4cs9QA3KnlFu0lVmd8JJfoq+E41uSutA=
golang.org/x/tools v0.40.0/go.mod h1:Ik/tzLRlbscWpqqMRjyWYDisX8bG13FrdXp3o4Sr9lc=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk=
gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E=
google.golang.org/genproto/googleapis/api v0.0.0-20250826171959-ef028d996bc1 h1:APHvLLYBhtZvsbnpkfknDZ7NyH4z5+ub/I0u8L3Oz6g=
google.golang.org/genproto/googleapis/api v0.0.0-20250826171959-ef028d996bc1/go.mod h1:xUjFWUnWDpZ/C0Gu0qloASKFb6f8/QXiiXhSPFsD668=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250826171959-ef028d996bc1 h1:pmJpJEvT846VzausCQ5d7KreSROcDqmO388w5YbnltA=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250826171959-ef028d996bc1/go.mod h1:GmFNa4BdJZ2a8G+wCe9Bg3wwThLrJun751XstdJt5Og=
google.golang.org/grpc v1.75.1 h1:/ODCNEuf9VghjgO3rqLcfg8fiOP0nSluljWFlDxELLI=
google.golang.org/grpc v1.75.1/go.mod h1:JtPAzKiq4v1xcAB2hydNlWI2RnF85XXcV0mhKXr2ecQ=
google.golang.org/protobuf v1.36.9 h1:w2gp2mA27hUeUzj9Ex9FBjsBm40zfaDtEWow293U7Iw=
google.golang.org/protobuf v1.36.9/go.mod h1:fuxRtAxBytpl4zzqUh6/eyUujkJdNiuEkXntxiD/uRU=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gotest.tools/v3 v3.5.2 h1:7koQfIKdy+I8UTetycgUqXWSDwpgv193Ka+qRsmBY8Q=
gotest.tools/v3 v3.5.2/go.mod h1:LtdLGcnqToBH83WByAAi/wiwSFCArdFIUV/xxN4pcjA=
modernc.org/cc/v4 v4.26.4 h1:jPhG8oNjtTYuP2FA4YefTJ/wioNUGALmGuEWt7SUR6s=
modernc.org/cc/v4 v4.26.4/go.mod h1:uVtb5OGqUKpoLWhqwNQo/8LwvoiEBLvZXIQ/SmO6mL0=
modernc.org/ccgo/v4 v4.28.1 h1:wPKYn5EC/mYTqBO373jKjvX2n+3+aK7+sICCv4Fjy1A=
modernc.org/ccgo/v4 v4.28.1/go.mod h1:uD+4RnfrVgE6ec9NGguUNdhqzNIeeomeXf6CL0GTE5Q=
modernc.org/fileutil v1.3.28 h1:Vp156KUA2nPu9F1NEv036x9UGOjg2qsi5QlWTjZmtMk=
modernc.org/fileutil v1.3.28/go.mod h1:HxmghZSZVAz/LXcMNwZPA/DRrQZEVP9VX0V4LQGQFOc=
modernc.org/gc/v2 v2.6.5 h1:nyqdV8q46KvTpZlsw66kWqwXRHdjIlJOhG6kxiV/9xI=
modernc.org/gc/v2 v2.6.5/go.mod h1:YgIahr1ypgfe7chRuJi2gD7DBQiKSLMPgBQe9oIiito=
modernc.org/goabi0 v0.2.0 h1:HvEowk7LxcPd0eq6mVOAEMai46V+i7Jrj13t4AzuNks=
modernc.org/goabi0 v0.2.0/go.mod h1:CEFRnnJhKvWT1c1JTI3Avm+tgOWbkOu5oPA8eH8LnMI=
modernc.org/libc v1.66.8 h1:/awsvTnyN/sNjvJm6S3lb7KZw5WV4ly/sBEG7ZUzmIE=
modernc.org/libc v1.66.8/go.mod h1:aVdcY7udcawRqauu0HukYYxtBSizV+R80n/6aQe9D5k=
modernc.org/mathutil v1.7.1 h1:GCZVGXdaN8gTqB1Mf/usp1Y/hSqgI2vAGGP4jZMCxOU=
modernc.org/mathutil v1.7.1/go.mod h1:4p5IwJITfppl0G4sUEDtCr4DthTaT47/N3aT6MhfgJg=
modernc.org/memory v1.11.0 h1:o4QC8aMQzmcwCK3t3Ux/ZHmwFPzE6hf2Y5LbkRs+hbI=
modernc.org/memory v1.11.0/go.mod h1:/JP4VbVC+K5sU2wZi9bHoq2MAkCnrt2r98UGeSK7Mjw=
modernc.org/opt v0.1.4 h1:2kNGMRiUjrp4LcaPuLY2PzUfqM/w9N23quVwhKt5Qm8=
modernc.org/opt v0.1.4/go.mod h1:03fq9lsNfvkYSfxrfUhZCWPk1lm4cq4N+Bh//bEtgns=
modernc.org/sortutil v1.2.1 h1:+xyoGf15mM3NMlPDnFqrteY07klSFxLElE2PVuWIJ7w=
modernc.org/sortutil v1.2.1/go.mod h1:7ZI3a3REbai7gzCLcotuw9AC4VZVpYMjDzETGsSMqJE=
modernc.org/sqlite v1.38.2 h1:Aclu7+tgjgcQVShZqim41Bbw9Cho0y/7WzYptXqkEek=
modernc.org/sqlite v1.38.2/go.mod h1:cPTJYSlgg3Sfg046yBShXENNtPrWrDX8bsbAQBzgQ5E=
modernc.org/strutil v1.2.1 h1:UneZBkQA+DX2Rp35KcM69cSsNES9ly8mQWD71HKlOA0=
modernc.org/strutil v1.2.1/go.mod h1:EHkiggD70koQxjVdSBM3JKM7k6L0FbGE5eymy9i3B9A=
modernc.org/token v1.1.0 h1:Xl7Ap9dKaEs5kLoOQeQmPWevfnk/DM5qcLcYlA8ys6Y=
modernc.org/token v1.1.0/go.mod h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM=

379
info.txt Normal file
View File

@@ -0,0 +1,379 @@
# MEMOS - PROJECT INFO (Bahasa Indonesia)
## 📋 STRUKTUR PROYEK
### Arsitektur Utama
Proyek Memos adalah aplikasi catatan pribadi berbasis web yang dibangun dengan:
- **Backend**: Go (Golang) versi 1.25
- **Frontend**: React 18.3 + TypeScript
- **Protokol API**: gRPC + Connect RPC
- **Database**: SQLite (default), MySQL, PostgreSQL
### Struktur Direktori Utama
```
memos/
├── cmd/memos/ # Entry point aplikasi
│ └── main.go # File utama untuk menjalankan server
├── server/ # Server HTTP dan routing
│ ├── server.go # Konfigurasi Echo server
│ ├── router/ # Routing API dan frontend
│ │ ├── api/v1/ # Implementasi service gRPC
│ │ ├── frontend/ # Serving file statis React
│ │ └── fileserver/ # Serving file media
│ └── auth/ # Autentikasi (JWT, PAT)
├── store/ # Layer data dan database
│ ├── db/ # Driver database (sqlite/mysql/postgres)
│ ├── migration/ # File migrasi database
│ └── *.go # Model dan operasi data
├── proto/ # Definisi Protocol Buffer
│ ├── api/v1/ # Service dan message definitions
│ └── gen/ # Kode yang di-generate
├── web/ # Aplikasi frontend React
│ ├── src/ # Source code TypeScript
│ ├── package.json # Dependencies frontend
│ └── vite.config.mts # Konfigurasi Vite
└── plugin/ # Plugin tambahan
├── markdown/ # Parsing markdown
├── email/ # Pengiriman email
├── scheduler/ # Cron jobs
└── webhook/ # Webhook integration
```
## 🔧 CARA PEMBUATAN API
### 1. Definisi Protokol Buffer (.proto)
API didefinisikan dalam file `.proto` di direktori `proto/api/v1/`:
Contoh: `memo_service.proto`
```protobuf
syntax = "proto3";
package memos.api.v1;
service MemoService {
rpc CreateMemo(CreateMemoRequest) returns (Memo) {
option (google.api.http) = {
post: "/api/v1/memos"
body: "memo"
};
}
rpc ListMemos(ListMemosRequest) returns (ListMemosResponse) {
option (google.api.http) = {get: "/api/v1/memos"};
}
}
message Memo {
string name = 1;
string content = 7 [(google.api.field_behavior) = REQUIRED];
Visibility visibility = 9 [(google.api.field_behavior) = REQUIRED];
}
```
### 2. Generate Kode dari Proto
Jalankan perintah:
```bash
cd proto && buf generate
```
Ini akan menghasilkan:
- Kode Go di `proto/gen/api/v1/` (untuk backend)
- Kode TypeScript di `web/src/types/proto/api/v1/` (untuk frontend)
### 3. Implementasi Service Backend
File: `server/router/api/v1/memo_service.go`
```go
func (s *APIV1Service) CreateMemo(ctx context.Context, request *v1pb.CreateMemoRequest) (*v1pb.Memo, error) {
// 1. Validasi user
user, err := s.fetchCurrentUser(ctx)
if err != nil {
return nil, status.Errorf(codes.Unauthenticated, "user not authenticated")
}
// 2. Mapping ke model store
create := &store.Memo{
UID: shortuuid.New(),
CreatorID: user.ID,
Content: request.Memo.Content,
Visibility: convertVisibilityToStore(request.Memo.Visibility),
}
// 3. Simpan ke database
memo, err := s.Store.CreateMemo(ctx, create)
if err != nil {
return nil, err
}
// 4. Convert ke response proto
return s.convertMemoFromStore(ctx, memo, nil, nil)
}
```
### 4. Registrasi Service
File: `server/router/api/v1/v1.go`
```go
// Register gRPC gateway
if err := apiV1Service.RegisterGateway(ctx, echoServer); err != nil {
return nil, errors.Wrap(err, "failed to register gRPC gateway")
}
// Register Connect handlers
connectHandlers := v1pbconnect.NewMemoServiceHandler(apiV1Service)
echoServer.POST("/memos.api.v1.MemoService/CreateMemo",
connect.NewHandler(connectHandlers.CreateMemo))
```
### 5. Middleware dan Interceptor
Autentikasi dilakukan melalui interceptor:
- **Connect Interceptor**: `connect_interceptors.go`
- **gRPC Gateway Interceptor**: Middleware Echo
## 🖥️ FRONTEND MENGGUNAKAN TEKNOLOGI
### Framework dan Library Utama
1. **React 18.3** - Library UI utama
2. **TypeScript** - Typing untuk JavaScript
3. **Vite 7** - Build tool dan development server
4. **Tailwind CSS 4** - Styling utility-first
5. **React Query (TanStack Query) v5** - State management server
6. **Connect RPC** - Komunikasi dengan backend
### State Management
#### 1. Server State (React Query)
```typescript
// hooks/useMemoQueries.ts
export const useMemos = (filters: MemoFilters) => {
return useQuery({
queryKey: memoKeys.list(filters),
queryFn: () => memoServiceClient.listMemos({ filter: filters.query }),
});
};
export const useCreateMemo = () => {
const queryClient = useQueryClient();
return useMutation({
mutationFn: (memo: CreateMemoRequest) =>
memoServiceClient.createMemo(memo),
onSuccess: () => {
queryClient.invalidateQueries({ queryKey: memoKeys.all });
},
});
};
```
#### 2. Client State (React Context)
```typescript
// contexts/AuthContext.tsx
const AuthContext = createContext<AuthContextValue | null>(null);
export function AuthProvider({ children }: { children: ReactNode }) {
const [currentUser, setCurrentUser] = useState<User | undefined>();
const logout = useCallback(async () => {
await authServiceClient.signOut({});
setCurrentUser(undefined);
}, []);
return (
<AuthContext.Provider value={{ currentUser, logout }}>
{children}
</AuthContext.Provider>
);
}
```
### Komunikasi API
File: `web/src/connect.ts`
```typescript
// Transport dengan autentikasi
const transport = createConnectTransport({
baseUrl: window.location.origin,
useBinaryFormat: true,
interceptors: [authInterceptor], // Menangani JWT token
});
// Client service
export const memoServiceClient = createClient(MemoService, transport);
export const userServiceClient = createClient(UserService, transport);
```
### Routing
Menggunakan React Router DOM v7:
```typescript
// router/index.tsx
const router = createBrowserRouter([
{
path: "/",
element: <App />,
children: [
{ index: true, element: <Home /> },
{ path: "/m/:memoId", element: <MemoDetail /> },
{ path: "/auth", element: <AuthLayout /> },
],
},
]);
```
### Styling
Menggunakan Tailwind CSS dengan komponen UI:
```tsx
// components/ui/button.tsx
import { cva, type VariantProps } from "class-variance-authority";
const buttonVariants = cva(
"inline-flex items-center justify-center rounded-md text-sm font-medium transition-colors",
{
variants: {
variant: {
default: "bg-primary text-primary-foreground hover:bg-primary/90",
destructive: "bg-destructive text-destructive-foreground",
outline: "border border-input bg-background hover:bg-accent",
},
size: {
default: "h-10 px-4 py-2",
sm: "h-9 rounded-md px-3",
lg: "h-11 rounded-md px-8",
},
},
defaultVariants: {
variant: "default",
size: "default",
},
}
);
```
## ⚙️ ARSITEKTUR API DUAL PROTOCOL
### 1. Connect RPC (Untuk Browser)
- Path: `/memos.api.v1.MemoService/*`
- Format: Binary protocol buffers
- Digunakan oleh frontend React
### 2. gRPC-Gateway (REST API)
- Path: `/api/v1/memos`
- Format: JSON over HTTP
- Berguna untuk integrasi eksternal
Keduanya menggunakan implementasi service yang sama, sehingga konsisten.
## 🔐 AUTENTIKASI
### Metode Token
1. **JWT Access Token V2** - Stateless, 15 menit expiry
2. **Personal Access Token (PAT)** - Stateful, long-lived
### Flow Autentikasi
```typescript
// Frontend interceptor
const authInterceptor: Interceptor = (next) => async (req) => {
const token = getAccessToken();
if (token) {
req.header.set("Authorization", `Bearer ${token}`);
}
try {
return await next(req);
} catch (error) {
if (error.code === Code.Unauthenticated) {
// Refresh token otomatis
await refreshAccessToken();
// Retry request
return await next(req);
}
throw error;
}
};
```
## 🗃️ DATABASE
### Layer Abstraksi
```go
type Driver interface {
CreateMemo(ctx context.Context, create *Memo) (*Memo, error)
ListMemos(ctx context.Context, find *FindMemo) ([]*Memo, error)
UpdateMemo(ctx context.Context, update *UpdateMemo) error
DeleteMemo(ctx context.Context, delete *DeleteMemo) error
}
```
### Implementasi:
- SQLite: `store/db/sqlite/`
- MySQL: `store/db/mysql/`
- PostgreSQL: `store/db/postgres/`
### Caching:
Menggunakan in-memory cache untuk:
- Pengaturan instance
- Data user
- Pengaturan user
## 🚀 DEVELOPMENT WORKFLOW
### Backend Development
```bash
# Jalankan server development
go run ./cmd/memos --port 8081
# Testing
go test ./...
# Linting
golangci-lint run
```
### Frontend Development
```bash
cd web
pnpm install
pnpm dev # Server jalan di port 3001, proxy ke 8081
```
### Generate Proto
```bash
cd proto
buf generate
```
## 📦 DEPLOYMENT
### Opsi Deployment:
1. **Docker** (Direkomendasikan)
2. **Binary langsung**
3. **Docker Compose**
4. **Kubernetes**
### Contoh Docker:
```bash
docker run -d \
--name memos \
-p 5230:5230 \
-v ~/.memos:/var/opt/memos \
neosmemo/memos:stable
```
---
**Ringkasan Teknologi Utama:**
- **Bahasa**: Go + TypeScript
- **Framework**: React 18 + Echo (Go)
- **API**: gRPC + Connect RPC + REST
- **Database**: SQLite/MySQL/PostgreSQL
- **Build Tool**: Vite + buf (proto)
- **State Management**: React Query + Context
- **Styling**: Tailwind CSS

View File

@@ -0,0 +1,7 @@
// Package base holds shared validation primitives used across the server.
package base

import "regexp"

var (
	// UIDMatcher validates resource UIDs: 1-32 characters, alphanumeric,
	// with hyphens allowed in the middle but never as the first or last
	// character.
	UIDMatcher = regexp.MustCompile("^[a-zA-Z0-9]([a-zA-Z0-9-]{0,30}[a-zA-Z0-9])?$")
)

View File

@@ -0,0 +1,35 @@
package base
import (
"testing"
)
// TestUIDMatcher table-drives the UID regexp over valid and invalid inputs.
func TestUIDMatcher(t *testing.T) {
	cases := []struct {
		input    string
		expected bool
	}{
		{input: "", expected: false},
		{input: "-abc123", expected: false},
		{input: "012345678901234567890123456789", expected: true},
		{input: "1abc-123", expected: true},
		{input: "A123B456C789", expected: true},
		{input: "a", expected: true},
		{input: "ab", expected: true},
		{input: "a*b&c", expected: false},
		{input: "a--b", expected: true},
		{input: "a-1b-2c", expected: true},
		{input: "a1234567890123456789012345678901", expected: true},
		{input: "abc123", expected: true},
		{input: "abc123-", expected: false},
	}
	for _, tc := range cases {
		t.Run(tc.input, func(*testing.T) {
			if got := UIDMatcher.MatchString(tc.input); got != tc.expected {
				t.Errorf("For input '%s', expected %v but got %v", tc.input, tc.expected, got)
			}
		})
	}
}

109
internal/profile/profile.go Normal file
View File

@@ -0,0 +1,109 @@
package profile
import (
"fmt"
"log/slog"
"os"
"path/filepath"
"runtime"
"strings"
"github.com/pkg/errors"
)
// Profile is the configuration to start main server.
type Profile struct {
// Demo indicates if the server is in demo mode
Demo bool
// Addr is the binding address for server
Addr string
// Port is the binding port for server
Port int
// UNIXSock is the IPC binding path. Overrides Addr and Port
UNIXSock string
// Data is the data directory
Data string
// DSN points to where memos stores its own data
DSN string
// Driver is the database driver
// sqlite, mysql
Driver string
// Version is the current version of server
Version string
// InstanceURL is the url of your memos instance.
InstanceURL string
// LowCPU enables low CPU usage mode
LowCPU bool
}
// checkDataDir resolves dataDir to a clean absolute path and verifies that it
// is accessible on disk, returning the normalized path.
func checkDataDir(dataDir string) (string, error) {
	// Resolve relative paths against the process working directory so the
	// data folder tracks where memos was launched from, not the binary path.
	if !filepath.IsAbs(dataDir) {
		resolved, err := filepath.Abs(dataDir)
		if err != nil {
			return "", err
		}
		dataDir = resolved
	}
	// Drop any trailing separators the user may have supplied.
	dataDir = strings.TrimRight(dataDir, "\\/")
	if _, err := os.Stat(dataDir); err != nil {
		return "", errors.Wrapf(err, "unable to access data folder %s", dataDir)
	}
	return dataDir, nil
}
// Validate normalizes the profile in place: it picks a default data directory
// when none is set, creates it if missing, converts it to a clean absolute
// path, and derives a default SQLite DSN. Call it before using the profile.
func (p *Profile) Validate() error {
	// Set default data directory if not specified.
	if p.Data == "" {
		if runtime.GOOS == "windows" {
			p.Data = filepath.Join(os.Getenv("ProgramData"), "memos")
		} else {
			// On Linux/macOS, prefer /var/opt/memos when it exists and is
			// writable (the Docker volume scenario).
			if info, err := os.Stat("/var/opt/memos"); err == nil && info.IsDir() {
				// Probe writability with a throwaway file: os.Stat alone
				// cannot tell us whether we may create files here.
				testFile := filepath.Join("/var/opt/memos", ".write-test")
				if err := os.WriteFile(testFile, []byte("test"), 0600); err == nil {
					os.Remove(testFile)
					p.Data = "/var/opt/memos"
				} else {
					// /var/opt/memos exists but is not writable; fall back
					// to the current working directory.
					slog.Warn("/var/opt/memos is not writable, using current directory")
					p.Data = "."
				}
			} else {
				// /var/opt/memos doesn't exist: local development, use the
				// current working directory.
				p.Data = "."
			}
		}
	}
	// Create the data directory if it doesn't exist yet.
	if _, err := os.Stat(p.Data); os.IsNotExist(err) {
		if err := os.MkdirAll(p.Data, 0770); err != nil {
			slog.Error("failed to create data directory", slog.String("data", p.Data), slog.String("error", err.Error()))
			return err
		}
	}
	dataDir, err := checkDataDir(p.Data)
	if err != nil {
		slog.Error("failed to check dsn", slog.String("data", dataDir), slog.String("error", err.Error()))
		return err
	}
	p.Data = dataDir
	// Default SQLite DSN lives inside the data directory; demo mode gets its
	// own database file so it never touches production data.
	if p.Driver == "sqlite" && p.DSN == "" {
		mode := "prod"
		if p.Demo {
			mode = "demo"
		}
		dbFile := fmt.Sprintf("memos_%s.db", mode)
		p.DSN = filepath.Join(dataDir, dbFile)
	}
	return nil
}

73
internal/util/util.go Normal file
View File

@@ -0,0 +1,73 @@
package util //nolint:revive // util namespace is intentional for shared helpers
import (
"crypto/rand"
"math/big"
"net/mail"
"strconv"
"strings"
"github.com/google/uuid"
)
// ConvertStringToInt32 converts a string to int32, failing on malformed input
// or values outside the int32 range.
func ConvertStringToInt32(src string) (int32, error) {
	value, err := strconv.ParseInt(src, 10, 32)
	if err != nil {
		return 0, err
	}
	// ParseInt with bitSize 32 guarantees the value fits in int32.
	return int32(value), nil
}
// HasPrefixes reports whether src starts with at least one of the given
// prefixes. With no prefixes it returns false.
func HasPrefixes(src string, prefixes ...string) bool {
	for i := range prefixes {
		if strings.HasPrefix(src, prefixes[i]) {
			return true
		}
	}
	return false
}
// ValidateEmail reports whether email parses as an RFC 5322 address.
func ValidateEmail(email string) bool {
	_, err := mail.ParseAddress(email)
	return err == nil
}
// GenUUID returns a freshly generated random UUID in its string form.
func GenUUID() string {
	id := uuid.New()
	return id.String()
}
// letters is the alphanumeric alphabet RandomString draws from.
var letters = []rune("0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ")

// RandomString returns a random string with length n.
//
// It uses crypto/rand rather than math/rand: the former draws from the OS
// entropy source and is therefore suitable for token-like strings.
func RandomString(n int) (string, error) {
	var b strings.Builder
	b.Grow(n)
	alphabetSize := big.NewInt(int64(len(letters)))
	for i := 0; i < n; i++ {
		idx, err := rand.Int(rand.Reader, alphabetSize)
		if err != nil {
			return "", err
		}
		if _, err := b.WriteRune(letters[idx.Uint64()]); err != nil {
			return "", err
		}
	}
	return b.String(), nil
}
// ReplaceString replaces, in place, every element of slice equal to from with
// to, and returns the (mutated) slice for caller convenience.
//
// The parameters were renamed from old/new: "new" shadowed the Go builtin,
// which is legal but confusing. Callers are unaffected (Go calls are
// positional).
func ReplaceString(slice []string, from, to string) []string {
	for i, s := range slice {
		if s == from {
			slice[i] = to
		}
	}
	return slice
}

View File

@@ -0,0 +1,31 @@
package util //nolint:revive // util is an appropriate package name for utility functions
import (
"testing"
)
// TestValidateEmail table-drives ValidateEmail over valid and invalid inputs.
func TestValidateEmail(t *testing.T) {
	cases := []struct {
		email string
		want  bool
	}{
		{email: "t@gmail.com", want: true},
		{email: "@usememos.com", want: false},
		{email: "1@gmail", want: true},
	}
	for _, tc := range cases {
		if got := ValidateEmail(tc.email); got != tc.want {
			t.Errorf("Validate Email %s: got result %v, want %v.", tc.email, got, tc.want)
		}
	}
}

View File

@@ -0,0 +1,54 @@
package version
import (
"fmt"
"strings"
"golang.org/x/mod/semver"
)
// Version is the service current released version.
// Semantic versioning: https://semver.org/
var Version = "0.26.2"

// GetCurrentVersion returns the package-level Version string.
func GetCurrentVersion() string {
	return Version
}
// GetMinorVersion extracts the "major.minor" prefix (e.g., "0.25") from a
// full version string (e.g., "0.25.1"). It returns the empty string when the
// input has fewer than two dot-separated components.
func GetMinorVersion(version string) string {
	major, rest, ok := strings.Cut(version, ".")
	if !ok {
		// No dot at all: not even a major.minor pair.
		return ""
	}
	minor, _, _ := strings.Cut(rest, ".")
	return major + "." + minor
}
// IsVersionGreaterOrEqualThan returns true if version is greater than or equal to target.
func IsVersionGreaterOrEqualThan(version, target string) bool {
	// semver.Compare requires the "v" prefix on both operands.
	return semver.Compare("v"+version, "v"+target) >= 0
}
// IsVersionGreaterThan returns true if version is strictly greater than target.
func IsVersionGreaterThan(version, target string) bool {
	// semver.Compare requires the "v" prefix on both operands.
	return semver.Compare("v"+version, "v"+target) > 0
}
type SortVersion []string
func (s SortVersion) Len() int {
return len(s)
}
func (s SortVersion) Swap(i, j int) {
s[i], s[j] = s[j], s[i]
}
func (s SortVersion) Less(i, j int) bool {
v1 := fmt.Sprintf("v%s", s[i])
v2 := fmt.Sprintf("v%s", s[j])
return semver.Compare(v1, v2) == -1
}

View File

@@ -0,0 +1,103 @@
package version
import (
"sort"
"testing"
"github.com/stretchr/testify/assert"
)
// TestIsVersionGreaterOrEqualThan covers equal, greater, and lesser versions.
func TestIsVersionGreaterOrEqualThan(t *testing.T) {
	cases := []struct {
		version, target string
		want            bool
	}{
		{"0.9.1", "0.9.1", true},
		{"0.10.0", "0.9.1", true},
		{"0.9.0", "0.9.1", false},
	}
	for _, tc := range cases {
		if got := IsVersionGreaterOrEqualThan(tc.version, tc.target); got != tc.want {
			t.Errorf("got result %v, want %v.", got, tc.want)
		}
	}
}
// TestIsVersionGreaterThan covers strict ordering, including two-component
// versions such as "0.23".
func TestIsVersionGreaterThan(t *testing.T) {
	cases := []struct {
		version, target string
		want            bool
	}{
		{"0.9.1", "0.9.1", false},
		{"0.10.0", "0.8.0", true},
		{"0.23", "0.22", true},
		{"0.8.0", "0.10.0", false},
		{"0.9.0", "0.9.1", false},
		{"0.22", "0.22", false},
	}
	for _, tc := range cases {
		if got := IsVersionGreaterThan(tc.version, tc.target); got != tc.want {
			t.Errorf("got result %v, want %v.", got, tc.want)
		}
	}
}
// TestSortVersion sorts mixed version lists and checks the resulting order.
func TestSortVersion(t *testing.T) {
	tests := []struct {
		versionList []string
		want        []string
	}{
		{
			versionList: []string{"0.9.1", "0.10.0", "0.8.0"},
			want:        []string{"0.8.0", "0.9.1", "0.10.0"},
		},
		{
			versionList: []string{"1.9.1", "0.9.1", "0.10.0", "0.8.0"},
			want:        []string{"0.8.0", "0.9.1", "0.10.0", "1.9.1"},
		},
	}
	for _, test := range tests {
		sort.Sort(SortVersion(test.versionList))
		// assert.Equal takes (t, expected, actual); the original call passed
		// them swapped, which inverts failure messages.
		assert.Equal(t, test.want, test.versionList)
	}
}

BIN
memos-linux-arm64 Executable file

Binary file not shown.

BIN
memos_prod.db Normal file

Binary file not shown.

BIN
memos_prod.db-shm Normal file

Binary file not shown.

BIN
memos_prod.db-wal Normal file

Binary file not shown.

110
plugin/ai/groq/groq.go Normal file
View File

@@ -0,0 +1,110 @@
package ai
import (
"context"
"fmt"
"github.com/sashabaranov/go-openai"
)
// GroqClient talks to the Groq API through its OpenAI-compatible endpoint.
type GroqClient struct {
	client *openai.Client
	config GroqConfig
}

// GroqConfig configures a GroqClient.
type GroqConfig struct {
	// APIKey authenticates requests to the Groq API.
	APIKey string
	// BaseURL overrides the API endpoint; empty selects the Groq default.
	BaseURL string
	// DefaultModel is used when a request does not name a model.
	DefaultModel string
}

// CompletionRequest is the provider-neutral input for generating completions.
type CompletionRequest struct {
	Model       string
	Prompt      string
	Temperature float32
	MaxTokens   int
}

// CompletionResponse is the provider-neutral completion output, including
// token usage as reported by the API.
type CompletionResponse struct {
	Text             string
	Model            string
	PromptTokens     int
	CompletionTokens int
	TotalTokens      int
}
// NewGroqClient builds a Groq API client, defaulting BaseURL to Groq's
// OpenAI-compatible endpoint when unset.
func NewGroqClient(config GroqConfig) *GroqClient {
	if config.BaseURL == "" {
		config.BaseURL = "https://api.groq.com/openai/v1"
	}
	cfg := openai.DefaultConfig(config.APIKey)
	cfg.BaseURL = config.BaseURL
	return &GroqClient{
		client: openai.NewClientWithConfig(cfg),
		config: config,
	}
}
// TestConnection verifies the API key and endpoint by listing available
// models; any API error is reported as a connection failure.
func (g *GroqClient) TestConnection(ctx context.Context) error {
	if _, err := g.client.ListModels(ctx); err != nil {
		return fmt.Errorf("failed to connect to Groq API: %w", err)
	}
	return nil
}
// GenerateCompletion sends a single-message chat completion request and
// returns the first choice along with token usage.
func (g *GroqClient) GenerateCompletion(ctx context.Context, req CompletionRequest) (*CompletionResponse, error) {
	// Resolve the model: explicit request > configured default > hard fallback.
	model := req.Model
	if model == "" {
		model = g.config.DefaultModel
	}
	if model == "" {
		model = "llama-3.1-8b-instant"
	}
	messages := []openai.ChatCompletionMessage{
		{Role: openai.ChatMessageRoleUser, Content: req.Prompt},
	}
	completion, err := g.client.CreateChatCompletion(ctx, openai.ChatCompletionRequest{
		Model:       model,
		Messages:    messages,
		Temperature: req.Temperature,
		MaxTokens:   req.MaxTokens,
	})
	if err != nil {
		return nil, fmt.Errorf("failed to generate completion: %w", err)
	}
	if len(completion.Choices) == 0 {
		return nil, fmt.Errorf("no completion choices returned")
	}
	return &CompletionResponse{
		Text:             completion.Choices[0].Message.Content,
		Model:            completion.Model,
		PromptTokens:     completion.Usage.PromptTokens,
		CompletionTokens: completion.Usage.CompletionTokens,
		TotalTokens:      completion.Usage.TotalTokens,
	}, nil
}
// ListModels returns the IDs of all models available to this Groq account.
func (g *GroqClient) ListModels(ctx context.Context) ([]string, error) {
	resp, err := g.client.ListModels(ctx)
	if err != nil {
		return nil, fmt.Errorf("failed to list models: %w", err)
	}
	var ids []string
	for _, m := range resp.Models {
		ids = append(ids, m.ID)
	}
	return ids, nil
}

199
plugin/ai/ollama/ollama.go Normal file
View File

@@ -0,0 +1,199 @@
package ai
import (
"bytes"
"context"
"encoding/json"
"fmt"
"io"
"net/http"
"time"
)
// OllamaClient talks to an Ollama server over its HTTP API.
type OllamaClient struct {
	httpClient *http.Client
	config     OllamaConfig
}

// OllamaConfig configures an OllamaClient.
type OllamaConfig struct {
	// Host is the base URL of the Ollama server.
	Host string
	// DefaultModel is used when a request does not name a model.
	DefaultModel string
	// Timeout bounds each HTTP request; zero selects the client default.
	Timeout time.Duration
}

// CompletionRequest is the input for generating completions.
type CompletionRequest struct {
	Model       string
	Prompt      string
	Temperature float32
	MaxTokens   int
}

// CompletionResponse is the output from generating completions.
type CompletionResponse struct {
	Text             string
	Model            string
	PromptTokens     int
	CompletionTokens int
	TotalTokens      int
}

// OllamaGenerateRequest mirrors the JSON body of POST /api/generate.
type OllamaGenerateRequest struct {
	Model  string `json:"model"`
	Prompt string `json:"prompt"`
	Stream bool   `json:"stream"`
	// Options carries model parameters such as "temperature" and "num_predict".
	Options map[string]interface{} `json:"options,omitempty"`
}

// OllamaGenerateResponse mirrors the JSON response of POST /api/generate.
type OllamaGenerateResponse struct {
	Model              string `json:"model"`
	Response           string `json:"response"`
	Done               bool   `json:"done"`
	Context            []int  `json:"context,omitempty"`
	TotalDuration      int64  `json:"total_duration,omitempty"`
	LoadDuration       int64  `json:"load_duration,omitempty"`
	PromptEvalCount    int    `json:"prompt_eval_count,omitempty"`
	PromptEvalDuration int64  `json:"prompt_eval_duration,omitempty"`
	EvalCount          int    `json:"eval_count,omitempty"`
	EvalDuration       int64  `json:"eval_duration,omitempty"`
}

// OllamaTagsResponse mirrors the JSON response of GET /api/tags.
type OllamaTagsResponse struct {
	Models []struct {
		Name   string `json:"name"`
		Model  string `json:"model"`
		Size   int64  `json:"size"`
		Digest string `json:"digest"`
	} `json:"models"`
}
// NewOllamaClient builds an Ollama client, defaulting the HTTP timeout to two
// minutes to accommodate slow local generation.
func NewOllamaClient(config OllamaConfig) *OllamaClient {
	timeout := config.Timeout
	if timeout == 0 {
		timeout = 120 * time.Second
	}
	config.Timeout = timeout
	return &OllamaClient{
		httpClient: &http.Client{Timeout: timeout},
		config:     config,
	}
}
// TestConnection checks reachability of the Ollama server by fetching its
// installed model tags.
func (o *OllamaClient) TestConnection(ctx context.Context) error {
	endpoint := fmt.Sprintf("%s/api/tags", o.config.Host)
	req, err := http.NewRequestWithContext(ctx, "GET", endpoint, nil)
	if err != nil {
		return fmt.Errorf("failed to create request: %w", err)
	}
	resp, err := o.httpClient.Do(req)
	if err != nil {
		return fmt.Errorf("failed to connect to Ollama: %w", err)
	}
	defer resp.Body.Close()
	if resp.StatusCode == http.StatusOK {
		return nil
	}
	body, _ := io.ReadAll(resp.Body)
	return fmt.Errorf("Ollama returned status %d: %s", resp.StatusCode, string(body))
}
// GenerateCompletion performs a non-streaming /api/generate call against
// Ollama and maps the result into the provider-neutral CompletionResponse.
func (o *OllamaClient) GenerateCompletion(ctx context.Context, req CompletionRequest) (*CompletionResponse, error) {
	// Resolve the model: explicit request > configured default > "llama3".
	model := req.Model
	if model == "" {
		model = o.config.DefaultModel
	}
	if model == "" {
		model = "llama3"
	}
	ollamaReq := OllamaGenerateRequest{
		Model:  model,
		Prompt: req.Prompt,
		Stream: false,
		Options: map[string]interface{}{
			"temperature": req.Temperature,
		},
	}
	// Only forward a token limit when the caller set one; otherwise an
	// explicit num_predict of 0 would be sent to the server.
	if req.MaxTokens > 0 {
		ollamaReq.Options["num_predict"] = req.MaxTokens
	}
	jsonData, err := json.Marshal(ollamaReq)
	if err != nil {
		return nil, fmt.Errorf("failed to marshal request: %w", err)
	}
	url := fmt.Sprintf("%s/api/generate", o.config.Host)
	httpReq, err := http.NewRequestWithContext(ctx, "POST", url, bytes.NewBuffer(jsonData))
	if err != nil {
		return nil, fmt.Errorf("failed to create request: %w", err)
	}
	httpReq.Header.Set("Content-Type", "application/json")
	resp, err := o.httpClient.Do(httpReq)
	if err != nil {
		// Surface a friendlier message when the caller's context deadline
		// expired. NOTE(review): a timeout from httpClient.Timeout (rather
		// than ctx) takes the generic branch below, yet the message blames
		// o.config.Timeout — confirm which timeout is intended here.
		if ctx.Err() == context.DeadlineExceeded {
			return nil, fmt.Errorf("Ollama request timed out after %.0f seconds. Try reducing the max tokens or using a smaller model", o.config.Timeout.Seconds())
		}
		return nil, fmt.Errorf("failed to call Ollama API: %w", err)
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		body, _ := io.ReadAll(resp.Body)
		return nil, fmt.Errorf("Ollama returned status %d: %s", resp.StatusCode, string(body))
	}
	var ollamaResp OllamaGenerateResponse
	if err := json.NewDecoder(resp.Body).Decode(&ollamaResp); err != nil {
		return nil, fmt.Errorf("failed to decode response: %w", err)
	}
	// Ollama reports prompt and generation token counts separately; total is
	// derived as their sum.
	return &CompletionResponse{
		Text:             ollamaResp.Response,
		Model:            ollamaResp.Model,
		PromptTokens:     ollamaResp.PromptEvalCount,
		CompletionTokens: ollamaResp.EvalCount,
		TotalTokens:      ollamaResp.PromptEvalCount + ollamaResp.EvalCount,
	}, nil
}
// ListModels returns the names of all models installed on the Ollama server.
func (o *OllamaClient) ListModels(ctx context.Context) ([]string, error) {
	endpoint := fmt.Sprintf("%s/api/tags", o.config.Host)
	req, err := http.NewRequestWithContext(ctx, "GET", endpoint, nil)
	if err != nil {
		return nil, fmt.Errorf("failed to create request: %w", err)
	}
	resp, err := o.httpClient.Do(req)
	if err != nil {
		return nil, fmt.Errorf("failed to get models: %w", err)
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		body, _ := io.ReadAll(resp.Body)
		return nil, fmt.Errorf("Ollama returned status %d: %s", resp.StatusCode, string(body))
	}
	var tags OllamaTagsResponse
	if err := json.NewDecoder(resp.Body).Decode(&tags); err != nil {
		return nil, fmt.Errorf("failed to decode models response: %w", err)
	}
	var names []string
	for _, m := range tags.Models {
		names = append(names, m.Name)
	}
	return names, nil
}

1
plugin/cron/README.md Normal file
View File

@@ -0,0 +1 @@
Forked from https://github.com/robfig/cron.

96
plugin/cron/chain.go Normal file
View File

@@ -0,0 +1,96 @@
package cron
import (
"errors"
"fmt"
"runtime"
"sync"
"time"
)
// JobWrapper decorates the given Job with some behavior.
type JobWrapper func(Job) Job

// Chain is a sequence of JobWrappers that decorates submitted jobs with
// cross-cutting behaviors like logging or synchronization.
type Chain struct {
	// wrappers are applied by Then so that the first entry is outermost.
	wrappers []JobWrapper
}
// NewChain returns a Chain consisting of the given JobWrappers.
func NewChain(c ...JobWrapper) Chain {
	return Chain{wrappers: c}
}
// Then decorates the given job with all JobWrappers in the chain.
//
// This:
//
//	NewChain(m1, m2, m3).Then(job)
//
// is equivalent to:
//
//	m1(m2(m3(job)))
func (c Chain) Then(j Job) Job {
	// Apply wrappers last-to-first so the first wrapper ends up outermost.
	for i := len(c.wrappers) - 1; i >= 0; i-- {
		j = c.wrappers[i](j)
	}
	return j
}
// Recover panics in wrapped jobs and log them with the provided logger.
func Recover(logger Logger) JobWrapper {
	return func(j Job) Job {
		return FuncJob(func() {
			// recover only works directly inside a deferred function, so the
			// entire handler must live in this closure.
			defer func() {
				if r := recover(); r != nil {
					// Capture up to 64 KiB of this goroutine's stack for the log.
					const size = 64 << 10
					buf := make([]byte, size)
					buf = buf[:runtime.Stack(buf, false)]
					// Panic values need not be errors; wrap non-error values.
					err, ok := r.(error)
					if !ok {
						err = errors.New("panic: " + fmt.Sprint(r))
					}
					logger.Error(err, "panic", "stack", "...\n"+string(buf))
				}
			}()
			j.Run()
		})
	}
}
// DelayIfStillRunning serializes jobs, delaying subsequent runs until the
// previous one is complete. Jobs running after a delay of more than a minute
// have the delay logged at Info.
func DelayIfStillRunning(logger Logger) JobWrapper {
	return func(j Job) Job {
		// One mutex per wrapped job: acquiring it serializes runs of this job
		// without affecting other jobs.
		var mu sync.Mutex
		return FuncJob(func() {
			start := time.Now()
			mu.Lock()
			defer mu.Unlock()
			// Elapsed time since start is how long this run waited on the
			// previous invocation; only long waits are worth logging.
			if dur := time.Since(start); dur > time.Minute {
				logger.Info("delay", "duration", dur)
			}
			j.Run()
		})
	}
}
// SkipIfStillRunning skips an invocation of the Job if a previous invocation is
// still running. It logs skips to the given logger at Info level.
func SkipIfStillRunning(logger Logger) JobWrapper {
	return func(j Job) Job {
		// A one-slot channel holds a single run token; owning the token
		// grants the right to run the job.
		var ch = make(chan struct{}, 1)
		ch <- struct{}{}
		return FuncJob(func() {
			select {
			case v := <-ch:
				// Token acquired: run, then return the token once finished.
				defer func() { ch <- v }()
				j.Run()
			default:
				// Token held by an in-flight invocation: skip this one.
				logger.Info("skip")
			}
		})
	}
}

239
plugin/cron/chain_test.go Normal file
View File

@@ -0,0 +1,239 @@
//nolint:all
package cron
import (
"io"
"log"
"reflect"
"sync"
"testing"
"time"
)
// appendingJob returns a Job that appends value to slice under a mutex so
// concurrent runs stay race-free.
func appendingJob(slice *[]int, value int) Job {
	var mu sync.Mutex
	return FuncJob(func() {
		mu.Lock()
		defer mu.Unlock()
		*slice = append(*slice, value)
	})
}
// appendingWrapper returns a JobWrapper that records value to slice before
// running the wrapped job, so wrapper ordering becomes observable.
func appendingWrapper(slice *[]int, value int) JobWrapper {
	return func(inner Job) Job {
		marker := appendingJob(slice, value)
		return FuncJob(func() {
			marker.Run()
			inner.Run()
		})
	}
}
// TestChain verifies wrapper ordering: the chain (append1, append2, append3)
// records its values outermost-first before the terminal job (append4).
func TestChain(t *testing.T) {
	var nums []int
	var (
		append1 = appendingWrapper(&nums, 1)
		append2 = appendingWrapper(&nums, 2)
		append3 = appendingWrapper(&nums, 3)
		append4 = appendingJob(&nums, 4)
	)
	NewChain(append1, append2, append3).Then(append4).Run()
	if !reflect.DeepEqual(nums, []int{1, 2, 3, 4}) {
		t.Error("unexpected order of calls:", nums)
	}
}
// TestChainRecover checks panic behavior with and without the Recover wrapper.
func TestChainRecover(t *testing.T) {
	panickingJob := FuncJob(func() {
		panic("panickingJob panics")
	})
	t.Run("panic exits job by default", func(*testing.T) {
		// Without Recover the panic propagates out of Run; catch it here so
		// the subtest can assert it actually happened.
		defer func() {
			if err := recover(); err == nil {
				t.Errorf("panic expected, but none received")
			}
		}()
		NewChain().Then(panickingJob).
			Run()
	})
	t.Run("Recovering JobWrapper recovers", func(*testing.T) {
		NewChain(Recover(PrintfLogger(log.New(io.Discard, "", 0)))).
			Then(panickingJob).
			Run()
	})
	// NOTE(review): despite its name, this subtest does not compose Recover
	// with the *IfStillRunning wrappers — confirm intent against upstream cron.
	t.Run("composed with the *IfStillRunning wrappers", func(*testing.T) {
		NewChain(Recover(PrintfLogger(log.New(io.Discard, "", 0)))).
			Then(panickingJob).
			Run()
	})
}
// countJob counts how many runs have started and finished, sleeping for delay
// in between so tests can observe an in-progress run.
type countJob struct {
	m       sync.Mutex
	started int
	done    int
	delay   time.Duration
}

// Run increments started, sleeps for delay, then increments done.
func (j *countJob) Run() {
	j.m.Lock()
	j.started++
	j.m.Unlock()
	time.Sleep(j.delay)
	j.m.Lock()
	j.done++
	j.m.Unlock()
}

// Started returns the number of runs that have begun.
func (j *countJob) Started() int {
	j.m.Lock()
	defer j.m.Unlock()
	return j.started
}

// Done returns the number of runs that have completed.
func (j *countJob) Done() int {
	j.m.Lock()
	defer j.m.Unlock()
	return j.done
}
// TestChainDelayIfStillRunning exercises the serializing wrapper with real
// sleeps; the millisecond timings keep the test fast but are inherently
// scheduler-sensitive.
func TestChainDelayIfStillRunning(t *testing.T) {
	t.Run("runs immediately", func(*testing.T) {
		var j countJob
		wrappedJob := NewChain(DelayIfStillRunning(DiscardLogger)).Then(&j)
		go wrappedJob.Run()
		time.Sleep(2 * time.Millisecond) // Give the job 2ms to complete.
		if c := j.Done(); c != 1 {
			t.Errorf("expected job run once, immediately, got %d", c)
		}
	})
	t.Run("second run immediate if first done", func(*testing.T) {
		var j countJob
		wrappedJob := NewChain(DelayIfStillRunning(DiscardLogger)).Then(&j)
		go func() {
			go wrappedJob.Run()
			time.Sleep(time.Millisecond)
			go wrappedJob.Run()
		}()
		time.Sleep(3 * time.Millisecond) // Give both jobs 3ms to complete.
		if c := j.Done(); c != 2 {
			t.Errorf("expected job run twice, immediately, got %d", c)
		}
	})
	t.Run("second run delayed if first not done", func(*testing.T) {
		var j countJob
		j.delay = 10 * time.Millisecond
		wrappedJob := NewChain(DelayIfStillRunning(DiscardLogger)).Then(&j)
		go func() {
			go wrappedJob.Run()
			time.Sleep(time.Millisecond)
			go wrappedJob.Run()
		}()
		// After 5ms, the first job is still in progress, and the second job was
		// run but should be waiting for it to finish.
		time.Sleep(5 * time.Millisecond)
		started, done := j.Started(), j.Done()
		if started != 1 || done != 0 {
			t.Error("expected first job started, but not finished, got", started, done)
		}
		// Verify that the second job completes.
		time.Sleep(25 * time.Millisecond)
		started, done = j.Started(), j.Done()
		if started != 2 || done != 2 {
			t.Error("expected both jobs done, got", started, done)
		}
	})
}
// TestChainSkipIfStillRunning exercises the skipping wrapper: overlapping
// invocations must be dropped, while separate jobs and sequential runs
// proceed normally. Timings are scheduler-sensitive by design.
func TestChainSkipIfStillRunning(t *testing.T) {
	t.Run("runs immediately", func(*testing.T) {
		var j countJob
		wrappedJob := NewChain(SkipIfStillRunning(DiscardLogger)).Then(&j)
		go wrappedJob.Run()
		time.Sleep(2 * time.Millisecond) // Give the job 2ms to complete.
		if c := j.Done(); c != 1 {
			t.Errorf("expected job run once, immediately, got %d", c)
		}
	})
	t.Run("second run immediate if first done", func(*testing.T) {
		var j countJob
		wrappedJob := NewChain(SkipIfStillRunning(DiscardLogger)).Then(&j)
		go func() {
			go wrappedJob.Run()
			time.Sleep(time.Millisecond)
			go wrappedJob.Run()
		}()
		time.Sleep(3 * time.Millisecond) // Give both jobs 3ms to complete.
		if c := j.Done(); c != 2 {
			t.Errorf("expected job run twice, immediately, got %d", c)
		}
	})
	t.Run("second run skipped if first not done", func(*testing.T) {
		var j countJob
		j.delay = 10 * time.Millisecond
		wrappedJob := NewChain(SkipIfStillRunning(DiscardLogger)).Then(&j)
		go func() {
			go wrappedJob.Run()
			time.Sleep(time.Millisecond)
			go wrappedJob.Run()
		}()
		// After 5ms, the first job is still in progress, and the second job was
		// already skipped.
		time.Sleep(5 * time.Millisecond)
		started, done := j.Started(), j.Done()
		if started != 1 || done != 0 {
			t.Error("expected first job started, but not finished, got", started, done)
		}
		// Verify that the first job completes and second does not run.
		time.Sleep(25 * time.Millisecond)
		started, done = j.Started(), j.Done()
		if started != 1 || done != 1 {
			t.Error("expected second job skipped, got", started, done)
		}
	})
	t.Run("skip 10 jobs on rapid fire", func(*testing.T) {
		var j countJob
		j.delay = 10 * time.Millisecond
		wrappedJob := NewChain(SkipIfStillRunning(DiscardLogger)).Then(&j)
		for i := 0; i < 11; i++ {
			go wrappedJob.Run()
		}
		time.Sleep(200 * time.Millisecond)
		done := j.Done()
		if done != 1 {
			t.Error("expected 1 jobs executed, 10 jobs dropped, got", done)
		}
	})
	// Each Then call builds its own run token, so wrapping two jobs with the
	// same chain must not make them share skip state.
	t.Run("different jobs independent", func(*testing.T) {
		var j1, j2 countJob
		j1.delay = 10 * time.Millisecond
		j2.delay = 10 * time.Millisecond
		chain := NewChain(SkipIfStillRunning(DiscardLogger))
		wrappedJob1 := chain.Then(&j1)
		wrappedJob2 := chain.Then(&j2)
		for i := 0; i < 11; i++ {
			go wrappedJob1.Run()
			go wrappedJob2.Run()
		}
		time.Sleep(100 * time.Millisecond)
		var (
			done1 = j1.Done()
			done2 = j2.Done()
		)
		if done1 != 1 || done2 != 1 {
			t.Error("expected both jobs executed once, got", done1, "and", done2)
		}
	})
}

View File

@@ -0,0 +1,27 @@
package cron
import "time"
// ConstantDelaySchedule represents a simple recurring duty cycle, e.g. "Every 5 minutes".
// It does not support jobs more frequent than once a second.
type ConstantDelaySchedule struct {
Delay time.Duration
}
// Every returns a crontab Schedule that activates once every duration.
// Delays of less than a second are not supported (will round up to 1 second).
// Any fields less than a Second are truncated.
func Every(duration time.Duration) ConstantDelaySchedule {
if duration < time.Second {
duration = time.Second
}
return ConstantDelaySchedule{
Delay: duration - time.Duration(duration.Nanoseconds())%time.Second,
}
}
// Next returns the next time this should be run.
// This rounds so that the next activation time will be on the second.
func (schedule ConstantDelaySchedule) Next(t time.Time) time.Time {
return t.Add(schedule.Delay - time.Duration(t.Nanosecond())*time.Nanosecond)
}

View File

@@ -0,0 +1,55 @@
//nolint:all
package cron
import (
"testing"
"time"
)
// TestConstantDelayNext drives Every/Next through a table of wall-clock
// cases, including hour/day/month/year wrap-around and sub-second rounding.
func TestConstantDelayNext(t *testing.T) {
	cases := []struct {
		time     string
		delay    time.Duration
		expected string
	}{
		// Simple cases
		{"Mon Jul 9 14:45 2012", 15*time.Minute + 50*time.Nanosecond, "Mon Jul 9 15:00 2012"},
		{"Mon Jul 9 14:59 2012", 15 * time.Minute, "Mon Jul 9 15:14 2012"},
		{"Mon Jul 9 14:59:59 2012", 15 * time.Minute, "Mon Jul 9 15:14:59 2012"},

		// Wrap around hours
		{"Mon Jul 9 15:45 2012", 35 * time.Minute, "Mon Jul 9 16:20 2012"},

		// Wrap around days
		{"Mon Jul 9 23:46 2012", 14 * time.Minute, "Tue Jul 10 00:00 2012"},
		{"Mon Jul 9 23:45 2012", 35 * time.Minute, "Tue Jul 10 00:20 2012"},
		{"Mon Jul 9 23:35:51 2012", 44*time.Minute + 24*time.Second, "Tue Jul 10 00:20:15 2012"},
		{"Mon Jul 9 23:35:51 2012", 25*time.Hour + 44*time.Minute + 24*time.Second, "Thu Jul 11 01:20:15 2012"},

		// Wrap around months
		{"Mon Jul 9 23:35 2012", 91*24*time.Hour + 25*time.Minute, "Thu Oct 9 00:00 2012"},

		// Wrap around minute, hour, day, month, and year
		{"Mon Dec 31 23:59:45 2012", 15 * time.Second, "Tue Jan 1 00:00:00 2013"},

		// Round to nearest second on the delay
		{"Mon Jul 9 14:45 2012", 15*time.Minute + 50*time.Nanosecond, "Mon Jul 9 15:00 2012"},

		// Round up to 1 second if the duration is less.
		{"Mon Jul 9 14:45:00 2012", 15 * time.Millisecond, "Mon Jul 9 14:45:01 2012"},

		// Round to nearest second when calculating the next time.
		{"Mon Jul 9 14:45:00.005 2012", 15 * time.Minute, "Mon Jul 9 15:00 2012"},

		// Round to nearest second for both.
		{"Mon Jul 9 14:45:00.005 2012", 15*time.Minute + 50*time.Nanosecond, "Mon Jul 9 15:00 2012"},
	}

	for _, tc := range cases {
		got := Every(tc.delay).Next(getTime(tc.time))
		want := getTime(tc.expected)
		if got != want {
			t.Errorf("%s, \"%s\": (expected) %v != %v (actual)", tc.time, tc.delay, want, got)
		}
	}
}

353
plugin/cron/cron.go Normal file
View File

@@ -0,0 +1,353 @@
package cron
import (
"context"
"sort"
"sync"
"time"
)
// Cron keeps track of any number of entries, invoking the associated func as
// specified by the schedule. It may be started, stopped, and the entries may
// be inspected while running.
type Cron struct {
	entries   []*Entry          // known entries; owned by the scheduler goroutine while running
	chain     Chain             // wrappers applied to every submitted job
	stop      chan struct{}     // tells the scheduler goroutine to exit
	add       chan *Entry       // hands new entries to a running scheduler
	remove    chan EntryID      // asks a running scheduler to drop an entry
	snapshot  chan chan []Entry // requests a copy of the entry list from a running scheduler
	running   bool              // whether the scheduler goroutine is active; guarded by runningMu
	logger    Logger            // destination for operational messages
	runningMu sync.Mutex        // protects running, nextID and (when stopped) entries
	location  *time.Location    // time zone in which schedules are interpreted
	parser    ScheduleParser    // converts spec strings into Schedules
	nextID    EntryID           // last assigned entry ID
	jobWaiter sync.WaitGroup    // tracks in-flight jobs so Stop can wait for them
}
// ScheduleParser is an interface for schedule spec parsers that return a Schedule.
type ScheduleParser interface {
	// Parse converts a spec string into a Schedule, or reports why it cannot.
	Parse(spec string) (Schedule, error)
}

// Job is an interface for submitted cron jobs.
type Job interface {
	// Run executes the job; the scheduler invokes it in its own goroutine.
	Run()
}

// Schedule describes a job's duty cycle.
type Schedule interface {
	// Next returns the next activation time, later than the given time.
	// Next is invoked initially, and then each time the job is run.
	Next(time.Time) time.Time
}

// EntryID identifies an entry within a Cron instance.
type EntryID int
// Entry consists of a schedule and the func to execute on that schedule.
type Entry struct {
	// ID is the cron-assigned ID of this entry, which may be used to look up a
	// snapshot or remove it.
	ID EntryID

	// Schedule on which this job should be run.
	Schedule Schedule

	// Next time the job will run, or the zero time if Cron has not been
	// started or this entry's schedule is unsatisfiable
	Next time.Time

	// Prev is the last time this job was run, or the zero time if never.
	Prev time.Time

	// WrappedJob is the thing to run when the Schedule is activated.
	WrappedJob Job

	// Job is the thing that was submitted to cron.
	// It is kept around so that user code that needs to get at the job later,
	// e.g. via Entries() can do so.
	Job Job
}

// Valid returns true if this is not the zero entry.
// (IDs are assigned starting at 1, so ID 0 only occurs in the zero Entry
// returned by lookups that found nothing.)
func (e Entry) Valid() bool { return e.ID != 0 }
// byTime sorts entries by their next activation time, pushing entries with
// a zero Next (unscheduled/unsatisfiable) to the end of the slice.
type byTime []*Entry

func (s byTime) Len() int      { return len(s) }
func (s byTime) Swap(i, j int) { s[i], s[j] = s[j], s[i] }

// Less orders non-zero times ascending; a zero time compares greater than
// everything (two zero times compare as equal, i.e. Less reports false).
func (s byTime) Less(i, j int) bool {
	ti, tj := s[i].Next, s[j].Next
	switch {
	case ti.IsZero():
		return false
	case tj.IsZero():
		return true
	default:
		return ti.Before(tj)
	}
}
// New returns a new Cron job runner, modified by the given options
// (applied in order).
//
// Available Settings
//
//	Time Zone
//	  Description: The time zone in which schedules are interpreted
//	  Default:     time.Local
//
//	Parser
//	  Description: Parser converts cron spec strings into cron.Schedules.
//	  Default:     Accepts this spec: https://en.wikipedia.org/wiki/Cron
//
//	Chain
//	  Description: Wrap submitted jobs to customize behavior.
//	  Default:     A chain that recovers panics and logs them to stderr.
//
// See "cron.With*" to modify the default behavior.
func New(opts ...Option) *Cron {
	// Fields not listed here (entries, running, runningMu, ...) start at
	// their useful zero values.
	c := &Cron{
		chain:    NewChain(),
		add:      make(chan *Entry),
		stop:     make(chan struct{}),
		snapshot: make(chan chan []Entry),
		remove:   make(chan EntryID),
		logger:   DefaultLogger,
		location: time.Local,
		parser:   standardParser,
	}
	for _, opt := range opts {
		opt(c)
	}
	return c
}
// FuncJob is a wrapper that turns a func() into a cron.Job.
type FuncJob func()

// Run invokes the wrapped function.
func (f FuncJob) Run() { f() }

// AddFunc adds a func to the Cron to be run on the given schedule.
// The spec is parsed using the time zone of this Cron instance as the default.
// An opaque ID is returned that can be used to later remove it.
func (c *Cron) AddFunc(spec string, cmd func()) (EntryID, error) {
	return c.AddJob(spec, FuncJob(cmd))
}
// AddJob adds a Job to the Cron to be run on the given schedule.
// The spec is parsed using the time zone of this Cron instance as the default.
// An opaque ID is returned that can be used to later remove it.
func (c *Cron) AddJob(spec string, cmd Job) (EntryID, error) {
	schedule, err := c.parser.Parse(spec)
	if err != nil {
		// Parse errors are returned unchanged; no entry is created.
		return 0, err
	}
	return c.Schedule(schedule, cmd), nil
}
// Schedule adds a Job to the Cron to be run on the given schedule.
// The job is wrapped with the configured Chain, and the returned EntryID
// can later be passed to Entry or Remove.
func (c *Cron) Schedule(schedule Schedule, cmd Job) EntryID {
	c.runningMu.Lock()
	defer c.runningMu.Unlock()

	c.nextID++
	e := &Entry{
		ID:         c.nextID,
		Schedule:   schedule,
		WrappedJob: c.chain.Then(cmd),
		Job:        cmd,
	}

	// A running scheduler owns the entry list, so hand the entry over on
	// the add channel; otherwise append directly.
	if c.running {
		c.add <- e
	} else {
		c.entries = append(c.entries, e)
	}
	return e.ID
}
// Entries returns a snapshot of the cron entries.
func (c *Cron) Entries() []Entry {
	c.runningMu.Lock()
	defer c.runningMu.Unlock()
	if !c.running {
		return c.entrySnapshot()
	}
	// While running, ask the scheduler goroutine for a consistent copy.
	reply := make(chan []Entry, 1)
	c.snapshot <- reply
	return <-reply
}

// Location gets the time zone location.
func (c *Cron) Location() *time.Location {
	return c.location
}

// Entry returns a snapshot of the given entry, or a zero Entry (Valid()
// == false) if it couldn't be found.
func (c *Cron) Entry(id EntryID) Entry {
	for _, e := range c.Entries() {
		if e.ID == id {
			return e
		}
	}
	return Entry{}
}
// Remove an entry from being run in the future.
func (c *Cron) Remove(id EntryID) {
	c.runningMu.Lock()
	defer c.runningMu.Unlock()
	// A running scheduler owns the entry list; route removal through its
	// channel. Otherwise mutate the slice directly.
	if !c.running {
		c.removeEntry(id)
		return
	}
	c.remove <- id
}

// Start the cron scheduler in its own goroutine, or no-op if already started.
func (c *Cron) Start() {
	c.runningMu.Lock()
	defer c.runningMu.Unlock()
	if !c.running {
		c.running = true
		go c.runScheduler()
	}
}
// Run the cron scheduler, or no-op if already running.
// Unlike Start, this blocks in the calling goroutine until Stop is called.
func (c *Cron) Run() {
	c.runningMu.Lock()
	if c.running {
		c.runningMu.Unlock()
		return
	}
	c.running = true
	// Unlock explicitly before entering the blocking scheduler loop; a
	// deferred unlock would hold the mutex for the scheduler's lifetime.
	c.runningMu.Unlock()
	c.runScheduler()
}
// runScheduler runs the scheduler.. this is private just due to the need to synchronize
// access to the 'running' state variable.
func (c *Cron) runScheduler() {
	c.logger.Info("start")

	// Figure out the next activation times for each entry.
	now := c.now()
	for _, entry := range c.entries {
		entry.Next = entry.Schedule.Next(now)
		c.logger.Info("schedule", "now", now, "entry", entry.ID, "next", entry.Next)
	}

	for {
		// Determine the next entry to run.
		sort.Sort(byTime(c.entries))

		var timer *time.Timer
		if len(c.entries) == 0 || c.entries[0].Next.IsZero() {
			// If there are no entries yet, just sleep - it still handles new entries
			// and stop requests.
			timer = time.NewTimer(100000 * time.Hour)
		} else {
			timer = time.NewTimer(c.entries[0].Next.Sub(now))
		}

		// Inner loop: wait for the timer or a control message. Every case
		// except a snapshot request invalidates the sort order or the timer,
		// so those cases fall through to the break and re-sort above.
		for {
			select {
			case now = <-timer.C:
				now = now.In(c.location)
				c.logger.Info("wake", "now", now)

				// Run every entry whose next time was less than now
				// (the slice is sorted, so stop at the first future entry).
				for _, e := range c.entries {
					if e.Next.After(now) || e.Next.IsZero() {
						break
					}
					c.startJob(e.WrappedJob)
					e.Prev = e.Next
					e.Next = e.Schedule.Next(now)
					c.logger.Info("run", "now", now, "entry", e.ID, "next", e.Next)
				}

			case newEntry := <-c.add:
				timer.Stop()
				now = c.now()
				newEntry.Next = newEntry.Schedule.Next(now)
				c.entries = append(c.entries, newEntry)
				c.logger.Info("added", "now", now, "entry", newEntry.ID, "next", newEntry.Next)

			case replyChan := <-c.snapshot:
				// Snapshots don't disturb the timer; keep waiting.
				replyChan <- c.entrySnapshot()
				continue

			case <-c.stop:
				timer.Stop()
				c.logger.Info("stop")
				return

			case id := <-c.remove:
				timer.Stop()
				now = c.now()
				c.removeEntry(id)
				c.logger.Info("removed", "entry", id)
			}

			break
		}
	}
}
// startJob runs the given job in a new goroutine, tracked by jobWaiter so
// that Stop can wait for it to finish.
// NOTE(review): sync.WaitGroup.Go requires a recent Go toolchain (1.25+) —
// confirm the module's minimum Go version supports it.
func (c *Cron) startJob(j Job) {
	c.jobWaiter.Go(func() {
		j.Run()
	})
}

// now returns current time in c location.
func (c *Cron) now() time.Time {
	return time.Now().In(c.location)
}
// Stop stops the cron scheduler if it is running; otherwise it does nothing.
// A context is returned so the caller can wait for running jobs to complete.
func (c *Cron) Stop() context.Context {
	c.runningMu.Lock()
	defer c.runningMu.Unlock()
	if c.running {
		c.stop <- struct{}{}
		c.running = false
	}
	// The context is cancelled only once every in-flight job has returned,
	// so callers can block on <-ctx.Done() for a clean shutdown. Each call
	// to Stop produces its own context backed by the same jobWaiter.
	ctx, cancel := context.WithCancel(context.Background())
	go func() {
		c.jobWaiter.Wait()
		cancel()
	}()
	return ctx
}
// entrySnapshot returns a copy of the current cron entry list; the copies
// are by value, so callers cannot mutate the scheduler's state.
func (c *Cron) entrySnapshot() []Entry {
	out := make([]Entry, 0, len(c.entries))
	for _, e := range c.entries {
		out = append(out, *e)
	}
	return out
}

// removeEntry drops the entry with the given id, preserving the relative
// order of the remaining entries.
func (c *Cron) removeEntry(id EntryID) {
	var kept []*Entry
	for _, e := range c.entries {
		if e.ID == id {
			continue
		}
		kept = append(kept, e)
	}
	c.entries = kept
}

702
plugin/cron/cron_test.go Normal file
View File

@@ -0,0 +1,702 @@
//nolint:all
package cron
import (
"bytes"
"fmt"
"log"
"strings"
"sync"
"sync/atomic"
"testing"
"time"
)
// Many tests schedule a job for every second, and then wait at most a second
// for it to run. This amount is just slightly larger than 1 second to
// compensate for a few milliseconds of runtime.
// (The 50ms of slack absorbs scheduler and goroutine-startup jitter.)
const OneSecond = 1*time.Second + 50*time.Millisecond
// syncWriter is a goroutine-safe in-memory writer, used to capture log
// output produced by concurrently running jobs.
type syncWriter struct {
	wr bytes.Buffer
	m  sync.Mutex
}

// Write appends data to the buffer under the lock.
func (sw *syncWriter) Write(data []byte) (int, error) {
	sw.m.Lock()
	defer sw.m.Unlock()
	return sw.wr.Write(data)
}

// String returns the buffered contents under the lock.
func (sw *syncWriter) String() string {
	sw.m.Lock()
	defer sw.m.Unlock()
	return sw.wr.String()
}
// newBufLogger returns an error-only Logger (see PrintfLogger) that
// records its output in sw for later inspection.
func newBufLogger(sw *syncWriter) Logger {
	return PrintfLogger(log.New(sw, "", log.LstdFlags))
}
// TestFuncPanicRecovery verifies that a panic raised inside an AddFunc job
// is recovered by the Recover chain wrapper and written to the logger.
func TestFuncPanicRecovery(t *testing.T) {
	var buf syncWriter
	cron := New(WithParser(secondParser),
		WithChain(Recover(newBufLogger(&buf))))
	cron.Start()
	defer cron.Stop()
	// Check the error so a bad spec fails loudly instead of silently
	// never scheduling the job.
	if _, err := cron.AddFunc("* * * * * ?", func() {
		panic("YOLO")
	}); err != nil {
		t.Fatal(err)
	}

	// A single-case select over time.After is just a timed wait; receive
	// directly, then assert the recovered panic reached the logger.
	<-time.After(OneSecond)
	if !strings.Contains(buf.String(), "YOLO") {
		t.Error("expected a panic to be logged, got none")
	}
}
// DummyJob is a Job whose Run always panics, for exercising the Recover
// chain wrapper.
type DummyJob struct{}

func (DummyJob) Run() {
	panic("YOLO")
}
// TestJobPanicRecovery verifies that a panic raised by a Job added via
// AddJob is recovered by the Recover chain wrapper and logged.
func TestJobPanicRecovery(t *testing.T) {
	var job DummyJob
	var buf syncWriter
	cron := New(WithParser(secondParser),
		WithChain(Recover(newBufLogger(&buf))))
	cron.Start()
	defer cron.Stop()
	// Check the error so a bad spec fails loudly instead of silently
	// never scheduling the job.
	if _, err := cron.AddJob("* * * * * ?", job); err != nil {
		t.Fatal(err)
	}

	// Wait one tick, then assert the recovered panic reached the logger.
	<-time.After(OneSecond)
	if !strings.Contains(buf.String(), "YOLO") {
		t.Error("expected a panic to be logged, got none")
	}
}
// Start and stop cron with no entries.
func TestNoEntries(t *testing.T) {
	cron := newWithSeconds()
	cron.Start()

	// stop() runs Stop in a goroutine and signals when it returns.
	select {
	case <-time.After(OneSecond):
		t.Fatal("expected cron will be stopped immediately")
	case <-stop(cron):
	}
}

// Start, stop, then add an entry. Verify entry doesn't run.
func TestStopCausesJobsToNotRun(t *testing.T) {
	wg := &sync.WaitGroup{}
	wg.Add(1)

	cron := newWithSeconds()
	cron.Start()
	cron.Stop()
	cron.AddFunc("* * * * * ?", func() { wg.Done() })

	select {
	case <-time.After(OneSecond):
		// No job ran!
	case <-wait(wg):
		t.Fatal("expected stopped cron does not run any job")
	}
}
// Add a job, start cron, expect it runs.
func TestAddBeforeRunning(t *testing.T) {
	wg := &sync.WaitGroup{}
	wg.Add(1)

	cron := newWithSeconds()
	cron.AddFunc("* * * * * ?", func() { wg.Done() })
	cron.Start()
	defer cron.Stop()

	// Give cron 2 seconds to run our job (which is always activated).
	select {
	case <-time.After(OneSecond):
		t.Fatal("expected job runs")
	case <-wait(wg):
	}
}

// Start cron, add a job, expect it runs.
func TestAddWhileRunning(t *testing.T) {
	wg := &sync.WaitGroup{}
	wg.Add(1)

	cron := newWithSeconds()
	cron.Start()
	defer cron.Stop()
	cron.AddFunc("* * * * * ?", func() { wg.Done() })

	select {
	case <-time.After(OneSecond):
		t.Fatal("expected job runs")
	case <-wait(wg):
	}
}

// Test for #34. Adding a job after calling start results in multiple job invocations
func TestAddWhileRunningWithDelay(t *testing.T) {
	cron := newWithSeconds()
	cron.Start()
	defer cron.Stop()
	time.Sleep(5 * time.Second)
	var calls int64
	// atomic counter: the job goroutine and the test read it concurrently.
	cron.AddFunc("* * * * * *", func() { atomic.AddInt64(&calls, 1) })

	<-time.After(OneSecond)
	if atomic.LoadInt64(&calls) != 1 {
		t.Errorf("called %d times, expected 1\n", calls)
	}
}
// Add a job, remove a job, start cron, expect nothing runs.
func TestRemoveBeforeRunning(t *testing.T) {
	wg := &sync.WaitGroup{}
	wg.Add(1)

	cron := newWithSeconds()
	id, _ := cron.AddFunc("* * * * * ?", func() { wg.Done() })
	cron.Remove(id)
	cron.Start()
	defer cron.Stop()

	select {
	case <-time.After(OneSecond):
		// Success, shouldn't run
	case <-wait(wg):
		t.FailNow()
	}
}

// Start cron, add a job, remove it, expect it doesn't run.
func TestRemoveWhileRunning(t *testing.T) {
	wg := &sync.WaitGroup{}
	wg.Add(1)

	cron := newWithSeconds()
	cron.Start()
	defer cron.Stop()
	id, _ := cron.AddFunc("* * * * * ?", func() { wg.Done() })
	cron.Remove(id)

	select {
	case <-time.After(OneSecond):
		// Success: the removed job must not have fired within a second.
	case <-wait(wg):
		t.FailNow()
	}
}
// Test timing with Entries.
func TestSnapshotEntries(t *testing.T) {
	wg := &sync.WaitGroup{}
	wg.Add(1)

	cron := New()
	cron.AddFunc("@every 2s", func() { wg.Done() })
	cron.Start()
	defer cron.Stop()

	// Cron should fire in 2 seconds. After 1 second, call Entries.
	select {
	case <-time.After(OneSecond):
		cron.Entries()
	}

	// Even though Entries was called, the cron should fire at the 2 second mark.
	select {
	case <-time.After(OneSecond):
		t.Error("expected job runs at 2 second mark")
	case <-wait(wg):
	}
}

// Test that the entries are correctly sorted.
// Add a bunch of long-in-the-future entries, and an immediate entry, and ensure
// that the immediate entry runs immediately.
// Also: Test that multiple jobs run in the same instant.
func TestMultipleEntries(t *testing.T) {
	wg := &sync.WaitGroup{}
	wg.Add(2)

	cron := newWithSeconds()
	cron.AddFunc("0 0 0 1 1 ?", func() {})
	cron.AddFunc("* * * * * ?", func() { wg.Done() })
	// These two are removed (one before Start, one after); if either
	// fires anyway the t.Fatal fails the test.
	id1, _ := cron.AddFunc("* * * * * ?", func() { t.Fatal() })
	id2, _ := cron.AddFunc("* * * * * ?", func() { t.Fatal() })
	cron.AddFunc("0 0 0 31 12 ?", func() {})
	cron.AddFunc("* * * * * ?", func() { wg.Done() })

	cron.Remove(id1)
	cron.Start()
	cron.Remove(id2)
	defer cron.Stop()

	select {
	case <-time.After(OneSecond):
		t.Error("expected job run in proper order")
	case <-wait(wg):
	}
}
// Test running the same job twice.
func TestRunningJobTwice(t *testing.T) {
	wg := &sync.WaitGroup{}
	wg.Add(2)

	cron := newWithSeconds()
	cron.AddFunc("0 0 0 1 1 ?", func() {})
	cron.AddFunc("0 0 0 31 12 ?", func() {})
	cron.AddFunc("* * * * * ?", func() { wg.Done() })

	cron.Start()
	defer cron.Stop()

	// The every-second job must fire twice within ~2 seconds.
	select {
	case <-time.After(2 * OneSecond):
		t.Error("expected job fires 2 times")
	case <-wait(wg):
	}
}

// TestRunningMultipleSchedules mixes spec-based entries with directly
// scheduled ConstantDelaySchedules in the same Cron.
func TestRunningMultipleSchedules(t *testing.T) {
	wg := &sync.WaitGroup{}
	wg.Add(2)

	cron := newWithSeconds()
	cron.AddFunc("0 0 0 1 1 ?", func() {})
	cron.AddFunc("0 0 0 31 12 ?", func() {})
	cron.AddFunc("* * * * * ?", func() { wg.Done() })
	cron.Schedule(Every(time.Minute), FuncJob(func() {}))
	cron.Schedule(Every(time.Second), FuncJob(func() { wg.Done() }))
	cron.Schedule(Every(time.Hour), FuncJob(func() {}))

	cron.Start()
	defer cron.Stop()

	select {
	case <-time.After(2 * OneSecond):
		t.Error("expected job fires 2 times")
	case <-wait(wg):
	}
}
// Test that the cron is run in the local time zone (as opposed to UTC).
func TestLocalTimezone(t *testing.T) {
	wg := &sync.WaitGroup{}
	wg.Add(2)

	now := time.Now()
	// FIX: Issue #205
	// This calculation doesn't work in seconds 58 or 59.
	// Take the easy way out and sleep.
	if now.Second() >= 58 {
		time.Sleep(2 * time.Second)
		now = time.Now()
	}
	// Schedule for the next two seconds of the current local wall clock.
	spec := fmt.Sprintf("%d,%d %d %d %d %d ?",
		now.Second()+1, now.Second()+2, now.Minute(), now.Hour(), now.Day(), now.Month())

	cron := newWithSeconds()
	cron.AddFunc(spec, func() { wg.Done() })
	cron.Start()
	defer cron.Stop()

	select {
	case <-time.After(OneSecond * 2):
		t.Error("expected job fires 2 times")
	case <-wait(wg):
	}
}

// Test that the cron is run in the given time zone (as opposed to local).
func TestNonLocalTimezone(t *testing.T) {
	wg := &sync.WaitGroup{}
	wg.Add(2)

	loc, err := time.LoadLocation("Atlantic/Cape_Verde")
	if err != nil {
		fmt.Printf("Failed to load time zone Atlantic/Cape_Verde: %+v", err)
		t.Fail()
	}

	now := time.Now().In(loc)
	// FIX: Issue #205
	// This calculation doesn't work in seconds 58 or 59.
	// Take the easy way out and sleep.
	if now.Second() >= 58 {
		time.Sleep(2 * time.Second)
		now = time.Now().In(loc)
	}
	spec := fmt.Sprintf("%d,%d %d %d %d %d ?",
		now.Second()+1, now.Second()+2, now.Minute(), now.Hour(), now.Day(), now.Month())

	cron := New(WithLocation(loc), WithParser(secondParser))
	cron.AddFunc(spec, func() { wg.Done() })
	cron.Start()
	defer cron.Stop()

	select {
	case <-time.After(OneSecond * 2):
		t.Error("expected job fires 2 times")
	case <-wait(wg):
	}
}

// Test that calling stop before start silently returns without
// blocking the stop channel.
func TestStopWithoutStart(t *testing.T) {
	cron := New()
	cron.Stop()
}
// testJob signals its WaitGroup when run; name lets tests identify it
// later via Entry.Job.
type testJob struct {
	wg   *sync.WaitGroup
	name string
}

func (t testJob) Run() {
	t.wg.Done()
}

// Test that adding an invalid job spec returns an error
func TestInvalidJobSpec(t *testing.T) {
	cron := New()
	_, err := cron.AddJob("this will not parse", nil)
	if err == nil {
		t.Errorf("expected an error with invalid spec, got nil")
	}
}
// Test blocking run method behaves as Start()
func TestBlockingRun(t *testing.T) {
	wg := &sync.WaitGroup{}
	wg.Add(1)

	cron := newWithSeconds()
	cron.AddFunc("* * * * * ?", func() { wg.Done() })

	var unblockChan = make(chan struct{})

	go func() {
		cron.Run()
		// Only reached if Run() returns, which it must not while running.
		close(unblockChan)
	}()
	defer cron.Stop()

	select {
	case <-time.After(OneSecond):
		t.Error("expected job fires")
	case <-unblockChan:
		t.Error("expected that Run() blocks")
	case <-wait(wg):
	}
}

// Test that double-running is a no-op
func TestStartNoop(t *testing.T) {
	var tickChan = make(chan struct{}, 2)

	cron := newWithSeconds()
	cron.AddFunc("* * * * * ?", func() {
		tickChan <- struct{}{}
	})

	cron.Start()
	defer cron.Stop()

	// Wait for the first firing to ensure the runner is going
	<-tickChan

	cron.Start()

	<-tickChan

	// Fail if this job fires again in a short period, indicating a double-run
	select {
	case <-time.After(time.Millisecond):
	case <-tickChan:
		t.Error("expected job fires exactly twice")
	}
}
// Simple test using Runnables.
func TestJob(t *testing.T) {
	wg := &sync.WaitGroup{}
	wg.Add(1)

	cron := newWithSeconds()
	cron.AddJob("0 0 0 30 Feb ?", testJob{wg, "job0"})
	cron.AddJob("0 0 0 1 1 ?", testJob{wg, "job1"})
	job2, _ := cron.AddJob("* * * * * ?", testJob{wg, "job2"})
	cron.AddJob("1 0 0 1 1 ?", testJob{wg, "job3"})
	cron.Schedule(Every(5*time.Second+5*time.Nanosecond), testJob{wg, "job4"})
	job5 := cron.Schedule(Every(5*time.Minute), testJob{wg, "job5"})

	// Test getting an Entry pre-Start.
	if actualName := cron.Entry(job2).Job.(testJob).name; actualName != "job2" {
		t.Error("wrong job retrieved:", actualName)
	}
	if actualName := cron.Entry(job5).Job.(testJob).name; actualName != "job5" {
		t.Error("wrong job retrieved:", actualName)
	}

	cron.Start()
	defer cron.Stop()

	select {
	case <-time.After(OneSecond):
		t.FailNow()
	case <-wait(wg):
	}

	// Ensure the entries are in the right order.
	// (Entries() returns them sorted by next activation time.)
	expecteds := []string{"job2", "job4", "job5", "job1", "job3", "job0"}

	var actuals []string
	for _, entry := range cron.Entries() {
		actuals = append(actuals, entry.Job.(testJob).name)
	}

	for i, expected := range expecteds {
		if actuals[i] != expected {
			t.Fatalf("Jobs not in the right order. (expected) %s != %s (actual)", expecteds, actuals)
		}
	}

	// Test getting Entries.
	if actualName := cron.Entry(job2).Job.(testJob).name; actualName != "job2" {
		t.Error("wrong job retrieved:", actualName)
	}
	if actualName := cron.Entry(job5).Job.(testJob).name; actualName != "job5" {
		t.Error("wrong job retrieved:", actualName)
	}
}
// Issue #206
// Ensure that the next run of a job after removing an entry is accurate.
func TestScheduleAfterRemoval(t *testing.T) {
	var wg1 sync.WaitGroup
	var wg2 sync.WaitGroup
	wg1.Add(1)
	wg2.Add(1)

	// The first time this job is run, set a timer and remove the other job
	// 750ms later. Correct behavior would be to still run the job again in
	// 250ms, but the bug would cause it to run instead 1s later.

	var calls int
	var mu sync.Mutex // guards calls between job invocations

	cron := newWithSeconds()
	hourJob := cron.Schedule(Every(time.Hour), FuncJob(func() {}))
	cron.Schedule(Every(time.Second), FuncJob(func() {
		mu.Lock()
		defer mu.Unlock()
		switch calls {
		case 0:
			wg1.Done()
			calls++
		case 1:
			time.Sleep(750 * time.Millisecond)
			cron.Remove(hourJob)
			calls++
		case 2:
			calls++
			wg2.Done()
		case 3:
			panic("unexpected 3rd call")
		}
	}))

	cron.Start()
	defer cron.Stop()

	// the first run might be any length of time 0 - 1s, since the schedule
	// rounds to the second. wait for the first run to true up.
	wg1.Wait()

	select {
	case <-time.After(2 * OneSecond):
		t.Error("expected job fires 2 times")
	case <-wait(&wg2):
	}
}
// ZeroSchedule is a Schedule that never fires: Next always returns the
// zero time, which the scheduler treats as unsatisfiable.
type ZeroSchedule struct{}

func (*ZeroSchedule) Next(time.Time) time.Time {
	return time.Time{}
}

// Tests that job without time does not run
func TestJobWithZeroTimeDoesNotRun(t *testing.T) {
	cron := newWithSeconds()
	var calls int64
	cron.AddFunc("* * * * * *", func() { atomic.AddInt64(&calls, 1) })
	cron.Schedule(new(ZeroSchedule), FuncJob(func() { t.Error("expected zero task will not run") }))
	cron.Start()
	defer cron.Stop()
	<-time.After(OneSecond)
	if atomic.LoadInt64(&calls) != 1 {
		t.Errorf("called %d times, expected 1\n", calls)
	}
}
// TestStopAndWait covers the context returned by Stop: it must become done
// only after every in-flight job has finished, and every context handed out
// by repeated Stop calls must observe completion.
func TestStopAndWait(t *testing.T) {
	t.Run("nothing running, returns immediately", func(*testing.T) {
		cron := newWithSeconds()
		cron.Start()
		ctx := cron.Stop()
		select {
		case <-ctx.Done():
		case <-time.After(time.Millisecond):
			t.Error("context was not done immediately")
		}
	})

	t.Run("repeated calls to Stop", func(*testing.T) {
		cron := newWithSeconds()
		cron.Start()
		_ = cron.Stop()
		time.Sleep(time.Millisecond)
		ctx := cron.Stop()
		select {
		case <-ctx.Done():
		case <-time.After(time.Millisecond):
			t.Error("context was not done immediately")
		}
	})

	t.Run("a couple fast jobs added, still returns immediately", func(*testing.T) {
		cron := newWithSeconds()
		cron.AddFunc("* * * * * *", func() {})
		cron.Start()
		cron.AddFunc("* * * * * *", func() {})
		cron.AddFunc("* * * * * *", func() {})
		cron.AddFunc("* * * * * *", func() {})
		time.Sleep(time.Second)
		ctx := cron.Stop()
		select {
		case <-ctx.Done():
		case <-time.After(time.Millisecond):
			t.Error("context was not done immediately")
		}
	})

	t.Run("a couple fast jobs and a slow job added, waits for slow job", func(*testing.T) {
		cron := newWithSeconds()
		cron.AddFunc("* * * * * *", func() {})
		cron.Start()
		cron.AddFunc("* * * * * *", func() { time.Sleep(2 * time.Second) })
		cron.AddFunc("* * * * * *", func() {})
		time.Sleep(time.Second)

		ctx := cron.Stop()

		// Verify that it is not done for at least 750ms
		select {
		case <-ctx.Done():
			t.Error("context was done too quickly immediately")
		case <-time.After(750 * time.Millisecond):
			// expected, because the job sleeping for 1 second is still running
		}

		// Verify that it IS done in the next 500ms (giving 250ms buffer)
		select {
		case <-ctx.Done():
			// expected
		case <-time.After(1500 * time.Millisecond):
			t.Error("context not done after job should have completed")
		}
	})

	t.Run("repeated calls to stop, waiting for completion and after", func(*testing.T) {
		cron := newWithSeconds()
		cron.AddFunc("* * * * * *", func() {})
		cron.AddFunc("* * * * * *", func() { time.Sleep(2 * time.Second) })
		cron.Start()
		cron.AddFunc("* * * * * *", func() {})
		time.Sleep(time.Second)
		ctx := cron.Stop()
		ctx2 := cron.Stop()

		// Verify that it is not done for at least 1500ms
		select {
		case <-ctx.Done():
			t.Error("context was done too quickly immediately")
		case <-ctx2.Done():
			t.Error("context2 was done too quickly immediately")
		case <-time.After(1500 * time.Millisecond):
			// expected, because the job sleeping for 2 seconds is still running
		}

		// Verify that it IS done in the next 1s (giving 500ms buffer)
		select {
		case <-ctx.Done():
			// expected
		case <-time.After(time.Second):
			t.Error("context not done after job should have completed")
		}

		// Verify that ctx2 is also done.
		select {
		case <-ctx2.Done():
			// expected
		case <-time.After(time.Millisecond):
			t.Error("context2 not done even though context1 is")
		}

		// Verify that a new context retrieved from stop is immediately done.
		ctx3 := cron.Stop()
		select {
		case <-ctx3.Done():
			// expected
		case <-time.After(time.Millisecond):
			t.Error("context not done even when cron Stop is completed")
		}
	})
}
// TestMultiThreadedStartAndStop smoke-tests Run and Stop racing from
// different goroutines; it is most meaningful when run with -race.
func TestMultiThreadedStartAndStop(t *testing.T) {
	cron := New()
	go cron.Run()
	time.Sleep(2 * time.Millisecond)
	cron.Stop()
}
func wait(wg *sync.WaitGroup) chan bool {
ch := make(chan bool)
go func() {
wg.Wait()
ch <- true
}()
return ch
}
// stop calls cron.Stop in a goroutine and reports on the returned channel
// once Stop has returned, so tests can select on it with a timeout.
func stop(cron *Cron) chan bool {
	ch := make(chan bool)
	go func() {
		cron.Stop()
		ch <- true
	}()
	return ch
}

// newWithSeconds returns a Cron with the seconds field enabled.
func newWithSeconds() *Cron {
	return New(WithParser(secondParser), WithChain())
}

86
plugin/cron/logger.go Normal file
View File

@@ -0,0 +1,86 @@
package cron
import (
"io"
"log"
"os"
"strings"
"time"
)
// DefaultLogger is used by Cron if none is specified.
var DefaultLogger = PrintfLogger(log.New(os.Stdout, "cron: ", log.LstdFlags))

// DiscardLogger can be used by callers to discard all log messages.
var DiscardLogger = PrintfLogger(log.New(io.Discard, "", 0))

// Logger is the interface used in this package for logging, so that any backend
// can be plugged in. It is a subset of the github.com/go-logr/logr interface.
type Logger interface {
	// Info logs routine messages about cron's operation.
	// keysAndValues are alternating key/value pairs, rendered logfmt-style.
	Info(msg string, keysAndValues ...interface{})
	// Error logs an error condition.
	Error(err error, msg string, keysAndValues ...interface{})
}
// PrintfLogger wraps a Printf-based logger (such as the standard library "log")
// into an implementation of the Logger interface which logs errors only.
func PrintfLogger(l interface{ Printf(string, ...interface{}) }) Logger {
	return printfLogger{l, false}
}

// VerbosePrintfLogger wraps a Printf-based logger (such as the standard library
// "log") into an implementation of the Logger interface which logs everything
// (Info messages as well as errors).
func VerbosePrintfLogger(l interface{ Printf(string, ...interface{}) }) Logger {
	return printfLogger{l, true}
}
// printfLogger adapts any Printf-style logger to the Logger interface.
type printfLogger struct {
	logger  interface{ Printf(string, ...interface{}) }
	logInfo bool // when false, Info messages are suppressed
}

// Info logs routine messages, but only when verbose logging was requested.
func (pl printfLogger) Info(msg string, keysAndValues ...interface{}) {
	if !pl.logInfo {
		return
	}
	kv := formatTimes(keysAndValues)
	args := append([]interface{}{msg}, kv...)
	pl.logger.Printf(formatString(len(kv)), args...)
}

// Error always logs, inserting the error as an "error" key/value pair
// ahead of the caller-supplied pairs.
func (pl printfLogger) Error(err error, msg string, keysAndValues ...interface{}) {
	kv := formatTimes(keysAndValues)
	args := append([]interface{}{msg, "error", err}, kv...)
	pl.logger.Printf(formatString(len(kv)+2), args...)
}
// formatString returns a logfmt-like format string for the number of
// key/values: "%s" for the message followed by one "%v=%v" per pair,
// comma-separated.
func formatString(numKeysAndValues int) string {
	if numKeysAndValues <= 0 {
		return "%s"
	}
	pairs := make([]string, numKeysAndValues/2)
	for i := range pairs {
		pairs[i] = "%v=%v"
	}
	return "%s, " + strings.Join(pairs, ", ")
}
// formatTimes formats any time.Time values as RFC3339.
func formatTimes(keysAndValues []interface{}) []interface{} {
var formattedArgs []interface{}
for _, arg := range keysAndValues {
if t, ok := arg.(time.Time); ok {
arg = t.Format(time.RFC3339)
}
formattedArgs = append(formattedArgs, arg)
}
return formattedArgs
}

45
plugin/cron/option.go Normal file
View File

@@ -0,0 +1,45 @@
package cron
import (
"time"
)
// Option represents a modification to the default behavior of a Cron.
// Options are applied by New in the order given.
type Option func(*Cron)

// WithLocation overrides the timezone of the cron instance.
func WithLocation(loc *time.Location) Option {
	return func(c *Cron) {
		c.location = loc
	}
}

// WithSeconds overrides the parser used for interpreting job schedules to
// include a seconds field as the first one.
func WithSeconds() Option {
	return WithParser(NewParser(
		Second | Minute | Hour | Dom | Month | Dow | Descriptor,
	))
}

// WithParser overrides the parser used for interpreting job schedules.
func WithParser(p ScheduleParser) Option {
	return func(c *Cron) {
		c.parser = p
	}
}

// WithChain specifies Job wrappers to apply to all jobs added to this cron.
// Refer to the Chain* functions in this package for provided wrappers.
func WithChain(wrappers ...JobWrapper) Option {
	return func(c *Cron) {
		c.chain = NewChain(wrappers...)
	}
}

// WithLogger uses the provided logger.
func WithLogger(logger Logger) Option {
	return func(c *Cron) {
		c.logger = logger
	}
}

View File

@@ -0,0 +1,43 @@
//nolint:all
package cron
import (
"log"
"strings"
"testing"
"time"
)
// TestWithLocation checks that WithLocation replaces the default timezone.
func TestWithLocation(t *testing.T) {
	c := New(WithLocation(time.UTC))
	if c.location != time.UTC {
		t.Errorf("expected UTC, got %v", c.location)
	}
}

// TestWithParser checks that WithParser installs the provided parser.
func TestWithParser(t *testing.T) {
	var parser = NewParser(Dow)
	c := New(WithParser(parser))
	if c.parser != parser {
		t.Error("expected provided parser")
	}
}

// TestWithVerboseLogger checks that Info-level scheduler events reach a
// logger installed via WithLogger(VerbosePrintfLogger(...)).
func TestWithVerboseLogger(t *testing.T) {
	var buf syncWriter
	var logger = log.New(&buf, "", log.LstdFlags)
	c := New(WithLogger(VerbosePrintfLogger(logger)))
	if c.logger.(printfLogger).logger != logger {
		t.Error("expected provided logger")
	}

	c.AddFunc("@every 1s", func() {})
	c.Start()
	time.Sleep(OneSecond)
	c.Stop()
	out := buf.String()
	if !strings.Contains(out, "schedule,") ||
		!strings.Contains(out, "run,") {
		t.Error("expected to see some actions, got:", out)
	}
}

437
plugin/cron/parser.go Normal file
View File

@@ -0,0 +1,437 @@
package cron
import (
"math"
"strconv"
"strings"
"time"
"github.com/pkg/errors"
)
// Configuration options for creating a parser. Most options specify which
// fields should be included, while others enable features. If a field is not
// included the parser will assume a default value. These options do not change
// the order fields are parse in.
type ParseOption int

const (
	Second         ParseOption = 1 << iota // Seconds field, default 0
	SecondOptional                         // Optional seconds field, default 0
	Minute                                 // Minutes field, default 0
	Hour                                   // Hours field, default 0
	Dom                                    // Day of month field, default *
	Month                                  // Month field, default *
	Dow                                    // Day of week field, default *
	DowOptional                            // Optional day of week field, default *
	Descriptor                             // Allow descriptors such as @monthly, @weekly, etc.
)

// places lists the time fields in the order they appear in a spec string.
// defaults holds the value used for each field when it is not configured;
// the two slices are index-aligned.
var places = []ParseOption{
	Second,
	Minute,
	Hour,
	Dom,
	Month,
	Dow,
}

var defaults = []string{
	"0",
	"0",
	"0",
	"*",
	"*",
	"*",
}

// A custom Parser that can be configured.
type Parser struct {
	options ParseOption // bitmask of the ParseOption flags chosen at construction
}
// NewParser creates a Parser with custom options.
//
// It panics if more than one Optional is given, since it would be impossible to
// correctly infer which optional is provided or missing in general.
//
// Examples
//
//	// Standard parser without descriptors
//	specParser := NewParser(Minute | Hour | Dom | Month | Dow)
//	sched, err := specParser.Parse("0 0 15 */3 *")
//
//	// Same as above, just excludes time fields
//	specParser := NewParser(Dom | Month | Dow)
//	sched, err := specParser.Parse("15 */3 *")
//
//	// Same as above, just makes Dow optional
//	specParser := NewParser(Dom | Month | DowOptional)
//	sched, err := specParser.Parse("15 */3")
func NewParser(options ParseOption) Parser {
	optionalCount := 0
	if options&DowOptional != 0 {
		optionalCount++
	}
	if options&SecondOptional != 0 {
		optionalCount++
	}
	if optionalCount > 1 {
		panic("multiple optionals may not be configured")
	}
	return Parser{options: options}
}
// Parse returns a new crontab schedule representing the given spec.
// It returns a descriptive error if the spec is not valid.
// It accepts crontab specs and features configured by NewParser.
func (p Parser) Parse(spec string) (Schedule, error) {
	if len(spec) == 0 {
		return nil, errors.New("empty spec string")
	}

	// Extract timezone if present.
	var loc = time.Local
	if strings.HasPrefix(spec, "TZ=") || strings.HasPrefix(spec, "CRON_TZ=") {
		i := strings.Index(spec, " ")
		if i < 0 {
			// A bare "TZ=..." with no schedule after it used to slice with a
			// negative index and panic; report it as a parse error instead.
			return nil, errors.New("missing schedule after timezone: " + spec)
		}
		eq := strings.Index(spec, "=")
		var err error
		if loc, err = time.LoadLocation(spec[eq+1 : i]); err != nil {
			return nil, errors.Wrap(err, "provided bad location")
		}
		spec = strings.TrimSpace(spec[i:])
	}

	// Handle named schedules (descriptors), if configured.
	if strings.HasPrefix(spec, "@") {
		if p.options&Descriptor == 0 {
			return nil, errors.New("descriptors not enabled")
		}
		return parseDescriptor(spec, loc)
	}

	// Split on whitespace.
	fields := strings.Fields(spec)

	// Validate & fill in any omitted or optional fields.
	var err error
	fields, err = normalizeFields(fields, p.options)
	if err != nil {
		return nil, err
	}

	// field parses one field, remembering the first error encountered so the
	// six calls below need no per-call error plumbing.
	field := func(field string, r bounds) uint64 {
		if err != nil {
			return 0
		}
		var bits uint64
		bits, err = getField(field, r)
		return bits
	}
	var (
		second     = field(fields[0], seconds)
		minute     = field(fields[1], minutes)
		hour       = field(fields[2], hours)
		dayofmonth = field(fields[3], dom)
		month      = field(fields[4], months)
		dayofweek  = field(fields[5], dow)
	)
	if err != nil {
		return nil, err
	}

	return &SpecSchedule{
		Second:   second,
		Minute:   minute,
		Hour:     hour,
		Dom:      dayofmonth,
		Month:    month,
		Dow:      dayofweek,
		Location: loc,
	}, nil
}
// normalizeFields takes a subset set of the time fields and returns the full set
// with defaults (zeroes) populated for unset fields.
//
// As part of performing this function, it also validates that the provided
// fields are compatible with the configured options.
func normalizeFields(fields []string, options ParseOption) ([]string, error) {
	// Validate optionals & add their field to options.
	optionals := 0
	if options&SecondOptional > 0 {
		options |= Second
		optionals++
	}
	if options&DowOptional > 0 {
		options |= Dow
		optionals++
	}
	if optionals > 1 {
		return nil, errors.New("multiple optionals may not be configured")
	}

	// Figure out how many fields we need.
	max := 0
	for _, place := range places {
		if options&place > 0 {
			max++
		}
	}
	min := max - optionals

	// Validate number of fields.
	if count := len(fields); count < min || count > max {
		if min == max {
			// Consistent with the ranged branch below: tell the caller how
			// many fields were expected, not just that the count was wrong.
			return nil, errors.New("incorrect number of fields, expected " + strconv.Itoa(min))
		}
		return nil, errors.New("incorrect number of fields, expected " + strconv.Itoa(min) + "-" + strconv.Itoa(max))
	}

	// Populate the optional field if not provided.
	if min < max && len(fields) == min {
		switch {
		case options&DowOptional > 0:
			fields = append(fields, defaults[5]) // TODO: improve access to default
		case options&SecondOptional > 0:
			fields = append([]string{defaults[0]}, fields...)
		default:
			return nil, errors.New("unexpected optional field")
		}
	}

	// Populate all fields not part of options with their defaults.
	n := 0
	expandedFields := make([]string, len(places))
	copy(expandedFields, defaults)
	for i, place := range places {
		if options&place > 0 {
			expandedFields[i] = fields[n]
			n++
		}
	}
	return expandedFields, nil
}
// standardParser accepts the classic 5-field crontab format plus descriptors.
var standardParser = NewParser(
	Minute | Hour | Dom | Month | Dow | Descriptor,
)

// ParseStandard returns a new crontab schedule representing the given
// standardSpec (https://en.wikipedia.org/wiki/Cron). It requires 5 entries
// representing: minute, hour, day of month, month and day of week, in that
// order. It returns a descriptive error if the spec is not valid.
//
// It accepts
//   - Standard crontab specs, e.g. "* * * * ?"
//   - Descriptors, e.g. "@midnight", "@every 1h30m"
func ParseStandard(standardSpec string) (Schedule, error) {
	return standardParser.Parse(standardSpec)
}
// getField returns an Int with the bits set representing all of the times that
// the field represents or error parsing field value. A "field" is a comma-separated
// list of "ranges".
func getField(field string, r bounds) (uint64, error) {
	isComma := func(c rune) bool { return c == ',' }
	// FieldsFunc (rather than Split) skips empty elements between commas.
	var result uint64
	for _, rangeExpr := range strings.FieldsFunc(field, isComma) {
		bits, err := getRange(rangeExpr, r)
		if err != nil {
			return result, err
		}
		result |= bits
	}
	return result, nil
}
// getRange returns the bits indicated by the given expression:
//
//	number | number "-" number [ "/" number ]
//
// or error parsing range.
func getRange(expr string, r bounds) (uint64, error) {
	var (
		start, end, step uint
		rangeAndStep     = strings.Split(expr, "/")
		lowAndHigh       = strings.Split(rangeAndStep[0], "-")
		singleDigit      = len(lowAndHigh) == 1
		err              error
	)

	// extra carries starBit when the range was written as a wildcard, so
	// downstream code can distinguish "*" from an explicit full range.
	var extra uint64
	if lowAndHigh[0] == "*" || lowAndHigh[0] == "?" {
		start = r.min
		end = r.max
		extra = starBit
	} else {
		start, err = parseIntOrName(lowAndHigh[0], r.names)
		if err != nil {
			return 0, err
		}
		switch len(lowAndHigh) {
		case 1:
			end = start
		case 2:
			end, err = parseIntOrName(lowAndHigh[1], r.names)
			if err != nil {
				return 0, err
			}
		default:
			return 0, errors.New("too many hyphens: " + expr)
		}
	}

	switch len(rangeAndStep) {
	case 1:
		step = 1
	case 2:
		step, err = mustParseInt(rangeAndStep[1])
		if err != nil {
			return 0, err
		}
		// Special handling: "N/step" means "N-max/step".
		if singleDigit {
			end = r.max
		}
		// A step > 1 yields a sparse set, which no longer counts as a wildcard.
		if step > 1 {
			extra = 0
		}
	default:
		return 0, errors.New("too many slashes: " + expr)
	}

	// Bounds/step validation happens last so the error can name the expression.
	if start < r.min {
		return 0, errors.New("beginning of range below minimum: " + expr)
	}
	if end > r.max {
		return 0, errors.New("end of range above maximum: " + expr)
	}
	if start > end {
		return 0, errors.New("beginning of range after end: " + expr)
	}
	if step == 0 {
		return 0, errors.New("step cannot be zero: " + expr)
	}

	return getBits(start, end, step) | extra, nil
}
// parseIntOrName returns the (possibly-named) integer contained in expr.
func parseIntOrName(expr string, names map[string]uint) (uint, error) {
	// A lookup in a nil map safely yields the zero value, so no nil check is needed.
	if value, ok := names[strings.ToLower(expr)]; ok {
		return value, nil
	}
	return mustParseInt(expr)
}
// mustParseInt parses the given expression as an int or returns an error.
func mustParseInt(expr string) (uint, error) {
	n, err := strconv.Atoi(expr)
	switch {
	case err != nil:
		return 0, errors.Wrap(err, "failed to parse number")
	case n < 0:
		return 0, errors.New("number must be positive")
	}
	return uint(n), nil
}
// getBits sets all bits in the range [min, max], modulo the given step size.
func getBits(min, max, step uint) uint64 {
	// A contiguous range (step 1) is two masks ANDed together.
	if step == 1 {
		lowMask := uint64(math.MaxUint64) << min
		highMask := ^(uint64(math.MaxUint64) << (max + 1))
		return lowMask & highMask
	}
	// Sparse ranges fall back to setting each bit individually.
	var bits uint64
	for i := min; i <= max; i += step {
		bits |= 1 << i
	}
	return bits
}
// all returns all bits within the given bounds, plus starBit so downstream
// code (dayMatches) can tell the field was a wildcard.
func all(r bounds) uint64 {
	return getBits(r.min, r.max, 1) | starBit
}
// parseDescriptor returns a predefined schedule for the expression, or error if none matches.
// Fixed descriptors map to the minimum value of each restricted field (e.g.
// @daily fires at second/minute/hour minimums — midnight); "@every d" maps to
// a ConstantDelaySchedule.
func parseDescriptor(descriptor string, loc *time.Location) (Schedule, error) {
	switch descriptor {
	case "@yearly", "@annually":
		return &SpecSchedule{
			Second:   1 << seconds.min,
			Minute:   1 << minutes.min,
			Hour:     1 << hours.min,
			Dom:      1 << dom.min,
			Month:    1 << months.min,
			Dow:      all(dow),
			Location: loc,
		}, nil

	case "@monthly":
		return &SpecSchedule{
			Second:   1 << seconds.min,
			Minute:   1 << minutes.min,
			Hour:     1 << hours.min,
			Dom:      1 << dom.min,
			Month:    all(months),
			Dow:      all(dow),
			Location: loc,
		}, nil

	case "@weekly":
		return &SpecSchedule{
			Second:   1 << seconds.min,
			Minute:   1 << minutes.min,
			Hour:     1 << hours.min,
			Dom:      all(dom),
			Month:    all(months),
			Dow:      1 << dow.min,
			Location: loc,
		}, nil

	case "@daily", "@midnight":
		return &SpecSchedule{
			Second:   1 << seconds.min,
			Minute:   1 << minutes.min,
			Hour:     1 << hours.min,
			Dom:      all(dom),
			Month:    all(months),
			Dow:      all(dow),
			Location: loc,
		}, nil

	case "@hourly":
		return &SpecSchedule{
			Second:   1 << seconds.min,
			Minute:   1 << minutes.min,
			Hour:     all(hours),
			Dom:      all(dom),
			Month:    all(months),
			Dow:      all(dow),
			Location: loc,
		}, nil

	default:
		// Continue to check @every prefix below
	}

	const every = "@every "
	if strings.HasPrefix(descriptor, every) {
		duration, err := time.ParseDuration(descriptor[len(every):])
		if err != nil {
			return nil, errors.Wrap(err, "failed to parse duration")
		}
		return Every(duration), nil
	}

	return nil, errors.New("unrecognized descriptor: " + descriptor)
}

384
plugin/cron/parser_test.go Normal file
View File

@@ -0,0 +1,384 @@
//nolint:all
package cron
import (
"reflect"
"strings"
"testing"
"time"
)
// secondParser is a 6-field parser (with seconds) whose Dow field is optional.
var secondParser = NewParser(Second | Minute | Hour | Dom | Month | DowOptional | Descriptor)

// TestRange table-tests getRange over valid and invalid range expressions.
func TestRange(t *testing.T) {
	zero := uint64(0)
	ranges := []struct {
		expr     string
		min, max uint
		expected uint64
		err      string
	}{
		{"5", 0, 7, 1 << 5, ""},
		{"0", 0, 7, 1 << 0, ""},
		{"7", 0, 7, 1 << 7, ""},
		{"5-5", 0, 7, 1 << 5, ""},
		{"5-6", 0, 7, 1<<5 | 1<<6, ""},
		{"5-7", 0, 7, 1<<5 | 1<<6 | 1<<7, ""},
		{"5-6/2", 0, 7, 1 << 5, ""},
		{"5-7/2", 0, 7, 1<<5 | 1<<7, ""},
		{"5-7/1", 0, 7, 1<<5 | 1<<6 | 1<<7, ""},
		{"*", 1, 3, 1<<1 | 1<<2 | 1<<3 | starBit, ""},
		{"*/2", 1, 3, 1<<1 | 1<<3, ""},
		{"5--5", 0, 0, zero, "too many hyphens"},
		{"jan-x", 0, 0, zero, `failed to parse number: strconv.Atoi: parsing "jan": invalid syntax`},
		{"2-x", 1, 5, zero, `failed to parse number: strconv.Atoi: parsing "x": invalid syntax`},
		{"*/-12", 0, 0, zero, "number must be positive"},
		{"*//2", 0, 0, zero, "too many slashes"},
		{"1", 3, 5, zero, "below minimum"},
		{"6", 3, 5, zero, "above maximum"},
		{"5-3", 3, 5, zero, "beginning of range after end: 5-3"},
		{"*/0", 0, 0, zero, "step cannot be zero: */0"},
	}

	for _, c := range ranges {
		actual, err := getRange(c.expr, bounds{c.min, c.max, nil})
		if len(c.err) != 0 && (err == nil || !strings.Contains(err.Error(), c.err)) {
			t.Errorf("%s => expected %v, got %v", c.expr, c.err, err)
		}
		if len(c.err) == 0 && err != nil {
			t.Errorf("%s => unexpected error %v", c.expr, err)
		}
		if actual != c.expected {
			t.Errorf("%s => expected %d, got %d", c.expr, c.expected, actual)
		}
	}
}
// TestField checks that comma-separated range lists OR together correctly.
func TestField(t *testing.T) {
	fields := []struct {
		expr     string
		min, max uint
		expected uint64
	}{
		{"5", 1, 7, 1 << 5},
		{"5,6", 1, 7, 1<<5 | 1<<6},
		{"5,6,7", 1, 7, 1<<5 | 1<<6 | 1<<7},
		{"1,5-7/2,3", 1, 7, 1<<1 | 1<<5 | 1<<7 | 1<<3},
	}

	for _, c := range fields {
		actual, _ := getField(c.expr, bounds{c.min, c.max, nil})
		if actual != c.expected {
			t.Errorf("%s => expected %d, got %d", c.expr, c.expected, actual)
		}
	}
}

// TestAll verifies all() fills every bit of each field's bounds (plus starBit).
func TestAll(t *testing.T) {
	allBits := []struct {
		r        bounds
		expected uint64
	}{
		{minutes, 0xfffffffffffffff}, // 0-59: 60 ones
		{hours, 0xffffff},            // 0-23: 24 ones
		{dom, 0xfffffffe},            // 1-31: 31 ones, 1 zero
		{months, 0x1ffe},             // 1-12: 12 ones, 1 zero
		{dow, 0x7f},                  // 0-6: 7 ones
	}

	for _, c := range allBits {
		actual := all(c.r) // all() adds the starBit, so compensate for that..
		if c.expected|starBit != actual {
			t.Errorf("%d-%d/%d => expected %b, got %b",
				c.r.min, c.r.max, 1, c.expected|starBit, actual)
		}
	}
}

// TestBits spot-checks getBits for single values and stepped ranges.
func TestBits(t *testing.T) {
	bits := []struct {
		min, max, step uint
		expected       uint64
	}{
		{0, 0, 1, 0x1},
		{1, 1, 1, 0x2},
		{1, 5, 2, 0x2a}, // 101010
		{1, 4, 2, 0xa},  // 1010
	}

	for _, c := range bits {
		actual := getBits(c.min, c.max, c.step)
		if c.expected != actual {
			t.Errorf("%d-%d/%d => expected %b, got %b",
				c.min, c.max, c.step, c.expected, actual)
		}
	}
}
// TestParseScheduleErrors asserts Parse reports descriptive errors and a nil
// schedule for malformed specs.
func TestParseScheduleErrors(t *testing.T) {
	var tests = []struct{ expr, err string }{
		{"* 5 j * * *", `failed to parse number: strconv.Atoi: parsing "j": invalid syntax`},
		{"@every Xm", "failed to parse duration"},
		{"@unrecognized", "unrecognized descriptor"},
		{"* * * *", "incorrect number of fields, expected 5-6"},
		{"", "empty spec string"},
	}
	for _, c := range tests {
		actual, err := secondParser.Parse(c.expr)
		if err == nil || !strings.Contains(err.Error(), c.err) {
			t.Errorf("%s => expected %v, got %v", c.expr, c.err, err)
		}
		if actual != nil {
			t.Errorf("expected nil schedule on error, got %v", actual)
		}
	}
}

// TestParseSchedule covers valid specs, TZ/CRON_TZ prefixes, and descriptors
// across both the seconds-aware and standard parsers.
func TestParseSchedule(t *testing.T) {
	tokyo, _ := time.LoadLocation("Asia/Tokyo")
	entries := []struct {
		parser   Parser
		expr     string
		expected Schedule
	}{
		{secondParser, "0 5 * * * *", every5min(time.Local)},
		{standardParser, "5 * * * *", every5min(time.Local)},
		{secondParser, "CRON_TZ=UTC 0 5 * * * *", every5min(time.UTC)},
		{standardParser, "CRON_TZ=UTC 5 * * * *", every5min(time.UTC)},
		{secondParser, "CRON_TZ=Asia/Tokyo 0 5 * * * *", every5min(tokyo)},
		{secondParser, "@every 5m", ConstantDelaySchedule{5 * time.Minute}},
		{secondParser, "@midnight", midnight(time.Local)},
		{secondParser, "TZ=UTC @midnight", midnight(time.UTC)},
		{secondParser, "TZ=Asia/Tokyo @midnight", midnight(tokyo)},
		{secondParser, "@yearly", annual(time.Local)},
		{secondParser, "@annually", annual(time.Local)},
		{
			parser: secondParser,
			expr:   "* 5 * * * *",
			expected: &SpecSchedule{
				Second:   all(seconds),
				Minute:   1 << 5,
				Hour:     all(hours),
				Dom:      all(dom),
				Month:    all(months),
				Dow:      all(dow),
				Location: time.Local,
			},
		},
	}

	for _, c := range entries {
		actual, err := c.parser.Parse(c.expr)
		if err != nil {
			t.Errorf("%s => unexpected error %v", c.expr, err)
		}
		if !reflect.DeepEqual(actual, c.expected) {
			t.Errorf("%s => expected %b, got %b", c.expr, c.expected, actual)
		}
	}
}
// TestOptionalSecondSchedule verifies a SecondOptional parser accepts both
// 5- and 6-field specs, defaulting the seconds field to 0 when omitted.
func TestOptionalSecondSchedule(t *testing.T) {
	parser := NewParser(SecondOptional | Minute | Hour | Dom | Month | Dow | Descriptor)
	entries := []struct {
		expr     string
		expected Schedule
	}{
		{"0 5 * * * *", every5min(time.Local)},
		{"5 5 * * * *", every5min5s(time.Local)},
		{"5 * * * *", every5min(time.Local)},
	}

	for _, c := range entries {
		actual, err := parser.Parse(c.expr)
		if err != nil {
			t.Errorf("%s => unexpected error %v", c.expr, err)
		}
		if !reflect.DeepEqual(actual, c.expected) {
			t.Errorf("%s => expected %b, got %b", c.expr, c.expected, actual)
		}
	}
}

// TestNormalizeFields checks defaults are filled for omitted/optional fields.
func TestNormalizeFields(t *testing.T) {
	tests := []struct {
		name     string
		input    []string
		options  ParseOption
		expected []string
	}{
		{
			"AllFields_NoOptional",
			[]string{"0", "5", "*", "*", "*", "*"},
			Second | Minute | Hour | Dom | Month | Dow | Descriptor,
			[]string{"0", "5", "*", "*", "*", "*"},
		},
		{
			"AllFields_SecondOptional_Provided",
			[]string{"0", "5", "*", "*", "*", "*"},
			SecondOptional | Minute | Hour | Dom | Month | Dow | Descriptor,
			[]string{"0", "5", "*", "*", "*", "*"},
		},
		{
			"AllFields_SecondOptional_NotProvided",
			[]string{"5", "*", "*", "*", "*"},
			SecondOptional | Minute | Hour | Dom | Month | Dow | Descriptor,
			[]string{"0", "5", "*", "*", "*", "*"},
		},
		{
			"SubsetFields_NoOptional",
			[]string{"5", "15", "*"},
			Hour | Dom | Month,
			[]string{"0", "0", "5", "15", "*", "*"},
		},
		{
			"SubsetFields_DowOptional_Provided",
			[]string{"5", "15", "*", "4"},
			Hour | Dom | Month | DowOptional,
			[]string{"0", "0", "5", "15", "*", "4"},
		},
		{
			"SubsetFields_DowOptional_NotProvided",
			[]string{"5", "15", "*"},
			Hour | Dom | Month | DowOptional,
			[]string{"0", "0", "5", "15", "*", "*"},
		},
		{
			"SubsetFields_SecondOptional_NotProvided",
			[]string{"5", "15", "*"},
			SecondOptional | Hour | Dom | Month,
			[]string{"0", "0", "5", "15", "*", "*"},
		},
	}

	for _, test := range tests {
		t.Run(test.name, func(*testing.T) {
			actual, err := normalizeFields(test.input, test.options)
			if err != nil {
				t.Errorf("unexpected error: %v", err)
			}
			if !reflect.DeepEqual(actual, test.expected) {
				t.Errorf("expected %v, got %v", test.expected, actual)
			}
		})
	}
}
// TestNormalizeFields_Errors asserts normalizeFields rejects invalid
// option/field-count combinations.
func TestNormalizeFields_Errors(t *testing.T) {
	tests := []struct {
		name    string
		input   []string
		options ParseOption
		err     string
	}{
		{
			"TwoOptionals",
			[]string{"0", "5", "*", "*", "*", "*"},
			SecondOptional | Minute | Hour | Dom | Month | DowOptional,
			"",
		},
		{
			"TooManyFields",
			[]string{"0", "5", "*", "*"},
			SecondOptional | Minute | Hour,
			"",
		},
		{
			"NoFields",
			[]string{},
			SecondOptional | Minute | Hour,
			"",
		},
		{
			"TooFewFields",
			[]string{"*"},
			SecondOptional | Minute | Hour,
			"",
		},
	}
	for _, test := range tests {
		t.Run(test.name, func(*testing.T) {
			actual, err := normalizeFields(test.input, test.options)
			if err == nil {
				t.Errorf("expected an error, got none. results: %v", actual)
			}
			if !strings.Contains(err.Error(), test.err) {
				t.Errorf("expected error %q, got %q", test.err, err.Error())
			}
		})
	}
}

// TestStandardSpecSchedule exercises ParseStandard for specs, descriptors,
// and error cases.
func TestStandardSpecSchedule(t *testing.T) {
	entries := []struct {
		expr     string
		expected Schedule
		err      string
	}{
		{
			expr:     "5 * * * *",
			expected: &SpecSchedule{1 << seconds.min, 1 << 5, all(hours), all(dom), all(months), all(dow), time.Local},
		},
		{
			expr:     "@every 5m",
			expected: ConstantDelaySchedule{time.Duration(5) * time.Minute},
		},
		{
			expr: "5 j * * *",
			err:  `failed to parse number: strconv.Atoi: parsing "j": invalid syntax`,
		},
		{
			expr: "* * * *",
			err:  "incorrect number of fields",
		},
	}

	for _, c := range entries {
		actual, err := ParseStandard(c.expr)
		if len(c.err) != 0 && (err == nil || !strings.Contains(err.Error(), c.err)) {
			t.Errorf("%s => expected %v, got %v", c.expr, c.err, err)
		}
		if len(c.err) == 0 && err != nil {
			t.Errorf("%s => unexpected error %v", c.expr, err)
		}
		if !reflect.DeepEqual(actual, c.expected) {
			t.Errorf("%s => expected %b, got %b", c.expr, c.expected, actual)
		}
	}
}

// TestNoDescriptorParser verifies descriptors fail when not enabled.
func TestNoDescriptorParser(t *testing.T) {
	parser := NewParser(Minute | Hour)
	_, err := parser.Parse("@every 1m")
	if err == nil {
		t.Error("expected an error, got none")
	}
}
// every5min returns the schedule for second 0 of minute 5 of every hour.
func every5min(loc *time.Location) *SpecSchedule {
	return &SpecSchedule{1 << 0, 1 << 5, all(hours), all(dom), all(months), all(dow), loc}
}

// every5min5s is like every5min but at second 5.
func every5min5s(loc *time.Location) *SpecSchedule {
	return &SpecSchedule{1 << 5, 1 << 5, all(hours), all(dom), all(months), all(dow), loc}
}

// midnight returns the schedule for 00:00:00 every day.
func midnight(loc *time.Location) *SpecSchedule {
	return &SpecSchedule{1, 1, 1, all(dom), all(months), all(dow), loc}
}

// annual returns the schedule for 00:00:00 on January 1st.
func annual(loc *time.Location) *SpecSchedule {
	return &SpecSchedule{
		Second:   1 << seconds.min,
		Minute:   1 << minutes.min,
		Hour:     1 << hours.min,
		Dom:      1 << dom.min,
		Month:    1 << months.min,
		Dow:      all(dow),
		Location: loc,
	}
}

188
plugin/cron/spec.go Normal file
View File

@@ -0,0 +1,188 @@
package cron
import "time"
// SpecSchedule specifies a duty cycle (to the second granularity), based on a
// traditional crontab specification. It is computed initially and stored as bit sets.
type SpecSchedule struct {
	// Each field is a bitmask of allowed values: bit i set means value i matches.
	Second, Minute, Hour, Dom, Month, Dow uint64

	// Override location for this schedule.
	Location *time.Location
}

// bounds provides a range of acceptable values (plus a map of name to value).
type bounds struct {
	min, max uint
	names    map[string]uint
}

// The bounds for each field.
var (
	seconds = bounds{0, 59, nil}
	minutes = bounds{0, 59, nil}
	hours   = bounds{0, 23, nil}
	dom     = bounds{1, 31, nil}
	months  = bounds{1, 12, map[string]uint{
		"jan": 1,
		"feb": 2,
		"mar": 3,
		"apr": 4,
		"may": 5,
		"jun": 6,
		"jul": 7,
		"aug": 8,
		"sep": 9,
		"oct": 10,
		"nov": 11,
		"dec": 12,
	}}
	dow = bounds{0, 6, map[string]uint{
		"sun": 0,
		"mon": 1,
		"tue": 2,
		"wed": 3,
		"thu": 4,
		"fri": 5,
		"sat": 6,
	}}
)

const (
	// Set the top bit if a star was included in the expression.
	starBit = 1 << 63
)
// Next returns the next time this schedule is activated, greater than the given
// time. If no time can be found to satisfy the schedule, return the zero time.
func (s *SpecSchedule) Next(t time.Time) time.Time {
	// General approach
	//
	// For Month, Day, Hour, Minute, Second:
	// Check if the time value matches. If yes, continue to the next field.
	// If the field doesn't match the schedule, then increment the field until it matches.
	// While incrementing the field, a wrap-around brings it back to the beginning
	// of the field list (since it is necessary to re-verify previous field
	// values)

	// Convert the given time into the schedule's timezone, if one is specified.
	// Save the original timezone so we can convert back after we find a time.
	// Note that schedules without a time zone specified (time.Local) are treated
	// as local to the time provided.
	origLocation := t.Location()
	loc := s.Location
	if loc == time.Local {
		loc = t.Location()
	}
	if s.Location != time.Local {
		t = t.In(s.Location)
	}

	// Start at the earliest possible time (the upcoming second).
	t = t.Add(1*time.Second - time.Duration(t.Nanosecond())*time.Nanosecond)

	// This flag indicates whether a field has been incremented.
	added := false

	// If no time is found within five years, return zero.
	yearLimit := t.Year() + 5

WRAP:
	if t.Year() > yearLimit {
		return time.Time{}
	}

	// Find the first applicable month.
	// If it's this month, then do nothing.
	for 1<<uint(t.Month())&s.Month == 0 {
		// If we have to add a month, reset the other parts to 0.
		if !added {
			added = true
			// Otherwise, set the date at the beginning (since the current time is irrelevant).
			t = time.Date(t.Year(), t.Month(), 1, 0, 0, 0, 0, loc)
		}
		t = t.AddDate(0, 1, 0)

		// Wrapped around.
		if t.Month() == time.January {
			goto WRAP
		}
	}

	// Now get a day in that month.
	//
	// NOTE: This causes issues for daylight savings regimes where midnight does
	// not exist. For example: Sao Paulo has DST that transforms midnight on
	// 11/3 into 1am. Handle that by noticing when the Hour ends up != 0.
	for !dayMatches(s, t) {
		if !added {
			added = true
			t = time.Date(t.Year(), t.Month(), t.Day(), 0, 0, 0, 0, loc)
		}
		t = t.AddDate(0, 0, 1)
		// Notice if the hour is no longer midnight due to DST.
		// Add an hour if it's 23, subtract an hour if it's 1.
		if t.Hour() != 0 {
			if t.Hour() > 12 {
				t = t.Add(time.Duration(24-t.Hour()) * time.Hour)
			} else {
				t = t.Add(time.Duration(-t.Hour()) * time.Hour)
			}
		}

		if t.Day() == 1 {
			goto WRAP
		}
	}

	for 1<<uint(t.Hour())&s.Hour == 0 {
		if !added {
			added = true
			t = time.Date(t.Year(), t.Month(), t.Day(), t.Hour(), 0, 0, 0, loc)
		}
		t = t.Add(1 * time.Hour)

		// Hour wrapped to a new day: re-verify month/day constraints.
		if t.Hour() == 0 {
			goto WRAP
		}
	}

	for 1<<uint(t.Minute())&s.Minute == 0 {
		if !added {
			added = true
			t = t.Truncate(time.Minute)
		}
		t = t.Add(1 * time.Minute)

		// Minute wrapped to a new hour: re-verify higher-order fields.
		if t.Minute() == 0 {
			goto WRAP
		}
	}

	for 1<<uint(t.Second())&s.Second == 0 {
		if !added {
			added = true
			t = t.Truncate(time.Second)
		}
		t = t.Add(1 * time.Second)

		// Second wrapped to a new minute: re-verify higher-order fields.
		if t.Second() == 0 {
			goto WRAP
		}
	}

	return t.In(origLocation)
}
// dayMatches reports whether the schedule's day-of-week and day-of-month
// restrictions are satisfied by the given time.
func dayMatches(s *SpecSchedule, t time.Time) bool {
	domOK := s.Dom&(1<<uint(t.Day())) != 0
	dowOK := s.Dow&(1<<uint(t.Weekday())) != 0
	// If either field was a wildcard ("*"/"?"), both must match;
	// when both are restricted, matching either one is enough.
	if s.Dom&starBit != 0 || s.Dow&starBit != 0 {
		return domOK && dowOK
	}
	return domOK || dowOK
}

301
plugin/cron/spec_test.go Normal file
View File

@@ -0,0 +1,301 @@
//nolint:all
package cron
import (
"strings"
"testing"
"time"
)
// TestActivation checks, for each (time, spec) pair, whether the time is an
// activation point of the parsed standard schedule.
func TestActivation(t *testing.T) {
	tests := []struct {
		time, spec string
		expected   bool
	}{
		// Every fifteen minutes.
		{"Mon Jul 9 15:00 2012", "0/15 * * * *", true},
		{"Mon Jul 9 15:45 2012", "0/15 * * * *", true},
		{"Mon Jul 9 15:40 2012", "0/15 * * * *", false},

		// Every fifteen minutes, starting at 5 minutes.
		{"Mon Jul 9 15:05 2012", "5/15 * * * *", true},
		{"Mon Jul 9 15:20 2012", "5/15 * * * *", true},
		{"Mon Jul 9 15:50 2012", "5/15 * * * *", true},

		// Named months
		{"Sun Jul 15 15:00 2012", "0/15 * * Jul *", true},
		{"Sun Jul 15 15:00 2012", "0/15 * * Jun *", false},

		// Everything set.
		{"Sun Jul 15 08:30 2012", "30 08 ? Jul Sun", true},
		{"Sun Jul 15 08:30 2012", "30 08 15 Jul ?", true},
		{"Mon Jul 16 08:30 2012", "30 08 ? Jul Sun", false},
		{"Mon Jul 16 08:30 2012", "30 08 15 Jul ?", false},

		// Predefined schedules
		{"Mon Jul 9 15:00 2012", "@hourly", true},
		{"Mon Jul 9 15:04 2012", "@hourly", false},
		{"Mon Jul 9 15:00 2012", "@daily", false},
		{"Mon Jul 9 00:00 2012", "@daily", true},
		{"Mon Jul 9 00:00 2012", "@weekly", false},
		{"Sun Jul 8 00:00 2012", "@weekly", true},
		{"Sun Jul 8 01:00 2012", "@weekly", false},
		{"Sun Jul 8 00:00 2012", "@monthly", false},
		{"Sun Jul 1 00:00 2012", "@monthly", true},

		// Test interaction of DOW and DOM.
		// If both are restricted, then only one needs to match.
		{"Sun Jul 15 00:00 2012", "* * 1,15 * Sun", true},
		{"Fri Jun 15 00:00 2012", "* * 1,15 * Sun", true},
		{"Wed Aug 1 00:00 2012", "* * 1,15 * Sun", true},
		{"Sun Jul 15 00:00 2012", "* * */10 * Sun", true}, // verifies #70

		// However, if one has a star, then both need to match.
		{"Sun Jul 15 00:00 2012", "* * * * Mon", false},
		{"Mon Jul 9 00:00 2012", "* * 1,15 * *", false},
		{"Sun Jul 15 00:00 2012", "* * 1,15 * *", true},
		{"Sun Jul 15 00:00 2012", "* * */2 * Sun", true},
	}

	for _, test := range tests {
		sched, err := ParseStandard(test.spec)
		if err != nil {
			t.Error(err)
			continue
		}
		// Activation at T holds iff Next(T-1s) lands exactly on T.
		actual := sched.Next(getTime(test.time).Add(-1 * time.Second))
		expected := getTime(test.time)
		if test.expected && expected != actual || !test.expected && expected == actual {
			t.Errorf("Fail evaluating %s on %s: (expected) %s != %s (actual)",
				test.spec, test.time, expected, actual)
		}
	}
}
// TestNext table-tests SpecSchedule.Next over wrap-arounds (hour/day/month/
// year), leap years, timezone prefixes, and DST transitions in both
// directions; an empty expected string means the spec is unsatisfiable.
func TestNext(t *testing.T) {
	runs := []struct {
		time, spec string
		expected   string
	}{
		// Simple cases
		{"Mon Jul 9 14:45 2012", "0 0/15 * * * *", "Mon Jul 9 15:00 2012"},
		{"Mon Jul 9 14:59 2012", "0 0/15 * * * *", "Mon Jul 9 15:00 2012"},
		{"Mon Jul 9 14:59:59 2012", "0 0/15 * * * *", "Mon Jul 9 15:00 2012"},

		// Wrap around hours
		{"Mon Jul 9 15:45 2012", "0 20-35/15 * * * *", "Mon Jul 9 16:20 2012"},

		// Wrap around days
		{"Mon Jul 9 23:46 2012", "0 */15 * * * *", "Tue Jul 10 00:00 2012"},
		{"Mon Jul 9 23:45 2012", "0 20-35/15 * * * *", "Tue Jul 10 00:20 2012"},
		{"Mon Jul 9 23:35:51 2012", "15/35 20-35/15 * * * *", "Tue Jul 10 00:20:15 2012"},
		{"Mon Jul 9 23:35:51 2012", "15/35 20-35/15 1/2 * * *", "Tue Jul 10 01:20:15 2012"},
		{"Mon Jul 9 23:35:51 2012", "15/35 20-35/15 10-12 * * *", "Tue Jul 10 10:20:15 2012"},

		{"Mon Jul 9 23:35:51 2012", "15/35 20-35/15 1/2 */2 * *", "Thu Jul 11 01:20:15 2012"},
		{"Mon Jul 9 23:35:51 2012", "15/35 20-35/15 * 9-20 * *", "Wed Jul 10 00:20:15 2012"},
		{"Mon Jul 9 23:35:51 2012", "15/35 20-35/15 * 9-20 Jul *", "Wed Jul 10 00:20:15 2012"},

		// Wrap around months
		{"Mon Jul 9 23:35 2012", "0 0 0 9 Apr-Oct ?", "Thu Aug 9 00:00 2012"},
		{"Mon Jul 9 23:35 2012", "0 0 0 */5 Apr,Aug,Oct Mon", "Tue Aug 1 00:00 2012"},
		{"Mon Jul 9 23:35 2012", "0 0 0 */5 Oct Mon", "Mon Oct 1 00:00 2012"},

		// Wrap around years
		{"Mon Jul 9 23:35 2012", "0 0 0 * Feb Mon", "Mon Feb 4 00:00 2013"},
		{"Mon Jul 9 23:35 2012", "0 0 0 * Feb Mon/2", "Fri Feb 1 00:00 2013"},

		// Wrap around minute, hour, day, month, and year
		{"Mon Dec 31 23:59:45 2012", "0 * * * * *", "Tue Jan 1 00:00:00 2013"},

		// Leap year
		{"Mon Jul 9 23:35 2012", "0 0 0 29 Feb ?", "Mon Feb 29 00:00 2016"},

		// Daylight savings time 2am EST (-5) -> 3am EDT (-4)
		{"2012-03-11T00:00:00-0500", "TZ=America/New_York 0 30 2 11 Mar ?", "2013-03-11T02:30:00-0400"},

		// hourly job
		{"2012-03-11T00:00:00-0500", "TZ=America/New_York 0 0 * * * ?", "2012-03-11T01:00:00-0500"},
		{"2012-03-11T01:00:00-0500", "TZ=America/New_York 0 0 * * * ?", "2012-03-11T03:00:00-0400"},
		{"2012-03-11T03:00:00-0400", "TZ=America/New_York 0 0 * * * ?", "2012-03-11T04:00:00-0400"},
		{"2012-03-11T04:00:00-0400", "TZ=America/New_York 0 0 * * * ?", "2012-03-11T05:00:00-0400"},

		// hourly job using CRON_TZ
		{"2012-03-11T00:00:00-0500", "CRON_TZ=America/New_York 0 0 * * * ?", "2012-03-11T01:00:00-0500"},
		{"2012-03-11T01:00:00-0500", "CRON_TZ=America/New_York 0 0 * * * ?", "2012-03-11T03:00:00-0400"},
		{"2012-03-11T03:00:00-0400", "CRON_TZ=America/New_York 0 0 * * * ?", "2012-03-11T04:00:00-0400"},
		{"2012-03-11T04:00:00-0400", "CRON_TZ=America/New_York 0 0 * * * ?", "2012-03-11T05:00:00-0400"},

		// 1am nightly job
		{"2012-03-11T00:00:00-0500", "TZ=America/New_York 0 0 1 * * ?", "2012-03-11T01:00:00-0500"},
		{"2012-03-11T01:00:00-0500", "TZ=America/New_York 0 0 1 * * ?", "2012-03-12T01:00:00-0400"},

		// 2am nightly job (skipped)
		{"2012-03-11T00:00:00-0500", "TZ=America/New_York 0 0 2 * * ?", "2012-03-12T02:00:00-0400"},

		// Daylight savings time 2am EDT (-4) => 1am EST (-5)
		{"2012-11-04T00:00:00-0400", "TZ=America/New_York 0 30 2 04 Nov ?", "2012-11-04T02:30:00-0500"},
		{"2012-11-04T01:45:00-0400", "TZ=America/New_York 0 30 1 04 Nov ?", "2012-11-04T01:30:00-0500"},

		// hourly job
		{"2012-11-04T00:00:00-0400", "TZ=America/New_York 0 0 * * * ?", "2012-11-04T01:00:00-0400"},
		{"2012-11-04T01:00:00-0400", "TZ=America/New_York 0 0 * * * ?", "2012-11-04T01:00:00-0500"},
		{"2012-11-04T01:00:00-0500", "TZ=America/New_York 0 0 * * * ?", "2012-11-04T02:00:00-0500"},

		// 1am nightly job (runs twice)
		{"2012-11-04T00:00:00-0400", "TZ=America/New_York 0 0 1 * * ?", "2012-11-04T01:00:00-0400"},
		{"2012-11-04T01:00:00-0400", "TZ=America/New_York 0 0 1 * * ?", "2012-11-04T01:00:00-0500"},
		{"2012-11-04T01:00:00-0500", "TZ=America/New_York 0 0 1 * * ?", "2012-11-05T01:00:00-0500"},

		// 2am nightly job
		{"2012-11-04T00:00:00-0400", "TZ=America/New_York 0 0 2 * * ?", "2012-11-04T02:00:00-0500"},
		{"2012-11-04T02:00:00-0500", "TZ=America/New_York 0 0 2 * * ?", "2012-11-05T02:00:00-0500"},

		// 3am nightly job
		{"2012-11-04T00:00:00-0400", "TZ=America/New_York 0 0 3 * * ?", "2012-11-04T03:00:00-0500"},
		{"2012-11-04T03:00:00-0500", "TZ=America/New_York 0 0 3 * * ?", "2012-11-05T03:00:00-0500"},

		// hourly job
		{"TZ=America/New_York 2012-11-04T00:00:00-0400", "0 0 * * * ?", "2012-11-04T01:00:00-0400"},
		{"TZ=America/New_York 2012-11-04T01:00:00-0400", "0 0 * * * ?", "2012-11-04T01:00:00-0500"},
		{"TZ=America/New_York 2012-11-04T01:00:00-0500", "0 0 * * * ?", "2012-11-04T02:00:00-0500"},

		// 1am nightly job (runs twice)
		{"TZ=America/New_York 2012-11-04T00:00:00-0400", "0 0 1 * * ?", "2012-11-04T01:00:00-0400"},
		{"TZ=America/New_York 2012-11-04T01:00:00-0400", "0 0 1 * * ?", "2012-11-04T01:00:00-0500"},
		{"TZ=America/New_York 2012-11-04T01:00:00-0500", "0 0 1 * * ?", "2012-11-05T01:00:00-0500"},

		// 2am nightly job
		{"TZ=America/New_York 2012-11-04T00:00:00-0400", "0 0 2 * * ?", "2012-11-04T02:00:00-0500"},
		{"TZ=America/New_York 2012-11-04T02:00:00-0500", "0 0 2 * * ?", "2012-11-05T02:00:00-0500"},

		// 3am nightly job
		{"TZ=America/New_York 2012-11-04T00:00:00-0400", "0 0 3 * * ?", "2012-11-04T03:00:00-0500"},
		{"TZ=America/New_York 2012-11-04T03:00:00-0500", "0 0 3 * * ?", "2012-11-05T03:00:00-0500"},

		// Unsatisfiable
		{"Mon Jul 9 23:35 2012", "0 0 0 30 Feb ?", ""},
		{"Mon Jul 9 23:35 2012", "0 0 0 31 Apr ?", ""},

		// Monthly job
		{"TZ=America/New_York 2012-11-04T00:00:00-0400", "0 0 3 3 * ?", "2012-12-03T03:00:00-0500"},

		// Test the scenario of DST resulting in midnight not being a valid time.
		// https://github.com/robfig/cron/issues/157
		{"2018-10-17T05:00:00-0400", "TZ=America/Sao_Paulo 0 0 9 10 * ?", "2018-11-10T06:00:00-0500"},
		{"2018-02-14T05:00:00-0500", "TZ=America/Sao_Paulo 0 0 9 22 * ?", "2018-02-22T07:00:00-0500"},
	}

	for _, c := range runs {
		sched, err := secondParser.Parse(c.spec)
		if err != nil {
			t.Error(err)
			continue
		}
		actual := sched.Next(getTime(c.time))
		expected := getTime(c.expected)
		if !actual.Equal(expected) {
			t.Errorf("%s, \"%s\": (expected) %v != %v (actual)", c.time, c.spec, expected, actual)
		}
	}
}
// TestErrors verifies that ParseStandard rejects malformed cron specs.
func TestErrors(t *testing.T) {
	for _, badSpec := range []string{
		"xyz",
		"60 0 * * *",
		"0 60 * * *",
		"0 0 * * XYZ",
	} {
		if _, err := ParseStandard(badSpec); err == nil {
			t.Error("expected an error parsing: ", badSpec)
		}
	}
}
func getTime(value string) time.Time {
if value == "" {
return time.Time{}
}
var location = time.Local
if strings.HasPrefix(value, "TZ=") {
parts := strings.Fields(value)
loc, err := time.LoadLocation(parts[0][len("TZ="):])
if err != nil {
panic("could not parse location:" + err.Error())
}
location = loc
value = parts[1]
}
var layouts = []string{
"Mon Jan 2 15:04 2006",
"Mon Jan 2 15:04:05 2006",
}
for _, layout := range layouts {
if t, err := time.ParseInLocation(layout, value, location); err == nil {
return t
}
}
if t, err := time.ParseInLocation("2006-01-02T15:04:05-0700", value, location); err == nil {
return t
}
panic("could not parse time value " + value)
}
// TestNextWithTz verifies Next for specs applied to times that carry an
// explicit UTC offset.
func TestNextWithTz(t *testing.T) {
	cases := []struct {
		time, spec string
		expected   string
	}{
		// Failing tests
		{"2016-01-03T13:09:03+0530", "14 14 * * *", "2016-01-03T14:14:00+0530"},
		{"2016-01-03T04:09:03+0530", "14 14 * * ?", "2016-01-03T14:14:00+0530"},
		// Passing tests
		{"2016-01-03T14:09:03+0530", "14 14 * * *", "2016-01-03T14:14:00+0530"},
		{"2016-01-03T14:00:00+0530", "14 14 * * ?", "2016-01-03T14:14:00+0530"},
	}
	for _, tc := range cases {
		sched, err := ParseStandard(tc.spec)
		if err != nil {
			t.Error(err)
			continue
		}
		got := sched.Next(getTimeTZ(tc.time))
		want := getTimeTZ(tc.expected)
		if !got.Equal(want) {
			t.Errorf("%s, \"%s\": (expected) %v != %v (actual)", tc.time, tc.spec, want, got)
		}
	}
}
func getTimeTZ(value string) time.Time {
if value == "" {
return time.Time{}
}
t, err := time.Parse("Mon Jan 2 15:04 2006", value)
if err != nil {
t, err = time.Parse("Mon Jan 2 15:04:05 2006", value)
if err != nil {
t, err = time.Parse("2006-01-02T15:04:05-0700", value)
if err != nil {
panic(err)
}
}
}
return t
}
// TestSlash0NoHang ensures a "/0" step is rejected at parse time rather
// than causing the scheduler to loop forever looking for a next match.
// https://github.com/robfig/cron/issues/144
func TestSlash0NoHang(t *testing.T) {
	const schedule = "TZ=America/New_York 15/0 * * * *"
	if _, err := ParseStandard(schedule); err == nil {
		t.Error("expected an error on 0 increment")
	}
}

507
plugin/email/README.md Normal file
View File

@@ -0,0 +1,507 @@
# Email Plugin
SMTP email sending functionality for self-hosted Memos instances.
## Overview
This plugin provides a simple, reliable email sending interface following industry-standard SMTP protocols. It's designed for self-hosted environments where instance administrators configure their own email service, similar to platforms like GitHub, GitLab, and Discourse.
## Features
- Standard SMTP protocol support
- TLS/STARTTLS and SSL/TLS encryption
- HTML and plain text emails
- Multiple recipients (To, Cc, Bcc)
- Synchronous and asynchronous sending
- Detailed error reporting with context
- Works with all major email providers
- Reply-To header support
- RFC 5322 compliant message formatting
## Quick Start
### 1. Configure SMTP Settings
```go
import "github.com/usememos/memos/plugin/email"
config := &email.Config{
SMTPHost: "smtp.gmail.com",
SMTPPort: 587,
SMTPUsername: "your-email@gmail.com",
SMTPPassword: "your-app-password",
FromEmail: "noreply@yourdomain.com",
FromName: "Memos",
UseTLS: true,
}
```
### 2. Create and Send Email
```go
message := &email.Message{
To: []string{"user@example.com"},
Subject: "Welcome to Memos!",
Body: "Thanks for signing up.",
IsHTML: false,
}
// Synchronous send (waits for result)
err := email.Send(config, message)
if err != nil {
log.Printf("Failed to send email: %v", err)
}
// Asynchronous send (returns immediately)
email.SendAsync(config, message)
```
## Provider Configuration
### Gmail
Requires an [App Password](https://support.google.com/accounts/answer/185833) (2FA must be enabled):
```go
config := &email.Config{
SMTPHost: "smtp.gmail.com",
SMTPPort: 587,
SMTPUsername: "your-email@gmail.com",
SMTPPassword: "your-16-char-app-password",
FromEmail: "your-email@gmail.com",
FromName: "Memos",
UseTLS: true,
}
```
**Alternative (SSL):**
```go
config := &email.Config{
SMTPHost: "smtp.gmail.com",
SMTPPort: 465,
SMTPUsername: "your-email@gmail.com",
SMTPPassword: "your-16-char-app-password",
FromEmail: "your-email@gmail.com",
FromName: "Memos",
UseSSL: true,
}
```
### SendGrid
```go
config := &email.Config{
SMTPHost: "smtp.sendgrid.net",
SMTPPort: 587,
SMTPUsername: "apikey",
SMTPPassword: "your-sendgrid-api-key",
FromEmail: "noreply@yourdomain.com",
FromName: "Memos",
UseTLS: true,
}
```
### AWS SES
```go
config := &email.Config{
SMTPHost: "email-smtp.us-east-1.amazonaws.com",
SMTPPort: 587,
SMTPUsername: "your-smtp-username",
SMTPPassword: "your-smtp-password",
FromEmail: "verified@yourdomain.com",
FromName: "Memos",
UseTLS: true,
}
```
**Note:** Replace `us-east-1` with your AWS region. Email must be verified in SES.
### Mailgun
```go
config := &email.Config{
SMTPHost: "smtp.mailgun.org",
SMTPPort: 587,
SMTPUsername: "postmaster@yourdomain.com",
SMTPPassword: "your-mailgun-smtp-password",
FromEmail: "noreply@yourdomain.com",
FromName: "Memos",
UseTLS: true,
}
```
### Self-Hosted SMTP (Postfix, Exim, etc.)
```go
config := &email.Config{
SMTPHost: "mail.yourdomain.com",
SMTPPort: 587,
SMTPUsername: "username",
SMTPPassword: "password",
FromEmail: "noreply@yourdomain.com",
FromName: "Memos",
UseTLS: true,
}
```
## HTML Emails
```go
message := &email.Message{
To: []string{"user@example.com"},
Subject: "Welcome to Memos!",
Body: `
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
</head>
<body style="font-family: Arial, sans-serif;">
<h1 style="color: #333;">Welcome to Memos!</h1>
<p>We're excited to have you on board.</p>
<a href="https://yourdomain.com" style="background-color: #4CAF50; color: white; padding: 10px 20px; text-decoration: none; border-radius: 5px;">Get Started</a>
</body>
</html>
`,
IsHTML: true,
}
email.Send(config, message)
```
## Multiple Recipients
```go
message := &email.Message{
To: []string{"user1@example.com", "user2@example.com"},
Cc: []string{"manager@example.com"},
Bcc: []string{"admin@example.com"},
Subject: "Team Update",
Body: "Important team announcement...",
ReplyTo: "support@yourdomain.com",
}
email.Send(config, message)
```
## Testing
### Run Tests
```bash
# All tests
go test ./plugin/email/... -v
# With coverage
go test ./plugin/email/... -v -cover
# With race detector
go test ./plugin/email/... -race
```
### Manual Testing
Create a simple test program:
```go
package main
import (
"log"
"github.com/usememos/memos/plugin/email"
)
func main() {
config := &email.Config{
SMTPHost: "smtp.gmail.com",
SMTPPort: 587,
SMTPUsername: "your-email@gmail.com",
SMTPPassword: "your-app-password",
FromEmail: "your-email@gmail.com",
FromName: "Test",
UseTLS: true,
}
message := &email.Message{
To: []string{"recipient@example.com"},
Subject: "Test Email",
Body: "This is a test email from Memos email plugin.",
}
if err := email.Send(config, message); err != nil {
log.Fatalf("Failed to send email: %v", err)
}
log.Println("Email sent successfully!")
}
```
## Security Best Practices
### 1. Use TLS/SSL Encryption
Always enable encryption in production:
```go
// STARTTLS (port 587) - Recommended
config.UseTLS = true
// SSL/TLS (port 465)
config.UseSSL = true
```
### 2. Secure Credential Storage
Never hardcode credentials. Use environment variables:
```go
import "os"
config := &email.Config{
SMTPHost: os.Getenv("SMTP_HOST"),
SMTPPort: 587,
SMTPUsername: os.Getenv("SMTP_USERNAME"),
SMTPPassword: os.Getenv("SMTP_PASSWORD"),
FromEmail: os.Getenv("SMTP_FROM_EMAIL"),
UseTLS: true,
}
```
### 3. Use App-Specific Passwords
For Gmail and similar services, use app passwords instead of your main account password.
### 4. Validate and Sanitize Input
Always validate email addresses and sanitize content:
```go
// Validate before sending
if err := message.Validate(); err != nil {
return err
}
```
### 5. Implement Rate Limiting
Prevent abuse by limiting email sending:
```go
// Example using golang.org/x/time/rate
limiter := rate.NewLimiter(rate.Limit(10), 10) // sustained 10 emails per second, burst of 10
if !limiter.Allow() {
return errors.New("rate limit exceeded")
}
```
### 6. Monitor and Log
Log email sending activity for security monitoring:
```go
if err := email.Send(config, message); err != nil {
slog.Error("Email send failed",
slog.String("recipient", message.To[0]),
slog.Any("error", err))
}
```
## Common Ports
| Port | Protocol | Security | Use Case |
|------|----------|----------|----------|
| **587** | SMTP + STARTTLS | Encrypted | **Recommended** for most providers |
| **465** | SMTP over SSL/TLS | Encrypted | Alternative secure option |
| **25** | SMTP | Unencrypted | Legacy, often blocked by ISPs |
| **2525** | SMTP + STARTTLS | Encrypted | Alternative when 587 is blocked |
**Port 587 (STARTTLS)** is the recommended standard for modern SMTP:
```go
config := &email.Config{
SMTPPort: 587,
UseTLS: true,
}
```
**Port 465 (SSL/TLS)** is the alternative:
```go
config := &email.Config{
SMTPPort: 465,
UseSSL: true,
}
```
## Error Handling
The package provides detailed, contextual errors:
```go
err := email.Send(config, message)
if err != nil {
// Error messages include context:
switch {
case strings.Contains(err.Error(), "invalid email configuration"):
// Configuration error (missing host, invalid port, etc.)
log.Printf("Configuration error: %v", err)
case strings.Contains(err.Error(), "invalid email message"):
// Message validation error (missing recipients, subject, body)
log.Printf("Message error: %v", err)
case strings.Contains(err.Error(), "authentication failed"):
// SMTP authentication failed (wrong credentials)
log.Printf("Auth error: %v", err)
case strings.Contains(err.Error(), "failed to connect"):
// Network/connection error
log.Printf("Connection error: %v", err)
case strings.Contains(err.Error(), "recipient rejected"):
// SMTP server rejected recipient
log.Printf("Recipient error: %v", err)
default:
log.Printf("Unknown error: %v", err)
}
}
```
### Common Error Messages
```
❌ "invalid email configuration: SMTP host is required"
→ Fix: Set config.SMTPHost
❌ "invalid email configuration: SMTP port must be between 1 and 65535"
→ Fix: Set valid config.SMTPPort (usually 587 or 465)
❌ "invalid email configuration: from email is required"
→ Fix: Set config.FromEmail
❌ "invalid email message: at least one recipient is required"
→ Fix: Set message.To with at least one email address
❌ "invalid email message: subject is required"
→ Fix: Set message.Subject
❌ "invalid email message: body is required"
→ Fix: Set message.Body
❌ "SMTP authentication failed"
→ Fix: Check credentials (username/password)
❌ "failed to connect to SMTP server"
→ Fix: Verify host/port, check firewall, ensure TLS/SSL settings match server
```
### Async Error Handling
For async sending, errors are logged automatically:
```go
email.SendAsync(config, message)
// Errors logged as:
// [WARN] Failed to send email asynchronously recipients=user@example.com error=...
```
## Dependencies
### Required
- **Go 1.25+**
- Standard library: `net/smtp`, `crypto/tls`
- `github.com/pkg/errors` - Error wrapping with context
### No External SMTP Libraries
This plugin uses Go's standard `net/smtp` library for maximum compatibility and minimal dependencies.
## API Reference
### Types
#### `Config`
```go
type Config struct {
SMTPHost string // SMTP server hostname
SMTPPort int // SMTP server port
SMTPUsername string // SMTP auth username
SMTPPassword string // SMTP auth password
FromEmail string // From email address
FromName string // From display name (optional)
UseTLS bool // Enable STARTTLS (port 587)
UseSSL bool // Enable SSL/TLS (port 465)
}
```
#### `Message`
```go
type Message struct {
To []string // Recipients
Cc []string // CC recipients (optional)
Bcc []string // BCC recipients (optional)
Subject string // Email subject
Body string // Email body (plain text or HTML)
IsHTML bool // true for HTML, false for plain text
ReplyTo string // Reply-To address (optional)
}
```
### Functions
#### `Send(config *Config, message *Message) error`
Sends an email synchronously. Blocks until email is sent or error occurs.
#### `SendAsync(config *Config, message *Message)`
Sends an email asynchronously in a goroutine. Returns immediately. Errors are logged.
#### `NewClient(config *Config) *Client`
Creates a new SMTP client for advanced usage.
#### `Client.Send(message *Message) error`
Sends email using the client's configuration.
## Architecture
```
plugin/email/
├── config.go # SMTP configuration types
├── message.go # Email message types and formatting
├── client.go # SMTP client implementation
├── email.go # High-level Send/SendAsync API
├── doc.go # Package documentation
└── *_test.go # Unit tests
```
## License
Part of the Memos project. See main repository for license details.
## Contributing
This plugin follows the Memos contribution guidelines. Please ensure:
1. All code is tested (TDD approach)
2. Tests pass: `go test ./plugin/email/... -v`
3. Code is formatted: `go fmt ./plugin/email/...`
4. No linting errors: `golangci-lint run ./plugin/email/...`
## Support
For issues and questions:
- Memos GitHub Issues: https://github.com/usememos/memos/issues
- Memos Documentation: https://usememos.com/docs
## Roadmap
Future enhancements may include:
- Email template system
- Attachment support
- Inline image embedding
- Email queuing system
- Delivery status tracking
- Bounce handling

143
plugin/email/client.go Normal file
View File

@@ -0,0 +1,143 @@
package email
import (
"crypto/tls"
"net/smtp"
"github.com/pkg/errors"
)
// Client represents an SMTP email client.
// It is a thin wrapper around a *Config; all validation and connection
// work is deferred until Send is called.
type Client struct {
	config *Config // SMTP server settings and sender identity; may be nil until validated
}
// NewClient creates a new email client with the given configuration.
// The config is not validated here — Send validates it on every call,
// so a nil or incomplete config surfaces as a Send error.
func NewClient(config *Config) *Client {
	return &Client{
		config: config,
	}
}
// validateConfig validates the client configuration.
// A nil config is rejected outright; otherwise the check is delegated
// to Config.Validate.
func (c *Client) validateConfig() error {
	if c.config != nil {
		return c.config.Validate()
	}
	return errors.New("email configuration is required")
}
// createAuth creates an SMTP auth mechanism if credentials are provided.
// When both username and password are blank, nil is returned and the
// connection proceeds unauthenticated.
func (c *Client) createAuth() smtp.Auth {
	hasCredentials := c.config.SMTPUsername != "" || c.config.SMTPPassword != ""
	if !hasCredentials {
		return nil
	}
	return smtp.PlainAuth("", c.config.SMTPUsername, c.config.SMTPPassword, c.config.SMTPHost)
}
// createTLSConfig creates a TLS configuration for secure connections.
// ServerName enables certificate hostname verification against the
// configured SMTP host; TLS 1.2 is the enforced minimum protocol version.
func (c *Client) createTLSConfig() *tls.Config {
	return &tls.Config{
		ServerName: c.config.SMTPHost,
		MinVersion: tls.VersionTLS12,
	}
}
// Send sends an email message via SMTP.
//
// The client configuration and the message are validated first; the
// message is then rendered to an RFC 5322 payload and dispatched over
// the implicit-SSL transport or the STARTTLS/plaintext transport,
// depending on the configuration.
func (c *Client) Send(message *Message) error {
	if err := c.validateConfig(); err != nil {
		return errors.Wrap(err, "invalid email configuration")
	}
	if message == nil {
		return errors.New("message is required")
	}
	if err := message.Validate(); err != nil {
		return errors.Wrap(err, "invalid email message")
	}
	// Render the RFC 5322 payload and collect the envelope recipients.
	payload := message.Format(c.config.FromEmail, c.config.FromName)
	rcpts := message.GetAllRecipients()
	authMech := c.createAuth()
	if c.config.UseSSL {
		return c.sendWithSSL(authMech, rcpts, payload)
	}
	return c.sendWithTLS(authMech, rcpts, payload)
}
// sendWithTLS sends email using STARTTLS (port 587) or plaintext.
//
// smtp.SendMail opportunistically upgrades the connection via STARTTLS
// whenever the server advertises support for it, so the UseTLS and
// plaintext paths are the same call. The previous implementation
// branched on c.config.UseTLS with two byte-identical SendMail calls;
// that dead branch is collapsed here with no behavior change.
func (c *Client) sendWithTLS(auth smtp.Auth, recipients []string, body string) error {
	serverAddr := c.config.GetServerAddress()
	return smtp.SendMail(serverAddr, auth, c.config.FromEmail, recipients, []byte(body))
}
// sendWithSSL sends email using SSL/TLS (port 465, implicit TLS).
// Unlike STARTTLS, the TLS handshake happens before any SMTP traffic,
// so the SMTP conversation (AUTH, MAIL FROM, RCPT TO, DATA) must be
// driven manually over the already-encrypted connection.
func (c *Client) sendWithSSL(auth smtp.Auth, recipients []string, body string) error {
	serverAddr := c.config.GetServerAddress()
	// Dial with TLS first; the handshake completes before the SMTP greeting.
	tlsConfig := c.createTLSConfig()
	conn, err := tls.Dial("tcp", serverAddr, tlsConfig)
	if err != nil {
		return errors.Wrapf(err, "failed to connect to SMTP server with SSL: %s", serverAddr)
	}
	defer conn.Close()
	// Layer the SMTP protocol over the encrypted connection.
	client, err := smtp.NewClient(conn, c.config.SMTPHost)
	if err != nil {
		return errors.Wrap(err, "failed to create SMTP client")
	}
	// Defers run LIFO: Quit (polite SMTP shutdown) fires before conn.Close.
	// The Quit error is deliberately ignored — by then the message outcome
	// is already decided.
	defer client.Quit()
	// Authenticate only when credentials were configured (auth may be nil).
	if auth != nil {
		if err := client.Auth(auth); err != nil {
			return errors.Wrap(err, "SMTP authentication failed")
		}
	}
	// MAIL FROM: the envelope sender.
	if err := client.Mail(c.config.FromEmail); err != nil {
		return errors.Wrap(err, "failed to set sender")
	}
	// RCPT TO: one command per envelope recipient (includes Bcc).
	for _, recipient := range recipients {
		if err := client.Rcpt(recipient); err != nil {
			return errors.Wrapf(err, "failed to set recipient: %s", recipient)
		}
	}
	// DATA: stream the formatted message.
	writer, err := client.Data()
	if err != nil {
		return errors.Wrap(err, "failed to send DATA command")
	}
	if _, err := writer.Write([]byte(body)); err != nil {
		return errors.Wrap(err, "failed to write message body")
	}
	// Closing the writer terminates DATA and waits for the server's reply.
	if err := writer.Close(); err != nil {
		return errors.Wrap(err, "failed to close message writer")
	}
	return nil
}

121
plugin/email/client_test.go Normal file
View File

@@ -0,0 +1,121 @@
package email
import (
"testing"
"github.com/stretchr/testify/assert"
)
// TestNewClient confirms the constructor stores the provided config as-is.
func TestNewClient(t *testing.T) {
	cfg := &Config{
		SMTPHost:     "smtp.example.com",
		SMTPPort:     587,
		SMTPUsername: "user@example.com",
		SMTPPassword: "password",
		FromEmail:    "noreply@example.com",
		FromName:     "Test App",
		UseTLS:       true,
	}
	c := NewClient(cfg)
	assert.NotNil(t, c)
	assert.Equal(t, cfg, c.config)
}
// TestClientValidateConfig exercises validateConfig for valid, nil, and
// incomplete configurations.
func TestClientValidateConfig(t *testing.T) {
	cases := []struct {
		name    string
		config  *Config
		wantErr bool
	}{
		{
			name: "valid config",
			config: &Config{
				SMTPHost:  "smtp.example.com",
				SMTPPort:  587,
				FromEmail: "test@example.com",
			},
		},
		{
			name:    "nil config",
			config:  nil,
			wantErr: true,
		},
		{
			name: "invalid config",
			config: &Config{
				SMTPHost: "",
				SMTPPort: 587,
			},
			wantErr: true,
		},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			err := NewClient(tc.config).validateConfig()
			if tc.wantErr {
				assert.Error(t, err)
				return
			}
			assert.NoError(t, err)
		})
	}
}
func TestClientSendValidation(t *testing.T) {
config := &Config{
SMTPHost: "smtp.example.com",
SMTPPort: 587,
FromEmail: "test@example.com",
}
client := NewClient(config)
tests := []struct {
name string
message *Message
wantErr bool
}{
{
name: "valid message",
message: &Message{
To: []string{"recipient@example.com"},
Subject: "Test",
Body: "Test body",
},
wantErr: false, // Will fail on actual send, but passes validation
},
{
name: "nil message",
message: nil,
wantErr: true,
},
{
name: "invalid message",
message: &Message{
To: []string{},
Subject: "Test",
Body: "Test",
},
wantErr: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
err := client.Send(tt.message)
// We expect validation errors for invalid messages
// For valid messages, we'll get connection errors (which is expected in tests)
if tt.wantErr {
assert.Error(t, err)
// Should fail validation before attempting connection
assert.NotContains(t, err.Error(), "dial")
}
// Note: We don't assert NoError for valid messages because
// we don't have a real SMTP server in tests
})
}
}

47
plugin/email/config.go Normal file
View File

@@ -0,0 +1,47 @@
package email
import (
"fmt"
"github.com/pkg/errors"
)
// Config represents the SMTP configuration for email sending.
// These settings should be provided by the self-hosted instance administrator.
type Config struct {
// SMTPHost is the SMTP server hostname (e.g., "smtp.gmail.com")
SMTPHost string
// SMTPPort is the SMTP server port (common: 587 for TLS, 465 for SSL, 25 for unencrypted)
SMTPPort int
// SMTPUsername is the SMTP authentication username (usually the email address)
SMTPUsername string
// SMTPPassword is the SMTP authentication password or app-specific password
SMTPPassword string
// FromEmail is the email address that will appear in the "From" field
FromEmail string
// FromName is the display name that will appear in the "From" field
FromName string
// UseTLS enables STARTTLS encryption (recommended for port 587)
UseTLS bool
// UseSSL enables SSL/TLS encryption (for port 465)
UseSSL bool
}
// Validate checks if the configuration is valid.
func (c *Config) Validate() error {
if c.SMTPHost == "" {
return errors.New("SMTP host is required")
}
if c.SMTPPort <= 0 || c.SMTPPort > 65535 {
return errors.New("SMTP port must be between 1 and 65535")
}
if c.FromEmail == "" {
return errors.New("from email is required")
}
return nil
}
// GetServerAddress returns the SMTP server address in the format "host:port".
func (c *Config) GetServerAddress() string {
return fmt.Sprintf("%s:%d", c.SMTPHost, c.SMTPPort)
}

View File

@@ -0,0 +1,80 @@
package email
import (
"testing"
"github.com/stretchr/testify/assert"
)
// TestConfigValidation covers Validate across complete and incomplete configs.
func TestConfigValidation(t *testing.T) {
	cases := []struct {
		name    string
		config  *Config
		wantErr bool
	}{
		{
			name: "valid config",
			config: &Config{
				SMTPHost:     "smtp.gmail.com",
				SMTPPort:     587,
				SMTPUsername: "user@example.com",
				SMTPPassword: "password",
				FromEmail:    "noreply@example.com",
				FromName:     "Memos",
			},
		},
		{
			name: "missing host",
			config: &Config{
				SMTPPort:     587,
				SMTPUsername: "user@example.com",
				SMTPPassword: "password",
				FromEmail:    "noreply@example.com",
			},
			wantErr: true,
		},
		{
			name: "invalid port",
			config: &Config{
				SMTPHost:     "smtp.gmail.com",
				SMTPPort:     0,
				SMTPUsername: "user@example.com",
				SMTPPassword: "password",
				FromEmail:    "noreply@example.com",
			},
			wantErr: true,
		},
		{
			name: "missing from email",
			config: &Config{
				SMTPHost:     "smtp.gmail.com",
				SMTPPort:     587,
				SMTPUsername: "user@example.com",
				SMTPPassword: "password",
			},
			wantErr: true,
		},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			err := tc.config.Validate()
			if tc.wantErr {
				assert.Error(t, err)
				return
			}
			assert.NoError(t, err)
		})
	}
}
// TestConfigGetServerAddress verifies "host:port" formatting.
func TestConfigGetServerAddress(t *testing.T) {
	cfg := &Config{SMTPHost: "smtp.gmail.com", SMTPPort: 587}
	assert.Equal(t, "smtp.gmail.com:587", cfg.GetServerAddress())
}

98
plugin/email/doc.go Normal file
View File

@@ -0,0 +1,98 @@
// Package email provides SMTP email sending functionality for self-hosted Memos instances.
//
// This package is designed for self-hosted environments where instance administrators
// configure their own SMTP servers. It follows industry-standard patterns used by
// platforms like GitHub, GitLab, and Discourse.
//
// # Configuration
//
// The package requires SMTP server configuration provided by the instance administrator:
//
// config := &email.Config{
// SMTPHost: "smtp.gmail.com",
// SMTPPort: 587,
// SMTPUsername: "your-email@gmail.com",
// SMTPPassword: "your-app-password",
// FromEmail: "noreply@yourdomain.com",
// FromName: "Memos Notifications",
// UseTLS: true,
// }
//
// # Common SMTP Settings
//
// Gmail (requires App Password):
// - Host: smtp.gmail.com
// - Port: 587 (TLS) or 465 (SSL)
// - Username: your-email@gmail.com
// - UseTLS: true (for port 587) or UseSSL: true (for port 465)
//
// SendGrid:
// - Host: smtp.sendgrid.net
// - Port: 587
// - Username: apikey
// - Password: your-sendgrid-api-key
// - UseTLS: true
//
// AWS SES:
// - Host: email-smtp.[region].amazonaws.com
// - Port: 587
// - Username: your-smtp-username
// - Password: your-smtp-password
// - UseTLS: true
//
// Mailgun:
// - Host: smtp.mailgun.org
// - Port: 587
// - Username: your-mailgun-smtp-username
// - Password: your-mailgun-smtp-password
// - UseTLS: true
//
// # Sending Email
//
// Synchronous (waits for completion):
//
// message := &email.Message{
// To: []string{"user@example.com"},
// Subject: "Welcome to Memos",
// Body: "Thank you for joining!",
// IsHTML: false,
// }
//
// err := email.Send(config, message)
// if err != nil {
// // Handle error
// }
//
// Asynchronous (returns immediately):
//
// email.SendAsync(config, message)
// // Errors are logged but not returned
//
// # HTML Email
//
// message := &email.Message{
// To: []string{"user@example.com"},
// Subject: "Welcome!",
// Body: "<html><body><h1>Welcome to Memos!</h1></body></html>",
// IsHTML: true,
// }
//
// # Security Considerations
//
// - Always use TLS (port 587) or SSL (port 465) for production
// - Store SMTP credentials securely (environment variables or secrets management)
// - Use app-specific passwords for services like Gmail
// - Validate and sanitize email content to prevent injection attacks
// - Rate limit email sending to prevent abuse
//
// # Error Handling
//
// The package returns descriptive errors for common issues:
// - Configuration validation errors (missing host, invalid port, etc.)
// - Message validation errors (missing recipients, subject, or body)
// - Connection errors (cannot reach SMTP server)
// - Authentication errors (invalid credentials)
// - SMTP protocol errors (recipient rejected, etc.)
//
// All errors are wrapped with context using github.com/pkg/errors for better debugging.
package email

43
plugin/email/email.go Normal file
View File

@@ -0,0 +1,43 @@
package email
import (
"log/slog"
"github.com/pkg/errors"
)
// Send sends an email synchronously.
// Both arguments must be non-nil; the heavy lifting (config/message
// validation and SMTP delivery) is delegated to Client.Send.
// Returns an error if the email fails to send.
func Send(config *Config, message *Message) error {
	switch {
	case config == nil:
		return errors.New("email configuration is required")
	case message == nil:
		return errors.New("email message is required")
	}
	return NewClient(config).Send(message)
}
// SendAsync sends an email asynchronously.
// It spawns a goroutine for the delivery and returns immediately; a
// failure is reported only via a warning log entry, never to the caller.
func SendAsync(config *Config, message *Message) {
	go func() {
		err := Send(config, message)
		if err == nil {
			return
		}
		// Summarize recipients for the log line: first address, with a
		// suffix when more than one recipient was targeted.
		var recipients string
		if message != nil && len(message.To) > 0 {
			recipients = message.To[0]
			if len(message.To) > 1 {
				recipients += " and others"
			}
		}
		slog.Warn("Failed to send email asynchronously",
			slog.String("recipients", recipients),
			slog.Any("error", err))
	}()
}

127
plugin/email/email_test.go Normal file
View File

@@ -0,0 +1,127 @@
package email
import (
"testing"
"time"
"github.com/stretchr/testify/assert"
"golang.org/x/sync/errgroup"
)
// TestSend drives Send against an unreachable server: the inputs must
// pass validation, and the resulting failure must be a connection
// ("dial") error rather than a validation error.
func TestSend(t *testing.T) {
	cfg := &Config{
		SMTPHost:  "smtp.example.com",
		SMTPPort:  587,
		FromEmail: "test@example.com",
	}
	msg := &Message{
		To:      []string{"recipient@example.com"},
		Subject: "Test",
		Body:    "Test body",
	}
	err := Send(cfg, msg)
	assert.Error(t, err)
	assert.Contains(t, err.Error(), "dial")
}
// TestSendValidation confirms Send fails fast with descriptive errors
// for nil or invalid inputs.
func TestSendValidation(t *testing.T) {
	cases := []struct {
		name    string
		config  *Config
		message *Message
		wantErr bool
		errMsg  string
	}{
		{
			name:    "nil config",
			config:  nil,
			message: &Message{To: []string{"test@example.com"}, Subject: "Test", Body: "Test"},
			wantErr: true,
			errMsg:  "configuration is required",
		},
		{
			name:    "nil message",
			config:  &Config{SMTPHost: "smtp.example.com", SMTPPort: 587, FromEmail: "from@example.com"},
			message: nil,
			wantErr: true,
			errMsg:  "message is required",
		},
		{
			name:    "invalid config",
			config:  &Config{SMTPHost: "", SMTPPort: 587},
			message: &Message{To: []string{"test@example.com"}, Subject: "Test", Body: "Test"},
			wantErr: true,
			errMsg:  "invalid email configuration",
		},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			err := Send(tc.config, tc.message)
			if !tc.wantErr {
				return
			}
			assert.Error(t, err)
			assert.Contains(t, err.Error(), tc.errMsg)
		})
	}
}
// TestSendAsync verifies that SendAsync returns without blocking on the
// (failing) network send happening in its goroutine.
func TestSendAsync(t *testing.T) {
	cfg := &Config{
		SMTPHost:  "smtp.example.com",
		SMTPPort:  587,
		FromEmail: "test@example.com",
	}
	msg := &Message{
		To:      []string{"recipient@example.com"},
		Subject: "Test Async",
		Body:    "Test async body",
	}
	started := time.Now()
	SendAsync(cfg, msg)
	elapsed := time.Since(started)
	// Should return almost immediately (< 100ms).
	assert.Less(t, elapsed, 100*time.Millisecond)
	// Give the goroutine time to start.
	time.Sleep(50 * time.Millisecond)
}
// TestSendAsyncConcurrent fires several SendAsync calls in parallel to
// surface data races (meaningful when run with -race).
func TestSendAsyncConcurrent(t *testing.T) {
	cfg := &Config{
		SMTPHost:  "smtp.example.com",
		SMTPPort:  587,
		FromEmail: "test@example.com",
	}
	var g errgroup.Group
	const count = 5
	for i := 0; i < count; i++ {
		g.Go(func() error {
			SendAsync(cfg, &Message{
				To:      []string{"recipient@example.com"},
				Subject: "Concurrent Test",
				Body:    "Test body",
			})
			return nil
		})
	}
	if err := g.Wait(); err != nil {
		t.Fatalf("SendAsync calls failed: %v", err)
	}
}

91
plugin/email/message.go Normal file
View File

@@ -0,0 +1,91 @@
package email
import (
"errors"
"fmt"
"strings"
"time"
)
// Message represents an email message to be sent.
type Message struct {
To []string // Required: recipient email addresses
Cc []string // Optional: carbon copy recipients
Bcc []string // Optional: blind carbon copy recipients
Subject string // Required: email subject
Body string // Required: email body content
IsHTML bool // Whether the body is HTML (default: false for plain text)
ReplyTo string // Optional: reply-to address
}
// Validate checks that the message has all required fields.
func (m *Message) Validate() error {
if len(m.To) == 0 {
return errors.New("at least one recipient is required")
}
if m.Subject == "" {
return errors.New("subject is required")
}
if m.Body == "" {
return errors.New("body is required")
}
return nil
}
// Format creates an RFC 5322 formatted email message.
func (m *Message) Format(fromEmail, fromName string) string {
var sb strings.Builder
// From header
if fromName != "" {
sb.WriteString(fmt.Sprintf("From: %s <%s>\r\n", fromName, fromEmail))
} else {
sb.WriteString(fmt.Sprintf("From: %s\r\n", fromEmail))
}
// To header
sb.WriteString(fmt.Sprintf("To: %s\r\n", strings.Join(m.To, ", ")))
// Cc header (optional)
if len(m.Cc) > 0 {
sb.WriteString(fmt.Sprintf("Cc: %s\r\n", strings.Join(m.Cc, ", ")))
}
// Reply-To header (optional)
if m.ReplyTo != "" {
sb.WriteString(fmt.Sprintf("Reply-To: %s\r\n", m.ReplyTo))
}
// Subject header
sb.WriteString(fmt.Sprintf("Subject: %s\r\n", m.Subject))
// Date header (RFC 5322 format)
sb.WriteString(fmt.Sprintf("Date: %s\r\n", time.Now().Format(time.RFC1123Z)))
// MIME headers
sb.WriteString("MIME-Version: 1.0\r\n")
// Content-Type header
if m.IsHTML {
sb.WriteString("Content-Type: text/html; charset=utf-8\r\n")
} else {
sb.WriteString("Content-Type: text/plain; charset=utf-8\r\n")
}
// Empty line separating headers from body
sb.WriteString("\r\n")
// Body
sb.WriteString(m.Body)
return sb.String()
}
// GetAllRecipients returns all recipients (To, Cc, Bcc) as a single slice.
func (m *Message) GetAllRecipients() []string {
var recipients []string
recipients = append(recipients, m.To...)
recipients = append(recipients, m.Cc...)
recipients = append(recipients, m.Bcc...)
return recipients
}

View File

@@ -0,0 +1,181 @@
package email
import (
"strings"
"testing"
)
// TestMessageValidation exercises Validate across missing-field combinations.
func TestMessageValidation(t *testing.T) {
	cases := []struct {
		name    string
		msg     Message
		wantErr bool
	}{
		{
			name:    "valid message",
			msg:     Message{To: []string{"user@example.com"}, Subject: "Test Subject", Body: "Test Body"},
			wantErr: false,
		},
		{
			name:    "no recipients",
			msg:     Message{To: []string{}, Subject: "Test Subject", Body: "Test Body"},
			wantErr: true,
		},
		{
			name:    "no subject",
			msg:     Message{To: []string{"user@example.com"}, Subject: "", Body: "Test Body"},
			wantErr: true,
		},
		{
			name:    "no body",
			msg:     Message{To: []string{"user@example.com"}, Subject: "Test Subject", Body: ""},
			wantErr: true,
		},
		{
			name: "multiple recipients",
			msg: Message{
				To:      []string{"user1@example.com", "user2@example.com"},
				Cc:      []string{"cc@example.com"},
				Subject: "Test Subject",
				Body:    "Test Body",
			},
			wantErr: false,
		},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			err := tc.msg.Validate()
			if (err != nil) != tc.wantErr {
				t.Errorf("Validate() error = %v, wantErr %v", err, tc.wantErr)
			}
		})
	}
}
// TestMessageFormatPlainText verifies the headers and body of a plain-text message.
func TestMessageFormatPlainText(t *testing.T) {
	msg := Message{
		To:      []string{"user@example.com"},
		Subject: "Test Subject",
		Body:    "Test Body",
		IsHTML:  false,
	}
	out := msg.Format("sender@example.com", "Sender Name")
	// Each required fragment must appear somewhere in the formatted message.
	checks := map[string]string{
		"From: Sender Name <sender@example.com>":  "Missing or incorrect From header",
		"To: user@example.com":                    "Missing or incorrect To header",
		"Subject: Test Subject":                   "Missing or incorrect Subject header",
		"Content-Type: text/plain; charset=utf-8": "Missing or incorrect Content-Type header for plain text",
		"Test Body":                               "Missing message body",
	}
	for fragment, failure := range checks {
		if !strings.Contains(out, fragment) {
			t.Error(failure)
		}
	}
}
// TestMessageFormatHTML verifies that IsHTML switches the Content-Type and
// that the HTML body is carried through untouched.
func TestMessageFormatHTML(t *testing.T) {
	body := "<html><body>Test Body</body></html>"
	msg := Message{
		To:      []string{"user@example.com"},
		Subject: "Test Subject",
		Body:    body,
		IsHTML:  true,
	}
	out := msg.Format("sender@example.com", "Sender Name")
	if !strings.Contains(out, "Content-Type: text/html; charset=utf-8") {
		t.Error("Missing or incorrect Content-Type header for HTML")
	}
	if !strings.Contains(out, body) {
		t.Error("Missing HTML body")
	}
}
// TestMessageFormatMultipleRecipients checks header formatting for multiple
// To/Cc addresses, the Reply-To header, and that Bcc is never emitted.
func TestMessageFormatMultipleRecipients(t *testing.T) {
	msg := Message{
		To:      []string{"user1@example.com", "user2@example.com"},
		Cc:      []string{"cc1@example.com", "cc2@example.com"},
		Bcc:     []string{"bcc@example.com"},
		Subject: "Test Subject",
		Body:    "Test Body",
		ReplyTo: "reply@example.com",
	}
	out := msg.Format("sender@example.com", "Sender Name")
	mustContain := func(fragment, failure string) {
		if !strings.Contains(out, fragment) {
			t.Error(failure)
		}
	}
	mustContain("To: user1@example.com, user2@example.com", "Missing or incorrect To header with multiple recipients")
	mustContain("Cc: cc1@example.com, cc2@example.com", "Missing or incorrect Cc header")
	mustContain("Reply-To: reply@example.com", "Missing or incorrect Reply-To header")
	// Bcc should NOT appear in the formatted message.
	if strings.Contains(out, "Bcc:") {
		t.Error("Bcc header should not appear in formatted message")
	}
}
// TestGetAllRecipients checks that To, Cc, and Bcc are all merged, with no
// unexpected or missing entries.
func TestGetAllRecipients(t *testing.T) {
	msg := Message{
		To:  []string{"user1@example.com", "user2@example.com"},
		Cc:  []string{"cc@example.com"},
		Bcc: []string{"bcc@example.com"},
	}
	got := msg.GetAllRecipients()
	if len(got) != 4 {
		t.Errorf("GetAllRecipients() returned %d recipients, want 4", len(got))
	}
	// Track the remaining expected entries; each recipient must match one and
	// is removed so duplicates would be reported as unexpected.
	remaining := map[string]bool{
		"user1@example.com": true,
		"user2@example.com": true,
		"cc@example.com":    true,
		"bcc@example.com":   true,
	}
	for _, recipient := range got {
		if !remaining[recipient] {
			t.Errorf("Unexpected recipient: %s", recipient)
		}
		delete(remaining, recipient)
	}
	if len(remaining) > 0 {
		t.Error("Not all expected recipients were returned")
	}
}

View File

@@ -0,0 +1,50 @@
# Maintaining the Memo Filter Engine
The engine is memo-specific; any future field or behavior changes must stay
consistent with the memo schema and store implementations. Use this guide when
extending or debugging the package.
## Adding a New Memo Field
1. **Update the schema**
- Add the field entry in `schema.go`.
- Define the backing column (`Column`), JSON path (if applicable), type, and
allowed operators.
- Include the CEL variable in `EnvOptions`.
2. **Adjust parser or renderer (if needed)**
- For non-scalar fields (JSON booleans, lists), add handling in
`parser.go` or extend the renderer helpers.
- Keep validation in the parser (e.g., reject unsupported operators).
3. **Write a golden test**
- Extend the dialect-specific memo filter tests under
`store/db/{sqlite,mysql,postgres}/memo_filter_test.go` with a case that
exercises the new field.
4. **Run `go test ./...`** to ensure the SQL output matches expectations across
all dialects.
## Supporting Dialect Nuances
- Centralize differences inside `render.go`. If a new dialect-specific behavior
emerges (e.g., JSON operators), add the logic there rather than leaking it
into store code.
- Use the renderer helpers (`jsonExtractExpr`, `jsonArrayExpr`, etc.) rather than
sprinkling ad-hoc SQL strings.
- When placeholders change, adjust `addArg` so that argument numbering stays in
sync with store queries.
## Debugging Tips
- **Parser errors** — Most originate in `buildCondition` or schema validation.
Enable logging around `parser.go` when diagnosing unknown identifier/operator
messages.
- **Renderer output** — Temporary printf/log statements in `renderCondition` help
identify which IR node produced unexpected SQL.
- **Store integration** — Ensure drivers call `filter.DefaultEngine()` exactly once
per process; the singleton caches the parsed CEL environment.
## Testing Checklist
- `go test ./store/...` ensures all dialect tests consume the engine correctly.
- Add targeted unit tests whenever new IR nodes or renderer paths are introduced.
- When changing boolean or JSON handling, verify all three dialect test suites
(SQLite, MySQL, Postgres) to avoid regression.

63
plugin/filter/README.md Normal file
View File

@@ -0,0 +1,63 @@
# Memo Filter Engine
This package houses the memo-only filter engine that turns CEL expressions into
SQL fragments. The engine follows a three phase pipeline inspired by systems
such as Calcite or Prisma:
1. **Parsing** — CEL expressions are parsed with `cel-go` and validated against
the memo-specific environment declared in `schema.go`. Only fields that
exist in the schema can surface in the filter.
2. **Normalization** — the raw CEL AST is converted into an intermediate
representation (IR) defined in `ir.go`. The IR is a dialect-agnostic tree of
conditions (logical operators, comparisons, list membership, etc.). This
step enforces schema rules (e.g. operator compatibility, type checks).
3. **Rendering** — the renderer in `render.go` walks the IR and produces a SQL
fragment plus placeholder arguments tailored to a target dialect
(`sqlite`, `mysql`, or `postgres`). Dialect differences such as JSON access,
boolean semantics, placeholders, and `LIKE` vs `ILIKE` are encapsulated in
renderer helpers.
The entry point is `filter.DefaultEngine()` from `engine.go`. It lazily constructs
an `Engine` configured with the memo schema and exposes:
```go
engine, _ := filter.DefaultEngine()
stmt, _ := engine.CompileToStatement(ctx, `has_task_list && visibility == "PUBLIC"`, filter.RenderOptions{
Dialect: filter.DialectPostgres,
})
// stmt.SQL -> "((memo.payload->'property'->>'hasTaskList')::boolean IS TRUE AND memo.visibility = $1)"
// stmt.Args -> ["PUBLIC"]
```
## Core Files
| File | Responsibility |
| ------------- | ------------------------------------------------------------------------------- |
| `schema.go` | Declares memo fields, their types, backing columns, CEL environment options |
| `ir.go` | IR node definitions used across the pipeline |
| `parser.go` | Converts CEL `Expr` into IR while applying schema validation |
| `render.go` | Translates IR into SQL, handling dialect-specific behavior |
| `engine.go` | Glue between the phases; exposes `Compile`, `CompileToStatement`, and `DefaultEngine` |
| `helpers.go` | Convenience helpers for store integration (appending conditions) |
## SQL Generation Notes
- **Placeholders** — `?` is used for SQLite/MySQL, `$n` for Postgres. The renderer
tracks offsets to compose queries with pre-existing arguments.
- **JSON Fields** — Memo metadata lives in `memo.payload`. The renderer handles
`JSON_EXTRACT`/`json_extract`/`->`/`->>` variations and boolean coercion.
- **Tag Operations** — `tag in [...]` and `"tag" in tags` become JSON array
predicates. SQLite uses `LIKE` patterns, MySQL uses `JSON_CONTAINS`, and
Postgres uses `@>`.
- **Boolean Flags** — Fields such as `has_task_list` render as `IS TRUE` equality
checks, or comparisons against `CAST('true' AS JSON)` depending on the dialect.
## Typical Integration
1. Fetch the engine with `filter.DefaultEngine()`.
2. Call `CompileToStatement` using the appropriate dialect enum.
3. Append the emitted SQL fragment/args to the existing `WHERE` clause.
4. Execute the resulting query through the store driver.
The `helpers.AppendConditions` helper encapsulates steps 2–3 when a driver needs
to process an array of filters.

191
plugin/filter/engine.go Normal file
View File

@@ -0,0 +1,191 @@
package filter
import (
"context"
"fmt"
"strings"
"sync"
"github.com/google/cel-go/cel"
"github.com/pkg/errors"
)
// Engine parses CEL filters into a dialect-agnostic condition tree.
// An Engine is immutable once constructed and safe to reuse across requests.
type Engine struct {
	schema Schema // field definitions and validation rules for the filterable entity
	env *cel.Env // CEL environment built from schema.EnvOptions at construction time
}
// NewEngine builds a new Engine for the provided schema.
// It constructs the CEL environment from the schema's EnvOptions, so any
// invalid option surfaces here rather than at compile time.
func NewEngine(schema Schema) (*Engine, error) {
	env, err := cel.NewEnv(schema.EnvOptions...)
	if err != nil {
		return nil, errors.Wrap(err, "failed to create CEL environment")
	}
	engine := &Engine{schema: schema, env: env}
	return engine, nil
}
// Program stores a compiled filter condition.
// It pairs the parsed IR with the schema used to validate it so it can be
// rendered for any SQL dialect later.
type Program struct {
	schema Schema // schema the filter was validated against
	condition Condition // root of the parsed condition tree (IR)
}
// ConditionTree exposes the underlying condition tree.
// The returned Condition is the root of the compiled filter's IR; callers
// must treat it as read-only.
func (p *Program) ConditionTree() Condition {
	return p.condition
}
// Compile parses the filter string into an executable program.
// The filter is first normalized for legacy numeric operands, then compiled
// by CEL, converted to a parsed expression, and finally lowered into the IR.
func (e *Engine) Compile(_ context.Context, filter string) (*Program, error) {
	if strings.TrimSpace(filter) == "" {
		return nil, errors.New("filter expression is empty")
	}
	normalized := normalizeLegacyFilter(filter)
	compiled, issues := e.env.Compile(normalized)
	if issues != nil && issues.Err() != nil {
		return nil, errors.Wrap(issues.Err(), "failed to compile filter")
	}
	parsedExpr, err := cel.AstToParsedExpr(compiled)
	if err != nil {
		return nil, errors.Wrap(err, "failed to convert AST")
	}
	condition, err := buildCondition(parsedExpr.GetExpr(), e.schema)
	if err != nil {
		return nil, err
	}
	return &Program{schema: e.schema, condition: condition}, nil
}
// CompileToStatement compiles and renders the filter in a single step.
// It is a convenience wrapper around Compile followed by Program.Render.
func (e *Engine) CompileToStatement(ctx context.Context, filter string, opts RenderOptions) (Statement, error) {
	prog, err := e.Compile(ctx, filter)
	if err != nil {
		return Statement{}, err
	}
	return prog.Render(opts)
}
// RenderOptions configure SQL rendering.
type RenderOptions struct {
	Dialect DialectName // target SQL dialect (sqlite, mysql, or postgres)
	PlaceholderOffset int // number of placeholders already used by the enclosing query
	DisableNullChecks bool // when true, skip emitting IS NULL guards
}
// Statement contains the rendered SQL fragment and its args.
// SQL is a bare predicate fragment (no leading WHERE); Args holds the
// placeholder values in order.
type Statement struct {
	SQL string
	Args []any
}
// Render converts the program into a dialect-specific SQL fragment.
// A fresh renderer is created per call, so a Program can be rendered
// repeatedly with different options.
func (p *Program) Render(opts RenderOptions) (Statement, error) {
	return newRenderer(p.schema, opts).Render(p.condition)
}
// Lazily-initialized process-wide engine singletons. Each sync.Once guards
// one engine/error pair; see DefaultEngine and DefaultAttachmentEngine.
var (
	defaultOnce sync.Once
	defaultInst *Engine
	defaultErr error
	defaultAttachmentOnce sync.Once
	defaultAttachmentInst *Engine
	defaultAttachmentErr error
)
// DefaultEngine returns the process-wide memo filter engine.
// The engine (or its construction error) is created once and cached; every
// subsequent call returns the same pair.
func DefaultEngine() (*Engine, error) {
	defaultOnce.Do(func() {
		defaultInst, defaultErr = NewEngine(NewSchema())
	})
	return defaultInst, defaultErr
}
// DefaultAttachmentEngine returns the process-wide attachment filter engine.
// Like DefaultEngine, the result is built once and cached for the process.
func DefaultAttachmentEngine() (*Engine, error) {
	defaultAttachmentOnce.Do(func() {
		defaultAttachmentInst, defaultAttachmentErr = NewEngine(NewAttachmentSchema())
	})
	return defaultAttachmentInst, defaultAttachmentErr
}
// normalizeLegacyFilter rewrites legacy filter syntax into valid boolean CEL.
// Older clients could place bare numeric literals after logical operators
// (e.g. `visibility == "PUBLIC" && 1`); CEL requires boolean operands, so
// such literals are rewritten to `(n != 0)`.
func normalizeLegacyFilter(expr string) string {
	expr = rewriteNumericLogicalOperand(expr, "&&")
	expr = rewriteNumericLogicalOperand(expr, "||")
	return expr
}

// rewriteNumericLogicalOperand scans expr and wraps any integer literal that
// immediately follows the given logical operator (outside string literals) in
// a `(literal != 0)` comparison. Content inside single- or double-quoted
// strings is copied verbatim, honoring backslash escapes. A bare sign with no
// digits (e.g. a unary minus on an identifier, as in `a && -b`) is left
// unchanged.
func rewriteNumericLogicalOperand(expr, op string) string {
	var builder strings.Builder
	n := len(expr)
	i := 0
	var inQuote rune // non-zero while inside a quoted string; holds the quote char
	for i < n {
		ch := expr[i]
		if inQuote != 0 {
			builder.WriteByte(ch)
			if ch == '\\' && i+1 < n {
				// Copy the escaped character so an escaped quote does not
				// terminate the string literal.
				builder.WriteByte(expr[i+1])
				i += 2
				continue
			}
			if ch == byte(inQuote) {
				inQuote = 0
			}
			i++
			continue
		}
		if ch == '\'' || ch == '"' {
			inQuote = rune(ch)
			builder.WriteByte(ch)
			i++
			continue
		}
		if strings.HasPrefix(expr[i:], op) {
			builder.WriteString(op)
			i += len(op)
			// Preserve whitespace following the operator.
			wsStart := i
			for i < n && (expr[i] == ' ' || expr[i] == '\t') {
				i++
			}
			builder.WriteString(expr[wsStart:i])
			signStart := i
			if i < n && (expr[i] == '+' || expr[i] == '-') {
				i++
			}
			// BUGFIX: require at least one digit after an optional sign.
			// The previous check (i > signStart) treated a lone sign as a
			// numeric literal, mangling `a && -b` into `a && (- != 0)b`.
			digitStart := i
			for i < n && expr[i] >= '0' && expr[i] <= '9' {
				i++
			}
			if i > digitStart {
				numLiteral := expr[signStart:i]
				builder.WriteString(fmt.Sprintf("(%s != 0)", numLiteral))
			} else {
				// Not a numeric literal; emit whatever was consumed unchanged.
				builder.WriteString(expr[signStart:i])
			}
			continue
		}
		builder.WriteByte(ch)
		i++
	}
	return builder.String()
}

25
plugin/filter/helpers.go Normal file
View File

@@ -0,0 +1,25 @@
package filter
import (
"context"
"fmt"
)
// AppendConditions compiles the provided filters and appends the resulting SQL fragments and args.
// Each filter is rendered with a placeholder offset equal to the args already
// accumulated, keeping numbered placeholders (Postgres) in sync.
func AppendConditions(ctx context.Context, engine *Engine, filters []string, dialect DialectName, where *[]string, args *[]any) error {
	for _, expr := range filters {
		opts := RenderOptions{
			Dialect:           dialect,
			PlaceholderOffset: len(*args),
		}
		stmt, err := engine.CompileToStatement(ctx, expr, opts)
		if err != nil {
			return err
		}
		// Skip filters that render to nothing.
		if stmt.SQL == "" {
			continue
		}
		*where = append(*where, fmt.Sprintf("(%s)", stmt.SQL))
		*args = append(*args, stmt.Args...)
	}
	return nil
}

159
plugin/filter/ir.go Normal file
View File

@@ -0,0 +1,159 @@
package filter
// Condition represents a boolean expression derived from the CEL filter.
// Implementations form a tree that the renderer walks to emit SQL.
type Condition interface {
	isCondition()
}
// LogicalOperator enumerates the supported logical operators.
type LogicalOperator string
const (
	LogicalAnd LogicalOperator = "AND"
	LogicalOr LogicalOperator = "OR"
)
// LogicalCondition composes two conditions with a logical operator.
type LogicalCondition struct {
	Operator LogicalOperator
	Left Condition
	Right Condition
}
func (*LogicalCondition) isCondition() {}
// NotCondition negates a child condition.
type NotCondition struct {
	Expr Condition
}
func (*NotCondition) isCondition() {}
// FieldPredicateCondition asserts that a field evaluates to true.
// Produced when a bare boolean identifier appears in the filter.
type FieldPredicateCondition struct {
	Field string
}
func (*FieldPredicateCondition) isCondition() {}
// ComparisonOperator lists supported comparison operators.
type ComparisonOperator string
const (
	CompareEq ComparisonOperator = "="
	CompareNeq ComparisonOperator = "!="
	CompareLt ComparisonOperator = "<"
	CompareLte ComparisonOperator = "<="
	CompareGt ComparisonOperator = ">"
	CompareGte ComparisonOperator = ">="
)
// ComparisonCondition represents a binary comparison.
type ComparisonCondition struct {
	Left ValueExpr
	Operator ComparisonOperator
	Right ValueExpr
}
func (*ComparisonCondition) isCondition() {}
// InCondition represents an IN predicate with literal list values.
type InCondition struct {
	Left ValueExpr
	Values []ValueExpr
}
func (*InCondition) isCondition() {}
// ElementInCondition represents the CEL syntax `"value" in field`.
type ElementInCondition struct {
	Element ValueExpr
	Field string
}
func (*ElementInCondition) isCondition() {}
// ContainsCondition models the <field>.contains(<value>) call.
type ContainsCondition struct {
	Field string
	Value string
}
func (*ContainsCondition) isCondition() {}
// ConstantCondition captures a literal boolean outcome.
// Used when the filter folds to a constant (e.g. a bare `true`).
type ConstantCondition struct {
	Value bool
}
func (*ConstantCondition) isCondition() {}
// ValueExpr models arithmetic or scalar expressions whose result feeds a comparison.
type ValueExpr interface {
	isValueExpr()
}
// FieldRef references a named schema field.
type FieldRef struct {
	Name string
}
func (*FieldRef) isValueExpr() {}
// LiteralValue holds a literal scalar.
// Value is one of the Go types produced by constant folding (string, int64,
// float64, bool, or nil).
type LiteralValue struct {
	Value interface{}
}
func (*LiteralValue) isValueExpr() {}
// FunctionValue captures simple function calls like size(tags).
type FunctionValue struct {
	Name string
	Args []ValueExpr
}
func (*FunctionValue) isValueExpr() {}
// ListComprehensionCondition represents CEL macros like exists(), all(), filter().
// Only exists() is currently accepted by the parser.
type ListComprehensionCondition struct {
	Kind ComprehensionKind
	Field string // The list field to iterate over (e.g., "tags")
	IterVar string // The iteration variable name (e.g., "t")
	Predicate PredicateExpr // The predicate to evaluate for each element
}
func (*ListComprehensionCondition) isCondition() {}
// ComprehensionKind enumerates the types of list comprehensions.
type ComprehensionKind string
const (
	ComprehensionExists ComprehensionKind = "exists"
)
// PredicateExpr represents predicates used in comprehensions.
type PredicateExpr interface {
	isPredicateExpr()
}
// StartsWithPredicate represents t.startsWith("prefix").
type StartsWithPredicate struct {
	Prefix string
}
func (*StartsWithPredicate) isPredicateExpr() {}
// EndsWithPredicate represents t.endsWith("suffix").
type EndsWithPredicate struct {
	Suffix string
}
func (*EndsWithPredicate) isPredicateExpr() {}
// ContainsPredicate represents t.contains("substring").
type ContainsPredicate struct {
	Substring string
}
func (*ContainsPredicate) isPredicateExpr() {}

586
plugin/filter/parser.go Normal file
View File

@@ -0,0 +1,586 @@
package filter
import (
"time"
"github.com/pkg/errors"
exprv1 "google.golang.org/genproto/googleapis/api/expr/v1alpha1"
)
// buildCondition converts a parsed CEL expression into an IR Condition tree,
// validating every referenced identifier against the schema.
func buildCondition(expr *exprv1.Expr, schema Schema) (Condition, error) {
	switch v := expr.ExprKind.(type) {
	case *exprv1.Expr_CallExpr:
		return buildCallCondition(v.CallExpr, schema)
	case *exprv1.Expr_ConstExpr:
		// A literal filter: booleans pass through; numbers use C-style truthiness.
		val, err := getConstValue(expr)
		if err != nil {
			return nil, err
		}
		switch v := val.(type) {
		case bool:
			return &ConstantCondition{Value: v}, nil
		case int64:
			return &ConstantCondition{Value: v != 0}, nil
		case float64:
			return &ConstantCondition{Value: v != 0}, nil
		default:
			return nil, errors.New("filter must evaluate to a boolean value")
		}
	case *exprv1.Expr_IdentExpr:
		// A bare identifier is only valid as a boolean field predicate.
		name := v.IdentExpr.GetName()
		field, ok := schema.Field(name)
		if !ok {
			return nil, errors.Errorf("unknown identifier %q", name)
		}
		if field.Type != FieldTypeBool {
			return nil, errors.Errorf("identifier %q is not boolean", name)
		}
		return &FieldPredicateCondition{Field: name}, nil
	case *exprv1.Expr_ComprehensionExpr:
		// Macro expansions such as tags.exists(t, ...).
		return buildComprehensionCondition(v.ComprehensionExpr, schema)
	default:
		return nil, errors.New("unsupported top-level expression")
	}
}
// buildCallCondition lowers a CEL call expression (logical operators,
// comparisons, membership tests, contains) into the IR.
func buildCallCondition(call *exprv1.Expr_Call, schema Schema) (Condition, error) {
	switch call.Function {
	case "_&&_", "_||_":
		// Both logical operators share the same shape; pick the operator and
		// the error wording up front.
		op, opName := LogicalAnd, "AND"
		if call.Function == "_||_" {
			op, opName = LogicalOr, "OR"
		}
		if len(call.Args) != 2 {
			return nil, errors.Errorf("logical %s expects two arguments", opName)
		}
		left, err := buildCondition(call.Args[0], schema)
		if err != nil {
			return nil, err
		}
		right, err := buildCondition(call.Args[1], schema)
		if err != nil {
			return nil, err
		}
		return &LogicalCondition{Operator: op, Left: left, Right: right}, nil
	case "!_":
		if len(call.Args) != 1 {
			return nil, errors.New("logical NOT expects one argument")
		}
		child, err := buildCondition(call.Args[0], schema)
		if err != nil {
			return nil, err
		}
		return &NotCondition{Expr: child}, nil
	case "_==_", "_!=_", "_<_", "_>_", "_<=_", "_>=_":
		return buildComparisonCondition(call, schema)
	case "@in":
		return buildInCondition(call, schema)
	case "contains":
		return buildContainsCondition(call, schema)
	default:
		// Last resort: the call may fold to a constant boolean (e.g. !true).
		val, ok, err := evaluateBool(call)
		if err != nil {
			return nil, err
		}
		if ok {
			return &ConstantCondition{Value: val}, nil
		}
		return nil, errors.Errorf("unsupported call expression %q", call.Function)
	}
}
// buildComparisonCondition lowers a binary comparison, enforcing the schema's
// per-field allowed-operator list when the left side is a field reference.
func buildComparisonCondition(call *exprv1.Expr_Call, schema Schema) (Condition, error) {
	if len(call.Args) != 2 {
		return nil, errors.New("comparison expects two arguments")
	}
	op, err := toComparisonOperator(call.Function)
	if err != nil {
		return nil, err
	}
	left, err := buildValueExpr(call.Args[0], schema)
	if err != nil {
		return nil, err
	}
	right, err := buildValueExpr(call.Args[1], schema)
	if err != nil {
		return nil, err
	}
	// If the left side is a field, validate allowed operators.
	if field, ok := left.(*FieldRef); ok {
		def, exists := schema.Field(field.Name)
		if !exists {
			return nil, errors.Errorf("unknown identifier %q", field.Name)
		}
		// Aliases resolve to their backing field before operator checks.
		if def.Kind == FieldKindVirtualAlias {
			def, exists = schema.ResolveAlias(field.Name)
			if !exists {
				return nil, errors.Errorf("invalid alias %q", field.Name)
			}
		}
		if def.AllowedComparisonOps != nil {
			if _, allowed := def.AllowedComparisonOps[op]; !allowed {
				return nil, errors.Errorf("operator %s not allowed for field %q", op, field.Name)
			}
		}
	}
	return &ComparisonCondition{
		Left: left,
		Operator: op,
		Right: right,
	}, nil
}
// buildInCondition lowers the two CEL membership forms:
//   - `field in [a, b, ...]` -> InCondition
//   - `"value" in field`     -> ElementInCondition (list-typed fields)
func buildInCondition(call *exprv1.Expr_Call, schema Schema) (Condition, error) {
	if len(call.Args) != 2 {
		return nil, errors.New("in operator expects two arguments")
	}
	// Handle identifier in list syntax.
	if identName, err := getIdentName(call.Args[0]); err == nil {
		if field, ok := schema.Field(identName); ok && field.Kind == FieldKindVirtualAlias {
			if _, aliasOk := schema.ResolveAlias(identName); !aliasOk {
				return nil, errors.Errorf("invalid alias %q", identName)
			}
		} else if !ok {
			return nil, errors.Errorf("unknown identifier %q", identName)
		}
		if listExpr := call.Args[1].GetListExpr(); listExpr != nil {
			values := make([]ValueExpr, 0, len(listExpr.Elements))
			for _, element := range listExpr.Elements {
				value, err := buildValueExpr(element, schema)
				if err != nil {
					return nil, err
				}
				values = append(values, value)
			}
			return &InCondition{
				Left: &FieldRef{Name: identName},
				Values: values,
			}, nil
		}
	}
	// Handle "value in identifier" syntax.
	if identName, err := getIdentName(call.Args[1]); err == nil {
		if _, ok := schema.Field(identName); !ok {
			return nil, errors.Errorf("unknown identifier %q", identName)
		}
		element, err := buildValueExpr(call.Args[0], schema)
		if err != nil {
			return nil, err
		}
		return &ElementInCondition{
			Element: element,
			Field: identName,
		}, nil
	}
	return nil, errors.New("invalid use of in operator")
}
// buildContainsCondition lowers <field>.contains("literal") into a
// ContainsCondition, validating that the field opts into contains().
func buildContainsCondition(call *exprv1.Expr_Call, schema Schema) (Condition, error) {
	if call.Target == nil {
		return nil, errors.New("contains requires a target")
	}
	fieldName, err := getIdentName(call.Target)
	if err != nil {
		return nil, err
	}
	def, known := schema.Field(fieldName)
	if !known {
		return nil, errors.Errorf("unknown identifier %q", fieldName)
	}
	if !def.SupportsContains {
		return nil, errors.Errorf("identifier %q does not support contains()", fieldName)
	}
	if len(call.Args) != 1 {
		return nil, errors.New("contains expects exactly one argument")
	}
	// Only literal string arguments are supported; anything else is rejected.
	raw, err := getConstValue(call.Args[0])
	if err != nil {
		return nil, errors.Wrap(err, "contains only supports literal arguments")
	}
	substr, isString := raw.(string)
	if !isString {
		return nil, errors.New("contains argument must be a string")
	}
	return &ContainsCondition{Field: fieldName, Value: substr}, nil
}
// buildValueExpr lowers an expression that produces a scalar value for use in
// a comparison. Attempts are ordered: field reference, literal, constant-folded
// arithmetic, constant-folded boolean, then supported function calls.
func buildValueExpr(expr *exprv1.Expr, schema Schema) (ValueExpr, error) {
	if identName, err := getIdentName(expr); err == nil {
		if _, ok := schema.Field(identName); !ok {
			return nil, errors.Errorf("unknown identifier %q", identName)
		}
		return &FieldRef{Name: identName}, nil
	}
	if literal, err := getConstValue(expr); err == nil {
		return &LiteralValue{Value: literal}, nil
	}
	// Arithmetic over literals (and now()) folds to a single number here.
	if value, ok, err := evaluateNumeric(expr); err != nil {
		return nil, err
	} else if ok {
		return &LiteralValue{Value: value}, nil
	}
	if boolVal, ok, err := evaluateBoolExpr(expr); err != nil {
		return nil, err
	} else if ok {
		return &LiteralValue{Value: boolVal}, nil
	}
	if call := expr.GetCallExpr(); call != nil {
		switch call.Function {
		case "size":
			if len(call.Args) != 1 {
				return nil, errors.New("size() expects one argument")
			}
			arg, err := buildValueExpr(call.Args[0], schema)
			if err != nil {
				return nil, err
			}
			return &FunctionValue{
				Name: "size",
				Args: []ValueExpr{arg},
			}, nil
		case "now":
			// now() is evaluated at compile time, not deferred to SQL.
			return &LiteralValue{Value: timeNowUnix()}, nil
		case "_+_", "_-_", "_*_":
			value, ok, err := evaluateNumeric(expr)
			if err != nil {
				return nil, err
			}
			if ok {
				return &LiteralValue{Value: value}, nil
			}
		default:
			// Fall through to error return below
		}
	}
	return nil, errors.New("unsupported value expression")
}
// toComparisonOperator maps a CEL comparison function name to its IR operator.
func toComparisonOperator(fn string) (ComparisonOperator, error) {
	ops := map[string]ComparisonOperator{
		"_==_": CompareEq,
		"_!=_": CompareNeq,
		"_<_":  CompareLt,
		"_>_":  CompareGt,
		"_<=_": CompareLte,
		"_>=_": CompareGte,
	}
	if op, ok := ops[fn]; ok {
		return op, nil
	}
	return "", errors.Errorf("unsupported comparison operator %q", fn)
}
// getIdentName returns the name of an identifier expression, or an error if
// the expression is not an identifier.
func getIdentName(expr *exprv1.Expr) (string, error) {
	ident := expr.GetIdentExpr()
	if ident == nil {
		return "", errors.New("expression is not an identifier")
	}
	return ident.GetName(), nil
}
// getConstValue extracts a literal's Go value from a constant expression.
// Unsigned literals are narrowed to int64 so downstream code handles a single
// integer type; a CEL null yields (nil, nil).
func getConstValue(expr *exprv1.Expr) (interface{}, error) {
	v, ok := expr.ExprKind.(*exprv1.Expr_ConstExpr)
	if !ok {
		return nil, errors.New("expression is not a literal")
	}
	switch x := v.ConstExpr.ConstantKind.(type) {
	case *exprv1.Constant_StringValue:
		return v.ConstExpr.GetStringValue(), nil
	case *exprv1.Constant_Int64Value:
		return v.ConstExpr.GetInt64Value(), nil
	case *exprv1.Constant_Uint64Value:
		return int64(v.ConstExpr.GetUint64Value()), nil
	case *exprv1.Constant_DoubleValue:
		return v.ConstExpr.GetDoubleValue(), nil
	case *exprv1.Constant_BoolValue:
		return v.ConstExpr.GetBoolValue(), nil
	case *exprv1.Constant_NullValue:
		return nil, nil
	default:
		return nil, errors.Errorf("unsupported constant %T", x)
	}
}
// evaluateBool wraps a bare call in an Expr and delegates to evaluateBoolExpr.
func evaluateBool(call *exprv1.Expr_Call) (bool, bool, error) {
	wrapped := &exprv1.Expr{ExprKind: &exprv1.Expr_CallExpr{CallExpr: call}}
	return evaluateBoolExpr(wrapped)
}
// evaluateBoolExpr constant-folds an expression to a boolean.
// Returns (value, true, nil) when folding succeeded, (_, false, nil) when the
// expression is not a constant boolean, and an error only for malformed NOT.
func evaluateBoolExpr(expr *exprv1.Expr) (bool, bool, error) {
	if literal, err := getConstValue(expr); err == nil {
		if b, ok := literal.(bool); ok {
			return b, true, nil
		}
		// A literal of another type is simply not a boolean constant.
		return false, false, nil
	}
	// Fold logical NOT over a foldable operand.
	if call := expr.GetCallExpr(); call != nil && call.Function == "!_" {
		if len(call.Args) != 1 {
			return false, false, errors.New("NOT expects exactly one argument")
		}
		val, ok, err := evaluateBoolExpr(call.Args[0])
		if err != nil || !ok {
			return false, false, err
		}
		return !val, true, nil
	}
	return false, false, nil
}
// evaluateNumeric constant-folds an expression to an int64.
// Returns (value, true, nil) on success, (_, false, nil) when the expression
// is not a foldable numeric, and an error for malformed arithmetic calls.
// Float literals are truncated to int64; now() folds to the current Unix time.
func evaluateNumeric(expr *exprv1.Expr) (int64, bool, error) {
	if literal, err := getConstValue(expr); err == nil {
		switch v := literal.(type) {
		case int64:
			return v, true, nil
		case float64:
			return int64(v), true, nil
		}
		return 0, false, nil
	}
	call := expr.GetCallExpr()
	if call == nil {
		return 0, false, nil
	}
	if call.Function == "now" {
		return timeNowUnix(), true, nil
	}
	if call.Function != "_+_" && call.Function != "_-_" && call.Function != "_*_" {
		return 0, false, nil
	}
	if len(call.Args) != 2 {
		return 0, false, errors.New("arithmetic requires two arguments")
	}
	left, ok, err := evaluateNumeric(call.Args[0])
	if err != nil || !ok {
		return 0, false, err
	}
	right, ok, err := evaluateNumeric(call.Args[1])
	if err != nil || !ok {
		return 0, false, err
	}
	switch call.Function {
	case "_+_":
		return left + right, true, nil
	case "_-_":
		return left - right, true, nil
	default: // "_*_" — the only remaining operator after the guard above
		return left * right, true, nil
	}
}
// timeNowUnix returns the current time as a Unix timestamp in seconds; it is
// the compile-time evaluation of the CEL now() function.
func timeNowUnix() int64 {
	return time.Now().Unix()
}
// buildComprehensionCondition handles CEL comprehension expressions (exists, all, etc.).
// Only exists() over a JSON-list field is accepted; everything else errors.
func buildComprehensionCondition(comp *exprv1.Expr_Comprehension, schema Schema) (Condition, error) {
	// Determine the comprehension kind by examining the loop initialization and step
	kind, err := detectComprehensionKind(comp)
	if err != nil {
		return nil, err
	}
	// Get the field being iterated over
	iterRangeIdent := comp.IterRange.GetIdentExpr()
	if iterRangeIdent == nil {
		return nil, errors.New("comprehension range must be a field identifier")
	}
	fieldName := iterRangeIdent.GetName()
	// Validate the field
	field, ok := schema.Field(fieldName)
	if !ok {
		return nil, errors.Errorf("unknown field %q in comprehension", fieldName)
	}
	if field.Kind != FieldKindJSONList {
		return nil, errors.Errorf("field %q does not support comprehension (must be a list)", fieldName)
	}
	// Extract the predicate from the loop step
	predicate, err := extractPredicate(comp, schema)
	if err != nil {
		return nil, err
	}
	return &ListComprehensionCondition{
		Kind: kind,
		Field: fieldName,
		IterVar: comp.IterVar,
		Predicate: predicate,
	}, nil
}
// detectComprehensionKind determines if this is an exists() macro.
// Only exists() is currently supported: its expansion initializes the
// accumulator to false and ORs each element's predicate into it. all()
// (accumulator true, AND step) is recognized only to give a clearer error.
func detectComprehensionKind(comp *exprv1.Expr_Comprehension) (ComprehensionKind, error) {
	accuInit := comp.AccuInit.GetConstExpr()
	if accuInit == nil {
		return "", errors.New("comprehension accumulator must be initialized with a constant")
	}
	startsTrue := accuInit.GetBoolValue()
	step := comp.LoopStep.GetCallExpr()
	if !startsTrue && step != nil && step.Function == "_||_" {
		return ComprehensionExists, nil
	}
	if startsTrue && step != nil && step.Function == "_&&_" {
		return "", errors.New("all() comprehension is not supported; use exists() instead")
	}
	return "", errors.New("unsupported comprehension type; only exists() is supported")
}
// extractPredicate extracts the predicate expression from the comprehension loop step.
// Supported predicate functions are startsWith, endsWith, and contains, each
// applied to the iteration variable with a literal string argument.
func extractPredicate(comp *exprv1.Expr_Comprehension, _ Schema) (PredicateExpr, error) {
	// The loop step is: @result || predicate(t) for exists
	// or: @result && predicate(t) for all
	step := comp.LoopStep.GetCallExpr()
	if step == nil {
		return nil, errors.New("comprehension loop step must be a call expression")
	}
	if len(step.Args) != 2 {
		return nil, errors.New("comprehension loop step must have two arguments")
	}
	// The predicate is the second argument
	predicateExpr := step.Args[1]
	predicateCall := predicateExpr.GetCallExpr()
	if predicateCall == nil {
		return nil, errors.New("comprehension predicate must be a function call")
	}
	// Handle different predicate functions
	switch predicateCall.Function {
	case "startsWith":
		return buildStartsWithPredicate(predicateCall, comp.IterVar)
	case "endsWith":
		return buildEndsWithPredicate(predicateCall, comp.IterVar)
	case "contains":
		return buildContainsPredicate(predicateCall, comp.IterVar)
	default:
		return nil, errors.Errorf("unsupported predicate function %q in comprehension (supported: startsWith, endsWith, contains)", predicateCall.Function)
	}
}
// stringPredicateArg validates a comprehension predicate call and extracts its
// single constant string argument. It checks that the call's receiver is the
// comprehension's iteration variable and that the argument is a string
// literal. fn is the predicate name used in error messages, keeping the
// wording identical across startsWith/endsWith/contains.
func stringPredicateArg(call *exprv1.Expr_Call, fn, iterVar string) (string, error) {
	// Verify the target is the iteration variable.
	if target := call.Target.GetIdentExpr(); target == nil || target.GetName() != iterVar {
		return "", errors.Errorf("%s target must be the iteration variable %q", fn, iterVar)
	}
	if len(call.Args) != 1 {
		return "", errors.Errorf("%s expects exactly one argument", fn)
	}
	value, err := getConstValue(call.Args[0])
	if err != nil {
		return "", errors.Wrapf(err, "%s argument must be a constant string", fn)
	}
	str, ok := value.(string)
	if !ok {
		return "", errors.Errorf("%s argument must be a string", fn)
	}
	return str, nil
}
// buildStartsWithPredicate extracts the pattern from t.startsWith("prefix").
func buildStartsWithPredicate(call *exprv1.Expr_Call, iterVar string) (PredicateExpr, error) {
	prefix, err := stringPredicateArg(call, "startsWith", iterVar)
	if err != nil {
		return nil, err
	}
	return &StartsWithPredicate{Prefix: prefix}, nil
}
// buildEndsWithPredicate extracts the pattern from t.endsWith("suffix").
func buildEndsWithPredicate(call *exprv1.Expr_Call, iterVar string) (PredicateExpr, error) {
	suffix, err := stringPredicateArg(call, "endsWith", iterVar)
	if err != nil {
		return nil, err
	}
	return &EndsWithPredicate{Suffix: suffix}, nil
}
// buildContainsPredicate extracts the pattern from t.contains("substring").
func buildContainsPredicate(call *exprv1.Expr_Call, iterVar string) (PredicateExpr, error) {
	substring, err := stringPredicateArg(call, "contains", iterVar)
	if err != nil {
		return nil, err
	}
	return &ContainsPredicate{Substring: substring}, nil
}

748
plugin/filter/render.go Normal file
View File

@@ -0,0 +1,748 @@
package filter
import (
"fmt"
"strings"
"github.com/pkg/errors"
)
// renderer walks a Condition tree and accumulates the SQL text plus the
// bound arguments for one target dialect.
type renderer struct {
	schema  Schema      // field metadata used to resolve CEL identifiers
	dialect DialectName // target SQL dialect
	// placeholderOffset shifts Postgres $N numbering when the caller has
	// already bound earlier arguments.
	placeholderOffset  int
	placeholderCounter int   // count of arguments bound so far
	args               []any // bound arguments, in placeholder order
}

// renderResult is the outcome of rendering one condition subtree.
type renderResult struct {
	sql           string // SQL fragment; empty when trivial
	trivial       bool   // condition is constant-true (no clause needed)
	unsatisfiable bool   // condition is constant-false
}

// newRenderer builds a renderer for the given schema and render options.
func newRenderer(schema Schema, opts RenderOptions) *renderer {
	return &renderer{
		schema:            schema,
		dialect:           opts.Dialect,
		placeholderOffset: opts.PlaceholderOffset,
	}
}
// Render converts a Condition tree into a Statement (SQL plus bound
// arguments). A constant-false filter renders as "1 = 0"; a constant-true
// filter renders as the empty string so callers can omit the clause.
func (r *renderer) Render(cond Condition) (Statement, error) {
	result, err := r.renderCondition(cond)
	if err != nil {
		return Statement{}, err
	}
	stmt := Statement{Args: r.args}
	if stmt.Args == nil {
		// Normalize so callers never see a nil argument slice.
		stmt.Args = []any{}
	}
	switch {
	case result.unsatisfiable:
		stmt.SQL = "1 = 0"
	case result.trivial:
		stmt.SQL = ""
	default:
		stmt.SQL = result.sql
	}
	return stmt, nil
}
// renderCondition dispatches on the concrete condition type.
func (r *renderer) renderCondition(cond Condition) (renderResult, error) {
	switch node := cond.(type) {
	case *ConstantCondition:
		if node.Value {
			return renderResult{trivial: true}, nil
		}
		return renderResult{sql: "1 = 0", unsatisfiable: true}, nil
	case *LogicalCondition:
		return r.renderLogicalCondition(node)
	case *NotCondition:
		return r.renderNotCondition(node)
	case *FieldPredicateCondition:
		return r.renderFieldPredicate(node)
	case *ComparisonCondition:
		return r.renderComparison(node)
	case *InCondition:
		return r.renderInCondition(node)
	case *ElementInCondition:
		return r.renderElementInCondition(node)
	case *ContainsCondition:
		return r.renderContainsCondition(node)
	case *ListComprehensionCondition:
		return r.renderListComprehension(node)
	default:
		return renderResult{}, errors.Errorf("unsupported condition type %T", node)
	}
}
// renderLogicalCondition renders both operands first (keeping placeholder
// numbering in left-to-right order) and then folds them with AND/OR
// constant simplification.
func (r *renderer) renderLogicalCondition(cond *LogicalCondition) (renderResult, error) {
	left, err := r.renderCondition(cond.Left)
	if err != nil {
		return renderResult{}, err
	}
	right, err := r.renderCondition(cond.Right)
	if err != nil {
		return renderResult{}, err
	}
	if cond.Operator == LogicalAnd {
		return combineAnd(left, right), nil
	}
	if cond.Operator == LogicalOr {
		return combineOr(left, right), nil
	}
	return renderResult{}, errors.Errorf("unsupported logical operator %s", cond.Operator)
}
// renderNotCondition negates a child condition, folding the constant
// cases: NOT(true) is unsatisfiable and NOT(false) is trivially true.
func (r *renderer) renderNotCondition(cond *NotCondition) (renderResult, error) {
	inner, err := r.renderCondition(cond.Expr)
	if err != nil {
		return renderResult{}, err
	}
	switch {
	case inner.trivial:
		return renderResult{sql: "1 = 0", unsatisfiable: true}, nil
	case inner.unsatisfiable:
		return renderResult{trivial: true}, nil
	default:
		return renderResult{sql: fmt.Sprintf("NOT (%s)", inner.sql)}, nil
	}
}
// renderFieldPredicate renders a bare boolean field reference (e.g. the
// filter `pinned`) as a truth test on its backing storage.
func (r *renderer) renderFieldPredicate(cond *FieldPredicateCondition) (renderResult, error) {
	field, ok := r.schema.Field(cond.Field)
	if !ok {
		return renderResult{}, errors.Errorf("unknown field %q", cond.Field)
	}
	if field.Kind == FieldKindBoolColumn {
		column := qualifyColumn(r.dialect, field.Column)
		return renderResult{sql: fmt.Sprintf("%s IS TRUE", column)}, nil
	}
	if field.Kind != FieldKindJSONBool {
		return renderResult{}, errors.Errorf("field %q cannot be used as a predicate", cond.Field)
	}
	sql, err := r.jsonBoolPredicate(field)
	if err != nil {
		return renderResult{}, err
	}
	return renderResult{sql: sql}, nil
}
// renderComparison renders a binary comparison. The left-hand side must be
// either a schema field reference (dispatched by its storage kind) or a
// supported function call such as size(tags).
func (r *renderer) renderComparison(cond *ComparisonCondition) (renderResult, error) {
	switch left := cond.Left.(type) {
	case *FieldRef:
		field, ok := r.schema.Field(left.Name)
		if !ok {
			return renderResult{}, errors.Errorf("unknown field %q", left.Name)
		}
		// Storage kind decides how the literal on the right is rendered.
		switch field.Kind {
		case FieldKindBoolColumn:
			return r.renderBoolColumnComparison(field, cond.Operator, cond.Right)
		case FieldKindJSONBool:
			return r.renderJSONBoolComparison(field, cond.Operator, cond.Right)
		case FieldKindScalar:
			return r.renderScalarComparison(field, cond.Operator, cond.Right)
		default:
			return renderResult{}, errors.Errorf("field %q does not support comparison", field.Name)
		}
	case *FunctionValue:
		return r.renderFunctionComparison(left, cond.Operator, cond.Right)
	default:
		return renderResult{}, errors.New("comparison must start with a field reference or supported function")
	}
}
// renderFunctionComparison renders size(<json-list field>) <op> N, the
// only function comparison currently supported.
func (r *renderer) renderFunctionComparison(fn *FunctionValue, op ComparisonOperator, right ValueExpr) (renderResult, error) {
	if fn.Name != "size" {
		return renderResult{}, errors.Errorf("unsupported function %s in comparison", fn.Name)
	}
	if len(fn.Args) != 1 {
		return renderResult{}, errors.New("size() expects one argument")
	}
	ref, ok := fn.Args[0].(*FieldRef)
	if !ok {
		return renderResult{}, errors.New("size() argument must be a field")
	}
	field, ok := r.schema.Field(ref.Name)
	if !ok {
		return renderResult{}, errors.Errorf("unknown field %q", ref.Name)
	}
	if field.Kind != FieldKindJSONList {
		return renderResult{}, errors.Errorf("size() only supports tag lists, got %q", field.Name)
	}
	count, err := expectNumericLiteral(right)
	if err != nil {
		return renderResult{}, err
	}
	lengthExpr := jsonArrayLengthExpr(r.dialect, field)
	sql := fmt.Sprintf("%s %s %s", lengthExpr, sqlOperator(op), r.addArg(count))
	return renderResult{sql: sql}, nil
}
// renderScalarComparison compares a scalar column against a literal.
// A nil literal maps to IS NULL / IS NOT NULL (eq/neq only); otherwise the
// literal is bound as a placeholder, typed by the field's logical type.
func (r *renderer) renderScalarComparison(field Field, op ComparisonOperator, right ValueExpr) (renderResult, error) {
	lit, err := expectLiteral(right)
	if err != nil {
		return renderResult{}, err
	}
	columnExpr := field.columnExpr(r.dialect)
	if lit == nil {
		switch op {
		case CompareEq:
			return renderResult{sql: fmt.Sprintf("%s IS NULL", columnExpr)}, nil
		case CompareNeq:
			return renderResult{sql: fmt.Sprintf("%s IS NOT NULL", columnExpr)}, nil
		default:
			return renderResult{}, errors.Errorf("operator %s not supported for null comparison", op)
		}
	}
	placeholder := ""
	switch field.Type {
	case FieldTypeString:
		value, ok := lit.(string)
		if !ok {
			return renderResult{}, errors.Errorf("field %q expects string value", field.Name)
		}
		placeholder = r.addArg(value)
	case FieldTypeInt, FieldTypeTimestamp:
		// Timestamps are compared as epoch integers; columnExpr has already
		// applied any dialect conversion (e.g. UNIX_TIMESTAMP on MySQL).
		num, err := toInt64(lit)
		if err != nil {
			return renderResult{}, errors.Wrapf(err, "field %q expects integer value", field.Name)
		}
		placeholder = r.addArg(num)
	default:
		return renderResult{}, errors.Errorf("unsupported data type %q for field %s", field.Type, field.Name)
	}
	return renderResult{
		sql: fmt.Sprintf("%s %s %s", columnExpr, sqlOperator(op), placeholder),
	}, nil
}
// renderBoolColumnComparison compares a native boolean column against a
// boolean literal.
func (r *renderer) renderBoolColumnComparison(field Field, op ComparisonOperator, right ValueExpr) (renderResult, error) {
	value, err := expectBool(right)
	if err != nil {
		return renderResult{}, err
	}
	placeholder := r.addBoolArg(value)
	column := qualifyColumn(r.dialect, field.Column)
	sql := fmt.Sprintf("%s %s %s", column, sqlOperator(op), placeholder)
	return renderResult{sql: sql}, nil
}
// renderJSONBoolComparison compares a boolean stored inside a JSON payload
// column against a boolean literal, with dialect-specific rendering.
func (r *renderer) renderJSONBoolComparison(field Field, op ComparisonOperator, right ValueExpr) (renderResult, error) {
	value, err := expectBool(right)
	if err != nil {
		return renderResult{}, err
	}
	jsonExpr := jsonExtractExpr(r.dialect, field)
	switch r.dialect {
	case DialectSQLite:
		switch op {
		case CompareEq:
			// NOTE(review): has_task_list is special-cased to compare against
			// the literals 0/1 instead of IS TRUE — presumably because its
			// payload value is serialized as a number; confirm against the
			// payload writer.
			if field.Name == "has_task_list" {
				target := "0"
				if value {
					target = "1"
				}
				return renderResult{sql: fmt.Sprintf("%s = %s", jsonExpr, target)}, nil
			}
			// IS TRUE treats a missing key (SQL NULL) as not-true.
			if value {
				return renderResult{sql: fmt.Sprintf("%s IS TRUE", jsonExpr)}, nil
			}
			return renderResult{sql: fmt.Sprintf("NOT(%s IS TRUE)", jsonExpr)}, nil
		case CompareNeq:
			if field.Name == "has_task_list" {
				target := "0"
				if value {
					target = "1"
				}
				return renderResult{sql: fmt.Sprintf("%s != %s", jsonExpr, target)}, nil
			}
			// != literal is rendered as the negation of the == rendering.
			if value {
				return renderResult{sql: fmt.Sprintf("NOT(%s IS TRUE)", jsonExpr)}, nil
			}
			return renderResult{sql: fmt.Sprintf("%s IS TRUE", jsonExpr)}, nil
		default:
			return renderResult{}, errors.Errorf("operator %s not supported for boolean JSON field", op)
		}
	case DialectMySQL:
		// MySQL compares the extracted JSON value against a JSON literal.
		boolStr := "false"
		if value {
			boolStr = "true"
		}
		return renderResult{
			sql: fmt.Sprintf("%s %s CAST('%s' AS JSON)", jsonExpr, sqlOperator(op), boolStr),
		}, nil
	case DialectPostgres:
		// Postgres casts the JSON value to boolean and binds the literal.
		placeholder := r.addArg(value)
		return renderResult{
			sql: fmt.Sprintf("(%s)::boolean %s %s", jsonExpr, sqlOperator(op), placeholder),
		}, nil
	default:
		return renderResult{}, errors.Errorf("unsupported dialect %s", r.dialect)
	}
}
// renderInCondition renders `field in [v1, v2, ...]`. The virtual "tag"
// attribute routes to the JSON tag-list matcher; anything else must be a
// scalar column.
func (r *renderer) renderInCondition(cond *InCondition) (renderResult, error) {
	ref, ok := cond.Left.(*FieldRef)
	if !ok {
		return renderResult{}, errors.New("IN operator requires a field on the left-hand side")
	}
	if ref.Name == "tag" {
		return r.renderTagInList(cond.Values)
	}
	field, found := r.schema.Field(ref.Name)
	if !found {
		return renderResult{}, errors.Errorf("unknown field %q", ref.Name)
	}
	if field.Kind != FieldKindScalar {
		return renderResult{}, errors.Errorf("field %q does not support IN()", ref.Name)
	}
	return r.renderScalarInCondition(field, cond.Values)
}
// renderTagInList renders `tag in [...]` as an OR over per-tag matchers.
// Each tag matches either exactly or as a hierarchy prefix (e.g. "book"
// matches both "book" and "book/sci-fi").
func (r *renderer) renderTagInList(values []ValueExpr) (renderResult, error) {
	field, ok := r.schema.ResolveAlias("tag")
	if !ok {
		return renderResult{}, errors.New("tag attribute is not configured")
	}
	conditions := make([]string, 0, len(values))
	for _, v := range values {
		lit, err := expectLiteral(v)
		if err != nil {
			return renderResult{}, err
		}
		str, ok := lit.(string)
		if !ok {
			return renderResult{}, errors.New("tags must be compared with string literals")
		}
		switch r.dialect {
		case DialectSQLite:
			// Support hierarchical tags: match exact tag OR tags with this prefix (e.g., "book" matches "book" and "book/something")
			exactMatch := fmt.Sprintf("%s LIKE %s", jsonArrayExpr(r.dialect, field), r.addArg(fmt.Sprintf(`%%"%s"%%`, str)))
			prefixMatch := fmt.Sprintf("%s LIKE %s", jsonArrayExpr(r.dialect, field), r.addArg(fmt.Sprintf(`%%"%s/%%`, str)))
			expr := fmt.Sprintf("(%s OR %s)", exactMatch, prefixMatch)
			conditions = append(conditions, expr)
		case DialectMySQL:
			// Support hierarchical tags: match exact tag OR tags with this prefix
			exactMatch := fmt.Sprintf("JSON_CONTAINS(%s, %s)", jsonArrayExpr(r.dialect, field), r.addArg(fmt.Sprintf(`"%s"`, str)))
			prefixMatch := fmt.Sprintf("%s LIKE %s", jsonArrayExpr(r.dialect, field), r.addArg(fmt.Sprintf(`%%"%s/%%`, str)))
			expr := fmt.Sprintf("(%s OR %s)", exactMatch, prefixMatch)
			conditions = append(conditions, expr)
		case DialectPostgres:
			// Support hierarchical tags: match exact tag OR tags with this prefix
			exactMatch := fmt.Sprintf("%s @> jsonb_build_array(%s::json)", jsonArrayExpr(r.dialect, field), r.addArg(fmt.Sprintf(`"%s"`, str)))
			prefixMatch := fmt.Sprintf("(%s)::text LIKE %s", jsonArrayExpr(r.dialect, field), r.addArg(fmt.Sprintf(`%%"%s/%%`, str)))
			expr := fmt.Sprintf("(%s OR %s)", exactMatch, prefixMatch)
			conditions = append(conditions, expr)
		default:
			return renderResult{}, errors.Errorf("unsupported dialect %s", r.dialect)
		}
	}
	// A single tag needs no extra grouping parentheses.
	if len(conditions) == 1 {
		return renderResult{sql: conditions[0]}, nil
	}
	return renderResult{
		sql: fmt.Sprintf("(%s)", strings.Join(conditions, " OR ")),
	}, nil
}
func (r *renderer) renderElementInCondition(cond *ElementInCondition) (renderResult, error) {
field, ok := r.schema.Field(cond.Field)
if !ok {
return renderResult{}, errors.Errorf("unknown field %q", cond.Field)
}
if field.Kind != FieldKindJSONList {
return renderResult{}, errors.Errorf("field %q is not a tag list", cond.Field)
}
lit, err := expectLiteral(cond.Element)
if err != nil {
return renderResult{}, err
}
str, ok := lit.(string)
if !ok {
return renderResult{}, errors.New("tags membership requires string literal")
}
switch r.dialect {
case DialectSQLite:
sql := fmt.Sprintf("%s LIKE %s", jsonArrayExpr(r.dialect, field), r.addArg(fmt.Sprintf(`%%"%s"%%`, str)))
return renderResult{sql: sql}, nil
case DialectMySQL:
sql := fmt.Sprintf("JSON_CONTAINS(%s, %s)", jsonArrayExpr(r.dialect, field), r.addArg(fmt.Sprintf(`"%s"`, str)))
return renderResult{sql: sql}, nil
case DialectPostgres:
sql := fmt.Sprintf("%s @> jsonb_build_array(%s::json)", jsonArrayExpr(r.dialect, field), r.addArg(fmt.Sprintf(`"%s"`, str)))
return renderResult{sql: sql}, nil
default:
return renderResult{}, errors.Errorf("unsupported dialect %s", r.dialect)
}
}
// renderScalarInCondition renders a scalar column IN (...) list, binding
// one placeholder per value. Only string and int fields are supported.
func (r *renderer) renderScalarInCondition(field Field, values []ValueExpr) (renderResult, error) {
	placeholders := make([]string, 0, len(values))
	for _, v := range values {
		lit, err := expectLiteral(v)
		if err != nil {
			return renderResult{}, err
		}
		switch field.Type {
		case FieldTypeString:
			str, ok := lit.(string)
			if !ok {
				return renderResult{}, errors.Errorf("field %q expects string values", field.Name)
			}
			placeholders = append(placeholders, r.addArg(str))
		case FieldTypeInt:
			num, err := toInt64(lit)
			if err != nil {
				return renderResult{}, err
			}
			placeholders = append(placeholders, r.addArg(num))
		default:
			// Note: timestamps are intentionally not accepted here.
			return renderResult{}, errors.Errorf("field %q does not support IN() comparisons", field.Name)
		}
	}
	column := field.columnExpr(r.dialect)
	return renderResult{
		sql: fmt.Sprintf("%s IN (%s)", column, strings.Join(placeholders, ",")),
	}, nil
}
func (r *renderer) renderContainsCondition(cond *ContainsCondition) (renderResult, error) {
field, ok := r.schema.Field(cond.Field)
if !ok {
return renderResult{}, errors.Errorf("unknown field %q", cond.Field)
}
column := field.columnExpr(r.dialect)
arg := fmt.Sprintf("%%%s%%", cond.Value)
switch r.dialect {
case DialectSQLite:
// Use custom Unicode-aware case folding function for case-insensitive comparison.
// This overcomes SQLite's ASCII-only LOWER() limitation.
sql := fmt.Sprintf("memos_unicode_lower(%s) LIKE memos_unicode_lower(%s)", column, r.addArg(arg))
return renderResult{sql: sql}, nil
case DialectPostgres:
sql := fmt.Sprintf("%s ILIKE %s", column, r.addArg(arg))
return renderResult{sql: sql}, nil
default:
sql := fmt.Sprintf("%s LIKE %s", column, r.addArg(arg))
return renderResult{sql: sql}, nil
}
}
// renderListComprehension renders tags.exists(t, <predicate>) style
// comprehensions over JSON list fields, dispatching on the predicate type.
func (r *renderer) renderListComprehension(cond *ListComprehensionCondition) (renderResult, error) {
	field, ok := r.schema.Field(cond.Field)
	if !ok {
		return renderResult{}, errors.Errorf("unknown field %q", cond.Field)
	}
	if field.Kind != FieldKindJSONList {
		return renderResult{}, errors.Errorf("field %q is not a JSON list", cond.Field)
	}
	switch pred := cond.Predicate.(type) {
	case *StartsWithPredicate:
		return r.renderTagStartsWith(field, pred.Prefix, cond.Kind)
	case *EndsWithPredicate:
		return r.renderTagEndsWith(field, pred.Suffix, cond.Kind)
	case *ContainsPredicate:
		return r.renderTagContains(field, pred.Substring, cond.Kind)
	}
	return renderResult{}, errors.Errorf("unsupported predicate type %T in comprehension", cond.Predicate)
}
// renderTagStartsWith generates SQL for tags.exists(t, t.startsWith("prefix")).
func (r *renderer) renderTagStartsWith(field Field, prefix string, _ ComprehensionKind) (renderResult, error) {
	arrayExpr := jsonArrayExpr(r.dialect, field)
	switch r.dialect {
	case DialectSQLite, DialectMySQL:
		// Match exact tag or tags with this prefix (hierarchical support)
		// NOTE(review): the prefix pattern %"prefix% already matches every
		// string the exact pattern %"prefix"% matches, so the OR appears
		// redundant (harmless, but two placeholders instead of one).
		exactMatch := r.buildJSONArrayLike(arrayExpr, fmt.Sprintf(`%%"%s"%%`, prefix))
		prefixMatch := r.buildJSONArrayLike(arrayExpr, fmt.Sprintf(`%%"%s%%`, prefix))
		condition := fmt.Sprintf("(%s OR %s)", exactMatch, prefixMatch)
		return renderResult{sql: r.wrapWithNullCheck(arrayExpr, condition)}, nil
	case DialectPostgres:
		// Use PostgreSQL's powerful JSON operators
		exactMatch := fmt.Sprintf("%s @> jsonb_build_array(%s::json)", arrayExpr, r.addArg(fmt.Sprintf(`"%s"`, prefix)))
		prefixMatch := fmt.Sprintf("(%s)::text LIKE %s", arrayExpr, r.addArg(fmt.Sprintf(`%%"%s%%`, prefix)))
		condition := fmt.Sprintf("(%s OR %s)", exactMatch, prefixMatch)
		return renderResult{sql: r.wrapWithNullCheck(arrayExpr, condition)}, nil
	default:
		return renderResult{}, errors.Errorf("unsupported dialect %s", r.dialect)
	}
}
// renderTagEndsWith generates SQL for tags.exists(t, t.endsWith("suffix")).
func (r *renderer) renderTagEndsWith(field Field, suffix string, _ ComprehensionKind) (renderResult, error) {
	arrayExpr := jsonArrayExpr(r.dialect, field)
	// An element ending in suffix appears as `suffix"` in the serialized array.
	match := r.buildJSONArrayLike(arrayExpr, `%`+suffix+`"%`)
	return renderResult{sql: r.wrapWithNullCheck(arrayExpr, match)}, nil
}

// renderTagContains generates SQL for tags.exists(t, t.contains("substring")).
func (r *renderer) renderTagContains(field Field, substring string, _ ComprehensionKind) (renderResult, error) {
	arrayExpr := jsonArrayExpr(r.dialect, field)
	match := r.buildJSONArrayLike(arrayExpr, `%`+substring+`%`)
	return renderResult{sql: r.wrapWithNullCheck(arrayExpr, match)}, nil
}
// buildJSONArrayLike builds a LIKE clause matching a pattern against the
// serialized JSON array text; NULL/empty handling is left to the caller.
// Unknown dialects yield an empty string without binding an argument.
func (r *renderer) buildJSONArrayLike(arrayExpr, pattern string) string {
	if r.dialect == DialectSQLite || r.dialect == DialectMySQL {
		return fmt.Sprintf("%s LIKE %s", arrayExpr, r.addArg(pattern))
	}
	if r.dialect == DialectPostgres {
		return fmt.Sprintf("(%s)::text LIKE %s", arrayExpr, r.addArg(pattern))
	}
	return ""
}
// wrapWithNullCheck guards a match condition so that NULL or empty JSON
// arrays can never satisfy it. Unknown dialects get the bare condition.
func (r *renderer) wrapWithNullCheck(arrayExpr, condition string) string {
	var notEmpty string
	switch r.dialect {
	case DialectSQLite:
		notEmpty = fmt.Sprintf("%s IS NOT NULL AND %s != '[]'", arrayExpr, arrayExpr)
	case DialectMySQL:
		notEmpty = fmt.Sprintf("%s IS NOT NULL AND JSON_LENGTH(%s) > 0", arrayExpr, arrayExpr)
	case DialectPostgres:
		notEmpty = fmt.Sprintf("%s IS NOT NULL AND jsonb_array_length(%s) > 0", arrayExpr, arrayExpr)
	default:
		return condition
	}
	return fmt.Sprintf("(%s AND %s)", condition, notEmpty)
}
// jsonBoolPredicate renders a truth test for a boolean stored inside a
// JSON payload column.
func (r *renderer) jsonBoolPredicate(field Field) (string, error) {
	expr := jsonExtractExpr(r.dialect, field)
	switch r.dialect {
	case DialectSQLite:
		return fmt.Sprintf("%s IS TRUE", expr), nil
	case DialectMySQL:
		// Missing keys extract as SQL NULL; coalesce them to JSON false.
		return fmt.Sprintf("COALESCE(%s, CAST('false' AS JSON)) = CAST('true' AS JSON)", expr), nil
	case DialectPostgres:
		return fmt.Sprintf("(%s)::boolean IS TRUE", expr), nil
	}
	return "", errors.Errorf("unsupported dialect %s", r.dialect)
}
// combineAnd joins two rendered results under AND with constant folding:
// false dominates, true operands are dropped.
func combineAnd(left, right renderResult) renderResult {
	switch {
	case left.unsatisfiable || right.unsatisfiable:
		return renderResult{sql: "1 = 0", unsatisfiable: true}
	case left.trivial:
		return right
	case right.trivial:
		return left
	}
	return renderResult{sql: fmt.Sprintf("(%s AND %s)", left.sql, right.sql)}
}

// combineOr joins two rendered results under OR with constant folding:
// true dominates, false operands are dropped.
func combineOr(left, right renderResult) renderResult {
	switch {
	case left.trivial || right.trivial:
		return renderResult{trivial: true}
	case left.unsatisfiable:
		return right
	case right.unsatisfiable:
		return left
	}
	return renderResult{sql: fmt.Sprintf("(%s OR %s)", left.sql, right.sql)}
}
func (r *renderer) addArg(value any) string {
r.placeholderCounter++
r.args = append(r.args, value)
if r.dialect == DialectPostgres {
return fmt.Sprintf("$%d", r.placeholderOffset+r.placeholderCounter)
}
return "?"
}
func (r *renderer) addBoolArg(value bool) string {
var v any
switch r.dialect {
case DialectSQLite:
if value {
v = 1
} else {
v = 0
}
default:
v = value
}
return r.addArg(v)
}
// expectLiteral asserts that the value expression is a literal and
// returns its Go value (which may be nil for a null literal).
func expectLiteral(expr ValueExpr) (any, error) {
	if lit, ok := expr.(*LiteralValue); ok {
		return lit.Value, nil
	}
	return nil, errors.New("expression must be a literal")
}

// expectBool asserts that the value expression is a boolean literal.
func expectBool(expr ValueExpr) (bool, error) {
	lit, err := expectLiteral(expr)
	if err != nil {
		return false, err
	}
	b, ok := lit.(bool)
	if !ok {
		return false, errors.New("boolean literal required")
	}
	return b, nil
}

// expectNumericLiteral asserts that the value expression is a numeric
// literal and normalizes it to int64.
func expectNumericLiteral(expr ValueExpr) (int64, error) {
	lit, err := expectLiteral(expr)
	if err != nil {
		return 0, err
	}
	return toInt64(lit)
}
// toInt64 normalizes the numeric types produced by literal parsing to
// int64. Floating-point inputs are truncated toward zero.
func toInt64(value any) (int64, error) {
	switch v := value.(type) {
	case int64:
		return v, nil
	case int:
		return int64(v), nil
	case int32:
		return int64(v), nil
	case uint32:
		return int64(v), nil
	case uint64:
		return int64(v), nil
	case float32:
		return int64(v), nil
	case float64:
		return int64(v), nil
	}
	return 0, errors.Errorf("cannot convert %T to int64", value)
}
// sqlOperator maps a comparison operator to its SQL spelling; the enum
// values are already the SQL tokens, so this is the identity conversion.
func sqlOperator(op ComparisonOperator) string {
	return string(op)
}

// qualifyColumn renders a table-qualified column reference, backtick-quoted
// for SQLite/MySQL and bare for Postgres.
func qualifyColumn(d DialectName, col Column) string {
	switch d {
	case DialectPostgres:
		return fmt.Sprintf("%s.%s", col.Table, col.Name)
	default:
		return fmt.Sprintf("`%s`.`%s`", col.Table, col.Name)
	}
}

// jsonPath builds the $.a.b JSON path string for a field's JSONPath parts.
func jsonPath(field Field) string {
	return "$." + strings.Join(field.JSONPath, ".")
}
// jsonAccessorExpr builds the dialect-specific expression that reads
// field.JSONPath out of its JSON column. terminalText controls whether
// the final Postgres step yields text (->>) or JSON (->); SQLite and
// MySQL use JSON_EXTRACT either way. Unknown dialects yield "".
func jsonAccessorExpr(d DialectName, field Field, terminalText bool) string {
	column := qualifyColumn(d, field.Column)
	switch d {
	case DialectSQLite, DialectMySQL:
		return fmt.Sprintf("JSON_EXTRACT(%s, '%s')", column, jsonPath(field))
	case DialectPostgres:
		return buildPostgresJSONAccessor(column, field.JSONPath, terminalText)
	default:
		return ""
	}
}

// jsonExtractExpr returns the accessor for a JSON scalar; on Postgres the
// terminal step yields text so the value can be cast/compared directly.
func jsonExtractExpr(d DialectName, field Field) string {
	return jsonAccessorExpr(d, field, true)
}

// jsonArrayExpr returns the accessor for a JSON array; on Postgres the
// terminal step stays JSON so array operators (@>, jsonb_array_length)
// keep working.
func jsonArrayExpr(d DialectName, field Field) string {
	return jsonAccessorExpr(d, field, false)
}

// jsonArrayLengthExpr returns the dialect-specific length of the JSON
// array, treating NULL (missing key) as the empty array.
func jsonArrayLengthExpr(d DialectName, field Field) string {
	arrayExpr := jsonArrayExpr(d, field)
	switch d {
	case DialectSQLite:
		return fmt.Sprintf("JSON_ARRAY_LENGTH(COALESCE(%s, JSON_ARRAY()))", arrayExpr)
	case DialectMySQL:
		return fmt.Sprintf("JSON_LENGTH(COALESCE(%s, JSON_ARRAY()))", arrayExpr)
	case DialectPostgres:
		return fmt.Sprintf("jsonb_array_length(COALESCE(%s, '[]'::jsonb))", arrayExpr)
	default:
		return ""
	}
}
// buildPostgresJSONAccessor chains Postgres JSON operators along path.
// Intermediate steps use -> (JSON result); when terminalText is set the
// final step uses ->> so the expression yields text instead of JSON.
// An empty path returns base unchanged.
func buildPostgresJSONAccessor(base string, path []string, terminalText bool) string {
	expr := base
	last := len(path) - 1
	for idx, part := range path {
		op := "->"
		if terminalText && idx == last {
			op = "->>"
		}
		expr = fmt.Sprintf("%s%s'%s'", expr, op, part)
	}
	return expr
}

319
plugin/filter/schema.go Normal file
View File

@@ -0,0 +1,319 @@
package filter
import (
"fmt"
"time"
"github.com/google/cel-go/cel"
"github.com/google/cel-go/common/types"
"github.com/google/cel-go/common/types/ref"
)
// DialectName enumerates supported SQL dialects.
type DialectName string

const (
	DialectSQLite   DialectName = "sqlite"
	DialectMySQL    DialectName = "mysql"
	DialectPostgres DialectName = "postgres"
)

// FieldType represents the logical type of a field.
type FieldType string

const (
	FieldTypeString    FieldType = "string"
	FieldTypeInt       FieldType = "int"
	FieldTypeBool      FieldType = "bool"
	FieldTypeTimestamp FieldType = "timestamp" // compared as epoch seconds
)

// FieldKind describes how a field is stored.
type FieldKind string

const (
	FieldKindScalar       FieldKind = "scalar"        // plain table column
	FieldKindBoolColumn   FieldKind = "bool_column"   // native boolean column
	FieldKindJSONBool     FieldKind = "json_bool"     // boolean inside a JSON payload column
	FieldKindJSONList     FieldKind = "json_list"     // string list inside a JSON payload column
	FieldKindVirtualAlias FieldKind = "virtual_alias" // alias resolving to another field
)

// Column identifies the backing table column.
type Column struct {
	Table string
	Name  string
}

// Field captures the schema metadata for an exposed CEL identifier.
type Field struct {
	Name             string     // CEL identifier exposed to filter expressions
	Kind             FieldKind  // storage strategy
	Type             FieldType  // logical value type
	Column           Column     // backing column (unused for virtual aliases)
	JSONPath         []string   // path inside the JSON column for JSON-backed kinds
	AliasFor         string     // target field name when Kind is FieldKindVirtualAlias
	SupportsContains bool       // whether contains() substring matching is allowed
	// Expressions holds per-dialect fmt verbs wrapping the column
	// expression (e.g. UNIX_TIMESTAMP(%s) on MySQL).
	Expressions map[DialectName]string
	// AllowedComparisonOps restricts comparison operators — presumably a
	// nil map means unrestricted; TODO confirm against the checker.
	AllowedComparisonOps map[ComparisonOperator]bool
}

// Schema collects CEL environment options and field metadata.
type Schema struct {
	Name       string           // schema identifier (e.g. "memo")
	Fields     map[string]Field // metadata keyed by CEL identifier
	EnvOptions []cel.EnvOption  // options for constructing the CEL env
}
// Field returns the field metadata if present.
func (s Schema) Field(name string) (Field, bool) {
	field, ok := s.Fields[name]
	return field, ok
}

// ResolveAlias resolves a virtual alias to its target field; a non-alias
// field resolves to itself.
func (s Schema) ResolveAlias(name string) (Field, bool) {
	field, ok := s.Fields[name]
	if !ok {
		return Field{}, false
	}
	if field.Kind != FieldKindVirtualAlias {
		return field, true
	}
	target, found := s.Fields[field.AliasFor]
	if !found {
		return Field{}, false
	}
	return target, true
}
// nowFunction exposes now() to CEL filter expressions, returning the
// current Unix epoch seconds at evaluation time (matching the integer
// representation used by the timestamp fields).
var nowFunction = cel.Function("now",
	cel.Overload("now",
		[]*cel.Type{},
		cel.IntType,
		cel.FunctionBinding(func(_ ...ref.Val) ref.Val {
			return types.Int(time.Now().Unix())
		}),
	),
)
// NewSchema constructs the memo filter schema and CEL environment.
// Scalar fields map to memo table columns; has_* booleans and tags live
// inside the memo.payload JSON column; "tag" is a virtual alias of "tags".
func NewSchema() Schema {
	fields := map[string]Field{
		"content": {
			Name:             "content",
			Kind:             FieldKindScalar,
			Type:             FieldTypeString,
			Column:           Column{Table: "memo", Name: "content"},
			SupportsContains: true,
			Expressions:      map[DialectName]string{},
		},
		"creator_id": {
			Name:        "creator_id",
			Kind:        FieldKindScalar,
			Type:        FieldTypeInt,
			Column:      Column{Table: "memo", Name: "creator_id"},
			Expressions: map[DialectName]string{},
			AllowedComparisonOps: map[ComparisonOperator]bool{
				CompareEq:  true,
				CompareNeq: true,
			},
		},
		"created_ts": {
			Name:   "created_ts",
			Kind:   FieldKindScalar,
			Type:   FieldTypeTimestamp,
			Column: Column{Table: "memo", Name: "created_ts"},
			Expressions: map[DialectName]string{
				// MySQL stores created_ts as TIMESTAMP, needs conversion to epoch
				DialectMySQL: "UNIX_TIMESTAMP(%s)",
				// PostgreSQL and SQLite store created_ts as BIGINT (epoch), no conversion needed
				DialectPostgres: "%s",
				DialectSQLite:   "%s",
			},
		},
		"updated_ts": {
			Name:   "updated_ts",
			Kind:   FieldKindScalar,
			Type:   FieldTypeTimestamp,
			Column: Column{Table: "memo", Name: "updated_ts"},
			Expressions: map[DialectName]string{
				// MySQL stores updated_ts as TIMESTAMP, needs conversion to epoch
				DialectMySQL: "UNIX_TIMESTAMP(%s)",
				// PostgreSQL and SQLite store updated_ts as BIGINT (epoch), no conversion needed
				DialectPostgres: "%s",
				DialectSQLite:   "%s",
			},
		},
		"pinned": {
			Name:        "pinned",
			Kind:        FieldKindBoolColumn,
			Type:        FieldTypeBool,
			Column:      Column{Table: "memo", Name: "pinned"},
			Expressions: map[DialectName]string{},
			AllowedComparisonOps: map[ComparisonOperator]bool{
				CompareEq:  true,
				CompareNeq: true,
			},
		},
		"visibility": {
			Name:        "visibility",
			Kind:        FieldKindScalar,
			Type:        FieldTypeString,
			Column:      Column{Table: "memo", Name: "visibility"},
			Expressions: map[DialectName]string{},
			AllowedComparisonOps: map[ComparisonOperator]bool{
				CompareEq:  true,
				CompareNeq: true,
			},
		},
		// Tag list inside the JSON payload column.
		"tags": {
			Name:     "tags",
			Kind:     FieldKindJSONList,
			Type:     FieldTypeString,
			Column:   Column{Table: "memo", Name: "payload"},
			JSONPath: []string{"tags"},
		},
		// Singular alias so filters can write `tag in [...]`.
		"tag": {
			Name:     "tag",
			Kind:     FieldKindVirtualAlias,
			Type:     FieldTypeString,
			AliasFor: "tags",
		},
		"has_task_list": {
			Name:     "has_task_list",
			Kind:     FieldKindJSONBool,
			Type:     FieldTypeBool,
			Column:   Column{Table: "memo", Name: "payload"},
			JSONPath: []string{"property", "hasTaskList"},
			AllowedComparisonOps: map[ComparisonOperator]bool{
				CompareEq:  true,
				CompareNeq: true,
			},
		},
		"has_link": {
			Name:     "has_link",
			Kind:     FieldKindJSONBool,
			Type:     FieldTypeBool,
			Column:   Column{Table: "memo", Name: "payload"},
			JSONPath: []string{"property", "hasLink"},
			AllowedComparisonOps: map[ComparisonOperator]bool{
				CompareEq:  true,
				CompareNeq: true,
			},
		},
		"has_code": {
			Name:     "has_code",
			Kind:     FieldKindJSONBool,
			Type:     FieldTypeBool,
			Column:   Column{Table: "memo", Name: "payload"},
			JSONPath: []string{"property", "hasCode"},
			AllowedComparisonOps: map[ComparisonOperator]bool{
				CompareEq:  true,
				CompareNeq: true,
			},
		},
		"has_incomplete_tasks": {
			Name:     "has_incomplete_tasks",
			Kind:     FieldKindJSONBool,
			Type:     FieldTypeBool,
			Column:   Column{Table: "memo", Name: "payload"},
			JSONPath: []string{"property", "hasIncompleteTasks"},
			AllowedComparisonOps: map[ComparisonOperator]bool{
				CompareEq:  true,
				CompareNeq: true,
			},
		},
	}
	// Every declared field must also be a typed CEL variable so filter
	// expressions type-check before rendering.
	envOptions := []cel.EnvOption{
		cel.Variable("content", cel.StringType),
		cel.Variable("creator_id", cel.IntType),
		cel.Variable("created_ts", cel.IntType),
		cel.Variable("updated_ts", cel.IntType),
		cel.Variable("pinned", cel.BoolType),
		cel.Variable("tag", cel.StringType),
		cel.Variable("tags", cel.ListType(cel.StringType)),
		cel.Variable("visibility", cel.StringType),
		cel.Variable("has_task_list", cel.BoolType),
		cel.Variable("has_link", cel.BoolType),
		cel.Variable("has_code", cel.BoolType),
		cel.Variable("has_incomplete_tasks", cel.BoolType),
		nowFunction,
	}
	return Schema{
		Name:       "memo",
		Fields:     fields,
		EnvOptions: envOptions,
	}
}
// NewAttachmentSchema constructs the attachment filter schema and CEL
// environment. All fields map to plain attachment table columns.
func NewAttachmentSchema() Schema {
	fields := map[string]Field{
		"filename": {
			Name:             "filename",
			Kind:             FieldKindScalar,
			Type:             FieldTypeString,
			Column:           Column{Table: "attachment", Name: "filename"},
			SupportsContains: true,
			Expressions:      map[DialectName]string{},
		},
		"mime_type": {
			Name:        "mime_type",
			Kind:        FieldKindScalar,
			Type:        FieldTypeString,
			Column:      Column{Table: "attachment", Name: "type"},
			Expressions: map[DialectName]string{},
		},
		"create_time": {
			Name:   "create_time",
			Kind:   FieldKindScalar,
			Type:   FieldTypeTimestamp,
			Column: Column{Table: "attachment", Name: "created_ts"},
			Expressions: map[DialectName]string{
				// MySQL stores created_ts as TIMESTAMP, needs conversion to epoch
				DialectMySQL: "UNIX_TIMESTAMP(%s)",
				// PostgreSQL and SQLite store created_ts as BIGINT (epoch), no conversion needed
				DialectPostgres: "%s",
				DialectSQLite:   "%s",
			},
		},
		"memo_id": {
			Name:        "memo_id",
			Kind:        FieldKindScalar,
			Type:        FieldTypeInt,
			Column:      Column{Table: "attachment", Name: "memo_id"},
			Expressions: map[DialectName]string{},
			AllowedComparisonOps: map[ComparisonOperator]bool{
				CompareEq:  true,
				CompareNeq: true,
			},
		},
	}
	envOptions := []cel.EnvOption{
		cel.Variable("filename", cel.StringType),
		cel.Variable("mime_type", cel.StringType),
		cel.Variable("create_time", cel.IntType),
		// NOTE(review): memo_id is AnyType while the other variables are
		// concretely typed — presumably to permit null comparisons; confirm
		// this is intentional.
		cel.Variable("memo_id", cel.AnyType),
		nowFunction,
	}
	return Schema{
		Name:       "attachment",
		Fields:     fields,
		EnvOptions: envOptions,
	}
}
// columnExpr returns the field expression for the given dialect, applying
// any schema-specific overrides (e.g. UNIX timestamp conversions) as a
// fmt verb wrapping the qualified column.
func (f Field) columnExpr(d DialectName) string {
	column := qualifyColumn(d, f.Column)
	expr, ok := f.Expressions[d]
	if !ok || expr == "" {
		return column
	}
	return fmt.Sprintf(expr, column)
}

View File

@@ -0,0 +1,166 @@
package httpgetter
import (
	"fmt"
	"io"
	"net"
	"net/http"
	"net/url"
	"time"

	"github.com/pkg/errors"
	"golang.org/x/net/html"
	"golang.org/x/net/html/atom"
)
// ErrInternalIP is returned when a target URL points at or resolves to a
// loopback, private, or link-local address (SSRF guard).
var ErrInternalIP = errors.New("internal IP addresses are not allowed")

// httpClient re-validates every redirect hop so a public URL cannot
// bounce the request to an internal address, and caps the hop count.
// The overall Timeout bounds slow or stalling remote servers — the
// zero-value client would otherwise wait forever.
var httpClient = &http.Client{
	Timeout: 30 * time.Second,
	CheckRedirect: func(req *http.Request, via []*http.Request) error {
		if err := validateURL(req.URL.String()); err != nil {
			return errors.Wrap(err, "redirect to internal IP")
		}
		if len(via) >= 10 {
			return errors.New("too many redirects")
		}
		return nil
	},
}
// HTMLMeta holds the page metadata extracted from an HTML document:
// title, description, and preview image URL.
type HTMLMeta struct {
	Title       string `json:"title"`
	Description string `json:"description"`
	Image       string `json:"image"`
}
func GetHTMLMeta(urlStr string) (*HTMLMeta, error) {
if err := validateURL(urlStr); err != nil {
return nil, err
}
response, err := httpClient.Get(urlStr)
if err != nil {
return nil, err
}
defer response.Body.Close()
mediatype, err := getMediatype(response)
if err != nil {
return nil, err
}
if mediatype != "text/html" {
return nil, errors.New("not a HTML page")
}
// TODO: limit the size of the response body
htmlMeta := extractHTMLMeta(response.Body)
enrichSiteMeta(response.Request.URL, htmlMeta)
return htmlMeta, nil
}
// extractHTMLMeta tokenizes the HTML stream and collects title, description,
// and image metadata. Scanning stops at the opening <body> tag or at the end
// of input, whichever comes first.
func extractHTMLMeta(resp io.Reader) *HTMLMeta {
	tokenizer := html.NewTokenizer(resp)
	meta := &HTMLMeta{}
	for {
		switch tokenizer.Next() {
		case html.ErrorToken:
			// End of input (or malformed HTML): return what we have.
			return meta
		case html.StartTagToken, html.SelfClosingTagToken:
			token := tokenizer.Token()
			switch token.DataAtom {
			case atom.Body:
				// Metadata lives in <head>; stop once the body begins.
				return meta
			case atom.Title:
				// The text node following <title> is the title itself.
				tokenizer.Next()
				meta.Title = tokenizer.Token().Data
			case atom.Meta:
				// Open Graph values overwrite plain ones because they are
				// checked after them.
				if v, ok := extractMetaProperty(token, "description"); ok {
					meta.Description = v
				}
				if v, ok := extractMetaProperty(token, "og:title"); ok {
					meta.Title = v
				}
				if v, ok := extractMetaProperty(token, "og:description"); ok {
					meta.Description = v
				}
				if v, ok := extractMetaProperty(token, "og:image"); ok {
					meta.Image = v
				}
			}
		}
	}
}
// extractMetaProperty returns the content of a <meta> tag whose "property"
// or "name" attribute equals prop. Open Graph tags use property=, while
// standard HTML metadata (e.g. <meta name="description">) uses name=; the
// original only matched property=, so plain descriptions were never found.
func extractMetaProperty(token html.Token, prop string) (content string, ok bool) {
	for _, attr := range token.Attr {
		switch attr.Key {
		case "property", "name":
			if attr.Val == prop {
				ok = true
			}
		case "content":
			content = attr.Val
		}
	}
	// content may be non-empty even when ok is false; callers must check ok.
	return content, ok
}
// validateURL rejects URLs that could be used for SSRF: non-http(s) schemes,
// empty hosts, and any destination (literal IP or every resolved address of
// a hostname) that is loopback, private, link-local, or unspecified.
func validateURL(urlStr string) error {
	u, err := url.Parse(urlStr)
	if err != nil {
		return errors.New("invalid URL format")
	}
	if u.Scheme != "http" && u.Scheme != "https" {
		return errors.New("only http/https protocols are allowed")
	}
	host := u.Hostname()
	if host == "" {
		return errors.New("empty hostname")
	}
	// Literal IP address: check it directly.
	if ip := net.ParseIP(host); ip != nil {
		if isInternalIP(ip) {
			return errors.Wrap(ErrInternalIP, ip.String())
		}
		return nil
	}
	// Hostname: resolve it and vet every returned address, since any of
	// them may be dialed.
	ips, err := net.LookupIP(host)
	if err != nil {
		return errors.Errorf("failed to resolve hostname: %v", err)
	}
	for _, ip := range ips {
		if isInternalIP(ip) {
			return errors.Wrapf(ErrInternalIP, "host=%s, ip=%s", host, ip.String())
		}
	}
	return nil
}

// isInternalIP reports whether ip points at a non-public destination that
// must not be fetched: loopback, RFC 1918/ULA private, link-local unicast,
// or the unspecified address (0.0.0.0 / ::, previously unchecked).
func isInternalIP(ip net.IP) bool {
	return ip.IsLoopback() || ip.IsPrivate() || ip.IsLinkLocalUnicast() || ip.IsUnspecified()
}
// enrichSiteMeta fills in site-specific metadata that generic HTML parsing
// misses. Currently: YouTube watch pages get a video thumbnail image.
func enrichSiteMeta(url *url.URL, meta *HTMLMeta) {
	if url.Hostname() != "www.youtube.com" || url.Path != "/watch" {
		return
	}
	vid := url.Query().Get("v")
	if vid == "" {
		return
	}
	meta.Image = fmt.Sprintf("https://img.youtube.com/vi/%s/mqdefault.jpg", vid)
}

View File

@@ -0,0 +1,32 @@
package httpgetter
import (
"errors"
"testing"
"github.com/stretchr/testify/require"
)
// TestGetHTMLMeta is a table-driven test for GetHTMLMeta. The table is
// currently empty — populating it would require fetching real remote URLs —
// so the loop body never runs today; it exists as a scaffold.
func TestGetHTMLMeta(t *testing.T) {
	tests := []struct {
		urlStr   string
		htmlMeta HTMLMeta
	}{}
	for _, test := range tests {
		metadata, err := GetHTMLMeta(test.urlStr)
		require.NoError(t, err)
		require.Equal(t, test.htmlMeta, *metadata)
	}
}
// TestGetHTMLMetaForInternal checks the SSRF guard: both a literal private
// IP and a hostname that resolves to an internal address must be rejected
// with ErrInternalIP.
// NOTE(review): the localhost case performs a real DNS lookup — confirm it
// behaves in sandboxed CI environments.
func TestGetHTMLMetaForInternal(t *testing.T) {
	// test for internal IP
	if _, err := GetHTMLMeta("http://192.168.0.1"); !errors.Is(err, ErrInternalIP) {
		t.Errorf("Expected error for internal IP, got %v", err)
	}
	// test for resolved internal IP
	if _, err := GetHTMLMeta("http://localhost"); !errors.Is(err, ErrInternalIP) {
		t.Errorf("Expected error for resolved internal IP, got %v", err)
	}
}

View File

@@ -0,0 +1 @@
package httpgetter

View File

@@ -0,0 +1,45 @@
package httpgetter
import (
"errors"
"io"
"net/http"
"net/url"
"strings"
)
// Image is a fetched remote image: its raw bytes plus the media type
// reported by the server (e.g. "image/png").
type Image struct {
	Blob      []byte
	Mediatype string
}
// GetImage downloads the image at urlStr and returns its bytes together
// with the reported media type. Non-image media types are rejected.
func GetImage(urlStr string) (*Image, error) {
	if _, err := url.Parse(urlStr); err != nil {
		return nil, err
	}
	// Apply the same SSRF guard as GetHTMLMeta: the original used the
	// default http.Get with no internal-IP validation, so image fetches
	// could reach internal addresses.
	if err := validateURL(urlStr); err != nil {
		return nil, err
	}
	req, err := http.NewRequest(http.MethodGet, urlStr, nil)
	if err != nil {
		return nil, err
	}
	// httpClient's redirect hook re-validates every redirect target.
	response, err := httpClient.Do(req)
	if err != nil {
		return nil, err
	}
	defer response.Body.Close()
	mediatype, err := getMediatype(response)
	if err != nil {
		return nil, err
	}
	if !strings.HasPrefix(mediatype, "image/") {
		return nil, errors.New("wrong image mediatype")
	}
	bodyBytes, err := io.ReadAll(response.Body)
	if err != nil {
		return nil, err
	}
	image := &Image{
		Blob:      bodyBytes,
		Mediatype: mediatype,
	}
	return image, nil
}

15
plugin/httpgetter/util.go Normal file
View File

@@ -0,0 +1,15 @@
package httpgetter
import (
"mime"
"net/http"
)
func getMediatype(response *http.Response) (string, error) {
contentType := response.Header.Get("content-type")
mediatype, _, err := mime.ParseMediaType(contentType)
if err != nil {
return "", err
}
return mediatype, nil
}

8
plugin/idp/idp.go Normal file
View File

@@ -0,0 +1,8 @@
package idp
// IdentityProviderUserInfo is the provider-agnostic user profile returned
// by identity provider implementations after a successful sign-in.
type IdentityProviderUserInfo struct {
	// Identifier is the unique user key at the provider (required).
	Identifier string
	// DisplayName is the human-readable name (may fall back to Identifier).
	DisplayName string
	Email       string
	AvatarURL   string
}

134
plugin/idp/oauth2/oauth2.go Normal file
View File

@@ -0,0 +1,134 @@
// Package oauth2 is the plugin for OAuth2 Identity Provider.
package oauth2
import (
	"context"
	"encoding/json"
	"fmt"
	"io"
	"log/slog"
	"net/http"
	"time"

	"github.com/pkg/errors"
	"golang.org/x/oauth2"

	"github.com/usememos/memos/plugin/idp"
	storepb "github.com/usememos/memos/proto/gen/store"
)
// IdentityProvider represents an OAuth2 Identity Provider.
type IdentityProvider struct {
	// config is the validated provider configuration (see NewIdentityProvider).
	config *storepb.OAuth2Config
}
// NewIdentityProvider initializes a new OAuth2 Identity Provider with the
// given configuration. It returns an error naming the first required field
// that is empty.
func NewIdentityProvider(config *storepb.OAuth2Config) (*IdentityProvider, error) {
	// Validate with an ordered slice rather than a map keyed by the field
	// VALUES: with a map, two empty fields collapse into a single key and
	// map iteration order makes the reported field nondeterministic.
	for _, check := range []struct {
		value string
		field string
	}{
		{config.ClientId, "clientId"},
		{config.ClientSecret, "clientSecret"},
		{config.TokenUrl, "tokenUrl"},
		{config.UserInfoUrl, "userInfoUrl"},
		{config.FieldMapping.Identifier, "fieldMapping.identifier"},
	} {
		if check.value == "" {
			return nil, errors.Errorf(`the field "%s" is empty but required`, check.field)
		}
	}
	return &IdentityProvider{
		config: config,
	}, nil
}
// ExchangeToken returns the exchanged OAuth2 token using the given authorization code.
// If codeVerifier is provided, it will be used for PKCE (Proof Key for Code Exchange) validation.
func (p *IdentityProvider) ExchangeToken(ctx context.Context, redirectURL, code, codeVerifier string) (string, error) {
	oauthConf := &oauth2.Config{
		ClientID:     p.config.ClientId,
		ClientSecret: p.config.ClientSecret,
		RedirectURL:  redirectURL,
		Scopes:       p.config.Scopes,
		Endpoint: oauth2.Endpoint{
			AuthURL:   p.config.AuthUrl,
			TokenURL:  p.config.TokenUrl,
			AuthStyle: oauth2.AuthStyleInParams,
		},
	}
	// PKCE: forward the code_verifier only when the caller supplied one,
	// keeping plain (non-PKCE) flows working unchanged.
	var opts []oauth2.AuthCodeOption
	if codeVerifier != "" {
		opts = append(opts, oauth2.SetAuthURLParam("code_verifier", codeVerifier))
	}
	token, err := oauthConf.Exchange(ctx, code, opts...)
	if err != nil {
		return "", errors.Wrap(err, "failed to exchange access token")
	}
	// The standard AccessToken field is more reliable across providers
	// than digging the token out of Extra().
	if token.AccessToken == "" {
		return "", errors.New("missing access token from authorization response")
	}
	return token.AccessToken, nil
}
// UserInfo returns the parsed user information using the given OAuth2 token.
// It calls the configured userinfo endpoint with the token as a Bearer
// credential and maps the returned JSON claims through the configured field
// mapping. The identifier claim is required; the rest are best-effort.
func (p *IdentityProvider) UserInfo(token string) (*idp.IdentityProviderUserInfo, error) {
	// Bound the request so a slow or hung provider cannot stall the login
	// flow indefinitely (the original client had no timeout).
	client := &http.Client{Timeout: 30 * time.Second}
	req, err := http.NewRequest(http.MethodGet, p.config.UserInfoUrl, nil)
	if err != nil {
		return nil, errors.Wrap(err, "failed to new http request")
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token))
	resp, err := client.Do(req)
	if err != nil {
		return nil, errors.Wrap(err, "failed to get user information")
	}
	defer resp.Body.Close()
	// Fail fast on a non-200: previously an error body was fed into the
	// JSON decoder and surfaced as a confusing parse error.
	if resp.StatusCode != http.StatusOK {
		return nil, errors.Errorf("user info endpoint returned status %d", resp.StatusCode)
	}
	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, errors.Wrap(err, "failed to read response body")
	}
	var claims map[string]any
	if err := json.Unmarshal(body, &claims); err != nil {
		return nil, errors.Wrap(err, "failed to unmarshal response body")
	}
	// Claims may contain PII, so log them at debug level only
	// (previously logged at info level).
	slog.Debug("user info claims", "claims", claims)
	userInfo := &idp.IdentityProviderUserInfo{}
	if v, ok := claims[p.config.FieldMapping.Identifier].(string); ok {
		userInfo.Identifier = v
	}
	if userInfo.Identifier == "" {
		return nil, errors.Errorf("the field %q is not found in claims or has empty value", p.config.FieldMapping.Identifier)
	}
	// Best effort to map optional fields.
	if p.config.FieldMapping.DisplayName != "" {
		if v, ok := claims[p.config.FieldMapping.DisplayName].(string); ok {
			userInfo.DisplayName = v
		}
	}
	if userInfo.DisplayName == "" {
		userInfo.DisplayName = userInfo.Identifier
	}
	if p.config.FieldMapping.Email != "" {
		if v, ok := claims[p.config.FieldMapping.Email].(string); ok {
			userInfo.Email = v
		}
	}
	if p.config.FieldMapping.AvatarUrl != "" {
		if v, ok := claims[p.config.FieldMapping.AvatarUrl].(string); ok {
			userInfo.AvatarURL = v
		}
	}
	slog.Debug("user info", "userInfo", userInfo)
	return userInfo, nil
}

View File

@@ -0,0 +1,164 @@
package oauth2
import (
"context"
"encoding/json"
"fmt"
"io"
"net/http"
"net/http/httptest"
"net/url"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/usememos/memos/plugin/idp"
storepb "github.com/usememos/memos/proto/gen/store"
)
// TestNewIdentityProvider verifies that each required configuration field
// is validated and named in the returned error.
func TestNewIdentityProvider(t *testing.T) {
	tests := []struct {
		name        string
		config      *storepb.OAuth2Config
		containsErr string
	}{
		{
			name: "no tokenUrl",
			config: &storepb.OAuth2Config{
				ClientId:     "test-client-id",
				ClientSecret: "test-client-secret",
				AuthUrl:      "",
				TokenUrl:     "",
				UserInfoUrl:  "https://example.com/api/user",
				FieldMapping: &storepb.FieldMapping{
					Identifier: "login",
				},
			},
			containsErr: `the field "tokenUrl" is empty but required`,
		},
		{
			name: "no userInfoUrl",
			config: &storepb.OAuth2Config{
				ClientId:     "test-client-id",
				ClientSecret: "test-client-secret",
				AuthUrl:      "",
				TokenUrl:     "https://example.com/token",
				UserInfoUrl:  "",
				FieldMapping: &storepb.FieldMapping{
					Identifier: "login",
				},
			},
			containsErr: `the field "userInfoUrl" is empty but required`,
		},
		{
			name: "no field mapping identifier",
			config: &storepb.OAuth2Config{
				ClientId:     "test-client-id",
				ClientSecret: "test-client-secret",
				AuthUrl:      "",
				TokenUrl:     "https://example.com/token",
				UserInfoUrl:  "https://example.com/api/user",
				FieldMapping: &storepb.FieldMapping{
					Identifier: "",
				},
			},
			containsErr: `the field "fieldMapping.identifier" is empty but required`,
		},
	}
	for _, test := range tests {
		// Bind the subtest's own *testing.T: the original discarded it via
		// func(*testing.T), so assertions reported against the parent test.
		t.Run(test.name, func(t *testing.T) {
			_, err := NewIdentityProvider(test.config)
			assert.ErrorContains(t, err, test.containsErr)
		})
	}
}
// newMockServer starts an httptest server exposing an OAuth2 token endpoint
// and a userinfo endpoint. The token endpoint asserts an incoming
// authorization-code grant for the expected code and replies with
// accessToken; the userinfo endpoint replies with the given raw JSON bytes.
// The caller owns the returned server (it is not auto-closed here).
func newMockServer(t *testing.T, code, accessToken string, userinfo []byte) *httptest.Server {
	mux := http.NewServeMux()
	// rawIDToken is never assigned, so the id_token field in the token
	// response is intentionally the empty string.
	var rawIDToken string
	mux.HandleFunc("/oauth2/token", func(w http.ResponseWriter, r *http.Request) {
		require.Equal(t, http.MethodPost, r.Method)
		body, err := io.ReadAll(r.Body)
		require.NoError(t, err)
		vals, err := url.ParseQuery(string(body))
		require.NoError(t, err)
		require.Equal(t, code, vals.Get("code"))
		require.Equal(t, "authorization_code", vals.Get("grant_type"))
		w.Header().Set("Content-Type", "application/json")
		err = json.NewEncoder(w).Encode(map[string]any{
			"access_token": accessToken,
			"token_type":   "Bearer",
			"expires_in":   3600,
			"id_token":     rawIDToken,
		})
		require.NoError(t, err)
	})
	mux.HandleFunc("/oauth2/userinfo", func(w http.ResponseWriter, _ *http.Request) {
		w.Header().Set("Content-Type", "application/json")
		_, err := w.Write(userinfo)
		require.NoError(t, err)
	})
	s := httptest.NewServer(mux)
	return s
}
// TestIdentityProvider runs the full OAuth2 flow against a local mock
// server: exchange an authorization code for an access token, then fetch
// the user info and verify the claims are mapped through FieldMapping.
func TestIdentityProvider(t *testing.T) {
	ctx := context.Background()
	const (
		testClientID    = "test-client-id"
		testCode        = "test-code"
		testAccessToken = "test-access-token"
		testSubject     = "123456789"
		testName        = "John Doe"
		testEmail       = "john.doe@example.com"
	)
	userInfo, err := json.Marshal(
		map[string]any{
			"sub":   testSubject,
			"name":  testName,
			"email": testEmail,
		},
	)
	require.NoError(t, err)
	s := newMockServer(t, testCode, testAccessToken, userInfo)
	oauth2, err := NewIdentityProvider(
		&storepb.OAuth2Config{
			ClientId:     testClientID,
			ClientSecret: "test-client-secret",
			TokenUrl:     fmt.Sprintf("%s/oauth2/token", s.URL),
			UserInfoUrl:  fmt.Sprintf("%s/oauth2/userinfo", s.URL),
			FieldMapping: &storepb.FieldMapping{
				Identifier:  "sub",
				DisplayName: "name",
				Email:       "email",
			},
		},
	)
	require.NoError(t, err)
	redirectURL := "https://example.com/oauth/callback"
	// Test without PKCE (backward compatibility)
	oauthToken, err := oauth2.ExchangeToken(ctx, redirectURL, testCode, "")
	require.NoError(t, err)
	require.Equal(t, testAccessToken, oauthToken)
	userInfoResult, err := oauth2.UserInfo(oauthToken)
	require.NoError(t, err)
	wantUserInfo := &idp.IdentityProviderUserInfo{
		Identifier:  testSubject,
		DisplayName: testName,
		Email:       testEmail,
	}
	assert.Equal(t, wantUserInfo, userInfoResult)
}

View File

@@ -0,0 +1,28 @@
package ast
import (
gast "github.com/yuin/goldmark/ast"
)
// TagNode represents a #tag in the markdown AST.
// It is produced by the tag inline parser and consumed by the extraction
// walks in the markdown service.
type TagNode struct {
	gast.BaseInline
	// Tag name without the # prefix
	Tag []byte
}

// KindTag is the NodeKind for TagNode, registered once at init.
var KindTag = gast.NewNodeKind("Tag")

// Kind returns KindTag.
func (*TagNode) Kind() gast.NodeKind {
	return KindTag
}

// Dump implements Node.Dump for debugging; it prints the tag name.
func (n *TagNode) Dump(source []byte, level int) {
	gast.DumpHelper(n, source, level, map[string]string{
		"Tag": string(n.Tag),
	}, nil)
}

View File

@@ -0,0 +1,24 @@
package extensions
import (
"github.com/yuin/goldmark"
"github.com/yuin/goldmark/parser"
"github.com/yuin/goldmark/util"
mparser "github.com/usememos/memos/plugin/markdown/parser"
)
// tagExtension wires the #tag inline parser into a goldmark instance.
type tagExtension struct{}

// TagExtension is a goldmark extension for #tag syntax.
var TagExtension = &tagExtension{}

// Extend extends the goldmark parser with tag support.
func (*tagExtension) Extend(m goldmark.Markdown) {
	m.Parser().AddOptions(
		parser.WithInlineParsers(
			// Priority 200 - run before standard link parser (500)
			util.Prioritized(mparser.NewTagParser(), 200),
		),
	)
}

409
plugin/markdown/markdown.go Normal file
View File

@@ -0,0 +1,409 @@
package markdown
import (
"bytes"
"strings"
"github.com/yuin/goldmark"
gast "github.com/yuin/goldmark/ast"
"github.com/yuin/goldmark/extension"
east "github.com/yuin/goldmark/extension/ast"
"github.com/yuin/goldmark/parser"
"github.com/yuin/goldmark/text"
mast "github.com/usememos/memos/plugin/markdown/ast"
"github.com/usememos/memos/plugin/markdown/extensions"
"github.com/usememos/memos/plugin/markdown/renderer"
storepb "github.com/usememos/memos/proto/gen/store"
)
// ExtractedData contains all metadata extracted from markdown in a single pass.
type ExtractedData struct {
	// Tags are the deduplicated #tags, in order of first occurrence.
	Tags []string
	// Property holds the boolean content flags (links, code, task lists).
	Property *storepb.MemoPayload_Property
}
// Service handles markdown metadata extraction.
// It uses goldmark to parse markdown and extract tags, properties, and snippets.
// HTML rendering is primarily done on frontend using markdown-it, but backend provides
// RenderHTML for RSS feeds and other server-side rendering needs.
type Service interface {
	// ExtractAll extracts tags, properties, and references in a single parse (most efficient)
	ExtractAll(content []byte) (*ExtractedData, error)
	// ExtractTags returns all #tags found in content
	ExtractTags(content []byte) ([]string, error)
	// ExtractProperties computes boolean properties
	ExtractProperties(content []byte) (*storepb.MemoPayload_Property, error)
	// RenderMarkdown renders goldmark AST back to markdown text
	RenderMarkdown(content []byte) (string, error)
	// RenderHTML renders markdown content to HTML
	RenderHTML(content []byte) (string, error)
	// GenerateSnippet creates plain text summary of at most maxLength characters
	GenerateSnippet(content []byte, maxLength int) (string, error)
	// ValidateContent checks for syntax errors
	ValidateContent(content []byte) error
	// RenameTag renames all occurrences of oldTag to newTag in content
	RenameTag(content []byte, oldTag, newTag string) (string, error)
}
// service implements the Service interface.
type service struct {
	// md is the goldmark instance built in NewService and reused for every
	// parse/render call.
	md goldmark.Markdown
}
// Option configures the markdown service.
type Option func(*config)

// config collects the option flags applied by NewService.
type config struct {
	// enableTags turns on #tag parsing via the tag extension.
	enableTags bool
}

// WithTagExtension enables #tag parsing.
func WithTagExtension() Option {
	return func(c *config) {
		c.enableTags = true
	}
}
// NewService creates a new markdown service with the given options.
func NewService(opts ...Option) Service {
	cfg := new(config)
	for _, apply := range opts {
		apply(cfg)
	}
	// GitHub Flavored Markdown (tables, strikethrough, task lists,
	// autolinks) is always enabled; custom extensions are opt-in.
	extenders := []goldmark.Extender{extension.GFM}
	if cfg.enableTags {
		extenders = append(extenders, extensions.TagExtension)
	}
	return &service{
		md: goldmark.New(
			goldmark.WithExtensions(extenders...),
			goldmark.WithParserOptions(
				// Generate heading IDs.
				parser.WithAutoHeadingID(),
			),
		),
	}
}
// parse is an internal helper that parses content into a goldmark AST.
// The error return is always nil today; it is kept for interface symmetry.
func (s *service) parse(content []byte) (gast.Node, error) {
	return s.md.Parser().Parse(text.NewReader(content)), nil
}
// ExtractTags returns all #tags found in content, deduplicated while
// preserving the case and order of first occurrence.
func (s *service) ExtractTags(content []byte) ([]string, error) {
	root, err := s.parse(content)
	if err != nil {
		return nil, err
	}
	var found []string
	// Collect every custom TagNode in document order.
	walkErr := gast.Walk(root, func(n gast.Node, entering bool) (gast.WalkStatus, error) {
		if !entering {
			return gast.WalkContinue, nil
		}
		if tag, ok := n.(*mast.TagNode); ok {
			found = append(found, string(tag.Tag))
		}
		return gast.WalkContinue, nil
	})
	if walkErr != nil {
		return nil, walkErr
	}
	return uniquePreserveCase(found), nil
}
// ExtractProperties computes boolean properties about the content:
// whether it contains links, code, task lists, and unchecked tasks.
func (s *service) ExtractProperties(content []byte) (*storepb.MemoPayload_Property, error) {
	root, err := s.parse(content)
	if err != nil {
		return nil, err
	}
	property := &storepb.MemoPayload_Property{}
	walkErr := gast.Walk(root, func(n gast.Node, entering bool) (gast.WalkStatus, error) {
		if !entering {
			return gast.WalkContinue, nil
		}
		switch n.Kind() {
		case gast.KindLink:
			property.HasLink = true
		case gast.KindCodeBlock, gast.KindFencedCodeBlock, gast.KindCodeSpan:
			property.HasCode = true
		case east.KindTaskCheckBox:
			property.HasTaskList = true
			// An unchecked box means at least one task is still open.
			if box, ok := n.(*east.TaskCheckBox); ok && !box.IsChecked {
				property.HasIncompleteTasks = true
			}
		default:
			// Other node kinds carry no property information.
		}
		return gast.WalkContinue, nil
	})
	if walkErr != nil {
		return nil, walkErr
	}
	return property, nil
}
// RenderMarkdown parses content and renders the resulting AST back to
// markdown text (normalizing formatting in the process).
func (s *service) RenderMarkdown(content []byte) (string, error) {
	root, err := s.parse(content)
	if err != nil {
		return "", err
	}
	return renderer.NewMarkdownRenderer().Render(root, content), nil
}
// RenderHTML renders markdown content to HTML using goldmark's built-in
// HTML renderer.
func (s *service) RenderHTML(content []byte) (string, error) {
	var out bytes.Buffer
	if convErr := s.md.Convert(content, &out); convErr != nil {
		return "", convErr
	}
	return out.String(), nil
}
// GenerateSnippet creates a plain text summary from markdown content.
// Code blocks and code spans are omitted entirely; block elements
// (paragraphs, headings, list items) are joined with single spaces. The
// result is truncated at a word boundary near maxLength (counted in runes)
// and suffixed with " ..." when truncation occurs (see truncateAtWord).
func (s *service) GenerateSnippet(content []byte, maxLength int) (string, error) {
	root, err := s.parse(content)
	if err != nil {
		return "", err
	}
	var buf strings.Builder
	// lastNodeWasBlock tracks whether the previous node closed a block
	// element, so a separating space is inserted before the next one.
	var lastNodeWasBlock bool
	err = gast.Walk(root, func(n gast.Node, entering bool) (gast.WalkStatus, error) {
		if entering {
			// Skip code blocks and code spans entirely
			switch n.Kind() {
			case gast.KindCodeBlock, gast.KindFencedCodeBlock, gast.KindCodeSpan:
				return gast.WalkSkipChildren, nil
			default:
				// Continue walking for other node types
			}
			// Add space before block elements (except first)
			switch n.Kind() {
			case gast.KindParagraph, gast.KindHeading, gast.KindListItem:
				if buf.Len() > 0 && lastNodeWasBlock {
					buf.WriteByte(' ')
				}
			default:
				// No space needed for other node types
			}
		}
		if !entering {
			// Mark that we just exited a block element
			switch n.Kind() {
			case gast.KindParagraph, gast.KindHeading, gast.KindListItem:
				lastNodeWasBlock = true
			default:
				// Not a block element
			}
			return gast.WalkContinue, nil
		}
		lastNodeWasBlock = false
		// Only extract plain text nodes
		if textNode, ok := n.(*gast.Text); ok {
			segment := textNode.Segment
			buf.Write(segment.Value(content))
			// Add space if this is a soft line break
			if textNode.SoftLineBreak() {
				buf.WriteByte(' ')
			}
		}
		// Stop walking if we've exceeded double the max length
		// (we'll truncate precisely later)
		if buf.Len() > maxLength*2 {
			return gast.WalkStop, nil
		}
		return gast.WalkContinue, nil
	})
	if err != nil {
		return "", err
	}
	snippet := buf.String()
	// Truncate at word boundary if needed.
	// NOTE: this length check is in bytes while truncateAtWord counts
	// runes, so multi-byte text may enter truncateAtWord and come back
	// unchanged — harmless, just a spare call.
	if len(snippet) > maxLength {
		snippet = truncateAtWord(snippet, maxLength)
	}
	return strings.TrimSpace(snippet), nil
}
// ValidateContent checks if the markdown content is valid by attempting a
// parse. Since parse currently never fails, this always returns nil today;
// it exists as the hook for future validation.
func (s *service) ValidateContent(content []byte) error {
	// Try to parse the content
	_, err := s.parse(content)
	return err
}
// ExtractAll extracts tags and boolean properties in a single parse and a
// single AST walk, combining ExtractTags and ExtractProperties for
// efficiency. Tags are deduplicated preserving the case of first occurrence.
func (s *service) ExtractAll(content []byte) (*ExtractedData, error) {
	root, err := s.parse(content)
	if err != nil {
		return nil, err
	}
	data := &ExtractedData{
		// Tags starts non-nil so callers (and JSON) see [] rather than null.
		Tags:     []string{},
		Property: &storepb.MemoPayload_Property{},
	}
	// Single walk to collect all data
	err = gast.Walk(root, func(n gast.Node, entering bool) (gast.WalkStatus, error) {
		if !entering {
			return gast.WalkContinue, nil
		}
		// Extract tags
		if tagNode, ok := n.(*mast.TagNode); ok {
			data.Tags = append(data.Tags, string(tagNode.Tag))
		}
		// Extract properties based on node kind
		switch n.Kind() {
		case gast.KindLink:
			data.Property.HasLink = true
		case gast.KindCodeBlock, gast.KindFencedCodeBlock, gast.KindCodeSpan:
			data.Property.HasCode = true
		case east.KindTaskCheckBox:
			data.Property.HasTaskList = true
			if checkBox, ok := n.(*east.TaskCheckBox); ok {
				// An unchecked box marks the memo as having open tasks.
				if !checkBox.IsChecked {
					data.Property.HasIncompleteTasks = true
				}
			}
		default:
			// No special handling for other node types
		}
		return gast.WalkContinue, nil
	})
	if err != nil {
		return nil, err
	}
	// Deduplicate tags while preserving original case
	data.Tags = uniquePreserveCase(data.Tags)
	return data, nil
}
// RenameTag renames all occurrences of oldTag to newTag in content and
// returns the re-rendered markdown.
func (s *service) RenameTag(content []byte, oldTag, newTag string) (string, error) {
	root, err := s.parse(content)
	if err != nil {
		return "", err
	}
	// Rewrite matching tag nodes in place, then re-render the mutated AST.
	walkErr := gast.Walk(root, func(n gast.Node, entering bool) (gast.WalkStatus, error) {
		if !entering {
			return gast.WalkContinue, nil
		}
		tagNode, ok := n.(*mast.TagNode)
		if ok && string(tagNode.Tag) == oldTag {
			tagNode.Tag = []byte(newTag)
		}
		return gast.WalkContinue, nil
	})
	if walkErr != nil {
		return "", walkErr
	}
	return renderer.NewMarkdownRenderer().Render(root, content), nil
}
// uniquePreserveCase returns the unique strings from strs, keeping the case
// and relative order of each first occurrence. An empty input yields nil.
func uniquePreserveCase(strs []string) []string {
	seen := make(map[string]struct{}, len(strs))
	var out []string
	for _, v := range strs {
		if _, dup := seen[v]; dup {
			continue
		}
		seen[v] = struct{}{}
		out = append(out, v)
	}
	return out
}
// truncateAtWord truncates s at the last word boundary before maxLength and
// appends " ...". maxLength counts runes, not bytes, so multi-byte UTF-8
// text (e.g. CJK) is handled correctly. Strings already within the limit
// are returned unchanged.
func truncateAtWord(s string, maxLength int) string {
	runes := []rune(s)
	if len(runes) <= maxLength {
		return s
	}
	cut := string(runes[:maxLength])
	// Back up to the last whitespace so a word is not split mid-way; if
	// there is none (one long word / CJK run), keep the hard cut.
	if idx := strings.LastIndexAny(cut, " \t\n\r"); idx > 0 {
		cut = cut[:idx]
	}
	return cut + " ..."
}

View File

@@ -0,0 +1,448 @@
package markdown
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestNewService verifies the constructor returns a non-nil service.
func TestNewService(t *testing.T) {
	svc := NewService()
	assert.NotNil(t, svc)
}
// TestValidateContent checks that representative markdown inputs parse
// without error. No error cases exist because goldmark accepts any input.
func TestValidateContent(t *testing.T) {
	svc := NewService()
	tests := []struct {
		name    string
		content string
		wantErr bool
	}{
		{
			name:    "valid markdown",
			content: "# Hello\n\nThis is **bold** text.",
			wantErr: false,
		},
		{
			name:    "empty content",
			content: "",
			wantErr: false,
		},
		{
			name:    "complex markdown",
			content: "# Title\n\n- List item 1\n- List item 2\n\n```go\ncode block\n```",
			wantErr: false,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			err := svc.ValidateContent([]byte(tt.content))
			if tt.wantErr {
				assert.Error(t, err)
			} else {
				assert.NoError(t, err)
			}
		})
	}
}
// TestGenerateSnippet covers plain text, inline formatting stripping,
// word-boundary truncation, code-block removal, and block joining.
func TestGenerateSnippet(t *testing.T) {
	svc := NewService()
	tests := []struct {
		name      string
		content   string
		maxLength int
		expected  string
	}{
		{
			name:      "simple text",
			content:   "Hello world",
			maxLength: 100,
			expected:  "Hello world",
		},
		{
			name:      "text with formatting",
			content:   "This is **bold** and *italic* text.",
			maxLength: 100,
			expected:  "This is bold and italic text.",
		},
		{
			name:      "truncate long text",
			content:   "This is a very long piece of text that should be truncated at a word boundary.",
			maxLength: 30,
			expected:  "This is a very long piece of ...",
		},
		{
			name:      "heading and paragraph",
			content:   "# My Title\n\nThis is the first paragraph.",
			maxLength: 100,
			expected:  "My Title This is the first paragraph.",
		},
		{
			name:      "code block removed",
			content:   "Text before\n\n```go\ncode\n```\n\nText after",
			maxLength: 100,
			expected:  "Text before Text after",
		},
		{
			name:      "list items",
			content:   "- Item 1\n- Item 2\n- Item 3",
			maxLength: 100,
			expected:  "Item 1 Item 2 Item 3",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			snippet, err := svc.GenerateSnippet([]byte(tt.content), tt.maxLength)
			require.NoError(t, err)
			assert.Equal(t, tt.expected, snippet)
		})
	}
}
// TestExtractProperties checks each boolean property (link, code, task
// list, incomplete task) individually and in combination.
func TestExtractProperties(t *testing.T) {
	tests := []struct {
		name     string
		content  string
		hasLink  bool
		hasCode  bool
		hasTasks bool
		hasInc   bool
	}{
		{
			name:     "plain text",
			content:  "Just plain text",
			hasLink:  false,
			hasCode:  false,
			hasTasks: false,
			hasInc:   false,
		},
		{
			name:     "with link",
			content:  "Check out [this link](https://example.com)",
			hasLink:  true,
			hasCode:  false,
			hasTasks: false,
			hasInc:   false,
		},
		{
			name:     "with inline code",
			content:  "Use `console.log()` to debug",
			hasLink:  false,
			hasCode:  true,
			hasTasks: false,
			hasInc:   false,
		},
		{
			name:     "with code block",
			content:  "```go\nfunc main() {}\n```",
			hasLink:  false,
			hasCode:  true,
			hasTasks: false,
			hasInc:   false,
		},
		{
			name:     "with completed task",
			content:  "- [x] Completed task",
			hasLink:  false,
			hasCode:  false,
			hasTasks: true,
			hasInc:   false,
		},
		{
			name:     "with incomplete task",
			content:  "- [ ] Todo item",
			hasLink:  false,
			hasCode:  false,
			hasTasks: true,
			hasInc:   true,
		},
		{
			name:     "mixed tasks",
			content:  "- [x] Done\n- [ ] Not done",
			hasLink:  false,
			hasCode:  false,
			hasTasks: true,
			hasInc:   true,
		},
		{
			name:     "everything",
			content:  "# Title\n\n[Link](url)\n\n`code`\n\n- [ ] Task",
			hasLink:  true,
			hasCode:  true,
			hasTasks: true,
			hasInc:   true,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			svc := NewService()
			props, err := svc.ExtractProperties([]byte(tt.content))
			require.NoError(t, err)
			assert.Equal(t, tt.hasLink, props.HasLink, "HasLink")
			assert.Equal(t, tt.hasCode, props.HasCode, "HasCode")
			assert.Equal(t, tt.hasTasks, props.HasTaskList, "HasTaskList")
			assert.Equal(t, tt.hasInc, props.HasIncompleteTasks, "HasIncompleteTasks")
		})
	}
}
// TestExtractTags exercises tag extraction with and without the tag
// extension enabled: ASCII, CJK, hierarchical, numeric, and punctuation
// boundary cases. Dedup is case-sensitive, so #work/#Work/#WORK all remain.
func TestExtractTags(t *testing.T) {
	tests := []struct {
		name     string
		content  string
		withExt  bool
		expected []string
	}{
		{
			name:     "no tags",
			content:  "Just plain text",
			withExt:  false,
			expected: []string{},
		},
		{
			name:     "single tag",
			content:  "Text with #tag",
			withExt:  true,
			expected: []string{"tag"},
		},
		{
			name:     "multiple tags",
			content:  "Text with #tag1 and #tag2",
			withExt:  true,
			expected: []string{"tag1", "tag2"},
		},
		{
			name:     "duplicate tags",
			content:  "#work is important. #Work #WORK",
			withExt:  true,
			expected: []string{"work", "Work", "WORK"},
		},
		{
			name:     "tags with hyphens and underscores",
			content:  "Tags: #work-notes #2024_plans",
			withExt:  true,
			expected: []string{"work-notes", "2024_plans"},
		},
		{
			name:     "tags at end of sentence",
			content:  "This is important #urgent.",
			withExt:  true,
			expected: []string{"urgent"},
		},
		{
			name:     "headings not tags",
			content:  "## Heading\n\n# Title\n\nText with #realtag",
			withExt:  true,
			expected: []string{"realtag"},
		},
		{
			name:     "numeric tag",
			content:  "Issue #123",
			withExt:  true,
			expected: []string{"123"},
		},
		{
			name:     "tag in list",
			content:  "- Item 1 #todo\n- Item 2 #done",
			withExt:  true,
			expected: []string{"todo", "done"},
		},
		{
			name:     "no extension enabled",
			content:  "Text with #tag",
			withExt:  false,
			expected: []string{},
		},
		{
			name:     "Chinese tag",
			content:  "Text with #测试",
			withExt:  true,
			expected: []string{"测试"},
		},
		{
			name:     "Chinese tag followed by punctuation",
			content:  "Text #测试。 More text",
			withExt:  true,
			expected: []string{"测试"},
		},
		{
			name:     "mixed Chinese and ASCII tag",
			content:  "#测试test123 content",
			withExt:  true,
			expected: []string{"测试test123"},
		},
		{
			name:     "Japanese tag",
			content:  "#日本語 content",
			withExt:  true,
			expected: []string{"日本語"},
		},
		{
			name:     "Korean tag",
			content:  "#한국어 content",
			withExt:  true,
			expected: []string{"한국어"},
		},
		{
			name:     "hierarchical tag with Chinese",
			content:  "#work/测试/项目",
			withExt:  true,
			expected: []string{"work/测试/项目"},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			var svc Service
			if tt.withExt {
				svc = NewService(WithTagExtension())
			} else {
				svc = NewService()
			}
			tags, err := svc.ExtractTags([]byte(tt.content))
			require.NoError(t, err)
			assert.ElementsMatch(t, tt.expected, tags)
		})
	}
}
// TestUniquePreserveCase verifies case-sensitive deduplication: different
// casings are distinct entries, and first occurrences win.
func TestUniquePreserveCase(t *testing.T) {
	tests := []struct {
		name     string
		input    []string
		expected []string
	}{
		{
			name:     "empty",
			input:    []string{},
			expected: []string{},
		},
		{
			name:     "unique items",
			input:    []string{"tag1", "tag2", "tag3"},
			expected: []string{"tag1", "tag2", "tag3"},
		},
		{
			name:     "duplicates",
			input:    []string{"tag", "TAG", "Tag"},
			expected: []string{"tag", "TAG", "Tag"},
		},
		{
			name:     "mixed",
			input:    []string{"Work", "work", "Important", "work"},
			expected: []string{"Work", "work", "Important"},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result := uniquePreserveCase(tt.input)
			assert.ElementsMatch(t, tt.expected, result)
		})
	}
}
// TestTruncateAtWord covers no-op, exact-length, word-boundary, single long
// word, and rune-counted (CJK and mixed) truncation cases.
func TestTruncateAtWord(t *testing.T) {
	tests := []struct {
		name      string
		input     string
		maxLength int
		expected  string
	}{
		{
			name:      "no truncation needed",
			input:     "short",
			maxLength: 10,
			expected:  "short",
		},
		{
			name:      "exact length",
			input:     "exactly ten",
			maxLength: 11,
			expected:  "exactly ten",
		},
		{
			name:      "truncate at word",
			input:     "this is a long sentence",
			maxLength: 10,
			expected:  "this is a ...",
		},
		{
			name:      "truncate very long word",
			input:     "supercalifragilisticexpialidocious",
			maxLength: 10,
			expected:  "supercalif ...",
		},
		{
			name:      "CJK characters without spaces",
			input:     "这是一个很长的中文句子没有空格的情况下也要正确处理",
			maxLength: 15,
			expected:  "这是一个很长的中文句子没有空格 ...",
		},
		{
			name:      "mixed CJK and Latin",
			input:     "这是中文mixed with English文字",
			maxLength: 10,
			expected:  "这是中文mixed ...",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result := truncateAtWord(tt.input, tt.maxLength)
			assert.Equal(t, tt.expected, result)
		})
	}
}
// Benchmark tests.

// BenchmarkGenerateSnippet measures snippet generation over a multi-section
// document with formatting, lists, and code blocks.
func BenchmarkGenerateSnippet(b *testing.B) {
	svc := NewService()
	content := []byte(`# Large Document
This is a large document with multiple paragraphs and formatting.
## Section 1
Here is some **bold** text and *italic* text with [links](https://example.com).
- List item 1
- List item 2
- List item 3
## Section 2
More content here with ` + "`inline code`" + ` and other elements.
` + "```go\nfunc example() {\n return true\n}\n```")
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		_, err := svc.GenerateSnippet(content, 200)
		if err != nil {
			b.Fatal(err)
		}
	}
}
// BenchmarkExtractProperties measures the property-extraction walk over a
// small document containing every property kind.
func BenchmarkExtractProperties(b *testing.B) {
	svc := NewService()
	content := []byte("# Title\n\n[Link](url)\n\n`code`\n\n- [ ] Task\n- [x] Done")
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		_, err := svc.ExtractProperties(content)
		if err != nil {
			b.Fatal(err)
		}
	}
}

View File

@@ -0,0 +1,139 @@
package parser
import (
"unicode"
"unicode/utf8"
gast "github.com/yuin/goldmark/ast"
"github.com/yuin/goldmark/parser"
"github.com/yuin/goldmark/text"
mast "github.com/usememos/memos/plugin/markdown/ast"
)
const (
	// MaxTagLength defines the maximum number of runes allowed in a tag.
	MaxTagLength = 100
)

// tagParser is the inline parser implementation behind NewTagParser.
type tagParser struct{}

// NewTagParser creates a new inline parser for #tag syntax.
func NewTagParser() parser.InlineParser {
	return &tagParser{}
}

// Trigger returns the characters that trigger this parser ('#').
func (*tagParser) Trigger() []byte {
	return []byte{'#'}
}
// isValidTagRune reports whether a Unicode rune may appear inside a tag.
// Unicode letters and digits from any script are allowed, as are symbols
// (the So category, which covers emoji), plus a small set of ASCII
// punctuation used for tag structure.
func isValidTagRune(r rune) bool {
	switch {
	case unicode.IsLetter(r):
		// Any script: Latin, CJK, Arabic, Cyrillic, etc.
		return true
	case unicode.IsNumber(r):
		return true
	case unicode.IsSymbol(r):
		// Includes emoji, essential for social media-style tagging.
		return true
	}
	// Structural ASCII: underscore (snake_case), hyphen (kebab-case),
	// slash (category/subcategory hierarchy), ampersand (science&tech).
	switch r {
	case '_', '-', '/', '&':
		return true
	}
	return false
}
// Parse recognizes an inline #tag token at the current reader position.
//
// Rules:
//   - '#' must be followed by at least one valid tag rune
//   - valid runes: Unicode letters, digits, symbols, plus '_', '-', '/', '&'
//     (see isValidTagRune)
//   - at most MaxTagLength runes are consumed; the rest is left in the reader
//   - "##" and "# " are rejected so headings and bare hashes parse normally
func (*tagParser) Parse(_ gast.Node, block text.Reader, _ parser.Context) gast.Node {
	line, _ := block.PeekLine()

	// Reject anything that cannot start a tag: empty input, a missing '#',
	// a lone '#', a heading marker ("##"), or "# " (heading / bare hash).
	if len(line) < 2 || line[0] != '#' || line[1] == '#' || line[1] == ' ' {
		return nil
	}

	// Scan valid tag runes after the '#'. Counting runes (not bytes) keeps
	// the MaxTagLength limit Unicode-aware; invalid UTF-8 also terminates.
	end := 1
	runes := 0
	for end < len(line) {
		r, size := utf8.DecodeRune(line[end:])
		if r == utf8.RuneError && size == 1 {
			break
		}
		if !isValidTagRune(r) {
			break
		}
		runes++
		if runes > MaxTagLength {
			break
		}
		end += size
	}

	// No valid rune followed the '#': not a tag.
	if end == 1 {
		return nil
	}

	// Copy the tag text (without '#') so the node does not alias the
	// reader's buffer.
	tag := make([]byte, end-1)
	copy(tag, line[1:end])

	block.Advance(end)
	return &mast.TagNode{
		Tag: tag,
	}
}

View File

@@ -0,0 +1,251 @@
package parser
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/yuin/goldmark/parser"
"github.com/yuin/goldmark/text"
mast "github.com/usememos/memos/plugin/markdown/ast"
)
// TestTagParser exercises the inline #tag parser across plain ASCII tags,
// hierarchical tags, Unicode (CJK, emoji) tags, and inputs that must NOT be
// treated as tags (headings, lone '#', '# ' forms).
func TestTagParser(t *testing.T) {
	tests := []struct {
		name        string // test case label
		input       string // raw text handed to the inline parser
		expectedTag string // tag text without the leading '#'
		shouldParse bool   // whether a TagNode is expected at all
	}{
		{
			name:        "basic tag",
			input:       "#tag",
			expectedTag: "tag",
			shouldParse: true,
		},
		{
			name:        "tag with hyphen",
			input:       "#work-notes",
			expectedTag: "work-notes",
			shouldParse: true,
		},
		{
			name:        "tag with ampersand",
			input:       "#science&tech",
			expectedTag: "science&tech",
			shouldParse: true,
		},
		{
			name:        "tag with underscore",
			input:       "#2024_plans",
			expectedTag: "2024_plans",
			shouldParse: true,
		},
		{
			name:        "numeric tag",
			input:       "#123",
			expectedTag: "123",
			shouldParse: true,
		},
		{
			name:        "tag followed by space",
			input:       "#tag ",
			expectedTag: "tag",
			shouldParse: true,
		},
		{
			name:        "tag followed by punctuation",
			input:       "#tag.",
			expectedTag: "tag",
			shouldParse: true,
		},
		{
			name:        "tag in sentence",
			input:       "#important task",
			expectedTag: "important",
			shouldParse: true,
		},
		{
			name:        "heading (##)",
			input:       "## Heading",
			expectedTag: "",
			shouldParse: false,
		},
		{
			name:        "space after hash",
			input:       "# heading",
			expectedTag: "",
			shouldParse: false,
		},
		{
			name:        "lone hash",
			input:       "#",
			expectedTag: "",
			shouldParse: false,
		},
		{
			name:        "hash with space",
			input:       "# ",
			expectedTag: "",
			shouldParse: false,
		},
		{
			name:        "special characters",
			input:       "#tag@special",
			expectedTag: "tag",
			shouldParse: true,
		},
		{
			name:        "mixed case",
			input:       "#WorkNotes",
			expectedTag: "WorkNotes",
			shouldParse: true,
		},
		{
			name:        "hierarchical tag with slash",
			input:       "#tag1/subtag",
			expectedTag: "tag1/subtag",
			shouldParse: true,
		},
		{
			name:        "hierarchical tag with multiple levels",
			input:       "#tag1/subtag/subtag2",
			expectedTag: "tag1/subtag/subtag2",
			shouldParse: true,
		},
		{
			name:        "hierarchical tag followed by space",
			input:       "#work/notes ",
			expectedTag: "work/notes",
			shouldParse: true,
		},
		{
			name:        "hierarchical tag followed by punctuation",
			input:       "#project/2024.",
			expectedTag: "project/2024",
			shouldParse: true,
		},
		{
			name:        "hierarchical tag with numbers and dashes",
			input:       "#work-log/2024/q1",
			expectedTag: "work-log/2024/q1",
			shouldParse: true,
		},
		{
			name:        "Chinese characters",
			input:       "#测试",
			expectedTag: "测试",
			shouldParse: true,
		},
		{
			name:        "Chinese tag followed by space",
			input:       "#测试 some text",
			expectedTag: "测试",
			shouldParse: true,
		},
		{
			name:        "Chinese tag followed by punctuation",
			input:       "#测试。",
			expectedTag: "测试",
			shouldParse: true,
		},
		{
			name:        "mixed Chinese and ASCII",
			input:       "#测试test123",
			expectedTag: "测试test123",
			shouldParse: true,
		},
		{
			name:        "Japanese characters",
			input:       "#テスト",
			expectedTag: "テスト",
			shouldParse: true,
		},
		{
			name:        "Korean characters",
			input:       "#테스트",
			expectedTag: "테스트",
			shouldParse: true,
		},
		{
			name:        "emoji",
			input:       "#test🚀",
			expectedTag: "test🚀",
			shouldParse: true,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// A fresh parser/reader/context per case keeps cases independent.
			p := NewTagParser()
			reader := text.NewReader([]byte(tt.input))
			ctx := parser.NewContext()
			// Parse returns nil when no tag is recognized at the position.
			node := p.Parse(nil, reader, ctx)
			if tt.shouldParse {
				require.NotNil(t, node, "Expected tag to be parsed")
				require.IsType(t, &mast.TagNode{}, node)
				tagNode, ok := node.(*mast.TagNode)
				require.True(t, ok, "Expected node to be *mast.TagNode")
				assert.Equal(t, tt.expectedTag, string(tagNode.Tag))
			} else {
				assert.Nil(t, node, "Expected tag NOT to be parsed")
			}
		})
	}
}
func TestTagParser_Trigger(t *testing.T) {
p := NewTagParser()
triggers := p.Trigger()
assert.Equal(t, []byte{'#'}, triggers)
}
// TestTagParser_MultipleTags ensures tags separated by a space are parsed
// one at a time as the reader advances through the input.
func TestTagParser_MultipleTags(t *testing.T) {
	p := NewTagParser()
	reader := text.NewReader([]byte("#tag1 #tag2"))
	ctx := parser.NewContext()

	// First tag: reader starts at the leading '#'.
	first := p.Parse(nil, reader, ctx)
	require.NotNil(t, first)
	firstTag, ok := first.(*mast.TagNode)
	require.True(t, ok, "Expected node1 to be *mast.TagNode")
	assert.Equal(t, "tag1", string(firstTag.Tag))

	// Skip the separating space, then parse the second tag.
	reader.Advance(1)
	second := p.Parse(nil, reader, ctx)
	require.NotNil(t, second)
	secondTag, ok := second.(*mast.TagNode)
	require.True(t, ok, "Expected node2 to be *mast.TagNode")
	assert.Equal(t, "tag2", string(secondTag.Tag))
}
// TestTagNode_Kind verifies TagNode reports the registered KindTag node kind.
func TestTagNode_Kind(t *testing.T) {
	node := &mast.TagNode{Tag: []byte("test")}
	assert.Equal(t, mast.KindTag, node.Kind())
}
// TestTagNode_Dump is a smoke test: Dump must walk the node without panicking.
func TestTagNode_Dump(t *testing.T) {
	node := &mast.TagNode{Tag: []byte("test")}
	assert.NotPanics(t, func() {
		node.Dump([]byte("#test"), 0)
	})
}

View File

@@ -0,0 +1,266 @@
package renderer
import (
"bytes"
"fmt"
"strings"
gast "github.com/yuin/goldmark/ast"
east "github.com/yuin/goldmark/extension/ast"
mast "github.com/usememos/memos/plugin/markdown/ast"
)
// MarkdownRenderer renders goldmark AST back to markdown text.
type MarkdownRenderer struct {
	buf *bytes.Buffer
}

// NewMarkdownRenderer creates a new markdown renderer.
func NewMarkdownRenderer() *MarkdownRenderer {
	return &MarkdownRenderer{buf: new(bytes.Buffer)}
}

// Render renders the AST node to markdown and returns the result.
// The renderer is reusable: the internal buffer is cleared on every call.
func (r *MarkdownRenderer) Render(node gast.Node, source []byte) string {
	r.buf.Reset()
	r.renderNode(node, source, 0)
	return r.buf.String()
}
// renderNode renders a single node and its children.
//
// Inline nodes are emitted with their markdown delimiters (`code`, *em*,
// [text](dest), etc.); block nodes additionally write a "\n\n" separator
// when another sibling follows. Unknown node types fall back to rendering
// their children, so unsupported constructs degrade gracefully.
func (r *MarkdownRenderer) renderNode(node gast.Node, source []byte, depth int) {
	switch n := node.(type) {
	case *gast.Document:
		r.renderChildren(n, source, depth)
	case *gast.Paragraph:
		r.renderChildren(n, source, depth)
		if node.NextSibling() != nil {
			r.buf.WriteString("\n\n")
		}
	case *gast.Text:
		// Text nodes store their content as segments in the source
		segment := n.Segment
		r.buf.Write(segment.Value(source))
		if n.SoftLineBreak() {
			r.buf.WriteByte('\n')
		} else if n.HardLineBreak() {
			// Markdown hard break: trailing spaces before the newline.
			r.buf.WriteString("  \n")
		}
	case *gast.CodeSpan:
		r.buf.WriteByte('`')
		r.renderChildren(n, source, depth)
		r.buf.WriteByte('`')
	case *gast.Emphasis:
		// Level 1 = *italic*, level 2 = **bold**.
		symbol := "*"
		if n.Level == 2 {
			symbol = "**"
		}
		r.buf.WriteString(symbol)
		r.renderChildren(n, source, depth)
		r.buf.WriteString(symbol)
	case *gast.Link:
		r.buf.WriteString("[")
		r.renderChildren(n, source, depth)
		r.buf.WriteString("](")
		r.buf.Write(n.Destination)
		if len(n.Title) > 0 {
			// Optional link title: [text](dest "title")
			r.buf.WriteString(` "`)
			r.buf.Write(n.Title)
			r.buf.WriteString(`"`)
		}
		r.buf.WriteString(")")
	case *gast.AutoLink:
		url := n.URL(source)
		if n.AutoLinkType == gast.AutoLinkEmail {
			// Email autolinks keep the <...> form; URL autolinks are bare.
			r.buf.WriteString("<")
			r.buf.Write(url)
			r.buf.WriteString(">")
		} else {
			r.buf.Write(url)
		}
	case *gast.Image:
		r.buf.WriteString("![")
		r.renderChildren(n, source, depth)
		r.buf.WriteString("](")
		r.buf.Write(n.Destination)
		if len(n.Title) > 0 {
			r.buf.WriteString(` "`)
			r.buf.Write(n.Title)
			r.buf.WriteString(`"`)
		}
		r.buf.WriteString(")")
	case *gast.Heading:
		r.buf.WriteString(strings.Repeat("#", n.Level))
		r.buf.WriteByte(' ')
		r.renderChildren(n, source, depth)
		if node.NextSibling() != nil {
			r.buf.WriteString("\n\n")
		}
	case *gast.CodeBlock, *gast.FencedCodeBlock:
		r.renderCodeBlock(n, source)
	case *gast.Blockquote:
		// Render each child line with "> " prefix
		r.renderBlockquote(n, source, depth)
		if node.NextSibling() != nil {
			r.buf.WriteString("\n\n")
		}
	case *gast.List:
		r.renderChildren(n, source, depth)
		if node.NextSibling() != nil {
			r.buf.WriteString("\n\n")
		}
	case *gast.ListItem:
		r.renderListItem(n, source, depth)
	case *gast.ThematicBreak:
		r.buf.WriteString("---")
		if node.NextSibling() != nil {
			r.buf.WriteString("\n\n")
		}
	case *east.Strikethrough:
		r.buf.WriteString("~~")
		r.renderChildren(n, source, depth)
		r.buf.WriteString("~~")
	case *east.TaskCheckBox:
		if n.IsChecked {
			r.buf.WriteString("[x] ")
		} else {
			r.buf.WriteString("[ ] ")
		}
	case *east.Table:
		r.renderTable(n, source)
		if node.NextSibling() != nil {
			r.buf.WriteString("\n\n")
		}
	// Custom Memos nodes
	case *mast.TagNode:
		r.buf.WriteByte('#')
		r.buf.Write(n.Tag)
	default:
		// For unknown nodes, try to render children
		r.renderChildren(n, source, depth)
	}
}
// renderChildren renders all children of a node, left to right,
// one nesting level deeper than the parent.
func (r *MarkdownRenderer) renderChildren(node gast.Node, source []byte, depth int) {
	for c := node.FirstChild(); c != nil; c = c.NextSibling() {
		r.renderNode(c, source, depth+1)
	}
}
// renderCodeBlock renders a fenced or indented code block.
func (r *MarkdownRenderer) renderCodeBlock(node gast.Node, source []byte) {
	switch cb := node.(type) {
	case *gast.FencedCodeBlock:
		// Fenced block, preserving the info string (language) if present.
		r.buf.WriteString("```")
		if lang := cb.Language(source); len(lang) > 0 {
			r.buf.Write(lang)
		}
		r.buf.WriteByte('\n')
		lines := cb.Lines()
		for i := 0; i < lines.Len(); i++ {
			r.buf.Write(lines.At(i).Value(source))
		}
		r.buf.WriteString("```")
		if node.NextSibling() != nil {
			r.buf.WriteString("\n\n")
		}
	case *gast.CodeBlock:
		// Indented block: four-space prefix per line.
		lines := cb.Lines()
		for i := 0; i < lines.Len(); i++ {
			r.buf.WriteString("    ")
			r.buf.Write(lines.At(i).Value(source))
		}
		if node.NextSibling() != nil {
			r.buf.WriteString("\n\n")
		}
	}
}
// renderBlockquote renders a blockquote, prefixing every line with "> ".
func (r *MarkdownRenderer) renderBlockquote(node *gast.Blockquote, source []byte, depth int) {
	// Render the children into a scratch renderer first so we can prefix
	// the finished content line by line.
	scratch := &MarkdownRenderer{buf: &bytes.Buffer{}}
	scratch.renderChildren(node, source, depth)

	lines := strings.Split(strings.TrimRight(scratch.buf.String(), "\n"), "\n")
	for i, line := range lines {
		r.buf.WriteString("> ")
		r.buf.WriteString(line)
		// No trailing newline after the final quoted line.
		if i < len(lines)-1 {
			r.buf.WriteByte('\n')
		}
	}
}
// renderListItem renders a list item with proper indentation and markers.
//
// For ordered lists the item number is derived from the item's position in
// its parent list (list.Start plus the number of preceding siblings) rather
// than by mutating list.Start, so rendering the same AST twice — or walking
// it again later — produces identical numbering. The previous implementation
// incremented list.Start per item, corrupting the tree for any later render.
func (r *MarkdownRenderer) renderListItem(node *gast.ListItem, source []byte, depth int) {
	parent := node.Parent()
	list, ok := parent.(*gast.List)
	if !ok {
		// Orphaned item (no enclosing list): just render its content.
		r.renderChildren(node, source, depth)
		return
	}
	// Add indentation only for nested lists
	// Document=0, List=1, ListItem=2 (no indent), nested ListItem=3+ (indent)
	if depth > 2 {
		indent := strings.Repeat("  ", depth-2)
		r.buf.WriteString(indent)
	}
	// Add list marker
	if list.IsOrdered() {
		// Ordinal = list start number + count of preceding sibling items.
		ordinal := list.Start
		for sib := node.PreviousSibling(); sib != nil; sib = sib.PreviousSibling() {
			ordinal++
		}
		fmt.Fprintf(r.buf, "%d. ", ordinal)
	} else {
		r.buf.WriteString("- ")
	}
	// Render content
	r.renderChildren(node, source, depth)
	// Newline between this item and the next sibling item.
	if node.NextSibling() != nil {
		r.buf.WriteByte('\n')
	}
}
// renderTable renders a table in markdown format.
// It only emits the cell contents via renderChildren (at depth 0); it does
// not reconstruct the header separator row or column alignment markers.
func (r *MarkdownRenderer) renderTable(table *east.Table, source []byte) {
	// This is a simplified table renderer
	// A full implementation would need to handle alignment, etc.
	r.renderChildren(table, source, 0)
}

View File

@@ -0,0 +1,176 @@
package renderer
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/yuin/goldmark"
"github.com/yuin/goldmark/extension"
"github.com/yuin/goldmark/parser"
"github.com/yuin/goldmark/text"
"github.com/usememos/memos/plugin/markdown/extensions"
)
// TestMarkdownRenderer round-trips markdown through goldmark's parser and
// the custom renderer, asserting the output equals the (normalized) input.
// Note: some expected values differ slightly from the input where rendering
// normalizes spacing (see the "complex mixed content" case).
func TestMarkdownRenderer(t *testing.T) {
	// Create goldmark instance with all extensions
	md := goldmark.New(
		goldmark.WithExtensions(
			extension.GFM,
			extensions.TagExtension,
		),
		goldmark.WithParserOptions(
			parser.WithAutoHeadingID(),
		),
	)
	tests := []struct {
		name     string // test case label
		input    string // markdown fed to the parser
		expected string // markdown expected back from the renderer
	}{
		{
			name:     "simple text",
			input:    "Hello world",
			expected: "Hello world",
		},
		{
			name:     "paragraph with newlines",
			input:    "First paragraph\n\nSecond paragraph",
			expected: "First paragraph\n\nSecond paragraph",
		},
		{
			name:     "emphasis",
			input:    "This is *italic* and **bold** text",
			expected: "This is *italic* and **bold** text",
		},
		{
			name:     "headings",
			input:    "# Heading 1\n\n## Heading 2\n\n### Heading 3",
			expected: "# Heading 1\n\n## Heading 2\n\n### Heading 3",
		},
		{
			name:     "link",
			input:    "Check [this link](https://example.com)",
			expected: "Check [this link](https://example.com)",
		},
		{
			name:     "image",
			input:    "![alt text](image.png)",
			expected: "![alt text](image.png)",
		},
		{
			name:     "code inline",
			input:    "This is `inline code` here",
			expected: "This is `inline code` here",
		},
		{
			name:     "code block fenced",
			input:    "```go\nfunc main() {\n}\n```",
			expected: "```go\nfunc main() {\n}\n```",
		},
		{
			name:     "unordered list",
			input:    "- Item 1\n- Item 2\n- Item 3",
			expected: "- Item 1\n- Item 2\n- Item 3",
		},
		{
			name:     "ordered list",
			input:    "1. First\n2. Second\n3. Third",
			expected: "1. First\n2. Second\n3. Third",
		},
		{
			name:     "blockquote",
			input:    "> This is a quote\n> Second line",
			expected: "> This is a quote\n> Second line",
		},
		{
			name:     "horizontal rule",
			input:    "Text before\n\n---\n\nText after",
			expected: "Text before\n\n---\n\nText after",
		},
		{
			name:     "strikethrough",
			input:    "This is ~~deleted~~ text",
			expected: "This is ~~deleted~~ text",
		},
		{
			name:     "task list",
			input:    "- [x] Completed task\n- [ ] Incomplete task",
			expected: "- [x] Completed task\n- [ ] Incomplete task",
		},
		{
			name:     "tag",
			input:    "This has #tag in it",
			expected: "This has #tag in it",
		},
		{
			name:     "multiple tags",
			input:    "#work #important meeting notes",
			expected: "#work #important meeting notes",
		},
		{
			name:     "complex mixed content",
			input:    "# Meeting Notes\n\n**Date**: 2024-01-01\n\n## Attendees\n- Alice\n- Bob\n\n## Discussion\n\nWe discussed #project status.\n\n```python\nprint('hello')\n```",
			expected: "# Meeting Notes\n\n**Date**: 2024-01-01\n\n## Attendees\n\n- Alice\n- Bob\n\n## Discussion\n\nWe discussed #project status.\n\n```python\nprint('hello')\n```",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// Parse the input
			source := []byte(tt.input)
			reader := text.NewReader(source)
			doc := md.Parser().Parse(reader)
			require.NotNil(t, doc)
			// Render back to markdown
			renderer := NewMarkdownRenderer()
			result := renderer.Render(doc, source)
			// For debugging
			if result != tt.expected {
				t.Logf("Input: %q", tt.input)
				t.Logf("Expected: %q", tt.expected)
				t.Logf("Got: %q", result)
			}
			assert.Equal(t, tt.expected, result)
		})
	}
}
// TestMarkdownRendererPreservesStructure checks that a parse/render round
// trip produces non-empty markdown for a variety of constructs; the output
// may differ from the input in minor formatting details.
func TestMarkdownRendererPreservesStructure(t *testing.T) {
	md := goldmark.New(
		goldmark.WithExtensions(
			extension.GFM,
			extensions.TagExtension,
		),
	)
	renderer := NewMarkdownRenderer()

	for _, input := range []string{
		"# Title\n\nParagraph",
		"**Bold** and *italic*",
		"- List\n- Items",
		"#tag #another",
		"> Quote",
	} {
		t.Run(input, func(t *testing.T) {
			src := []byte(input)
			doc := md.Parser().Parse(text.NewReader(src))
			// Structural similarity is all we require here.
			assert.NotEmpty(t, renderer.Render(doc, src))
		})
	}
}

367
plugin/scheduler/README.md Normal file
View File

@@ -0,0 +1,367 @@
# Scheduler Plugin
A production-ready, GitHub Actions-inspired cron job scheduler for Go.
## Features
- **Standard Cron Syntax**: Supports both 5-field and 6-field (with seconds) cron expressions
- **Timezone-Aware**: Explicit timezone handling to avoid DST surprises
- **Middleware Pattern**: Composable job wrappers for logging, metrics, panic recovery, timeouts
- **Graceful Shutdown**: Jobs complete cleanly or cancel when context expires
- **Zero Dependencies**: Core functionality uses only the standard library
- **Type-Safe**: Strong typing with clear error messages
- **Well-Tested**: Comprehensive test coverage
## Installation
This package is included with Memos. No separate installation required.
## Quick Start
```go
package main
import (
"context"
"fmt"
"github.com/usememos/memos/plugin/scheduler"
)
func main() {
s := scheduler.New()
s.Register(&scheduler.Job{
Name: "daily-cleanup",
Schedule: "0 2 * * *", // 2 AM daily
Handler: func(ctx context.Context) error {
fmt.Println("Running cleanup...")
return nil
},
})
s.Start()
defer s.Stop(context.Background())
// Keep running...
select {}
}
```
## Cron Expression Format
### 5-Field Format (Standard)
```
┌───────────── minute (0 - 59)
│ ┌───────────── hour (0 - 23)
│ │ ┌───────────── day of month (1 - 31)
│ │ │ ┌───────────── month (1 - 12)
│ │ │ │ ┌───────────── day of week (0 - 7) (Sunday = 0 or 7)
│ │ │ │ │
* * * * *
```
### 6-Field Format (With Seconds)
```
┌───────────── second (0 - 59)
│ ┌───────────── minute (0 - 59)
│ │ ┌───────────── hour (0 - 23)
│ │ │ ┌───────────── day of month (1 - 31)
│ │ │ │ ┌───────────── month (1 - 12)
│ │ │ │ │ ┌───────────── day of week (0 - 7)
│ │ │ │ │ │
* * * * * *
```
### Special Characters
- `*` - Any value (every minute, every hour, etc.)
- `,` - List of values: `1,15,30` (1st, 15th, and 30th)
- `-` - Range: `9-17` (9 AM through 5 PM)
- `/` - Step: `*/15` (every 15 units)
### Common Examples
| Schedule | Description |
|----------|-------------|
| `* * * * *` | Every minute |
| `0 * * * *` | Every hour |
| `0 0 * * *` | Daily at midnight |
| `0 9 * * 1-5` | Weekdays at 9 AM |
| `*/15 * * * *` | Every 15 minutes |
| `0 0 1 * *` | First day of every month |
| `0 0 * * 0` | Every Sunday at midnight |
| `30 14 * * *` | Every day at 2:30 PM |
## Timezone Support
```go
// Global timezone for all jobs
s := scheduler.New(
scheduler.WithTimezone("America/New_York"),
)
// Per-job timezone (overrides global)
s.Register(&scheduler.Job{
Name: "tokyo-report",
Schedule: "0 9 * * *", // 9 AM Tokyo time
Timezone: "Asia/Tokyo",
Handler: func(ctx context.Context) error {
// Runs at 9 AM in Tokyo
return nil
},
})
```
**Important**: Always use IANA timezone names (`America/New_York`, not `EST`).
## Middleware
Middleware wraps job handlers to add cross-cutting behavior. Multiple middleware can be chained together.
### Built-in Middleware
#### Recovery (Panic Handling)
```go
s := scheduler.New(
scheduler.WithMiddleware(
scheduler.Recovery(func(jobName string, r interface{}) {
log.Printf("Job %s panicked: %v", jobName, r)
}),
),
)
```
#### Logging
```go
type Logger interface {
Info(msg string, args ...interface{})
Error(msg string, args ...interface{})
}
s := scheduler.New(
scheduler.WithMiddleware(
scheduler.Logging(myLogger),
),
)
```
#### Timeout
```go
s := scheduler.New(
scheduler.WithMiddleware(
scheduler.Timeout(5 * time.Minute),
),
)
```
### Combining Middleware
```go
s := scheduler.New(
scheduler.WithMiddleware(
scheduler.Recovery(panicHandler),
scheduler.Logging(logger),
scheduler.Timeout(10 * time.Minute),
),
)
```
**Order matters**: Middleware is applied left-to-right. In the example above:
1. Recovery (outermost) catches panics from everything
2. Logging logs the execution
3. Timeout (innermost) wraps the actual handler
### Custom Middleware
```go
func Metrics(recorder MetricsRecorder) scheduler.Middleware {
return func(next scheduler.JobHandler) scheduler.JobHandler {
return func(ctx context.Context) error {
start := time.Now()
err := next(ctx)
duration := time.Since(start)
jobName := scheduler.GetJobName(ctx)
recorder.Record(jobName, duration, err)
return err
}
}
}
```
## Graceful Shutdown
Always use `Stop()` with a context to allow jobs to finish cleanly:
```go
// Give jobs up to 30 seconds to complete
ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
defer cancel()
if err := s.Stop(ctx); err != nil {
log.Printf("Shutdown error: %v", err)
}
```
Jobs should respect context cancellation:
```go
Handler: func(ctx context.Context) error {
for i := 0; i < 100; i++ {
select {
case <-ctx.Done():
return ctx.Err() // Canceled
default:
// Do work
}
}
return nil
}
```
## Best Practices
### 1. Always Name Your Jobs
Names are used for logging, metrics, and debugging:
```go
Name: "user-cleanup-job" // Good
Name: "job1" // Bad
```
### 2. Add Descriptions and Tags
```go
s.Register(&scheduler.Job{
Name: "stale-session-cleanup",
Description: "Removes user sessions older than 30 days",
Tags: []string{"maintenance", "security"},
Schedule: "0 3 * * *",
Handler: cleanupSessions,
})
```
### 3. Use Appropriate Middleware
Always include Recovery and Logging in production:
```go
scheduler.New(
scheduler.WithMiddleware(
scheduler.Recovery(logPanic),
scheduler.Logging(logger),
),
)
```
### 4. Avoid Scheduling Exactly on the Hour
Many systems schedule jobs at `:00`, causing load spikes. Stagger your jobs:
```go
"5 2 * * *" // 2:05 AM (good)
"0 2 * * *" // 2:00 AM (often overloaded)
```
### 5. Make Jobs Idempotent
Jobs may run multiple times (crash recovery, etc.). Design them to be safely re-runnable:
```go
Handler: func(ctx context.Context) error {
// Use unique constraint or check-before-insert
db.Exec("INSERT IGNORE INTO processed_items ...")
return nil
}
```
### 6. Handle Timezones Explicitly
Always specify timezone for business-hour jobs:
```go
Timezone: "America/New_York" // Good
// Timezone: "" // Bad (defaults to UTC)
```
### 7. Test Your Cron Expressions
Use a cron expression calculator before deploying:
- [crontab.guru](https://crontab.guru/)
- Write unit tests with the parser
## Testing Jobs
Test job handlers independently of the scheduler:
```go
func TestCleanupJob(t *testing.T) {
ctx := context.Background()
err := cleanupHandler(ctx)
if err != nil {
t.Fatalf("cleanup failed: %v", err)
}
// Verify cleanup occurred
}
```
Test schedule parsing:
```go
func TestScheduleParsing(t *testing.T) {
job := &scheduler.Job{
Name: "test",
Schedule: "0 2 * * *",
Handler: func(ctx context.Context) error { return nil },
}
if err := job.Validate(); err != nil {
t.Fatalf("invalid job: %v", err)
}
}
```
## Comparison to Other Solutions
| Feature | scheduler | robfig/cron | github.com/go-co-op/gocron |
|---------|-----------|-------------|----------------------------|
| Standard cron syntax | ✅ | ✅ | ✅ |
| Seconds support | ✅ | ✅ | ✅ |
| Timezone support | ✅ | ✅ | ✅ |
| Middleware pattern | ✅ | ⚠️ (basic) | ❌ |
| Graceful shutdown | ✅ | ⚠️ (basic) | ✅ |
| Zero dependencies | ✅ | ❌ | ❌ |
| Job metadata | ✅ | ❌ | ⚠️ (limited) |
## API Reference
See [example_test.go](./example_test.go) for comprehensive examples.
### Core Types
- `Scheduler` - Manages scheduled jobs
- `Job` - Job definition with schedule and handler
- `Middleware` - Function that wraps job handlers
### Functions
- `New(opts ...Option) *Scheduler` - Create new scheduler
- `WithTimezone(tz string) Option` - Set default timezone
- `WithMiddleware(mw ...Middleware) Option` - Add middleware
### Methods
- `Register(job *Job) error` - Add job to scheduler
- `Start() error` - Begin executing jobs
- `Stop(ctx context.Context) error` - Graceful shutdown
## License
This package is part of the Memos project and shares its license.

35
plugin/scheduler/doc.go Normal file
View File

@@ -0,0 +1,35 @@
// Package scheduler provides a GitHub Actions-inspired cron job scheduler.
//
// Features:
// - Standard cron expression syntax (5-field and 6-field formats)
// - Timezone-aware scheduling
// - Middleware pattern for cross-cutting concerns (logging, metrics, recovery)
// - Graceful shutdown with context cancellation
// - Zero external dependencies
//
// Basic usage:
//
// s := scheduler.New()
//
// s.Register(&scheduler.Job{
// Name: "daily-cleanup",
// Schedule: "0 2 * * *", // 2 AM daily
// Handler: func(ctx context.Context) error {
// // Your cleanup logic here
// return nil
// },
// })
//
// s.Start()
// defer s.Stop(context.Background())
//
// With middleware:
//
// s := scheduler.New(
// scheduler.WithTimezone("America/New_York"),
// scheduler.WithMiddleware(
//		scheduler.Recovery(panicHandler),
//		scheduler.Logging(logger),
// ),
// )
package scheduler

View File

@@ -0,0 +1,165 @@
package scheduler_test
import (
"context"
"fmt"
"log/slog"
"os"
"time"
"github.com/usememos/memos/plugin/scheduler"
)
// Example demonstrates basic scheduler usage.
func Example_basic() {
	s := scheduler.New()

	job := &scheduler.Job{
		Name:        "hello",
		Schedule:    "*/5 * * * *", // Every 5 minutes
		Description: "Say hello",
		Handler: func(_ context.Context) error {
			fmt.Println("Hello from scheduler!")
			return nil
		},
	}
	s.Register(job)

	s.Start()
	defer s.Stop(context.Background())

	// The scheduler runs in the background; give it a brief moment.
	time.Sleep(100 * time.Millisecond)
}
// Example demonstrates timezone-aware scheduling.
func Example_timezone() {
	// Jobs use the scheduler-wide timezone unless they set their own.
	s := scheduler.New(scheduler.WithTimezone("America/New_York"))

	s.Register(&scheduler.Job{
		Name:     "daily-report",
		Schedule: "0 9 * * *", // 9 AM in New York
		Handler: func(_ context.Context) error {
			fmt.Println("Generating daily report...")
			return nil
		},
	})

	s.Start()
	defer s.Stop(context.Background())
}
// Example demonstrates middleware usage.
func Example_middleware() {
	logger := slog.New(slog.NewTextHandler(os.Stdout, nil))

	// Middleware is applied left-to-right: Recovery wraps everything,
	// then Logging, then the Timeout closest to the handler.
	s := scheduler.New(
		scheduler.WithMiddleware(
			scheduler.Recovery(func(jobName string, r interface{}) {
				logger.Error("Job panicked", "job", jobName, "panic", r)
			}),
			scheduler.Logging(&slogAdapter{logger}),
			scheduler.Timeout(5*time.Minute),
		),
	)

	job := &scheduler.Job{
		Name:     "data-sync",
		Schedule: "0 */2 * * *", // Every 2 hours
		Handler: func(_ context.Context) error {
			// Your sync logic here
			return nil
		},
	}
	s.Register(job)

	s.Start()
	defer s.Stop(context.Background())
}
// slogAdapter adapts slog.Logger to scheduler.Logger interface.
type slogAdapter struct {
logger *slog.Logger
}
func (a *slogAdapter) Info(msg string, args ...interface{}) {
a.logger.Info(msg, args...)
}
func (a *slogAdapter) Error(msg string, args ...interface{}) {
a.logger.Error(msg, args...)
}
// Example demonstrates multiple jobs with different schedules.
func Example_multipleJobs() {
	s := scheduler.New()

	jobs := []*scheduler.Job{
		{
			// Cleanup old data every night at 2 AM.
			Name:     "cleanup",
			Schedule: "0 2 * * *",
			Tags:     []string{"maintenance"},
			Handler: func(_ context.Context) error {
				fmt.Println("Cleaning up old data...")
				return nil
			},
		},
		{
			// Health check every 5 minutes.
			Name:     "health-check",
			Schedule: "*/5 * * * *",
			Tags:     []string{"monitoring"},
			Handler: func(_ context.Context) error {
				fmt.Println("Running health check...")
				return nil
			},
		},
		{
			// Weekly backup on Sundays at 1 AM.
			Name:     "weekly-backup",
			Schedule: "0 1 * * 0",
			Tags:     []string{"backup"},
			Handler: func(_ context.Context) error {
				fmt.Println("Creating weekly backup...")
				return nil
			},
		},
	}
	for _, job := range jobs {
		s.Register(job)
	}

	s.Start()
	defer s.Stop(context.Background())
}
// Example demonstrates graceful shutdown with timeout.
func Example_gracefulShutdown() {
	s := scheduler.New()

	s.Register(&scheduler.Job{
		Name:     "long-running",
		Schedule: "* * * * *",
		Handler: func(ctx context.Context) error {
			// Simulate slow work that honors cancellation.
			select {
			case <-time.After(30 * time.Second):
				fmt.Println("Job completed")
			case <-ctx.Done():
				fmt.Println("Job canceled, cleaning up...")
				return ctx.Err()
			}
			return nil
		},
	})

	s.Start()

	// Simulate a shutdown signal arriving after a while.
	time.Sleep(5 * time.Second)

	// Give in-flight jobs up to 10 seconds to finish.
	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()
	if err := s.Stop(ctx); err != nil {
		fmt.Printf("Shutdown error: %v\n", err)
	}
}

View File

@@ -0,0 +1,393 @@
package scheduler_test
import (
"context"
"errors"
"fmt"
"strings"
"sync"
"sync/atomic"
"testing"
"time"
"github.com/usememos/memos/plugin/scheduler"
)
// TestRealWorldScenario tests a realistic multi-job scenario.
//
// Two jobs with second-granularity schedules run for ~5 seconds under the
// full middleware stack (recovery, logging, timeout); the test then checks
// minimum execution counts and that start/completion log lines were emitted.
func TestRealWorldScenario(t *testing.T) {
	var (
		quickJobCount  atomic.Int32
		hourlyJobCount atomic.Int32
		logEntries     []string
		logMu          sync.Mutex // guards logEntries: the logger runs on job goroutines
	)
	logger := &testLogger{
		onInfo: func(msg string, _ ...interface{}) {
			logMu.Lock()
			logEntries = append(logEntries, fmt.Sprintf("INFO: %s", msg))
			logMu.Unlock()
		},
		onError: func(msg string, _ ...interface{}) {
			logMu.Lock()
			logEntries = append(logEntries, fmt.Sprintf("ERROR: %s", msg))
			logMu.Unlock()
		},
	}
	s := scheduler.New(
		scheduler.WithTimezone("UTC"),
		scheduler.WithMiddleware(
			scheduler.Recovery(func(jobName string, r interface{}) {
				t.Logf("Job %s panicked: %v", jobName, r)
			}),
			scheduler.Logging(logger),
			scheduler.Timeout(5*time.Second),
		),
	)
	// Quick job (every second)
	s.Register(&scheduler.Job{
		Name:     "quick-check",
		Schedule: "* * * * * *",
		Handler: func(_ context.Context) error {
			quickJobCount.Add(1)
			time.Sleep(100 * time.Millisecond)
			return nil
		},
	})
	// Slower job (every 2 seconds)
	s.Register(&scheduler.Job{
		Name:     "slow-process",
		Schedule: "*/2 * * * * *",
		Handler: func(_ context.Context) error {
			hourlyJobCount.Add(1)
			time.Sleep(500 * time.Millisecond)
			return nil
		},
	})
	// Start scheduler
	if err := s.Start(); err != nil {
		t.Fatalf("failed to start scheduler: %v", err)
	}
	// Let it run for 5 seconds
	time.Sleep(5 * time.Second)
	// Graceful shutdown
	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()
	if err := s.Stop(ctx); err != nil {
		t.Fatalf("failed to stop scheduler: %v", err)
	}
	// Verify execution counts.
	// Lower bounds are deliberately loose to tolerate scheduling jitter in CI.
	quick := quickJobCount.Load()
	slow := hourlyJobCount.Load()
	t.Logf("Quick job ran %d times", quick)
	t.Logf("Slow job ran %d times", slow)
	if quick < 4 {
		t.Errorf("expected quick job to run at least 4 times, ran %d", quick)
	}
	if slow < 2 {
		t.Errorf("expected slow job to run at least 2 times, ran %d", slow)
	}
	// Verify logging
	logMu.Lock()
	defer logMu.Unlock()
	hasStartLog := false
	hasCompleteLog := false
	for _, entry := range logEntries {
		if contains(entry, "Job started") {
			hasStartLog = true
		}
		if contains(entry, "Job completed") {
			hasCompleteLog = true
		}
	}
	if !hasStartLog {
		t.Error("expected job start logs")
	}
	if !hasCompleteLog {
		t.Error("expected job completion logs")
	}
}
// TestCancellationDuringExecution verifies jobs can be canceled mid-execution.
//
// The job would run ~10s but checks ctx.Done() between 100ms work slices;
// stopping the scheduler must cancel the job context and the handler must
// observe the cancellation.
func TestCancellationDuringExecution(t *testing.T) {
	var canceled atomic.Bool
	var started atomic.Bool
	s := scheduler.New()
	s.Register(&scheduler.Job{
		Name:     "long-job",
		Schedule: "* * * * * *",
		Handler: func(ctx context.Context) error {
			started.Store(true)
			// Simulate long-running work
			for i := 0; i < 100; i++ {
				select {
				case <-ctx.Done():
					canceled.Store(true)
					return ctx.Err()
				case <-time.After(100 * time.Millisecond):
					// Keep working
				}
			}
			return nil
		},
	})
	if err := s.Start(); err != nil {
		t.Fatalf("failed to start: %v", err)
	}
	// Wait until job starts (poll up to ~3s; the schedule fires every second)
	for i := 0; i < 30; i++ {
		if started.Load() {
			break
		}
		time.Sleep(100 * time.Millisecond)
	}
	if !started.Load() {
		t.Fatal("job did not start within timeout")
	}
	// Stop with reasonable timeout
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()
	if err := s.Stop(ctx); err != nil {
		t.Logf("stop returned error (may be expected): %v", err)
	}
	if !canceled.Load() {
		t.Error("expected job to detect cancellation")
	}
}
// TestTimezoneHandling verifies timezone-aware scheduling.
func TestTimezoneHandling(t *testing.T) {
	// "0 9 * * *" fires daily at 9 AM in the schedule's location.
	schedule, err := scheduler.ParseCronExpression("0 9 * * *")
	if err != nil {
		t.Fatalf("failed to parse schedule: %v", err)
	}
	// Evaluate the schedule in the New York timezone.
	nyc, err := time.LoadLocation("America/New_York")
	if err != nil {
		t.Fatalf("failed to load timezone: %v", err)
	}
	cases := []struct {
		now  time.Time
		want time.Time
	}{
		// 8:30 AM: the next run is 9:00 AM the same day.
		{
			now:  time.Date(2025, 1, 15, 8, 30, 0, 0, nyc),
			want: time.Date(2025, 1, 15, 9, 0, 0, 0, nyc),
		},
		// 9:30 AM (already past 9): the next run rolls over to the next day.
		{
			now:  time.Date(2025, 1, 15, 9, 30, 0, 0, nyc),
			want: time.Date(2025, 1, 16, 9, 0, 0, 0, nyc),
		},
	}
	for _, tc := range cases {
		if next := schedule.Next(tc.now); !next.Equal(tc.want) {
			t.Errorf("next = %v, expected %v", next, tc.want)
		}
	}
}
// TestErrorPropagation verifies error handling.
func TestErrorPropagation(t *testing.T) {
	var errorLogged atomic.Bool
	// Capture whether the logging middleware reports the job failure.
	logger := &testLogger{
		onError: func(msg string, _ ...interface{}) {
			if msg == "Job failed" {
				errorLogged.Store(true)
			}
		},
	}
	s := scheduler.New(scheduler.WithMiddleware(scheduler.Logging(logger)))
	s.Register(&scheduler.Job{
		Name:     "failing-job",
		Schedule: "* * * * * *",
		Handler: func(_ context.Context) error {
			return errors.New("intentional error")
		},
	})
	if err := s.Start(); err != nil {
		t.Fatalf("failed to start: %v", err)
	}
	// Sleep past one 1-second tick so the job fires at least once.
	time.Sleep(1500 * time.Millisecond)
	stopCtx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()
	if err := s.Stop(stopCtx); err != nil {
		t.Fatalf("failed to stop: %v", err)
	}
	if !errorLogged.Load() {
		t.Error("expected error to be logged")
	}
}
// TestPanicRecovery verifies panic recovery middleware.
func TestPanicRecovery(t *testing.T) {
	var panicRecovered atomic.Bool
	// Record the recovery and surface it in the test log.
	onPanic := func(jobName string, r interface{}) {
		panicRecovered.Store(true)
		t.Logf("Recovered from panic in job %s: %v", jobName, r)
	}
	s := scheduler.New(scheduler.WithMiddleware(scheduler.Recovery(onPanic)))
	s.Register(&scheduler.Job{
		Name:     "panicking-job",
		Schedule: "* * * * * *",
		Handler: func(_ context.Context) error {
			panic("intentional panic for testing")
		},
	})
	if err := s.Start(); err != nil {
		t.Fatalf("failed to start: %v", err)
	}
	// Sleep past one 1-second tick so the job fires at least once.
	time.Sleep(1500 * time.Millisecond)
	stopCtx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()
	if err := s.Stop(stopCtx); err != nil {
		t.Fatalf("failed to stop: %v", err)
	}
	if !panicRecovered.Load() {
		t.Error("expected panic to be recovered")
	}
}
// TestMultipleJobsWithDifferentSchedules verifies concurrent job execution.
func TestMultipleJobsWithDifferentSchedules(t *testing.T) {
	var job1Count, job2Count, job3Count atomic.Int32
	s := scheduler.New()
	// Register one counter-bumping job per schedule: every 1s, 2s, and 3s.
	for _, spec := range []struct {
		name     string
		schedule string
		counter  *atomic.Int32
	}{
		{"job-1sec", "* * * * * *", &job1Count},
		{"job-2sec", "*/2 * * * * *", &job2Count},
		{"job-3sec", "*/3 * * * * *", &job3Count},
	} {
		counter := spec.counter
		s.Register(&scheduler.Job{
			Name:     spec.name,
			Schedule: spec.schedule,
			Handler: func(_ context.Context) error {
				counter.Add(1)
				return nil
			},
		})
	}
	if err := s.Start(); err != nil {
		t.Fatalf("failed to start: %v", err)
	}
	// Let all three run concurrently for 6 seconds.
	time.Sleep(6 * time.Second)
	stopCtx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()
	if err := s.Stop(stopCtx); err != nil {
		t.Fatalf("failed to stop: %v", err)
	}
	// Lower bounds allow for scheduling/timing variance.
	c1, c2, c3 := job1Count.Load(), job2Count.Load(), job3Count.Load()
	t.Logf("Job 1 ran %d times, Job 2 ran %d times, Job 3 ran %d times", c1, c2, c3)
	if c1 < 5 {
		t.Errorf("expected job1 to run at least 5 times, ran %d", c1)
	}
	if c2 < 2 {
		t.Errorf("expected job2 to run at least 2 times, ran %d", c2)
	}
	if c3 < 1 {
		t.Errorf("expected job3 to run at least 1 time, ran %d", c3)
	}
}
// Helpers
// testLogger is a logger stub used with scheduler.Logging in these tests.
// It forwards each call to an optional callback so tests can observe
// exactly which messages get logged; unset callbacks make the call a no-op.
type testLogger struct {
	onInfo  func(msg string, args ...interface{})  // invoked by Info when non-nil
	onError func(msg string, args ...interface{}) // invoked by Error when non-nil
}
// Info forwards the message and args to the onInfo callback when one is set.
func (l *testLogger) Info(msg string, args ...interface{}) {
	if l.onInfo == nil {
		return
	}
	l.onInfo(msg, args...)
}
// Error forwards the message and args to the onError callback when one is set.
func (l *testLogger) Error(msg string, args ...interface{}) {
	if l.onError == nil {
		return
	}
	l.onError(msg, args...)
}
// contains reports whether substr occurs within s.
func contains(s, substr string) bool {
	return strings.Index(s, substr) >= 0
}

58
plugin/scheduler/job.go Normal file
View File

@@ -0,0 +1,58 @@
package scheduler
import (
	"context"
	"time"

	"github.com/pkg/errors"
)
// JobHandler is the function signature for scheduled job handlers.
// The context passed to the handler will be canceled if the scheduler is
// shutting down, so long-running handlers should watch ctx.Done() and
// return early (typically with ctx.Err()) when it fires.
type JobHandler func(ctx context.Context) error
// Job represents a scheduled task.
//
// Name, Schedule, and Handler are required (see Validate); the remaining
// fields are optional metadata and configuration.
type Job struct {
	// Name is a unique identifier for this job (required).
	// Used for logging and metrics.
	Name string
	// Schedule is a cron expression defining when this job runs (required).
	// Supports the standard 5-field format "minute hour day month weekday"
	// (e.g. "0 * * * *" hourly, "0 0 * * *" daily at midnight) as well as a
	// 6-field variant with a leading seconds field (e.g. "*/2 * * * * *"),
	// as exercised by this package's tests.
	Schedule string
	// Timezone for schedule evaluation (optional, defaults to UTC).
	// Use IANA timezone names: "America/New_York", "Europe/London", etc.
	Timezone string
	// Handler is the function to execute when the job triggers (required).
	// It receives a context that is canceled on scheduler shutdown.
	Handler JobHandler
	// Description provides human-readable context about what this job does (optional).
	Description string
	// Tags allow categorizing jobs for filtering/monitoring (optional).
	Tags []string
}
// Validate checks if the job definition is valid.
// It verifies that the required fields (Name, Schedule, Handler) are set,
// that Schedule is a parseable cron expression, and that Timezone — when
// provided — resolves to a real IANA location, so that misconfiguration is
// caught at registration time instead of when the schedule is first evaluated.
func (j *Job) Validate() error {
	if j.Name == "" {
		return errors.New("job name is required")
	}
	if j.Schedule == "" {
		return errors.New("job schedule is required")
	}
	// Validate cron expression using parser.
	if _, err := ParseCronExpression(j.Schedule); err != nil {
		return errors.Wrap(err, "invalid cron expression")
	}
	// Timezone is optional (defaults to UTC), but if set it must be loadable.
	if j.Timezone != "" {
		if _, err := time.LoadLocation(j.Timezone); err != nil {
			return errors.Wrap(err, "invalid timezone")
		}
	}
	if j.Handler == nil {
		return errors.New("job handler is required")
	}
	return nil
}

Some files were not shown because too many files have changed in this diff Show More