69  .env.example  Normal file
@@ -0,0 +1,69 @@
# PIM MCP Server Configuration
# Copy this file to .env and fill in your values

# =============================================================================
# Server Configuration
# =============================================================================
SERVER_NAME="PIM MCP Server"
PORT=8000
HOST=0.0.0.0
ENVIRONMENT=production

# API Authentication
# Generate with: python -c "import secrets; print(secrets.token_urlsafe(32))"
MCP_API_KEY=your-secure-api-key-here

# =============================================================================
# IMAP Configuration (for reading emails)
# =============================================================================
IMAP_HOST=imap.example.com
IMAP_PORT=993
IMAP_USERNAME=user@example.com
IMAP_PASSWORD=your-imap-password
IMAP_USE_SSL=true

# =============================================================================
# SMTP Configuration (for sending emails)
# =============================================================================
SMTP_HOST=smtp.example.com
SMTP_PORT=587
SMTP_USERNAME=user@example.com
SMTP_PASSWORD=your-smtp-password
SMTP_USE_TLS=true
SMTP_FROM_EMAIL=user@example.com
SMTP_FROM_NAME=Your Name

# =============================================================================
# CalDAV Configuration (Calendar)
# =============================================================================
# Examples for common providers:
# - Nextcloud: https://cloud.example.com/remote.php/dav
# - Fastmail: https://caldav.fastmail.com/dav/calendars/user/you@fastmail.com
# - Radicale: https://radicale.example.com/user/
CALDAV_URL=https://caldav.example.com/dav
CALDAV_USERNAME=user@example.com
CALDAV_PASSWORD=your-caldav-password

# =============================================================================
# CardDAV Configuration (Contacts)
# =============================================================================
# Examples for common providers:
# - Nextcloud: https://cloud.example.com/remote.php/dav
# - Fastmail: https://carddav.fastmail.com/dav/addressbooks/user/you@fastmail.com
# - Radicale: https://radicale.example.com/user/
CARDDAV_URL=https://carddav.example.com/dav
CARDDAV_USERNAME=user@example.com
CARDDAV_PASSWORD=your-carddav-password

# =============================================================================
# Cache Configuration
# =============================================================================
SQLITE_PATH=/data/cache.db
CACHE_TTL_SECONDS=300

# =============================================================================
# Feature Flags (disable services you don't need)
# =============================================================================
ENABLE_EMAIL=true
ENABLE_CALENDAR=true
ENABLE_CONTACTS=true

29  .gitea/workflows/gitea-ci.yml  Normal file
@@ -0,0 +1,29 @@
name: Build And Test
run-name: ${{ gitea.actor }} runs ci pipeline
on: [ push ]

jobs:
  publish:
    if: gitea.ref == 'refs/heads/main'
    steps:
      - uses: https://github.com/actions/checkout@v4

      - name: Set up Docker Buildx
        uses: https://github.com/docker/setup-buildx-action@v3

      - name: Log in to Docker registry
        uses: https://github.com/docker/login-action@v3
        with:
          registry: registry.yigit.run
          username: ${{ secrets.REGISTRY_USERNAME }}
          password: ${{ secrets.REGISTRY_PASSWORD }}

      - name: Build and push Docker image
        uses: https://github.com/docker/build-push-action@v5
        with:
          context: .
          file: ./Dockerfile
          push: true
          tags: |
            registry.yigit.run/yigit/pim-mcp-server:${{ gitea.sha }}
            registry.yigit.run/yigit/pim-mcp-server:latest

223  .gitignore  vendored  Normal file
@@ -0,0 +1,223 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[codz]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py.cover
.hypothesis/
.pytest_cache/
cover/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
# Pipfile.lock

# UV
# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# uv.lock

# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
# poetry.lock
# poetry.toml

# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
# pdm recommends including project-wide configuration in pdm.toml, but excluding .pdm-python.
# https://pdm-project.org/en/latest/usage/project/#working-with-version-control
# pdm.lock
# pdm.toml
.pdm-python
.pdm-build/

# pixi
# Similar to Pipfile.lock, it is generally recommended to include pixi.lock in version control.
# pixi.lock
# Pixi creates a virtual environment in the .pixi directory, just like venv module creates one
# in the .venv directory. It is recommended not to include this directory in version control.
.pixi

# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# Redis
*.rdb
*.aof
*.pid

# RabbitMQ
mnesia/
rabbitmq/
rabbitmq-data/

# ActiveMQ
activemq-data/

# SageMath parsed files
*.sage.py

# Environments
.env
.envrc
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# Cython debug symbols
cython_debug/

# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
# .idea/

# Abstra
# Abstra is an AI-powered process automation framework.
# Ignore directories containing user credentials, local state, and settings.
# Learn more at https://abstra.io/docs
.abstra/

# Visual Studio Code
# Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore
# that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore
# and can be added to the global gitignore or merged into this file. However, if you prefer,
# you could uncomment the following to ignore the entire vscode folder
# .vscode/

# Ruff stuff:
.ruff_cache/

# PyPI configuration file
.pypirc

# Marimo
marimo/_static/
marimo/_lsp/
__marimo__/

# Streamlit
.streamlit/secrets.toml

# PIM MCP Server specific
/data/
*.db
*.sqlite
.env.local
.env.*.local

36  Dockerfile  Normal file
@@ -0,0 +1,36 @@
FROM python:3.12-slim

# Set environment variables
ENV PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1 \
    PIP_NO_CACHE_DIR=1 \
    PIP_DISABLE_PIP_VERSION_CHECK=1

# Create non-root user
RUN groupadd -r mcp && useradd -r -g mcp mcp

# Set working directory
WORKDIR /app

# Install dependencies
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Copy application code
COPY src/ ./src/

# Create data directory for SQLite
RUN mkdir -p /data && chown -R mcp:mcp /data /app

# Switch to non-root user
USER mcp

# Expose port
EXPOSE 8000

# Health check
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
    CMD python -c "import urllib.request; urllib.request.urlopen('http://localhost:8000/mcp')" || exit 1

# Run the server
CMD ["python", "src/server.py"]

68  alembic.ini  Normal file
@@ -0,0 +1,68 @@
# Alembic Configuration for PIM MCP Server

[alembic]
# Path to migration scripts
script_location = migrations

# Template used to generate migration files
file_template = %%(year)d%%(month).2d%%(day).2d_%%(hour).2d%%(minute).2d%%(second).2d_%%(rev)s_%%(slug)s

# Truncate long revision identifiers
truncate_slug_length = 40

# Set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# Set to 'true' to allow .pyc and .pyo files without
# having the source .py files present
# sourceless = false

# SQLite URL - can be overridden by SQLITE_PATH env var
# The actual URL is constructed in env.py
sqlalchemy.url = sqlite+aiosqlite:///data/cache.db

# Version path separator
version_path_separator = os

[post_write_hooks]
# Format migration files with black (if installed)
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -q

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

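The same configuration can also be driven from Python rather than the alembic CLI. A minimal sketch, not part of the committed files, assuming it runs from the repository root where alembic.ini and migrations/ live:

# Minimal sketch: apply migrations programmatically via Alembic's command API.
# Equivalent to running "alembic upgrade head" from the repository root.
import os

from alembic import command
from alembic.config import Config

os.environ.setdefault("SQLITE_PATH", "data/cache.db")  # picked up by migrations/env.py

cfg = Config("alembic.ini")
command.upgrade(cfg, "head")  # apply all pending migrations
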
66  docker-compose.yml  Normal file
@@ -0,0 +1,66 @@
services:
  pim-mcp-server:
    build:
      context: .
      dockerfile: Dockerfile
    container_name: pim-mcp-server
    restart: unless-stopped
    ports:
      - "${PORT:-8000}:8000"
    volumes:
      - pim-data:/data
    environment:
      # Server Configuration
      - SERVER_NAME=${SERVER_NAME:-PIM MCP Server}
      - PORT=${PORT:-8000}
      - HOST=0.0.0.0
      - ENVIRONMENT=${ENVIRONMENT:-production}

      # API Authentication
      - MCP_API_KEY=${MCP_API_KEY}

      # IMAP Configuration (Email Reading)
      - IMAP_HOST=${IMAP_HOST}
      - IMAP_PORT=${IMAP_PORT:-993}
      - IMAP_USERNAME=${IMAP_USERNAME}
      - IMAP_PASSWORD=${IMAP_PASSWORD}
      - IMAP_USE_SSL=${IMAP_USE_SSL:-true}

      # SMTP Configuration (Email Sending)
      - SMTP_HOST=${SMTP_HOST}
      - SMTP_PORT=${SMTP_PORT:-587}
      - SMTP_USERNAME=${SMTP_USERNAME}
      - SMTP_PASSWORD=${SMTP_PASSWORD}
      - SMTP_USE_TLS=${SMTP_USE_TLS:-true}
      - SMTP_FROM_EMAIL=${SMTP_FROM_EMAIL}
      - SMTP_FROM_NAME=${SMTP_FROM_NAME}

      # CalDAV Configuration (Calendar)
      - CALDAV_URL=${CALDAV_URL}
      - CALDAV_USERNAME=${CALDAV_USERNAME}
      - CALDAV_PASSWORD=${CALDAV_PASSWORD}

      # CardDAV Configuration (Contacts)
      - CARDDAV_URL=${CARDDAV_URL}
      - CARDDAV_USERNAME=${CARDDAV_USERNAME}
      - CARDDAV_PASSWORD=${CARDDAV_PASSWORD}

      # Cache Configuration
      - SQLITE_PATH=/data/cache.db
      - CACHE_TTL_SECONDS=${CACHE_TTL_SECONDS:-300}

      # Feature Flags
      - ENABLE_EMAIL=${ENABLE_EMAIL:-true}
      - ENABLE_CALENDAR=${ENABLE_CALENDAR:-true}
      - ENABLE_CONTACTS=${ENABLE_CONTACTS:-true}

    healthcheck:
      test: ["CMD", "python", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:8000/mcp')"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 10s

volumes:
  pim-data:
    driver: local

101  migrations/env.py  Normal file
@@ -0,0 +1,101 @@
"""Alembic migration environment configuration."""

import asyncio
import os
import sys
from logging.config import fileConfig

from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config
from sqlmodel import SQLModel

from alembic import context

# Add src to path for imports
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "src"))

# Import all models to register them with SQLModel.metadata
from database.models import (  # noqa: F401
    CacheMeta,
    EmailCache,
    EventCache,
    ContactCache,
    SyncState,
)

# this is the Alembic Config object
config = context.config

# Interpret the config file for Python logging
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# SQLModel metadata for autogenerate
target_metadata = SQLModel.metadata

# Get database URL from environment or config
def get_url() -> str:
    """Get database URL from environment variable or config."""
    sqlite_path = os.environ.get("SQLITE_PATH", "data/cache.db")
    return f"sqlite+aiosqlite:///{sqlite_path}"


def run_migrations_offline() -> None:
    """
    Run migrations in 'offline' mode.

    This configures the context with just a URL and not an Engine.
    Calls to context.execute() emit the SQL to the script output.
    """
    url = get_url()
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
        render_as_batch=True,  # Required for SQLite ALTER TABLE support
    )

    with context.begin_transaction():
        context.run_migrations()


def do_run_migrations(connection: Connection) -> None:
    """Run migrations with the given connection."""
    context.configure(
        connection=connection,
        target_metadata=target_metadata,
        render_as_batch=True,  # Required for SQLite ALTER TABLE support
    )

    with context.begin_transaction():
        context.run_migrations()


async def run_async_migrations() -> None:
    """Run migrations in 'online' mode with async engine."""
    configuration = config.get_section(config.config_ini_section) or {}
    configuration["sqlalchemy.url"] = get_url()

    connectable = async_engine_from_config(
        configuration,
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    async with connectable.connect() as connection:
        await connection.run_sync(do_run_migrations)

    await connectable.dispose()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode."""
    asyncio.run(run_async_migrations())


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

27  migrations/script.py.mako  Normal file
@@ -0,0 +1,27 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
import sqlmodel
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}

11  render.yaml  Normal file
@@ -0,0 +1,11 @@
services:
  - type: web
    name: fastmcp-server
    runtime: python
    buildCommand: pip install -r requirements.txt
    startCommand: python src/server.py
    plan: free
    autoDeploy: false
    envVars:
      - key: ENVIRONMENT
        value: production

28  requirements.txt  Normal file
@@ -0,0 +1,28 @@
# FastMCP framework
fastmcp>=2.12.0
uvicorn>=0.35.0

# Email (IMAP/SMTP)
imapclient>=3.0.1
aiosmtplib>=3.0.2

# Calendar (CalDAV)
caldav>=1.4.0
icalendar>=6.0.0

# Contacts (CardDAV)
vobject>=0.9.8
httpx>=0.28.0

# Database & Config
sqlmodel>=0.0.22
alembic>=1.14.0
aiosqlite>=0.20.0
greenlet>=3.1.0
pydantic>=2.10.0
pydantic-settings>=2.6.1

# Utilities
python-dateutil>=2.9.0
email-validator>=2.2.0
python-dotenv>=1.0.1

91  src/config.py  Normal file
@@ -0,0 +1,91 @@
from pydantic_settings import BaseSettings
from pydantic import Field, SecretStr
from typing import Optional


class Settings(BaseSettings):
    # MCP Server
    server_name: str = Field(default="PIM MCP Server", alias="SERVER_NAME")
    server_port: int = Field(default=8000, alias="PORT")
    server_host: str = Field(default="0.0.0.0", alias="HOST")
    environment: str = Field(default="development", alias="ENVIRONMENT")

    # API Authentication
    mcp_api_key: Optional[SecretStr] = Field(default=None, alias="MCP_API_KEY")

    # IMAP Configuration
    imap_host: Optional[str] = Field(default=None, alias="IMAP_HOST")
    imap_port: int = Field(default=993, alias="IMAP_PORT")
    imap_username: Optional[str] = Field(default=None, alias="IMAP_USERNAME")
    imap_password: Optional[SecretStr] = Field(default=None, alias="IMAP_PASSWORD")
    imap_use_ssl: bool = Field(default=True, alias="IMAP_USE_SSL")

    # SMTP Configuration
    smtp_host: Optional[str] = Field(default=None, alias="SMTP_HOST")
    smtp_port: int = Field(default=587, alias="SMTP_PORT")
    smtp_username: Optional[str] = Field(default=None, alias="SMTP_USERNAME")
    smtp_password: Optional[SecretStr] = Field(default=None, alias="SMTP_PASSWORD")
    smtp_use_tls: bool = Field(default=True, alias="SMTP_USE_TLS")
    smtp_from_email: Optional[str] = Field(default=None, alias="SMTP_FROM_EMAIL")
    smtp_from_name: Optional[str] = Field(default=None, alias="SMTP_FROM_NAME")

    # CalDAV Configuration
    caldav_url: Optional[str] = Field(default=None, alias="CALDAV_URL")
    caldav_username: Optional[str] = Field(default=None, alias="CALDAV_USERNAME")
    caldav_password: Optional[SecretStr] = Field(default=None, alias="CALDAV_PASSWORD")

    # CardDAV Configuration
    carddav_url: Optional[str] = Field(default=None, alias="CARDDAV_URL")
    carddav_username: Optional[str] = Field(default=None, alias="CARDDAV_USERNAME")
    carddav_password: Optional[SecretStr] = Field(default=None, alias="CARDDAV_PASSWORD")

    # SQLite Cache
    sqlite_path: str = Field(default="/data/cache.db", alias="SQLITE_PATH")
    cache_ttl_seconds: int = Field(default=300, alias="CACHE_TTL_SECONDS")

    # Feature Flags
    enable_email: bool = Field(default=True, alias="ENABLE_EMAIL")
    enable_calendar: bool = Field(default=True, alias="ENABLE_CALENDAR")
    enable_contacts: bool = Field(default=True, alias="ENABLE_CONTACTS")

    model_config = {
        "env_file": ".env",
        "env_file_encoding": "utf-8",
        "populate_by_name": True,
        "extra": "ignore",
    }

    def is_email_configured(self) -> bool:
        return all([
            self.enable_email,
            self.imap_host,
            self.imap_username,
            self.imap_password,
        ])

    def is_smtp_configured(self) -> bool:
        return all([
            self.smtp_host,
            self.smtp_username,
            self.smtp_password,
            self.smtp_from_email,
        ])

    def is_calendar_configured(self) -> bool:
        return all([
            self.enable_calendar,
            self.caldav_url,
            self.caldav_username,
            self.caldav_password,
        ])

    def is_contacts_configured(self) -> bool:
        return all([
            self.enable_contacts,
            self.carddav_url,
            self.carddav_username,
            self.carddav_password,
        ])


settings = Settings()

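A minimal sketch of how these settings resolve, assuming standard pydantic-settings behavior and run with src/ on the path; the values below are placeholders and not part of the committed files. Environment variables take precedence over .env, the upper-case aliases map onto the snake_case fields, and SecretStr keeps credentials masked until get_secret_value() is called:

# Assumed usage sketch with placeholder values.
import os

os.environ["IMAP_HOST"] = "imap.example.com"
os.environ["IMAP_USERNAME"] = "user@example.com"
os.environ["IMAP_PASSWORD"] = "not-a-real-password"

from config import Settings

s = Settings()
print(s.imap_port)                         # 993: default, since IMAP_PORT is unset
print(s.imap_password)                     # SecretStr('**********'), masked in repr
print(s.imap_password.get_secret_value())  # the actual value, only when explicitly requested
print(s.is_email_configured())             # True: ENABLE_EMAIL defaults to true and the IMAP_* vars are set
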
22  src/database/__init__.py  Normal file
@@ -0,0 +1,22 @@
from .connection import get_engine, get_session, init_db, close_db
from .models import (
    EmailCache,
    EventCache,
    ContactCache,
    SyncState,
    CacheMeta,
)

__all__ = [
    # Connection
    "get_engine",
    "get_session",
    "init_db",
    "close_db",
    # Models
    "EmailCache",
    "EventCache",
    "ContactCache",
    "SyncState",
    "CacheMeta",
]

93  src/database/connection.py  Normal file
@@ -0,0 +1,93 @@
"""Database connection management using SQLModel with async SQLite."""

from pathlib import Path
from typing import Optional
from contextlib import asynccontextmanager

from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession, AsyncEngine
from sqlalchemy.orm import sessionmaker
from sqlmodel import SQLModel

_engine: Optional[AsyncEngine] = None
_session_factory: Optional[sessionmaker] = None


def get_engine() -> AsyncEngine:
    """Get the async database engine."""
    if _engine is None:
        raise RuntimeError("Database not initialized. Call init_db() first.")
    return _engine


def get_session_factory() -> sessionmaker:
    """Get the session factory."""
    if _session_factory is None:
        raise RuntimeError("Database not initialized. Call init_db() first.")
    return _session_factory


async def init_db(database_path: str) -> AsyncEngine:
    """
    Initialize the database engine and create tables.

    Args:
        database_path: Path to the SQLite database file.

    Returns:
        The async database engine.
    """
    global _engine, _session_factory

    # Ensure directory exists
    db_path = Path(database_path)
    db_path.parent.mkdir(parents=True, exist_ok=True)

    # Create async engine for SQLite
    database_url = f"sqlite+aiosqlite:///{database_path}"
    _engine = create_async_engine(
        database_url,
        echo=False,
        future=True,
    )

    # Create session factory
    _session_factory = sessionmaker(
        bind=_engine,
        class_=AsyncSession,
        expire_on_commit=False,
    )

    # Create tables (for initial setup without migrations)
    # In production, use Alembic migrations instead
    async with _engine.begin() as conn:
        await conn.run_sync(SQLModel.metadata.create_all)

    return _engine


async def close_db():
    """Close the database connection."""
    global _engine, _session_factory
    if _engine:
        await _engine.dispose()
        _engine = None
        _session_factory = None

@asynccontextmanager
async def get_session():
    """
    Get an async database session.

    Usage:
        async with get_session() as session:
            result = await session.execute(select(EmailCache))
    """
    factory = get_session_factory()
    async with factory() as session:
        try:
            yield session
            await session.commit()
        except Exception:
            await session.rollback()
            raise

70  src/database/models.py  Normal file
@@ -0,0 +1,70 @@
"""SQLModel database models for caching PIM data."""

from datetime import datetime
from typing import Optional
from sqlmodel import SQLModel, Field


class CacheMeta(SQLModel, table=True):
    """Generic key-value cache metadata."""

    __tablename__ = "cache_meta"

    key: str = Field(primary_key=True)
    value: Optional[str] = None
    expires_at: Optional[int] = None


class EmailCache(SQLModel, table=True):
    """Cached email data."""

    __tablename__ = "email_cache"

    id: str = Field(primary_key=True)
    mailbox: str = Field(index=True)
    subject: Optional[str] = None
    from_address: Optional[str] = None
    date: Optional[datetime] = Field(default=None, index=True)
    is_read: bool = False
    is_flagged: bool = False
    snippet: Optional[str] = None
    full_data: Optional[str] = Field(default=None, description="JSON blob of full email data")
    cached_at: datetime = Field(default_factory=datetime.utcnow)


class EventCache(SQLModel, table=True):
    """Cached calendar event data."""

    __tablename__ = "event_cache"

    id: str = Field(primary_key=True)
    calendar_id: str = Field(index=True)
    title: Optional[str] = None
    start_time: Optional[datetime] = Field(default=None, index=True)
    end_time: Optional[datetime] = None
    full_data: Optional[str] = Field(default=None, description="JSON blob of full event data")
    cached_at: datetime = Field(default_factory=datetime.utcnow)


class ContactCache(SQLModel, table=True):
    """Cached contact data."""

    __tablename__ = "contact_cache"

    id: str = Field(primary_key=True)
    addressbook_id: str = Field(index=True)
    display_name: Optional[str] = Field(default=None, index=True)
    primary_email: Optional[str] = None
    full_data: Optional[str] = Field(default=None, description="JSON blob of full contact data")
    cached_at: datetime = Field(default_factory=datetime.utcnow)


class SyncState(SQLModel, table=True):
    """Track sync state for incremental updates."""

    __tablename__ = "sync_state"

    resource_type: str = Field(primary_key=True, description="Type: mailbox, calendar, addressbook")
    resource_id: str = Field(primary_key=True)
    last_sync: Optional[datetime] = None
    sync_token: Optional[str] = None

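A hedged sketch of how the cache tables and the session helper fit together; this is assumed usage, not part of the committed files, and expects to be run from src/ (or with src/ on PYTHONPATH) against a throwaway database path. Rows carry a small indexed summary plus a full_data JSON blob, and get_session() commits automatically on clean exit:

# Assumed usage sketch: round-trip one EmailCache row through the async session helper.
import asyncio
import json
from datetime import datetime

from sqlmodel import select

from database import init_db, close_db, get_session, EmailCache


async def demo() -> None:
    await init_db("/tmp/pim-demo-cache.db")  # throwaway path for the example

    async with get_session() as session:  # commits on success, rolls back on error
        session.add(EmailCache(
            id="INBOX:42",
            mailbox="INBOX",
            subject="Hello",
            from_address="alice@example.com",
            date=datetime.utcnow(),
            full_data=json.dumps({"body_text": "Hi there"}),
        ))

    async with get_session() as session:
        row = (await session.execute(select(EmailCache))).scalars().first()
        print(row.subject, json.loads(row.full_data)["body_text"])

    await close_db()


asyncio.run(demo())
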
51  src/models/__init__.py  Normal file
@@ -0,0 +1,51 @@
from .email_models import (
    Mailbox,
    EmailAddress,
    Attachment,
    EmailSummary,
    Email,
    EmailList,
)
from .calendar_models import (
    EventStatus,
    Attendee,
    Reminder,
    Calendar,
    Event,
    EventList,
)
from .contacts_models import (
    EmailField,
    PhoneField,
    AddressField,
    AddressBook,
    Contact,
    ContactList,
)
from .common import OperationResult

__all__ = [
    # Email
    "Mailbox",
    "EmailAddress",
    "Attachment",
    "EmailSummary",
    "Email",
    "EmailList",
    # Calendar
    "EventStatus",
    "Attendee",
    "Reminder",
    "Calendar",
    "Event",
    "EventList",
    # Contacts
    "EmailField",
    "PhoneField",
    "AddressField",
    "AddressBook",
    "Contact",
    "ContactList",
    # Common
    "OperationResult",
]

56  src/models/calendar_models.py  Normal file
@@ -0,0 +1,56 @@
from pydantic import BaseModel
from typing import Optional
from datetime import datetime
from enum import Enum


class EventStatus(str, Enum):
    CONFIRMED = "confirmed"
    TENTATIVE = "tentative"
    CANCELLED = "cancelled"


class Attendee(BaseModel):
    email: str
    name: Optional[str] = None
    status: str = "needs-action"
    required: bool = True


class Reminder(BaseModel):
    minutes_before: int
    method: str = "display"


class Calendar(BaseModel):
    id: str
    name: str
    color: Optional[str] = None
    description: Optional[str] = None
    is_readonly: bool = False


class Event(BaseModel):
    id: str
    calendar_id: str
    title: str
    start: datetime
    end: datetime
    all_day: bool = False
    description: Optional[str] = None
    location: Optional[str] = None
    status: EventStatus = EventStatus.CONFIRMED
    attendees: list[Attendee] = []
    reminders: list[Reminder] = []
    recurrence_rule: Optional[str] = None
    created: Optional[datetime] = None
    updated: Optional[datetime] = None
    organizer: Optional[str] = None


class EventList(BaseModel):
    events: list[Event]
    calendar_id: str
    start_date: str
    end_date: str
    total: int

8  src/models/common.py  Normal file
@@ -0,0 +1,8 @@
from pydantic import BaseModel
from typing import Optional


class OperationResult(BaseModel):
    success: bool
    message: str
    id: Optional[str] = None

58  src/models/contacts_models.py  Normal file
@@ -0,0 +1,58 @@
from pydantic import BaseModel
from typing import Optional
from datetime import date


class EmailField(BaseModel):
    type: str = "home"
    email: str
    primary: bool = False


class PhoneField(BaseModel):
    type: str = "mobile"
    number: str
    primary: bool = False


class AddressField(BaseModel):
    type: str = "home"
    street: Optional[str] = None
    city: Optional[str] = None
    state: Optional[str] = None
    postal_code: Optional[str] = None
    country: Optional[str] = None


class AddressBook(BaseModel):
    id: str
    name: str
    description: Optional[str] = None
    contact_count: int = 0


class Contact(BaseModel):
    id: str
    addressbook_id: str
    first_name: Optional[str] = None
    last_name: Optional[str] = None
    display_name: Optional[str] = None
    nickname: Optional[str] = None
    emails: list[EmailField] = []
    phones: list[PhoneField] = []
    addresses: list[AddressField] = []
    organization: Optional[str] = None
    title: Optional[str] = None
    notes: Optional[str] = None
    birthday: Optional[date] = None
    photo_url: Optional[str] = None
    created: Optional[str] = None
    updated: Optional[str] = None


class ContactList(BaseModel):
    contacts: list[Contact]
    addressbook_id: str
    total: int
    limit: int
    offset: int

55  src/models/email_models.py  Normal file
@@ -0,0 +1,55 @@
from pydantic import BaseModel
from typing import Optional
from datetime import datetime


class Mailbox(BaseModel):
    name: str
    path: str
    message_count: int
    unread_count: int
    has_children: bool = False


class EmailAddress(BaseModel):
    name: Optional[str] = None
    email: str


class Attachment(BaseModel):
    filename: str
    content_type: str
    size: int
    content_id: Optional[str] = None


class EmailSummary(BaseModel):
    id: str
    mailbox: str
    subject: str
    from_address: EmailAddress
    to_addresses: list[EmailAddress]
    date: datetime
    is_read: bool
    is_flagged: bool
    has_attachments: bool
    snippet: Optional[str] = None


class Email(EmailSummary):
    cc_addresses: list[EmailAddress] = []
    bcc_addresses: list[EmailAddress] = []
    body_text: Optional[str] = None
    body_html: Optional[str] = None
    attachments: list[Attachment] = []
    headers: dict[str, str] = {}
    in_reply_to: Optional[str] = None
    references: list[str] = []


class EmailList(BaseModel):
    emails: list[EmailSummary]
    total: int
    mailbox: str
    limit: int
    offset: int

143  src/server.py  Normal file
@@ -0,0 +1,143 @@
#!/usr/bin/env python3
"""
PIM MCP Server - Personal Information Management via Model Context Protocol

A self-hosted MCP server that provides tools for managing:
- Email (IMAP/SMTP)
- Calendar (CalDAV)
- Contacts (CardDAV)
"""

import os
import sys

# Add src directory to path for imports
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))

from fastmcp import FastMCP

from config import settings
from database import init_db, close_db

# Initialize MCP server
mcp = FastMCP(
    settings.server_name,
    description="Personal Information Management MCP Server for Email, Calendar, and Contacts",
)

# Initialize services based on configuration
email_service = None
calendar_service = None
contacts_service = None


def setup_services():
    """Initialize services based on configuration."""
    global email_service, calendar_service, contacts_service

    if settings.is_email_configured():
        from services.email_service import EmailService
        email_service = EmailService(settings)
        print(f" Email service: enabled (IMAP: {settings.imap_host})")
    else:
        print(" Email service: disabled (not configured)")

    if settings.is_calendar_configured():
        from services.calendar_service import CalendarService
        calendar_service = CalendarService(settings)
        print(f" Calendar service: enabled (CalDAV: {settings.caldav_url})")
    else:
        print(" Calendar service: disabled (not configured)")

    if settings.is_contacts_configured():
        from services.contacts_service import ContactsService
        contacts_service = ContactsService(settings)
        print(f" Contacts service: enabled (CardDAV: {settings.carddav_url})")
    else:
        print(" Contacts service: disabled (not configured)")


def register_tools():
    """Register MCP tools based on enabled services."""
    if email_service:
        from tools.email_tools import register_email_tools
        register_email_tools(mcp, email_service)
        print(" Registered email tools")

    if calendar_service:
        from tools.calendar_tools import register_calendar_tools
        register_calendar_tools(mcp, calendar_service)
        print(" Registered calendar tools")

    if contacts_service:
        from tools.contacts_tools import register_contacts_tools
        register_contacts_tools(mcp, contacts_service)
        print(" Registered contacts tools")


# Server info tool (always available)
@mcp.tool(description="Get information about this PIM MCP server including enabled services and version.")
def get_server_info() -> dict:
    """Get server information and status."""
    return {
        "server_name": settings.server_name,
        "version": "1.0.0",
        "environment": settings.environment,
        "services": {
            "email": {
                "enabled": email_service is not None,
                "imap_host": settings.imap_host if email_service else None,
                "smtp_configured": settings.is_smtp_configured() if email_service else False,
            },
            "calendar": {
                "enabled": calendar_service is not None,
                "caldav_url": settings.caldav_url if calendar_service else None,
            },
            "contacts": {
                "enabled": contacts_service is not None,
                "carddav_url": settings.carddav_url if contacts_service else None,
            },
        },
    }


async def initialize():
    """Initialize the server."""
    print(f"\n{'='*60}")
    print(f" {settings.server_name}")
    print(f"{'='*60}")
    print(f"\nInitializing database...")
    await init_db(settings.sqlite_path)
    print(f" Database: {settings.sqlite_path}")
    print(" Using SQLModel with Alembic migrations")

    print(f"\nConfiguring services...")
    setup_services()

    print(f"\nRegistering tools...")
    register_tools()

    print(f"\n{'='*60}")


if __name__ == "__main__":
    import asyncio

    async def main():
        await initialize()

        port = settings.server_port
        host = settings.server_host

        print(f"\nStarting server on {host}:{port}")
        print(f"MCP endpoint: http://{host}:{port}/mcp")
        print(f"{'='*60}\n")

        # main() is already running inside an event loop, so use the async
        # entry point rather than mcp.run(), which would try to start a second loop.
        await mcp.run_async(
            transport="http",
            host=host,
            port=port,
            stateless_http=True,
        )

    asyncio.run(main())

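For a quick smoke test without the HTTP transport, the registered tools can be called through FastMCP's in-memory client; a hedged sketch, not part of the committed files, assuming the FastMCP 2.x Client API and that it runs from the src/ directory:

# Assumed smoke-test sketch using fastmcp's in-memory client.
import asyncio

from fastmcp import Client

from server import mcp, initialize


async def main() -> None:
    await initialize()  # sets up the database, services, and tools
    async with Client(mcp) as client:  # in-memory transport, no HTTP needed
        tools = await client.list_tools()
        print([t.name for t in tools])
        result = await client.call_tool("get_server_info", {})
        print(result)


asyncio.run(main())
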
5  src/services/__init__.py  Normal file
@@ -0,0 +1,5 @@
from .email_service import EmailService
from .calendar_service import CalendarService
from .contacts_service import ContactsService

__all__ = ["EmailService", "CalendarService", "ContactsService"]

316  src/services/calendar_service.py  Normal file
@@ -0,0 +1,316 @@
from datetime import datetime, timedelta
from typing import Optional
import uuid

import caldav
from icalendar import Calendar as iCalendar, Event as iEvent, vText
from dateutil.parser import parse as parse_date
from dateutil.rrule import rrulestr

from models.calendar_models import (
    Calendar,
    Event,
    EventList,
    EventStatus,
    Attendee,
    Reminder,
)
from models.common import OperationResult
from config import Settings


class CalendarService:
    def __init__(self, settings: Settings):
        self.settings = settings
        self._client: Optional[caldav.DAVClient] = None
        self._principal = None

    def _get_client(self) -> caldav.DAVClient:
        if self._client is None:
            self._client = caldav.DAVClient(
                url=self.settings.caldav_url,
                username=self.settings.caldav_username,
                password=self.settings.caldav_password.get_secret_value(),
            )
            self._principal = self._client.principal()
        return self._client

    def _get_principal(self):
        self._get_client()
        return self._principal

    def list_calendars(self) -> list[Calendar]:
        principal = self._get_principal()
        calendars = principal.calendars()

        result = []
        for cal in calendars:
            props = cal.get_properties([caldav.dav.DisplayName()])
            name = props.get("{DAV:}displayname", cal.name or "Unnamed")

            result.append(
                Calendar(
                    id=str(cal.url),
                    name=name,
                    color=None,
                    description=None,
                    is_readonly=False,
                )
            )

        return result

    def _get_calendar_by_id(self, calendar_id: str) -> caldav.Calendar:
        principal = self._get_principal()
        calendars = principal.calendars()

        for cal in calendars:
            if str(cal.url) == calendar_id:
                return cal

        raise ValueError(f"Calendar not found: {calendar_id}")

    def list_events(
        self,
        calendar_id: str,
        start_date: str,
        end_date: str,
        include_recurring: bool = True,
    ) -> EventList:
        calendar = self._get_calendar_by_id(calendar_id)

        start = parse_date(start_date)
        end = parse_date(end_date)

        events = calendar.date_search(start=start, end=end, expand=include_recurring)

        result = []
        for event in events:
            parsed = self._parse_event(event, calendar_id)
            if parsed:
                result.append(parsed)

        result.sort(key=lambda e: e.start)

        return EventList(
            events=result,
            calendar_id=calendar_id,
            start_date=start_date,
            end_date=end_date,
            total=len(result),
        )

    def get_event(self, calendar_id: str, event_id: str) -> Optional[Event]:
        calendar = self._get_calendar_by_id(calendar_id)

        try:
            event = calendar.event_by_url(event_id)
            return self._parse_event(event, calendar_id)
        except Exception:
            # Try searching by UID
            events = calendar.events()
            for event in events:
                parsed = self._parse_event(event, calendar_id)
                if parsed and parsed.id == event_id:
                    return parsed
            return None

    def create_event(
        self,
        calendar_id: str,
        title: str,
        start: str,
        end: str,
        description: Optional[str] = None,
        location: Optional[str] = None,
        attendees: Optional[list[str]] = None,
        reminders: Optional[list[int]] = None,
        recurrence: Optional[str] = None,
    ) -> Event:
        calendar = self._get_calendar_by_id(calendar_id)

        # Create iCalendar event
        ical = iCalendar()
        ical.add("prodid", "-//PIM MCP Server//EN")
        ical.add("version", "2.0")

        ievent = iEvent()
        event_uid = str(uuid.uuid4())
        ievent.add("uid", event_uid)
        ievent.add("summary", title)
        ievent.add("dtstart", parse_date(start))
        ievent.add("dtend", parse_date(end))
        ievent.add("dtstamp", datetime.now())

        if description:
            ievent.add("description", description)
        if location:
            ievent.add("location", location)

        if attendees:
            for attendee_email in attendees:
                ievent.add("attendee", f"mailto:{attendee_email}")

        if recurrence:
            ievent.add("rrule", recurrence)

        ical.add_component(ievent)

        # Save to calendar
        created_event = calendar.save_event(ical.to_ical().decode("utf-8"))

        return Event(
            id=event_uid,
            calendar_id=calendar_id,
            title=title,
            start=parse_date(start),
            end=parse_date(end),
            description=description,
            location=location,
            attendees=[Attendee(email=a) for a in (attendees or [])],
            reminders=[Reminder(minutes_before=m) for m in (reminders or [])],
            recurrence_rule=recurrence,
            created=datetime.now(),
        )

    def update_event(
        self,
        calendar_id: str,
        event_id: str,
        title: Optional[str] = None,
        start: Optional[str] = None,
        end: Optional[str] = None,
        description: Optional[str] = None,
        location: Optional[str] = None,
        attendees: Optional[list[str]] = None,
    ) -> Optional[Event]:
        calendar = self._get_calendar_by_id(calendar_id)

        # Find the event
        event = None
        for e in calendar.events():
            ical = iCalendar.from_ical(e.data)
            for component in ical.walk():
                if component.name == "VEVENT":
                    uid = str(component.get("uid", ""))
                    if uid == event_id:
                        event = e
                        break

        if not event:
            return None

        # Parse and modify
        ical = iCalendar.from_ical(event.data)
        for component in ical.walk():
            if component.name == "VEVENT":
                if title is not None:
                    component["summary"] = vText(title)
                if start is not None:
                    component["dtstart"] = parse_date(start)
                if end is not None:
                    component["dtend"] = parse_date(end)
                if description is not None:
                    component["description"] = vText(description)
                if location is not None:
                    component["location"] = vText(location)

        # Save changes
        event.data = ical.to_ical().decode("utf-8")
        event.save()

        return self._parse_event(event, calendar_id)

    def delete_event(
        self, calendar_id: str, event_id: str, notify_attendees: bool = True
    ) -> OperationResult:
        try:
            calendar = self._get_calendar_by_id(calendar_id)

            # Find and delete the event
            for event in calendar.events():
                ical = iCalendar.from_ical(event.data)
                for component in ical.walk():
                    if component.name == "VEVENT":
                        uid = str(component.get("uid", ""))
                        if uid == event_id:
                            event.delete()
                            return OperationResult(
                                success=True,
                                message="Event deleted successfully",
                                id=event_id,
                            )

            return OperationResult(
                success=False, message=f"Event not found: {event_id}"
            )
        except Exception as e:
            return OperationResult(success=False, message=str(e))

    def _parse_event(self, caldav_event, calendar_id: str) -> Optional[Event]:
        try:
            ical = iCalendar.from_ical(caldav_event.data)

            for component in ical.walk():
                if component.name == "VEVENT":
                    uid = str(component.get("uid", ""))

                    # Parse dates
                    dtstart = component.get("dtstart")
                    dtend = component.get("dtend")

                    start = dtstart.dt if dtstart else datetime.now()
                    end = dtend.dt if dtend else start + timedelta(hours=1)

                    # Handle date-only values (all-day events)
                    all_day = False
                    if not isinstance(start, datetime):
                        all_day = True
                        start = datetime.combine(start, datetime.min.time())
                    if not isinstance(end, datetime):
                        end = datetime.combine(end, datetime.min.time())

                    # Parse status
                    status_str = str(component.get("status", "CONFIRMED")).upper()
                    status = EventStatus.CONFIRMED
                    if status_str == "TENTATIVE":
                        status = EventStatus.TENTATIVE
                    elif status_str == "CANCELLED":
                        status = EventStatus.CANCELLED

                    # Parse attendees: a single ATTENDEE property comes back as one
                    # vCalAddress, multiple come back as a list of them
                    attendees = []
                    attendee_prop = component.get("attendee", [])
                    if not isinstance(attendee_prop, list):
                        attendee_prop = [attendee_prop]
                    for a in attendee_prop:
                        email = str(a).replace("mailto:", "")
                        attendees.append(Attendee(email=email))

                    # Parse recurrence
                    rrule = component.get("rrule")
                    recurrence_rule = None
                    if rrule:
                        recurrence_rule = rrule.to_ical().decode("utf-8")

                    return Event(
                        id=uid,
                        calendar_id=calendar_id,
                        title=str(component.get("summary", "Untitled")),
                        start=start,
                        end=end,
                        all_day=all_day,
                        description=str(component.get("description", "")) or None,
                        location=str(component.get("location", "")) or None,
                        status=status,
                        attendees=attendees,
                        recurrence_rule=recurrence_rule,
                        organizer=str(component.get("organizer", "")).replace("mailto:", "") or None,
                    )
        except Exception as e:
            print(f"Error parsing event: {e}")
            return None

        return None

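A hedged usage sketch for the service above; it is not part of the committed files and assumes the CALDAV_* settings point at a reachable server and that it runs with src/ on the path. The titles and dates are placeholders:

# Assumed usage sketch for CalendarService.
from config import settings
from services.calendar_service import CalendarService

svc = CalendarService(settings)

calendars = svc.list_calendars()
for cal in calendars:
    print(cal.name, cal.id)

if calendars:
    created = svc.create_event(
        calendar_id=calendars[0].id,
        title="Dentist",
        start="2025-06-01T10:00:00",
        end="2025-06-01T11:00:00",
        location="Main St 1",
    )
    print("created", created.id)

    window = svc.list_events(
        calendar_id=calendars[0].id,
        start_date="2025-06-01",
        end_date="2025-06-02",
    )
    print(window.total, "event(s) in range")
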
477  src/services/contacts_service.py  Normal file
@@ -0,0 +1,477 @@
from typing import Optional
import uuid

import httpx
import vobject

from models.contacts_models import (
    AddressBook,
    Contact,
    ContactList,
    EmailField,
    PhoneField,
    AddressField,
)
from models.common import OperationResult
from config import Settings


PROPFIND_ADDRESSBOOKS = """<?xml version="1.0" encoding="utf-8"?>
<d:propfind xmlns:d="DAV:" xmlns:card="urn:ietf:params:xml:ns:carddav">
  <d:prop>
    <d:displayname/>
    <d:resourcetype/>
    <card:addressbook-description/>
  </d:prop>
</d:propfind>"""

REPORT_CONTACTS = """<?xml version="1.0" encoding="utf-8"?>
<card:addressbook-query xmlns:d="DAV:" xmlns:card="urn:ietf:params:xml:ns:carddav">
  <d:prop>
    <d:getetag/>
    <card:address-data/>
  </d:prop>
</card:addressbook-query>"""


class ContactsService:
    def __init__(self, settings: Settings):
        self.settings = settings
        self._client: Optional[httpx.Client] = None

    def _get_client(self) -> httpx.Client:
        if self._client is None:
            self._client = httpx.Client(
                auth=(
                    self.settings.carddav_username,
                    self.settings.carddav_password.get_secret_value(),
                ),
                headers={"Content-Type": "application/xml; charset=utf-8"},
                timeout=30.0,
            )
        return self._client

    def list_addressbooks(self) -> list[AddressBook]:
        client = self._get_client()

        response = client.request(
            "PROPFIND",
            self.settings.carddav_url,
            headers={"Depth": "1"},
            content=PROPFIND_ADDRESSBOOKS,
        )

        if response.status_code not in [200, 207]:
            raise Exception(f"Failed to list addressbooks: {response.status_code}")

        # Parse XML response
        addressbooks = []
        from xml.etree import ElementTree as ET

        root = ET.fromstring(response.text)
        ns = {
            "d": "DAV:",
            "card": "urn:ietf:params:xml:ns:carddav",
        }

        for response_elem in root.findall(".//d:response", ns):
            href = response_elem.find("d:href", ns)
            if href is None:
                continue

            resourcetype = response_elem.find(".//d:resourcetype", ns)
            is_addressbook = (
                resourcetype is not None
                and resourcetype.find("card:addressbook", ns) is not None
            )

            if not is_addressbook:
                continue

            displayname = response_elem.find(".//d:displayname", ns)
            description = response_elem.find(".//card:addressbook-description", ns)

            addressbooks.append(
                AddressBook(
                    id=href.text,
                    name=displayname.text if displayname is not None and displayname.text else "Unnamed",
                    description=description.text if description is not None else None,
                    contact_count=0,
                )
            )

        return addressbooks

    def list_contacts(
        self,
        addressbook_id: str,
        search: Optional[str] = None,
        limit: int = 100,
        offset: int = 0,
    ) -> ContactList:
        client = self._get_client()

        # Build URL
        base_url = self.settings.carddav_url.rstrip("/")
        addressbook_url = f"{base_url}{addressbook_id}" if addressbook_id.startswith("/") else addressbook_id

        response = client.request(
            "REPORT",
            addressbook_url,
            headers={"Depth": "1"},
            content=REPORT_CONTACTS,
        )

        if response.status_code not in [200, 207]:
            raise Exception(f"Failed to list contacts: {response.status_code}")

        # Parse XML response
        contacts = []
        from xml.etree import ElementTree as ET

        root = ET.fromstring(response.text)
        ns = {
            "d": "DAV:",
            "card": "urn:ietf:params:xml:ns:carddav",
        }

        for response_elem in root.findall(".//d:response", ns):
            href = response_elem.find("d:href", ns)
            address_data = response_elem.find(".//card:address-data", ns)

            if href is None or address_data is None or address_data.text is None:
                continue

            try:
                contact = self._parse_vcard(address_data.text, addressbook_id, href.text)
                if contact:
                    # Apply search filter
                    if search:
                        search_lower = search.lower()
                        match = False
                        if contact.display_name and search_lower in contact.display_name.lower():
                            match = True
                        elif contact.first_name and search_lower in contact.first_name.lower():
                            match = True
                        elif contact.last_name and search_lower in contact.last_name.lower():
                            match = True
                        elif any(search_lower in e.email.lower() for e in contact.emails):
                            match = True
                        if not match:
                            continue

                    contacts.append(contact)
            except Exception as e:
                print(f"Error parsing contact: {e}")
                continue

        # Sort by display name
        contacts.sort(key=lambda c: c.display_name or c.first_name or c.last_name or "")

        total = len(contacts)
        contacts = contacts[offset : offset + limit]

        return ContactList(
            contacts=contacts,
            addressbook_id=addressbook_id,
            total=total,
            limit=limit,
            offset=offset,
        )

    def get_contact(self, addressbook_id: str, contact_id: str) -> Optional[Contact]:
        client = self._get_client()

        # Build URL
        base_url = self.settings.carddav_url.rstrip("/")
        contact_url = f"{base_url}{contact_id}" if contact_id.startswith("/") else contact_id

        response = client.get(contact_url)

        if response.status_code == 404:
            return None

        if response.status_code != 200:
            raise Exception(f"Failed to get contact: {response.status_code}")

        return self._parse_vcard(response.text, addressbook_id, contact_id)

    def create_contact(
        self,
        addressbook_id: str,
        first_name: Optional[str] = None,
        last_name: Optional[str] = None,
        display_name: Optional[str] = None,
        emails: Optional[list[dict]] = None,
        phones: Optional[list[dict]] = None,
        addresses: Optional[list[dict]] = None,
        organization: Optional[str] = None,
        title: Optional[str] = None,
        notes: Optional[str] = None,
        birthday: Optional[str] = None,
    ) -> Contact:
        client = self._get_client()

        # Create vCard
        vcard = vobject.vCard()

        # Generate UID
        uid = str(uuid.uuid4())
        vcard.add("uid").value = uid

        # Name
        n = vcard.add("n")
        n.value = vobject.vcard.Name(
            family=last_name or "",
            given=first_name or "",
        )

        # Full name
        fn = display_name or " ".join(filter(None, [first_name, last_name])) or "Unnamed"
        vcard.add("fn").value = fn

        # Organization
        if organization:
            org = vcard.add("org")
            org.value = [organization]

        # Title
        if title:
            vcard.add("title").value = title

        # Notes
        if notes:
            vcard.add("note").value = notes

        # Birthday
        if birthday:
            vcard.add("bday").value = birthday

        # Emails
        if emails:
            for email_data in emails:
                email = vcard.add("email")
                email.value = email_data.get("email", "")
                email.type_param = email_data.get("type", "home").upper()

        # Phones
        if phones:
            for phone_data in phones:
                tel = vcard.add("tel")
                tel.value = phone_data.get("number", "")
                tel.type_param = phone_data.get("type", "cell").upper()

        # Addresses
        if addresses:
            for addr_data in addresses:
                adr = vcard.add("adr")
                adr.value = vobject.vcard.Address(
                    street=addr_data.get("street", ""),
                    city=addr_data.get("city", ""),
                    region=addr_data.get("state", ""),
                    code=addr_data.get("postal_code", ""),
                    country=addr_data.get("country", ""),
                )
                adr.type_param = addr_data.get("type", "home").upper()

        # Build URL and save
        base_url = self.settings.carddav_url.rstrip("/")
        addressbook_url = f"{base_url}{addressbook_id}" if addressbook_id.startswith("/") else addressbook_id
        contact_url = f"{addressbook_url.rstrip('/')}/{uid}.vcf"
|
||||
|
||||
response = client.put(
|
||||
contact_url,
|
||||
content=vcard.serialize(),
|
||||
headers={"Content-Type": "text/vcard; charset=utf-8"},
|
||||
)
|
||||
|
||||
if response.status_code not in [200, 201, 204]:
|
||||
raise Exception(f"Failed to create contact: {response.status_code}")
|
||||
|
||||
return Contact(
|
||||
id=contact_url,
|
||||
addressbook_id=addressbook_id,
|
||||
first_name=first_name,
|
||||
last_name=last_name,
|
||||
display_name=fn,
|
||||
emails=[EmailField(**e) for e in (emails or [])],
|
||||
phones=[PhoneField(**p) for p in (phones or [])],
|
||||
addresses=[AddressField(**a) for a in (addresses or [])],
|
||||
organization=organization,
|
||||
title=title,
|
||||
notes=notes,
|
||||
)
|
||||
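As a sanity check of the vobject calls above, a minimal card built the same way serializes to something like the following (illustrative; vobject controls the exact field order and line folding):

import vobject

card = vobject.vCard()
card.add("uid").value = "123e4567-e89b-12d3-a456-426614174000"
card.add("n").value = vobject.vcard.Name(family="Doe", given="Jane")
card.add("fn").value = "Jane Doe"
email = card.add("email")
email.value = "jane@example.com"
email.type_param = "WORK"
print(card.serialize())
# Roughly:
# BEGIN:VCARD
# VERSION:3.0
# EMAIL;TYPE=WORK:jane@example.com
# FN:Jane Doe
# N:Doe;Jane;;;
# UID:123e4567-e89b-12d3-a456-426614174000
# END:VCARD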
|
||||
def update_contact(
|
||||
self,
|
||||
addressbook_id: str,
|
||||
contact_id: str,
|
||||
first_name: Optional[str] = None,
|
||||
last_name: Optional[str] = None,
|
||||
display_name: Optional[str] = None,
|
||||
emails: Optional[list[dict]] = None,
|
||||
phones: Optional[list[dict]] = None,
|
||||
addresses: Optional[list[dict]] = None,
|
||||
organization: Optional[str] = None,
|
||||
title: Optional[str] = None,
|
||||
notes: Optional[str] = None,
|
||||
) -> Optional[Contact]:
|
||||
# Get existing contact
|
||||
existing = self.get_contact(addressbook_id, contact_id)
|
||||
if not existing:
|
||||
return None
|
||||
|
||||
# Merge with updates
|
||||
updated_data = {
|
||||
"first_name": first_name if first_name is not None else existing.first_name,
|
||||
"last_name": last_name if last_name is not None else existing.last_name,
|
||||
"display_name": display_name if display_name is not None else existing.display_name,
|
||||
"emails": emails if emails is not None else [e.model_dump() for e in existing.emails],
|
||||
"phones": phones if phones is not None else [p.model_dump() for p in existing.phones],
|
||||
"addresses": addresses if addresses is not None else [a.model_dump() for a in existing.addresses],
|
||||
"organization": organization if organization is not None else existing.organization,
|
||||
"title": title if title is not None else existing.title,
|
||||
"notes": notes if notes is not None else existing.notes,
|
||||
}
|
||||
|
||||
# Delete and recreate (simpler than partial update)
|
||||
self.delete_contact(addressbook_id, contact_id)
|
||||
return self.create_contact(addressbook_id, **updated_data)
|
||||
|
||||
def delete_contact(self, addressbook_id: str, contact_id: str) -> OperationResult:
|
||||
try:
|
||||
client = self._get_client()
|
||||
|
||||
# Build URL
|
||||
base_url = self.settings.carddav_url.rstrip("/")
|
||||
contact_url = f"{base_url}{contact_id}" if contact_id.startswith("/") else contact_id
|
||||
|
||||
response = client.delete(contact_url)
|
||||
|
||||
if response.status_code in [200, 204]:
|
||||
return OperationResult(
|
||||
success=True, message="Contact deleted successfully", id=contact_id
|
||||
)
|
||||
elif response.status_code == 404:
|
||||
return OperationResult(
|
||||
success=False, message="Contact not found", id=contact_id
|
||||
)
|
||||
else:
|
||||
return OperationResult(
|
||||
success=False,
|
||||
message=f"Failed to delete contact: {response.status_code}",
|
||||
)
|
||||
except Exception as e:
|
||||
return OperationResult(success=False, message=str(e))
|
||||
|
||||
def _parse_vcard(
|
||||
self, vcard_data: str, addressbook_id: str, href: str
|
||||
) -> Optional[Contact]:
|
||||
try:
|
||||
vcard = vobject.readOne(vcard_data)
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
# Get UID
|
||||
uid = href
|
||||
if hasattr(vcard, "uid"):
|
||||
uid = vcard.uid.value
|
||||
|
||||
# Get name components
|
||||
first_name = None
|
||||
last_name = None
|
||||
if hasattr(vcard, "n"):
|
||||
first_name = vcard.n.value.given or None
|
||||
last_name = vcard.n.value.family or None
|
||||
|
||||
# Get display name
|
||||
display_name = None
|
||||
if hasattr(vcard, "fn"):
|
||||
display_name = vcard.fn.value
|
||||
|
||||
# Get emails
|
||||
emails = []
|
||||
if hasattr(vcard, "email_list"):
|
||||
for email in vcard.email_list:
|
||||
email_type = "home"
|
||||
if hasattr(email, "type_param"):
|
||||
email_type = str(email.type_param).lower()
|
||||
emails.append(
|
||||
EmailField(type=email_type, email=email.value, primary=len(emails) == 0)
|
||||
)
|
||||
|
||||
# Get phones
|
||||
phones = []
|
||||
if hasattr(vcard, "tel_list"):
|
||||
for tel in vcard.tel_list:
|
||||
phone_type = "mobile"
|
||||
if hasattr(tel, "type_param"):
|
||||
phone_type = str(tel.type_param).lower()
|
||||
phones.append(
|
||||
PhoneField(type=phone_type, number=tel.value, primary=len(phones) == 0)
|
||||
)
|
||||
|
||||
# Get addresses
|
||||
addresses = []
|
||||
if hasattr(vcard, "adr_list"):
|
||||
for adr in vcard.adr_list:
|
||||
addr_type = "home"
|
||||
if hasattr(adr, "type_param"):
|
||||
addr_type = str(adr.type_param).lower()
|
||||
addresses.append(
|
||||
AddressField(
|
||||
type=addr_type,
|
||||
street=adr.value.street or None,
|
||||
city=adr.value.city or None,
|
||||
state=adr.value.region or None,
|
||||
postal_code=adr.value.code or None,
|
||||
country=adr.value.country or None,
|
||||
)
|
||||
)
|
||||
|
||||
# Get organization
|
||||
organization = None
|
||||
if hasattr(vcard, "org"):
|
||||
org_value = vcard.org.value
|
||||
if isinstance(org_value, list) and len(org_value) > 0:
|
||||
organization = org_value[0]
|
||||
else:
|
||||
organization = str(org_value)
|
||||
|
||||
# Get title
|
||||
title = None
|
||||
if hasattr(vcard, "title"):
|
||||
title = vcard.title.value
|
||||
|
||||
# Get notes
|
||||
notes = None
|
||||
if hasattr(vcard, "note"):
|
||||
notes = vcard.note.value
|
||||
|
||||
# Get birthday
|
||||
birthday = None
|
||||
if hasattr(vcard, "bday"):
|
||||
try:
|
||||
from datetime import date
|
||||
bday_value = vcard.bday.value
|
||||
if isinstance(bday_value, str):
|
||||
birthday = date.fromisoformat(bday_value)
|
||||
else:
|
||||
birthday = bday_value
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return Contact(
|
||||
id=href,
|
||||
addressbook_id=addressbook_id,
|
||||
first_name=first_name,
|
||||
last_name=last_name,
|
||||
display_name=display_name,
|
||||
emails=emails,
|
||||
phones=phones,
|
||||
addresses=addresses,
|
||||
organization=organization,
|
||||
title=title,
|
||||
notes=notes,
|
||||
birthday=birthday,
|
||||
)
|
||||
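A minimal usage sketch of ContactsService outside the MCP server, assuming a Settings object populated from .env as shown in .env.example (the search term is illustrative):

from config import Settings
from services.contacts_service import ContactsService

contacts = ContactsService(Settings())
for book in contacts.list_addressbooks():
    page = contacts.list_contacts(book.id, search="jane", limit=10)
    for person in page.contacts:
        print(book.name, person.display_name, [e.email for e in person.emails])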
560
src/services/email_service.py
Normal file
@@ -0,0 +1,560 @@
|
||||
import email
|
||||
from email.header import decode_header
|
||||
from email.mime.multipart import MIMEMultipart
|
||||
from email.mime.text import MIMEText
|
||||
from email.utils import formataddr, parseaddr
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
import re
|
||||
|
||||
from imapclient import IMAPClient
|
||||
import aiosmtplib
|
||||
|
||||
from models.email_models import (
|
||||
Mailbox,
|
||||
EmailAddress,
|
||||
Attachment,
|
||||
EmailSummary,
|
||||
Email,
|
||||
EmailList,
|
||||
)
|
||||
from models.common import OperationResult
|
||||
from config import Settings
|
||||
|
||||
|
||||
def decode_mime_header(header: Optional[str]) -> str:
|
||||
if not header:
|
||||
return ""
|
||||
decoded_parts = []
|
||||
for part, encoding in decode_header(header):
|
||||
if isinstance(part, bytes):
|
||||
decoded_parts.append(part.decode(encoding or "utf-8", errors="replace"))
|
||||
else:
|
||||
decoded_parts.append(part)
|
||||
return "".join(decoded_parts)
|
||||
|
||||
|
||||
def parse_email_address(addr: str) -> EmailAddress:
|
||||
name, email_addr = parseaddr(addr)
|
||||
return EmailAddress(name=decode_mime_header(name) or None, email=email_addr)
|
||||
|
||||
|
||||
def parse_email_addresses(addrs: Optional[str]) -> list[EmailAddress]:
|
||||
if not addrs:
|
||||
return []
|
||||
# Handle multiple addresses separated by comma
|
||||
addresses = []
|
||||
for addr in addrs.split(","):
|
||||
addr = addr.strip()
|
||||
if addr:
|
||||
addresses.append(parse_email_address(addr))
|
||||
return addresses
|
||||
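A quick check of these helpers on an RFC 2047 encoded header (values are illustrative):

raw = "=?utf-8?b?SsO8cmdlbiBNw7xsbGVy?= <juergen@example.com>, plain@example.com"
print(decode_mime_header("=?utf-8?b?SsO8cmdlbiBNw7xsbGVy?="))  # Jürgen Müller
for addr in parse_email_addresses(raw):
    print(addr.name, addr.email)
# Jürgen Müller juergen@example.com
# None plain@example.com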
|
||||
|
||||
class EmailService:
|
||||
def __init__(self, settings: Settings):
|
||||
self.settings = settings
|
||||
self._imap_client: Optional[IMAPClient] = None
|
||||
|
||||
def _get_imap_client(self) -> IMAPClient:
|
||||
if self._imap_client is None:
|
||||
self._imap_client = IMAPClient(
|
||||
host=self.settings.imap_host,
|
||||
port=self.settings.imap_port,
|
||||
ssl=self.settings.imap_use_ssl,
|
||||
)
|
||||
self._imap_client.login(
|
||||
self.settings.imap_username,
|
||||
self.settings.imap_password.get_secret_value(),
|
||||
)
|
||||
return self._imap_client
|
||||
|
||||
def _close_imap_client(self):
|
||||
if self._imap_client:
|
||||
try:
|
||||
self._imap_client.logout()
|
||||
except Exception:
|
||||
pass
|
||||
self._imap_client = None
|
||||
|
||||
def list_mailboxes(self) -> list[Mailbox]:
|
||||
client = self._get_imap_client()
|
||||
folders = client.list_folders()
|
||||
mailboxes = []
|
||||
|
||||
for flags, delimiter, name in folders:
|
||||
# Get folder status
|
||||
try:
|
||||
status = client.folder_status(name, ["MESSAGES", "UNSEEN"])
|
||||
message_count = status.get(b"MESSAGES", 0)
|
||||
unread_count = status.get(b"UNSEEN", 0)
|
||||
except Exception:
|
||||
message_count = 0
|
||||
unread_count = 0
|
||||
|
||||
has_children = b"\\HasChildren" in flags
|
||||
|
||||
mailboxes.append(
|
||||
Mailbox(
|
||||
name=name.split(delimiter.decode() if delimiter else "/")[-1],
|
||||
path=name,
|
||||
message_count=message_count,
|
||||
unread_count=unread_count,
|
||||
has_children=has_children,
|
||||
)
|
||||
)
|
||||
|
||||
return mailboxes
|
||||
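The (flags, delimiter, name) tuples consumed above come from IMAPClient.list_folders(); a typical entry looks like this (values illustrative):

flags, delimiter, name = ((b"\\HasNoChildren",), b"/", "INBOX/Receipts")
leaf = name.split(delimiter.decode() if delimiter else "/")[-1]
print(leaf)                        # Receipts
print(b"\\HasChildren" in flags)   # False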
|
||||
def list_emails(
|
||||
self,
|
||||
mailbox: str = "INBOX",
|
||||
limit: int = 50,
|
||||
offset: int = 0,
|
||||
include_body: bool = False,
|
||||
) -> EmailList:
|
||||
client = self._get_imap_client()
|
||||
client.select_folder(mailbox, readonly=True)
|
||||
|
||||
# Search for all messages
|
||||
message_ids = client.search(["ALL"])
|
||||
total = len(message_ids)
|
||||
|
||||
# Sort by UID descending (newest first) and apply pagination
|
||||
message_ids = sorted(message_ids, reverse=True)
|
||||
paginated_ids = message_ids[offset : offset + limit]
|
||||
|
||||
if not paginated_ids:
|
||||
return EmailList(
|
||||
emails=[], total=total, mailbox=mailbox, limit=limit, offset=offset
|
||||
)
|
||||
|
||||
# Fetch message data
|
||||
fetch_items = ["ENVELOPE", "FLAGS", "BODYSTRUCTURE", "RFC822.SIZE"]
|
||||
if include_body:
|
||||
fetch_items.append("BODY.PEEK[]")
|
||||
|
||||
messages = client.fetch(paginated_ids, fetch_items)
|
||||
emails = []
|
||||
|
||||
for uid, data in messages.items():
|
||||
envelope = data[b"ENVELOPE"]
|
||||
flags = data[b"FLAGS"]
|
||||
|
||||
# Parse from address
|
||||
from_addr = EmailAddress(name=None, email="unknown@unknown.com")
|
||||
if envelope.from_ and len(envelope.from_) > 0:
|
||||
sender = envelope.from_[0]
|
||||
from_addr = EmailAddress(
|
||||
name=decode_mime_header(sender.name) if sender.name else None,
|
||||
email=f"{sender.mailbox.decode() if sender.mailbox else 'unknown'}@{sender.host.decode() if sender.host else 'unknown.com'}",
|
||||
)
|
||||
|
||||
# Parse to addresses
|
||||
to_addrs = []
|
||||
if envelope.to:
|
||||
for addr in envelope.to:
|
||||
to_addrs.append(
|
||||
EmailAddress(
|
||||
name=decode_mime_header(addr.name) if addr.name else None,
|
||||
email=f"{addr.mailbox.decode() if addr.mailbox else 'unknown'}@{addr.host.decode() if addr.host else 'unknown.com'}",
|
||||
)
|
||||
)
|
||||
|
||||
# Parse date
|
||||
date = envelope.date or datetime.now()
|
||||
|
||||
# Check for attachments
|
||||
has_attachments = self._has_attachments(data.get(b"BODYSTRUCTURE"))
|
||||
|
||||
# Get snippet if body was fetched
|
||||
snippet = None
|
||||
if include_body and b"BODY[]" in data:
|
||||
raw_email = data[b"BODY[]"]
|
||||
msg = email.message_from_bytes(raw_email)
|
||||
snippet = self._get_text_snippet(msg, 200)
|
||||
|
||||
email_summary = EmailSummary(
|
||||
id=str(uid),
|
||||
mailbox=mailbox,
|
||||
subject=decode_mime_header(envelope.subject) if envelope.subject else "(No Subject)",
|
||||
from_address=from_addr,
|
||||
to_addresses=to_addrs,
|
||||
date=date,
|
||||
is_read=b"\\Seen" in flags,
|
||||
is_flagged=b"\\Flagged" in flags,
|
||||
has_attachments=has_attachments,
|
||||
snippet=snippet,
|
||||
)
|
||||
emails.append(email_summary)
|
||||
|
||||
# Sort by date descending
|
||||
emails.sort(key=lambda e: e.date, reverse=True)
|
||||
|
||||
return EmailList(
|
||||
emails=emails, total=total, mailbox=mailbox, limit=limit, offset=offset
|
||||
)
|
||||
|
||||
def read_email(
|
||||
self, mailbox: str, email_id: str, format: str = "text"
|
||||
) -> Optional[Email]:
|
||||
client = self._get_imap_client()
|
||||
client.select_folder(mailbox, readonly=True)
|
||||
|
||||
uid = int(email_id)
|
||||
messages = client.fetch([uid], ["ENVELOPE", "FLAGS", "BODY[]", "BODYSTRUCTURE"])
|
||||
|
||||
if uid not in messages:
|
||||
return None
|
||||
|
||||
data = messages[uid]
|
||||
envelope = data[b"ENVELOPE"]
|
||||
flags = data[b"FLAGS"]
|
||||
raw_email = data[b"BODY[]"]
|
||||
|
||||
msg = email.message_from_bytes(raw_email)
|
||||
|
||||
# Parse from address
|
||||
from_addr = EmailAddress(name=None, email="unknown@unknown.com")
|
||||
if envelope.from_ and len(envelope.from_) > 0:
|
||||
sender = envelope.from_[0]
|
||||
from_addr = EmailAddress(
|
||||
name=decode_mime_header(sender.name) if sender.name else None,
|
||||
email=f"{sender.mailbox.decode() if sender.mailbox else 'unknown'}@{sender.host.decode() if sender.host else 'unknown.com'}",
|
||||
)
|
||||
|
||||
# Parse addresses
|
||||
to_addrs = self._parse_envelope_addresses(envelope.to)
|
||||
cc_addrs = self._parse_envelope_addresses(envelope.cc)
|
||||
bcc_addrs = self._parse_envelope_addresses(envelope.bcc)
|
||||
|
||||
# Get body
|
||||
body_text, body_html = self._get_body(msg)
|
||||
|
||||
# Get attachments
|
||||
attachments = self._get_attachments(msg)
|
||||
|
||||
# Get headers
|
||||
headers = {}
|
||||
for key in ["Message-ID", "In-Reply-To", "References", "X-Priority"]:
|
||||
value = msg.get(key)
|
||||
if value:
|
||||
headers[key] = decode_mime_header(value)
|
||||
|
||||
return Email(
|
||||
id=str(uid),
|
||||
mailbox=mailbox,
|
||||
subject=decode_mime_header(envelope.subject) if envelope.subject else "(No Subject)",
|
||||
from_address=from_addr,
|
||||
to_addresses=to_addrs,
|
||||
cc_addresses=cc_addrs,
|
||||
bcc_addresses=bcc_addrs,
|
||||
date=envelope.date or datetime.now(),
|
||||
is_read=b"\\Seen" in flags,
|
||||
is_flagged=b"\\Flagged" in flags,
|
||||
has_attachments=len(attachments) > 0,
|
||||
body_text=body_text if format in ["text", "both"] else None,
|
||||
body_html=body_html if format in ["html", "both"] else None,
|
||||
attachments=attachments,
|
||||
headers=headers,
|
||||
in_reply_to=headers.get("In-Reply-To"),
|
||||
references=headers.get("References", "").split() if headers.get("References") else [],
|
||||
)
|
||||
|
||||
def search_emails(
|
||||
self,
|
||||
query: str,
|
||||
mailbox: str = "INBOX",
|
||||
search_in: Optional[list[str]] = None,
|
||||
date_from: Optional[str] = None,
|
||||
date_to: Optional[str] = None,
|
||||
limit: int = 50,
|
||||
) -> EmailList:
|
||||
if search_in is None:
|
||||
search_in = ["subject", "from", "body"]
|
||||
|
||||
client = self._get_imap_client()
|
||||
client.select_folder(mailbox, readonly=True)
|
||||
|
||||
# Build IMAP search criteria
|
||||
criteria = []
|
||||
|
||||
# Add text search (only the first field present in search_in is used, in subject/from/body order)
|
||||
if "subject" in search_in:
|
||||
criteria.append(["SUBJECT", query])
|
||||
elif "from" in search_in:
|
||||
criteria.append(["FROM", query])
|
||||
elif "body" in search_in:
|
||||
criteria.append(["BODY", query])
|
||||
else:
|
||||
criteria.append(["TEXT", query])
|
||||
|
||||
# Add date filters
|
||||
if date_from:
|
||||
criteria.append(["SINCE", date_from])
|
||||
if date_to:
|
||||
criteria.append(["BEFORE", date_to])
|
||||
|
||||
# Flatten the text criterion and any date filters into a single search expression;
# IMAP implicitly ANDs the flattened keys, so date filters are no longer dropped.
search_criteria = [item for group in criteria for item in group]
|
||||
|
||||
message_ids = client.search(search_criteria)
|
||||
total = len(message_ids)
|
||||
|
||||
# Sort and limit
|
||||
message_ids = sorted(message_ids, reverse=True)[:limit]
|
||||
|
||||
if not message_ids:
|
||||
return EmailList(
|
||||
emails=[], total=0, mailbox=mailbox, limit=limit, offset=0
|
||||
)
|
||||
|
||||
# Fetch and parse messages
|
||||
messages = client.fetch(message_ids, ["ENVELOPE", "FLAGS", "BODYSTRUCTURE"])
|
||||
emails = []
|
||||
|
||||
for uid, data in messages.items():
|
||||
envelope = data[b"ENVELOPE"]
|
||||
flags = data[b"FLAGS"]
|
||||
|
||||
from_addr = EmailAddress(name=None, email="unknown@unknown.com")
|
||||
if envelope.from_ and len(envelope.from_) > 0:
|
||||
sender = envelope.from_[0]
|
||||
from_addr = EmailAddress(
|
||||
name=decode_mime_header(sender.name) if sender.name else None,
|
||||
email=f"{sender.mailbox.decode() if sender.mailbox else 'unknown'}@{sender.host.decode() if sender.host else 'unknown.com'}",
|
||||
)
|
||||
|
||||
to_addrs = self._parse_envelope_addresses(envelope.to)
|
||||
|
||||
email_summary = EmailSummary(
|
||||
id=str(uid),
|
||||
mailbox=mailbox,
|
||||
subject=decode_mime_header(envelope.subject) if envelope.subject else "(No Subject)",
|
||||
from_address=from_addr,
|
||||
to_addresses=to_addrs,
|
||||
date=envelope.date or datetime.now(),
|
||||
is_read=b"\\Seen" in flags,
|
||||
is_flagged=b"\\Flagged" in flags,
|
||||
has_attachments=self._has_attachments(data.get(b"BODYSTRUCTURE")),
|
||||
)
|
||||
emails.append(email_summary)
|
||||
|
||||
emails.sort(key=lambda e: e.date, reverse=True)
|
||||
|
||||
return EmailList(
|
||||
emails=emails, total=total, mailbox=mailbox, limit=limit, offset=0
|
||||
)
|
||||
|
||||
def move_email(
|
||||
self, email_id: str, source_mailbox: str, destination_mailbox: str
|
||||
) -> OperationResult:
|
||||
try:
|
||||
client = self._get_imap_client()
|
||||
client.select_folder(source_mailbox)
|
||||
uid = int(email_id)
|
||||
client.move([uid], destination_mailbox)
|
||||
return OperationResult(
|
||||
success=True,
|
||||
message=f"Email moved from {source_mailbox} to {destination_mailbox}",
|
||||
id=email_id,
|
||||
)
|
||||
except Exception as e:
|
||||
return OperationResult(success=False, message=str(e))
|
||||
|
||||
def delete_email(
|
||||
self, email_id: str, mailbox: str, permanent: bool = False
|
||||
) -> OperationResult:
|
||||
try:
|
||||
client = self._get_imap_client()
|
||||
client.select_folder(mailbox)
|
||||
uid = int(email_id)
|
||||
|
||||
if permanent:
|
||||
client.delete_messages([uid])
|
||||
client.expunge()
|
||||
return OperationResult(
|
||||
success=True, message="Email permanently deleted", id=email_id
|
||||
)
|
||||
else:
|
||||
# Move to Trash
|
||||
trash_folder = self._find_trash_folder()
|
||||
if trash_folder:
|
||||
client.move([uid], trash_folder)
|
||||
return OperationResult(
|
||||
success=True, message="Email moved to trash", id=email_id
|
||||
)
|
||||
else:
|
||||
client.delete_messages([uid])
|
||||
client.expunge()
|
||||
return OperationResult(
|
||||
success=True, message="Email deleted (no trash folder found)", id=email_id
|
||||
)
|
||||
except Exception as e:
|
||||
return OperationResult(success=False, message=str(e))
|
||||
|
||||
async def send_email(
|
||||
self,
|
||||
to: list[str],
|
||||
subject: str,
|
||||
body: str,
|
||||
cc: Optional[list[str]] = None,
|
||||
bcc: Optional[list[str]] = None,
|
||||
reply_to: Optional[str] = None,
|
||||
html_body: Optional[str] = None,
|
||||
) -> OperationResult:
|
||||
try:
|
||||
msg = MIMEMultipart("alternative")
|
||||
msg["Subject"] = subject
|
||||
msg["From"] = formataddr(
|
||||
(self.settings.smtp_from_name or "", self.settings.smtp_from_email)
|
||||
)
|
||||
msg["To"] = ", ".join(to)
|
||||
|
||||
if cc:
|
||||
msg["Cc"] = ", ".join(cc)
|
||||
if reply_to:
|
||||
msg["Reply-To"] = reply_to
|
||||
|
||||
# Add plain text body
|
||||
msg.attach(MIMEText(body, "plain", "utf-8"))
|
||||
|
||||
# Add HTML body if provided
|
||||
if html_body:
|
||||
msg.attach(MIMEText(html_body, "html", "utf-8"))
|
||||
|
||||
# Build recipient list
|
||||
recipients = list(to)
|
||||
if cc:
|
||||
recipients.extend(cc)
|
||||
if bcc:
|
||||
recipients.extend(bcc)
|
||||
|
||||
# Send via SMTP
|
||||
await aiosmtplib.send(
|
||||
msg,
|
||||
hostname=self.settings.smtp_host,
|
||||
port=self.settings.smtp_port,
|
||||
username=self.settings.smtp_username,
|
||||
password=self.settings.smtp_password.get_secret_value(),
|
||||
start_tls=self.settings.smtp_use_tls,
|
||||
)
|
||||
|
||||
return OperationResult(
|
||||
success=True,
|
||||
message=f"Email sent successfully to {', '.join(to)}",
|
||||
id=msg.get("Message-ID"),
|
||||
)
|
||||
except Exception as e:
|
||||
return OperationResult(success=False, message=str(e))
|
||||
|
||||
def _parse_envelope_addresses(self, addresses) -> list[EmailAddress]:
|
||||
if not addresses:
|
||||
return []
|
||||
result = []
|
||||
for addr in addresses:
|
||||
result.append(
|
||||
EmailAddress(
|
||||
name=decode_mime_header(addr.name) if addr.name else None,
|
||||
email=f"{addr.mailbox.decode() if addr.mailbox else 'unknown'}@{addr.host.decode() if addr.host else 'unknown.com'}",
|
||||
)
|
||||
)
|
||||
return result
|
||||
|
||||
def _has_attachments(self, bodystructure) -> bool:
|
||||
if bodystructure is None:
|
||||
return False
|
||||
# Simple heuristic: check if multipart with non-text parts
|
||||
if isinstance(bodystructure, (list, tuple)):  # IMAPClient returns BODYSTRUCTURE as nested tuples
|
||||
for part in bodystructure:
|
||||
if isinstance(part, tuple) and len(part) > 0:
|
||||
content_type = part[0].decode() if isinstance(part[0], bytes) else str(part[0])
|
||||
if content_type.lower() not in ["text", "multipart"]:
|
||||
return True
|
||||
return False
|
||||
|
||||
def _get_body(self, msg) -> tuple[Optional[str], Optional[str]]:
|
||||
body_text = None
|
||||
body_html = None
|
||||
|
||||
if msg.is_multipart():
|
||||
for part in msg.walk():
|
||||
content_type = part.get_content_type()
|
||||
content_disposition = str(part.get("Content-Disposition", ""))
|
||||
|
||||
if "attachment" in content_disposition:
|
||||
continue
|
||||
|
||||
if content_type == "text/plain" and body_text is None:
|
||||
payload = part.get_payload(decode=True)
|
||||
if payload:
|
||||
charset = part.get_content_charset() or "utf-8"
|
||||
body_text = payload.decode(charset, errors="replace")
|
||||
elif content_type == "text/html" and body_html is None:
|
||||
payload = part.get_payload(decode=True)
|
||||
if payload:
|
||||
charset = part.get_content_charset() or "utf-8"
|
||||
body_html = payload.decode(charset, errors="replace")
|
||||
else:
|
||||
content_type = msg.get_content_type()
|
||||
payload = msg.get_payload(decode=True)
|
||||
if payload:
|
||||
charset = msg.get_content_charset() or "utf-8"
|
||||
decoded = payload.decode(charset, errors="replace")
|
||||
if content_type == "text/html":
|
||||
body_html = decoded
|
||||
else:
|
||||
body_text = decoded
|
||||
|
||||
return body_text, body_html
|
||||
|
||||
def _get_text_snippet(self, msg, max_length: int = 200) -> Optional[str]:
|
||||
body_text, body_html = self._get_body(msg)
|
||||
text = body_text or ""
|
||||
|
||||
if not text and body_html:
|
||||
# Strip HTML tags for snippet
|
||||
text = re.sub(r"<[^>]+>", "", body_html)
|
||||
text = re.sub(r"\s+", " ", text).strip()
|
||||
|
||||
if text:
|
||||
return text[:max_length] + "..." if len(text) > max_length else text
|
||||
return None
|
||||
|
||||
def _get_attachments(self, msg) -> list[Attachment]:
|
||||
attachments = []
|
||||
|
||||
if msg.is_multipart():
|
||||
for part in msg.walk():
|
||||
content_disposition = str(part.get("Content-Disposition", ""))
|
||||
if "attachment" in content_disposition:
|
||||
filename = part.get_filename()
|
||||
if filename:
|
||||
filename = decode_mime_header(filename)
|
||||
else:
|
||||
filename = "unnamed"
|
||||
|
||||
attachments.append(
|
||||
Attachment(
|
||||
filename=filename,
|
||||
content_type=part.get_content_type(),
|
||||
size=len(part.get_payload(decode=True) or b""),
|
||||
content_id=part.get("Content-ID"),
|
||||
)
|
||||
)
|
||||
|
||||
return attachments
|
||||
|
||||
def _find_trash_folder(self) -> Optional[str]:
|
||||
client = self._get_imap_client()
|
||||
folders = client.list_folders()
|
||||
|
||||
trash_names = ["Trash", "Deleted", "Deleted Items", "Deleted Messages", "[Gmail]/Trash"]
|
||||
for flags, delimiter, name in folders:
|
||||
if name in trash_names or b"\\Trash" in flags:
|
||||
return name
|
||||
return None
|
||||
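A minimal sketch of driving EmailService directly, assuming IMAP/SMTP settings from .env.example; recipients and counts are illustrative:

import asyncio

from config import Settings
from services.email_service import EmailService

async def main() -> None:
    service = EmailService(Settings())
    listing = service.list_emails("INBOX", limit=5)  # newest five messages
    for summary in listing.emails:
        print(summary.date, summary.is_read, summary.subject)
    result = await service.send_email(
        to=["friend@example.com"],
        subject="Hello from the PIM MCP server",
        body="Just a test message.",
    )
    print(result.success, result.message)

asyncio.run(main())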
5
src/tools/__init__.py
Normal file
@@ -0,0 +1,5 @@
from .email_tools import register_email_tools
from .calendar_tools import register_calendar_tools
from .contacts_tools import register_contacts_tools

__all__ = ["register_email_tools", "register_calendar_tools", "register_contacts_tools"]
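These register_* helpers are called once at startup; a hedged sketch of that wiring (the real entry point lives elsewhere in the repo, and the enable_* attribute names are assumptions based on the feature flags in .env.example):

from fastmcp import FastMCP

from config import Settings
from services.calendar_service import CalendarService
from services.contacts_service import ContactsService
from services.email_service import EmailService
from tools import register_calendar_tools, register_contacts_tools, register_email_tools

settings = Settings()
mcp = FastMCP("PIM MCP Server")

# Feature flags from .env decide which tool groups are exposed.
if settings.enable_email:       # assumed flag name
    register_email_tools(mcp, EmailService(settings))
if settings.enable_calendar:    # assumed flag name
    register_calendar_tools(mcp, CalendarService(settings))
if settings.enable_contacts:    # assumed flag name
    register_contacts_tools(mcp, ContactsService(settings))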
125
src/tools/calendar_tools.py
Normal file
@@ -0,0 +1,125 @@
|
||||
from typing import Optional
|
||||
from fastmcp import FastMCP
|
||||
|
||||
from services.calendar_service import CalendarService
|
||||
|
||||
|
||||
def register_calendar_tools(mcp: FastMCP, service: CalendarService):
|
||||
"""Register all calendar-related MCP tools."""
|
||||
|
||||
@mcp.tool(description="List all available calendars from the CalDAV server. Returns calendar ID, name, and properties.")
|
||||
def list_calendars() -> list[dict]:
|
||||
"""List all calendars."""
|
||||
calendars = service.list_calendars()
|
||||
return [c.model_dump() for c in calendars]
|
||||
|
||||
@mcp.tool(description="List events in a calendar within a specified date range. Supports recurring event expansion.")
|
||||
def list_events(
|
||||
calendar_id: str,
|
||||
start_date: str,
|
||||
end_date: str,
|
||||
include_recurring: bool = True,
|
||||
) -> dict:
|
||||
"""
|
||||
List events in a date range.
|
||||
|
||||
Args:
|
||||
calendar_id: The calendar ID (URL) to query
|
||||
start_date: Start of date range (ISO format: YYYY-MM-DD)
|
||||
end_date: End of date range (ISO format: YYYY-MM-DD)
|
||||
include_recurring: Whether to expand recurring events (default: True)
|
||||
"""
|
||||
result = service.list_events(calendar_id, start_date, end_date, include_recurring)
|
||||
return result.model_dump()
|
||||
|
||||
@mcp.tool(description="Get detailed information about a specific calendar event including attendees and recurrence.")
|
||||
def get_event(
|
||||
calendar_id: str,
|
||||
event_id: str,
|
||||
) -> Optional[dict]:
|
||||
"""
|
||||
Get a specific event.
|
||||
|
||||
Args:
|
||||
calendar_id: The calendar ID containing the event
|
||||
event_id: The unique ID (UID) of the event
|
||||
"""
|
||||
result = service.get_event(calendar_id, event_id)
|
||||
return result.model_dump() if result else None
|
||||
|
||||
@mcp.tool(description="Create a new calendar event with title, time, location, attendees, and optional recurrence.")
|
||||
def create_event(
|
||||
calendar_id: str,
|
||||
title: str,
|
||||
start: str,
|
||||
end: str,
|
||||
description: Optional[str] = None,
|
||||
location: Optional[str] = None,
|
||||
attendees: Optional[list[str]] = None,
|
||||
reminders: Optional[list[int]] = None,
|
||||
recurrence: Optional[str] = None,
|
||||
) -> dict:
|
||||
"""
|
||||
Create a new calendar event.
|
||||
|
||||
Args:
|
||||
calendar_id: The calendar ID to create the event in
|
||||
title: Event title/summary
|
||||
start: Start datetime (ISO format: YYYY-MM-DDTHH:MM:SS)
|
||||
end: End datetime (ISO format: YYYY-MM-DDTHH:MM:SS)
|
||||
description: Event description (optional)
|
||||
location: Event location (optional)
|
||||
attendees: List of attendee email addresses (optional)
|
||||
reminders: List of reminder times in minutes before event (optional)
|
||||
recurrence: iCalendar RRULE string for recurring events (optional)
|
||||
"""
|
||||
result = service.create_event(
|
||||
calendar_id, title, start, end, description, location, attendees, reminders, recurrence
|
||||
)
|
||||
return result.model_dump()
|
||||
|
||||
@mcp.tool(description="Update an existing calendar event. Only provided fields will be modified.")
|
||||
def update_event(
|
||||
calendar_id: str,
|
||||
event_id: str,
|
||||
title: Optional[str] = None,
|
||||
start: Optional[str] = None,
|
||||
end: Optional[str] = None,
|
||||
description: Optional[str] = None,
|
||||
location: Optional[str] = None,
|
||||
attendees: Optional[list[str]] = None,
|
||||
) -> Optional[dict]:
|
||||
"""
|
||||
Update an existing event.
|
||||
|
||||
Args:
|
||||
calendar_id: The calendar ID containing the event
|
||||
event_id: The unique ID of the event to update
|
||||
title: New event title (optional)
|
||||
start: New start datetime (optional)
|
||||
end: New end datetime (optional)
|
||||
description: New description (optional)
|
||||
location: New location (optional)
|
||||
attendees: New list of attendee emails (optional)
|
||||
"""
|
||||
result = service.update_event(
|
||||
calendar_id, event_id, title, start, end, description, location, attendees
|
||||
)
|
||||
return result.model_dump() if result else None
|
||||
|
||||
@mcp.tool(description="Delete a calendar event by ID.")
|
||||
def delete_event(
|
||||
calendar_id: str,
|
||||
event_id: str,
|
||||
notify_attendees: bool = True,
|
||||
) -> dict:
|
||||
"""
|
||||
Delete a calendar event.
|
||||
|
||||
Args:
|
||||
calendar_id: The calendar ID containing the event
|
||||
event_id: The unique ID of the event to delete
|
||||
notify_attendees: Whether to notify attendees of cancellation (default: True)
|
||||
"""
|
||||
result = service.delete_event(calendar_id, event_id, notify_attendees)
|
||||
return result.model_dump()
|
||||
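For example, a recurring stand-up could be created through the tool above. The call is shown as a plain function call for illustration (an MCP client would invoke the tool by name), and the calendar_id is a made-up CalDAV path:

create_event(
    calendar_id="/remote.php/dav/calendars/user/personal/",
    title="Team stand-up",
    start="2024-06-03T09:00:00",
    end="2024-06-03T09:15:00",
    location="Video call",
    attendees=["alice@example.com", "bob@example.com"],
    reminders=[10],                          # 10 minutes before the event
    recurrence="FREQ=WEEKLY;BYDAY=MO,WE,FR",
)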
153
src/tools/contacts_tools.py
Normal file
@@ -0,0 +1,153 @@
|
||||
from typing import Optional
|
||||
from fastmcp import FastMCP
|
||||
|
||||
from services.contacts_service import ContactsService
|
||||
|
||||
|
||||
def register_contacts_tools(mcp: FastMCP, service: ContactsService):
|
||||
"""Register all contacts-related MCP tools."""
|
||||
|
||||
@mcp.tool(description="List all available address books from the CardDAV server.")
|
||||
def list_addressbooks() -> list[dict]:
|
||||
"""List all address books."""
|
||||
addressbooks = service.list_addressbooks()
|
||||
return [a.model_dump() for a in addressbooks]
|
||||
|
||||
@mcp.tool(description="List contacts in an address book with optional search filtering and pagination.")
|
||||
def list_contacts(
|
||||
addressbook_id: str,
|
||||
search: Optional[str] = None,
|
||||
limit: int = 100,
|
||||
offset: int = 0,
|
||||
) -> dict:
|
||||
"""
|
||||
List contacts in an address book.
|
||||
|
||||
Args:
|
||||
addressbook_id: The address book ID (URL path) to query
|
||||
search: Optional search term to filter contacts by name or email
|
||||
limit: Maximum number of contacts to return (default: 100)
|
||||
offset: Number of contacts to skip for pagination (default: 0)
|
||||
"""
|
||||
result = service.list_contacts(addressbook_id, search, limit, offset)
|
||||
return result.model_dump()
|
||||
|
||||
@mcp.tool(description="Get detailed information about a specific contact including all fields.")
|
||||
def get_contact(
|
||||
addressbook_id: str,
|
||||
contact_id: str,
|
||||
) -> Optional[dict]:
|
||||
"""
|
||||
Get a specific contact.
|
||||
|
||||
Args:
|
||||
addressbook_id: The address book containing the contact
|
||||
contact_id: The unique ID (URL) of the contact
|
||||
"""
|
||||
result = service.get_contact(addressbook_id, contact_id)
|
||||
return result.model_dump() if result else None
|
||||
|
||||
@mcp.tool(description="Create a new contact with name, emails, phones, addresses, and other details.")
|
||||
def create_contact(
|
||||
addressbook_id: str,
|
||||
first_name: Optional[str] = None,
|
||||
last_name: Optional[str] = None,
|
||||
display_name: Optional[str] = None,
|
||||
emails: Optional[list[dict]] = None,
|
||||
phones: Optional[list[dict]] = None,
|
||||
addresses: Optional[list[dict]] = None,
|
||||
organization: Optional[str] = None,
|
||||
title: Optional[str] = None,
|
||||
notes: Optional[str] = None,
|
||||
birthday: Optional[str] = None,
|
||||
) -> dict:
|
||||
"""
|
||||
Create a new contact.
|
||||
|
||||
Args:
|
||||
addressbook_id: The address book ID to create the contact in
|
||||
first_name: Contact's first/given name
|
||||
last_name: Contact's last/family name
|
||||
display_name: Full display name (auto-generated if not provided)
|
||||
emails: List of email objects with 'type' (home/work) and 'email' fields
|
||||
phones: List of phone objects with 'type' (mobile/home/work) and 'number' fields
|
||||
addresses: List of address objects with 'type', 'street', 'city', 'state', 'postal_code', 'country'
|
||||
organization: Company/organization name
|
||||
title: Job title
|
||||
notes: Additional notes
|
||||
birthday: Birthday in ISO format (YYYY-MM-DD)
|
||||
"""
|
||||
result = service.create_contact(
|
||||
addressbook_id,
|
||||
first_name,
|
||||
last_name,
|
||||
display_name,
|
||||
emails,
|
||||
phones,
|
||||
addresses,
|
||||
organization,
|
||||
title,
|
||||
notes,
|
||||
birthday,
|
||||
)
|
||||
return result.model_dump()
|
||||
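The emails, phones, and addresses arguments are lists of plain dicts matching the field names above; an illustrative payload:

create_contact(
    addressbook_id="/dav/addressbooks/user/contacts/",
    first_name="Jane",
    last_name="Doe",
    emails=[{"type": "work", "email": "jane.doe@example.com"}],
    phones=[{"type": "mobile", "number": "+1 555 0100"}],
    addresses=[{
        "type": "home",
        "street": "1 Example Street",
        "city": "Springfield",
        "state": "IL",
        "postal_code": "62701",
        "country": "USA",
    }],
    organization="Example Corp",
    title="Engineer",
    birthday="1990-04-12",
)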
|
||||
@mcp.tool(description="Update an existing contact. Only provided fields will be modified.")
|
||||
def update_contact(
|
||||
addressbook_id: str,
|
||||
contact_id: str,
|
||||
first_name: Optional[str] = None,
|
||||
last_name: Optional[str] = None,
|
||||
display_name: Optional[str] = None,
|
||||
emails: Optional[list[dict]] = None,
|
||||
phones: Optional[list[dict]] = None,
|
||||
addresses: Optional[list[dict]] = None,
|
||||
organization: Optional[str] = None,
|
||||
title: Optional[str] = None,
|
||||
notes: Optional[str] = None,
|
||||
) -> Optional[dict]:
|
||||
"""
|
||||
Update an existing contact.
|
||||
|
||||
Args:
|
||||
addressbook_id: The address book containing the contact
|
||||
contact_id: The unique ID of the contact to update
|
||||
first_name: New first name (optional)
|
||||
last_name: New last name (optional)
|
||||
display_name: New display name (optional)
|
||||
emails: New list of emails (optional, replaces existing)
|
||||
phones: New list of phones (optional, replaces existing)
|
||||
addresses: New list of addresses (optional, replaces existing)
|
||||
organization: New organization (optional)
|
||||
title: New title (optional)
|
||||
notes: New notes (optional)
|
||||
"""
|
||||
result = service.update_contact(
|
||||
addressbook_id,
|
||||
contact_id,
|
||||
first_name,
|
||||
last_name,
|
||||
display_name,
|
||||
emails,
|
||||
phones,
|
||||
addresses,
|
||||
organization,
|
||||
title,
|
||||
notes,
|
||||
)
|
||||
return result.model_dump() if result else None
|
||||
|
||||
@mcp.tool(description="Delete a contact from an address book.")
|
||||
def delete_contact(
|
||||
addressbook_id: str,
|
||||
contact_id: str,
|
||||
) -> dict:
|
||||
"""
|
||||
Delete a contact.
|
||||
|
||||
Args:
|
||||
addressbook_id: The address book containing the contact
|
||||
contact_id: The unique ID of the contact to delete
|
||||
"""
|
||||
result = service.delete_contact(addressbook_id, contact_id)
|
||||
return result.model_dump()
|
||||
134
src/tools/email_tools.py
Normal file
@@ -0,0 +1,134 @@
|
||||
from typing import Optional
|
||||
from fastmcp import FastMCP
|
||||
|
||||
from services.email_service import EmailService
|
||||
|
||||
|
||||
def register_email_tools(mcp: FastMCP, service: EmailService):
|
||||
"""Register all email-related MCP tools."""
|
||||
|
||||
@mcp.tool(description="List all mailboxes/folders in the email account. Returns name, path, message count, and unread count for each mailbox.")
|
||||
def list_mailboxes() -> list[dict]:
|
||||
"""List all IMAP mailboxes/folders."""
|
||||
mailboxes = service.list_mailboxes()
|
||||
return [m.model_dump() for m in mailboxes]
|
||||
|
||||
@mcp.tool(description="List emails in a mailbox with pagination. Returns email summaries including subject, from, date, and read status.")
|
||||
def list_emails(
|
||||
mailbox: str = "INBOX",
|
||||
limit: int = 50,
|
||||
offset: int = 0,
|
||||
include_body: bool = False,
|
||||
) -> dict:
|
||||
"""
|
||||
List emails in a mailbox.
|
||||
|
||||
Args:
|
||||
mailbox: The mailbox/folder to list (default: INBOX)
|
||||
limit: Maximum number of emails to return (default: 50)
|
||||
offset: Number of emails to skip for pagination (default: 0)
|
||||
include_body: Whether to include email body snippets (default: False)
|
||||
"""
|
||||
result = service.list_emails(mailbox, limit, offset, include_body)
|
||||
return result.model_dump()
|
||||
|
||||
@mcp.tool(description="Read a specific email by ID with full body content and attachment information.")
|
||||
def read_email(
|
||||
mailbox: str,
|
||||
email_id: str,
|
||||
format: str = "text",
|
||||
) -> Optional[dict]:
|
||||
"""
|
||||
Read a specific email.
|
||||
|
||||
Args:
|
||||
mailbox: The mailbox containing the email
|
||||
email_id: The unique ID of the email
|
||||
format: Body format to return - 'text', 'html', or 'both' (default: text)
|
||||
"""
|
||||
result = service.read_email(mailbox, email_id, format)
|
||||
return result.model_dump() if result else None
|
||||
|
||||
@mcp.tool(description="Search emails in a mailbox using various criteria like subject, sender, or body content.")
|
||||
def search_emails(
|
||||
query: str,
|
||||
mailbox: str = "INBOX",
|
||||
search_in: Optional[list[str]] = None,
|
||||
date_from: Optional[str] = None,
|
||||
date_to: Optional[str] = None,
|
||||
limit: int = 50,
|
||||
) -> dict:
|
||||
"""
|
||||
Search for emails matching criteria.
|
||||
|
||||
Args:
|
||||
query: Search term to look for
|
||||
mailbox: Mailbox to search in (default: INBOX)
|
||||
search_in: Fields to search, in priority order - ['subject', 'from', 'body']; only the first listed field is applied (default: subject)
|
||||
date_from: Only emails after this date (format: DD-Mon-YYYY, e.g., 01-Jan-2024)
|
||||
date_to: Only emails before this date (format: DD-Mon-YYYY)
|
||||
limit: Maximum results to return (default: 50)
|
||||
"""
|
||||
if search_in is None:
|
||||
search_in = ["subject", "from", "body"]
|
||||
result = service.search_emails(query, mailbox, search_in, date_from, date_to, limit)
|
||||
return result.model_dump()
|
||||
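For example, a subject search restricted to the first quarter of 2024 (note the IMAP-style DD-Mon-YYYY dates; values are illustrative):

search_emails(
    query="invoice",
    mailbox="INBOX",
    search_in=["subject"],
    date_from="01-Jan-2024",
    date_to="31-Mar-2024",
    limit=20,
)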
|
||||
@mcp.tool(description="Move an email from one mailbox/folder to another.")
|
||||
def move_email(
|
||||
email_id: str,
|
||||
source_mailbox: str,
|
||||
destination_mailbox: str,
|
||||
) -> dict:
|
||||
"""
|
||||
Move an email to a different folder.
|
||||
|
||||
Args:
|
||||
email_id: The unique ID of the email to move
|
||||
source_mailbox: The current mailbox containing the email
|
||||
destination_mailbox: The target mailbox to move the email to
|
||||
"""
|
||||
result = service.move_email(email_id, source_mailbox, destination_mailbox)
|
||||
return result.model_dump()
|
||||
|
||||
@mcp.tool(description="Delete an email, either moving it to trash or permanently deleting it.")
|
||||
def delete_email(
|
||||
email_id: str,
|
||||
mailbox: str,
|
||||
permanent: bool = False,
|
||||
) -> dict:
|
||||
"""
|
||||
Delete an email.
|
||||
|
||||
Args:
|
||||
email_id: The unique ID of the email to delete
|
||||
mailbox: The mailbox containing the email
|
||||
permanent: If True, permanently delete; if False, move to Trash (default: False)
|
||||
"""
|
||||
result = service.delete_email(email_id, mailbox, permanent)
|
||||
return result.model_dump()
|
||||
|
||||
@mcp.tool(description="Send a new email via SMTP. Supports plain text and HTML content, CC, BCC, and reply-to.")
|
||||
async def send_email(
|
||||
to: list[str],
|
||||
subject: str,
|
||||
body: str,
|
||||
cc: Optional[list[str]] = None,
|
||||
bcc: Optional[list[str]] = None,
|
||||
reply_to: Optional[str] = None,
|
||||
html_body: Optional[str] = None,
|
||||
) -> dict:
|
||||
"""
|
||||
Send a new email.
|
||||
|
||||
Args:
|
||||
to: List of recipient email addresses
|
||||
subject: Email subject line
|
||||
body: Plain text email body
|
||||
cc: List of CC recipients (optional)
|
||||
bcc: List of BCC recipients (optional)
|
||||
reply_to: Reply-to address (optional)
|
||||
html_body: HTML version of the email body (optional)
|
||||
"""
|
||||
result = await service.send_email(to, subject, body, cc, bcc, reply_to, html_body)
|
||||
return result.model_dump()
|
||||