checkpoint: stalwart deploy, beam-design, migration scripts, config tweaks
Stalwart + Bulwark mail server deployment with OIDC, TLS cert, vault secrets. Beam design service. Pingora config cleanup. SeaweedFS replication fix. Kratos values tweak. Migration scripts for mbox/messages/calendars from La Suite to Stalwart.
This commit is contained in:
54
scripts/export-mbox.py
Normal file
54
scripts/export-mbox.py
Normal file
@@ -0,0 +1,54 @@
|
||||
import os, zlib, gzip, email.utils, time

import psycopg2

# Source database: the La Suite "messages" Postgres instance.
DB_HOST = "postgres-rw.data.svc.cluster.local"

# One row per (recipient mailbox, message): thread access links a mailbox to
# every message in a thread, so DISTINCT guards against duplicate join rows.
EXPORT_QUERY = """
SELECT DISTINCT mb.local_part || '@' || d.name as mailbox_email,
       b.raw_content, b.compression, m.sent_at
FROM messages_message m
JOIN messages_blob b ON m.blob_id = b.id
JOIN messages_thread t ON m.thread_id = t.id
JOIN messages_threadaccess ta ON ta.thread_id = t.id
JOIN messages_mailbox mb ON ta.mailbox_id = mb.id
JOIN messages_maildomain d ON mb.domain_id = d.id
ORDER BY mailbox_email, m.sent_at
"""


def _decompress(raw, compression):
    """Return the plain RFC 5322 bytes for a stored blob.

    compression == 1 means the blob is deflate-compressed, but the exact
    container varies, so try gzip, raw deflate, then zlib-wrapped deflate
    (the same fallback chain as the migrate-messages-job script) before
    giving up and returning the bytes untouched.
    """
    if compression != 1:
        return raw
    for decode in (
        gzip.decompress,
        lambda b: zlib.decompress(b, -zlib.MAX_WBITS),  # raw deflate stream
        zlib.decompress,                                # zlib-wrapped deflate
    ):
        try:
            return decode(raw)
        except Exception:
            continue
    return raw


def _append_message(mbox_path, mailbox_email, eml, sent_at):
    """Append one message to *mbox_path* in mbox format (mboxo escaping)."""
    # mbox "From " separator needs an asctime-style date; fall back to "now"
    # when the message has no sent_at.
    date_str = email.utils.formatdate(time.mktime(sent_at.timetuple())) if sent_at else email.utils.formatdate()
    with open(mbox_path, "ab") as f:
        f.write(f"From {mailbox_email} {date_str}\n".encode())
        for line in eml.split(b"\n"):
            # mboxo quoting: body lines that look like a separator get a ">".
            if line.startswith(b"From "):
                f.write(b">" + line + b"\n")
            else:
                f.write(line + b"\n")
        f.write(b"\n")  # blank line terminates the message


def main():
    """Export every mailbox in messages_db to /tmp/mbox/<address>.mbox."""
    conn = psycopg2.connect(
        host=DB_HOST, port=5432, dbname="messages_db",
        user="messages", password=os.environ["DB_PASSWORD"]
    )
    counts = {}
    try:
        cur = conn.cursor()
        cur.execute(EXPORT_QUERY)
        os.makedirs("/tmp/mbox", exist_ok=True)
        for mailbox_email, raw_content, compression, sent_at in cur.fetchall():
            eml = _decompress(bytes(raw_content), compression)
            _append_message(f"/tmp/mbox/{mailbox_email}.mbox", mailbox_email, eml, sent_at)
            counts[mailbox_email] = counts.get(mailbox_email, 0) + 1
    finally:
        # Close the connection even if the export dies mid-way.
        conn.close()

    for addr, count in counts.items():
        print(f"{addr}: {count} messages")
    print("Export complete.")


if __name__ == "__main__":
    main()
|
||||
115
scripts/migrate-calendars.sh
Executable file
115
scripts/migrate-calendars.sh
Executable file
@@ -0,0 +1,115 @@
|
||||
#!/usr/bin/env bash
# Migrate calendars from La Suite Calendars (CalDAV) to Stalwart (CalDAV).
#
# Exports all calendars per user from the old CalDAV endpoint and imports them
# into Stalwart's CalDAV endpoint.
#
# Prerequisites:
# - curl
# - Port-forward to both old calendars-backend and new Stalwart:
# kubectl port-forward -n lasuite svc/calendars-backend 8081:80 &
# kubectl port-forward -n stalwart svc/stalwart 8080:8080 &
#
# Usage:
# ./migrate-calendars.sh \
# --old-url http://127.0.0.1:8081 \
# --new-url http://127.0.0.1:8080 \
# --new-user admin --new-password <pw> \
# --users "sienna@sunbeam.pt,amber@sunbeam.pt,lonni@sunbeam.pt"

set -euo pipefail

# Scratch directory for exported data; its path is echoed at the end so the
# operator can inspect or archive it.
EXPORT_DIR="$(mktemp -d)/calendar-export"
mkdir -p "$EXPORT_DIR"

OLD_URL=""
NEW_URL=""
NEW_USER=""
NEW_PASS=""
USERS=""

# Minimal long-option parser; every option takes exactly one value.
while [[ $# -gt 0 ]]; do
  case "$1" in
    --old-url) OLD_URL="$2"; shift 2 ;;
    --new-url) NEW_URL="$2"; shift 2 ;;
    --new-user) NEW_USER="$2"; shift 2 ;;
    --new-password) NEW_PASS="$2"; shift 2 ;;
    --users) USERS="$2"; shift 2 ;;
    *) echo "Unknown arg: $1"; exit 1 ;;
  esac
done

if [[ -z "$OLD_URL" || -z "$NEW_URL" || -z "$NEW_USER" || -z "$NEW_PASS" || -z "$USERS" ]]; then
  echo "Usage: $0 --old-url <url> --new-url <url> --new-user <user> --new-password <pw> --users <comma-separated>"
  exit 1
fi

IFS=',' read -ra USER_LIST <<< "$USERS"

echo "==> Exporting calendars from La Suite..."

for user in "${USER_LIST[@]}"; do
  user_dir="$EXPORT_DIR/$user"
  mkdir -p "$user_dir"

  echo " Exporting calendars for $user..."

  # NOTE: The exact CalDAV path depends on La Suite's CalDAV implementation.
  # La Suite Calendars uses /caldav/<user>/ or similar. Adjust as needed.
  # This PROPFIND discovers all calendars for the user.
  #
  # "Depth: 1" restricts the listing to the user's immediate collections;
  # without it, RFC 4918 lets servers assume infinity or reject the request.
  # -f makes curl fail on HTTP 4xx/5xx so "|| true" only masks genuine
  # best-effort failures instead of silently saving an HTML error page.
  curl -sf -X PROPFIND \
    -H "Depth: 1" \
    -H "Content-Type: application/xml" \
    -d '<?xml version="1.0" encoding="utf-8"?>
<d:propfind xmlns:d="DAV:" xmlns:cs="urn:ietf:params:xml:ns:caldav">
  <d:prop>
    <d:displayname/>
    <d:resourcetype/>
  </d:prop>
</d:propfind>' \
    "$OLD_URL/caldav/$user/" \
    -o "$user_dir/calendars.xml" 2>/dev/null || true

  # Export each calendar as .ics via CalDAV REPORT
  # NOTE: This is a simplified template. The actual export depends on
  # the La Suite CalDAV response format. You may need to parse the
  # PROPFIND response to discover calendar URLs, then issue
  # calendar-multiget or calendar-query REPORT requests.
  #
  # A simpler alternative: if La Suite exposes /ical/ export endpoints
  # (seen in pingora config: /ical/ → calendars-backend), use those:
  #
  # curl -s "$OLD_URL/ical/$user/calendar.ics" -o "$user_dir/calendar.ics"
  #
  echo " Exported to $user_dir/"
done

echo ""
echo "==> Importing calendars into Stalwart..."

for user in "${USER_LIST[@]}"; do
  user_dir="$EXPORT_DIR/$user"

  for ics_file in "$user_dir"/*.ics; do
    # The glob itself is returned when no .ics files exist; skip that case.
    [[ -f "$ics_file" ]] || continue
    cal_name=$(basename "$ics_file" .ics)

    echo " Importing $cal_name for $user..."

    # Upload .ics to Stalwart CalDAV.
    # Stalwart CalDAV path: /dav/calendars/user/<user>/<calendar-name>/
    # -f is required: without it curl exits 0 on HTTP errors and the
    # failure branch below can never fire.
    curl -sf -X PUT \
      -u "$NEW_USER:$NEW_PASS" \
      -H "Content-Type: text/calendar" \
      --data-binary "@$ics_file" \
      "$NEW_URL/dav/calendars/user/$user/$cal_name.ics" || {
      echo " ⚠️ Failed to import $cal_name for $user"
    }
  done
done

echo ""
echo "==> Migration complete. Exported data is in: $EXPORT_DIR"
echo ""
echo "Verify by comparing event counts:"
echo " Old: curl -s '$OLD_URL/caldav/<user>/' -X PROPFIND | grep -c VEVENT"
echo " New: curl -s -u admin:pw '$NEW_URL/dav/calendars/user/<user>/' -X PROPFIND | grep -c VEVENT"
|
||||
57
scripts/migrate-mbox-job.yaml
Normal file
57
scripts/migrate-mbox-job.yaml
Normal file
@@ -0,0 +1,57 @@
|
||||
# One-shot Job that copies all La Suite "messages" mail into Stalwart via mbox.
#
# Phase 1 (initContainer "export"): runs export-mbox.py (mounted from the
# export-mbox-script ConfigMap) against the messages Postgres database and
# writes one mbox file per mailbox into a shared emptyDir.
# Phase 2 (container "import"): feeds each mbox file to `stalwart-cli import`
# as the account named after the file.
apiVersion: batch/v1
kind: Job
metadata:
  name: migrate-mbox
  namespace: stalwart
spec:
  template:
    spec:
      # Run once; a failed migration should be inspected, not retried blindly.
      restartPolicy: Never
      initContainers:
        # Export phase — guaranteed to finish before the import container starts.
        - name: export
          image: python:3.12-slim
          command: ["/bin/sh", "-c", "pip -q install psycopg2-binary && python3 /scripts/export-mbox.py && ls -la /tmp/mbox/"]
          env:
            # Password for the "messages" Postgres role used by the script.
            - name: DB_PASSWORD
              valueFrom:
                secretKeyRef:
                  name: messages-db-credentials
                  key: password
          volumeMounts:
            - name: script
              mountPath: /scripts
            - name: mbox
              mountPath: /tmp/mbox
      containers:
        # Import phase — one stalwart-cli run per exported mailbox file.
        - name: import
          image: stalwartlabs/stalwart:v0.15.5
          command:
            - /bin/sh
            - -c
            - |
              URL="http://stalwart.stalwart.svc.cluster.local:8080"
              CREDS="admin:${ADMIN_PASSWORD}"

              for mbox in /tmp/mbox/*.mbox; do
                ACCOUNT=$(basename "$mbox" .mbox)
                echo "=== Importing $mbox into $ACCOUNT ==="
                stalwart-cli -u "$URL" -c "$CREDS" import messages -f mbox "$ACCOUNT" "$mbox"
                echo ""
              done

              echo "All imports complete."
          env:
            # Stalwart admin credential used by stalwart-cli.
            - name: ADMIN_PASSWORD
              valueFrom:
                secretKeyRef:
                  name: stalwart-app-secrets
                  key: admin-password
          volumeMounts:
            - name: mbox
              mountPath: /tmp/mbox
      volumes:
        # Export script, provided out-of-band as a ConfigMap.
        - name: script
          configMap:
            name: export-mbox-script
        # Scratch space shared between the export and import phases.
        - name: mbox
          emptyDir: {}
|
||||
261
scripts/migrate-messages-job.yaml
Normal file
261
scripts/migrate-messages-job.yaml
Normal file
@@ -0,0 +1,261 @@
|
||||
# One-shot Job that runs the JMAP-based migration (see the
# migrate-messages-script ConfigMap) from the La Suite messages database
# straight into per-user Stalwart accounts.
apiVersion: batch/v1
kind: Job
metadata:
  name: migrate-messages
  namespace: stalwart
spec:
  template:
    spec:
      # Run once; inspect failures manually rather than auto-retrying.
      restartPolicy: Never
      containers:
        - name: migrate
          image: python:3.12-slim
          command:
            - /bin/sh
            - -c
            - |
              pip install psycopg2-binary && python3 /scripts/migrate.py
          env:
            # Stalwart admin credential — used for the management API and for
            # per-user impersonation during import.
            - name: ADMIN_PASSWORD
              valueFrom:
                secretKeyRef:
                  name: stalwart-app-secrets
                  key: admin-password
            # Password for the "messages" Postgres role.
            - name: DB_PASSWORD
              valueFrom:
                secretKeyRef:
                  name: messages-db-credentials
                  key: password
          volumeMounts:
            - name: script
              mountPath: /scripts
      volumes:
        - name: script
          configMap:
            name: migrate-messages-script
|
||||
---
# Inline migration script, mounted into the migrate-messages Job at
# /scripts/migrate.py. Flow: read every message + its blob from messages_db,
# create the matching Stalwart principal if needed, resolve each user's JMAP
# account and inbox (via admin "%-impersonation"), then upload and
# Email/import each message into that inbox.
apiVersion: v1
kind: ConfigMap
metadata:
  name: migrate-messages-script
  namespace: stalwart
data:
  migrate.py: |
    import json, os, zlib, urllib.request, urllib.error, base64, sys
    import psycopg2

    DB_HOST = "postgres-rw.data.svc.cluster.local"
    DB_PORT = 5432
    DB_NAME = "messages_db"
    DB_USER = "messages"
    DB_PASS = os.environ.get("DB_PASSWORD", "")
    JMAP_URL = "http://stalwart.stalwart.svc.cluster.local:8080"
    ADMIN_USER = "admin"

    ADMIN_PASS = os.environ["ADMIN_PASSWORD"]

    auth_header = "Basic " + base64.b64encode(f"{ADMIN_USER}:{ADMIN_PASS}".encode()).decode()

    def jmap_call(method_calls):
        body = json.dumps({
            "using": ["urn:ietf:params:jmap:core", "urn:ietf:params:jmap:mail"],
            "methodCalls": method_calls,
        }).encode()
        req = urllib.request.Request(f"{JMAP_URL}/jmap", data=body,
            headers={"Authorization": auth_header, "Content-Type": "application/json"})
        with urllib.request.urlopen(req) as resp:
            return json.loads(resp.read())

    def upload_blob(account_id, eml_bytes):
        req = urllib.request.Request(
            f"{JMAP_URL}/jmap/upload/{account_id}/",
            data=eml_bytes,
            headers={"Authorization": auth_header, "Content-Type": "message/rfc822"})
        with urllib.request.urlopen(req) as resp:
            return json.loads(resp.read())["blobId"]

    def ensure_user(email, full_name):
        body = json.dumps({
            "type": "individual", "name": email,
            "description": full_name or email, "emails": [email],
            "quota": 0, "secrets": [], "urls": [], "memberOf": [],
            "roles": ["user"], "lists": [], "members": [],
            "enabledPermissions": [], "disabledPermissions": [], "externalMembers": [],
        }).encode()
        req = urllib.request.Request(f"{JMAP_URL}/api/principal",
            data=body, method="POST",
            headers={"Authorization": auth_header, "Content-Type": "application/json"})
        try:
            with urllib.request.urlopen(req) as resp:
                result = json.loads(resp.read())
                print(f" Created user {email} (id={result.get('data')})")
        except urllib.error.HTTPError as e:
            body_text = e.read().decode()
            if "fieldAlreadyExists" in body_text:
                print(f" User {email} already exists")
            else:
                print(f" Error creating user {email}: {e.code} {body_text}")

    def get_account_id_for_user(email):
        """Get JMAP account ID by authenticating as the user (admin impersonation)."""
        # Stalwart allows admin to access any account via the master user mechanism:
        # authenticate as "user%admin" with admin password
        impersonate_auth = "Basic " + base64.b64encode(
            f"{email}%{ADMIN_USER}:{ADMIN_PASS}".encode()).decode()
        req = urllib.request.Request(f"{JMAP_URL}/.well-known/jmap",
            headers={"Authorization": impersonate_auth})
        try:
            with urllib.request.urlopen(req) as resp:
                session = json.loads(resp.read())
                return next(iter(session.get("accounts", {})), None), impersonate_auth
        except urllib.error.HTTPError:
            # Fallback: try direct admin auth
            req = urllib.request.Request(f"{JMAP_URL}/.well-known/jmap",
                headers={"Authorization": auth_header})
            with urllib.request.urlopen(req) as resp:
                session = json.loads(resp.read())
                return next(iter(session.get("accounts", {})), None), auth_header

    def get_inbox_id(account_id, user_auth):
        body = json.dumps({
            "using": ["urn:ietf:params:jmap:core", "urn:ietf:params:jmap:mail"],
            "methodCalls": [["Mailbox/get", {"accountId": account_id}, "0"]],
        }).encode()
        req = urllib.request.Request(f"{JMAP_URL}/jmap", data=body,
            headers={"Authorization": user_auth, "Content-Type": "application/json"})
        with urllib.request.urlopen(req) as resp:
            result = json.loads(resp.read())
        mailboxes = result["methodResponses"][0][1]["list"]
        for mb in mailboxes:
            if mb.get("role") == "inbox" or mb.get("name", "").lower() == "inbox":
                return mb["id"]
        return mailboxes[0]["id"] if mailboxes else None

    def upload_blob_as(account_id, eml_bytes, user_auth):
        req = urllib.request.Request(
            f"{JMAP_URL}/jmap/upload/{account_id}/",
            data=eml_bytes,
            headers={"Authorization": user_auth, "Content-Type": "message/rfc822"})
        with urllib.request.urlopen(req) as resp:
            return json.loads(resp.read())["blobId"]

    def jmap_call_as(method_calls, user_auth):
        body = json.dumps({
            "using": ["urn:ietf:params:jmap:core", "urn:ietf:params:jmap:mail"],
            "methodCalls": method_calls,
        }).encode()
        req = urllib.request.Request(f"{JMAP_URL}/jmap", data=body,
            headers={"Authorization": user_auth, "Content-Type": "application/json"})
        with urllib.request.urlopen(req) as resp:
            return json.loads(resp.read())

    # Connect to messages_db
    print("Connecting to messages_db...")
    conn = psycopg2.connect(host=DB_HOST, port=DB_PORT, dbname=DB_NAME, user=DB_USER, password=DB_PASS)
    cur = conn.cursor()

    cur.execute("""
        SELECT DISTINCT m.id, m.subject, m.sent_at, m.is_draft, m.is_starred,
               m.is_trashed, m.is_spam,
               mb.local_part || '@' || d.name as mailbox_email,
               b.raw_content, b.compression
        FROM messages_message m
        JOIN messages_blob b ON m.blob_id = b.id
        JOIN messages_thread t ON m.thread_id = t.id
        JOIN messages_threadaccess ta ON ta.thread_id = t.id
        JOIN messages_mailbox mb ON ta.mailbox_id = mb.id
        JOIN messages_maildomain d ON mb.domain_id = d.id
        ORDER BY mailbox_email, m.sent_at
    """)
    messages = cur.fetchall()
    print(f"Found {len(messages)} messages to migrate.")

    cur.execute("SELECT email, full_name FROM messages_user")
    users = {row[0]: row[1] for row in cur.fetchall()}
    conn.close()

    # Ensure all mailbox users exist in Stalwart
    seen_emails = set()
    for msg in messages:
        email = msg[7]
        if email not in seen_emails:
            seen_emails.add(email)
            ensure_user(email, users.get(email, email))

    # Resolve per-user JMAP accounts
    user_contexts = {}
    for email in seen_emails:
        print(f"Resolving JMAP account for {email}...")
        account_id, user_auth = get_account_id_for_user(email)
        if account_id:
            inbox_id = get_inbox_id(account_id, user_auth)
            user_contexts[email] = (account_id, inbox_id, user_auth)
            print(f" {email}: account={account_id}, inbox={inbox_id}")
        else:
            print(f" WARNING: Could not get account for {email}, will skip")

    # Import messages into each user's account
    imported = 0
    errors = 0
    for msg in messages:
        msg_id, subject, sent_at, is_draft, is_starred, is_trashed, is_spam, email, raw_content, compression = msg

        if email not in user_contexts:
            errors += 1
            print(f" SKIP: {email}: {subject} (no account)")
            continue

        account_id, inbox_id, user_auth = user_contexts[email]

        try:
            raw = bytes(raw_content)
            if compression == 1:
                import gzip
                try:
                    eml_bytes = gzip.decompress(raw)
                except Exception:
                    try:
                        eml_bytes = zlib.decompress(raw, -zlib.MAX_WBITS)
                    except Exception:
                        try:
                            eml_bytes = zlib.decompress(raw)
                        except Exception:
                            eml_bytes = raw
            else:
                eml_bytes = raw

            blob_id = upload_blob_as(account_id, eml_bytes, user_auth)

            keywords = {"$seen": True}
            if is_starred:
                keywords["$flagged"] = True
            if is_draft:
                keywords["$draft"] = True

            received_at = sent_at.isoformat() if sent_at else None
            import_data = {
                "accountId": account_id,
                "emails": {
                    "imp1": {
                        "blobId": blob_id,
                        "mailboxIds": {inbox_id: True},
                        "keywords": keywords,
                    }
                }
            }
            if received_at:
                import_data["emails"]["imp1"]["receivedAt"] = received_at

            result = jmap_call_as([["Email/import", import_data, "0"]], user_auth)
            resp = result["methodResponses"][0][1]
            if "created" in resp and "imp1" in resp["created"]:
                imported += 1
                print(f" [{imported}] {email}: {subject or '(no subject)'}")
            else:
                errors += 1
                print(f" ERROR: {email}: {subject}: {resp.get('notCreated', {})}")
        except Exception as e:
            errors += 1
            print(f" ERROR: {email}: {subject}: {e}")

    print(f"\nMigration complete: {imported} imported, {errors} errors, {len(messages)} total")
|
||||
277
scripts/migrate-messages.py
Executable file
277
scripts/migrate-messages.py
Executable file
@@ -0,0 +1,277 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Migrate emails from La Suite Messages (PostgreSQL + SeaweedFS) to Stalwart (JMAP).
|
||||
|
||||
Reads message metadata from messages_db, downloads RFC 5322 bodies from SeaweedFS,
|
||||
and uploads each message to Stalwart via JMAP Email/import (RFC 8621 §5.4).
|
||||
|
||||
Usage:
|
||||
# Port-forward to the required services first:
|
||||
kubectl port-forward -n data svc/postgres-rw 5432:5432 &
|
||||
kubectl port-forward -n stalwart svc/stalwart 8080:8080 &
|
||||
|
||||
# Run the migration:
|
||||
python3 migrate-messages.py \
|
||||
--db-host 127.0.0.1 --db-port 5432 --db-name messages_db --db-user messages --db-password <pw> \
|
||||
--s3-endpoint http://127.0.0.1:8333 --s3-bucket sunbeam-messages \
|
||||
--s3-access-key <key> --s3-secret-key <secret> \
|
||||
--jmap-url http://127.0.0.1:8080 --jmap-user admin --jmap-password <pw>
|
||||
|
||||
Prerequisites:
|
||||
pip install psycopg2-binary boto3 requests
|
||||
|
||||
The script is idempotent: it tracks progress in a checkpoint file and skips
|
||||
already-imported messages on re-run.
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import hashlib
|
||||
import os
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
try:
|
||||
import psycopg2
|
||||
import boto3
|
||||
import requests
|
||||
except ImportError:
|
||||
print("Missing dependencies. Install with: pip install psycopg2-binary boto3 requests")
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
# Progress file that makes the migration idempotent across re-runs.
CHECKPOINT_FILE = Path("migrate-messages-checkpoint.json")


def load_checkpoint():
    """Return the saved progress map, or a fresh one if no checkpoint exists."""
    if not CHECKPOINT_FILE.exists():
        return {"imported": {}}
    return json.loads(CHECKPOINT_FILE.read_text())


def save_checkpoint(checkpoint):
    """Persist *checkpoint* to disk as pretty-printed JSON."""
    CHECKPOINT_FILE.write_text(json.dumps(checkpoint, indent=2))
|
||||
|
||||
|
||||
def get_jmap_session(jmap_url, user, password):
    """Fetch the JMAP session object and pick an account ID.

    Prefers the primary mail account (RFC 8620 ``primaryAccounts``) and
    falls back to the first listed account. Raises RuntimeError when the
    session exposes no accounts at all — previously this surfaced as a
    bare StopIteration from ``next(iter(accounts))``.
    """
    resp = requests.get(
        f"{jmap_url}/.well-known/jmap",
        auth=(user, password),
    )
    resp.raise_for_status()
    session = resp.json()
    # Use the primary account
    primary_accounts = session.get("primaryAccounts", {})
    account_id = primary_accounts.get("urn:ietf:params:jmap:mail")
    if not account_id:
        # Fallback: first account
        accounts = session.get("accounts", {})
        if not accounts:
            raise RuntimeError("JMAP session contains no accounts")
        account_id = next(iter(accounts))
    return session, account_id
|
||||
|
||||
|
||||
def jmap_get_mailboxes(jmap_url, account_id, user, password):
    """Return a ``{name: id}`` mapping of every mailbox in the account."""
    payload = {
        "using": ["urn:ietf:params:jmap:core", "urn:ietf:params:jmap:mail"],
        "methodCalls": [
            ["Mailbox/get", {"accountId": account_id}, "0"]
        ],
    }
    resp = requests.post(f"{jmap_url}/jmap", auth=(user, password), json=payload)
    resp.raise_for_status()
    listed = resp.json()["methodResponses"][0][1]["list"]
    return {entry["name"]: entry["id"] for entry in listed}
|
||||
|
||||
|
||||
def jmap_create_mailbox(jmap_url, account_id, user, password, name):
    """Create a mailbox named *name* and return its ID.

    When Stalwart does not report the mailbox as created (typically because
    one with that name already exists), fall back to looking it up by name.
    """
    payload = {
        "using": ["urn:ietf:params:jmap:core", "urn:ietf:params:jmap:mail"],
        "methodCalls": [
            ["Mailbox/set", {
                "accountId": account_id,
                "create": {"mb1": {"name": name}},
            }, "0"]
        ],
    }
    resp = requests.post(f"{jmap_url}/jmap", auth=(user, password), json=payload)
    resp.raise_for_status()
    created = resp.json()["methodResponses"][0][1].get("created", {})
    new_box = created.get("mb1")
    if new_box is not None:
        return new_box["id"]
    # Already exists — fetch it
    return jmap_get_mailboxes(jmap_url, account_id, user, password).get(name)
|
||||
|
||||
|
||||
def jmap_import_email(jmap_url, account_id, user, password, eml_bytes, mailbox_id, keywords, received_at):
    """Import a single RFC 5322 message via JMAP Email/import.

    Uploads *eml_bytes* as a blob, then imports it into *mailbox_id* with
    the given JMAP *keywords*. *received_at* may be a datetime (as returned
    by psycopg2) or a pre-formatted ISO 8601 string; falsy values omit the
    field. Returns the new Email id, or None if the server reported neither
    success nor failure. Raises RuntimeError on an explicit import failure.
    """
    # First, upload the blob
    resp = requests.post(
        f"{jmap_url}/jmap/upload/{account_id}/",
        auth=(user, password),
        headers={"Content-Type": "message/rfc822"},
        data=eml_bytes,
    )
    resp.raise_for_status()
    blob = resp.json()
    blob_id = blob["blobId"]

    # Then import it
    import_data = {
        "accountId": account_id,
        "emails": {
            "imp1": {
                "blobId": blob_id,
                "mailboxIds": {mailbox_id: True},
                "keywords": keywords,
            }
        },
    }
    if received_at:
        # A raw datetime is not JSON-serializable; JMAP expects an
        # ISO 8601 UTCDate string here (RFC 8621 §5.4).
        if hasattr(received_at, "isoformat"):
            received_at = received_at.isoformat()
        import_data["emails"]["imp1"]["receivedAt"] = received_at

    resp = requests.post(
        f"{jmap_url}/jmap",
        auth=(user, password),
        json={
            "using": ["urn:ietf:params:jmap:core", "urn:ietf:params:jmap:mail"],
            "methodCalls": [
                ["Email/import", import_data, "0"]
            ],
        },
    )
    resp.raise_for_status()
    result = resp.json()
    created = result["methodResponses"][0][1].get("created", {})
    if "imp1" in created:
        return created["imp1"]["id"]
    not_created = result["methodResponses"][0][1].get("notCreated", {})
    if "imp1" in not_created:
        err = not_created["imp1"]
        raise RuntimeError(f"JMAP import failed: {err}")
    return None
|
||||
|
||||
|
||||
def main():
    """Drive the migration: parse args, connect, inspect schema, import.

    With --dry-run, only lists the La Suite tables and their row counts so
    the operator can confirm the schema before enabling the real migration
    queries in the TODO section below.
    """
    parser = argparse.ArgumentParser(description="Migrate La Suite Messages → Stalwart JMAP")
    parser.add_argument("--db-host", default="127.0.0.1")
    parser.add_argument("--db-port", type=int, default=5432)
    parser.add_argument("--db-name", default="messages_db")
    parser.add_argument("--db-user", default="messages")
    parser.add_argument("--db-password", required=True)
    parser.add_argument("--s3-endpoint", default="http://127.0.0.1:8333")
    parser.add_argument("--s3-bucket", default="sunbeam-messages")
    parser.add_argument("--s3-access-key", required=True)
    parser.add_argument("--s3-secret-key", required=True)
    parser.add_argument("--jmap-url", default="http://127.0.0.1:8080")
    parser.add_argument("--jmap-user", default="admin")
    parser.add_argument("--jmap-password", required=True)
    parser.add_argument("--dry-run", action="store_true", help="Count messages without importing")
    args = parser.parse_args()

    # Resume state from a previous run (keys are S3 object keys already imported).
    checkpoint = load_checkpoint()

    # Connect to messages_db
    print("Connecting to messages_db...")
    conn = psycopg2.connect(
        host=args.db_host,
        port=args.db_port,
        dbname=args.db_name,
        user=args.db_user,
        password=args.db_password,
    )

    # Connect to SeaweedFS (S3-compatible endpoint; region is a placeholder).
    print("Connecting to SeaweedFS...")
    s3 = boto3.client(
        "s3",
        endpoint_url=args.s3_endpoint,
        aws_access_key_id=args.s3_access_key,
        aws_secret_access_key=args.s3_secret_key,
        region_name="us-east-1",
    )

    # Get JMAP session (skipped on --dry-run, which never talks to Stalwart).
    if not args.dry_run:
        print("Connecting to Stalwart JMAP...")
        session, account_id = get_jmap_session(args.jmap_url, args.jmap_user, args.jmap_password)
        print(f" Account: {account_id}")
        mailboxes = jmap_get_mailboxes(args.jmap_url, account_id, args.jmap_user, args.jmap_password)
        print(f" Mailboxes: {list(mailboxes.keys())}")

    # Query all messages from La Suite
    # NOTE: The actual table/column names depend on La Suite Messages' Django models.
    # You may need to adjust these queries after inspecting the actual schema.
    # Run `\dt` and `\d <table>` in psql against messages_db to find the real names.
    print("\nQuerying messages from La Suite database...")
    cur = conn.cursor()

    # List all tables to help identify the right ones
    cur.execute("""
        SELECT table_name FROM information_schema.tables
        WHERE table_schema = 'public' ORDER BY table_name;
    """)
    tables = [row[0] for row in cur.fetchall()]
    print(f" Tables: {tables}")

    if args.dry_run:
        # Just count messages per table that looks like it contains emails.
        # Identifiers are double-quoted so mixed-case or reserved-word table
        # names coming back from information_schema don't break the query.
        for table in tables:
            cur.execute(f'SELECT COUNT(*) FROM "{table}";')
            count = cur.fetchone()[0]
            if count > 0:
                print(f" {table}: {count} rows")
        print("\nDry run complete. Inspect the tables above and adjust the migration")
        print("queries in this script to match the actual La Suite Messages schema.")
        conn.close()
        return

    # TODO: Replace with actual queries once schema is inspected.
    # The migration logic below is a template — run with --dry-run first
    # to see the actual table structure, then update these queries.
    #
    # Expected flow:
    # 1. Query user accounts
    # 2. For each user, query their mailboxes/folders
    # 3. For each mailbox, query messages (S3 key, flags, received date)
    # 4. Download .eml from S3
    # 5. Upload to Stalwart via JMAP Email/import
    #
    # Example (adjust table/column names):
    #
    # cur.execute("SELECT id, email FROM auth_user;")
    # for user_id, email in cur.fetchall():
    #     cur.execute("SELECT id, name FROM mailbox WHERE user_id = %s;", (user_id,))
    #     for mb_id, mb_name in cur.fetchall():
    #         mailbox_jmap_id = ensure_mailbox(mb_name)
    #         cur.execute("SELECT s3_key, is_read, received_at FROM message WHERE mailbox_id = %s;", (mb_id,))
    #         for s3_key, is_read, received_at in cur.fetchall():
    #             if s3_key in checkpoint["imported"]:
    #                 continue
    #             eml = s3.get_object(Bucket=args.s3_bucket, Key=s3_key)["Body"].read()
    #             keywords = {"$seen": True} if is_read else {}
    #             jmap_import_email(..., eml, mailbox_jmap_id, keywords, received_at)
    #             checkpoint["imported"][s3_key] = True
    #             save_checkpoint(checkpoint)

    print("\n⚠️ Schema inspection required!")
    print("Run with --dry-run first, then update the TODO section in this script")
    print("with the correct table and column names from the La Suite Messages schema.")

    conn.close()
    print("\nDone.")


if __name__ == "__main__":
    main()
|
||||
Reference in New Issue
Block a user