feat(storage): sccache S3 build cache with scoped SeaweedFS identity

Add a sunbeam-sccache bucket and a dedicated sccache S3 identity scoped
to Read/Write/List/Tagging on that bucket only. Bump the volume server's
maximum volume count from 50 to 100 (the server was full, which was
blocking all new writes).
This commit is contained in:
2026-04-05 21:50:46 +01:00
parent 1206cd0fe4
commit efe574f48e
4 changed files with 8 additions and 5 deletions

View File

@@ -15,6 +15,8 @@ SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
DB_PASSWORD="localdev"
S3_ACCESS_KEY="minioadmin"
S3_SECRET_KEY="minioadmin"
SCCACHE_ACCESS_KEY="sccache-local"
SCCACHE_SECRET_KEY="sccache-local-secret"
HYDRA_SYSTEM_SECRET="local-hydra-system-secret-at-least-16"
HYDRA_COOKIE_SECRET="local-hydra-cookie-secret-at-least-16"
HYDRA_PAIRWISE_SALT="local-hydra-pairwise-salt-value-1"
@@ -201,7 +203,7 @@ else
bao kv put secret/hydra db-password='$DB_PASSWORD' system-secret='$HYDRA_SYSTEM_SECRET' cookie-secret='$HYDRA_COOKIE_SECRET' pairwise-salt='$HYDRA_PAIRWISE_SALT'
bao kv put secret/kratos db-password='$DB_PASSWORD'
bao kv put secret/gitea db-password='$DB_PASSWORD' s3-access-key='$S3_ACCESS_KEY' s3-secret-key='$S3_SECRET_KEY'
bao kv put secret/seaweedfs access-key='$S3_ACCESS_KEY' secret-key='$S3_SECRET_KEY'
bao kv put secret/seaweedfs access-key='$S3_ACCESS_KEY' secret-key='$S3_SECRET_KEY' sccache-access-key='$SCCACHE_ACCESS_KEY' sccache-secret-key='$SCCACHE_SECRET_KEY'
bao kv put secret/hive db-url='postgresql://hive:${DB_PASSWORD}@postgres-rw.data.svc.cluster.local:5432/hive_db' oidc-client-id='hive-local' oidc-client-secret='hive-local-secret'
bao kv put secret/people db-password='$DB_PASSWORD' django-secret-key='local-dev-people-django-secret-key-not-for-production'
bao kv put secret/penpot db-password='$DB_PASSWORD' secret-key='penpot-local-secret-key-not-for-production'