Korbs 2024-08-03 21:51:57 -04:00
parent ec6101d82c
commit 792498b755
504 changed files with 1811 additions and 12089 deletions

19
.gitignore vendored

@ -1,8 +1,7 @@
# build output
dist/
# generated types
.astro/
source/.astro
source/node_modules
.minpluto/generated
# dependencies
node_modules/
@ -13,18 +12,16 @@ yarn-debug.log*
yarn-error.log*
pnpm-debug.log*
# environment variables
.env
.env.production
source/.env
source/.env.production
# macOS-specific files
.DS_Store
# i18n
/src/pages/en/
/src/pages/jp/
/src/pages/ru/
/source/src/pages/en/
/source/src/pages/jp/
/source/src/pages/ru/
# other
supabase/


@ -10,4 +10,4 @@ RUN npm run build
ENV HOST=0.0.0.0
ENV PORT=1930
EXPOSE 1930
CMD node ./dist/server/entry.mjs
CMD node ./.minpluto/generated/astro/dist/server/entry.mjs

107
.minpluto/docker/supabase/.env Executable file

@ -0,0 +1,107 @@
###########
# Docker Volumes
# All folders provided in `./supabase-volume/` must be included
###########
SUPABASE_ENTIRE_VOLUME="./supabase-volume/"
############
# Secrets
# YOU MUST CHANGE THESE BEFORE GOING INTO PRODUCTION
############
POSTGRES_PASSWORD=5lgHamV44d8D1GN9LRS6b44VxREi4692
JWT_SECRET=paxDX2xE00qFa4I1r6PKe15nIkB089I4
ANON_KEY=9X43H6LKq3115zmZhZj95f2IJ104a603
SERVICE_ROLE_KEY=K2G792rYBZR0kZvw9Zp6182zAwzxsdas
DASHBOARD_USERNAME=mp_admin
DASHBOARD_PASSWORD=ez116oqVWd4wHZUQNbgW3fA0m958FN09
############
# Database - You can change these to any PostgreSQL database that has logical replication enabled.
############
# default user is postgres
POSTGRES_HOST=db
POSTGRES_DB=postgres
POSTGRES_PORT=1945
############
# API Proxy - Configuration for the Kong Reverse proxy.
############
KONG_HTTP_PORT=1942
KONG_HTTPS_PORT=1943
############
# API - Configuration for PostgREST.
############
PGRST_DB_SCHEMAS=public,storage,graphql_public
############
# Auth - Configuration for the GoTrue authentication server.
############
## General
SITE_URL=http://localhost:1930
ADDITIONAL_REDIRECT_URLS=
JWT_EXPIRY=3600
DISABLE_SIGNUP=false
API_EXTERNAL_URL=https://db.minpluto.org
## Mailer Config
MAILER_URLPATHS_CONFIRMATION="/auth/v1/verify"
MAILER_URLPATHS_INVITE="/auth/v1/verify"
MAILER_URLPATHS_RECOVERY="/auth/v1/verify"
MAILER_URLPATHS_EMAIL_CHANGE="/auth/v1/verify"
## Email auth
ENABLE_EMAIL_SIGNUP=true
ENABLE_EMAIL_AUTOCONFIRM=false
SMTP_ADMIN_EMAIL=no-reply@sudovanilla.org
SMTP_HOST=smtp.resend.com
SMTP_PORT=587
SMTP_USER=resend
SMTP_PASS=re_XLbiDxHd_9Yucx4y9EwiacKgHrRowfJVU
SMTP_SENDER_NAME=MinPluto
ENABLE_ANONYMOUS_USERS=true
## Phone auth
ENABLE_PHONE_SIGNUP=false
ENABLE_PHONE_AUTOCONFIRM=false
############
# Studio - Configuration for the Dashboard
############
STUDIO_DEFAULT_ORGANIZATION=Default Organization
STUDIO_DEFAULT_PROJECT=Default Project
STUDIO_PORT=1944
SUPABASE_PUBLIC_URL=http://localhost:8000
# Enable webp support
IMGPROXY_ENABLE_WEBP_DETECTION=true
############
# Functions - Configuration for Functions
############
# NOTE: VERIFY_JWT applies to all functions. Per-function VERIFY_JWT is not supported yet.
FUNCTIONS_VERIFY_JWT=false
############
# Logs - Configuration for Logflare
# Please refer to https://supabase.com/docs/reference/self-hosting-analytics/introduction
############
LOGFLARE_LOGGER_BACKEND_API_KEY=your-super-secret-and-long-logflare-key
# Change vector.toml sinks to reflect this change
LOGFLARE_API_KEY=your-super-secret-and-long-logflare-key
# Docker socket location - this value will differ depending on your OS
DOCKER_SOCKET_LOCATION=/var/run/docker.sock
# Google Cloud Project details
GOOGLE_PROJECT_ID=GOOGLE_PROJECT_ID
GOOGLE_PROJECT_NUMBER=GOOGLE_PROJECT_NUMBER


@ -0,0 +1,428 @@
# Usage
# Start: docker compose up
# With helpers: docker compose -f docker-compose.yml -f ./dev/docker-compose.dev.yml up
# Stop: docker compose down
# Destroy: docker compose -f docker-compose.yml -f ./dev/docker-compose.dev.yml down -v --remove-orphans
name: supabase
services:
studio:
container_name: supabase-studio
image: supabase/studio:20240701-05dfbec
restart: unless-stopped
healthcheck:
test:
[
"CMD",
"node",
"-e",
"require('http').get('http://localhost:3000/api/profile', (r) => {if (r.statusCode !== 200) throw new Error(r.statusCode)})"
]
timeout: 5s
interval: 5s
retries: 3
depends_on:
analytics:
condition: service_healthy
environment:
STUDIO_PG_META_URL: http://meta:8080
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
DEFAULT_ORGANIZATION_NAME: ${STUDIO_DEFAULT_ORGANIZATION}
DEFAULT_PROJECT_NAME: ${STUDIO_DEFAULT_PROJECT}
SUPABASE_URL: http://kong:8000
SUPABASE_PUBLIC_URL: ${SUPABASE_PUBLIC_URL}
SUPABASE_ANON_KEY: ${ANON_KEY}
SUPABASE_SERVICE_KEY: ${SERVICE_ROLE_KEY}
AUTH_JWT_SECRET: ${JWT_SECRET}
LOGFLARE_API_KEY: ${LOGFLARE_API_KEY}
LOGFLARE_URL: http://analytics:4000
NEXT_PUBLIC_ENABLE_LOGS: true
# Comment to use Big Query backend for analytics
NEXT_ANALYTICS_BACKEND_PROVIDER: postgres
# Uncomment to use Big Query backend for analytics
# NEXT_ANALYTICS_BACKEND_PROVIDER: bigquery
kong:
container_name: supabase-kong
image: kong:2.8.1
restart: unless-stopped
# https://unix.stackexchange.com/a/294837
entrypoint: bash -c 'eval "echo \"$$(cat ~/temp.yml)\"" > ~/kong.yml && /docker-entrypoint.sh kong docker-start'
ports:
- ${KONG_HTTP_PORT}:8000/tcp
- ${KONG_HTTPS_PORT}:8443/tcp
depends_on:
analytics:
condition: service_healthy
environment:
KONG_DATABASE: "off"
KONG_DECLARATIVE_CONFIG: /home/kong/kong.yml
# https://github.com/supabase/cli/issues/14
KONG_DNS_ORDER: LAST,A,CNAME
KONG_PLUGINS: request-transformer,cors,key-auth,acl,basic-auth
KONG_NGINX_PROXY_PROXY_BUFFER_SIZE: 160k
KONG_NGINX_PROXY_PROXY_BUFFERS: 64 160k
SUPABASE_ANON_KEY: ${ANON_KEY}
SUPABASE_SERVICE_KEY: ${SERVICE_ROLE_KEY}
DASHBOARD_USERNAME: ${DASHBOARD_USERNAME}
DASHBOARD_PASSWORD: ${DASHBOARD_PASSWORD}
volumes:
# https://github.com/supabase/supabase/issues/12661
- ./volumes/api/kong.yml:/home/kong/temp.yml:ro
auth:
container_name: supabase-auth
image: supabase/gotrue:v2.151.0
depends_on:
db:
# Disable this if you are using an external Postgres database
condition: service_healthy
analytics:
condition: service_healthy
healthcheck:
test:
[
"CMD",
"wget",
"--no-verbose",
"--tries=1",
"--spider",
"http://localhost:9999/health"
]
timeout: 5s
interval: 5s
retries: 3
restart: unless-stopped
environment:
GOTRUE_API_HOST: 0.0.0.0
GOTRUE_API_PORT: 9999
API_EXTERNAL_URL: ${API_EXTERNAL_URL}
GOTRUE_DB_DRIVER: postgres
GOTRUE_DB_DATABASE_URL: postgres://supabase_auth_admin:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}
GOTRUE_SITE_URL: ${SITE_URL}
GOTRUE_URI_ALLOW_LIST: ${ADDITIONAL_REDIRECT_URLS}
GOTRUE_DISABLE_SIGNUP: ${DISABLE_SIGNUP}
GOTRUE_JWT_ADMIN_ROLES: service_role
GOTRUE_JWT_AUD: authenticated
GOTRUE_JWT_DEFAULT_GROUP_NAME: authenticated
GOTRUE_JWT_EXP: ${JWT_EXPIRY}
GOTRUE_JWT_SECRET: ${JWT_SECRET}
GOTRUE_EXTERNAL_EMAIL_ENABLED: ${ENABLE_EMAIL_SIGNUP}
GOTRUE_EXTERNAL_ANONYMOUS_USERS_ENABLED: ${ENABLE_ANONYMOUS_USERS}
GOTRUE_MAILER_AUTOCONFIRM: ${ENABLE_EMAIL_AUTOCONFIRM}
# GOTRUE_MAILER_SECURE_EMAIL_CHANGE_ENABLED: true
# GOTRUE_SMTP_MAX_FREQUENCY: 1s
GOTRUE_SMTP_ADMIN_EMAIL: ${SMTP_ADMIN_EMAIL}
GOTRUE_SMTP_HOST: ${SMTP_HOST}
GOTRUE_SMTP_PORT: ${SMTP_PORT}
GOTRUE_SMTP_USER: ${SMTP_USER}
GOTRUE_SMTP_PASS: ${SMTP_PASS}
GOTRUE_SMTP_SENDER_NAME: ${SMTP_SENDER_NAME}
GOTRUE_MAILER_URLPATHS_INVITE: ${MAILER_URLPATHS_INVITE}
GOTRUE_MAILER_URLPATHS_CONFIRMATION: ${MAILER_URLPATHS_CONFIRMATION}
GOTRUE_MAILER_URLPATHS_RECOVERY: ${MAILER_URLPATHS_RECOVERY}
GOTRUE_MAILER_URLPATHS_EMAIL_CHANGE: ${MAILER_URLPATHS_EMAIL_CHANGE}
GOTRUE_EXTERNAL_PHONE_ENABLED: ${ENABLE_PHONE_SIGNUP}
GOTRUE_SMS_AUTOCONFIRM: ${ENABLE_PHONE_AUTOCONFIRM}
# Uncomment to enable custom access token hook. You'll need to create a public.custom_access_token_hook function and grant necessary permissions.
# See: https://supabase.com/docs/guides/auth/auth-hooks#hook-custom-access-token for details
# GOTRUE_HOOK_CUSTOM_ACCESS_TOKEN_ENABLED="true"
# GOTRUE_HOOK_CUSTOM_ACCESS_TOKEN_URI="pg-functions://postgres/public/custom_access_token_hook"
# GOTRUE_HOOK_MFA_VERIFICATION_ATTEMPT_ENABLED="true"
# GOTRUE_HOOK_MFA_VERIFICATION_ATTEMPT_URI="pg-functions://postgres/public/mfa_verification_attempt"
# GOTRUE_HOOK_PASSWORD_VERIFICATION_ATTEMPT_ENABLED="true"
# GOTRUE_HOOK_PASSWORD_VERIFICATION_ATTEMPT_URI="pg-functions://postgres/public/password_verification_attempt"
rest:
container_name: supabase-rest
image: postgrest/postgrest:v12.2.0
depends_on:
db:
# Disable this if you are using an external Postgres database
condition: service_healthy
analytics:
condition: service_healthy
restart: unless-stopped
environment:
PGRST_DB_URI: postgres://authenticator:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}
PGRST_DB_SCHEMAS: ${PGRST_DB_SCHEMAS}
PGRST_DB_ANON_ROLE: anon
PGRST_JWT_SECRET: ${JWT_SECRET}
PGRST_DB_USE_LEGACY_GUCS: "false"
PGRST_APP_SETTINGS_JWT_SECRET: ${JWT_SECRET}
PGRST_APP_SETTINGS_JWT_EXP: ${JWT_EXPIRY}
command: "postgrest"
realtime:
# This container name looks inconsistent but is correct because realtime constructs tenant id by parsing the subdomain
container_name: realtime-dev.supabase-realtime
image: supabase/realtime:v2.29.15
depends_on:
db:
# Disable this if you are using an external Postgres database
condition: service_healthy
analytics:
condition: service_healthy
healthcheck:
test:
[
"CMD",
"curl",
"-sSfL",
"--head",
"-o",
"/dev/null",
"-H",
"Authorization: Bearer ${ANON_KEY}",
"http://localhost:4000/api/tenants/realtime-dev/health"
]
timeout: 5s
interval: 5s
retries: 3
restart: unless-stopped
environment:
PORT: 4000
DB_HOST: ${POSTGRES_HOST}
DB_PORT: ${POSTGRES_PORT}
DB_USER: supabase_admin
DB_PASSWORD: ${POSTGRES_PASSWORD}
DB_NAME: ${POSTGRES_DB}
DB_AFTER_CONNECT_QUERY: 'SET search_path TO _realtime'
DB_ENC_KEY: supabaserealtime
API_JWT_SECRET: ${JWT_SECRET}
SECRET_KEY_BASE: UpNVntn3cDxHJpq99YMc1T1AQgQpc8kfYTuRgBiYa15BLrx8etQoXz3gZv1/u2oq
ERL_AFLAGS: -proto_dist inet_tcp
DNS_NODES: "''"
RLIMIT_NOFILE: "10000"
APP_NAME: realtime
SEED_SELF_HOST: true
# To use S3 backed storage: docker compose -f docker-compose.yml -f docker-compose.s3.yml up
storage:
container_name: supabase-storage
image: supabase/storage-api:v1.0.6
depends_on:
db:
# Disable this if you are using an external Postgres database
condition: service_healthy
rest:
condition: service_started
imgproxy:
condition: service_started
healthcheck:
test:
[
"CMD",
"wget",
"--no-verbose",
"--tries=1",
"--spider",
"http://localhost:5000/status"
]
timeout: 5s
interval: 5s
retries: 3
restart: unless-stopped
environment:
ANON_KEY: ${ANON_KEY}
SERVICE_KEY: ${SERVICE_ROLE_KEY}
POSTGREST_URL: http://rest:3000
PGRST_JWT_SECRET: ${JWT_SECRET}
DATABASE_URL: postgres://supabase_storage_admin:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}
FILE_SIZE_LIMIT: 52428800
STORAGE_BACKEND: file
FILE_STORAGE_BACKEND_PATH: /var/lib/storage
TENANT_ID: stub
# TODO: https://github.com/supabase/storage-api/issues/55
REGION: stub
GLOBAL_S3_BUCKET: stub
ENABLE_IMAGE_TRANSFORMATION: "true"
IMGPROXY_URL: http://imgproxy:5001
volumes:
- ./volumes/storage:/var/lib/storage:z
imgproxy:
container_name: supabase-imgproxy
image: darthsim/imgproxy:v3.8.0
healthcheck:
test: [ "CMD", "imgproxy", "health" ]
timeout: 5s
interval: 5s
retries: 3
environment:
IMGPROXY_BIND: ":5001"
IMGPROXY_LOCAL_FILESYSTEM_ROOT: /
IMGPROXY_USE_ETAG: "true"
IMGPROXY_ENABLE_WEBP_DETECTION: ${IMGPROXY_ENABLE_WEBP_DETECTION}
volumes:
- ./volumes/storage:/var/lib/storage:z
meta:
container_name: supabase-meta
image: supabase/postgres-meta:v0.83.2
depends_on:
db:
# Disable this if you are using an external Postgres database
condition: service_healthy
analytics:
condition: service_healthy
restart: unless-stopped
environment:
PG_META_PORT: 8080
PG_META_DB_HOST: ${POSTGRES_HOST}
PG_META_DB_PORT: ${POSTGRES_PORT}
PG_META_DB_NAME: ${POSTGRES_DB}
PG_META_DB_USER: supabase_admin
PG_META_DB_PASSWORD: ${POSTGRES_PASSWORD}
functions:
container_name: supabase-edge-functions
image: supabase/edge-runtime:v1.54.10
restart: unless-stopped
depends_on:
analytics:
condition: service_healthy
environment:
JWT_SECRET: ${JWT_SECRET}
SUPABASE_URL: http://kong:8000
SUPABASE_ANON_KEY: ${ANON_KEY}
SUPABASE_SERVICE_ROLE_KEY: ${SERVICE_ROLE_KEY}
SUPABASE_DB_URL: postgresql://postgres:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}
# TODO: Allow configuring VERIFY_JWT per function. This PR might help: https://github.com/supabase/cli/pull/786
VERIFY_JWT: "${FUNCTIONS_VERIFY_JWT}"
volumes:
- ./volumes/functions:/home/deno/functions:Z
command:
- start
- --main-service
- /home/deno/functions/main
analytics:
container_name: supabase-analytics
image: supabase/logflare:1.4.0
healthcheck:
test: [ "CMD", "curl", "http://localhost:4000/health" ]
timeout: 5s
interval: 5s
retries: 10
restart: unless-stopped
depends_on:
db:
# Disable this if you are using an external Postgres database
condition: service_healthy
# Uncomment to use Big Query backend for analytics
# volumes:
# - type: bind
# source: ${PWD}/gcloud.json
# target: /opt/app/rel/logflare/bin/gcloud.json
# read_only: true
environment:
LOGFLARE_NODE_HOST: 127.0.0.1
DB_USERNAME: supabase_admin
DB_DATABASE: ${POSTGRES_DB}
DB_HOSTNAME: ${POSTGRES_HOST}
DB_PORT: ${POSTGRES_PORT}
DB_PASSWORD: ${POSTGRES_PASSWORD}
DB_SCHEMA: _analytics
LOGFLARE_API_KEY: ${LOGFLARE_API_KEY}
LOGFLARE_SINGLE_TENANT: true
LOGFLARE_SUPABASE_MODE: true
LOGFLARE_MIN_CLUSTER_SIZE: 1
# Comment variables to use Big Query backend for analytics
POSTGRES_BACKEND_URL: postgresql://supabase_admin:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}
POSTGRES_BACKEND_SCHEMA: _analytics
LOGFLARE_FEATURE_FLAG_OVERRIDE: multibackend=true
# Uncomment to use Big Query backend for analytics
# GOOGLE_PROJECT_ID: ${GOOGLE_PROJECT_ID}
# GOOGLE_PROJECT_NUMBER: ${GOOGLE_PROJECT_NUMBER}
ports:
- 4000:4000
# Comment out everything below this point if you are using an external Postgres database
db:
container_name: supabase-db
image: supabase/postgres:15.1.1.61
healthcheck:
test: pg_isready -U postgres -h localhost
interval: 5s
timeout: 5s
retries: 10
depends_on:
vector:
condition: service_healthy
command:
- postgres
- -c
- config_file=/etc/postgresql/postgresql.conf
- -c
- log_min_messages=fatal # prevents Realtime polling queries from appearing in logs
restart: unless-stopped
ports:
# Pass down internal port because it's set dynamically by other services
- ${POSTGRES_PORT}:${POSTGRES_PORT}
environment:
POSTGRES_HOST: /var/run/postgresql
PGPORT: ${POSTGRES_PORT}
POSTGRES_PORT: ${POSTGRES_PORT}
PGPASSWORD: ${POSTGRES_PASSWORD}
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
PGDATABASE: ${POSTGRES_DB}
POSTGRES_DB: ${POSTGRES_DB}
JWT_SECRET: ${JWT_SECRET}
JWT_EXP: ${JWT_EXPIRY}
volumes:
- ./volumes/db/realtime.sql:/docker-entrypoint-initdb.d/migrations/99-realtime.sql:Z
# Must be superuser to create event trigger
- ./volumes/db/webhooks.sql:/docker-entrypoint-initdb.d/init-scripts/98-webhooks.sql:Z
# Must be superuser to alter reserved role
- ./volumes/db/roles.sql:/docker-entrypoint-initdb.d/init-scripts/99-roles.sql:Z
# Initialize the database settings with JWT_SECRET and JWT_EXP
- ./volumes/db/jwt.sql:/docker-entrypoint-initdb.d/init-scripts/99-jwt.sql:Z
# PGDATA directory is persisted between restarts
- ./volumes/db/data:/var/lib/postgresql/data:Z
# Changes required for Analytics support
- ./volumes/db/logs.sql:/docker-entrypoint-initdb.d/migrations/99-logs.sql:Z
# Use named volume to persist pgsodium decryption key between restarts
- db-config:/etc/postgresql-custom
vector:
container_name: supabase-vector
image: timberio/vector:0.28.1-alpine
healthcheck:
test:
[
"CMD",
"wget",
"--no-verbose",
"--tries=1",
"--spider",
"http://vector:9001/health"
]
timeout: 5s
interval: 5s
retries: 3
volumes:
- ./volumes/logs/vector.yml:/etc/vector/vector.yml:ro
- ${DOCKER_SOCKET_LOCATION}:/var/run/docker.sock:ro
environment:
LOGFLARE_API_KEY: ${LOGFLARE_API_KEY}
command: [ "--config", "etc/vector/vector.yml" ]
volumes:
db-config:


@ -0,0 +1,241 @@
_format_version: '2.1'
_transform: true
###
### Consumers / Users
###
consumers:
- username: DASHBOARD
- username: anon
keyauth_credentials:
- key: $SUPABASE_ANON_KEY
- username: service_role
keyauth_credentials:
- key: $SUPABASE_SERVICE_KEY
###
### Access Control List
###
acls:
- consumer: anon
group: anon
- consumer: service_role
group: admin
###
### Dashboard credentials
###
basicauth_credentials:
- consumer: DASHBOARD
username: $DASHBOARD_USERNAME
password: $DASHBOARD_PASSWORD
###
### API Routes
###
services:
## Open Auth routes
- name: auth-v1-open
url: http://auth:9999/verify
routes:
- name: auth-v1-open
strip_path: true
paths:
- /auth/v1/verify
plugins:
- name: cors
- name: auth-v1-open-callback
url: http://auth:9999/callback
routes:
- name: auth-v1-open-callback
strip_path: true
paths:
- /auth/v1/callback
plugins:
- name: cors
- name: auth-v1-open-authorize
url: http://auth:9999/authorize
routes:
- name: auth-v1-open-authorize
strip_path: true
paths:
- /auth/v1/authorize
plugins:
- name: cors
## Secure Auth routes
- name: auth-v1
_comment: 'GoTrue: /auth/v1/* -> http://auth:9999/*'
url: http://auth:9999/
routes:
- name: auth-v1-all
strip_path: true
paths:
- /auth/v1/
plugins:
- name: cors
- name: key-auth
config:
hide_credentials: false
- name: acl
config:
hide_groups_header: true
allow:
- admin
- anon
## Secure REST routes
- name: rest-v1
_comment: 'PostgREST: /rest/v1/* -> http://rest:3000/*'
url: http://rest:3000/
routes:
- name: rest-v1-all
strip_path: true
paths:
- /rest/v1/
plugins:
- name: cors
- name: key-auth
config:
hide_credentials: true
- name: acl
config:
hide_groups_header: true
allow:
- admin
- anon
## Secure GraphQL routes
- name: graphql-v1
_comment: 'PostgREST: /graphql/v1/* -> http://rest:3000/rpc/graphql'
url: http://rest:3000/rpc/graphql
routes:
- name: graphql-v1-all
strip_path: true
paths:
- /graphql/v1
plugins:
- name: cors
- name: key-auth
config:
hide_credentials: true
- name: request-transformer
config:
add:
headers:
- Content-Profile:graphql_public
- name: acl
config:
hide_groups_header: true
allow:
- admin
- anon
## Secure Realtime routes
- name: realtime-v1-ws
_comment: 'Realtime: /realtime/v1/* -> ws://realtime:4000/socket/*'
url: http://realtime-dev.supabase-realtime:4000/socket
protocol: ws
routes:
- name: realtime-v1-ws
strip_path: true
paths:
- /realtime/v1/
plugins:
- name: cors
- name: key-auth
config:
hide_credentials: false
- name: acl
config:
hide_groups_header: true
allow:
- admin
- anon
- name: realtime-v1-rest
_comment: 'Realtime: /realtime/v1/* -> ws://realtime:4000/socket/*'
url: http://realtime-dev.supabase-realtime:4000/api
protocol: http
routes:
- name: realtime-v1-rest
strip_path: true
paths:
- /realtime/v1/api
plugins:
- name: cors
- name: key-auth
config:
hide_credentials: false
- name: acl
config:
hide_groups_header: true
allow:
- admin
- anon
## Storage routes: the storage server manages its own auth
- name: storage-v1
_comment: 'Storage: /storage/v1/* -> http://storage:5000/*'
url: http://storage:5000/
routes:
- name: storage-v1-all
strip_path: true
paths:
- /storage/v1/
plugins:
- name: cors
## Edge Functions routes
- name: functions-v1
_comment: 'Edge Functions: /functions/v1/* -> http://functions:9000/*'
url: http://functions:9000/
routes:
- name: functions-v1-all
strip_path: true
paths:
- /functions/v1/
plugins:
- name: cors
## Analytics routes
- name: analytics-v1
_comment: 'Analytics: /analytics/v1/* -> http://logflare:4000/*'
url: http://analytics:4000/
routes:
- name: analytics-v1-all
strip_path: true
paths:
- /analytics/v1/
## Secure Database routes
- name: meta
_comment: 'pg-meta: /pg/* -> http://pg-meta:8080/*'
url: http://meta:8080/
routes:
- name: meta-all
strip_path: true
paths:
- /pg/
plugins:
- name: key-auth
config:
hide_credentials: false
- name: acl
config:
hide_groups_header: true
allow:
- admin
## Protected Dashboard - catch all remaining routes
- name: dashboard
_comment: 'Studio: /* -> http://studio:3000/*'
url: http://studio:3000/
routes:
- name: dashboard-all
strip_path: true
paths:
- /
plugins:
- name: cors
- name: basic-auth
config:
hide_credentials: true


@ -0,0 +1,5 @@
\set jwt_secret `echo "$JWT_SECRET"`
\set jwt_exp `echo "$JWT_EXP"`
ALTER DATABASE postgres SET "app.settings.jwt_secret" TO :'jwt_secret';
ALTER DATABASE postgres SET "app.settings.jwt_exp" TO :'jwt_exp';


@ -0,0 +1,4 @@
\set pguser `echo "$POSTGRES_USER"`
create schema if not exists _analytics;
alter schema _analytics owner to :pguser;


@ -0,0 +1,4 @@
\set pguser `echo "$POSTGRES_USER"`
create schema if not exists _realtime;
alter schema _realtime owner to :pguser;


@ -0,0 +1,8 @@
-- NOTE: change to your own passwords for production environments
\set pgpass `echo "$POSTGRES_PASSWORD"`
ALTER USER authenticator WITH PASSWORD :'pgpass';
ALTER USER pgbouncer WITH PASSWORD :'pgpass';
ALTER USER supabase_auth_admin WITH PASSWORD :'pgpass';
ALTER USER supabase_functions_admin WITH PASSWORD :'pgpass';
ALTER USER supabase_storage_admin WITH PASSWORD :'pgpass';


@ -0,0 +1,208 @@
BEGIN;
-- Create pg_net extension
CREATE EXTENSION IF NOT EXISTS pg_net SCHEMA extensions;
-- Create supabase_functions schema
CREATE SCHEMA supabase_functions AUTHORIZATION supabase_admin;
GRANT USAGE ON SCHEMA supabase_functions TO postgres, anon, authenticated, service_role;
ALTER DEFAULT PRIVILEGES IN SCHEMA supabase_functions GRANT ALL ON TABLES TO postgres, anon, authenticated, service_role;
ALTER DEFAULT PRIVILEGES IN SCHEMA supabase_functions GRANT ALL ON FUNCTIONS TO postgres, anon, authenticated, service_role;
ALTER DEFAULT PRIVILEGES IN SCHEMA supabase_functions GRANT ALL ON SEQUENCES TO postgres, anon, authenticated, service_role;
-- supabase_functions.migrations definition
CREATE TABLE supabase_functions.migrations (
version text PRIMARY KEY,
inserted_at timestamptz NOT NULL DEFAULT NOW()
);
-- Initial supabase_functions migration
INSERT INTO supabase_functions.migrations (version) VALUES ('initial');
-- supabase_functions.hooks definition
CREATE TABLE supabase_functions.hooks (
id bigserial PRIMARY KEY,
hook_table_id integer NOT NULL,
hook_name text NOT NULL,
created_at timestamptz NOT NULL DEFAULT NOW(),
request_id bigint
);
CREATE INDEX supabase_functions_hooks_request_id_idx ON supabase_functions.hooks USING btree (request_id);
CREATE INDEX supabase_functions_hooks_h_table_id_h_name_idx ON supabase_functions.hooks USING btree (hook_table_id, hook_name);
COMMENT ON TABLE supabase_functions.hooks IS 'Supabase Functions Hooks: Audit trail for triggered hooks.';
CREATE FUNCTION supabase_functions.http_request()
RETURNS trigger
LANGUAGE plpgsql
AS $function$
DECLARE
request_id bigint;
payload jsonb;
url text := TG_ARGV[0]::text;
method text := TG_ARGV[1]::text;
headers jsonb DEFAULT '{}'::jsonb;
params jsonb DEFAULT '{}'::jsonb;
timeout_ms integer DEFAULT 1000;
BEGIN
IF url IS NULL OR url = 'null' THEN
RAISE EXCEPTION 'url argument is missing';
END IF;
IF method IS NULL OR method = 'null' THEN
RAISE EXCEPTION 'method argument is missing';
END IF;
IF TG_ARGV[2] IS NULL OR TG_ARGV[2] = 'null' THEN
headers = '{"Content-Type": "application/json"}'::jsonb;
ELSE
headers = TG_ARGV[2]::jsonb;
END IF;
IF TG_ARGV[3] IS NULL OR TG_ARGV[3] = 'null' THEN
params = '{}'::jsonb;
ELSE
params = TG_ARGV[3]::jsonb;
END IF;
IF TG_ARGV[4] IS NULL OR TG_ARGV[4] = 'null' THEN
timeout_ms = 1000;
ELSE
timeout_ms = TG_ARGV[4]::integer;
END IF;
CASE
WHEN method = 'GET' THEN
SELECT http_get INTO request_id FROM net.http_get(
url,
params,
headers,
timeout_ms
);
WHEN method = 'POST' THEN
payload = jsonb_build_object(
'old_record', OLD,
'record', NEW,
'type', TG_OP,
'table', TG_TABLE_NAME,
'schema', TG_TABLE_SCHEMA
);
SELECT http_post INTO request_id FROM net.http_post(
url,
payload,
params,
headers,
timeout_ms
);
ELSE
RAISE EXCEPTION 'method argument % is invalid', method;
END CASE;
INSERT INTO supabase_functions.hooks
(hook_table_id, hook_name, request_id)
VALUES
(TG_RELID, TG_NAME, request_id);
RETURN NEW;
END
$function$;
-- Supabase super admin
DO
$$
BEGIN
IF NOT EXISTS (
SELECT 1
FROM pg_roles
WHERE rolname = 'supabase_functions_admin'
)
THEN
CREATE USER supabase_functions_admin NOINHERIT CREATEROLE LOGIN NOREPLICATION;
END IF;
END
$$;
GRANT ALL PRIVILEGES ON SCHEMA supabase_functions TO supabase_functions_admin;
GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA supabase_functions TO supabase_functions_admin;
GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA supabase_functions TO supabase_functions_admin;
ALTER USER supabase_functions_admin SET search_path = "supabase_functions";
ALTER table "supabase_functions".migrations OWNER TO supabase_functions_admin;
ALTER table "supabase_functions".hooks OWNER TO supabase_functions_admin;
ALTER function "supabase_functions".http_request() OWNER TO supabase_functions_admin;
GRANT supabase_functions_admin TO postgres;
-- Remove unused supabase_pg_net_admin role
DO
$$
BEGIN
IF EXISTS (
SELECT 1
FROM pg_roles
WHERE rolname = 'supabase_pg_net_admin'
)
THEN
REASSIGN OWNED BY supabase_pg_net_admin TO supabase_admin;
DROP OWNED BY supabase_pg_net_admin;
DROP ROLE supabase_pg_net_admin;
END IF;
END
$$;
-- pg_net grants when extension is already enabled
DO
$$
BEGIN
IF EXISTS (
SELECT 1
FROM pg_extension
WHERE extname = 'pg_net'
)
THEN
GRANT USAGE ON SCHEMA net TO supabase_functions_admin, postgres, anon, authenticated, service_role;
ALTER function net.http_get(url text, params jsonb, headers jsonb, timeout_milliseconds integer) SECURITY DEFINER;
ALTER function net.http_post(url text, body jsonb, params jsonb, headers jsonb, timeout_milliseconds integer) SECURITY DEFINER;
ALTER function net.http_get(url text, params jsonb, headers jsonb, timeout_milliseconds integer) SET search_path = net;
ALTER function net.http_post(url text, body jsonb, params jsonb, headers jsonb, timeout_milliseconds integer) SET search_path = net;
REVOKE ALL ON FUNCTION net.http_get(url text, params jsonb, headers jsonb, timeout_milliseconds integer) FROM PUBLIC;
REVOKE ALL ON FUNCTION net.http_post(url text, body jsonb, params jsonb, headers jsonb, timeout_milliseconds integer) FROM PUBLIC;
GRANT EXECUTE ON FUNCTION net.http_get(url text, params jsonb, headers jsonb, timeout_milliseconds integer) TO supabase_functions_admin, postgres, anon, authenticated, service_role;
GRANT EXECUTE ON FUNCTION net.http_post(url text, body jsonb, params jsonb, headers jsonb, timeout_milliseconds integer) TO supabase_functions_admin, postgres, anon, authenticated, service_role;
END IF;
END
$$;
-- Event trigger for pg_net
CREATE OR REPLACE FUNCTION extensions.grant_pg_net_access()
RETURNS event_trigger
LANGUAGE plpgsql
AS $$
BEGIN
IF EXISTS (
SELECT 1
FROM pg_event_trigger_ddl_commands() AS ev
JOIN pg_extension AS ext
ON ev.objid = ext.oid
WHERE ext.extname = 'pg_net'
)
THEN
GRANT USAGE ON SCHEMA net TO supabase_functions_admin, postgres, anon, authenticated, service_role;
ALTER function net.http_get(url text, params jsonb, headers jsonb, timeout_milliseconds integer) SECURITY DEFINER;
ALTER function net.http_post(url text, body jsonb, params jsonb, headers jsonb, timeout_milliseconds integer) SECURITY DEFINER;
ALTER function net.http_get(url text, params jsonb, headers jsonb, timeout_milliseconds integer) SET search_path = net;
ALTER function net.http_post(url text, body jsonb, params jsonb, headers jsonb, timeout_milliseconds integer) SET search_path = net;
REVOKE ALL ON FUNCTION net.http_get(url text, params jsonb, headers jsonb, timeout_milliseconds integer) FROM PUBLIC;
REVOKE ALL ON FUNCTION net.http_post(url text, body jsonb, params jsonb, headers jsonb, timeout_milliseconds integer) FROM PUBLIC;
GRANT EXECUTE ON FUNCTION net.http_get(url text, params jsonb, headers jsonb, timeout_milliseconds integer) TO supabase_functions_admin, postgres, anon, authenticated, service_role;
GRANT EXECUTE ON FUNCTION net.http_post(url text, body jsonb, params jsonb, headers jsonb, timeout_milliseconds integer) TO supabase_functions_admin, postgres, anon, authenticated, service_role;
END IF;
END;
$$;
COMMENT ON FUNCTION extensions.grant_pg_net_access IS 'Grants access to pg_net';
DO
$$
BEGIN
IF NOT EXISTS (
SELECT 1
FROM pg_event_trigger
WHERE evtname = 'issue_pg_net_access'
) THEN
CREATE EVENT TRIGGER issue_pg_net_access ON ddl_command_end WHEN TAG IN ('CREATE EXTENSION')
EXECUTE PROCEDURE extensions.grant_pg_net_access();
END IF;
END
$$;
INSERT INTO supabase_functions.migrations (version) VALUES ('20210809183423_update_grants');
ALTER function supabase_functions.http_request() SECURITY DEFINER;
ALTER function supabase_functions.http_request() SET search_path = supabase_functions;
REVOKE ALL ON FUNCTION supabase_functions.http_request() FROM PUBLIC;
GRANT EXECUTE ON FUNCTION supabase_functions.http_request() TO postgres, anon, authenticated, service_role;
COMMIT;


@ -0,0 +1,16 @@
// Follow this setup guide to integrate the Deno language server with your editor:
// https://deno.land/manual/getting_started/setup_your_environment
// This enables autocomplete, go to definition, etc.
import { serve } from "https://deno.land/std@0.177.1/http/server.ts"
serve(async () => {
return new Response(
`"Hello from Edge Functions!"`,
{ headers: { "Content-Type": "application/json" } },
)
})
// To invoke:
// curl 'http://localhost:<KONG_HTTP_PORT>/functions/v1/hello' \
// --header 'Authorization: Bearer <anon/service_role API key>'


@ -0,0 +1,94 @@
import { serve } from 'https://deno.land/std@0.131.0/http/server.ts'
import * as jose from 'https://deno.land/x/jose@v4.14.4/index.ts'
console.log('main function started')
const JWT_SECRET = Deno.env.get('JWT_SECRET')
const VERIFY_JWT = Deno.env.get('VERIFY_JWT') === 'true'
function getAuthToken(req: Request) {
const authHeader = req.headers.get('authorization')
if (!authHeader) {
throw new Error('Missing authorization header')
}
const [bearer, token] = authHeader.split(' ')
if (bearer !== 'Bearer') {
throw new Error(`Auth header is not 'Bearer {token}'`)
}
return token
}
async function verifyJWT(jwt: string): Promise<boolean> {
const encoder = new TextEncoder()
const secretKey = encoder.encode(JWT_SECRET)
try {
await jose.jwtVerify(jwt, secretKey)
} catch (err) {
console.error(err)
return false
}
return true
}
serve(async (req: Request) => {
if (req.method !== 'OPTIONS' && VERIFY_JWT) {
try {
const token = getAuthToken(req)
const isValidJWT = await verifyJWT(token)
if (!isValidJWT) {
return new Response(JSON.stringify({ msg: 'Invalid JWT' }), {
status: 401,
headers: { 'Content-Type': 'application/json' },
})
}
} catch (e) {
console.error(e)
return new Response(JSON.stringify({ msg: e.toString() }), {
status: 401,
headers: { 'Content-Type': 'application/json' },
})
}
}
const url = new URL(req.url)
const { pathname } = url
const path_parts = pathname.split('/')
const service_name = path_parts[1]
if (!service_name || service_name === '') {
const error = { msg: 'missing function name in request' }
return new Response(JSON.stringify(error), {
status: 400,
headers: { 'Content-Type': 'application/json' },
})
}
const servicePath = `/home/deno/functions/${service_name}`
console.error(`serving the request with ${servicePath}`)
const memoryLimitMb = 150
const workerTimeoutMs = 1 * 60 * 1000
const noModuleCache = false
const importMapPath = null
const envVarsObj = Deno.env.toObject()
const envVars = Object.keys(envVarsObj).map((k) => [k, envVarsObj[k]])
try {
const worker = await EdgeRuntime.userWorkers.create({
servicePath,
memoryLimitMb,
workerTimeoutMs,
noModuleCache,
importMapPath,
envVars,
})
return await worker.fetch(req)
} catch (e) {
const error = { msg: e.toString() }
return new Response(JSON.stringify(error), {
status: 500,
headers: { 'Content-Type': 'application/json' },
})
}
})


@ -0,0 +1,232 @@
api:
enabled: true
address: 0.0.0.0:9001
sources:
docker_host:
type: docker_logs
exclude_containers:
- supabase-vector
transforms:
project_logs:
type: remap
inputs:
- docker_host
source: |-
.project = "default"
.event_message = del(.message)
.appname = del(.container_name)
del(.container_created_at)
del(.container_id)
del(.source_type)
del(.stream)
del(.label)
del(.image)
del(.host)
del(.stream)
router:
type: route
inputs:
- project_logs
route:
kong: '.appname == "supabase-kong"'
auth: '.appname == "supabase-auth"'
rest: '.appname == "supabase-rest"'
realtime: '.appname == "supabase-realtime"'
storage: '.appname == "supabase-storage"'
functions: '.appname == "supabase-functions"'
db: '.appname == "supabase-db"'
# Ignores non nginx errors since they are related with kong booting up
kong_logs:
type: remap
inputs:
- router.kong
source: |-
req, err = parse_nginx_log(.event_message, "combined")
if err == null {
.timestamp = req.timestamp
.metadata.request.headers.referer = req.referer
.metadata.request.headers.user_agent = req.agent
.metadata.request.headers.cf_connecting_ip = req.client
.metadata.request.method = req.method
.metadata.request.path = req.path
.metadata.request.protocol = req.protocol
.metadata.response.status_code = req.status
}
if err != null {
abort
}
# Ignores non nginx errors since they are related with kong booting up
kong_err:
type: remap
inputs:
- router.kong
source: |-
.metadata.request.method = "GET"
.metadata.response.status_code = 200
parsed, err = parse_nginx_log(.event_message, "error")
if err == null {
.timestamp = parsed.timestamp
.severity = parsed.severity
.metadata.request.host = parsed.host
.metadata.request.headers.cf_connecting_ip = parsed.client
url, err = split(parsed.request, " ")
if err == null {
.metadata.request.method = url[0]
.metadata.request.path = url[1]
.metadata.request.protocol = url[2]
}
}
if err != null {
abort
}
# Gotrue logs are structured json strings which frontend parses directly. But we keep metadata for consistency.
auth_logs:
type: remap
inputs:
- router.auth
source: |-
parsed, err = parse_json(.event_message)
if err == null {
.metadata.timestamp = parsed.time
.metadata = merge!(.metadata, parsed)
}
# PostgREST logs are structured so we separate timestamp from message using regex
rest_logs:
type: remap
inputs:
- router.rest
source: |-
parsed, err = parse_regex(.event_message, r'^(?P<time>.*): (?P<msg>.*)$')
if err == null {
.event_message = parsed.msg
.timestamp = to_timestamp!(parsed.time)
.metadata.host = .project
}
# Realtime logs are structured so we parse the severity level using regex (ignore time because it has no date)
realtime_logs:
type: remap
inputs:
- router.realtime
source: |-
.metadata.project = del(.project)
.metadata.external_id = .metadata.project
parsed, err = parse_regex(.event_message, r'^(?P<time>\d+:\d+:\d+\.\d+) \[(?P<level>\w+)\] (?P<msg>.*)$')
if err == null {
.event_message = parsed.msg
.metadata.level = parsed.level
}
# Storage logs may contain json objects so we parse them for completeness
storage_logs:
type: remap
inputs:
- router.storage
source: |-
.metadata.project = del(.project)
.metadata.tenantId = .metadata.project
parsed, err = parse_json(.event_message)
if err == null {
.event_message = parsed.msg
.metadata.level = parsed.level
.metadata.timestamp = parsed.time
.metadata.context[0].host = parsed.hostname
.metadata.context[0].pid = parsed.pid
}
# Postgres logs some messages to stderr which we map to warning severity level
db_logs:
type: remap
inputs:
- router.db
source: |-
.metadata.host = "db-default"
.metadata.parsed.timestamp = .timestamp
parsed, err = parse_regex(.event_message, r'.*(?P<level>INFO|NOTICE|WARNING|ERROR|LOG|FATAL|PANIC?):.*', numeric_groups: true)
if err != null || parsed == null {
.metadata.parsed.error_severity = "info"
}
if parsed != null {
.metadata.parsed.error_severity = parsed.level
}
if .metadata.parsed.error_severity == "info" {
.metadata.parsed.error_severity = "log"
}
.metadata.parsed.error_severity = upcase!(.metadata.parsed.error_severity)
sinks:
logflare_auth:
type: 'http'
inputs:
- auth_logs
encoding:
codec: 'json'
method: 'post'
request:
retry_max_duration_secs: 10
uri: 'http://analytics:4000/api/logs?source_name=gotrue.logs.prod&api_key=${LOGFLARE_API_KEY?LOGFLARE_API_KEY is required}'
logflare_realtime:
type: 'http'
inputs:
- realtime_logs
encoding:
codec: 'json'
method: 'post'
request:
retry_max_duration_secs: 10
uri: 'http://analytics:4000/api/logs?source_name=realtime.logs.prod&api_key=${LOGFLARE_API_KEY?LOGFLARE_API_KEY is required}'
logflare_rest:
type: 'http'
inputs:
- rest_logs
encoding:
codec: 'json'
method: 'post'
request:
retry_max_duration_secs: 10
uri: 'http://analytics:4000/api/logs?source_name=postgREST.logs.prod&api_key=${LOGFLARE_API_KEY?LOGFLARE_API_KEY is required}'
logflare_db:
type: 'http'
inputs:
- db_logs
encoding:
codec: 'json'
method: 'post'
request:
retry_max_duration_secs: 10
# We must route the sink through kong because ingesting logs before logflare is fully initialised will
# lead to broken queries from studio. This works by the assumption that containers are started in the
# following order: vector > db > logflare > kong
uri: 'http://kong:8000/analytics/v1/api/logs?source_name=postgres.logs&api_key=${LOGFLARE_API_KEY?LOGFLARE_API_KEY is required}'
logflare_functions:
type: 'http'
inputs:
- router.functions
encoding:
codec: 'json'
method: 'post'
request:
retry_max_duration_secs: 10
uri: 'http://analytics:4000/api/logs?source_name=deno-relay-logs&api_key=${LOGFLARE_API_KEY?LOGFLARE_API_KEY is required}'
logflare_storage:
type: 'http'
inputs:
- storage_logs
encoding:
codec: 'json'
method: 'post'
request:
retry_max_duration_secs: 10
uri: 'http://analytics:4000/api/logs?source_name=storage.logs.prod.2&api_key=${LOGFLARE_API_KEY?LOGFLARE_API_KEY is required}'
logflare_kong:
type: 'http'
inputs:
- kong_logs
- kong_err
encoding:
codec: 'json'
method: 'post'
request:
retry_max_duration_secs: 10
uri: 'http://analytics:4000/api/logs?source_name=cloudflare.logs.prod&api_key=${LOGFLARE_API_KEY?LOGFLARE_API_KEY is required}'

86
.minpluto/docs/API.md Normal file

@ -0,0 +1,86 @@
## API
### Generic
**Language**
`/api/language/` [`en` | `jp`]
**Telemetry**
`/api/telemtry/` [`enable` | `disable`]
### Player
**Milieu**
`/api/player/milieu/` [`enable` | `disable`]
Enable or disable the Milieu mode.
### Account System
**Register**
`/api/auth/register`
*Required*
- `name`
- `email`
- `password`
When a user registers an account, it is created with the following data:
```JSON
{
"name": "PROVIDED_NAME",
"ui_theme": "Default",
"ui_color": "Default",
"ui_zen": "false",
"ui_sidebar_size": "Normal",
"invidous_data": "https://yt.sudovanilla.org",
"invidous_media": "https://yt.sudovanilla.org",
"safetwitch_data": "https://twitch.sudovanilla.org",
"safetwitch_media": "https://twitch.sudovanilla.org",
"image_proxy": "https://ipx.sudovanilla.org",
"player_type": "Zorn"
}
```
The end-user is then redirected to `/?=welcome`, which is meant to show an onboarding screen (planned; it currently does nothing).
End-users must verify their email address before they can log in.
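For illustration, a registration request might look like the following sketch. The HTTP method and JSON body shape are assumptions (only the required field names are documented above), and the base URL reuses the default port 1930 from the Dockerfile.
```ts
// Hypothetical registration call; adjust the base URL to your instance.
const registerResponse = await fetch('http://localhost:1930/api/auth/register', {
  method: 'POST', // assumed; the notes above do not state the method
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    name: 'example-user',
    email: 'user@example.com',
    password: 'correct-horse-battery-staple',
  }),
})
console.log(registerResponse.status)
```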
**Confirm**
`/api/auth/confirm`
*Required*
- `email`
- `code`
As mentioned above, end-users must verify their email before logging in. After registration, end-users are brought to a "Confirm Email" page.
The email address in question must be provided, along with the code sent to that email by the SMTP server.
**Login**
`/api/auth/login`
*Required*
- `email`
- `password`
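Under the same assumptions as the registration sketch above, a login call would send the two documented fields:
```ts
// Hypothetical login call; method, body shape, and base URL are assumptions.
const loginResponse = await fetch('http://localhost:1930/api/auth/login', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    email: 'user@example.com',
    password: 'correct-horse-battery-staple',
  }),
})
// Session handling (cookie vs. token) is not documented here, so only the status is checked.
console.log(loginResponse.ok)
```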
**Logout**
`/api/auth/logout`
**Update Name**
`/api/update/name`
*Required*
- `name`
**Update Email**
`/api/update/email`
*Required*
- `email`


@ -0,0 +1,68 @@
## Compatibility
### Package Managers
| Package Manager | Install Packages | Run Project |
|---------------------|------------------|-------------|
| NPM | ❌ | ❌ |
| PNPM | ❌ | ❌ |
| Bun | ✅ | ✅ |
| Yarn v1 | ✅ | ❌ |
| Yarn v3 | ✅ | ❌ |
| Yarn v4 | ✅ | ❌ |
### Deployment
| Software | Build | Run |
|---------------------|-------|------|
| Bun (Local)         | ✅    | ✅   |
| Node (Local)        | ✅    | ✅   |
| Docker (Local)      | ✅    | Mix  |
| Cloudflare Pages    | 🔘    | 🔘   |
> MinPluto can use a lot of bandwidth, which most services will charge extra for. So this list is small and limited to certain services that allow unlimited bandwidth.
### Web Browsers
| Browser | Live Streams |Player | CSS | JavaScript | Account System | Embed |
|--------------------|--------------|-------|-----|------------|----------------|-------|
| **Other Browsers**|
| FOSS Browser | ❌ | ✅ | ❌ | ✅ | 🔘 | 🔘 |
| Ladybird*3 | ❌ | ❌ | ✅ | ✅ | 🔘 | 🔘 |
| **WebKit Browsers**|
| Safariᴸᴬ | ✅ | ✅ | ❌ | ✅ | 🔘 | 🔘 |
| GNOME Web | ✅ | ❌*2 | ✅ | ✅ | 🔘 | 🔘 |
| DuckDuckGoᴸᴬ | ✅ | ✅ | ✅ | ✅ | 🔘 | 🔘 |
| **Electron Browsers**|
| Min | ✅ | ✅ | ✅ | ✅ | 🔘 | 🔘 |
| **Chromium Browsers**|
| Braveᴸᴬ | ✅ | ✅ | ✅ | ✅ | 🔘 | 🔘 |
| Chromium | 🔘 | 🔘 | 🔘 | 🔘 | 🔘 | 🔘 |
| Google Chrome | 🔘 | 🔘 | 🔘 | 🔘 | 🔘 | 🔘 |
| Microsoft Edge | 🔘 | 🔘 | 🔘 | 🔘 | 🔘 | 🔘 |
| Opera | 🔘 | 🔘 | 🔘 | 🔘 | 🔘 | 🔘 |
| Vivaldi | ✅ | ✅ | ✅ | ✅ | 🔘 | 🔘 |
| Yandex | ✅ | ✅ | ✅ | ✅ | 🔘 | 🔘 |
| **Firefox Browsers**|
| ~~Dot Browser~~*1 | 🔘 | 🔘 | 🔘 | 🔘 | 🔘 | 🔘 |
| Falkon | ✅ | ✅ | ✅ | ✅ | 🔘 | 🔘 |
| Firefox | ✅ | ✅ | ✅ | ✅ | 🔘 | 🔘 |
| Floorp | ✅ | ✅ | ✅ | ✅ | 🔘 | 🔘 |
| IceCat | ❌ | ✅ | ❌ | 🔘 | 🔘 | 🔘 |
| Librewolf | ✅ | ✅ | ✅ | ✅ | 🔘 | 🔘 |
| Mull | ❌ | ✅ | ✅ | ✅ | 🔘 | 🔘 |
| Mullvad | ✅ | ❌ | ✅ | ✅ | 🔘 | 🔘 |
| Tor | 🔘 | 🔘 | 🔘 | 🔘 | 🔘 | 🔘 |
| Waterfox | ✅ | ✅ | ✅ | ✅ | 🔘 | 🔘 |
| **Outdated Browsers**|
| Internet Explorer | ❌ | ✅ | ❌ | ✅ | 🔘 | 🔘 |
> 🔘: Untested
> All browsers are tested as-is, out of the box. Some functions marked ❌ can probably be made to work if you tweak the settings or use a built-in extension.
> For Firefox-based browsers: timestamps may show wrong data on first load; everything works correctly after a reload.
> ᴸᴬ: Tested in Lambdatest
> *1: This web browser is unstable to operate
> *2: Seeking doesn't work
> *3: Ladybird is a new web browser that is still incomplete. It is not expected to work correctly with MinPluto at the moment.

30
.minpluto/docs/FAQ.md Normal file

@ -0,0 +1,30 @@
## End-User FAQ
### Can I use MinPluto on this device?
MinPluto will run on any device that has a built-in web browser. However, MinPluto does not have a dedicated app for devices such as Roku, Amazon Firestick, Google TV, other Smart TV devices, or gaming consoles.
For mobile users, you can install MinPluto from your browser as an app if the browser offers that option.
### Will there be an Android/iOS app?
For now, MinPluto is planned to be only a web application that can be accessed via a web browser. There are currently no plans to build a mobile application.
### What data does MinPluto collect?
MinPluto by itself does not collect any data about you or any other user.
If you decide to opt in to telemetry, the Telemetry page will list what is and is not logged.
However, please keep in mind that on other MinPluto instances, the instance operator can log your IP address like any other server could.
### What is a YouTube/Twitch frontend?
A frontend is its own UI and service that serves data from another platform such as YouTube, Twitch, X, Reddit, etc.
### Why use a different frontend and not use the real thing?
There are multiple reasons to use a frontend like MinPluto: privacy, to avoid being tracked by invasive companies; speed, since frontend alternatives are commonly lightweight and avoid the bloated pages these platforms serve; and ads, since frontends don't serve ads the way these platforms do.
### Why is the player different for Twitch Streams?
To play live streams in MinPluto, a technology known as HLS (HTTP Live Streaming) has to be added to a supported player, in this case Video.js. The browser's built-in player will not work, and HLS has not yet been tested with MinPluto's custom player.
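As a rough sketch of that approach, Video.js can be pointed at an HLS manifest like the snippet below. The element id and manifest URL are placeholders; Video.js 7+ plays `application/x-mpegURL` sources through its bundled HTTP streaming engine.
```ts
import videojs from 'video.js'

// Placeholder element id and stream URL; substitute the real HLS manifest.
const player = videojs('live-player', { controls: true, liveui: true })

player.src({
  src: 'https://streams.example.com/channel/index.m3u8', // hypothetical manifest
  type: 'application/x-mpegURL', // tells Video.js to use its HLS engine
})
```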


@ -0,0 +1,32 @@
## Requirements
### Hardware
| | Minimum | Recommended |
|-------------------|--------------------|------------------|
| CPU Cores | 2 | 4 |
| Memory | 2GB*1 | 8GB |
| Storage | 20GB | 60GB |
| Network Speed | 300Mbps/Upload | 1Gbps/Upload |
| Traffic | 20TB Monthly | Unlimited/No Cap |
> *1: Keep at least 512MB free for the operating system.
<details>
<summary>Recommended VPS</summary>
<br>
| Host | Plan | Monthly Pricing | Yearly Pricing |
|------------------|-------------|-------------------|------------------|
| BuyVM | SLICE 4096 | $15.00 | $180.00 |
| Regxa | EVA3 | $15.00 | $171.00 |
> A provider with unlimited bandwidth is preferred.
> All prices are listed in USD.
</details>
### Software
The package manager used with MinPluto must be [Bun](https://bun.sh/), since it appears to be the most functional option for this project. Attempting to use another package manager such as Yarn or PNPM may cause issues; see [Package Managers](#package-managers) in the [Compatibility](#compatibility) section. There appear to be some issues related to both `@astrojs/vue` and `@iconoir/vue`.
You'll still need [NodeJS](https://nodejs.org/en/download/package-manager) v21 or newer installed for translations to work properly.
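As a minimal setup sketch using the scripts defined in `package.json` (the clone URL is the repository listed there; adjust paths to your own checkout):
```sh
# Install dependencies and run MinPluto with Bun.
git clone https://ark.sudovanilla.org/MinPluto/MinPluto
cd MinPluto
bun install

# Generate translations (NodeJS v21+ must be available), then start the dev server.
bun run translate
bun run start
```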

171
.minpluto/docs/TODO.md Normal file

@ -0,0 +1,171 @@
## To Do
- [ ] i18n
- [x] API
- [ ] Languages
- [x] English
- [x] Japanese
- [ ] French
- [ ] Spanish
- [x] Russian
- [x] Data
- [x] Track Events (Users should be opted-out by default, OpenPanel will be used)
- [x] Make privacy policy adaptive
- [x] Mobile Support
- [ ] Server Configuration (.env)
- [ ] Quality
- [ ] Allow 1080p
- [ ] Allow 4K
- [ ] Allow 8K
- [ ] Account System (Based on [Account System Demo](https://ark.sudovanilla.org/MinPluto/Account-System-Demo))
- [x] Use Supabase Library
- [ ] Create Pages:
- [ ] Subscription Feed
- [ ] History (Maybe, maybe not)
- [x] Login
- [x] Register
- [x] Account
- [ ] Preferences
- [ ] Delete
- [ ] Anonymous Account Creation
- [x] Email Confirmation Code
- [ ] Ability to:
- [ ] Update Data
- [x] Username
- [ ] Email
- [ ] Password
- [ ] Delete Account
- [ ] API
- [x] `/api/update/name`
- [ ] `/api/update/email`
- [ ] `/api/update/password`
- [ ] `/api/update/preference/ui/theme`
- [ ] `/api/update/preference/ui/color-scheme`
- [ ] `/api/update/preference/ui/zen`
- [ ] `/api/update/preference/ui/sidebar/size`
- [ ] `/api/update/preference/player-type`
- [ ] `/api/update/preference/image-proxy`
- [ ] `/api/update/preference/instance/invidious/media`
- [ ] `/api/update/preference/instance/invidious/data`
- [ ] `/api/update/preference/instance/safetwitch/media`
- [ ] `/api/update/preference/instance/safetwitch/data`
- [x] `/api/auth/login`
- [x] `/api/auth/register`
- [ ] `/api/auth/delete`
- [x] `/api/auth/confirm`
- [x] `/api/auth/logout`
- [ ] `/api/anon/create`
- [ ] `/api/anon/delete`
- [ ] `/api/anon/signout`
- [ ] `/api/subscription/add`
- [ ] `/api/subscription/remove`
- [ ] Revamp Design and Layout ([UI Library Repo](https://ark.sudovanilla.org/MinPluto/UI-Library/))
- [ ] Use Header over Sidebar
- [ ] Generic
- [ ] Dropdown
- [ ] Buttons
- [ ] Toggle
- [ ] Inputs
- [ ] Radio Buttons
- [ ] Toast
- [ ] Tooltip
- [ ] Hovercard (For Creators) [Example](https://www.radix-vue.com/components/hover-card)
- [ ] Scrollable Areas
- [ ] KBD
- [ ] Empty State
- [ ] Create Footer
- [ ] Make more re-usable components
- [ ] Watch Page
- [ ] Video Player
- [ ] Title, Actions, and Description Area
- [ ] Comments/Chat
- [ ] Related Content
- [ ] Dialogs/Modal
- [ ] Share
- [ ] Download
- [ ] Error
- [ ] Search
- [ ] Creator
- [ ] Video/Stream
- [ ] Playlist
- [ ] Paginations
- [ ] Discovery Pages
- [ ] Animation
- [ ] Automotive
- [x] Comedy
- [ ] Courses
- [ ] Educational
- [ ] Family Friendly
- [ ] Fashion
- [ ] Fitness
- [ ] Food
- [x] Games
- [ ] Music
- [ ] News
- [ ] Podcasts
- [ ] Science
- [ ] Sports
- [x] Tech
- [ ] Web Series
- [ ] Twitch Support
- [x] API
- [x] Video Player HLS Support (Required to play streams)
- [ ] Polycentric Chat
- [ ] Categories
- [ ] Games
- [ ] Music
- [ ] Just Chatting
- [ ] IRL
- [ ] Sports
- [ ] Animals
- [ ] Creativity
- [ ] Inline Player
- [ ] Dedicated Redirect Page
- [ ] Should pull from instances list
- [ ] YouTube Playlists
- [ ] RSS
- [ ] Component for Search
- [ ] Add to Watch Page
- [ ] Search
- [ ] Revamp Experience
- [ ] Filters
- [x] Auto Complete
- [ ] Video Player
- [x] Dash Format (1080p/4K/8K)
- [ ] 360° Support
- [ ] Mobile Gestures
- [x] Embed Page
- [ ] Download
- [ ] Share
- [ ] Report
- [ ] Controls
- [ ] Play/Pause
- [ ] Volume
- [ ] Fullscreen
- [ ] Closed Captions
- [ ] Quality Changer
- [ ] Theater Mode
- [ ] Cast
- [ ] Video Page
- [ ] ~~Important Information Card ([Example](https://img.sudovanilla.org/pXqzT10.png))~~ Controversial, do not proceed
- [ ] Viewers Note (Like Community Notes, in [experimental phase at YouTube](https://blog.youtube/news-and-events/new-ways-to-offer-viewers-more-context/))
- [ ] Toggle:
- [ ] Audio Only
- [ ] Autoplay
- [ ] User Settings
- [ ] Invidious Server Selection
- [ ] [SafeTwitch](https://codeberg.org/SafeTwitch/safetwitch) Backend Server Selection
- [ ] Platform Selection (YouTube/Twitch)
- [ ] Video Player
- [ ] Toggle:
- [ ] Proxy
- [ ] Theme
- [ ] Preferred Language (For audio track on YouTube)
- [ ] Custom CSS/JS
- [ ] Switch Auth Servers
- [ ] Import [SafeTwitch](https://codeberg.org/SafeTwitch/safetwitch) Settings
- [ ] Import/Export Twitch/[SafeTwitch](https://codeberg.org/SafeTwitch/safetwitch) Subscription ([SafeTwitch](https://codeberg.org/SafeTwitch/safetwitch) needs to be looked into further)
- [ ] Import/Export YouTube/FreeTube/NewPipe Subscription
- [ ] Import/Export MinPluto User Settings
- [ ] Feed Page
- [ ] Universal Feed (YouTube and Twitch)
- [ ] Subscription Management

287
README.md

@ -1,287 +1,18 @@
![MinPluto Banner](https://img.sudovanilla.org/3iPweoL.png)
# MinPluto
MinPluto is a modern privacy frontend for YouTube and Twitch(planned) giving your personal total anonymity. It provides additional features such as an account system, no annoying ads, multilingual support, custom video player, and additional customization.
MinPluto is a modern privacy frontend for YouTube and Twitch, giving you total personal anonymity. It provides additional features such as an account system, no annoying ads, multilingual support, a custom video player, and additional customization.
___
## Requirements
### Hardware
| | Minimum | Recommended |
|-------------------|--------------------|------------------|
| CPU Cores | 2 | 4 |
| Memory | 2GB*1 | 8GB |
| Storage | 20GB | 60GB |
| Network Speed | 300Mbps/Upload | 1Gbps/Upload |
| Traffic | 20TB Monthly | Unlimited/No Cap |
> *1: At least have 512Mb free for the operating system.
<details>
<summary>Recommended VPS</summary>
<br>
| Host | Plan | Monthly Pricing | Yearly Pricing |
|------------------|-------------|-------------------|------------------|
| BuyVM | SLICE 4096 | $15.00 | $180.00 |
| Regxa | EVA3 | $15.00 | $171.00 |
> A provider with unlimited bandwidth is preferred.
> All prices are listed in USD.
</details>
### Software
The package manager that you need to use with MinPluto must be [Bun](https://bun.sh/) since it appears to be the most functional option for this project. Attempting to use another package manager like Yarn or PNPM may cause issues, view [Package Managers](#package-managers) in the [Compatibility](#compatibility) section. There seems to be some issues related to both `@astrojs/vue` and `@iconoir/vue`.
You'll still need to have [NodeJS](https://nodejs.org/en/download/package-manager) v21 or newer install in order for translations to work properly.
___
## Compatibility
### Package Managers
| Package Manager | Install Packages | Run Project |
|---------------------|------------------|-------------|
| NPM | ❌ | ❌ |
| PNPM | ❌ | ❌ |
| Bun | ✅ | ✅ |
| Yarn v1 | ✅ | ❌ |
| Yarn v3 | ✅ | ❌ |
| Yarn v4 | ✅ | ❌ |
### Deployment
| Software | Build | Run |
|---------------------|-------|------|
Bun (Local) | ✅ | ✅ |
Node (Local) | ✅ | ✅ |
Docker (Local) | ✅ | Mix |
Cloudflare Pages | 🔘 | 🔘 |
> MinPluto can use a lot of bandwidth, which most services will charge extra for. So this list is small and limited to certain services that allow unlimited bandwidth.
### Web Browsers
| Browser | Live Streams |Player | CSS | JavaScript | Account System | Embed |
|--------------------|--------------|-------|-----|------------|----------------|-------|
| **Other Browsers**|
| FOSS Browser | ❌ | ✅ | ❌ | ✅ | 🔘 | 🔘 |
| Ladybird*3 | ❌ | ❌ | ✅ | ✅ | 🔘 | 🔘 |
| **WebKit Browsers**|
| Safariᴸᴬ | ✅ | ✅ | ❌ | ✅ | 🔘 | 🔘 |
| GNOME Web | ✅ | ❌*2 | ✅ | ✅ | 🔘 | 🔘 |
| DuckDuckGoᴸᴬ | ✅ | ✅ | ✅ | ✅ | 🔘 | 🔘 |
| **Electron Browsers**|
| Min | ✅ | ✅ | ✅ | ✅ | 🔘 | 🔘 |
| **Chromium Browsers**|
| Braveᴸᴬ | ✅ | ✅ | ✅ | ✅ | 🔘 | 🔘 |
| Chromium | 🔘 | 🔘 | 🔘 | 🔘 | 🔘 | 🔘 |
| Google Chrome | 🔘 | 🔘 | 🔘 | 🔘 | 🔘 | 🔘 |
| Microsoft Edge | 🔘 | 🔘 | 🔘 | 🔘 | 🔘 | 🔘 |
| Opera | 🔘 | 🔘 | 🔘 | 🔘 | 🔘 | 🔘 |
| Vivaldi | ✅ | ✅ | ✅ | ✅ | 🔘 | 🔘 |
| Yandex | ✅ | ✅ | ✅ | ✅ | 🔘 | 🔘 |
| **Firefox Browsers**|
| ~~Dot Browser~~*1 | 🔘 | 🔘 | 🔘 | 🔘 | 🔘 | 🔘 |
| Falkon | ✅ | ✅ | ✅ | ✅ | 🔘 | 🔘 |
| Firefox | ✅ | ✅ | ✅ | ✅ | 🔘 | 🔘 |
| Floorp | ✅ | ✅ | ✅ | ✅ | 🔘 | 🔘 |
| IceCat | ❌ | ✅ | ❌ | 🔘 | 🔘 | 🔘 |
| Librewolf | ✅ | ✅ | ✅ | ✅ | 🔘 | 🔘 |
| Mull | ❌ | ✅ | ✅ | ✅ | 🔘 | 🔘 |
| Mullvad | ✅ | ❌ | ✅ | ✅ | 🔘 | 🔘 |
| Tor | 🔘 | 🔘 | 🔘 | 🔘 | 🔘 | 🔘 |
| Waterfox | ✅ | ✅ | ✅ | ✅ | 🔘 | 🔘 |
| **Outdated Browsers**|
| Internet Explorer | ❌ | ✅ | ❌ | ✅ | 🔘 | 🔘 |
> 🔘: Untested
> All browsers are tested as-is out of box. Some functions with ❌ can probably work if you tweak the settings or interact with a built-in extension.
> For Firefox-based browsers: Timestamp may show wrong data on first load, everything works correctly after a reload and so on.
> ᴸᴬ: Tested in Lambdatest
> *1: This web browser is unstable to operate
> *2: Seeking doesn't work
> *3: Ladybird is a new web browser that is uncompleted. This browser is not expected to work correctly for MinPluto at the moment.
___
## To Do
- [ ] i18n
- [x] API
- [ ] Languages
- [x] English
- [x] Japanese
- [ ] French
- [ ] Spanish
- [x] Russian
- [x] Data
- [x] Track Events (Users should be opted-out by default, OpenPanel will be used)
- [x] Make privacy policy adaptive
- [x] Mobile Support
- [ ] Server Configuration (.env)
- [ ] Quality
- [ ] Allow 1080p
- [ ] Allow 4K
- [ ] Allow 8K
- [ ] Account System (Based on [Account System Demo](https://ark.sudovanilla.org/MinPluto/Account-System-Demo))
- [x] Use Supabase Library
- [ ] Create Pages:
- [ ] Subscription Feed
- [ ] History (Maybe, maybe not)
- [x] Login
- [x] Register
- [x] Account
- [ ] Preferences
- [ ] Delete
- [ ] Anomymous Account Creation
- [x] Email Confirmation Code
- [ ] Ability to:
- [ ] Update Data
- [x] Username
- [ ] Email
- [ ] Pasword
- [ ] Delete Account
- [ ] API
- [x] `/api/update/name`
- [ ] `/api/update/email`
- [ ] `/api/update/password`
- [ ] `/api/update/preference/ui/theme`
- [ ] `/api/update/preference/ui/color-scheme`
- [ ] `/api/update/preference/ui/zen`
- [ ] `/api/update/preference/ui/sidebar/size`
- [ ] `/api/update/preference/player-type`
- [ ] `/api/update/preference/image-proxy`
- [ ] `/api/update/preference/instance/invidious/media`
- [ ] `/api/update/preference/instance/invidious/data`
- [ ] `/api/update/preference/instance/safetwitch/media`
- [ ] `/api/update/preference/instance/safetwitch/data`
- [x] `/api/auth/login`
- [x] `/api/auth/register`
- [ ] `/api/auth/delete`
- [x] `/api/auth/confirm`
- [x] `/api/auth/logout`
- [ ] `/api/anon/create`
- [ ] `/api/anon/delete`
- [ ] `/api/anon/signout`
- [ ] `/api/subscription/add`
- [ ] `/api/subscription/remove`
- [ ] Revamp Design and Layout ([UI Library Repo](https://ark.sudovanilla.org/MinPluto/UI-Library/))
- [ ] Use Header over Sidebar
- [ ] Generic
- [ ] Dropdown
- [ ] Buttons
- [ ] Toggle
- [ ] Inputs
- [ ] Radio Buttons
- [ ] Toast
- [ ] Tooltip
- [ ] Hovercard (For Creators) [Example](https://www.radix-vue.com/components/hover-card)
- [ ] Scrollable Areas
- [ ] KBD
- [ ] Empty State
- [ ] Create Footer
- [ ] Make more re-usable components
- [ ] Watch Page
- [ ] Video Player
- [ ] Title, Actions, and Description Area
- [ ] Comments/Chat
- [ ] Related Content
- [ ] Dialogs/Modal
- [ ] Share
- [ ] Download
- [ ] Error
- [ ] Search
- [ ] Creator
- [ ] Video/Stream
- [ ] Playlist
- [ ] Paginations
- [ ] Discovery Pages
- [ ] Animation
- [ ] Automotive
- [x] Comedy
- [ ] Courses
- [ ] Educational
- [ ] Family Friendly
- [ ] Fashion
- [ ] Fitness
- [ ] Food
- [x] Games
- [ ] Music
- [ ] News
- [ ] Podcasts
- [ ] Science
- [ ] Sports
- [x] Tech
- [ ] Web Series
- [ ] Twitch Support
- [x] API
- [x] Video Player HLS Support (Required to play streams)
- [ ] Polycentric Chat
- [ ] Categories
- [ ] Games
- [ ] Music
- [ ] Just Chatting
- [ ] IRL
- [ ] Sports
- [ ] Animals
- [ ] Creativity
- [ ] Inline Player
- [ ] Dedicated Redirect Page
- [ ] Should pull from instances list
- [ ] YouTube Playlists
- [ ] RSS
- [ ] Component for Search
- [ ] Add to Watch Page
- [ ] Search
- [ ] Revamp Experience
- [ ] Filters
- [x] Auto Complete
- [ ] Video Player
- [x] Dash Format (1080p/4K/8K)
- [ ] 360° Support
- [ ] Mobile Gestures
- [x] Embed Page
- [ ] Download
- [ ] Share
- [ ] Report
- [ ] Controls
- [ ] Play/Pause
- [ ] Volume
- [ ] Fullscreen
- [ ] Close Captians
- [ ] Quality Changer
- [ ] Theater Mode
- [ ] Cast
- [ ] Video Page
- [ ] ~~Important Infomation Card ([Example](https://img.sudovanilla.org/pXqzT10.png))~~ Controversial, do not proceed
- [ ] Viewers Note (Like Community Notes, in [experimental phase at YouTube](https://blog.youtube/news-and-events/new-ways-to-offer-viewers-more-context/))
- [ ] Toggle:
- [ ] Audio Only
- [ ] Autoplay
- [ ] User Settings
- [ ] Invidious Server Selection
- [ ] [SafeTwitch](https://codeberg.org/SafeTwitch/safetwitch) Backend Server Selection
- [ ] Platform Selection (YouTube/Twitch)
- [ ] Video Player
- [ ] Toggle:
- [ ] Proxy
- [ ] Theme
- [ ] Preferred Language (For audio track on YouTube)
- [ ] Custom CSS/JS
- [ ] Switch Auth Servers
- [ ] Import [SafeTwitch](https://codeberg.org/SafeTwitch/safetwitch) Settings
- [ ] Import/Export Twitch/[SafeTwitch](https://codeberg.org/SafeTwitch/safetwitch) Subscription ([SafeTwitch](https://codeberg.org/SafeTwitch/safetwitch) needs to be looked into further)
- [ ] Import/Export YouTube/FreeTube/NewPipe Subscription
- [ ] Import/Export MinPluto User Settings
- [ ] Feed Page
- [ ] Universal Feed (YouTube and Twitch)
- [ ] Subscription Management
## Docs
- [FAQ](/.minpluto/docs/FAQ.md)
- [API](/.minpluto/docs/API.md)
- [Requirements](/.minpluto/docs/Requirements.md)
- [Compatibility](/.minpluto/docs/Compatibility.md)
- Develop, Build, Run
- Selfhosting
- Player
___

BIN
bun.lockb

Binary file not shown.

11778
package-lock.json generated

File diff suppressed because it is too large.


@ -1,8 +1,8 @@
{
"name": "minpluto",
"version": "2024.08.02",
"description": "An open source frontend alternative to YouTube.",
"repository": "https://sudovanilla.org/MinPluto/MinPluto",
"version": "2024.08.03",
"description": "An open source frontend alternative to YouTube and Twitch.",
"repository": "https://ark.sudovanilla.org/MinPluto/MinPluto",
"author": "Korbs <korbs@sudovanilla.org>",
"license": "AGPL-3.0-or-later",
"bugs": {
@ -20,12 +20,16 @@
"frontend",
"proxy",
"ytdl",
"invidious"
"invidious",
"safetwitch",
"twitch",
"live",
"stream"
],
"scripts": {
"start": "astro dev --host",
"translate": "astro-i18next generate",
"build": "astro build"
"start": "astro dev --config ./source/astro.mjs --host",
"translate": "astro-i18next --config ./source/translate.mjs generate",
"build": "astro build --config ./source/astro.js"
},
"dependencies": {
"@astrojs/mdx": "^3.1.2",


@ -1,13 +1,17 @@
import { defineConfig } from 'astro/config';
import node from '@astrojs/node';
import vue from '@astrojs/vue';
import astroI18next from "astro-i18next";
import mdx from '@astrojs/mdx';
import { defineConfig } from 'astro/config'
import node from '@astrojs/node'
import vue from '@astrojs/vue'
import astroI18next from "astro-i18next"
import mdx from '@astrojs/mdx'
// https://astro.build/config
export default defineConfig({
// Project Structure
publicDir: './src/public/',
cacheDir: './.minpluto/generated/astro/cache/',
outDir: './.minpluto/generated/astro/dist/',
publicDir: './source/src/public',
root: './source',
srcDir: './source/src',
// Integrations and Plugins
integrations: [mdx(), vue(), astroI18next()],
// Security
@ -34,4 +38,4 @@ export default defineConfig({
devToolbar: {
enabled: false
}
});
})


@ -1,6 +1,6 @@
/// <reference types="astro/client" />
declare namespace App {
interface Locals {
email: string;
email: string
}
}

1
source/src/env.d.ts vendored Normal file

@ -0,0 +1 @@
/// <reference types="astro/client" />

Some files were not shown because too many files have changed in this diff.