mirror of https://github.com/diced/zipline.git
synced 2025-04-11 23:31:17 -05:00

feat: remove supabase datasource

This commit is contained in:
parent f408811f60
commit 4d80d8db3c

7 changed files with 13 additions and 195 deletions

@@ -1,7 +1,7 @@
 # every field in here is optional except, CORE_SECRET and CORE_DATABASE_URL.
 # if CORE_SECRET is still "changethis" then zipline will exit and tell you to change it.
 
-# if using s3/supabase make sure to uncomment or comment out the correct lines needed.
+# if using s3 make sure to uncomment or comment out the correct lines needed.
 
 CORE_RETURN_HTTPS=true
 CORE_SECRET="changethis"

@@ -27,13 +27,6 @@ DATASOURCE_LOCAL_DIRECTORY=./uploads
 # DATASOURCE_S3_FORCE_S3_PATH=false
 # DATASOURCE_S3_USE_SSL=false
 
-# or supabase
-# DATASOURCE_TYPE=supabase
-# DATASOURCE_SUPABASE_KEY=xxx
-# remember: no leading slash
-# DATASOURCE_SUPABASE_URL=https://something.supabase.co
-# DATASOURCE_SUPABASE_BUCKET=zipline
-
 UPLOADER_DEFAULT_FORMAT=RANDOM
 UPLOADER_ROUTE=/u
 UPLOADER_LENGTH=6

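For anyone who had the commented-out supabase block enabled, the remaining s3 path is the closest replacement. A hypothetical filled-in S3 section (placeholder values; the variable names are assumed to follow the DATASOURCE_S3_* mappings shown later in this diff):

DATASOURCE_TYPE=s3
DATASOURCE_S3_ACCESS_KEY_ID=xxx
DATASOURCE_S3_SECRET_ACCESS_KEY=xxx
DATASOURCE_S3_ENDPOINT=s3.amazonaws.com
DATASOURCE_S3_BUCKET=zipline
# DATASOURCE_S3_REGION=us-east-1
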
@@ -20,10 +20,9 @@ export interface ConfigCompression {
 }
 
 export interface ConfigDatasource {
-  type: 'local' | 's3' | 'supabase';
+  type: 'local' | 's3';
   local: ConfigLocalDatasource;
   s3?: ConfigS3Datasource;
-  supabase?: ConfigSupabaseDatasource;
 }
 
 export interface ConfigLocalDatasource {

@@ -41,12 +40,6 @@ export interface ConfigS3Datasource {
   region?: string;
 }
 
-export interface ConfigSupabaseDatasource {
-  url: string;
-  key: string;
-  bucket: string;
-}
-
 export interface ConfigUploader {
   default_format: string;
   route: string;

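With ConfigSupabaseDatasource gone, a datasource config narrows to two variants. A hedged sketch of a value satisfying the new ConfigDatasource (the s3 field names are inferred from the validation hunks below, not spelled out in this interface hunk):

// hypothetical example, not part of the commit
const ds: ConfigDatasource = {
  type: 's3',
  local: { directory: './uploads' },
  s3: {
    access_key_id: 'xxx',
    secret_access_key: 'xxx',
    endpoint: 's3.amazonaws.com',
    bucket: 'zipline',
    region: 'us-east-1',
  },
};
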
@@ -85,10 +85,6 @@ export default function readConfig() {
     map('DATASOURCE_S3_REGION', 'string', 'datasource.s3.region'),
     map('DATASOURCE_S3_USE_SSL', 'boolean', 'datasource.s3.use_ssl'),
 
-    map('DATASOURCE_SUPABASE_URL', 'string', 'datasource.supabase.url'),
-    map('DATASOURCE_SUPABASE_KEY', 'string', 'datasource.supabase.key'),
-    map('DATASOURCE_SUPABASE_BUCKET', 'string', 'datasource.supabase.bucket'),
-
     map('UPLOADER_DEFAULT_FORMAT', 'string', 'uploader.default_format'),
     map('UPLOADER_ROUTE', 'string', 'uploader.route'),
     map('UPLOADER_LENGTH', 'number', 'uploader.length'),

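The map() helper itself is not part of this diff; a minimal sketch of the contract its call sites imply (names and types here are assumptions, not the project's code):

// Each mapping ties one environment variable to a typed, dotted path
// in the resulting config object.
interface EnvMapping {
  env: string;
  type: 'string' | 'boolean' | 'number';
  path: string; // e.g. 'datasource.s3.region'
}

function map(env: string, type: EnvMapping['type'], path: string): EnvMapping {
  return { env, type, path };
}
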
@@ -51,7 +51,7 @@ const validator = s.object({
   }),
   datasource: s
     .object({
-      type: s.enum('local', 's3', 'supabase').default('local'),
+      type: s.enum('local', 's3').default('local'),
       local: s
         .object({
           directory: s.string.default(resolve('./uploads')).transform((v) => resolve(v)),

@@ -69,11 +69,6 @@ const validator = s.object({
         region: s.string.default('us-east-1'),
         use_ssl: s.boolean.default(false),
       }).optional,
-      supabase: s.object({
-        url: s.string,
-        key: s.string,
-        bucket: s.string,
-      }).optional,
     })
     .default({
       type: 'local',

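The `s` schema builder used here looks like @sapphire/shapeshift (an assumption; the import sits outside these hunks). If so, the trimmed enum behaves like this:

import { s } from '@sapphire/shapeshift';

const type = s.enum('local', 's3').default('local');
type.parse(undefined);  // -> 'local' (the default applies when the value is absent)
type.parse('s3');       // -> 's3'
type.parse('supabase'); // now throws: no longer an accepted datasource type
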
@@ -253,29 +248,16 @@ export default function validate(config): Config {
   logger.debug(`Attemping to validate ${JSON.stringify(config)}`);
   const validated = validator.parse(config);
   logger.debug(`Recieved config: ${JSON.stringify(validated)}`);
-  switch (validated.datasource.type) {
-    case 's3': {
-      const errors = [];
-      if (!validated.datasource.s3.access_key_id)
-        errors.push('datasource.s3.access_key_id is a required field');
-      if (!validated.datasource.s3.secret_access_key)
-        errors.push('datasource.s3.secret_access_key is a required field');
-      if (!validated.datasource.s3.bucket) errors.push('datasource.s3.bucket is a required field');
-      if (!validated.datasource.s3.endpoint) errors.push('datasource.s3.endpoint is a required field');
-      if (errors.length) throw { errors };
-      break;
-    }
-    case 'supabase': {
-      const errors = [];
-
-      if (!validated.datasource.supabase.key) errors.push('datasource.supabase.key is a required field');
-      if (!validated.datasource.supabase.url) errors.push('datasource.supabase.url is a required field');
-      if (!validated.datasource.supabase.bucket)
-        errors.push('datasource.supabase.bucket is a required field');
-      if (errors.length) throw { errors };
-
-      break;
-    }
-  }
+  if (validated.datasource.type === 's3') {
+    const errors = [];
+    if (!validated.datasource.s3.access_key_id)
+      errors.push('datasource.s3.access_key_id is a required field');
+    if (!validated.datasource.s3.secret_access_key)
+      errors.push('datasource.s3.secret_access_key is a required field');
+    if (!validated.datasource.s3.bucket) errors.push('datasource.s3.bucket is a required field');
+    if (!validated.datasource.s3.endpoint) errors.push('datasource.s3.endpoint is a required field');
+    if (errors.length) throw { errors };
+  }
 
   const reserved = new RegExp(/^\/(view|code|folder|auth|r)(\/\S*)?$|^\/(api|dashboard)(\/\S*)*/);

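Note that validation failures are thrown as a plain `{ errors }` object rather than an Error instance. A hedged sketch of how a caller could surface them (the caller shown is an assumption, not code from this commit):

try {
  validate(config);
} catch (e) {
  // validate() throws { errors: string[] } when required datasource fields are missing
  const errors = (e as { errors?: string[] }).errors;
  if (errors) errors.forEach((msg) => console.error(msg));
  else throw e;
}
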
@@ -1,5 +1,5 @@
 import config from './config';
-import { Datasource, Local, S3, Supabase } from './datasources';
+import { Datasource, Local, S3 } from './datasources';
 import Logger from './logger';
 
 const logger = Logger.get('datasource');

@@ -14,10 +14,6 @@ if (!global.datasource) {
       global.datasource = new Local(config.datasource.local.directory);
       logger.info(`using Local(${config.datasource.local.directory}) datasource`);
       break;
-    case 'supabase':
-      global.datasource = new Supabase(config.datasource.supabase);
-      logger.info(`using Supabase(${config.datasource.supabase.bucket}) datasource`);
-      break;
     default:
       throw new Error('Invalid datasource type');
   }

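After this change the initializer's switch keeps only the s3 and local cases. The s3 branch is not shown in the hunk above; reconstructed here as a sketch (its exact constructor arguments are an assumption, by analogy with the Local branch):

switch (config.datasource.type) {
  case 's3':
    global.datasource = new S3(config.datasource.s3); // assumed signature
    logger.info(`using S3(${config.datasource.s3.bucket}) datasource`);
    break;
  case 'local':
    global.datasource = new Local(config.datasource.local.directory);
    logger.info(`using Local(${config.datasource.local.directory}) datasource`);
    break;
  default:
    throw new Error('Invalid datasource type');
}
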
@@ -1,141 +0,0 @@
-import { Datasource } from '.';
-import { ConfigSupabaseDatasource } from 'lib/config/Config';
-import { guess } from 'lib/mimes';
-import Logger from 'lib/logger';
-import { Readable } from 'stream';
-
-export class Supabase extends Datasource {
-  public name = 'Supabase';
-  public logger: Logger = Logger.get('datasource::supabase');
-
-  public constructor(public config: ConfigSupabaseDatasource) {
-    super();
-  }
-
-  public async save(file: string, data: Buffer): Promise<void> {
-    const mimetype = await guess(file.split('.').pop());
-
-    const r = await fetch(`${this.config.url}/storage/v1/object/${this.config.bucket}/${file}`, {
-      method: 'POST',
-      headers: {
-        Authorization: `Bearer ${this.config.key}`,
-        'Content-Type': mimetype,
-      },
-      body: data,
-    });
-
-    const j = await r.json();
-    if (j.error) this.logger.error(`${j.error}: ${j.message}`);
-  }
-
-  public async delete(file: string): Promise<void> {
-    await fetch(`${this.config.url}/storage/v1/object/${this.config.bucket}/${file}`, {
-      method: 'DELETE',
-      headers: {
-        Authorization: `Bearer ${this.config.key}`,
-      },
-    });
-  }
-
-  public async clear(): Promise<void> {
-    try {
-      const resp = await fetch(`${this.config.url}/storage/v1/object/list/${this.config.bucket}`, {
-        method: 'POST',
-        headers: {
-          Authorization: `Bearer ${this.config.key}`,
-          'Content-Type': 'application/json',
-        },
-        body: JSON.stringify({
-          prefix: '',
-        }),
-      });
-      const objs = await resp.json();
-      if (objs.error) throw new Error(`${objs.error}: ${objs.message}`);
-
-      const res = await fetch(`${this.config.url}/storage/v1/object/${this.config.bucket}`, {
-        method: 'DELETE',
-        headers: {
-          Authorization: `Bearer ${this.config.key}`,
-          'Content-Type': 'application/json',
-        },
-        body: JSON.stringify({
-          prefixes: objs.map((x: { name: string }) => x.name),
-        }),
-      });
-
-      const j = await res.json();
-      if (j.error) throw new Error(`${j.error}: ${j.message}`);
-
-      return;
-    } catch (e) {
-      this.logger.error(e);
-    }
-  }
-
-  public async get(file: string, start: number = 0, end: number = Infinity): Promise<Readable> {
-    // get a readable stream from the request
-    const r = await fetch(`${this.config.url}/storage/v1/object/${this.config.bucket}/${file}`, {
-      method: 'GET',
-      headers: {
-        Authorization: `Bearer ${this.config.key}`,
-        Range: `bytes=${start}-${end === Infinity ? '' : end}`,
-      },
-    });
-
-    // eslint-disable-next-line @typescript-eslint/no-explicit-any
-    return Readable.fromWeb(r.body as any);
-  }
-
-  public size(file: string): Promise<number | null> {
-    return new Promise(async (res) => {
-      fetch(`${this.config.url}/storage/v1/object/list/${this.config.bucket}`, {
-        method: 'POST',
-        headers: {
-          Authorization: `Bearer ${this.config.key}`,
-          'Content-Type': 'application/json',
-        },
-        body: JSON.stringify({
-          prefix: '',
-          search: file,
-        }),
-      })
-        .then((r) => r.json())
-        .then((j) => {
-          if (j.error) {
-            this.logger.error(`${j.error}: ${j.message}`);
-            res(null);
-          }
-
-          if (j.length === 0) {
-            res(null);
-          } else {
-            res(j[0].metadata.size);
-          }
-        });
-    });
-  }
-
-  public async fullSize(): Promise<number> {
-    return new Promise((res) => {
-      fetch(`${this.config.url}/storage/v1/object/list/${this.config.bucket}`, {
-        method: 'POST',
-        headers: {
-          Authorization: `Bearer ${this.config.key}`,
-          'Content-Type': 'application/json',
-        },
-        body: JSON.stringify({
-          prefix: '',
-        }),
-      })
-        .then((r) => r.json())
-        .then((j) => {
-          if (j.error) {
-            this.logger.error(`${j.error}: ${j.message}`);
-            res(0);
-          }
-
-          res(j.reduce((a, b) => a + b.metadata.size, 0));
-        });
-    });
-  }
-}

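The deleted class implemented the project's abstract Datasource contract directly over Supabase's storage REST endpoints (/storage/v1/object/...). Inferred from the method signatures above, that contract looks roughly like this (a sketch; the actual Datasource definition is not part of this diff):

import { Readable } from 'stream';

export abstract class Datasource {
  public abstract name: string;
  public abstract save(file: string, data: Buffer): Promise<void>;
  public abstract delete(file: string): Promise<void>;
  public abstract clear(): Promise<void>;
  public abstract get(file: string, start?: number, end?: number): Promise<Readable>;
  public abstract size(file: string): Promise<number | null>;
  public abstract fullSize(): Promise<number>;
}
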
@@ -1,4 +1,3 @@
 export { Datasource } from './Datasource';
 export { Local } from './Local';
 export { S3 } from './S3';
-export { Supabase } from './Supabase';