refactor: clean up datasource stuff

diced 2022-06-20 10:25:22 -07:00
parent 1e2b8efb13
commit 8fea0cbe77
No known key found for this signature in database
GPG key ID: 370BD1BA142842D1
19 changed files with 40 additions and 29 deletions

View file

@@ -22,11 +22,11 @@ const { rm } = require('fs/promises');
     'src/server/validateConfig.ts',
     'src/lib/logger.ts',
     'src/lib/readConfig.ts',
-    'src/lib/datasource/datasource.ts',
-    'src/lib/datasource/index.ts',
-    'src/lib/datasource/Local.ts',
-    'src/lib/datasource/S3.ts',
-    'src/lib/ds.ts',
+    'src/lib/datasources/Datasource.ts',
+    'src/lib/datasources/index.ts',
+    'src/lib/datasources/Local.ts',
+    'src/lib/datasources/S3.ts',
+    'src/lib/datasource.ts',
     'src/lib/config.ts',
   ],
   format: 'cjs',

View file

@@ -3,7 +3,7 @@
   "version": "3.4.5",
   "license": "MIT",
   "scripts": {
-    "dev": "node esbuild.config.js && REACT_EDITOR=code-insiders NODE_ENV=development node dist/server",
+    "dev": "node esbuild.config.js && REACT_EDITOR=code NODE_ENV=development node dist/server",
     "build": "npm-run-all build:server build:schema build:next",
     "build:server": "node esbuild.config.js",
     "build:next": "next build",
@@ -66,4 +66,4 @@
     "url": "https://github.com/diced/zipline.git"
   },
   "packageManager": "yarn@3.2.1"
 }

View file

@@ -1,16 +1,16 @@
 import config from './config';
-import { S3, Local } from './datasource';
+import { S3, Local } from './datasources';
 import Logger from './logger';

 if (!global.datasource) {
   switch (config.datasource.type) {
     case 's3':
-      Logger.get('datasource').info(`Using S3(${config.datasource.s3.bucket}) datasource`);
       global.datasource = new S3(config.datasource.s3);
+      Logger.get('datasource').info(`Using S3(${config.datasource.s3.bucket}) datasource`);
       break;
     case 'local':
-      Logger.get('datasource').info(`Using local(${config.datasource.local.directory}) datasource`);
       global.datasource = new Local(config.datasource.local.directory);
+      Logger.get('datasource').info(`Using local(${config.datasource.local.directory}) datasource`);
       break;
     default:
       throw new Error('Invalid datasource type');
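
For reference, the renamed lib/datasource module is what the API routes and server further down import as a default export. A minimal consumer sketch, assuming only that default export and the get() signature from this commit (the helper name and its buffering logic are illustrative, not part of the change):

import datasource from 'lib/datasource';

// hypothetical helper: read a stored file fully into memory via the datasource singleton
export async function readStoredFile(name: string): Promise<Buffer> {
  const stream = datasource.get(name); // Readable, per the abstract Datasource API
  const chunks: Buffer[] = [];
  for await (const chunk of stream) chunks.push(Buffer.from(chunk));
  return Buffer.concat(chunks);
}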

View file

@@ -1,4 +0,0 @@
-export { Datasource } from './datasource';
-export { Local } from './Local';
-export { S3 } from './S3';

View file

@@ -2,7 +2,7 @@ import { Readable } from 'stream';
 export abstract class Datasource {
   public name: string;

   public abstract save(file: string, data: Buffer): Promise<void>;
   public abstract delete(file: string): Promise<void>;
   public abstract get(file: string): Readable;
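
A minimal in-memory implementation sketch of the abstract class, covering only the members visible in this hunk (the Memory class and its Map-backed storage are illustrative and not part of this commit):

import { Readable } from 'stream';
import { Datasource } from './';

// hypothetical subclass: stores files in a Map instead of on disk or in S3
export class Memory extends Datasource {
  public name: string = 'memory';
  private files = new Map<string, Buffer>();

  public async save(file: string, data: Buffer): Promise<void> {
    this.files.set(file, data);
  }

  public async delete(file: string): Promise<void> {
    this.files.delete(file);
  }

  public get(file: string): Readable {
    // wrap the buffer in an array so Readable.from emits it as a single chunk
    return Readable.from([this.files.get(file) ?? Buffer.alloc(0)]);
  }
}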

View file

@@ -1,7 +1,7 @@
 import { createReadStream, existsSync, ReadStream } from 'fs';
 import { readdir, rm, stat, writeFile } from 'fs/promises';
 import { join } from 'path';
-import { Datasource } from './datasource';
+import { Datasource } from './';

 export class Local extends Datasource {
   public name: string = 'local';

View file

@@ -1,4 +1,4 @@
-import { Datasource } from './datasource';
+import { Datasource } from './';
 import AWS from 'aws-sdk';
 import { Readable } from 'stream';
 import { ConfigS3Datasource } from 'lib/types';

View file

@@ -0,0 +1,3 @@
+export { Datasource } from './Datasource';
+export { Local } from './Local';
+export { S3 } from './S3';

View file

@@ -21,7 +21,7 @@ export default class Logger {
     this.name = name;
   }

-  info(...args) {
+  info(...args: any[]) {
     console.log(this.formatMessage(LoggerLevel.INFO, this.name, args.join(' ')));
   }
@@ -29,7 +29,7 @@ export default class Logger {
     console.log(this.formatMessage(LoggerLevel.ERROR, this.name, args.map(error => error.stack ?? error).join(' ')));
   }

-  formatMessage(level: LoggerLevel, name, message) {
+  formatMessage(level: LoggerLevel, name: string, message: string) {
     const time = format(new Date(), 'YYYY-MM-DD hh:mm:ss,SSS A');
     return `${time} ${this.formatLevel(level)} [${blueBright(name)}] ${message}`;
   }

View file

@@ -25,8 +25,10 @@ export interface ConfigDatasource {
   // The type of datasource
   type: 'local' | 's3';

-  // The local datasource
+  // The local datasource, the default
   local: ConfigLocalDatasource;

+  // The s3 datasource
   s3?: ConfigS3Datasource;
 }
@@ -36,10 +38,20 @@ export interface ConfigLocalDatasource {
 }

 export interface ConfigS3Datasource {
+  // The access key id for the s3 bucket
   access_key_id: string;
+
+  // The secret access key for the s3 bucket
   secret_access_key: string;
+
+  // Not required, but if using a non-aws S3 service you can specify the endpoint
   endpoint?: string;
+
+  // The S3 bucket to store files in
   bucket: string;
+
+  // If true Zipline will attempt to connect to the bucket via the url "https://s3.amazonaws.com/{bucket}/stuff"
+  // If false Zipline will attempt to connect to the bucket via the url "http://{bucket}.s3.amazonaws.com/stuff"
   force_s3_path: boolean;
 }
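
To make the new field comments concrete, an object satisfying ConfigS3Datasource could look like the following; every value is a placeholder, not taken from the commit:

import { ConfigS3Datasource } from 'lib/types';

// placeholder values only — the shape follows the interface documented above
const s3Example: ConfigS3Datasource = {
  access_key_id: 'AKIAEXAMPLE',
  secret_access_key: 'example-secret',
  endpoint: 'https://minio.example.com', // optional, for non-AWS S3-compatible services
  bucket: 'zipline-uploads',
  force_s3_path: true, // path-style URL: https://s3.amazonaws.com/{bucket}/...
};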

View file

@@ -169,7 +169,7 @@ export const getServerSideProps: GetServerSideProps = async (context) => {
   };

   if (!image.mimetype.startsWith('image')) {
-    const { default: datasource } = await import('lib/ds');
+    const { default: datasource } = await import('lib/datasource');
     const data = datasource.get(image.file);

     if (!data) return { notFound: true };

View file

@@ -1,7 +1,7 @@
 import prisma from 'lib/prisma';
 import { NextApiReq, NextApiRes, withZipline } from 'middleware/withZipline';
 import { checkPassword } from 'lib/util';
-import datasource from 'lib/ds';
+import datasource from 'lib/datasource';
 import mimes from '../../../../scripts/mimes';
 import { extname } from 'path';

View file

@@ -7,7 +7,7 @@ import Logger from 'lib/logger';
 import { ImageFormat, InvisibleImage } from '@prisma/client';
 import { format as formatDate } from 'fecha';
 import { v4 } from 'uuid';
-import datasource from 'lib/ds';
+import datasource from 'lib/datasource';

 const uploader = multer();

View file

@@ -2,7 +2,7 @@ import { NextApiReq, NextApiRes, withZipline } from 'middleware/withZipline';
 import prisma from 'lib/prisma';
 import Logger from 'lib/logger';
 import { Zip, ZipPassThrough } from 'fflate';
-import datasource from 'lib/ds';
+import datasource from 'lib/datasource';
 import { readdir } from 'fs/promises';
 import { createReadStream, createWriteStream } from 'fs';

View file

@@ -2,7 +2,7 @@ import { NextApiReq, NextApiRes, withZipline } from 'middleware/withZipline';
 import prisma from 'lib/prisma';
 import { chunk } from 'lib/util';
 import Logger from 'lib/logger';
-import datasource from 'lib/ds';
+import datasource from 'lib/datasource';

 async function handler(req: NextApiReq, res: NextApiRes) {
   const user = await req.user();

View file

@@ -1,7 +1,7 @@
 import { NextApiReq, NextApiRes, withZipline } from 'middleware/withZipline';
 import prisma from 'lib/prisma';
 import Logger from 'lib/logger';
-import datasource from 'lib/ds';
+import datasource from 'lib/datasource';

 async function handler(req: NextApiReq, res: NextApiRes) {
   const user = await req.user();

View file

@@ -4,7 +4,7 @@ import { Image, PrismaClient } from '@prisma/client';
 import { createServer, IncomingMessage, OutgoingMessage, Server as HttpServer, ServerResponse } from 'http';
 import next from 'next';
 import config from '../lib/config';
-import datasource from '../lib/ds';
+import datasource from '../lib/datasource';
 import { getStats, log, migrations } from './util';
 import { mkdir } from 'fs/promises';
 import Logger from '../lib/logger';

View file

@@ -1,7 +1,7 @@
 import { Migrate } from '@prisma/migrate/dist/Migrate';
 import { ensureDatabaseExists } from '@prisma/migrate/dist/utils/ensureDatabaseExists';
 import Logger from '../lib/logger';
-import { Datasource } from 'lib/datasource';
+import { Datasource } from 'lib/datasources';
 import { PrismaClient } from '@prisma/client';

 export async function migrations() {

zip-env.d.ts (vendored)
View file

@@ -1,5 +1,5 @@
 import type { PrismaClient } from '@prisma/client';
-import type { Datasource } from 'lib/datasource';
+import type { Datasource } from 'lib/datasources';
 import type { Config } from '.lib/types';

 declare global {