Mirror of https://github.com/stonith404/pingvin-share.git (synced 2025-01-15 01:14:27 -05:00)
fix: memory leak while uploading files by disabling base64 encoding of chunks
commit 7a15fbb465
parent 0bfbaea49a

4 changed files with 46 additions and 67 deletions
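The leak came from how chunks were prepared for upload: each Blob slice was run through FileReader.readAsDataURL, which builds a base64 data URL of the whole chunk in memory (roughly a third larger than the raw bytes) before anything is sent, and the backend then split that string apart again to recover the payload. The fix uploads each sliced Blob as-is, so neither side ever allocates the intermediate string. A minimal sketch of the before/after, where send stands in for the actual HTTP call:

// Before: each chunk is base64-encoded into a data URL before upload.
// The encoded string plus the original blob both stay alive until the
// reader callback finishes.
function uploadChunkBase64(blob: Blob, send: (body: string) => Promise<void>) {
  return new Promise<void>((resolve, reject) => {
    const reader = new FileReader();
    reader.onload = () => {
      const dataUrl = reader.result as string; // "data:...;base64,XXXX"
      send(dataUrl).then(resolve).catch(reject);
    };
    reader.onerror = () => reject(reader.error);
    reader.readAsDataURL(blob);
  });
}

// After: the sliced blob is handed to the HTTP client directly, so the
// browser can stream the bytes without building an intermediate string.
function uploadChunkRaw(blob: Blob, send: (body: Blob) => Promise<void>) {
  return send(blob);
}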
@@ -27,17 +27,14 @@ export class FileController {
   @UseGuards(CreateShareGuard, ShareOwnerGuard)
   async create(
     @Query() query: any,

     @Body() body: string,
     @Param("shareId") shareId: string,
   ) {
     const { id, name, chunkIndex, totalChunks } = query;

-    // Data can be empty if the file is empty
-    const data = body.toString().split(",")[1] ?? "";
-
     return await this.fileService.create(
-      data,
+      body,
       { index: parseInt(chunkIndex), total: parseInt(totalChunks) },
       { id, name },
       shareId,
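On the backend, the controller previously received the data URL string and kept only the base64 payload after the comma; with raw uploads it forwards body to fileService.create untouched. The removed parsing looked roughly like this in isolation (illustrative values):

// Sketch of the removed parsing: the body arrived as a data URL, and
// everything after the first comma was the base64 payload - a second
// in-memory copy of the chunk.
const body = "data:application/octet-stream;base64,SGVsbG8=";
const data = body.split(",")[1] ?? ""; // "" when the file is empty
console.log(data); // "SGVsbG8="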
@@ -1,22 +1,19 @@
 import { Button, Group } from "@mantine/core";
 import { useModals } from "@mantine/modals";
 import { cleanNotifications } from "@mantine/notifications";
 import { AxiosError } from "axios";
+import { useRouter } from "next/router";
 import pLimit from "p-limit";
-import { useEffect, useMemo, useState } from "react";
+import { useEffect, useMemo, useRef, useState } from "react";
 import { FormattedMessage } from "react-intl";
 import Dropzone from "../../components/upload/Dropzone";
 import FileList from "../../components/upload/FileList";
 import showCompletedUploadModal from "../../components/upload/modals/showCompletedUploadModal";
 import useConfig from "../../hooks/config.hook";
 import useTranslate from "../../hooks/useTranslate.hook";
 import shareService from "../../services/share.service";
 import { FileListItem, FileMetaData, FileUpload } from "../../types/File.type";
 import toast from "../../utils/toast.util";
-import { useRouter } from "next/router";

 const promiseLimit = pLimit(3);
-const chunkSize = 10 * 1024 * 1024; // 10MB
 let errorToastShown = false;

 const EditableUpload = ({
@@ -33,6 +30,8 @@ const EditableUpload = ({
   const router = useRouter();
   const config = useConfig();

+  const chunkSize = useRef(parseInt(config.get("share.chunkSize")));
+
   const [existingFiles, setExistingFiles] =
     useState<Array<FileMetaData & { deleted?: boolean }>>(savedFiles);
   const [uploadingFiles, setUploadingFiles] = useState<FileUpload[]>([]);
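The hard-coded 10 MB chunkSize constant is replaced by a value read from the instance configuration (share.chunkSize) and kept in a ref, so every render and every chunk calculation sees the same number. A minimal sketch of the pattern, assuming config.get returns config values as strings the way the useConfig() accessor is used above:

import { useRef } from "react";

// Minimal sketch: `config.get` mirrors the useConfig() hook's accessor.
function useChunkSize(config: { get: (key: string) => string }) {
  // Parse once on mount and keep the number in a ref; consumers read
  // chunkSize.current instead of re-reading config every render.
  const chunkSize = useRef(parseInt(config.get("share.chunkSize")));
  return chunkSize.current;
}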
@@ -66,7 +65,7 @@
     const fileUploadPromises = files.map(async (file, fileIndex) =>
       // Limit the number of concurrent uploads to 3
       promiseLimit(async () => {
-        let fileId: string;
+        let fileId: string | undefined;

         const setFileProgress = (progress: number) => {
           setUploadingFiles((files) =>
@@ -81,38 +80,30 @@
         setFileProgress(1);

-        let chunks = Math.ceil(file.size / chunkSize);
+        let chunks = Math.ceil(file.size / chunkSize.current);

         // If the file is 0 bytes, we still need to upload 1 chunk
         if (chunks == 0) chunks++;

         for (let chunkIndex = 0; chunkIndex < chunks; chunkIndex++) {
-          const from = chunkIndex * chunkSize;
-          const to = from + chunkSize;
+          const from = chunkIndex * chunkSize.current;
+          const to = from + chunkSize.current;
           const blob = file.slice(from, to);
           try {
-            await new Promise((resolve, reject) => {
-              const reader = new FileReader();
-              reader.onload = async (event) =>
-                await shareService
-                  .uploadFile(
-                    shareId,
-                    event,
-                    {
-                      id: fileId,
-                      name: file.name,
-                    },
-                    chunkIndex,
-                    chunks,
-                  )
-                  .then((response) => {
-                    fileId = response.id;
-                    resolve(response);
-                  })
-                  .catch(reject);
-
-              reader.readAsDataURL(blob);
-            });
+            await shareService
+              .uploadFile(
+                shareId,
+                blob,
+                {
+                  id: fileId,
+                  name: file.name,
+                },
+                chunkIndex,
+                chunks,
+              )
+              .then((response) => {
+                fileId = response.id;
+              });

             setFileProgress(((chunkIndex + 1) / chunks) * 100);
           } catch (e) {
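The rewritten loop slices the File into Blob chunks and awaits each upload directly: no FileReader, no promise wrapper, no base64 string. The Upload page below gets the same treatment, differing only in using createdShare.id as the share id and leaving fileId untyped. A self-contained sketch of the control flow, where uploadChunk stands in for shareService.uploadFile:

type UploadResponse = { id: string };

async function uploadInChunks(
  file: File,
  chunkSize: number,
  uploadChunk: (
    chunk: Blob,
    index: number,
    total: number,
    fileId?: string,
  ) => Promise<UploadResponse>,
): Promise<string | undefined> {
  let chunks = Math.ceil(file.size / chunkSize);
  if (chunks == 0) chunks++; // a 0-byte file still uploads one empty chunk

  let fileId: string | undefined;
  for (let chunkIndex = 0; chunkIndex < chunks; chunkIndex++) {
    // slice() creates a cheap view into the file - no copy, no encoding
    const blob = file.slice(chunkIndex * chunkSize, (chunkIndex + 1) * chunkSize);
    // the first response assigns the file id; later chunks reuse it so
    // the server appends them to the same file
    const response = await uploadChunk(blob, chunkIndex, chunks, fileId);
    fileId = response.id;
  }
  return fileId;
}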
@@ -3,7 +3,7 @@ import { useModals } from "@mantine/modals";
 import { cleanNotifications } from "@mantine/notifications";
 import { AxiosError } from "axios";
 import pLimit from "p-limit";
-import { useEffect, useState } from "react";
+import { useEffect, useRef, useState } from "react";
 import { FormattedMessage } from "react-intl";
 import Meta from "../../components/Meta";
 import Dropzone from "../../components/upload/Dropzone";
@@ -19,7 +19,6 @@ import { CreateShare, Share } from "../../types/share.type";
 import toast from "../../utils/toast.util";

 const promiseLimit = pLimit(3);
-const chunkSize = 10 * 1024 * 1024; // 10MB
 let errorToastShown = false;
 let createdShare: Share;

@@ -38,6 +37,8 @@ const Upload = ({
   const [files, setFiles] = useState<FileUpload[]>([]);
   const [isUploading, setisUploading] = useState(false);

+  const chunkSize = useRef(parseInt(config.get("share.chunkSize")));
+
   maxShareSize ??= parseInt(config.get("share.maxSize"));

   const uploadFiles = async (share: CreateShare, files: FileUpload[]) => {
@@ -54,7 +55,7 @@
     const fileUploadPromises = files.map(async (file, fileIndex) =>
       // Limit the number of concurrent uploads to 3
       promiseLimit(async () => {
-        let fileId: string;
+        let fileId;

         const setFileProgress = (progress: number) => {
           setFiles((files) =>
@@ -69,38 +70,30 @@
         setFileProgress(1);

-        let chunks = Math.ceil(file.size / chunkSize);
+        let chunks = Math.ceil(file.size / chunkSize.current);

         // If the file is 0 bytes, we still need to upload 1 chunk
         if (chunks == 0) chunks++;

         for (let chunkIndex = 0; chunkIndex < chunks; chunkIndex++) {
-          const from = chunkIndex * chunkSize;
-          const to = from + chunkSize;
+          const from = chunkIndex * chunkSize.current;
+          const to = from + chunkSize.current;
           const blob = file.slice(from, to);
           try {
-            await new Promise((resolve, reject) => {
-              const reader = new FileReader();
-              reader.onload = async (event) =>
-                await shareService
-                  .uploadFile(
-                    createdShare.id,
-                    event,
-                    {
-                      id: fileId,
-                      name: file.name,
-                    },
-                    chunkIndex,
-                    chunks,
-                  )
-                  .then((response) => {
-                    fileId = response.id;
-                    resolve(response);
-                  })
-                  .catch(reject);
-
-              reader.readAsDataURL(blob);
-            });
+            await shareService
+              .uploadFile(
+                createdShare.id,
+                blob,
+                {
+                  id: fileId,
+                  name: file.name,
+                },
+                chunkIndex,
+                chunks,
+              )
+              .then((response) => {
+                fileId = response.id;
+              });

             setFileProgress(((chunkIndex + 1) / chunks) * 100);
           } catch (e) {
@@ -77,7 +77,7 @@ const removeFile = async (shareId: string, fileId: string) => {

 const uploadFile = async (
   shareId: string,
-  readerEvent: ProgressEvent<FileReader>,
+  chunk: Blob,
   file: {
     id?: string;
     name: string;
@@ -85,10 +85,8 @@ const uploadFile = async (
   chunkIndex: number,
   totalChunks: number,
 ): Promise<FileUploadResponse> => {
-  const data = readerEvent.target!.result;
-
   return (
-    await api.post(`shares/${shareId}/files`, data, {
+    await api.post(`shares/${shareId}/files`, chunk, {
       headers: { "Content-Type": "application/octet-stream" },
       params: {
         id: file.id,
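On the service side the FileReader event is gone from the signature: callers pass the Blob chunk itself, and axios posts it as application/octet-stream with the chunk metadata in the query string. A hedged usage sketch, where api is the app's preconfigured axios instance and the share id and file name are illustrative:

// Assuming FileUploadResponse exposes the created file's id, as above.
async function example() {
  const blob = new Blob(["hello"]);
  const first = await uploadFile(
    "someShareId",          // shareId (illustrative)
    blob,                   // the raw chunk; no base64 round trip
    { name: "hello.txt" },  // no id yet: this is the file's first chunk
    0,                      // chunkIndex
    1,                      // totalChunks
  );
  // Later chunks of the same file would pass { id: first.id, name: ... }.
  return first.id;
}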