/**
 * Deletes every record in `chunk` from `table` one at a time, recording the
 * outcome of each individual delete on the shared `result` accumulator.
 *
 * Used as the fallback path when a bulk chunk delete fails, so the specific
 * ids that could not be removed can be reported back to the client.
 *
 * @param {string} table - name of the table to delete from
 * @param {string[]} chunk - ids of the records to delete
 * @param {Object} result - accumulator (mutated in place) with `successful`,
 *                          `unsuccessful`, `unsuccessfulIds` and `errors` keys
 */
async function delChunkSequential(table, chunk, result) {
    for (const id of chunk) {
        try {
            await db.knex(table).where('id', id).del();
            result.successful += 1;
        } catch (err) {
            result.errors.push(err);
            result.unsuccessfulIds.push(id);
            result.unsuccessful += 1;
        }
    }
}

/**
 * Attempts to delete a whole chunk of records in a single bulk query.
 *
 * On failure the bulk error itself is discarded and the chunk is retried
 * record-by-record via delChunkSequential, which captures the individual
 * errors and the exact ids that could not be removed.
 *
 * @param {string} table - name of the table to delete from
 * @param {string[]} chunk - ids of the records to delete
 * @param {Object} result - accumulator (mutated in place), see delChunkSequential
 */
async function delChunk(table, chunk, result) {
    try {
        await db.knex(table).whereIn('id', chunk).del();
        result.successful += chunk.length;
    } catch (err) {
        // Bulk delete failed — fall back to sequential deletes so only the
        // genuinely unremovable ids are reported as unsuccessful.
        await delChunkSequential(table, chunk, result);
    }
}

/**
 * Deletes the records with the given ids from `table` in chunks of
 * CHUNK_SIZE, preferring fast bulk deletes and degrading to per-record
 * deletes for any chunk that fails.
 *
 * @param {string} table - name of the table to delete from
 * @param {string[]} ids - ids of the records to delete
 * @returns {Promise<Object>} summary with `successful` / `unsuccessful`
 *          counts, the `unsuccessfulIds` that could not be removed, and the
 *          collected `errors`
 */
async function del(table, ids) {
    const result = {
        successful: 0,
        unsuccessful: 0,
        unsuccessfulIds: [],
        errors: []
    };

    const chunks = _.chunk(ids, CHUNK_SIZE);
    for (const chunk of chunks) {
        await delChunk(table, chunk, result);
    }

    return result;
}