/**
 * ContentCacheManager - maintains a cache of stream hashes from a PDF file
 */
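
// Example usage (illustrative sketch only -- the require path, file paths and
// numbers below are hypothetical, not taken from the calling code):
//
//   const ContentCacheManager = require('./ContentCacheManager')
//   const { contentRanges, newContentRanges } =
//     await ContentCacheManager.promises.update({
//       contentDir: '/compile/output/content',  // hypothetical cache directory
//       filePath: '/compile/output/output.pdf', // hypothetical PDF to scan
//       pdfSize: 2 * 1024 * 1024,               // size of the PDF in bytes
//       pdfCachingMinChunkSize: 1024,           // per-request threshold in bytes
//       compileTime: 10 * 1000,                 // compile time in ms
//     })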

const { callbackify } = require('util')
const fs = require('fs')
const crypto = require('crypto')
const Path = require('path')
const Settings = require('@overleaf/settings')
const OError = require('@overleaf/o-error')
const pLimit = require('p-limit')
const { parseXrefTable } = require('./XrefParser')
const {
  QueueLimitReachedError,
  TimedOutError,
  NoXrefTableError,
} = require('./Errors')
const workerpool = require('workerpool')
const Metrics = require('@overleaf/metrics')

let WORKER_POOL
// NOTE: Check for main thread to avoid recursive start of pool.
if (Settings.pdfCachingEnableWorkerPool && workerpool.isMainThread) {
  WORKER_POOL = workerpool.pool(Path.join(__dirname, 'ContentCacheWorker.js'), {
    // Cap number of worker threads.
    maxWorkers: Settings.pdfCachingWorkerPoolSize,
    // Warmup workers.
    minWorkers: Settings.pdfCachingWorkerPoolSize,
    // Limit queue back-log
    maxQueueSize: Settings.pdfCachingWorkerPoolBackLogLimit,
  })
  setInterval(() => {
    const {
      totalWorkers,
      busyWorkers,
      idleWorkers,
      pendingTasks,
      activeTasks,
    } = WORKER_POOL.stats()
    Metrics.gauge('pdf_caching_total_workers', totalWorkers)
    Metrics.gauge('pdf_caching_busy_workers', busyWorkers)
    Metrics.gauge('pdf_caching_idle_workers', idleWorkers)
    Metrics.gauge('pdf_caching_pending_tasks', pendingTasks)
    Metrics.gauge('pdf_caching_active_tasks', activeTasks)
  }, 15 * 1000)
}
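
// ContentCacheWorker.js is expected to register updateSameEventLoop (exported
// from this module) with workerpool, so WORKER_POOL.exec('updateSameEventLoop',
// ...) below runs the same scan on a worker thread.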

/**
 *
 * @param {String} contentDir path to directory where content hash files are cached
 * @param {String} filePath the pdf file to scan for streams
 * @param {number} pdfSize the pdf size
 * @param {number} pdfCachingMinChunkSize per request threshold
 * @param {number} compileTime compile time in ms
 */
async function update({
  contentDir,
  filePath,
  pdfSize,
  pdfCachingMinChunkSize,
  compileTime,
}) {
  if (pdfSize < pdfCachingMinChunkSize) {
    return {
      contentRanges: [],
      newContentRanges: [],
      reclaimedSpace: 0,
      startXRefTable: undefined,
    }
  }
  if (Settings.pdfCachingEnableWorkerPool) {
    return await updateOtherEventLoop({
      contentDir,
      filePath,
      pdfSize,
      pdfCachingMinChunkSize,
      compileTime,
    })
  } else {
    return await updateSameEventLoop({
      contentDir,
      filePath,
      pdfSize,
      pdfCachingMinChunkSize,
      compileTime,
    })
  }
}
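
// updateOtherEventLoop() offloads the scan to the worker pool so the main
// event loop stays responsive, and maps worker-side failures back onto the
// domain errors (TimedOutError, QueueLimitReachedError, NoXrefTableError).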

/**
 *
 * @param {String} contentDir path to directory where content hash files are cached
 * @param {String} filePath the pdf file to scan for streams
 * @param {number} pdfSize the pdf size
 * @param {number} pdfCachingMinChunkSize per request threshold
 * @param {number} compileTime compile time in ms
 */
async function updateOtherEventLoop({
  contentDir,
  filePath,
  pdfSize,
  pdfCachingMinChunkSize,
  compileTime,
}) {
  const workerLatencyInMs = 100
  // Prefer getting the timeout error from the worker vs timing out the worker.
  const timeout = getMaxOverhead(compileTime) + workerLatencyInMs
  try {
    return await WORKER_POOL.exec('updateSameEventLoop', [
      {
        contentDir,
        filePath,
        pdfSize,
        pdfCachingMinChunkSize,
        compileTime,
      },
    ]).timeout(timeout)
  } catch (e) {
    if (e instanceof workerpool.Promise.TimeoutError) {
      throw new TimedOutError('context-lost-in-worker', { timeout })
    }
    if (e.message?.includes?.('Max queue size of ')) {
      throw new QueueLimitReachedError()
    }
    if (e.message?.includes?.('xref')) {
      throw new NoXrefTableError(e.message)
    }
    throw e
  }
}
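
// updateSameEventLoop() performs the scan in-process:
//   1. load the per-directory hash tracker, age existing hashes and delete
//      stale ones,
//   2. parse the xref table and pick uncompressed objects at or above the
//      per-request threshold,
//   3. hash each stream, writing previously unseen streams to contentDir,
//   4. flush the tracker state even if the deadline checker fires mid-scan.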

/**
 *
 * @param {String} contentDir path to directory where content hash files are cached
 * @param {String} filePath the pdf file to scan for streams
 * @param {number} pdfSize the pdf size
 * @param {number} pdfCachingMinChunkSize per request threshold
 * @param {number} compileTime compile time in ms
 */
async function updateSameEventLoop({
  contentDir,
  filePath,
  pdfSize,
  pdfCachingMinChunkSize,
  compileTime,
}) {
  const checkDeadline = getDeadlineChecker(compileTime)
  // Keep track of hashes and expire old ones when they reach a generation > N.
  const tracker = await HashFileTracker.from(contentDir)
  tracker.updateAge()
  checkDeadline('after init HashFileTracker')

  const [reclaimedSpace, overheadDeleteStaleHashes] =
    await tracker.deleteStaleHashes(5)
  checkDeadline('after delete stale hashes')

  const { xRefEntries, startXRefTable } = await parseXrefTable(
    filePath,
    pdfSize
  )

  xRefEntries.sort((a, b) => {
    return a.offset - b.offset
  })
  xRefEntries.forEach((obj, idx) => {
    obj.idx = idx
  })

  checkDeadline('after parsing')

  const uncompressedObjects = []
  for (const object of xRefEntries) {
    if (!object.uncompressed) {
      continue
    }
    const nextObject = xRefEntries[object.idx + 1]
    if (!nextObject) {
      // Ignore this possible edge case.
      // The last object should be part of the xRef table.
      continue
    } else {
      object.endOffset = nextObject.offset
    }
    const size = object.endOffset - object.offset
    object.size = size
    if (size < pdfCachingMinChunkSize) {
      continue
    }
    uncompressedObjects.push({ object, idx: uncompressedObjects.length })
  }

  checkDeadline('after finding uncompressed')

  let timedOutErr = null
  const contentRanges = []
  const newContentRanges = []
  const handle = await fs.promises.open(filePath)
  try {
    for (const { object, idx } of uncompressedObjects) {
      let buffer = Buffer.alloc(object.size, 0)
      const { bytesRead } = await handle.read(
        buffer,
        0,
        object.size,
        object.offset
      )
      checkDeadline('after read ' + idx)
      if (bytesRead !== object.size) {
        throw new OError('could not read full chunk', {
          object,
          bytesRead,
        })
      }
      const idxObj = buffer.indexOf('obj')
      if (idxObj > 100) {
        throw new OError('objectId is too large', {
          object,
          idxObj,
        })
      }
      const objectIdRaw = buffer.subarray(0, idxObj)
      buffer = buffer.subarray(objectIdRaw.byteLength)

      const hash = pdfStreamHash(buffer)
      checkDeadline('after hash ' + idx)
      const range = {
        objectId: objectIdRaw.toString(),
        start: object.offset + objectIdRaw.byteLength,
        end: object.endOffset,
        hash,
      }

      if (tracker.has(range.hash)) {
        // Optimization: Skip writing of already seen hashes.
        tracker.track(range)
        contentRanges.push(range)
        continue
      }

      await writePdfStream(contentDir, hash, buffer)
      tracker.track(range)
      contentRanges.push(range)
      newContentRanges.push(range)
      checkDeadline('after write ' + idx)
    }
  } catch (err) {
    if (err instanceof TimedOutError) {
      // Let the frontend use ranges that were processed so far.
      timedOutErr = err
    } else {
      throw err
    }
  } finally {
    await handle.close()

    // Flush from both success and failure code path. This allows the next
    // cycle to complete faster as it can use the already written ranges.
    await tracker.flush()
  }
  return {
    contentRanges,
    newContentRanges,
    reclaimedSpace,
    startXRefTable,
    overheadDeleteStaleHashes,
    timedOutErr,
  }
}

function getStatePath(contentDir) {
  return Path.join(contentDir, '.state.v0.json')
}
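
// HashFileTracker keeps the cache bookkeeping in .state.v0.json inside the
// content directory: hashAge counts how many update cycles have passed since a
// hash was last seen, hashSize records the stream size so deleteStaleHashes()
// can report how much space was reclaimed.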

class HashFileTracker {
  constructor(contentDir, { hashAge = [], hashSize = [] }) {
    this.contentDir = contentDir
    this.hashAge = new Map(hashAge)
    this.hashSize = new Map(hashSize)
  }

  static async from(contentDir) {
    const statePath = getStatePath(contentDir)
    let state = {}
    try {
      const blob = await fs.promises.readFile(statePath)
      state = JSON.parse(blob)
    } catch (e) {}
    return new HashFileTracker(contentDir, state)
  }

  has(hash) {
    return this.hashAge.has(hash)
  }

  track(range) {
    if (!this.hashSize.has(range.hash)) {
      this.hashSize.set(range.hash, range.end - range.start)
    }
    this.hashAge.set(range.hash, 0)
  }

  updateAge() {
    for (const [hash, age] of this.hashAge) {
      this.hashAge.set(hash, age + 1)
    }
    return this
  }

  findStale(maxAge) {
    const stale = []
    for (const [hash, age] of this.hashAge) {
      if (age > maxAge) {
        stale.push(hash)
      }
    }
    return stale
  }

  async flush() {
    const statePath = getStatePath(this.contentDir)
    const blob = JSON.stringify({
      hashAge: Array.from(this.hashAge.entries()),
      hashSize: Array.from(this.hashSize.entries()),
    })
    const atomicWrite = statePath + '~'
    try {
      await fs.promises.writeFile(atomicWrite, blob)
    } catch (err) {
      try {
        await fs.promises.unlink(atomicWrite)
      } catch (e) {}
      throw err
    }
    try {
      await fs.promises.rename(atomicWrite, statePath)
    } catch (err) {
      try {
        await fs.promises.unlink(atomicWrite)
      } catch (e) {}
      throw err
    }
  }

  async deleteStaleHashes(n) {
    const t0 = Date.now()
    // delete any hash file older than N generations
    const hashes = this.findStale(n)

    let reclaimedSpace = 0
    if (hashes.length === 0) {
      return [reclaimedSpace, Date.now() - t0]
    }

    await promiseMapWithLimit(10, hashes, async hash => {
      await fs.promises.unlink(Path.join(this.contentDir, hash))
      this.hashAge.delete(hash)
      reclaimedSpace += this.hashSize.get(hash)
      this.hashSize.delete(hash)
    })
    return [reclaimedSpace, Date.now() - t0]
  }
}

function pdfStreamHash(buffer) {
  const hash = crypto.createHash('sha256')
  hash.update(buffer)
  return hash.digest('hex')
}
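
// writePdfStream() writes to "<hash>~" and renames it into place so readers
// never see a partially written cache entry; on failure the temporary file is
// cleaned up and the original error is rethrown.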

async function writePdfStream(dir, hash, buffer) {
  const filename = Path.join(dir, hash)
  const atomicWriteFilename = filename + '~'
  try {
    await fs.promises.writeFile(atomicWriteFilename, buffer)
    await fs.promises.rename(atomicWriteFilename, filename)
  } catch (err) {
    // Clean up the temporary file, then surface the original error.
    try {
      await fs.promises.unlink(atomicWriteFilename)
    } catch (_) {}
    throw err
  }
}
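
// Worked example for getMaxOverhead() below (assuming the configured
// Settings.pdfCachingMaxProcessingTime ceiling is 10s):
//   compileTime = 40s -> min(max(10s, 1s), 10s) = 10s
//   compileTime = 3s  -> min(max(0.75s, 1s), 10s) = 1s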

function getMaxOverhead(compileTime) {
  return Math.min(
    // Adding 10s to a 40s compile time is OK.
    // Adding 1s to a 3s compile time is OK.
    Math.max(compileTime / 4, 1000),
    // Adding 30s to a 120s compile time is not OK, limit to 10s.
    Settings.pdfCachingMaxProcessingTime
  )
}
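
// getDeadlineChecker() hands back a function that the scan calls between
// stages; once the overhead budget from getMaxOverhead() is exhausted it
// throws a TimedOutError annotated with the last completed stage.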

function getDeadlineChecker(compileTime) {
  const timeout = getMaxOverhead(compileTime)

  const deadline = Date.now() + timeout
  let lastStage = { stage: 'start', now: Date.now() }
  let completedStages = 0
  return function (stage) {
    const now = Date.now()
    if (now > deadline) {
      throw new TimedOutError(stage, {
        timeout,
        completedStages,
        lastStage: lastStage.stage,
        diffToLastStage: now - lastStage.now,
      })
    }
    completedStages++
    lastStage = { stage, now }
  }
}

function promiseMapWithLimit(concurrency, array, fn) {
  const limit = pLimit(concurrency)
  return Promise.all(array.map(x => limit(() => fn(x))))
}
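
// HASH_REGEX matches the lowercase hex sha256 digests produced by
// pdfStreamHash() and used as cache file names under contentDir.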

module.exports = {
  HASH_REGEX: /^[0-9a-f]{64}$/,
  update: callbackify(update),
  promises: {
    update,
    updateSameEventLoop,
  },
}