support continuing from last packed doc

Brian Gough 2016-01-28 16:40:20 +00:00
parent 666a07e5ba
commit 77cafa36af
3 changed files with 7 additions and 3 deletions

@@ -26,7 +26,8 @@ module.exports = HttpController =
 	listDocs: (req, res, next = (error) ->) ->
 		logger.log "listing packing doc history"
 		limit = +req.query?.limit || 100
-		PackManager.listDocs {limit}, (error, doc_ids) ->
+		doc_id = req.query?.doc_id if req.query?.doc_id?.match(/^[0-9a-f]{24}$/)
+		PackManager.listDocs {limit, doc_id}, (error, doc_ids) ->
 			return next(error) if error?
 			ids = (doc.doc_id.toString() for doc in doc_ids)
 			output = _.uniq(ids).join("\n") + "\n"
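
For illustration, a minimal CoffeeScript sketch of the option handling the controller now performs (buildListOptions and fakeReq are made-up names, and the ObjectId below is invented): a doc_id query parameter is only used as the resume point when it looks like a 24-character hex ObjectId; anything else is ignored and the listing starts from the beginning.

	# Hedged sketch of the new doc_id handling; not part of the service itself.
	buildListOptions = (req) ->
		limit = +req.query?.limit || 100
		doc_id = req.query?.doc_id if req.query?.doc_id?.match(/^[0-9a-f]{24}$/)
		{limit, doc_id}

	fakeReq = query: {limit: "1000", doc_id: "56aa3fca5672b3f456789abc"}
	console.log buildListOptions(fakeReq)   # -> { limit: 1000, doc_id: '56aa3fca5672b3f456789abc' }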

@@ -533,6 +533,8 @@ module.exports = PackManager =
 		db.docHistory.findAndModify {query, update}, callback
 
 	listDocs: (options, callback) ->
-		db.docHistory.find({"op.p":{$exists:true}}, {doc_id:true}).limit (options.limit||100), (err, docs) ->
+		query = {"op.p":{$exists:true}}
+		query.doc_id = {$gt: ObjectId(options.doc_id)} if options.doc_id?
+		db.docHistory.find(query, {doc_id:true}).sort({doc_id:1}).limit (options.limit||100), (err, docs) ->
 			return callback(err) if err?
 			callback(null, docs)
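
The query change above is keyset pagination on doc_id: sort ascending and ask only for ids strictly greater than the last one already handled, so each call resumes where the previous one stopped. A minimal sketch of a caller driving it, assuming a hypothetical fetchPage(lastDocId, callback) wrapper around PackManager.listDocs:

	# Hedged sketch: repeatedly resume from the highest doc_id seen so far.
	forEachDocBatch = (fetchPage, handleBatch, lastDocId = null) ->
		fetchPage lastDocId, (err, docs) ->
			throw err if err?
			return if docs.length == 0                           # nothing left, stop
			handleBatch docs
			lastDocId = docs[docs.length - 1].doc_id.toString()  # highest id in this batch
			forEachDocBatch fetchPage, handleBatch, lastDocId    # continue strictly after it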

@@ -11,11 +11,12 @@ for n in $(seq 5 -1 1) ; do
 	sleep 1
 done
-while docs=$(curl "$HOST/doc/list?limit=1000"); do
+while docs=$(curl "$HOST/doc/list?limit=1000&doc_id=$last_doc"); do
 	if [ -z "$docs" ] ; then break ; fi
 	for d in $docs ; do
 		echo "packing $d"
 		curl -X POST "$HOST/doc/$d/pack"
 		sleep $T
+		last_doc=$d
 	done
 done
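
With the controller change above, the script's first pass sends an empty doc_id (last_doc is unset), which cannot match the 24-hex-character check and is ignored, so listing starts from the lowest doc_id; every later pass of the outer loop resumes strictly after the last doc packed in the previous pass instead of re-listing from the beginning. Roughly, the requests issued look like this (the ids are placeholders):

	GET  $HOST/doc/list?limit=1000&doc_id=             first pass: empty resume point, start from the beginning
	POST $HOST/doc/<id>/pack                           once per id returned, in ascending doc_id order
	GET  $HOST/doc/list?limit=1000&doc_id=<last id>    next pass: continue after the last packed doc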