Mirror of https://github.com/overleaf/overleaf.git (synced 2024-11-21 20:47:08 -05:00)

Commit 41533d8888 (parent 252f4c704b): moved decaffeinated files to js folder

4 changed files with 155 additions and 143 deletions
Deleted file (102 lines):
@@ -1,102 +0,0 @@
/* eslint-disable
    standard/no-callback-literal,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const request = require("request");
const Settings = require("settings-sharelatex");
const async = require("async");
const fs = require("fs");
const _ = require("underscore");
const concurentCompiles = 5;
const totalCompiles = 50;

const buildUrl = path => `http://${Settings.internal.clsi.host}:${Settings.internal.clsi.port}/${path}`;

const mainTexContent = fs.readFileSync("./bulk.tex", "utf-8");

const compileTimes = [];
let failedCount = 0;

const getAverageCompileTime = function() {
  const totalTime = _.reduce(compileTimes, (sum, time)=> sum + time
  , 0);
  return totalTime / compileTimes.length;
};

const makeRequest = function(compileNumber, callback){
  let bulkBodyCount = 7;
  let bodyContent = "";
  while (--bulkBodyCount) {
    bodyContent = (bodyContent+=mainTexContent);
  }


  const startTime = new Date();
  return request.post({
    url: buildUrl(`project/loadcompile-${compileNumber}/compile`),
    json: {
      compile: {
        resources: [{
          path: "main.tex",
          content: `\
\\documentclass{article}
\\begin{document}
${bodyContent}
\\end{document}\
`
        }
        ]
      }
    }
  }, (err, response, body) => {
    if (response.statusCode !== 200) {
      failedCount++;
      return callback(`compile ${compileNumber} failed`);
    }
    if (err != null) {
      failedCount++;
      return callback("failed");
    }
    const totalTime = new Date() - startTime;
    console.log(totalTime+"ms");
    compileTimes.push(totalTime);
    return callback(err);
  });
};


const jobs = _.map(__range__(1, totalCompiles, true), i=>
  cb=> makeRequest(i, cb)
);

const startTime = new Date();
async.parallelLimit(jobs, concurentCompiles, (err) => {
  if (err != null) {
    console.error(err);
  }
  console.log(`total time taken = ${(new Date() - startTime)/1000}s`);
  console.log(`total compiles = ${totalCompiles}`);
  console.log(`concurent compiles = ${concurentCompiles}`);
  console.log(`average time = ${getAverageCompileTime()/1000}s`);
  console.log(`max time = ${_.max(compileTimes)/1000}s`);
  console.log(`min time = ${_.min(compileTimes)/1000}s`);
  return console.log(`total failures = ${failedCount}`);
});


function __range__(left, right, inclusive) {
  const range = [];
  const ascending = left < right;
  const end = !inclusive ? right : ascending ? right + 1 : right - 1;
  for (let i = left; ascending ? i < end : i > end; ascending ? i++ : i--) {
    range.push(i);
  }
  return range;
}
services/clsi/test/load/js/loadTest.js (new file, 103 lines)
@@ -0,0 +1,103 @@
/* eslint-disable
    standard/no-callback-literal,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const request = require('request')
const Settings = require('settings-sharelatex')
const async = require('async')
const fs = require('fs')
const _ = require('underscore')
const concurentCompiles = 5
const totalCompiles = 50

const buildUrl = path =>
  `http://${Settings.internal.clsi.host}:${Settings.internal.clsi.port}/${path}`

const mainTexContent = fs.readFileSync('./bulk.tex', 'utf-8')

const compileTimes = []
let failedCount = 0

const getAverageCompileTime = function() {
  const totalTime = _.reduce(compileTimes, (sum, time) => sum + time, 0)
  return totalTime / compileTimes.length
}

const makeRequest = function(compileNumber, callback) {
  let bulkBodyCount = 7
  let bodyContent = ''
  while (--bulkBodyCount) {
    bodyContent = bodyContent += mainTexContent
  }

  const startTime = new Date()
  return request.post(
    {
      url: buildUrl(`project/loadcompile-${compileNumber}/compile`),
      json: {
        compile: {
          resources: [
            {
              path: 'main.tex',
              content: `\
\\documentclass{article}
\\begin{document}
${bodyContent}
\\end{document}\
`
            }
          ]
        }
      }
    },
    (err, response, body) => {
      if (response.statusCode !== 200) {
        failedCount++
        return callback(`compile ${compileNumber} failed`)
      }
      if (err != null) {
        failedCount++
        return callback('failed')
      }
      const totalTime = new Date() - startTime
      console.log(totalTime + 'ms')
      compileTimes.push(totalTime)
      return callback(err)
    }
  )
}

const jobs = _.map(__range__(1, totalCompiles, true), i => cb =>
  makeRequest(i, cb)
)

const startTime = new Date()
async.parallelLimit(jobs, concurentCompiles, err => {
  if (err != null) {
    console.error(err)
  }
  console.log(`total time taken = ${(new Date() - startTime) / 1000}s`)
  console.log(`total compiles = ${totalCompiles}`)
  console.log(`concurent compiles = ${concurentCompiles}`)
  console.log(`average time = ${getAverageCompileTime() / 1000}s`)
  console.log(`max time = ${_.max(compileTimes) / 1000}s`)
  console.log(`min time = ${_.min(compileTimes) / 1000}s`)
  return console.log(`total failures = ${failedCount}`)
})

function __range__(left, right, inclusive) {
  const range = []
  const ascending = left < right
  const end = !inclusive ? right : ascending ? right + 1 : right - 1
  for (let i = left; ascending ? i < end : i > end; ascending ? i++ : i--) {
    range.push(i)
  }
  return range
}
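For context on the load test above: it builds `totalCompiles` request functions and hands them to `async.parallelLimit`, which keeps at most `concurentCompiles` of them in flight at once. A minimal, self-contained sketch of that same pattern, with timers standing in for the HTTP compile calls so no CLSI setup is needed (the task count and delays here are illustrative only):

// Sketch of the concurrency pattern used by loadTest.js:
// async.parallelLimit runs task functions with at most `limit` running at once.
const async = require('async')

const tasks = [1, 2, 3, 4, 5, 6].map(n => cb =>
  // Stand-in for a compile request: finish after a short delay.
  setTimeout(() => {
    console.log(`task ${n} done`)
    cb(null, n)
  }, 100 * n)
)

// Run the six tasks with a concurrency limit of 2.
async.parallelLimit(tasks, 2, (err, results) => {
  if (err != null) {
    return console.error(err)
  }
  console.log('all done', results)
})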
@@ -10,25 +10,30 @@
  * DS207: Consider shorter variations of null checks
  * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
  */
-const chai = require("chai");
-if (Object.prototype.should == null) { chai.should(); }
-const { expect } = chai;
-const request = require("request");
-const Settings = require("settings-sharelatex");
+const chai = require('chai')
+if (Object.prototype.should == null) {
+  chai.should()
+}
+const { expect } = chai
+const request = require('request')
+const Settings = require('settings-sharelatex')

-const buildUrl = path => `http://${Settings.internal.clsi.host}:${Settings.internal.clsi.port}/${path}`;
+const buildUrl = path =>
+  `http://${Settings.internal.clsi.host}:${Settings.internal.clsi.port}/${path}`

-const url = buildUrl(`project/smoketest-${process.pid}/compile`);
+const url = buildUrl(`project/smoketest-${process.pid}/compile`)

-describe("Running a compile", function() {
-  before(function(done) {
-    return request.post({
-      url,
-      json: {
-        compile: {
-          resources: [{
-            path: "main.tex",
-            content: `\
+describe('Running a compile', function() {
+  before(function(done) {
+    return request.post(
+      {
+        url,
+        json: {
+          compile: {
+            resources: [
+              {
+                path: 'main.tex',
+                content: `\
 % Membrane-like surface
 % Author: Yotam Avital
 \\documentclass{article}

@@ -61,29 +66,35 @@ describe("Running a compile", function() {
 \\end{tikzpicture}
 \\end{document}\
 `
-          }
-          ]
-        }
-      }
-    }, (error, response, body) => {
-      this.error = error;
-      this.response = response;
-      this.body = body;
-      return done();
-    });
-  });
+              }
+            ]
+          }
+        }
+      },
+      (error, response, body) => {
+        this.error = error
+        this.response = response
+        this.body = body
+        return done()
+      }
+    )
+  })

-  it("should return the pdf", function() {
-    for (const file of Array.from(this.body.compile.outputFiles)) {
-      if (file.type === "pdf") { return; }
-    }
-    throw new Error("no pdf returned");
-  });
+  it('should return the pdf', function() {
+    for (const file of Array.from(this.body.compile.outputFiles)) {
+      if (file.type === 'pdf') {
+        return
+      }
+    }
+    throw new Error('no pdf returned')
+  })

-  return it("should return the log", function() {
-    for (const file of Array.from(this.body.compile.outputFiles)) {
-      if (file.type === "log") { return; }
-    }
-    throw new Error("no log returned");
-  });
-});
+  return it('should return the log', function() {
+    for (const file of Array.from(this.body.compile.outputFiles)) {
+      if (file.type === 'log') {
+        return
+      }
+    }
+    throw new Error('no log returned')
+  })
+})
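Both smoke tests above walk `this.body.compile.outputFiles` looking for an entry whose `type` is 'pdf' or 'log'. A small sketch of that check in isolation; the sample body below is hand-made for illustration (the extra `path` fields are assumed, not taken from a real CLSI response):

// Return true if the compile response lists an output file of the given type.
function hasOutputFileOfType(body, type) {
  return (body.compile.outputFiles || []).some(file => file.type === type)
}

// Illustrative sample only: shape mirrors what the tests read, values are made up.
const sampleBody = {
  compile: {
    outputFiles: [
      { type: 'pdf', path: 'output.pdf' },
      { type: 'log', path: 'output.log' }
    ]
  }
}

console.log(hasOutputFileOfType(sampleBody, 'pdf')) // true
console.log(hasOutputFileOfType(sampleBody, 'log')) // true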