Skip to content

added failed test retry #291

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 2 commits into from
Jan 3, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
51 changes: 33 additions & 18 deletions commands/generate_reports.js
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@ function download_artefact(
) {
return new Promise(function (resolve, reject) {
let response_code;
let resp;
if (!fs.existsSync(file_path)) {
fs.mkdirSync(file_path, { recursive: true });
}
Expand Down Expand Up @@ -44,6 +45,7 @@ function download_artefact(
reject(err);
}
response_code = res.statusCode;
resp = res
}).pipe(
fs
.createWriteStream(file_path, {
Expand All @@ -65,6 +67,11 @@ function download_artefact(
});
} else {
fs.unlinkSync(file_path);
if (resp.body != null) {
const responseObject = JSON.parse(resp.body);
const dataValue = responseObject.data;
reject("Could not download artefacts for test id " + test_id + " with reason " + dataValue);
}
reject("Could not download artefacts for test id " + test_id);
}
})
Expand All @@ -73,7 +80,7 @@ function download_artefact(
}

function generate_report(args) {
return new Promise(function (resolve, reject) {
return new Promise(async function (resolve, reject) {
var username = "";
var access_key = "";

Expand All @@ -96,7 +103,6 @@ function generate_report(args) {
} else {
reject("Access Key not provided");
}

//Check for session id
if (
!("session_id" in args) ||
Expand All @@ -123,7 +129,6 @@ function generate_report(args) {
);
}
}

//set working enviornment
var env = "prod";
if ("env" in args) {
Expand Down Expand Up @@ -184,8 +189,10 @@ function generate_report(args) {
fs.mkdirSync(directory, { recursive: true });
console.log("Directory created ", directory);
}
const downloadPromises = [];

for (i = 0; i < build_info["data"].length; i++) {
download_artefact(
const downloadPromise = download_artefact(
username,
access_key,
env,
Expand All @@ -198,26 +205,34 @@ function generate_report(args) {
),
build_payload["run_settings"]["reject_unauthorized"]
)
.then(function (resp) {
//Files downloaded
console.log(resp);
})
.catch(function (err) {
console.log(err);
});
downloadPromises.push(downloadPromise)
}
resolve("Done");

Promise.allSettled(downloadPromises)
.then((results) => {
// results is an array of objects
for (const result of results) {
if (result.status == 'fulfilled') {
console.log(result.value);
} else if (result.status == 'rejected') {
console.log(result.reason);
}
}
resolve("Done");
})
.catch((error) => {
// This catch block will not be executed
console.log(error);
resolve("Done");
});

})
.catch(function (err) {
console.log("Error occured while getting the build response", err);
});
});
}

module.exports = function (args) {
generate_report(args)
.then(function (resp) {})
.catch(function (err) {
console.log("ERR:", err);
});
module.exports = {
generate_report:generate_report
};
140 changes: 133 additions & 7 deletions commands/utils/batch/batch_runner.js
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,9 @@ const { del } = require("request");
const { delete_archive } = require("../archive.js");
const poller = require("../poller/poller.js");
const builds = require("../poller/build");
const batcher = require("./batcher.js");
const reports = require("../../../commands/generate_reports.js");
const { fail } = require("yargs");

var batchCounter = 0;
var totalBatches = 0;
Expand Down Expand Up @@ -91,6 +94,7 @@ async function run(lt_config, batches, env) {
.archive_project(lt_config)
.then(function (file_obj) {
project_file = file_obj["name"];
lt_config["run_settings"]["project_file"] = project_file;
//upload the project and get the project link
uploader
.upload_zip(lt_config, file_obj["name"], "project", env)
Expand Down Expand Up @@ -121,14 +125,15 @@ async function run(lt_config, batches, env) {
access_key: lt_config["lambdatest_auth"]["access_key"],
type: "cypress"
});

run_test(
payload,
env,
lt_config.run_settings.reject_unauthorized
)
.then(function (session_id) {
delete_archive(project_file);
if (lt_config["run_settings"]["retry_failed"] == false ) {
delete_archive(project_file);
}
delete_archive(file_obj["name"]);
//listen to control+c signal and stop tests
process.on("SIGINT", async () => {
Expand All @@ -148,13 +153,48 @@ async function run(lt_config, batches, env) {
});
if (
lt_config["run_settings"]["sync"] == true ||
(lt_config["tunnel_settings"]["tunnel"] && lt_config["tunnel_settings"]["autostart"])
(lt_config["tunnel_settings"]["tunnel"] && lt_config["tunnel_settings"]["autostart"]) || (lt_config["run_settings"]["retry_failed"] == true )
) {
console.log("Waiting for build to finish...");
poller
.poll_build(lt_config, session_id, env)
.then(function (exit_code) {
resolve(exit_code);
poller.update_status(true);
poller.poll_build(lt_config, session_id, env)
.then( function (result) {
const { exit_code, build_info } = result;
if (lt_config["run_settings"]["retry_failed"] == true && build_info != null ) {
let failed_tests = [];
for (i = 0; i < build_info["data"].length; i++) {
if (build_info["data"][i]["status_ind"] == "failed" ) {
failed_tests.push(build_info["data"][i]["path"]);
}
}
if (failed_tests.length > 0) {
console.log("retrying these failed tests "+ failed_tests)
lt_config["run_settings"]["specs"]=failed_tests;
batcher
.make_batches(lt_config)
.then(function (batches) {
retry_run(lt_config, batches, env)
.then(function (exit_code) {
if (exit_code) {
console.log("retried failed tests ended with exit code " + exit_code);
}
resolve(exit_code);
})
.catch(function (error) {
console.log(error);
resolve(1);
});
})
.catch(function (err) {
console.log(err);
resolve(1);
});
} else {
resolve(exit_code);
}
} else {
resolve(exit_code);
}
})
.catch(function (err) {
console.log(
Expand Down Expand Up @@ -193,6 +233,92 @@ async function run(lt_config, batches, env) {
});
}

// Re-run the previously failed specs in a fresh build and settle with that
// build's exit code.
//
// Expects lt_config["run_settings"]["project_file"] to point at the project
// archive uploaded by the original run (set in run()); only the test batch is
// re-archived and re-uploaded here.
//
// Params:
//   lt_config - merged CLI/config settings; "test_suite" is overwritten with
//               the retry batch.
//   batches   - list of spec batches; only batches[0] is used for the retry.
//   env       - target environment name.
// Returns a Promise that resolves with the numeric exit code of the retry
// build (0 = success) or rejects if the retry build could not be started.
// Every failure path settles the promise so the caller can never hang
// waiting on it (the original logged errors without resolving/rejecting).
async function retry_run(lt_config, batches, env) {
  totalBatches = batches.length;
  return new Promise(function (resolve, reject) {
    lt_config["test_suite"] = batches[0];
    archive
      .archive_batch(lt_config, batches[0], env)
      .then(function (file_obj) {
        uploader
          .upload_zip(lt_config, file_obj["name"], "tests", env)
          .then(function (resp) {
            var payload = JSON.stringify({
              payload: {
                test_file: resp["value"]["message"].split("?")[0],
              },
              username: lt_config["lambdatest_auth"]["username"],
              access_key: lt_config["lambdatest_auth"]["access_key"],
              type: "cypress"
            });

            run_test(
              payload,
              env,
              lt_config.run_settings.reject_unauthorized
            )
              .then(function (session_id) {
                // The retry build is running remotely; the local archives are
                // no longer needed.
                delete_archive(lt_config["run_settings"]["project_file"]);
                delete_archive(file_obj["name"]);

                // Allow Control+C to terminate the remote session cleanly.
                process.on("SIGINT", async () => {
                  try {
                    console.log(
                      "Retry - Control+c signal received.\nTrying to Terminate the processes"
                    );
                    await builds.stop_cypress_session(
                      lt_config,
                      session_id,
                      env
                    );
                    resolve(0);
                  } catch (e) {
                    console.log("Retry - Could not exit process. Try Again!!!");
                  }
                });
                if (
                  lt_config["run_settings"]["sync"] == true ||
                  (lt_config["tunnel_settings"]["tunnel"] && lt_config["tunnel_settings"]["autostart"])
                ) {
                  console.log("Retry - Waiting for build to finish...");
                  poller.update_status(true);
                  poller.poll_build(lt_config, session_id, env)
                    .then(function (result) {
                      // poll_build resolves {exit_code, build_info}; only the
                      // exit code matters here — a failed retry is not
                      // retried again. (The original destructured a
                      // non-existent "build_json" key.)
                      resolve(result.exit_code);
                    })
                    .catch(function (err) {
                      console.log(
                        "Retry - Some error occured in getting build updates",
                        err.message
                      );
                      // Settle instead of hanging: losing the poll means we
                      // cannot confirm success, so report failure.
                      resolve(1);
                    });
                } else {
                  // Fire-and-forget mode: the retry build was started
                  // successfully, which is all we can confirm.
                  resolve(0);
                }
              })
              .catch(function (err) {
                console.log("Retry - Error occured while creating tests", err);
                reject(err);
              });
          })
          .catch(function (err) {
            console.log("Retry - Not able to archive the batch of test files", err);
            reject(err);
          });
      })
      .catch(function (err) {
        console.log("Retry - Unable to archive the project");
        console.log(err);
        reject(err);
      });
  });
}

module.exports = {
run_batches: run,
run_batches_retry: retry_run,
};
1 change: 1 addition & 0 deletions commands/utils/poller/build_stats.js
Original file line number Diff line number Diff line change
Expand Up @@ -106,6 +106,7 @@ function get_build_info(lt_config, session_id, env, update_status, callback) {
) {
get_build_info_count = get_build_info_count + 1;
if (get_build_info_count > 4) {
get_build_info_count = 0;
update_status(false);
return callback(null, JSON.parse(body));
}
Expand Down
13 changes: 7 additions & 6 deletions commands/utils/poller/poller.js
Original file line number Diff line number Diff line change
Expand Up @@ -23,10 +23,10 @@ function poll_build(lt_config, session_id, env) {
if (err == null) {
build_stats
.get_completed_build_info(lt_config, session_id, env)
.then(function (build_info) {
.then(async function (build_info) {
if (!build_info || build_info.data == null) {
console.log("Build info not found");
resolve(1);
resolve({exit_code:1, build_info:build_info});
return;
}
let stats = {};
Expand Down Expand Up @@ -55,24 +55,25 @@ function poll_build(lt_config, session_id, env) {
reject_unauthorized:
lt_config.run_settings.reject_unauthorized,
};
reports(args);

await reports.generate_report(args)
}
if (
Object.keys(stats).length == 1 &&
(Object.keys(stats).includes("completed") ||
Object.keys(stats).includes("passed"))
) {
resolve(0);
resolve({exit_code:0, build_info:build_info});
} else {
resolve(1);
resolve({exit_code:1, build_info:build_info});
}
})
.catch(function (err) {
console.log("Error", err);
});
} else {
console.log(err);
resolve(1);
resolve({exit_code:1, build_info:null});
}
}
);
Expand Down
10 changes: 10 additions & 0 deletions commands/utils/set_args.js
Original file line number Diff line number Diff line change
Expand Up @@ -387,6 +387,16 @@ function sync_args_from_cmd(args) {
lt_config["run_settings"]["network_sse"] = false;
}

if ("retry_failed" in args) {
if (args["retry_failed"] == "true") {
lt_config.run_settings.retry_failed = true;
} else {
lt_config.run_settings.retry_failed = false;
}
} else if (lt_config["run_settings"]["retry_failed"] && !lt_config["run_settings"]["retry_failed"]) {
lt_config["run_settings"]["retry_failed"] = false;
}

if ("headless" in args) {
lt_config["run_settings"]["headless"] = args["headless"];
} else if (!lt_config["run_settings"]["headless"]) {
Expand Down
9 changes: 7 additions & 2 deletions index.js
Original file line number Diff line number Diff line change
Expand Up @@ -213,6 +213,11 @@ const argv = require("yargs")
describe: "show command logs on dashboard.",
type: "string",
})
.option("ret_fail", {
alias: "retry_failed",
describe: "run failed tests in a new build.",
type: "bool",
})
.option("net_http2", {
alias: "network_http2",
describe: "Capture Http2 Network logs",
Expand All @@ -227,7 +232,7 @@ const argv = require("yargs")
alias: "network_sse",
describe: "Bypass sse events calls for Network logs",
type: "bool",
});;
});
},
function (argv) {
require("./commands/run")(argv);
Expand Down Expand Up @@ -344,7 +349,7 @@ const argv = require("yargs")
});
},
function (argv) {
require("./commands/generate_reports")(argv);
require("./commands/generate_reports").generate_report(argv);
}
)
.help().argv;