Dev #413

Merged
merged 26 commits on Mar 28, 2025

Changes from all commits
26 commits
756bbec
added converter
japneetlambdatest Mar 18, 2025
14ce967
npm version
japneetlambdatest Mar 19, 2025
6d6ffce
Merge branch 'dev' into HYP-14107
japneetlambdatest Mar 20, 2025
b292e2d
Revert "use bs accessibility"
abhishek-lambda Mar 21, 2025
d728e92
add privateCloud Support
abhishek-lambda Mar 24, 2025
931d74f
Merge pull request #377 from japneetlambdatest/HYP-14107
HRanjan-11 Mar 25, 2025
dfb813e
remove timezone
abhishek-lambda Mar 26, 2025
bc7d7c5
remove default config
abhishek-lambda Mar 26, 2025
d6e3e72
Merge branch 'dev' of https://github.com/LambdaTest/lambdatest-cypres…
abhishek-lambda Mar 26, 2025
f754327
version bump
abhishek-lambda Mar 26, 2025
18c147d
version fix
abhishek-lambda Mar 26, 2025
f9c10c4
Merge pull request #401 from abhishek-lambda/CYP-1005
abhishek-lambda Mar 26, 2025
0c6cbbe
version bump
abhishek-lambda Mar 26, 2025
70dee85
Merge pull request #405 from abhishek-lambda/CYP-1005
abhishek-lambda Mar 26, 2025
cffc306
add fix
abhishek-lambda Mar 26, 2025
5b9d041
Merge pull request #406 from abhishek-lambda/CYP-1005
abhishek-lambda Mar 26, 2025
36e5be4
fix private cloud
abhishek-lambda Mar 26, 2025
7c6cee1
Merge pull request #407 from abhishek-lambda/CYP-1005
abhishek-lambda Mar 26, 2025
341f950
remove check from PC
abhishek-lambda Mar 26, 2025
2ba8d08
Merge pull request #408 from abhishek-lambda/CYP-1005
abhishek-lambda Mar 26, 2025
64e3c6e
fix
abhishek-lambda Mar 26, 2025
f4ceaf3
Merge pull request #409 from abhishek-lambda/CYP-1005
abhishek-lambda Mar 26, 2025
52d3ae0
correct privateCloud
abhishek-lambda Mar 27, 2025
bddf8f5
Merge pull request #411 from abhishek-lambda/CYP-1005
abhishek-lambda Mar 27, 2025
eda451a
add args
abhishek-lambda Mar 27, 2025
53437cc
Merge pull request #412 from abhishek-lambda/CYP-1005
abhishek-lambda Mar 27, 2025
272 changes: 138 additions & 134 deletions commands/utils/batch/batch_runner.js
@@ -15,6 +15,7 @@ const reports = require("../../../commands/generate_reports.js");
const { fail } = require("yargs");
const https = require('https');
const axios = require('axios');
const converter=require("../../../converter/converter.js");

var batchCounter = 0;
var totalBatches = 0;
@@ -99,146 +100,149 @@ async function run(lt_config, batches, env) {
(Both sides of this hunk carry the same archive → upload → run → poll chain; the change wraps it in the new converter(...) call and re-indents it. Resulting code:)

  //console.log("Total number of batches " + totalBatches);
  return new Promise(function (resolve, reject) {
    //archive the project i.e the current working directory
    converter(lt_config, "he_conv.yaml").then(function () {
      console.log("json converted to YAML")
      archive
        .archive_project(lt_config)
        .then(function (file_obj) {
          project_file = file_obj["name"];
          lt_config["run_settings"]["project_file"] = project_file;
          //upload the project and get the project link
          uploader
            .upload_zip(lt_config, file_obj["name"], "project", env)
            .then(async function (resp) {
              // TODO: remove hard check for undefined. handle it using nested promise rejection
              if (resp == undefined) {
                console.log(
                  "Either your creds are invalid or something is wrong with the configs provided"
                );
                return;
              }
              //add project link in lt config
              project_url = resp["value"]["message"].split("?")[0].split("/");
              project_url = project_url[project_url.length - 1];
              lt_config["run_settings"]["project_url"] = project_url;
              lt_config["test_suite"] = batches[0];
              archive
                .archive_batch(lt_config, batches[0], env)
                .then(async function (file_obj) {
                  uploader
                    .upload_zip(lt_config, file_obj["name"], "tests", env)
                    .then(async function (resp) {
                      var payload = JSON.stringify({
                        payload: {
                          test_file: resp["value"]["message"].split("?")[0],
                        },
                        username: lt_config["lambdatest_auth"]["username"],
                        access_key: lt_config["lambdatest_auth"]["access_key"],
                        type: "cypress"
                      });
                      run_test(
                        payload,
                        env,
                        lt_config.run_settings.reject_unauthorized
                      )
                        .then(function (session_id) {
                          if (!lt_config["run_settings"]["retry_failed"]) {
                            delete_archive(project_file);
                          }
                          delete_archive(file_obj["name"]);
                          //listen to control+c signal and stop tests
                          process.on("SIGINT", async () => {
                            try {
                              console.log(
                                "Control+c signal received.\nTrying to Terminate the processes"
                              );
                              await builds.stop_cypress_session(
                                lt_config,
                                session_id,
                                env
                              );
                              resolve(0);
                            } catch (e) {
                              console.log("Could not exit process. Try Again!!!");
                            }
                          });
                          if (
                            lt_config["run_settings"]["sync"] == true ||
                            (lt_config["tunnel_settings"]["tunnel"] && lt_config["tunnel_settings"]["autostart"]) || (lt_config["run_settings"]["retry_failed"] == true)
                          ) {
                            console.log("Waiting for build to finish...");
                            poller.update_status(true);
                            poller.poll_build(lt_config, session_id, env)
                              .then(function (result) {
                                const { exit_code, build_info } = result;
                                if (lt_config["run_settings"]["retry_failed"] == true && build_info != null) {
                                  let failed_test_suites = [];
                                  for (i = 0; i < build_info["data"].length; i++) {
                                    if (build_info["data"][i]["status_ind"] == "failed") {
                                      let failed_spec = findSpecFile(lt_config["test_suite"], build_info["data"][i])
                                      let failed_suite = {
                                        spec_file: failed_spec,
                                        path: build_info["data"][i]["path"],
                                        browser: build_info["data"][i]["browser"],
                                        version: build_info["data"][i]["version"],
                                        platform: build_info["data"][i]["platform"]
                                      }
                                      failed_test_suites.push(failed_suite);
                                    }
                                  }
                                  if (failed_test_suites.length > 0) {
                                    console.log("Retrying failed tests.")
                                    let batches = [failed_test_suites]
                                    retry_run(lt_config, batches, env)
                                      .then(function (exit_code) {
                                        if (exit_code) {
                                          console.log("retried failed tests ended with exit code " + exit_code);
                                        }
                                        resolve(exit_code);
                                      })
                                      .catch(function (error) {
                                        console.log(error);
                                        resolve(1);
                                      });
                                  } else {
                                    resolve(exit_code);
                                  }
                                } else {
                                  resolve(exit_code);
                                }
                              })
                              .catch(function (err) {
                                console.log(
                                  "Some error occured in getting build updates",
                                  err.message
                                );
                              });
                          } else {
                            resolve(0);
                          }
                        })
                        .catch(function (err) {
                          console.log("Error occured while creating tests", err);
                        });
                    })
                    .catch(function (err) {
                      delete_archive(file_obj["name"]);
                      console.log("Error occured while uploading files ", err);
                    });
                })
                .catch(function (err) {
                  console.log("Not able to archive the batch of test files", err);
                });
            })
            .catch(function (err) {
              console.log(err);
              archive.delete_archive(project_file);
              reject(err);
            });
        })
        .catch(function (err) {
          console.log("Unable to archive the project");
          console.log(err);
          reject(err);
        });
    });
  });
}
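The wrapper above calls converter/converter.js, which is not among the files changed in this PR. A minimal sketch of what such a module could look like, assuming it simply serializes the resolved lt_config object to YAML via the js-yaml package and writes it to the file name passed by the caller; both the js-yaml dependency and the write location are assumptions, not confirmed by this diff.

// Hypothetical sketch of converter/converter.js -- not the module shipped in the repo.
// Assumes a js-yaml dependency and that the caller's file name (here "he_conv.yaml")
// is written to the current working directory.
const fs = require("fs");
const yaml = require("js-yaml");

function converter(lt_config, out_file) {
  return new Promise(function (resolve, reject) {
    let yaml_str;
    try {
      // serialize the resolved lt_config object to a YAML string
      yaml_str = yaml.dump(lt_config);
    } catch (err) {
      return reject(err);
    }
    // write the YAML next to the project before it gets archived
    fs.writeFile(out_file, yaml_str, function (err) {
      if (err) {
        reject(err);
      } else {
        resolve(out_file);
      }
    });
  });
}

module.exports = converter;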

1 change: 1 addition & 0 deletions commands/utils/batch/batcher.js
@@ -109,4 +109,5 @@ function make_batches(lt_config) {

module.exports = {
  make_batches: make_batches,
  get_spec_files: get_spec_files,
};
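get_spec_files is now exported alongside make_batches. A minimal sketch of how another command could pull it in; the destructuring call site below is illustrative only, and get_spec_files' signature is not shown in this PR.

// Illustrative consumer of the newly exported helper (require path is hypothetical
// and depends on where the caller lives in the repo):
const { make_batches, get_spec_files } = require("./commands/utils/batch/batcher.js");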
17 changes: 17 additions & 0 deletions commands/utils/set_args.js
@@ -448,6 +448,23 @@ function sync_args_from_cmd(args) {
lt_config["run_settings"]["geo_location"] = "";
}

//Check for timezone
if ("timezone" in args) {
lt_config["run_settings"]["timezone"] = args["timezone"];
} else if (!lt_config["run_settings"]["timezone"]) {
lt_config["run_settings"]["timezone"] = "";
}

if ("privateCloud" in args) {
if (args["privateCloud"]=="true"){
lt_config["run_settings"]["privateCloud"] = true;
}else{
lt_config["run_settings"]["privateCloud"] = false;
}
}else if (!lt_config["run_settings"]["privateCloud"]) {
lt_config["run_settings"]["privateCloud"] = false ;
}

//Check for stop on failure location
if ("stop_on_failure" in args) {
lt_config["run_settings"]["stop_on_failure"] = true;
6 changes: 6 additions & 0 deletions commands/utils/validate.js
@@ -452,6 +452,12 @@ module.exports = validate_config = function (lt_config, validation_configs) {
      }
    }

    if ("privateCloud" in lt_config["run_settings"]) {
      if (![true, false].includes(lt_config["run_settings"]["privateCloud"])) {
        reject("Error!! boolean value is expected in privateCloud key");
      }
    }

    if (lt_config)
      resolve(cypress_version);
  });
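For illustration, the kinds of run_settings values the new privateCloud check accepts or rejects; the rest of lt_config is trimmed, and these objects are examples rather than fixtures from the repo.

// Only real booleans pass the new check; any other type hits the reject above.
const passes = { run_settings: { privateCloud: true } };
const alsoPasses = { run_settings: { privateCloud: false } };
const fails = { run_settings: { privateCloud: "true" } }; // string triggers "boolean value is expected in privateCloud key"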