Commit 78bf04b

bugs

1 parent 72e4090 commit 78bf04b
1 file changed: docs/render_submissions.py (+96 additions, -114 deletions)
@@ -39,9 +39,12 @@ def get_pytest_info(path_to_logs, repo_name, branch_name):
         }
         report_file_path = os.path.join(path_to_logs, pytest_hash, "report.json")
         if not os.path.exists(report_file_path):
-            reason_for_failure = open(
-                os.path.join(path_to_logs, pytest_hash, "test_output.txt")
-            ).read()
+            if os.path.exists(os.path.join(path_to_logs, pytest_hash, "test_output.txt")):
+                reason_for_failure = open(
+                    os.path.join(path_to_logs, pytest_hash, "test_output.txt")
+                ).read()
+            else:
+                reason_for_failure = "Unknown failure."
             pytest_info[testname]["failed_to_run"] = reason_for_failure
             return pytest_info
         pytest_report = json.load(open(report_file_path))
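
Note: the new guard above only falls back to "Unknown failure." when test_output.txt is missing. A minimal standalone sketch of the same guarded-read pattern, assuming the log layout used in the diff; the helper name and the use of a context manager are illustrative, not part of the commit:

import os


def read_failure_reason(path_to_logs: str, pytest_hash: str) -> str:
    """Return the captured pytest output if it exists, else a fallback marker."""
    output_path = os.path.join(path_to_logs, pytest_hash, "test_output.txt")
    if os.path.exists(output_path):
        # A context manager closes the file, unlike the bare open().read() in the diff.
        with open(output_path) as f:
            return f.read()
    return "Unknown failure."
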
@@ -56,7 +59,7 @@ def get_pytest_info(path_to_logs, repo_name, branch_name):
         if "passed" not in pytest_summary:
             pytest_summary["passed"] = 0
         for test in pytest_report["tests"]:
-            if test["outcome"] == "passed":
+            if test["outcome"] in {"passed", "skipped"}:
                 continue
             if "longrepr" in test:
                 failure_string = test["longrepr"]
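
The change above also skips tests whose outcome is "skipped" when collecting failures, instead of treating them as failures. A tiny sketch of that filter over a pytest-JSON-report-style test list; the report entries below are invented and simplified (real entries carry more fields):

# Invented, simplified excerpt of a pytest JSON report's "tests" list.
tests = [
    {"nodeid": "tests/test_a.py::test_ok", "outcome": "passed"},
    {"nodeid": "tests/test_a.py::test_skip", "outcome": "skipped"},
    {"nodeid": "tests/test_a.py::test_bad", "outcome": "failed", "longrepr": "AssertionError"},
]

# Keep only genuine failures, mirroring the new membership check.
failures = {
    t["nodeid"]: t.get("longrepr", "")
    for t in tests
    if t["outcome"] not in {"passed", "skipped"}
}
print(failures)  # {'tests/test_a.py::test_bad': 'AssertionError'}
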
@@ -85,17 +88,6 @@ def get_pytest_info(path_to_logs, repo_name, branch_name):
 
 
 def get_coverage_info(path_to_logs, repo_name, branch_name):
-    # coverage_fp = open(os.path.join(path_to_logs, pytest_hash, "coverage.json"))
-    # for filename, file_coverage in json.load(coverage_fp)["files"].items():
-    # if not any(relevant_function.startswith(filename) for relevant_function in relevant_functions):
-    # continue
-    # for funcname, func_coverage in file_coverage["functions"].items():
-    # if f"{filename}::{funcname}" not in relevant_functions: continue
-    # pycov_info[testname][f"{filename}::{funcname}"] = {
-    # "implementation": submission_info["function_impls"][f"{filename}::{funcname}"],
-    # "executed_lines": func_coverage["executed_lines"],
-    # "executed_branches": func_coverage["executed_branches"]
-    # }
     raise NotImplementedError
 
 
@@ -182,91 +174,80 @@ def render_mds(subfolder="docs"):
 | Name | Repos Resolved (/{num_repos}) | Test Duration (s) | Date | Analysis | Github |
 |------|:-------------------------:|:--------------------:|:----------:|----|----| """

-    for branch_name in tqdm.tqdm(glob.glob(os.path.join(analysis_files_path, "*"))):
-        branch_name = os.path.basename(branch_name)
-        if branch_name in {"blank", "repos", "submission_repos"}:
+    for org_path in tqdm.tqdm(glob.glob(os.path.join(analysis_files_path, "*"))):
+        org_name = os.path.basename(org_path)
+        if org_name in {"blank", "repos", "submission_repos"}:
             continue
         repos_resolved = 0
         # cum_passed = 0
         total_duration = 0.0
-        # TODO better way to have submission info loaded up before get into repos...
-        submission_info = None
-        submission_page = """# Submission Name: **DISPLAYNAME_GOES_HERE** (split: SPLIT_GOES_HERE)
+        for branch_path in glob.glob(os.path.join(org_path, "*.json")):
+            branch_metrics = json.load(open(branch_path))
+            submission_info = branch_metrics["submission_info"]
+            split = submission_info["split"]
+            org_name = submission_info["org_name"]
+            project_page_link = submission_info["project_page"]
+            display_name = submission_info["display_name"]
+            submission_date = submission_info["submission_date"]
+            branch_name = submission_info["branch"]
+            submission_page = f"""# Submission Name: **{display_name}** (split: {split})

 | Repository | Resolved | Pass Rate | Test Duration (s) | Analysis | Github Link |
 |------------|---------|:-----:|:-----:|-----|-----|"""
-        for repo_file in glob.glob(
-            os.path.join(analysis_files_path, branch_name, "*.json")
-        ):
-            repo_metrics_output_file = os.path.join(
-                analysis_files_path, branch_name, repo_file
-            )
-            repo_metrics = json.load(open(repo_metrics_output_file))
-            repo_name = os.path.basename(repo_file[: -len(".json")])
-            if submission_info is None:
-                submission_info = repo_metrics["submission_info"]
-                split = submission_info["split"]
-                org_name = submission_info["org_name"]
-                project_page_link = submission_info["project_page"]
-                display_name = submission_info["display_name"]
-                submission_date = submission_info["submission_date"]
-                branch_name = submission_info["branch"]
-                submission_page = submission_page.replace(
-                    "DISPLAYNAME_GOES_HERE", display_name
-                ).replace("SPLIT_GOES_HERE", split)
-            submission_repo_page = (
-                f"# **{display_name}**: {repo_name}"
-            )
-            for pytest_group, pytest_info in repo_metrics.items():
-                if pytest_group == "submission_info":
-                    continue
-                pytest_group = os.path.basename(pytest_group.strip("/"))
-                patch_diff = (
-                    f"""\n\n## Patch diff\n```diff\n{pytest_info['patch_diff']}```"""
+
+            for repo_name, repo_pytest_results in branch_metrics.items():
+                if repo_name == "submission_info": continue
+                submission_repo_page = (
+                    f"# **{display_name}**: {repo_name}"
                 )
-                if "failed_to_run" in pytest_info:
-                    submission_repo_page += f"""\n## Failed to run pytests\n```\n{pytest_info['failed_to_run']}\n```"""
-                    resolved = False
-                    pytest_details = "Pytest failed"
-                    duration = "Failed."
-                else:
-                    submission_repo_page += """\n## Pytest Summary
+                for pytest_group, pytest_info in repo_pytest_results.items():
+                    pytest_group = os.path.basename(pytest_group.strip("/"))
+                    patch_diff = (
+                        f"""\n\n## Patch diff\n```diff\n{pytest_info['patch_diff']}```"""
+                    )
+                    if "failed_to_run" in pytest_info:
+                        submission_repo_page += f"""\n## Failed to run pytests for test `{pytest_group}`\n```\n{pytest_info['failed_to_run']}\n```"""
+                        resolved = False
+                        pytest_details = "Pytest failed"
+                        duration = "Failed."
+                    else:
+                        submission_repo_page += f"""\n## Pytest Summary for test `{pytest_group}`
 | status | count |
 |:---------|:-----:|
 """
-                    total_duration += pytest_info["duration"]
-                    # cum_passed += pytest_info["summary"]["passed"]
-                    for category, count in pytest_info["summary"].items():
-                        if category not in {"duration"}:
-                            submission_repo_page += f"""| {category} | {count} |\n"""
-                        else:
+                        total_duration += pytest_info["duration"]
+                        # cum_passed += pytest_info["summary"]["passed"]
+                        for category, count in pytest_info["summary"].items():
+                            if category not in {"duration"}:
+                                submission_repo_page += f"""| {category} | {count} |\n"""
+                            else:
+                                submission_repo_page += (
+                                    f"""| {category} | {float(count):.2f}s |\n"""
+                                )
+
+                        submission_repo_page += "\n## Failed pytests:\n\n"
+                        for testname, failure in pytest_info["failures"].items():
+                            shortened_testname = os.path.basename(testname)
                             submission_repo_page += (
-                                f"""| {category} | {float(count):.2f}s |\n"""
+                                f"### {shortened_testname}\n\n<details><summary> <pre>{shortened_testname}"
+                                f"</pre></summary><pre>\n{failure['failure_string']}\n</pre>\n</details>\n"
                             )
-
-                    submission_repo_page += "\n## Failed pytest:\n\n"
-                    for testname, failure in pytest_info["failures"].items():
-                        shortened_testname = os.path.basename(testname)
-                        submission_repo_page += (
-                            f"### {shortened_testname}\n\n<details><summary> <pre>{shortened_testname}"
-                            f"</pre></summary><pre>\n{failure['failure_string']}\n</pre>\n</details>\n"
+                        resolved = ("failed" not in pytest_info["summary"]) or (
+                            pytest_info["summary"]["failed"] == 0
                         )
-                    resolved = ("failed" not in pytest_info["summary"]) or (
-                        pytest_info["summary"]["failed"] == 0
-                    )
-                    repos_resolved += 1
-                    pytest_details = f"{pytest_info['summary']['passed']} / {pytest_info['summary']['collected']}"
-                    duration = f"{pytest_info['duration']:.2f}"
-                github_hyperlink = f"{project_page_link}/{repo_name}" if branch_name == "reference" else f"{project_page_link}/{repo_name}/tree/{branch_name}"
-                submission_page += f"""
+                        repos_resolved += int(resolved)
+                        pytest_details = f"{pytest_info['summary']['passed']} / {pytest_info['summary']['collected']}"
+                        duration = f"{pytest_info['duration']:.2f}"
+                    github_hyperlink = f"{project_page_link}/{repo_name}" if branch_name == "reference" else f"{project_page_link}/{repo_name}/tree/{branch_name}"
+                    submission_page += f"""
 | {repo_name} | {'Yes' if resolved else 'No'} | {pytest_details} | {duration} | [Analysis](/{f'analysis_{org_name}_{branch_name}_{repo_name}'}) | [Github]({github_hyperlink}) |"""
-            back_button = (
-                f"[back to {display_name} summary](/{f'analysis_{org_name}_{branch_name}'})\n\n"
-            )
-            with open(
-                os.path.join(subfolder, f"analysis_{org_name}_{branch_name}_{repo_name}.md"), "w"
-            ) as wf:
-                wf.write(back_button + submission_repo_page + patch_diff)
+                back_button = (
+                    f"[back to {display_name} summary](/{f'analysis_{org_name}_{branch_name}'})\n\n"
+                )
+                with open(
+                    os.path.join(subfolder, f"analysis_{org_name}_{branch_name}_{repo_name}.md"), "w"
+                ) as wf:
+                    wf.write(back_button + submission_repo_page + patch_diff)
         analysis_link = f"[Analysis](/{f'analysis_{org_name}_{branch_name}'})"
         github_link = f"[Github]({project_page_link})"
         leaderboard[split] += (
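
The rewritten render_mds loop above reads one JSON file per (org, branch) from analysis_files_path/<org_name>/<branch_name>.json instead of one file per repository. A sketch of the layout it appears to expect; all values are invented placeholders, and only the key names referenced in the diff (submission_info, patch_diff, duration, summary, failures, failed_to_run) come from the code:

# Hypothetical contents of analysis_files_path/example-org/example-branch.json
example_branch_metrics = {
    "submission_info": {
        "org_name": "example-org",
        "branch": "example-branch",
        "display_name": "Example Submission",
        "submission_date": "NA",
        "split": "lite",
        "project_page": "https://example.com/example-org",  # placeholder
    },
    # One key per repository, as written by main(); values come from get_pytest_info().
    "example-repo": {
        # One key per pytest group / test path.
        "tests/test_example.py": {
            "patch_diff": "...",
            "duration": 1.23,
            "summary": {"passed": 9, "failed": 1, "collected": 10},
            "failures": {"tests/test_example.py::test_x": {"failure_string": "..."}},
            # or, when the run crashed: "failed_to_run": "<captured output>"
        },
    },
}
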
@@ -355,7 +336,20 @@ def main(args):
             f"--commit0-dot-file-path {analysis_files_path}/repos/.commit0.yaml"
         )
         branch_name = "reference"
-        os.makedirs(os.path.join(analysis_files_path, branch_name), exist_ok=True)
+        org_name = "commit0"
+        submission_metrics_output_file = os.path.join(
+            analysis_files_path, org_name, f"{branch_name}.json"
+        )
+        submission_details = {"submission_info": {
+            "org_name": org_name,
+            "branch": branch_name,
+            "display_name": "Reference (Gold)",
+            "submission_date": "NA",
+            "split": args.split,
+            "project_page": "https://github.com/commit-0",
+        }}
+
+        os.makedirs(os.path.join(analysis_files_path, org_name), exist_ok=True)
         if not args.keep_previous_eval:
             for repo_log_path in glob.glob(f"{os.getcwd()}/logs/pytest/*"):
                 if os.path.exists(os.path.join(repo_log_path, branch_name)):
@@ -364,40 +358,26 @@ def main(args):
             "commit0 evaluate --reference "
             f"--commit0-dot-file-path {analysis_files_path}/repos/.commit0.yaml"
         )
-
         # get coverage and pytest info for each repo
         for example in dataset:
             repo_name = example["repo"].split("/")[-1]
             if args.split != "all" and repo_name not in SPLIT[args.split]:
                 continue

-            repo_metrics_output_file = os.path.join(
-                analysis_files_path, branch_name, f"{repo_name}.json"
-            )
-
             path_to_logs = f"{os.getcwd()}/logs/pytest/{repo_name}/{branch_name}"
             pytest_results = get_pytest_info(path_to_logs, repo_name, branch_name)
-            pytest_results["submission_info"] = {
-                "org_name": "gold",
-                "branch": "reference",
-                "display_name": "Reference (Gold)",
-                "submission_date": "NA",
-                "split": args.split,
-                "project_page": "https://github.com/commit-0",
-            }
-            json.dump(pytest_results, open(repo_metrics_output_file, "w"), indent=4)
+            submission_details[repo_name] = pytest_results
+            json.dump(submission_details, open(submission_metrics_output_file, "w"), indent=4)
+            print(f"Saved pytest info to {submission_metrics_output_file}")

     if args.analyze_submissions:
-        commit0_dot_file_path = os.path.join(
-            analysis_files_path, "submission_repos", ".commit0.yaml"
-        )
         if not args.keep_previous_eval:
             for subfolder in glob.glob(os.path.join(analysis_files_path, "*")):
                 if os.path.basename(subfolder.rstrip("/")) not in {
                     "blank",
-                    "reference",
                     "repos",
                     "submission_repos",
+                    "commit0"
                 }:
                     try:
                         print(f"Clearing {subfolder}")
@@ -406,10 +386,17 @@ def main(args):
                         print(f"{e}: when removing {subfolder}")

         for submission in tqdm.tqdm(submission_dataset):
-            # submission_details = {"submission_info": submission}
+            submission_details = {"submission_info": submission}
             branch_name = submission["branch"]
             org_name = submission["org_name"]
-            os.makedirs(os.path.join(analysis_files_path, branch_name), exist_ok=True)
+            submission_metrics_output_file = os.path.join(
+                analysis_files_path, org_name, f"{branch_name}.json"
+            )
+            os.makedirs(os.path.join(analysis_files_path, org_name), exist_ok=True)
+            commit0_dot_file_path = os.path.join(
+                analysis_files_path, "submission_repos", org_name, ".commit0.yaml"
+            )
+            print("commit0_dot_file_path", commit0_dot_file_path)
             if not args.keep_previous_eval:
                 for repo_log_path in glob.glob(f"{os.getcwd()}/logs/pytest/*"):
                     if os.path.exists(os.path.join(repo_log_path, branch_name)):
@@ -443,16 +430,11 @@ def main(args):
                 if args.split != "all" and repo_name not in SPLIT[args.split]:
                     continue

-                repo_metrics_output_file = os.path.join(
-                    analysis_files_path, branch_name, f"{repo_name}.json"
-                )
-
                 path_to_logs = f"{os.getcwd()}/logs/pytest/{repo_name}/{branch_name}"
                 pytest_results = get_pytest_info(path_to_logs, repo_name, branch_name)
-                # submission_details.update(pytest_results)
-                pytest_results["submission_info"] = submission
-                json.dump(pytest_results, open(repo_metrics_output_file, "w"), indent=4)
-                # json.dump(submission_details, open(repo_metrics_output_file, "w"), indent=4)
+                submission_details[repo_name] = pytest_results
+                json.dump(submission_details, open(submission_metrics_output_file, "w"), indent=4)
+                print(f"Saved pytest info to {submission_metrics_output_file}")

     if not args.keep_previous_eval:
         for analysis_file in glob.glob("docs/analysis*.md"):
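
Both the reference and submission paths above now accumulate per-repo results into a single submission_details dict and dump one JSON file per (org, branch). A self-contained sketch of that accumulate-then-dump flow, plus re-reading the file the way render_mds now does; all names, paths, and numbers are invented:

import json
import os
import tempfile

# Invented stand-in for {"submission_info": submission, <repo>: get_pytest_info(...)}.
submission_details = {
    "submission_info": {"org_name": "example-org", "branch": "example-branch"},
    "example-repo": {
        "tests/test_example.py": {
            "duration": 1.23,
            "summary": {"passed": 9, "failed": 1, "collected": 10},
            "failures": {},
            "patch_diff": "...",
        }
    },
}

with tempfile.TemporaryDirectory() as analysis_files_path:
    # One JSON file per (org, branch), mirroring analysis_files_path/<org>/<branch>.json
    org_dir = os.path.join(analysis_files_path, "example-org")
    os.makedirs(org_dir, exist_ok=True)
    submission_metrics_output_file = os.path.join(org_dir, "example-branch.json")
    with open(submission_metrics_output_file, "w") as wf:
        json.dump(submission_details, wf, indent=4)

    # Re-read the file the way the rewritten render_mds loop does.
    branch_metrics = json.load(open(submission_metrics_output_file))
    for repo_name, repo_pytest_results in branch_metrics.items():
        if repo_name == "submission_info":
            continue
        for pytest_group, pytest_info in repo_pytest_results.items():
            summary = pytest_info["summary"]
            resolved = summary.get("failed", 0) == 0
            print(f"{repo_name} [{os.path.basename(pytest_group)}]: "
                  f"{summary['passed']} / {summary['collected']} "
                  f"({'resolved' if resolved else 'unresolved'})")
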
