Support directly rendering codegen scheduling #1019

Merged
4 changes: 2 additions & 2 deletions database/schema.md
@@ -128,7 +128,7 @@ series aid cid value

**TODO**

### pull_request_builds
### pull_request_build

**TODO**

@@ -164,4 +164,4 @@ aid step start end

### error

**TODO**
**TODO**
58 changes: 46 additions & 12 deletions database/src/pool/sqlite.rs
@@ -162,6 +162,17 @@ static MIGRATIONS: &[&str] = &[
PRIMARY KEY(aid, cid, crate)
);
"#,
r#"alter table pull_request_builds rename to pull_request_build"#,
r#"
create table raw_self_profile(
aid integer references artifact(id) on delete cascade on update cascade,
cid integer references collection(id) on delete cascade on update cascade,
crate text not null references benchmark(name) on delete cascade on update cascade,
profile text not null,
cache text not null,
PRIMARY KEY(aid, cid, crate, profile, cache)
);
"#,
];

#[async_trait::async_trait]
@@ -470,7 +481,7 @@ impl Connection for SqliteConnection {
) {
self.raw_ref()
.prepare_cached(
"insert into pull_request_builds (pr, complete, requested, include, exclude, runs) VALUES (?, 0, strftime('%s','now'), ?, ?, ?)",
"insert into pull_request_build (pr, complete, requested, include, exclude, runs) VALUES (?, 0, strftime('%s','now'), ?, ?, ?)",
)
.unwrap()
.execute(params![pr, include, exclude, &runs])
@@ -479,7 +490,7 @@ impl Connection for SqliteConnection {
async fn pr_attach_commit(&self, pr: u32, sha: &str, parent_sha: &str) -> bool {
self.raw_ref()
.prepare_cached(
"update pull_request_builds SET bors_sha = ?, parent_sha = ?
"update pull_request_build SET bors_sha = ?, parent_sha = ?
where pr = ? and bors_sha is null",
)
.unwrap()
@@ -490,7 +501,7 @@ impl Connection for SqliteConnection {
async fn queued_commits(&self) -> Vec<QueuedCommit> {
self.raw_ref()
.prepare_cached(
"select pr, bors_sha, parent_sha, include, exclude, runs from pull_request_builds
"select pr, bors_sha, parent_sha, include, exclude, runs from pull_request_build
where complete is false and bors_sha is not null
order by requested asc",
)
@@ -514,7 +525,7 @@ impl Connection for SqliteConnection {
let count = self
.raw_ref()
.execute(
"update pull_request_builds SET complete = 1 where sha = ? and complete = 0",
"update pull_request_build SET complete = 1 where sha = ? and complete = 0",
params![sha],
)
.unwrap();
@@ -524,7 +535,7 @@ impl Connection for SqliteConnection {
assert_eq!(count, 1, "sha is unique column");
self.raw_ref()
.query_row(
"select pr, sha, parent_sha, include, exclude, runs from pull_request_builds
"select pr, sha, parent_sha, include, exclude, runs from pull_request_build
where sha = ?",
params![sha],
|row| {
@@ -867,7 +878,7 @@ impl Connection for SqliteConnection {
async fn parent_of(&self, sha: &str) -> Option<String> {
let mut shas = self
.raw_ref()
.prepare_cached("select parent_sha from pull_request_builds where bors_sha = ?")
.prepare_cached("select parent_sha from pull_request_build where bors_sha = ?")
.unwrap()
.query(params![sha])
.unwrap()
@@ -880,7 +891,7 @@ impl Connection for SqliteConnection {
async fn pr_of(&self, sha: &str) -> Option<u32> {
self.raw_ref()
.query_row(
"select pr from pull_request_builds where bors_sha = ?",
"select pr from pull_request_build where bors_sha = ?",
params![sha],
|row| Ok(row.get(0).unwrap()),
)
@@ -903,12 +914,35 @@ impl Connection for SqliteConnection {
}
async fn list_self_profile(
&self,
_aid: ArtifactId,
_crate_: &str,
_profile: &str,
_scenario: &str,
aid: ArtifactId,
crate_: &str,
profile: &str,
scenario: &str,
) -> Vec<(ArtifactIdNumber, i32)> {
Vec::new()
self.raw_ref()
.prepare(
"select aid, cid from raw_self_profile where
crate = ?1 and
profile = ?2 and
cache = ?3 and
aid = (select id from artifact where name = ?4);",
)
.unwrap()
.query_map(
params![
&crate_,
profile,
scenario,
&match aid {
ArtifactId::Commit(c) => c.sha,
ArtifactId::Tag(a) => a,
}
],
|r| Ok((ArtifactIdNumber(r.get::<_, i32>(0)? as u32), r.get(1)?)),
)
.unwrap()
.collect::<Result<_, _>>()
.unwrap()
}

async fn get_bootstrap(&self, aids: &[ArtifactIdNumber]) -> Vec<Option<Duration>> {
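Note on the sqlite changes above: `pull_request_builds` is renamed to `pull_request_build`, and the new `raw_self_profile` table records which (artifact, collection, crate, profile, cache) combinations have raw self-profile data, which `list_self_profile` then reads back by artifact name. The write side is not part of this file's hunks; the following is only a minimal sketch of what recording a row could look like, assuming direct access to the same rusqlite connection (the function name and parameters are illustrative, not the PR's actual API):

```rust
use rusqlite::{params, Connection, Result};

// Hypothetical helper: mark that a raw self-profile exists for this
// artifact/collection/benchmark/profile/cache combination. Mirrors the
// primary key of the raw_self_profile table created by the migration above.
fn record_raw_self_profile_sketch(
    conn: &Connection,
    aid: i32,       // artifact(id)
    cid: i32,       // collection(id)
    krate: &str,    // benchmark(name)
    profile: &str,  // e.g. "debug" or "opt"
    cache: &str,    // scenario / incremental cache state
) -> Result<()> {
    conn.prepare_cached(
        "insert or ignore into raw_self_profile (aid, cid, crate, profile, cache)
         values (?1, ?2, ?3, ?4, ?5)",
    )?
    .execute(params![aid, cid, krate, profile, cache])?;
    Ok(())
}
```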
117 changes: 77 additions & 40 deletions site/src/request_handlers/self_profile.rs
@@ -16,42 +16,80 @@ use crate::server::{Request, Response, ResponseHeaders};

pub async fn handle_self_profile_processed_download(
body: self_profile_raw::Request,
params: HashMap<String, String>,
mut params: HashMap<String, String>,
ctxt: &SiteCtxt,
) -> http::Response<hyper::Body> {
let title = format!(
"{}: {} {}",
&body.commit[..std::cmp::min(7, body.commit.len())],
body.benchmark,
body.run_name
);
let diff_against = params.remove("base_commit");
if params
.get("type")
.map_or(false, |t| t != "codegen-schedule")
&& diff_against.is_some()
{
let mut resp = Response::new("Only codegen-schedule supports diffing right now.".into());
*resp.status_mut() = StatusCode::BAD_REQUEST;
return resp;
}

let title = if let Some(diff_against) = diff_against.as_ref() {
format!(
"{} vs {}: {} {}",
&diff_against[..std::cmp::min(7, diff_against.len())],
&body.commit[..std::cmp::min(7, body.commit.len())],
body.benchmark,
body.run_name
)
} else {
format!(
"{}: {} {}",
&body.commit[..std::cmp::min(7, body.commit.len())],
body.benchmark,
body.run_name
)
};

let start = Instant::now();

let (url, is_tarball) = match handle_self_profile_raw(body, ctxt).await {
Ok(v) => (v.url, v.is_tarball),
let base_data = if let Some(diff_against) = diff_against {
match handle_self_profile_raw(
self_profile_raw::Request {
commit: diff_against,
benchmark: body.benchmark.clone(),
run_name: body.run_name.clone(),
cid: None,
},
ctxt,
)
.await
{
Ok(v) => match get_self_profile_raw_data(&v.url).await {
Ok(v) => Some(v),
Err(e) => return e,
},
Err(e) => {
let mut resp = Response::new(e.into());
*resp.status_mut() = StatusCode::BAD_REQUEST;
return resp;
}
}
} else {
None
};

let data = match handle_self_profile_raw(body, ctxt).await {
Ok(v) => match get_self_profile_raw_data(&v.url).await {
Ok(v) => v,
Err(e) => return e,
},
Err(e) => {
let mut resp = Response::new(e.into());
*resp.status_mut() = StatusCode::BAD_REQUEST;
return resp;
}
};

if is_tarball {
let mut resp =
Response::new("Processing legacy format self-profile data is not supported".into());
*resp.status_mut() = StatusCode::INTERNAL_SERVER_ERROR;
return resp;
}

let data = match get_self_profile_raw_data(&url).await {
Ok(v) => v,
Err(e) => return e,
};

log::trace!("got data in {:?}", start.elapsed());

let output = match crate::self_profile::generate(&title, data, params) {
let output = match crate::self_profile::generate(&title, base_data, data, params) {
Ok(c) => c,
Err(e) => {
log::error!("Failed to generate json {:?}", e);
@@ -63,8 +101,12 @@ pub async fn handle_self_profile_processed_download(
let mut builder = http::Response::builder()
.header_typed(if output.filename.ends_with("json") {
ContentType::json()
} else {
} else if output.filename.ends_with("svg") {
ContentType::from("image/svg+xml".parse::<mime::Mime>().unwrap())
} else if output.filename.ends_with("html") {
ContentType::html()
} else {
unreachable!()
})
.status(StatusCode::OK);

@@ -257,6 +299,7 @@ fn sort_self_profile(
async fn get_self_profile_raw_data(url: &str) -> Result<Vec<u8>, Response> {
log::trace!("downloading {}", url);

let start = Instant::now();
let resp = match reqwest::get(url).await {
Ok(r) => r,
Err(e) => {
@@ -283,6 +326,12 @@ async fn get_self_profile_raw_data(url: &str) -> Result<Vec<u8>, Response> {
}
};

log::trace!(
"downloaded {} bytes in {:?}",
compressed.len(),
start.elapsed()
);

let mut data = Vec::new();

match snap::read::FrameDecoder::new(compressed.reader()).read_to_end(&mut data) {
@@ -462,7 +511,7 @@ pub async fn handle_self_profile_raw(
let aids_and_cids = conn
.list_self_profile(
ArtifactId::Commit(database::Commit {
sha: body.commit,
sha: body.commit.clone(),
date: database::Date::empty(),
}),
bench_name,
@@ -473,7 +522,7 @@
let (aid, first_cid) = aids_and_cids
.first()
.copied()
.ok_or_else(|| format!("No results for this commit"))?;
.ok_or_else(|| format!("No results for {}", body.commit))?;

let cid = match body.cid {
Some(cid) => {
@@ -500,27 +549,15 @@
.map(|(_, cid)| cid)
.collect::<Vec<_>>();

return match fetch(&cids, cid, format!("{}.mm_profdata.sz", url_prefix), false).await {
return match fetch(&cids, cid, format!("{}.mm_profdata.sz", url_prefix)).await {
Ok(fetched) => Ok(fetched),
Err(new_error) => {
match fetch(&cids, cid, format!("{}.tar.sz", url_prefix), true).await {
Ok(fetched) => Ok(fetched),
Err(old_error) => {
// Both files failed to fetch; return the errors for both:
Err(format!(
"mm_profdata download failed: {:?}, tarball download failed: {:?}",
new_error, old_error
))
}
}
}
Err(new_error) => Err(format!("mm_profdata download failed: {:?}", new_error,)),
};

async fn fetch(
cids: &[i32],
cid: i32,
url: String,
is_tarball: bool,
) -> ServerResult<self_profile_raw::Response> {
let resp = reqwest::Client::new()
.head(&url)
Expand All @@ -538,7 +575,7 @@ pub async fn handle_self_profile_raw(
cids: cids.to_vec(),
cid,
url,
is_tarball,
is_tarball: false,
})
}
}
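With the handler changes above, the processed self-profile endpoint accepts a `base_commit` query parameter, but only when `type=codegen-schedule`; the handler downloads both raw profiles and passes the baseline alongside the current data into `crate::self_profile::generate`. A rough client-side sketch of requesting such a diff follows; the endpoint path and placeholder argument values are assumptions, only the `type` and `base_commit` parameters come from this diff:

```rust
// Illustrative only: fetch the rendered codegen-schedule diff between a
// commit and a base commit. The URL path is an assumption, not taken from
// this diff.
async fn fetch_codegen_schedule_diff(
    commit: &str,
    base_commit: &str,
    benchmark: &str,
    run_name: &str,
) -> anyhow::Result<Vec<u8>> {
    let url = format!(
        "https://perf.rust-lang.org/perf/processed-self-profile?\
         commit={}&benchmark={}&run_name={}&type=codegen-schedule&base_commit={}",
        commit, benchmark, run_name, base_commit
    );
    let body = reqwest::get(url).await?.bytes().await?;
    Ok(body.to_vec())
}
```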
19 changes: 18 additions & 1 deletion site/src/self_profile.rs
@@ -4,6 +4,7 @@
use anyhow::Context;
use std::collections::HashMap;

mod codegen_schedule;
pub mod crox;
pub mod flamegraph;

@@ -15,6 +16,7 @@ pub struct Output {

pub fn generate(
title: &str,
self_profile_base_data: Option<Vec<u8>>,
self_profile_data: Vec<u8>,
mut params: HashMap<String, String>,
) -> anyhow::Result<Output> {
@@ -38,6 +40,21 @@ pub fn generate(
is_download: false,
})
}
_ => anyhow::bail!("Unknown type, specify type={crox,flamegraph}"),
Some("codegen-schedule") => {
let opt =
serde_json::from_str(&serde_json::to_string(&params).unwrap()).context("params")?;
Ok(Output {
filename: "schedule.html",
data: codegen_schedule::generate(
title,
self_profile_base_data,
self_profile_data,
opt,
)
.context("codegen_schedule")?,
is_download: false,
})
}
_ => anyhow::bail!("Unknown type, specify type={crox,flamegraph,codegen-schedule}"),
}
}
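The `codegen_schedule` module itself is added elsewhere in this PR and is not shown here; from the call site above one can only infer the rough shape of its entry point. A hedged sketch of that assumed interface follows; the option struct and its field are placeholders, not the module's real definition:

```rust
// Assumed shape only, inferred from the call in generate():
//   codegen_schedule::generate(title, self_profile_base_data, self_profile_data, opt)
// returning the rendered HTML bytes for "schedule.html".
mod codegen_schedule_sketch {
    use serde::Deserialize;

    #[derive(Debug, Deserialize)]
    pub struct Opt {
        // Placeholder field: the real options are deserialized from the
        // forwarded URL query parameters, whatever fields the module defines.
        #[serde(default)]
        pub example_option: Option<String>,
    }

    pub fn generate(
        title: &str,
        base: Option<Vec<u8>>,
        data: Vec<u8>,
        opt: Opt,
    ) -> anyhow::Result<Vec<u8>> {
        // The real implementation parses the self-profile event data and
        // renders an HTML timeline of codegen units, optionally diffed
        // against the baseline; this stub only pins down the assumed
        // interface.
        let _ = (title, base, data, opt);
        anyhow::bail!("sketch only; see the codegen_schedule module added by this PR")
    }
}
```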