Add manage pages, self-add projects & history graphs

This commit is contained in:
Luke Street
2025-04-23 23:18:11 -06:00
parent 8eb275fbac
commit b3607c6346
21 changed files with 1532 additions and 227 deletions
Generated
+11
View File
@@ -1025,6 +1025,7 @@ dependencies = [
"axum",
"base64 0.22.1",
"decomp-dev-core",
"maud",
"octocrab",
"rand 0.9.1",
"serde",
@@ -1250,6 +1251,15 @@ dependencies = [
"serde",
]
[[package]]
name = "english-to-cron"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1a13a7d5e0ab3872c3ee478366eae624d89ab953d30276b0eee08169774ceb73"
dependencies = [
"regex",
]
[[package]]
name = "equivalent"
version = "1.0.2"
@@ -5014,6 +5024,7 @@ checksum = "6a5597b569b4712cf78aa0c9ae29742461b7bda1e49c2a5fdad1d79bf022f8f0"
dependencies = [
"chrono",
"croner",
"english-to-cron",
"num-derive",
"num-traits",
"tokio",
+1
View File
@@ -20,6 +20,7 @@ axum = { version = "0.8", features = ["macros"] }
futures-util = "0.3.31"
hex = "0.4"
image = "0.25"
maud = { version = "0.27", features = ["axum"] }
mime = "0.3"
objdiff-core = { version = "2.5", features = ["bindings"] }
#objdiff-core = { path = "../objdiff/objdiff-core", features = ["bindings"] }
+2 -1
View File
@@ -9,6 +9,7 @@ anyhow.workspace = true
axum.workspace = true
base64 = "0.22"
decomp-dev-core = { path = "../core" }
maud.workspace = true
octocrab.workspace = true
rand = "0.9"
serde.workspace = true
@@ -16,4 +17,4 @@ serde_json.workspace = true
time.workspace = true
tower-sessions.workspace = true
tracing.workspace = true
url.workspace = true
url.workspace = true
+76 -14
View File
@@ -1,10 +1,12 @@
use anyhow::{Context, anyhow, bail};
use anyhow::{Context, Result, anyhow, bail};
use axum::{
extract::{FromRef, FromRequestParts, OptionalFromRequestParts, Query, State},
http::{StatusCode, header::ACCEPT, request::Parts},
Extension,
extract::{FromRef, FromRequestParts, OptionalFromRequestParts, OriginalUri, Query, State},
http::{Method, StatusCode, header::ACCEPT, request::Parts},
response::{IntoResponse, Redirect, Response},
};
use base64::{Engine as _, engine::general_purpose::URL_SAFE_NO_PAD};
use maud::{html, DOCTYPE};
use decomp_dev_core::{AppError, config::GitHubConfig};
use octocrab::{
Octocrab,
@@ -13,9 +15,11 @@ use octocrab::{
use rand::{TryRngCore, rngs::OsRng};
use time::{Duration, UtcDateTime};
use tower_sessions::Session;
use url::form_urlencoded;
const GITHUB_OAUTH_STATE: &str = "github_oauth_state";
const CURRENT_USER: &str = "current_user";
const RETURN_TO: &str = "return_to";
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct StoredOAuth {
@@ -50,6 +54,13 @@ pub struct CurrentUser {
}
impl CurrentUser {
pub fn client(&self) -> Result<Octocrab> {
Octocrab::builder()
.oauth(self.oauth.clone().into())
.build()
.context("Failed to create GitHub client")
}
pub fn permissions_for_repo(&self, id: u64) -> Permissions {
self.repos
.iter()
@@ -82,8 +93,14 @@ impl From<Repository> for CurrentUserRepo {
}
}
#[derive(serde::Deserialize)]
pub struct LoginQuery {
pub return_to: Option<String>,
}
pub async fn login(
session: Session,
Query(LoginQuery { return_to }): Query<LoginQuery>,
State(config): State<GitHubConfig>,
current_user: Option<CurrentUser>,
) -> Result<Response, AppError> {
@@ -98,18 +115,40 @@ pub async fn login(
OsRng.try_fill_bytes(&mut bytes)?;
let nonce = URL_SAFE_NO_PAD.encode(bytes);
session.insert(GITHUB_OAUTH_STATE, nonce.clone()).await?;
if let Some(return_to) = return_to {
if return_to.starts_with('/') {
session.insert(RETURN_TO, return_to).await?;
}
}
let mut redirect_url = url::Url::parse("https://github.com/login/oauth/authorize")?;
let mut query = redirect_url.query_pairs_mut();
query.append_pair("client_id", &config.client_id);
query.append_pair("redirect_uri", &config.redirect_uri);
query.append_pair("state", &nonce);
drop(query);
Ok(Redirect::to(redirect_url.as_str()).into_response())
Ok(html! {
(DOCTYPE)
html {
head {
meta charset="utf-8";
title { "Logging in... • decomp.dev" }
meta http-equiv="refresh" content=(format!("0;URL={redirect_url}"));
meta name="viewport" content="width=device-width, initial-scale=1.0";
meta name="color-scheme" content="dark light";
meta name="darkreader-lock";
link rel="stylesheet" href="/css/main.min.css?3";
}
body {
.loading-container {
div aria-busy="true" { "Logging in..." }
}
}
}
}.into_response())
}
pub async fn logout(session: Session) -> Result<Response, AppError> {
session.remove_value(CURRENT_USER).await?;
session.remove_value(GITHUB_OAUTH_STATE).await?;
session.flush().await?;
Ok(Redirect::to("/").into_response())
}
@@ -175,8 +214,7 @@ pub async fn oauth(
Query(OAuthQuery { code, state: oauth_state }): Query<OAuthQuery>,
State(config): State<GitHubConfig>,
) -> Result<Response, AppError> {
let existing_state = session.get::<String>(GITHUB_OAUTH_STATE).await?;
let Some(existing_state) = existing_state else {
let Some(existing_state) = session.remove::<String>(GITHUB_OAUTH_STATE).await? else {
tracing::warn!("No state found in session");
return Ok((StatusCode::BAD_REQUEST, "No state found").into_response());
};
@@ -184,12 +222,15 @@ pub async fn oauth(
tracing::warn!("State mismatch: expected {}, got {}", existing_state, oauth_state);
return Ok((StatusCode::BAD_REQUEST, "State mismatch").into_response());
}
session.remove_value(GITHUB_OAUTH_STATE).await?;
let current_user = fetch_access_token(&config, &code).await?;
session.insert(CURRENT_USER, current_user).await?;
Ok(Redirect::to("/").into_response())
if let Some(return_to) = session.remove::<String>(RETURN_TO).await? {
Ok(Redirect::to(&return_to).into_response())
} else {
Ok(Redirect::to("/").into_response())
}
}
fn oauth_client() -> Octocrab {
@@ -225,6 +266,7 @@ async fn fetch_access_token(config: &GitHubConfig, code: &str) -> Result<Current
client
.current()
.list_repos_for_authenticated_user()
.visibility("public")
.per_page(100)
.send()
.await
@@ -272,12 +314,32 @@ where
GitHubConfig: FromRef<S>,
S: Send + Sync,
{
type Rejection = (StatusCode, &'static str);
type Rejection = Response;
async fn from_request_parts(parts: &mut Parts, state: &S) -> Result<Self, Self::Rejection> {
<CurrentUser as OptionalFromRequestParts<S>>::from_request_parts(parts, state)
.await?
.ok_or((StatusCode::UNAUTHORIZED, "Unauthorized"))
match <CurrentUser as OptionalFromRequestParts<S>>::from_request_parts(parts, state).await {
Ok(Some(user)) => Ok(user),
Ok(None) => {
let method =
Method::from_request_parts(parts, state).await.unwrap_or(Method::OPTIONS);
if method != Method::GET {
return Err((StatusCode::UNAUTHORIZED, "Unauthorized").into_response());
}
let path_and_query =
<Extension<OriginalUri> as FromRequestParts<S>>::from_request_parts(
parts, state,
)
.await
.ok()
.and_then(|uri| uri.path_and_query().cloned())
.ok_or_else(|| (StatusCode::UNAUTHORIZED, "Unauthorized").into_response())?;
let mut redirect_uri = "/login?return_to=".to_string();
redirect_uri
.extend(form_urlencoded::byte_serialize(path_and_query.as_str().as_bytes()));
Err(Redirect::to(&redirect_uri).into_response())
}
Err(e) => Err(e.into_response()),
}
}
}
+1 -1
View File
@@ -20,7 +20,7 @@ impl IntoResponse for AppError {
fn into_response(self) -> Response {
match self {
Self::Status(status) if status == StatusCode::NOT_FOUND => {
(status, "Not found!!").into_response()
(status, "Not found").into_response()
}
Self::Status(status) => status.into_response(),
Self::Internal(err) => {
+92 -1
View File
@@ -1,4 +1,4 @@
use std::{borrow::Cow, sync::Arc};
use std::{borrow::Cow, str::FromStr, sync::Arc};
use objdiff_core::bindings::report::{Measures, Report, ReportCategory, ReportUnit};
use serde::Serialize;
@@ -18,6 +18,23 @@ pub struct Project {
pub enable_pr_comments: bool,
}
impl Default for Project {
    /// Returns an empty project: zero ID, blank owner/repo, and no optional
    /// metadata set.
    fn default() -> Self {
        Self {
            id: 0,
            owner: String::new(),
            repo: String::new(),
            name: None,
            short_name: None,
            default_category: None,
            default_version: None,
            platform: None,
            workflow_id: None,
            // PR comments are opt-out: new projects have them enabled.
            enable_pr_comments: true,
        }
    }
}
impl Project {
pub fn name(&self) -> Cow<str> {
if let Some(name) = self.name.as_ref() {
@@ -129,3 +146,77 @@ pub struct FrogressMapping {
pub project_category_name: String,
pub project_measure: String,
}
/// Platforms supported by decompilation projects.
///
/// [`Platform::to_str`] and the [`FromStr`] impl round-trip the short
/// machine identifier (e.g. `"nds"`), while [`Platform::name`] returns the
/// human-readable console name for display.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum Platform {
    GBA,
    GBC,
    N64,
    DS,
    PS,
    PS2,
    Switch,
    GC,
    Wii,
}

/// Every supported platform, in a fixed order suitable for listings.
pub const ALL_PLATFORMS: &[Platform] = &[
    Platform::GBA,
    Platform::GBC,
    Platform::N64,
    Platform::DS,
    Platform::PS,
    Platform::PS2,
    Platform::Switch,
    Platform::GC,
    Platform::Wii,
];

impl Platform {
    /// Short machine identifier. Round-trips through [`Platform::from_str`].
    pub fn to_str(self) -> &'static str {
        match self {
            Self::GBA => "gba",
            Self::GBC => "gbc",
            Self::N64 => "n64",
            // Note the identifier is "nds", not "ds".
            Self::DS => "nds",
            Self::PS => "ps",
            Self::PS2 => "ps2",
            Self::Switch => "switch",
            Self::GC => "gc",
            Self::Wii => "wii",
        }
    }

    /// Human-readable console name for display.
    // Uses `Self::` paths for consistency with `to_str` above.
    pub fn name(self) -> &'static str {
        match self {
            Self::GBA => "Game Boy Advance",
            Self::GBC => "Game Boy Color",
            Self::N64 => "Nintendo 64",
            Self::DS => "Nintendo DS",
            Self::PS => "PlayStation",
            Self::PS2 => "PlayStation 2",
            Self::Switch => "Nintendo Switch",
            Self::GC => "GameCube",
            Self::Wii => "Wii",
        }
    }
}

impl FromStr for Platform {
    type Err = ();

    /// Parses the short identifier produced by [`Platform::to_str`].
    /// Returns `Err(())` for any unknown identifier.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "gba" => Ok(Self::GBA),
            "gbc" => Ok(Self::GBC),
            "n64" => Ok(Self::N64),
            "nds" => Ok(Self::DS),
            "ps" => Ok(Self::PS),
            "ps2" => Ok(Self::PS2),
            "switch" => Ok(Self::Switch),
            "gc" => Ok(Self::GC),
            "wii" => Ok(Self::Wii),
            _ => Err(()),
        }
    }
}
+60 -20
View File
@@ -69,9 +69,9 @@ impl Database {
})
.build();
let db = Self { pool, report_cache, report_unit_cache };
db.fixup_report_units().await?;
db.migrate_reports().await?;
// db.cleanup_report_units().await?;
db.fixup_report_units().await.context("Fixing report units")?;
db.migrate_reports().await.context("Migrating reports")?;
// db.cleanup_report_units().await.context("Running report cleanup")?;
Ok(db)
}
@@ -765,13 +765,24 @@ impl Database {
pub async fn cleanup_report_units(&self) -> Result<()> {
let mut conn = self.pool.acquire().await?;
conn.execute("PRAGMA foreign_keys = OFF").await?;
let mut tx = conn.begin().await?;
let deleted_reports = sqlx::query!(
r#"
DELETE FROM reports
WHERE project_id NOT IN (SELECT id FROM projects)
"#,
)
.execute(&mut *tx)
.await?
.rows_affected();
let deleted_report_report_units = sqlx::query!(
r#"
DELETE FROM report_report_units
WHERE report_id NOT IN (SELECT id FROM reports)
"#,
)
.execute(&mut *conn)
.execute(&mut *tx)
.await?
.rows_affected();
let deleted_report_units = sqlx::query!(
@@ -780,12 +791,15 @@ impl Database {
WHERE id NOT IN (SELECT report_unit_id FROM report_report_units)
"#,
)
.execute(&mut *conn)
.execute(&mut *tx)
.await?
.rows_affected();
if deleted_report_units > 0 || deleted_report_report_units > 0 {
tx.commit().await?;
conn.execute("PRAGMA foreign_keys = ON").await?;
if deleted_reports > 0 || deleted_report_units > 0 || deleted_report_report_units > 0 {
tracing::info!(
"Deleted {} orphaned report units and {} orphaned mappings",
"Deleted {} orphaned reports, {} orphaned report units and {} orphaned mappings",
deleted_reports,
deleted_report_units,
deleted_report_report_units,
);
@@ -905,7 +919,7 @@ impl Database {
sqlx::query!(
r#"
UPDATE projects
SET workflow_id = ?
SET workflow_id = ?, updated_at = CURRENT_TIMESTAMP
WHERE id = ?
"#,
workflow_id,
@@ -927,7 +941,7 @@ impl Database {
sqlx::query!(
r#"
UPDATE projects
SET owner = ?, repo = ?
SET owner = ?, repo = ?, updated_at = CURRENT_TIMESTAMP
WHERE id = ?
"#,
owner,
@@ -939,23 +953,49 @@ impl Database {
Ok(())
}
pub async fn update_project_settings(
&self,
project_id: u64,
enable_pr_comments: bool,
default_version: Option<String>,
) -> Result<()> {
pub async fn update_project(&self, project: &Project) -> Result<()> {
let mut conn = self.pool.acquire().await?;
let project_id_db = project_id as i64;
let project_id = project.id as i64;
sqlx::query!(
r#"
UPDATE projects
SET enable_pr_comments = ?, default_version = ?
SET owner = ?, repo = ?, name = ?, short_name = ?, default_category = ?, default_version = ?, platform = ?, workflow_id = ?, enable_pr_comments = ?, updated_at = CURRENT_TIMESTAMP
WHERE id = ?
"#,
enable_pr_comments,
default_version,
project_id_db,
project.owner,
project.repo,
project.name,
project.short_name,
project.default_category,
project.default_version,
project.platform,
project.workflow_id,
project.enable_pr_comments,
project_id,
)
.execute(&mut *conn)
.await?;
Ok(())
}
pub async fn create_project(&self, project: &Project) -> Result<()> {
let mut conn = self.pool.acquire().await?;
let project_id = project.id as i64;
sqlx::query!(
r#"
INSERT INTO projects (id, owner, repo, name, short_name, default_category, default_version, platform, workflow_id, enable_pr_comments, created_at, updated_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)
"#,
project_id,
project.owner,
project.repo,
project.name,
project.short_name,
project.default_category,
project.default_version,
project.platform,
project.workflow_id,
project.enable_pr_comments,
)
.execute(&mut *conn)
.await?;
-17
View File
@@ -264,20 +264,3 @@ pub fn generate_comment(
}
comment
}
/// Maps a short platform identifier (e.g. `"nds"`) to a human-readable
/// console name; unknown identifiers are returned unchanged.
#[allow(unused)]
fn platform_name(platform: &str) -> &str {
    // Identifier → display-name pairs; looked up linearly (the table is tiny).
    const NAMES: &[(&str, &str)] = &[
        ("gc", "GameCube"),
        ("wii", "Wii"),
        ("n64", "Nintendo 64"),
        ("switch", "Nintendo Switch"),
        ("3ds", "Nintendo 3DS"),
        ("nds", "Nintendo DS"),
        ("gba", "Game Boy Advance"),
        ("gbc", "Game Boy Color"),
        ("ps", "PlayStation"),
        ("ps2", "PlayStation 2"),
    ];
    NAMES
        .iter()
        .find(|(id, _)| *id == platform)
        .map(|(_, name)| *name)
        .unwrap_or(platform)
}
+191 -51
View File
@@ -2,14 +2,14 @@ pub mod changes;
pub mod webhook;
use std::{
collections::{HashMap, hash_map::Entry},
collections::{HashMap, HashSet, hash_map::Entry},
ffi::OsStr,
io::{Cursor, Read},
pin::pin,
sync::{Arc, OnceLock},
};
use anyhow::{Context, Result};
use anyhow::{Context, Result, anyhow, bail};
use decomp_dev_core::{
config::GitHubConfig,
models::{Commit, Project},
@@ -20,7 +20,10 @@ use http::StatusCode;
use objdiff_core::bindings::report::Report;
use octocrab::{
GitHubError, Octocrab,
models::{ArtifactId, InstallationId, RunId, repos::RepoCommitPage, workflows::HeadCommit},
models::{
ArtifactId, InstallationId, InstallationRepositories, Repository, RunId,
repos::RepoCommitPage, workflows::HeadCommit,
},
params::actions::ArchiveFormat,
};
use regex::Regex;
@@ -36,35 +39,41 @@ pub struct GitHub {
pub installations: Option<Arc<Mutex<Installations>>>,
}
pub struct CachedInstallation {
pub client: Octocrab,
pub repositories: Vec<Repository>,
}
pub struct Installations {
pub app_client: Octocrab,
pub owner_to_installation: HashMap<String, InstallationId>,
pub clients: HashMap<InstallationId, Octocrab>,
pub clients: HashMap<InstallationId, CachedInstallation>,
pub repo_to_installation: HashMap<u64, InstallationId>,
}
impl Installations {
pub fn client_for_installation(
pub async fn client_for_installation(
&mut self,
installation_id: InstallationId,
owner: Option<&str>,
) -> Result<Octocrab> {
match self.clients.entry(installation_id) {
Entry::Occupied(entry) => Ok(entry.get().clone()),
Entry::Occupied(entry) => Ok(entry.get().client.clone()),
Entry::Vacant(entry) => {
// Create a new client for the installation
let client = self.app_client.installation(installation_id)?;
entry.insert(client.clone());
if let Some(owner) = owner {
self.owner_to_installation.insert(owner.to_string(), installation_id);
}
let repositories = list_installation_repositories(&client)
.await
.context("Failed to fetch installation repositories")?;
self.repo_to_installation
.extend(repositories.iter().map(|r| (r.id.into_inner(), installation_id)));
entry.insert(CachedInstallation { client: client.clone(), repositories });
Ok(client)
}
}
}
pub fn client_for_owner(&mut self, owner: &str) -> Result<Option<Octocrab>> {
if let Some(installation_id) = self.owner_to_installation.get(owner) {
return self.client_for_installation(*installation_id, None).map(Some);
pub async fn client_for_repo(&mut self, repo_id: u64) -> Result<Option<Octocrab>> {
if let Some(installation_id) = self.repo_to_installation.get(&repo_id) {
return self.client_for_installation(*installation_id).await.map(Some);
}
Ok(None)
}
@@ -77,24 +86,63 @@ pub struct GetCommit {
sha: String,
}
/// Query parameters for paginated GitHub list endpoints.
#[derive(serde::Serialize)]
struct PageParams {
    // Items per request; GitHub caps list endpoints at 100 per page.
    #[serde(skip_serializing_if = "Option::is_none")]
    per_page: Option<u8>,
    // Page index as sent to the API (callers here start at 1).
    #[serde(skip_serializing_if = "Option::is_none")]
    page: Option<u32>,
}
/// Fetches every repository accessible to a GitHub App installation,
/// following pagination until `total_count` repositories are collected.
///
/// Fix: the original embedded `?page={}` in the request path for follow-up
/// pages while *also* passing `page` via [`PageParams`], sending the
/// parameter twice (with serializer-dependent precedence). All requests now
/// go through the bare path with `PageParams` only.
async fn list_installation_repositories(app_client: &Octocrab) -> Result<Vec<Repository>> {
    const PER_PAGE: u8 = 100;
    let mut page: u32 = 1;
    let mut repositories: Vec<Repository> = Vec::new();
    loop {
        let response: InstallationRepositories = app_client
            .get(
                "/installation/repositories",
                Some(&PageParams { per_page: Some(PER_PAGE), page: Some(page) }),
            )
            .await?;
        // An empty page before reaching `total_count` means the API stopped
        // returning results; bail out rather than looping forever.
        if response.repositories.is_empty() {
            break;
        }
        let total_count = response.total_count as usize;
        repositories.extend(response.repositories);
        if repositories.len() >= total_count {
            break;
        }
        page += 1;
    }
    Ok(repositories)
}
async fn list_installations(app_client: Octocrab) -> Result<Installations> {
let mut owner_to_installation = HashMap::new();
let mut clients = HashMap::new();
let mut repo_to_installation = HashMap::new();
{
let mut stream =
pin!(app_client.apps().installations().send().await?.into_stream(&app_client));
while let Some(installation) = stream.try_next().await? {
let owner = installation.account.login;
if owner_to_installation.contains_key(&owner) {
tracing::warn!("Duplicate installation for {}", owner);
continue;
}
let client = app_client.installation(installation.id)?;
owner_to_installation.insert(owner.clone(), installation.id);
clients.insert(installation.id, client);
let repositories = list_installation_repositories(&client).await?;
for repository in &repositories {
if repo_to_installation
.insert(repository.id.into_inner(), installation.id)
.is_some()
{
tracing::warn!(
"Duplicate installation for repository {}",
repository.full_name.as_deref().unwrap_or_default()
);
}
}
clients.insert(installation.id, CachedInstallation { client, repositories });
}
}
Ok(Installations { app_client, owner_to_installation, clients })
Ok(Installations { app_client, clients, repo_to_installation })
}
impl GitHub {
@@ -118,8 +166,25 @@ impl GitHub {
let result =
list_installations(app_client).await.context("Failed to fetch installations")?;
tracing::info!("Found {} installations", result.clients.len());
for (owner, installation_id) in &result.owner_to_installation {
tracing::info!(" - {}: {}", owner, installation_id);
for (installation_id, cached) in &result.clients {
let owners = cached
.repositories
.iter()
.map(|r| r.owner.as_ref().map(|o| o.login.as_str()).unwrap_or_default())
.collect::<HashSet<_>>();
let mut owner = String::new();
for o in owners {
if !owner.is_empty() {
owner.push_str(", ");
}
owner.push_str(o);
}
tracing::info!(
" - {}: {} ({} repositories)",
owner,
installation_id,
cached.repositories.len()
);
}
Some(Arc::new(Mutex::new(result)))
} else {
@@ -145,10 +210,10 @@ impl GitHub {
}
}
pub async fn client_for(&self, owner: &str) -> Result<Octocrab> {
pub async fn client_for(&self, repo_id: u64) -> Result<Octocrab> {
if let Some(installations) = &self.installations {
let mut installations = installations.lock().await;
if let Some(client) = installations.client_for_owner(owner)? {
if let Some(client) = installations.client_for_repo(repo_id).await? {
return Ok(client);
}
}
@@ -156,14 +221,20 @@ impl GitHub {
}
}
pub async fn run(github: &GitHub, db: &Database, repo_id: u64, stop_run_id: u64) -> Result<()> {
pub async fn refresh_project(
github: &GitHub,
db: &Database,
repo_id: u64,
client_override: Option<&Octocrab>,
full_refresh: bool,
) -> Result<usize> {
let mut project_info = db
.get_project_info_by_id(repo_id, None)
.await
.context("Failed to fetch project info")?
.with_context(|| format!("Failed to fetch project info for ID {}", repo_id))?;
let repo = github
.client
let repo = client_override
.unwrap_or(&github.client)
.repos_by_id(project_info.project.id)
.get()
.await
@@ -189,7 +260,10 @@ pub async fn run(github: &GitHub, db: &Database, repo_id: u64, stop_run_id: u64)
let project = &project_info.project;
tracing::debug!("Refreshing project {}/{}", project.owner, project.repo);
let client = github.client_for(&project.owner).await?;
let client = match client_override {
Some(client) => client.clone(),
None => github.client_for(repo_id).await?,
};
let workflow_ids = if let Some(workflow_id) = &project.workflow_id {
vec![workflow_id.clone()]
@@ -204,7 +278,7 @@ pub async fn run(github: &GitHub, db: &Database, repo_id: u64, stop_run_id: u64)
};
if workflow_ids.is_empty() {
tracing::warn!("No workflows found for {}/{}", project.owner, project.repo);
return Ok(());
return Ok(0);
}
for workflow_id in workflow_ids {
let workflow_id =
@@ -239,16 +313,14 @@ pub async fn run(github: &GitHub, db: &Database, repo_id: u64, stop_run_id: u64)
}
};
for run in items {
if let Some(commit) = project_info.commit.as_ref() {
if run.head_sha == commit.sha {
break 'outer;
if !full_refresh {
if let Some(commit) = project_info.commit.as_ref() {
if run.head_sha == commit.sha {
break 'outer;
}
}
}
let run_id = run.id;
runs.push(run);
if run_id == RunId(stop_run_id) {
break 'outer;
}
}
page += 1;
}
@@ -291,7 +363,7 @@ pub async fn run(github: &GitHub, db: &Database, repo_id: u64, stop_run_id: u64)
TaskResult { run_id, commit, result }
});
}
let mut found_artifacts = false;
let mut imported_artifacts = 0;
while let Some(join_result) = set.join_next().await {
match join_result {
Ok(TaskResult {
@@ -316,7 +388,7 @@ pub async fn run(github: &GitHub, db: &Database, repo_id: u64, stop_run_id: u64)
commit.sha,
duration.as_millis()
);
found_artifacts = true;
imported_artifacts += 1;
}
}
Ok(TaskResult { run_id, commit, result: Err(e) }) => {
@@ -333,15 +405,15 @@ pub async fn run(github: &GitHub, db: &Database, repo_id: u64, stop_run_id: u64)
}
}
if found_artifacts {
if imported_artifacts > 0 {
if project.workflow_id.is_none() {
db.update_project_workflow_id(project.id, workflow_id).await?;
}
break;
return Ok(imported_artifacts);
}
}
Ok(())
Ok(0)
}
pub struct ProcessWorkflowRunResult {
@@ -388,6 +460,9 @@ pub async fn process_workflow_run(
result: DownloadArtifactResult,
}
for artifact in &artifacts {
if artifact.expired {
continue;
}
let artifact_name = artifact.name.clone();
let version =
if let Some(version) = regex.captures(&artifact_name).and_then(|c| c.name("version")) {
@@ -423,19 +498,29 @@ pub async fn process_workflow_run(
match join_result {
Ok(TaskResult { artifact_name: name, result: Ok(reports) }) => {
if reports.is_empty() {
tracing::warn!("No report found in artifact {}", name);
tracing::warn!("No report found in workflow run {} artifact {}", run_id, name);
} else {
for (version, report) in reports {
tracing::info!("Processed artifact {} ({})", name, version);
tracing::info!(
"Processed workflow run {} artifact {} ({})",
run_id,
name,
version
);
result.artifacts.push(ProcessArtifactResult { version, report });
}
}
}
Ok(TaskResult { artifact_name: name, result: Err(e) }) => {
tracing::error!("Failed to process artifact {}: {:?}", name, e);
tracing::error!(
"Failed to process workflow run {} artifact {}: {:?}",
run_id,
name,
e
);
}
Err(e) => {
tracing::error!("Failed to process artifact: {:?}", e);
tracing::error!("Failed to process workflow run {} artifact: {:?}", run_id, e);
}
}
}
@@ -484,8 +569,63 @@ async fn download_artifact(
pub fn commit_from_head_commit(commit: &HeadCommit) -> Commit {
Commit {
sha: commit.id.clone(),
timestamp: UtcDateTime::from_unix_timestamp(commit.timestamp.to_utc().timestamp_millis())
.unwrap_or_else(|_| UtcDateTime::now()),
timestamp: UtcDateTime::from_unix_timestamp(
commit.timestamp.to_utc().timestamp_millis() / 1000,
)
.unwrap_or(UtcDateTime::UNIX_EPOCH),
message: (!commit.message.is_empty()).then(|| commit.message.clone()),
}
}
/// Searches the repository's workflows for a recent completed push run on
/// the default branch whose artifacts contain progress reports.
///
/// Returns the workflow ID (path relative to `.github/workflows/`) of the
/// first workflow that produced a report, or an error if none did.
pub async fn check_for_reports(
    client: &Octocrab,
    project: &Project,
    repo: &Repository,
) -> Result<String> {
    // Use the explicitly configured workflow if set; otherwise try every
    // workflow defined in the repository.
    let workflow_ids = if let Some(workflow_id) = &project.workflow_id {
        vec![workflow_id.clone()]
    } else {
        let workflows = client
            .workflows(&project.owner, &project.repo)
            .list()
            .send()
            .await
            .context("Failed to fetch workflows")?;
        workflows.items.into_iter().map(|w| w.path).collect()
    };
    if workflow_ids.is_empty() {
        bail!("No workflows found in repository.");
    }
    let branch = repo.default_branch.as_deref().unwrap_or("main");
    for workflow_id in workflow_ids {
        let workflow_id =
            workflow_id.strip_prefix(".github/workflows/").unwrap_or(workflow_id.as_str());
        let result = client
            .workflows(&project.owner, &project.repo)
            .list_runs(workflow_id)
            .branch(branch)
            .event("push")
            .status("completed")
            .exclude_pull_requests(true)
            .send()
            .await;
        let items = match result {
            Ok(result) if result.items.is_empty() => continue,
            Ok(result) => result.items,
            // 404 here means the workflow has no runs (or was removed);
            // move on to the next candidate.
            Err(octocrab::Error::GitHub { source, .. })
                if matches!(*source, GitHubError { status_code: StatusCode::NOT_FOUND, .. }) =>
            {
                continue;
            }
            Err(e) => {
                return Err(e).context("Failed to fetch workflow runs");
            }
        };
        // Non-empty is guaranteed by the match arm above; avoid unwrap anyway.
        let Some(run) = items.first() else { continue };
        // `client`/`project` are already references — no extra borrow needed.
        let result = process_workflow_run(client, project, run.id).await?;
        if !result.artifacts.is_empty() {
            return Ok(workflow_id.to_string());
        }
    }
    Err(anyhow!("No workflow runs containing reports found."))
}
+31 -6
View File
@@ -81,7 +81,7 @@ pub async fn webhook(GitHubEvent { event, state }: GitHubEvent) -> Result<Respon
};
let client = if let Some(installation_id) = installation_id {
let mut installations = installations.lock().await;
installations.client_for_installation(installation_id, owner.as_deref())?
installations.client_for_installation(installation_id).await?
} else {
state.github.client.clone()
};
@@ -141,12 +141,8 @@ pub async fn webhook(GitHubEvent { event, state }: GitHubEvent) -> Result<Respon
InstallationWebhookEventAction::Deleted => {
// Remove the installation client
let mut installations = installations.lock().await;
if let Some(owner) = &owner {
installations.owner_to_installation.remove(owner);
} else {
tracing::warn!("Received installation deleted event with no owner");
}
if let Some(installation_id) = installation_id {
installations.repo_to_installation.retain(|_, v| *v != installation_id);
installations.clients.remove(&installation_id);
} else {
tracing::warn!(
@@ -157,6 +153,35 @@ pub async fn webhook(GitHubEvent { event, state }: GitHubEvent) -> Result<Respon
_ => {}
}
}
WebhookEventPayload::InstallationRepositories(inner) => {
tracing::info!(
"Installation {:?} for {} repositories changed",
inner.action,
owner.as_deref().unwrap_or("[unknown]")
);
let Some(installation_id) = installation_id else {
tracing::warn!("Received installation_repositories event with no installation ID");
return Ok((StatusCode::OK, "No installation ID").into_response());
};
let mut installations = installations.lock().await;
for repository in &inner.repositories_added {
tracing::info!("Added repository {}", repository.full_name);
installations
.repo_to_installation
.insert(repository.id.into_inner(), installation_id);
}
if !inner.repositories_removed.is_empty() {
for repository in &inner.repositories_removed {
tracing::info!("Removed repository {}", repository.full_name);
}
installations.repo_to_installation.retain(|repo, id| {
if *id != installation_id {
return true;
}
inner.repositories_removed.iter().any(|r| r.id.into_inner() == *repo)
});
}
}
_ => {}
}
Ok((StatusCode::OK, "Event processed").into_response())
+2 -2
View File
@@ -14,7 +14,7 @@ decomp-dev-github = { path = "../github" }
decomp-dev-images = { path = "../images" }
decomp-dev-scripts = { path = "../scripts" }
itertools = "0.14"
maud = { version = "0.27", features = ["axum"] }
maud.workspace = true
mime.workspace = true
objdiff-core.workspace = true
reqwest.workspace = true
@@ -23,7 +23,7 @@ serde_json.workspace = true
serde_yaml = "0.9"
time.workspace = true
timeago = { version = "0.4.2", default-features = false }
tokio-cron-scheduler = "0.13"
tokio-cron-scheduler = { version = "0.13", features = ["english"] }
tokio.workspace = true
tower = { version = "0.5", features = ["full"] }
tower-http = { version = "0.6", features = ["full"] }
+31 -12
View File
@@ -15,17 +15,28 @@ pub async fn create(
{
let state = state.clone();
sched
.add(Job::new_async("0 0/5 * * * *", move |_uuid, _l| {
.add(Job::new_async("every 5 minutes", move |_uuid, _l| {
let state = state.clone();
Box::pin(async move {
refresh_projects(&state).await.expect("Failed to refresh projects");
refresh_projects(&state, false).await.expect("Failed to refresh projects");
})
})?)
.await?;
}
{
let state = state.clone();
sched
.add(Job::new_async("every 12 hours", move |_uuid, _l| {
let state = state.clone();
Box::pin(async move {
refresh_projects(&state, true).await.expect("Failed to refresh projects");
})
})?)
.await?;
}
{
sched
.add(Job::new_async("0 0 0/24 * * *", move |_uuid, _l| {
.add(Job::new_async("at midnight", move |_uuid, _l| {
let state = state.clone();
Box::pin(async move {
state.db.cleanup_report_units().await.expect("Failed to clean up report units");
@@ -35,7 +46,7 @@ pub async fn create(
}
{
sched
.add(Job::new_async("0 0/1 * * * *", move |_uuid, _l| {
.add(Job::new_async("every 1 minute", move |_uuid, _l| {
let session_store = session_store.clone();
Box::pin(async move {
session_store
@@ -50,17 +61,25 @@ pub async fn create(
Ok(sched)
}
pub async fn refresh_projects(state: &AppState) -> Result<()> {
pub async fn refresh_projects(state: &AppState, full_refresh: bool) -> Result<()> {
for project_info in state.db.get_projects().await? {
// Skip projects with active app installations
if let Some(installations) = &state.github.installations {
let installations = installations.lock().await;
if installations.owner_to_installation.contains_key(&project_info.project.owner) {
continue;
if !full_refresh {
// Skip projects with active app installations
if let Some(installations) = &state.github.installations {
let installations = installations.lock().await;
if installations.repo_to_installation.contains_key(&project_info.project.id) {
continue;
}
}
}
if let Err(e) =
decomp_dev_github::run(&state.github, &state.db, project_info.project.id, 0).await
if let Err(e) = decomp_dev_github::refresh_project(
&state.github,
&state.db,
project_info.project.id,
None,
full_refresh,
)
.await
{
log::error!(
"Failed to refresh {}/{}: {:?}",
+36 -13
View File
@@ -1,6 +1,11 @@
use std::time::{Duration, Instant};
use std::{
sync::LazyLock,
time::{Duration, Instant},
};
use axum::http::StatusCode;
use decomp_dev_auth::CurrentUser;
use decomp_dev_core::AppError;
use maud::{Markup, PreEscaped, html};
use objdiff_core::bindings::report::Measures;
use time::{UtcDateTime, macros::format_description};
@@ -23,18 +28,9 @@ pub fn header() -> Markup {
meta name="viewport" content="width=device-width, initial-scale=1.0";
meta name="color-scheme" content="dark light";
meta name="darkreader-lock";
link rel="stylesheet" href="/css/main.min.css";
script src="/js/main.min.js" defer;
script {
r#"let theme = null;
try {
theme = localStorage.getItem('theme');
} catch (_) {
}
if (theme) {
document.documentElement.setAttribute('data-theme', theme);
}"#
}
link rel="stylesheet" href="/css/main.min.css?3";
script src="/js/main.min.js" defer {}
script { (PreEscaped(r#"let t;try{t=localStorage.getItem("theme")}catch(_){}if(t)document.documentElement.setAttribute("data-theme",t);"#)) }
}
}
@@ -199,3 +195,30 @@ pub fn data_progress_sections(measures: &Measures) -> Markup {
}
}
}
/// Serves `/robots.txt`: an AI-crawler blocklist fetched from the
/// community-maintained `ai-robots-txt` repository, cached in-process for
/// the lifetime of the server (there is no expiry/refresh).
pub async fn get_robots() -> Result<String, AppError> {
    // Process-wide cache; `None` until the first successful fetch. A std
    // RwLock is fine here because no guard is ever held across an `.await`.
    static ROBOTS_CACHE: LazyLock<std::sync::RwLock<Option<String>>> =
        LazyLock::new(|| std::sync::RwLock::new(None));
    {
        // Fast path: serve the cached copy. Poisoned lock maps to 500.
        let cache =
            ROBOTS_CACHE.read().map_err(|_| AppError::Status(StatusCode::INTERNAL_SERVER_ERROR))?;
        if let Some(robots) = &*cache {
            return Ok(robots.clone());
        }
    }
    let response = reqwest::get(
        "https://raw.githubusercontent.com/ai-robots-txt/ai.robots.txt/refs/heads/main/robots.txt",
    )
    .await?;
    // Any non-200 from upstream is surfaced as 502 rather than cached.
    if response.status() != StatusCode::OK {
        return Err(AppError::Status(StatusCode::BAD_GATEWAY));
    }
    let text = response.text().await?;
    {
        // Concurrent first requests may race to fill the cache; last writer
        // wins, which is harmless since all fetches return the same content.
        let mut cache = ROBOTS_CACHE
            .write()
            .map_err(|_| AppError::Status(StatusCode::INTERNAL_SERVER_ERROR))?;
        *cache = Some(text.clone());
    }
    Ok(text)
}
File diff suppressed because it is too large Load Diff
+8 -1
View File
@@ -11,23 +11,30 @@ use mime::Mime;
use crate::AppState;
mod common;
mod manage;
mod project;
mod report;
mod treemap;
/// Construct the application's route table.
pub fn build_router() -> Router<AppState> {
    Router::new()
        // Crawler policy (cached upstream blocklist).
        .route("/robots.txt", get(common::get_robots))
        // GitHub integration and authentication.
        .route("/api/github/webhook", post(decomp_dev_github::webhook::webhook))
        .route("/api/github/oauth", get(decomp_dev_auth::oauth))
        .route("/login", get(decomp_dev_auth::login))
        .route("/logout", post(decomp_dev_auth::logout))
        // Project management pages: list, add new, edit, manual refresh.
        .route("/manage", get(manage::manage))
        .route("/manage/new", get(manage::new))
        .route("/manage/new", post(manage::new_save))
        .route("/manage/{owner}/{repo}", get(manage::manage_project))
        .route("/manage/{owner}/{repo}", post(manage::manage_project_save))
        .route("/manage/{owner}/{repo}/refresh", post(manage::manage_project_refresh))
        // Static assets and generated images.
        .route("/css/{*filename}", get(decomp_dev_scripts::get_css))
        .route("/js/{*filename}", get(decomp_dev_scripts::get_js))
        .route("/assets/{*filename}", get(decomp_dev_images::get_asset))
        .route("/og.png", get(decomp_dev_images::get_og))
        // Project listing and per-project report pages.
        .route("/", get(project::get_projects))
        .route("/{owner}/{repo}", get(report::get_report))
        .route("/{owner}/{repo}", post(report::save_project))
        .route("/{owner}/{repo}/{version}", get(report::get_report))
        .route("/{owner}/{repo}/{version}/{commit}", get(report::get_report))
}
+4 -3
View File
@@ -1,4 +1,4 @@
use std::{sync::Arc, time::Instant};
use std::{str::FromStr, sync::Arc, time::Instant};
use anyhow::{Context, anyhow};
use axum::{
@@ -9,7 +9,7 @@ use axum::{
use decomp_dev_auth::CurrentUser;
use decomp_dev_core::{
AppError, FullUri,
models::{Commit, Project},
models::{Commit, Platform, Project},
util::UrlExt,
};
use maud::{DOCTYPE, Markup, html};
@@ -247,8 +247,9 @@ fn project_fragment(
a href=(project_path) { (info.project.name()) }
}
@if let Some(platform) = &info.project.platform {
@let platform_name = Platform::from_str(platform).map(|p| p.name()).unwrap_or(platform);
img class="platform-icon" src=(format!("/assets/platforms/{}.svg", platform))
alt=(platform) width="24" height="24";
alt=(platform_name) title=(platform_name) width="24" height="24";
}
}
h6 {
+180 -75
View File
@@ -2,10 +2,10 @@ use std::{borrow::Cow, iter, time::Instant};
use anyhow::{Context, Result};
use axum::{
Form, Json,
Json,
extract::{Path, Query, State},
http::{HeaderMap, StatusCode, Uri, header},
response::{IntoResponse, Redirect, Response},
response::{IntoResponse, Response},
};
use decomp_dev_auth::CurrentUser;
use decomp_dev_core::{
@@ -232,7 +232,9 @@ pub async fn get_report(
"shield" => mode_shield(&scope, query, &acceptable),
"report" => mode_report(&scope, &state, uri, query, start, &acceptable, current_user).await,
"measures" => mode_measures(&scope, &acceptable),
"history" => mode_history(&scope, &state, query, &acceptable).await,
"history" => {
mode_history(&scope, &state, uri, query, start, &acceptable, current_user).await
}
_ => Err(AppError::Status(StatusCode::BAD_REQUEST)),
}
}
@@ -251,7 +253,7 @@ async fn mode_report(
if (mime.type_() == mime::STAR && mime.subtype() == mime::STAR)
|| (mime.type_() == mime::TEXT && mime.subtype() == mime::HTML)
{
let rendered = render_template(scope, state, uri, current_user, start).await?;
let rendered = render_report(scope, state, uri, current_user, start).await?;
return Ok(rendered.into_response());
} else if mime.type_() == mime::APPLICATION && mime.subtype() == mime::JSON {
let flattened = scope.report.report.flatten();
@@ -336,8 +338,11 @@ struct ReportHistoryEntry {
async fn mode_history(
scope: &Scope<'_>,
state: &AppState,
uri: Uri,
query: ReportQuery,
start: Instant,
acceptable: &[Mime],
current_user: Option<CurrentUser>,
) -> Result<Response, AppError> {
let report_measures =
state.db.fetch_all_reports(&scope.project_info.project, &scope.report.version).await?;
@@ -378,8 +383,11 @@ async fn mode_history(
}
for mime in acceptable {
if (mime.type_() == mime::STAR && mime.subtype() == mime::STAR)
|| (mime.type_() == mime::APPLICATION && mime.subtype() == mime::JSON)
|| (mime.type_() == mime::TEXT && mime.subtype() == mime::HTML)
{
let rendered = render_history(scope, state, uri, current_user, start, result).await?;
return Ok(rendered.into_response());
} else if mime.type_() == mime::APPLICATION && mime.subtype() == mime::JSON {
return Ok(Json(result).into_response());
}
}
@@ -512,11 +520,18 @@ fn apply_scope<'a>(
let label = current_unit
.as_ref()
.map(|u| u.name.rsplit_once('/').map_or(u.name.as_str(), |(_, name)| name))
.or_else(|| current_category.as_ref().map(|c| c.name.as_str()));
.or_else(|| {
// Only show a category label if it is not the default category
let default_category_id =
project_info.project.default_category.as_deref().unwrap_or("all");
let current_category_id = current_category.map(|c| c.id.as_str()).unwrap_or("all");
(current_category_id != default_category_id)
.then(|| current_category.map(|c| c.name.as_str()).unwrap_or("All"))
});
Ok(Scope { report, project_info, measures, current_category, current_unit, units, label })
}
async fn render_template(
async fn render_report(
scope: &Scope<'_>,
state: &AppState,
uri: Uri,
@@ -557,6 +572,12 @@ async fn render_template(
let request_url = Url::parse(&uri.to_string()).context("Failed to parse URI")?;
let project_base_path =
format!("/{}/{}", project_info.project.owner, project_info.project.repo);
let project_history_path = request_url.query_param("mode", Some("history"));
let project_manage_path =
format!("/manage/{}/{}", project_info.project.owner, project_info.project.repo);
let can_manage = current_user
.as_ref()
.is_some_and(|u| u.permissions_for_repo(project_info.project.id).admin);
let canonical_url = request_url.with_path(&format!(
"/{}/{}/{}/{}",
project_info.project.owner, project_info.project.repo, report.version, report.commit.sha
@@ -575,7 +596,14 @@ async fn render_template(
})
.collect::<Vec<_>>();
let all_url = canonical_url.query_param("category", None);
let all_url = canonical_url.query_param(
"category",
if project_info.project.default_category.as_deref().is_none_or(|c| c == "all") {
None
} else {
Some("all")
},
);
let all_category =
ReportCategoryItem { id: "all", name: "All", path: all_url.path_and_query().to_string() };
let current_category = current_category
@@ -679,6 +707,17 @@ async fn render_template(
}
}
main {
.actions {
details class="dropdown" {
summary {}
ul dir="rtl" {
li { a href=(project_history_path) { "History" } }
@if can_manage {
li { a href=(project_manage_path) { "Manage" } }
}
}
}
}
h3 { (format!("{project_short_name} is {:.2}% decompiled", measures.matched_code_percent)) }
@if current_unit.is_none() && measures.complete_code_percent > 0.0 {
h4 class="muted" { (format!("{:.2}% fully linked", measures.complete_code_percent)) }
@@ -783,9 +822,6 @@ async fn render_template(
noscript {
img #treemap src=(image_url) alt="Progress graph";
}
@if current_user.as_ref().is_some_and(|u| u.permissions_for_repo(project_info.project.id).admin) {
(manage_form(project_info))
}
}
}
(footer(start, current_user.as_ref()))
@@ -793,77 +829,146 @@ async fn render_template(
})
}
fn manage_form(project_info: &ProjectInfo) -> Markup {
async fn render_history(
scope: &Scope<'_>,
state: &AppState,
uri: Uri,
current_user: Option<CurrentUser>,
start: Instant,
result: Vec<ReportHistoryEntry>,
) -> Result<Markup> {
let Scope { report, project_info, measures, current_category, current_unit, units, label } =
scope;
let request_url = Url::parse(&uri.to_string()).context("Failed to parse URI")?;
let project_base_path =
format!("/{}/{}", project_info.project.owner, project_info.project.repo);
let default_version = project_info.default_version();
html! {
h6 class="report-header" { "Manage" }
form action=(project_base_path) method="post" {
fieldset {
label {
"Default version"
select name="default_version" {
@for version in &project_info.report_versions {
@if default_version == Some(version.as_str()) {
option value=(version) selected { (version) }
} @else {
option value=(version) { (version) }
let canonical_url = request_url.with_path(&format!(
"/{}/{}/{}",
project_info.project.owner, project_info.project.repo, report.version
));
let image_url = canonical_url.with_path(&format!("{}.png", canonical_url.path()));
let versions = project_info
.report_versions
.iter()
.map(|version| {
let version_url = request_url.with_path(&format!(
"/{}/{}/{}/{}",
project_info.project.owner, project_info.project.repo, version, report.commit.sha
));
ReportTemplateVersion { id: version, path: version_url.path_and_query().to_string() }
})
.collect::<Vec<_>>();
let all_url = canonical_url.query_param(
"category",
if project_info.project.default_category.as_deref().is_none_or(|c| c == "all") {
None
} else {
Some("all")
},
);
let all_category =
ReportCategoryItem { id: "all", name: "All", path: all_url.path_and_query().to_string() };
let current_category = current_category
.map(|c| {
let path =
canonical_url.query_param("category", Some(&c.id)).path_and_query().to_string();
ReportCategoryItem { id: &c.id, name: &c.name, path }
})
.unwrap_or_else(|| all_category.clone());
let categories = iter::once(all_category)
.chain(report.report.categories.iter().map(|c| {
let path =
canonical_url.query_param("category", Some(&c.id)).path_and_query().to_string();
ReportCategoryItem { id: &c.id, name: &c.name, path }
}))
.collect::<Vec<_>>();
let project_name = if let Some(label) = label {
Cow::Owned(format!("{} ({})", project_info.project.name(), label))
} else {
project_info.project.name()
};
let project_short_name = if let Some(label) = label {
Cow::Owned(format!("{} ({})", project_info.project.short_name(), label))
} else {
Cow::Borrowed(project_info.project.short_name())
};
Ok(html! {
(DOCTYPE)
html {
head lang="en" {
meta charset="utf-8";
title { (project_short_name) " • Progress History" }
(header())
meta name="description" content=(format!("Decompilation progress history for {project_name}"));
meta property="og:title" content=(format!("{project_short_name} is {:.2}% decompiled", measures.matched_code_percent));
meta property="og:description" content=(format!("Decompilation progress history for {project_name}"));
meta property="og:image" content=(image_url);
meta property="og:url" content=(canonical_url);
link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/uplot@1.6.32/dist/uPlot.min.css";
}
body {
header {
nav {
ul {
li {
a href="https://decomp.dev" { strong { "decomp.dev" } }
}
li {
a href="/" { "Projects" }
}
li {
a href=(project_base_path) { (project_short_name) }
}
li {
a href=(request_url) { "History" }
}
}
(nav_links())
}
}
main {
h3 { "History for " (project_short_name) }
details class="dropdown" title="Version" {
summary { (report.version) }
ul {
@for version in &versions {
li {
a href=(version.path) { (version.id) }
}
}
}
}
}
label {
@if project_info.project.enable_pr_comments {
input name="enable_pr_comments" type="checkbox" role="switch" checked;
} @else {
input name="enable_pr_comments" type="checkbox" role="switch";
@if current_unit.is_none() && categories.len() > 1 {
details class="dropdown" title="Category" {
summary { (current_category.name) }
ul {
@for category in &categories {
li {
a href=(category.path) { (category.name) }
}
}
}
}
}
script src="https://cdn.jsdelivr.net/npm/uplot@1.6.32/dist/uPlot.iife.min.js" {}
script src="/js/history.min.js" {}
script {
(PreEscaped(r#"document.write('<div id="chart" width="100%"></div>');renderChart("chart","#))
(PreEscaped(serde_json::to_string(&result)?))
(PreEscaped(r#");"#))
}
hr;
div role="group" {
a role="button" href=(project_base_path) { "Back to report" }
}
"Enable PR comments"
}
}
button type="submit" { "Save" }
(footer(start, current_user.as_ref()))
}
}
}
fn form_bool<'de, D>(deserializer: D) -> Result<bool, D::Error>
where D: serde::Deserializer<'de> {
match <&str>::deserialize(deserializer)? {
"on" => Ok(true),
"off" => Ok(false),
other => Err(serde::de::Error::unknown_variant(other, &["on", "off"])),
}
}
/// Form payload for the project settings form (`POST /{owner}/{repo}`).
#[derive(Deserialize)]
pub struct ProjectForm {
    /// Whether progress comments should be posted on pull requests.
    /// HTML checkboxes are omitted from the submission when unchecked,
    /// so this defaults to `false` and parses "on"/"off" via `form_bool`
    /// when present.
    #[serde(default, deserialize_with = "form_bool")]
    pub enable_pr_comments: bool,
    /// Report version to show by default, if one was selected.
    pub default_version: Option<String>,
}
/// Handle `POST /{owner}/{repo}`: update a project's settings from the
/// submitted form, then redirect back to the project's report page.
///
/// # Errors
/// - `404 Not Found` if the owner/repo pair is unknown.
/// - `403 Forbidden` if the current user lacks admin permission on the
///   repository.
pub async fn save_project(
    Path(params): Path<ReportParams>,
    State(state): State<AppState>,
    current_user: CurrentUser,
    Form(form): Form<ProjectForm>,
) -> Result<Response, AppError> {
    // Resolve the project; 404 if it does not exist.
    let Some(project_info) = state.db.get_project_info(&params.owner, &params.repo, None).await?
    else {
        return Err(AppError::Status(StatusCode::NOT_FOUND));
    };
    // Only repository admins may change project settings.
    if !current_user.permissions_for_repo(project_info.project.id).admin {
        return Err(AppError::Status(StatusCode::FORBIDDEN));
    }
    state
        .db
        .update_project_settings(
            project_info.project.id,
            form.enable_pr_comments,
            form.default_version,
        )
        .await?;
    // Send the user back to the project's report page.
    let redirect_url = format!("/{}/{}", params.owner, params.repo);
    Ok(Redirect::to(&redirect_url).into_response())
    // NOTE(review): the stray `})` below looks like diff-rendering
    // residue from an adjacent deleted block — confirm against the
    // actual file; the function should end with a single `}`.
    })
}
+56 -8
View File
@@ -6,15 +6,16 @@ mod proto;
use std::{
fs::File,
io::BufReader,
net::{Ipv4Addr, SocketAddr},
net::{IpAddr, Ipv4Addr, SocketAddr},
str::FromStr,
sync::Arc,
time::Duration,
};
use axum::{
Router,
extract::FromRef,
http::{Method, header},
extract::{ConnectInfo, FromRef},
http::{Method, Request, header},
};
use decomp_dev_core::config::{Config, GitHubConfig};
use decomp_dev_db::Database;
@@ -25,10 +26,11 @@ use tower_http::{
ServiceBuilderExt, cors,
cors::CorsLayer,
timeout::TimeoutLayer,
trace::{DefaultMakeSpan, DefaultOnResponse, TraceLayer},
trace::{DefaultOnResponse, MakeSpan, TraceLayer},
};
use tower_sessions::{Expiry, SessionManagerLayer, SessionStore};
use tower_sessions::{Expiry, SessionManagerLayer, SessionStore, cookie::SameSite};
use tower_sessions_sqlx_store::SqliteStore;
use tracing::{Level, Span};
use tracing_subscriber::{EnvFilter, filter::LevelFilter};
use crate::handlers::build_router;
@@ -109,14 +111,15 @@ fn app(state: AppState, session_store: impl SessionStore + Clone) -> Router {
.sensitive_response_headers(sensitive_headers)
.layer(
TraceLayer::new_for_http()
.make_span_with(DefaultMakeSpan::new().level(tracing::Level::INFO))
.on_response(DefaultOnResponse::new().level(tracing::Level::INFO)),
.make_span_with(MyMakeSpan { level: Level::INFO })
.on_response(DefaultOnResponse::new().level(Level::INFO)),
)
.layer(TimeoutLayer::new(Duration::from_secs(10)))
.layer(TimeoutLayer::new(Duration::from_secs(60)))
.layer(CorsLayer::new().allow_methods([Method::GET]).allow_origin(cors::Any))
.layer(
SessionManagerLayer::new(session_store)
.with_secure(false)
.with_same_site(SameSite::Lax)
.with_expiry(Expiry::OnInactivity(time::Duration::days(1))),
)
.compression();
@@ -147,3 +150,48 @@ async fn shutdown_signal() {
_ = terminate => {},
}
}
/// `tower_http` span factory that records the request method, URI,
/// client IP, and user agent on each request span, preferring
/// Cloudflare's `CF-Connecting-IP` header over the raw socket address.
#[derive(Debug, Clone)]
pub struct MyMakeSpan {
    // Level at which request spans are created.
    level: Level,
}
impl<B> MakeSpan<B> for MyMakeSpan {
    fn make_span(&mut self, request: &Request<B>) -> Span {
        // Prefer the client IP forwarded by Cloudflare; fall back to the
        // peer socket address captured by axum's `ConnectInfo`, if present.
        let cf_connecting_ip = request.headers().get("CF-Connecting-IP");
        let ip = if let Some(v) = cf_connecting_ip {
            // Header value must be valid UTF-8 and parse as an IP address.
            str::from_utf8(v.as_bytes()).ok().and_then(|s| IpAddr::from_str(s).ok())
        } else if let Some(ConnectInfo(socket_addr)) =
            request.extensions().get::<ConnectInfo<SocketAddr>>()
        {
            Some(socket_addr.ip())
        } else {
            None
        };
        // 0.0.0.0 marks an unknown client.
        let ip = ip.unwrap_or(IpAddr::from([0, 0, 0, 0]));
        let user_agent = request
            .headers()
            .get(header::USER_AGENT)
            .and_then(|v| v.to_str().ok())
            .unwrap_or("[unknown]");
        // `tracing::span!` requires the level to be a constant expression,
        // so expand one `span!` call per level and select at runtime.
        macro_rules! make_span {
            ($level:expr) => {
                tracing::span!(
                    $level,
                    "request",
                    method = %request.method(),
                    uri = %request.uri(),
                    ip = %ip,
                    user_agent = %user_agent,
                )
            }
        }
        match self.level {
            Level::ERROR => make_span!(Level::ERROR),
            Level::WARN => make_span!(Level::WARN),
            Level::INFO => make_span!(Level::INFO),
            Level::DEBUG => make_span!(Level::DEBUG),
            Level::TRACE => make_span!(Level::TRACE),
        }
    }
}
+42 -2
View File
@@ -53,7 +53,7 @@ $breakpoints: (
"layout/landmarks": true,
"layout/container": true,
"layout/section": true,
"layout/grid": false,
"layout/grid": true,
"layout/overflow-auto": false,
// Content
@@ -80,7 +80,7 @@ $breakpoints: (
"components/card": true,
"components/dropdown": true,
"components/group": true,
"components/loading": false,
"components/loading": true,
"components/modal": false,
"components/nav": true,
"components/progress": false,
@@ -346,3 +346,43 @@ footer {
transition: none;
}
}
// Colored status banner cards used on the manage pages.
.info-card {
  background-color: $azure-500;
  color: $white;
}
.warning-card {
  background-color: $pumpkin-550;
  color: $white;
}
.error-card {
  background-color: $red-650;
  color: $white;
}
// Centers a loading indicator within its parent.
.loading-container {
  display: flex;
  justify-content: center;
  margin: var(--pico-spacing);
}
// Floating actions dropdown shown at the top-right of report pages.
.actions {
  float: right;
  position: relative;
  .dropdown {
    summary {
      &::after {
        // Keep the chevron vertically centered without Pico's default
        // margin/rotation transform.
        margin-inline-start: 0;
        transform: none;
        height: 100%;
      }
    }
    ul li {
      // Menu items read left-to-right even though the list is dir="rtl".
      text-align: left;
    }
  }
}
+130
View File
@@ -0,0 +1,130 @@
// Fixed chart height in CSS pixels (width is responsive).
const height = 400;
// Axis label/tick stroke color.
const stroke = '#a9a9b3';
// Subtle grid line styling shared by both axes.
const grid = {
  stroke: 'rgba(128, 128, 128, 0.1)',
};
// uPlot legend value formatter: renders a numeric percentage as
// "NN.NN%", passing null/undefined through as null so uPlot shows a
// gap instead of a value. `self` is the uPlot instance (unused).
function percentValue(self, rawValue) {
  return rawValue == null ? null : `${rawValue.toFixed(2)}%`;
}
// Progress measures for a single report. Field names match the JSON
// serialization produced by the server (presumably objdiff's report
// `Measures` — confirm against the Rust bindings).
type Measures = {
  fuzzy_match_percent: number;
  total_code: number;
  matched_code: number;
  matched_code_percent: number;
  total_data: number;
  matched_data: number;
  matched_data_percent: number;
  total_functions: number;
  matched_functions: number;
  matched_functions_percent: number;
  complete_code: number;
  complete_code_percent: number;
  complete_data: number;
  complete_data_percent: number;
  total_units: number;
  complete_units: number;
};
// One point in a project's progress history, as consumed by renderChart.
type ReportHistoryEntry = {
  timestamp: string; // parseable by Date.parse (see renderChart)
  commit_sha: string;
  measures: Measures;
};
// Render an interactive uPlot progress-history chart into the element
// with the given id. `data` is reversed in place before plotting, so
// the input is assumed newest-first — TODO confirm against the server's
// history endpoint ordering.
function renderChart(id: string, data: ReportHistoryEntry[]) {
  let chart = document.getElementById(id);
  if (!chart) {
    console.error(`Chart element with id ${id} not found`);
    return;
  }
  // uPlot wants x values in ascending order.
  data.reverse();
  const u = new uPlot({
    id: id,
    // Initial width is a placeholder; updateSize() below resizes the
    // chart to the container immediately and on window resize.
    width: 600,
    height: height,
    scales: {
      x: {
        // Interpret x values as Unix timestamps (seconds).
        time: true,
      },
    },
    // Series order must match the column order passed to setData()
    // below. Only "Matched Code" is visible by default; the others can
    // be toggled via the legend.
    series: [
      {},
      {
        show: false,
        label: "Fuzzy Match Percent",
        width: 2,
        stroke: "#003f5c",
        value: percentValue,
      },
      {
        label: "Matched Code",
        width: 2,
        stroke: "#ff6361",
        value: percentValue,
      },
      {
        show: false,
        label: "Matched Data",
        width: 2,
        stroke: "#ffa600",
        value: percentValue,
      },
      {
        show: false,
        label: "Linked Code",
        width: 2,
        stroke: "#bc5090",
        value: percentValue,
      },
      {
        show: false,
        label: "Linked Data",
        width: 2,
        stroke: "#58508d",
        value: percentValue,
      }
    ],
    axes: [
      {
        stroke,
        grid,
      },
      {
        stroke,
        grid,
        // Y-axis ticks rendered as whole-number percentages.
        values: (self, ticks) => ticks.map(rawValue => rawValue.toFixed(0) + "%"),
      },
    ],
    hooks: {
      init: [
        u => {
          // NOTE(review): debug-only click logging of the hovered entry;
          // consider removing or wiring to navigation before release.
          u.over.addEventListener('click', e => {
            console.log('click!', data[u.legend.idx]);
          });
        }
      ],
    },
  }, null, chart);
  // Resize the chart to fill the parent container's width.
  function updateSize() {
    const container = chart.parentElement;
    u.setSize({width: container.offsetWidth, height});
  }
  window.addEventListener('resize', updateSize);
  updateSize();
  // Columns: x (Unix seconds), then one column per series above.
  // `|| null` coerces 0 to null (a gap) — presumably intentional for
  // absent measures, but it also hides genuine 0% values; verify.
  u.setData([
    data.map(e => Date.parse(e.timestamp) / 1000),
    data.map(e => e.measures.fuzzy_match_percent || null),
    data.map(e => e.measures.matched_code_percent || null),
    data.map(e => e.measures.matched_data_percent || null),
    data.map(e => e.measures.complete_code_percent || null),
    data.map(e => e.measures.complete_data_percent || null),
  ]);
}

Some files were not shown because too many files have changed in this diff Show More