feat: add initial api to create pipeline with jobs
jiegec committed Mar 10, 2024
1 parent 57967f6 commit 0d5b88c
Showing 9 changed files with 190 additions and 16 deletions.
13 changes: 13 additions & 0 deletions Cargo.lock

Some generated files (Cargo.lock) are not rendered by default.

4 changes: 4 additions & 0 deletions frontend/.gitignore
@@ -20,3 +20,7 @@ pnpm-debug.log*
*.njsproj
*.sln
*.sw?

# auto-generated via `yarn dev`
components.d.ts
typed-router.d.ts
2 changes: 1 addition & 1 deletion frontend/tsconfig.json
@@ -19,7 +19,7 @@
]
}
},
"include": ["src/**/*.ts", "src/**/*.d.ts", "src/**/*.tsx", "src/**/*.vue"],
"include": ["src/**/*.ts", "src/**/*.d.ts", "src/**/*.tsx", "src/**/*.vue", "./typed-router.d.ts"],
"references": [{ "path": "./tsconfig.node.json" }],
"exclude": ["node_modules"]
}
2 changes: 1 addition & 1 deletion server/Cargo.toml
@@ -32,5 +32,5 @@ deadpool-lapin = "0.11.0"
axum = "0.7.4"
tracing-subscriber = "0.3.18"
tracing = "0.1.40"
tower-http = { version = "0.5.2", features = ["trace"] }
tower-http = { version = "0.5.2", features = ["trace", "fs"] }
diesel = { version = "2.1.4", features = ["postgres", "chrono", "r2d2"] }
78 changes: 78 additions & 0 deletions server/src/api.rs
@@ -0,0 +1,78 @@
use crate::{
    models::{NewJob, NewPipeline, Pipeline},
    ALL_ARCH, ARGS,
};
use anyhow::Context;
use buildit_utils::github::update_abbs;
use diesel::{
    r2d2::{ConnectionManager, Pool},
    PgConnection, RunQueryDsl, SelectableHelper,
};

pub async fn pipeline_new(
    pool: Pool<ConnectionManager<PgConnection>>,
    git_branch: &str,
    packages: &str,
    archs: &str,
) -> anyhow::Result<i32> {
    // resolve branch name to commit hash
    update_abbs(git_branch, &ARGS.abbs_path)
        .await
        .context("Failed to update ABBS tree")?;

    let output = tokio::process::Command::new("git")
        .arg("rev-parse")
        .arg("HEAD")
        .current_dir(&ARGS.abbs_path)
        .output()
        .await
        .context("Failed to resolve branch to git commit")?;
    let git_sha = String::from_utf8_lossy(&output.stdout).trim().to_string();

    // sanitize archs arg
    let mut archs: Vec<&str> = archs.split(",").collect();
    if archs.contains(&"mainline") {
        // expand the pseudo-arch "mainline" into every mainline architecture
        archs.extend(ALL_ARCH.iter());
        archs.retain(|arch| *arch != "mainline");
    }
    archs.sort();
    archs.dedup();

    // create a new pipeline
    let mut conn = pool
        .get()
        .context("Failed to get db connection from pool")?;

    use crate::schema::pipelines;
    let new_pipeline = NewPipeline {
        packages: packages.to_string(),
        archs: archs.join(","),
        git_branch: git_branch.to_string(),
        git_sha: git_sha.clone(),
        creation_time: chrono::Utc::now(),
    };
    let pipeline = diesel::insert_into(pipelines::table)
        .values(&new_pipeline)
        .returning(Pipeline::as_returning())
        .get_result(&mut conn)
        .context("Failed to create pipeline")?;

    // for each arch, create a new job
    for arch in &archs {
        use crate::schema::jobs;
        let new_job = NewJob {
            pipeline_id: pipeline.id,
            packages: packages.to_string(),
            arch: arch.to_string(),
            creation_time: chrono::Utc::now(),
            status: "created".to_string(),
        };
        diesel::insert_into(jobs::table)
            .values(&new_job)
            .execute(&mut conn)
            .context("Failed to create job")?;
    }

    Ok(pipeline.id)
}
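For reference, the arch-sanitization step above can be exercised in isolation. The sketch below restates it as a standalone function with a unit test; the ALL_ARCH value here is a made-up placeholder (the real constant lives elsewhere in the crate and is not part of this diff), so treat it as illustrative only.

// Hypothetical stand-in for the crate's ALL_ARCH constant (assumption).
const ALL_ARCH: &[&str] = &["amd64", "arm64", "loongson3"];

// Mirrors the sanitization in `pipeline_new`: expand the pseudo-arch
// "mainline" into every mainline architecture, then sort and dedup.
fn expand_archs(archs: &str) -> Vec<&str> {
    let mut archs: Vec<&str> = archs.split(',').collect();
    if archs.contains(&"mainline") {
        archs.extend(ALL_ARCH.iter());
        archs.retain(|arch| *arch != "mainline");
    }
    archs.sort();
    archs.dedup();
    archs
}

#[test]
fn mainline_expands_and_dedups() {
    assert_eq!(
        expand_archs("mainline,amd64"),
        vec!["amd64", "arm64", "loongson3"]
    );
}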
2 changes: 2 additions & 0 deletions server/src/lib.rs
@@ -16,6 +16,8 @@ pub mod heartbeat;
pub mod job;
pub mod models;
pub mod schema;
pub mod routes;
pub mod api;

pub struct WorkerStatus {
    pub last_heartbeat: DateTime<Local>,
28 changes: 14 additions & 14 deletions server/src/main.rs
@@ -1,13 +1,12 @@
use axum::routing::post;
use axum::{routing::get, Router};
use diesel::pg::PgConnection;
use diesel::r2d2::ConnectionManager;
use diesel::r2d2::Pool;
use server::routes::ping;
use server::routes::pipeline_new;
use server::ARGS;

// basic handler that responds with a static string
async fn root() -> &'static str {
"Hello, World!"
}
use tower_http::services::{ServeDir, ServeFile};

#[tokio::main]
async fn main() -> anyhow::Result<()> {
@@ -16,22 +15,23 @@ async fn main() -> anyhow::Result<()> {

    tracing::info!("Connecting to database");
    let manager = ConnectionManager::<PgConnection>::new(&ARGS.database_url);
    let pool = Pool::builder()
        .test_on_check_out(true)
        .build(manager)?;
    let pool = Pool::builder().test_on_check_out(true).build(manager)?;

    tracing::info!("Starting http server");

    // build our application with a route
    let serve_dir = ServeDir::new("frontend/dist")
        .not_found_service(ServeFile::new("frontend/dist/index.html"));
    let app = Router::new()
        // `GET /` goes to `root`
        .route("/", get(root))
        .route("/api/ping", get(ping))
        .route("/api/pipeline/new", post(pipeline_new))
        .fallback_service(serve_dir)
        .with_state(pool)
        .layer(tower_http::trace::TraceLayer::new_for_http());

    // run our app with hyper, listening globally on port 3000
    let listener = tokio::net::TcpListener::bind("0.0.0.0:3000").await.unwrap();
    tracing::debug!("listening on {}", listener.local_addr().unwrap());
    axum::serve(listener, app).await.unwrap();
    tracing::debug!("listening on 127.0.0.1:3000");
    let listener = tokio::net::TcpListener::bind("127.0.0.1:3000").await?;
    axum::serve(listener, app).await?;

    /*
    dotenv::dotenv().ok();
22 changes: 22 additions & 0 deletions server/src/models.rs
@@ -12,6 +12,17 @@ pub struct Pipeline {
    pub creation_time: chrono::DateTime<chrono::Utc>,
}

#[derive(Insertable)]
#[diesel(table_name = crate::schema::pipelines)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct NewPipeline {
    pub packages: String,
    pub archs: String,
    pub git_branch: String,
    pub git_sha: String,
    pub creation_time: chrono::DateTime<chrono::Utc>,
}

#[derive(Queryable, Selectable, Associations)]
#[diesel(belongs_to(Pipeline))]
#[diesel(table_name = crate::schema::jobs)]
@@ -25,6 +36,17 @@ pub struct Job {
    pub status: String,
}

#[derive(Insertable)]
#[diesel(table_name = crate::schema::jobs)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct NewJob {
    pub pipeline_id: i32,
    pub packages: String,
    pub arch: String,
    pub creation_time: chrono::DateTime<chrono::Utc>,
    pub status: String,
}

#[derive(Queryable, Selectable)]
#[diesel(table_name = crate::schema::workers)]
#[diesel(check_for_backend(diesel::pg::Pg))]
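These Insertable structs pair with table definitions in server/src/schema.rs, which is not part of this diff. As a rough, non-authoritative sketch, the corresponding Diesel table! entries would look something like the following; the column types and the joinable/allow macros are inferred from the model structs above and may differ from the generated file.

// Sketch only: inferred from the model structs; the real schema.rs is
// normally generated by `diesel migration run` and is the source of truth.
diesel::table! {
    pipelines (id) {
        id -> Int4,
        packages -> Text,
        archs -> Text,
        git_branch -> Text,
        git_sha -> Text,
        creation_time -> Timestamptz,
    }
}

diesel::table! {
    jobs (id) {
        id -> Int4,
        pipeline_id -> Int4,
        packages -> Text,
        arch -> Text,
        creation_time -> Timestamptz,
        status -> Text,
    }
}

diesel::joinable!(jobs -> pipelines (pipeline_id));
diesel::allow_tables_to_appear_in_same_query!(jobs, pipelines);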
55 changes: 55 additions & 0 deletions server/src/routes.rs
@@ -0,0 +1,55 @@
use axum::{
    extract::{Json, State},
    http::StatusCode,
    response::{IntoResponse, Response},
};
use diesel::{
    r2d2::{ConnectionManager, Pool},
    PgConnection,
};
use serde::{Deserialize, Serialize};

use crate::api;

pub async fn ping() -> &'static str {
    "PONG"
}

// learned from https://github.com/tokio-rs/axum/blob/main/examples/anyhow-error-response/src/main.rs
pub struct AnyhowError(anyhow::Error);

impl IntoResponse for AnyhowError {
    fn into_response(self) -> Response {
        (StatusCode::INTERNAL_SERVER_ERROR, format!("{}", self.0)).into_response()
    }
}

impl<E> From<E> for AnyhowError
where
    E: Into<anyhow::Error>,
{
    fn from(err: E) -> Self {
        Self(err.into())
    }
}

#[derive(Deserialize)]
pub struct PipelineNewRequest {
    git_branch: String,
    packages: String,
    archs: String,
}

#[derive(Serialize)]
pub struct PipelineNewResponse {
    id: i32,
}

pub async fn pipeline_new(
    State(pool): State<Pool<ConnectionManager<PgConnection>>>,
    Json(payload): Json<PipelineNewRequest>,
) -> Result<Json<PipelineNewResponse>, AnyhowError> {
    let pipeline_id =
        api::pipeline_new(pool, &payload.git_branch, &payload.packages, &payload.archs).await?;
    Ok(Json(PipelineNewResponse { id: pipeline_id }))
}
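To show how the new endpoint is meant to be driven, here is a minimal, hypothetical client sketch. It assumes the server from this commit is listening on 127.0.0.1:3000 and pulls in reqwest (with the "json" feature), tokio, serde, and anyhow on the client side; none of that is part of this commit, the branch/package/arch values are placeholders, and the structs simply mirror PipelineNewRequest/PipelineNewResponse above.

use serde::{Deserialize, Serialize};

// Client-side copies of the request/response types in routes.rs (for this sketch only).
#[derive(Serialize)]
struct PipelineNewRequest {
    git_branch: String,
    packages: String,
    archs: String,
}

#[derive(Deserialize)]
struct PipelineNewResponse {
    id: i32,
}

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    // POST the pipeline request and decode the JSON response.
    let resp: PipelineNewResponse = reqwest::Client::new()
        .post("http://127.0.0.1:3000/api/pipeline/new")
        .json(&PipelineNewRequest {
            git_branch: "stable".to_string(),   // placeholder branch
            packages: "bash".to_string(),       // placeholder package list
            archs: "mainline".to_string(),      // expands server-side to all mainline archs
        })
        .send()
        .await?
        .error_for_status()?
        .json()
        .await?;
    println!("created pipeline {}", resp.id);
    Ok(())
}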
