In-browser compiler messages, migration checker, and general DX improvements #128

Merged · 4 commits · Feb 13, 2023

872 changes: 864 additions & 8 deletions Cargo.lock

Large diffs are not rendered by default.

31 changes: 23 additions & 8 deletions create-rust-app/Cargo.toml
@@ -14,13 +14,13 @@ categories = ["command-line-utilities", "development-tools", "web-programming",
##
## COMMON / DEFAULT - required dependencies
##
dotenv = "0.15.0"
dotenv = "0.15.0" # + plugin_dev
serde_json = "1.0.91"
lettre = "0.10.1"
tera = { version="1.17.0" }
lazy_static = { version="1.4.0" }
serde = { version = "1.0.152", features = ["derive"] }
diesel = { version="2.0.0-rc.1", default-features = false, features = ["uuid", "r2d2", "chrono", "returning_clauses_for_sqlite_3_35"] } # plugin_dev, plugin_auth
diesel = { version="2.0.0-rc.1", default-features = false, features = ["uuid", "r2d2", "chrono"] } # + plugin_dev, plugin_auth

##
## Database
@@ -42,13 +42,20 @@ chrono = { optional=true, version = "0.4.23", default-features = false, features

# plugin_dev
diesel_migrations = { optional=true, version="2.0.0" }
cargo_metadata = { optional=true, version="0.15.2" }
watchexec = { optional=true, version="2.0.2" }
#### tracing = { optional=true, version="0.1" }
#### tracing-subscriber = { optional=true, version="0.3.16", features=["env-filter"] }
reqwest = { optional=true, version="0.11.13" }
clearscreen = { optional=true, version="2.0.0" }
open = { optional=true, version="3.2.0" }
cargo_toml = { optional=true, version = "0.14.0" }

# plugin_storage
aws-config = { optional=true, version="0.14.0" }
aws-types = { optional=true, version="0.8.0" }
aws-endpoint = { optional=true, version="0.14.0" }
aws-sdk-s3 = { optional=true, version="0.8.0" }
futures-util = { optional=true, version="0.3.25" }
http = { optional=true, version="0.2.6" }
diesel_derives = { optional=true, version="2.0.1" }
uuid = { optional=true, version="1.2.2", features=["v4", "serde"] }
@@ -75,23 +82,31 @@ actix-web-httpauth = { optional=true, version="0.8.0" }
derive_more = { optional=true, version="0.99.17" }
futures = { optional=true, version="0.3.25" }
env_logger = { optional=true, version= "0.10.0" }

# axum dependencies (not yet released; only used for plugin_dev)

axum = { optional=true, version="0.6.1" }

##
## MISC - here, we list deps which are required by multiple features but are not required in all configurations
##

mime_guess = { optional=true, version="2.0.4" } # backend_poem, plugin_storage
anyhow = { optional=true, version="1.0.68" } # backend_poem, plugin_auth
tokio = { optional=true, version = "1", features = ["full"] } # backend_poem, plugin-storage
anyhow = { optional=true, version="1.0.57" } # backend_poem, plugin_auth, plugin_dev
tokio = { optional=true, version = "1", features = ["full"] } # backend_poem, plugin_storage
async-priority-channel = "0.1.0"
futures-util = { optional=true, version = "0.3.25" } # plugin_dev, TODO:plugin_storage?

[features]
default = ["backend_actix-web", "database_postgres", "plugin_auth", "plugin_container", "plugin_dev", "plugin_graphql", "plugin_storage", "plugin_utoipa"]
plugin_dev = ["diesel_migrations"]
plugin_dev = ["backend_axum", "cargo_toml", "open", "reqwest", "anyhow", "clearscreen", "watchexec", "cargo_metadata", "diesel_migrations", "futures-util"]
plugin_container = []
plugin_auth = ["anyhow", "rust-argon2", "rand", "jsonwebtoken", "chrono", "tsync"]
plugin_storage = [ "aws-config", "aws-types", "aws-endpoint", "aws-sdk-s3", "tokio", "futures-util", "http", "diesel_derives", "uuid", "md5", "mime_guess", "base64" ]
plugin_storage = [ "aws-config", "aws-types", "aws-endpoint", "aws-sdk-s3", "tokio", "http", "diesel_derives", "uuid", "md5", "mime_guess", "base64" ] # note: might need to add "futures-util"?
plugin_graphql = []
plugin_utoipa = ["utoipa", "utoipa-swagger-ui", "backend_actix-web"]
backend_poem = ["poem", "anyhow", "mime_guess", "tokio"]
backend_actix-web = ["actix-web", "actix-http", "actix-files", "actix-multipart", "actix-web-httpauth","derive_more", "futures", "env_logger"]
database_sqlite = ["diesel/sqlite", "libsqlite3-sys/bundled"]
backend_axum = ["axum", "axum/ws"]
database_sqlite = ["diesel/sqlite", "diesel/returning_clauses_for_sqlite_3_35", "libsqlite3-sys/bundled"]
database_postgres = ["diesel/postgres"]
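
For orientation, a minimal sketch of how code behind the new `plugin_dev` feature would typically be gated at compile time; the crate-root layout shown here is an assumption, not part of this diff.

// Hypothetical sketch (crate-root layout assumed, not part of this diff):
// the dev tooling only compiles when the `plugin_dev` feature is enabled.
#[cfg(feature = "plugin_dev")]
pub mod dev;

// Consumers that do not want the dev plugin can opt out in the usual Cargo way,
// e.g. `default-features = false` plus an explicit feature list in their Cargo.toml.
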
4 changes: 3 additions & 1 deletion create-rust-app/src/auth/endpoints/mod.rs
@@ -1,7 +1,9 @@
#[cfg(feature = "backend_actix-web")]
mod service_actixweb;
#[cfg(feature = "backend_actix-web")]
pub use service_actixweb::{endpoints, ApiDoc};
pub use service_actixweb::endpoints;
#[cfg(all(feature = "backend_actix-web", feature = "plugin_utoipa"))]
pub use service_actixweb::ApiDoc;

#[cfg(feature = "backend_poem")]
mod service_poem;
6 changes: 6 additions & 0 deletions create-rust-app/src/database/mod.rs
@@ -6,6 +6,12 @@ type DbCon = diesel::PgConnection;
#[cfg(feature = "database_sqlite")]
type DbCon = diesel::SqliteConnection;

#[cfg(feature = "database_postgres")]
pub type DieselBackend = diesel::pg::Pg;

#[cfg(feature = "database_sqlite")]
pub type DieselBackend = diesel::sqlite::Sqlite;

pub type Pool = r2d2::Pool<ConnectionManager<DbCon>>;
pub type Connection = PooledConnection<ConnectionManager<DbCon>>;

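The new `DieselBackend` alias names the concrete backend chosen by the active `database_*` feature. Below is a hedged sketch of one way downstream code could use it; the `todo_sql` helper and the `create_rust_app::database` import path are assumptions, not part of this PR.

use create_rust_app::database::DieselBackend; // import path assumed
use diesel::debug_query;
use diesel::prelude::*;

diesel::table! {
    todos (id) {
        id -> Integer,
        text -> Text,
    }
}

// Hypothetical helper: render the SQL a query would produce for whichever
// backend (Pg or Sqlite) the selected database feature resolves to.
fn todo_sql() -> String {
    let query = todos::table.filter(todos::id.eq(1));
    debug_query::<DieselBackend, _>(&query).to_string()
}
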
271 changes: 271 additions & 0 deletions create-rust-app/src/dev/backend_compiling_server.rs
@@ -0,0 +1,271 @@
use cargo_metadata::Message;
use std::collections::HashMap;
use std::path::PathBuf;
use std::process::Stdio;
use std::sync::Arc;
use tokio::process::Command;
use tokio::sync::Mutex;

use tokio::sync::broadcast::{Receiver, Sender};
use tokio::sync::mpsc;
use watchexec::action::Action;
use watchexec::config::{InitConfig, RuntimeConfig};
use watchexec::event::{Event, Priority, Tag};
use watchexec::handler::PrintDebug;
use watchexec::signal::source::MainSignal;
use watchexec::Watchexec;

use super::{check_exit, DevServerEvent, DevState};

pub async fn start(
project_dir: &'static str,
dev_port: u16,
mut signal_rx: Receiver<DevServerEvent>, // this is specifically for the SHUTDOWN signal
dev_server_events_s: Sender<DevServerEvent>, // this one is for any dev server event signal
state: Arc<Mutex<DevState>>,
file_events_s: Sender<String>,
) {
println!("Starting backend server @ http://localhost:3000/");
let state2 = state.clone();
let state3 = state.clone();

let (server_s, mut server_r) = mpsc::channel::<&str>(64);
let server_s2 = server_s.clone();

let ws2_s = dev_server_events_s.clone();
// let ws3_s = dev_server_events_s.clone();

tokio::spawn(async move {
let mut m = state.lock().await;
m.backend_server_running = true;
drop(m);

let new_child_process = || {
Command::new("cargo")
.arg("run")
.kill_on_drop(true)
.env("DEV_SERVER_PORT", dev_port.to_string())
.current_dir(project_dir)
.spawn()
.unwrap()
};

let mut child_process = new_child_process();
ws2_s.send(DevServerEvent::CHECK_MIGRATIONS).ok();

while let Some(event) = server_r.recv().await {
match event {
"restart" => {
println!("♻️ Restarting server...");
// we don't send BackendStatus(false) but instead we notify the user that it's restarting which should be enough
// ws2_s.send(DevServerEvent::BackendStatus(false)).ok();
ws2_s.send(DevServerEvent::BackendRestarting(true)).ok();
if child_process.id().is_some() {
child_process.kill().await.unwrap();
}
child_process = new_child_process();
// note: the child process will hit /backend-up which
// sends backend status "true" to the websocket
// i.e. ws2_s.send(DevServerEvent::BackendStatus(true)).ok();
// is not necessary
}
"stop" => {
println!("Shutting down backend server...");
ws2_s.send(DevServerEvent::BackendStatus(false)).ok();
if child_process.id().is_some() {
child_process.kill().await.unwrap();
}
let mut m = state.lock().await;
m.backend_server_running = false;
check_exit(&m);
drop(m);
break;
}
_ => {}
}
}
});

let mut init = InitConfig::default();
init.on_error(PrintDebug(std::io::stderr()));

let mut runtime = RuntimeConfig::default();
// runtime.command(watchexec::command::Command::Exec {
// prog: "cargo".to_string(),
// args: vec!["run".to_string()],
// });
let mut file_events_r = file_events_s.subscribe();
let files_to_ignore = Arc::new(std::sync::Mutex::new(vec![]));
let files_to_ignore2 = files_to_ignore.clone();
tokio::spawn(async move {
while let Ok(file) = file_events_r.recv().await {
let mut arr = files_to_ignore2.lock().unwrap();
arr.push(file);
}
});
let backend_dir = PathBuf::from(format!("{project_dir}/backend"));
let migrations_dir = PathBuf::from(format!("{project_dir}/migrations"));
runtime.pathset([backend_dir, migrations_dir.clone()]);
runtime.on_action(move |action: Action| {
let files_to_ignore = files_to_ignore.clone();
let server_s2 = server_s2.clone();
let state3 = state3.clone();
let ws_s = dev_server_events_s.clone();
let migrations_dir = migrations_dir.clone();
async move {
let exit_events = action
.events
.iter()
.filter(|e| e.metadata.contains_key("exit-watchexec"))
.collect::<Vec<_>>();

if !exit_events.is_empty() {
println!("continuous backend compilation stopped.");
let mut m = state3.lock().await;
m.watchexec_running = false;
check_exit(&m);
drop(m);
return Ok(());
}

let mut ignored_files = files_to_ignore.lock().unwrap();
let files_to_ignore: Vec<String> = ignored_files.clone();
ignored_files.clear();
drop(ignored_files);
// println!("=> ignoring {:#?}", files_to_ignore);

let mut touched_migrations_dir = false;
let file_events = action
.events
.iter()
.filter(|e| {
e.tags.iter().any(|t| match t {
Tag::Path { path, file_type: _ } => {
// println!("PATH {:#?}", path);

if path
.to_str()
.unwrap()
.starts_with(migrations_dir.as_os_str().to_str().unwrap())
{
touched_migrations_dir = true;
}

!files_to_ignore.iter().any(|file_to_ignore| {
path.to_str().unwrap().ends_with(file_to_ignore)
})
}
_ => false,
})
})
.collect::<Vec<_>>();

if !file_events.is_empty() {
// compile
ws_s.send(DevServerEvent::BackendCompiling(true)).ok();
if compile(project_dir, ws_s.clone()) {
// restart backend
server_s2.send("restart").await.unwrap();
ws_s.send(DevServerEvent::CompileSuccess(true)).ok();
} else {
ws_s.send(DevServerEvent::CompileSuccess(false)).ok();
}
ws_s.send(DevServerEvent::BackendCompiling(false)).ok();
}

if touched_migrations_dir {
ws_s.send(DevServerEvent::CHECK_MIGRATIONS).ok();
}

Ok::<(), std::io::Error>(())
}
});

let we = Watchexec::new(init, runtime.clone()).unwrap();
let we2 = we.clone();

tokio::spawn(async move {
while let Ok(event) = signal_rx.recv().await {
if let DevServerEvent::SHUTDOWN = event {
let mut metadata = HashMap::new();
metadata.insert("exit-watchexec".to_string(), vec!["true".to_string()]);
we2.send_event(
Event {
tags: vec![
Tag::Signal(MainSignal::Interrupt),
Tag::Signal(MainSignal::Terminate),
],
metadata,
},
Priority::Urgent,
)
.await
.unwrap(); // stops watch exec
server_s.send("stop").await.unwrap(); // stops backend server
}
}
});

let mut m = state2.lock().await;
m.watchexec_running = true;
drop(m);

we.main().await.unwrap().unwrap();
println!("backend compilation server stopped.");
}

fn compile(project_dir: &'static str, ws_s: Sender<DevServerEvent>) -> bool {
println!("🔨 Compiling backend...");
let start_time = std::time::SystemTime::now();

let mut command = std::process::Command::new("cargo")
.args([
"build",
"-q",
"--message-format=json-diagnostic-rendered-ansi",
])
.current_dir(project_dir)
.stdout(Stdio::piped())
.spawn()
.unwrap();

let reader = std::io::BufReader::new(command.stdout.take().unwrap());
let mut compiler_messages = vec![];
ws_s.send(DevServerEvent::CompileMessages(compiler_messages.clone()))
.ok(); // clear previous messages
for message in cargo_metadata::Message::parse_stream(reader) {
match message.unwrap() {
Message::CompilerMessage(msg) => {
compiler_messages.push(msg);
ws_s.send(DevServerEvent::CompileMessages(compiler_messages.clone()))
.ok();
}
Message::CompilerArtifact(_) => {
// println!("{:?}", artifact);
}
Message::BuildScriptExecuted(_) => {
// println!("{:?}", script);
}
Message::BuildFinished(finished) => {
let compile_time_s = std::time::SystemTime::now()
.duration_since(start_time)
.map(|d| d.as_secs_f32())
.map(|d| format!("{d:.2}"))
.unwrap_or("?".to_string());

if finished.success {
println!("✅ Compiled ({compile_time_s} seconds)");
} else {
println!("❌ Compilation failed: see errors in app ({compile_time_s} seconds)",);
}
}
_ => (), // Unknown message
}
}

command
.wait_with_output()
.expect("Error retrieving `cargo build` exit status")
.status
.success()
}
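
For context, a hedged sketch of how a caller could wire `start` up, based only on the signature above. The `create_rust_app::dev` re-export path and a `Default` impl for `DevState` are assumptions, not established by this diff.

use std::sync::Arc;
use tokio::sync::{broadcast, Mutex};

use create_rust_app::dev::{backend_compiling_server, DevServerEvent, DevState}; // paths assumed

#[tokio::main]
async fn main() {
    // One broadcast channel carries dev-server events (including SHUTDOWN);
    // a second carries file paths the watcher should ignore once.
    let (events_s, events_r) = broadcast::channel::<DevServerEvent>(64);
    let (file_events_s, _keepalive_r) = broadcast::channel::<String>(64);
    let state = Arc::new(Mutex::new(DevState::default())); // Default assumed

    backend_compiling_server::start(
        "/path/to/generated/project", // project_dir (&'static str)
        3001,                         // dev_port
        events_r,                     // receives the SHUTDOWN signal
        events_s.clone(),             // publishes compile/migration events
        state,
        file_events_s,
    )
    .await;
}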