diff --git a/contrib/db_pools/lib/Cargo.toml b/contrib/db_pools/lib/Cargo.toml
index abbc50e6..1835acb7 100644
--- a/contrib/db_pools/lib/Cargo.toml
+++ b/contrib/db_pools/lib/Cargo.toml
@@ -26,8 +26,8 @@ sqlx_postgres = ["sqlx", "sqlx/postgres", "log"]
 sqlx_sqlite = ["sqlx", "sqlx/sqlite", "log"]
 sqlx_macros = ["sqlx/macros"]
 # diesel features
-diesel_postgres = ["diesel-async/postgres", "diesel-async/deadpool", "diesel", "deadpool_09"]
-diesel_mysql = ["diesel-async/mysql", "diesel-async/deadpool", "diesel", "deadpool_09"]
+diesel_postgres = ["diesel-async/postgres", "diesel-async/deadpool", "deadpool", "diesel"]
+diesel_mysql = ["diesel-async/mysql", "diesel-async/deadpool", "deadpool", "diesel"]
 # implicit features: mongodb
 
 [dependencies.rocket]
@@ -39,9 +39,8 @@ default-features = false
 path = "../codegen"
 version = "0.1.0"
 
-[dependencies.deadpool_09]
-package = "deadpool"
-version = "0.9.5"
+[dependencies.deadpool]
+version = "0.12.1"
 default-features = false
 features = ["rt_tokio_1", "managed"]
 optional = true
@@ -52,12 +51,6 @@ default-features = false
 features = ["rt_tokio_1"]
 optional = true
 
-[dependencies.deadpool]
-version = "0.12.1"
-default-features = false
-features = ["rt_tokio_1", "managed"]
-optional = true
-
 [dependencies.deadpool-redis]
 version = "0.15"
 default-features = false
@@ -71,8 +64,9 @@ features = ["compat-3-0-0", "rustls-tls"]
 optional = true
 
 [dependencies.diesel-async]
-version = "0.4.1"
+version = "0.5.0"
 default-features = false
+features = ["async-connection-wrapper"]
 optional = true
 
 [dependencies.diesel]
diff --git a/contrib/db_pools/lib/src/diesel.rs b/contrib/db_pools/lib/src/diesel.rs
index c7229eab..9d4d935d 100644
--- a/contrib/db_pools/lib/src/diesel.rs
+++ b/contrib/db_pools/lib/src/diesel.rs
@@ -84,6 +84,9 @@ pub use diesel_async::pg;
 #[doc(inline)]
 pub use diesel_async::pooled_connection::deadpool::Pool;
 
+#[doc(inline)]
+pub use diesel_async::async_connection_wrapper::AsyncConnectionWrapper;
+
 #[doc(inline)]
 #[cfg(feature = "diesel_mysql")]
 pub use diesel_async::AsyncMysqlConnection;
diff --git a/contrib/db_pools/lib/src/pool.rs b/contrib/db_pools/lib/src/pool.rs
index 73219a6d..b37fecce 100644
--- a/contrib/db_pools/lib/src/pool.rs
+++ b/contrib/db_pools/lib/src/pool.rs
@@ -157,6 +157,9 @@ mod deadpool_postgres {
     use deadpool::{Runtime, managed::{Manager, Pool, PoolError, Object}};
     use super::{Duration, Error, Config, Figment};
 
+    #[cfg(feature = "diesel")]
+    use diesel_async::pooled_connection::AsyncDieselConnectionManager;
+
     pub trait DeadManager: Manager + Sized + Send + Sync + 'static {
         fn new(config: &Config) -> Result<Self, Self::Error>;
     }
@@ -175,6 +178,20 @@
         }
     }
 
+    #[cfg(feature = "diesel_postgres")]
+    impl DeadManager for AsyncDieselConnectionManager<diesel_async::AsyncPgConnection> {
+        fn new(config: &Config) -> Result<Self, Self::Error> {
+            Ok(Self::new(config.url.as_str()))
+        }
+    }
+
+    #[cfg(feature = "diesel_mysql")]
+    impl DeadManager for AsyncDieselConnectionManager<diesel_async::AsyncMysqlConnection> {
+        fn new(config: &Config) -> Result<Self, Self::Error> {
+            Ok(Self::new(config.url.as_str()))
+        }
+    }
+
     #[rocket::async_trait]
     impl<M: DeadManager, C: From<Object<M>>> crate::Pool for Pool<M, C>
         where M::Type: Send, C: Send + Sync + 'static, M::Error: std::error::Error
@@ -207,64 +224,6 @@
     }
 }
 
-// TODO: Remove when new release of diesel-async with deadpool 0.10 is out.
-#[cfg(all(feature = "deadpool_09", any(feature = "diesel_postgres", feature = "diesel_mysql")))] -mod deadpool_old { - use deadpool_09::{managed::{Manager, Pool, PoolError, Object, BuildError}, Runtime}; - use diesel_async::pooled_connection::AsyncDieselConnectionManager; - - use super::{Duration, Error, Config, Figment}; - - pub trait DeadManager: Manager + Sized + Send + Sync + 'static { - fn new(config: &Config) -> Result; - } - - #[cfg(feature = "diesel_postgres")] - impl DeadManager for AsyncDieselConnectionManager { - fn new(config: &Config) -> Result { - Ok(Self::new(config.url.as_str())) - } - } - - #[cfg(feature = "diesel_mysql")] - impl DeadManager for AsyncDieselConnectionManager { - fn new(config: &Config) -> Result { - Ok(Self::new(config.url.as_str())) - } - } - - #[rocket::async_trait] - impl>> crate::Pool for Pool - where M::Type: Send, C: Send + Sync + 'static, M::Error: std::error::Error - { - type Error = Error, PoolError>; - - type Connection = C; - - async fn init(figment: &Figment) -> Result { - let config: Config = figment.extract()?; - let manager = M::new(&config).map_err(|e| Error::Init(BuildError::Backend(e)))?; - - Pool::builder(manager) - .max_size(config.max_connections) - .wait_timeout(Some(Duration::from_secs(config.connect_timeout))) - .create_timeout(Some(Duration::from_secs(config.connect_timeout))) - .recycle_timeout(config.idle_timeout.map(Duration::from_secs)) - .runtime(Runtime::Tokio1) - .build() - .map_err(Error::Init) - } - - async fn get(&self) -> Result { - self.get().await.map_err(Error::Get) - } - - async fn close(&self) { - >::close(self) - } - } -} - #[cfg(feature = "sqlx")] mod sqlx { use sqlx::ConnectOptions; diff --git a/contrib/sync_db_pools/lib/src/poolable.rs b/contrib/sync_db_pools/lib/src/poolable.rs index 0451de60..ebe50994 100644 --- a/contrib/sync_db_pools/lib/src/poolable.rs +++ b/contrib/sync_db_pools/lib/src/poolable.rs @@ -130,7 +130,7 @@ impl Poolable for diesel::SqliteConnection { fn on_acquire(&self, conn: &mut SqliteConnection) -> Result<(), Error> { conn.batch_execute("\ PRAGMA journal_mode = WAL;\ - PRAGMA busy_timeout = 1000;\ + PRAGMA busy_timeout = 5000;\ PRAGMA foreign_keys = ON;\ ").map_err(Error::QueryError)?; diff --git a/examples/databases/README.md b/examples/databases/README.md index f9505bf6..15eafc1f 100644 --- a/examples/databases/README.md +++ b/examples/databases/README.md @@ -14,7 +14,7 @@ This example implements a JSON-based HTTP API for a "blog" using several databas * `sqlx` (`/sqlx`, `sqlx.rs`) * `rusqlite` (`/rusqlite`, `rusqlite.rs`) * `diesel` (sqlite) (`/diesel`, `diesel_sqlite.rs`) - * `diesel-async` (mysql) (`/diesel-async`, `diesel_mysql.rs`) + * `diesel-async` (mysql) (`/mysql`, `diesel_mysql.rs`) The exposed API is succinctly described as follows, with [`httpie`](https://httpie.io/) CLI examples: diff --git a/examples/databases/src/diesel_mysql.rs b/examples/databases/src/diesel_mysql.rs index 2d5a9a56..0cfa3f17 100644 --- a/examples/databases/src/diesel_mysql.rs +++ b/examples/databases/src/diesel_mysql.rs @@ -1,9 +1,10 @@ +use rocket::{Rocket, Build}; use rocket::fairing::AdHoc; use rocket::response::{Debug, status::Created}; use rocket::serde::{Serialize, Deserialize, json::Json}; use rocket_db_pools::{Database, Connection}; -use rocket_db_pools::diesel::{MysqlPool, prelude::*}; +use rocket_db_pools::diesel::{prelude::*, MysqlPool}; type Result> = std::result::Result; @@ -34,7 +35,7 @@ diesel::table! 
{ #[post("/", data = "")] async fn create(mut db: Connection, mut post: Json) -> Result>> { - diesel::sql_function!(fn last_insert_id() -> BigInt); + diesel::define_sql_function!(fn last_insert_id() -> BigInt); let post = db.transaction(|mut conn| Box::pin(async move { diesel::insert_into(posts::table) @@ -89,9 +90,33 @@ async fn destroy(mut db: Connection) -> Result<()> { Ok(()) } +async fn run_migrations(rocket: Rocket) -> Rocket { + use rocket_db_pools::diesel::AsyncConnectionWrapper; + use diesel_migrations::{embed_migrations, EmbeddedMigrations, MigrationHarness}; + + const MIGRATIONS: EmbeddedMigrations = embed_migrations!("db/diesel/mysql-migrations"); + + let conn = Db::fetch(&rocket) + .expect("database is attached") + .get().await + .unwrap_or_else(|e| { + span_error!("failed to connect to MySQL database" => error!("{e}")); + panic!("aborting launch"); + }); + + // `run_pending_migrations` blocks, so it must be run in `spawn_blocking` + rocket::tokio::task::spawn_blocking(move || { + let mut conn: AsyncConnectionWrapper<_> = conn.into(); + conn.run_pending_migrations(MIGRATIONS).expect("diesel migrations"); + }).await.expect("diesel migrations"); + + rocket +} + pub fn stage() -> AdHoc { AdHoc::on_ignite("Diesel MySQL Stage", |rocket| async { rocket.attach(Db::init()) - .mount("/diesel-async", routes![list, read, create, delete, destroy]) + .attach(AdHoc::on_ignite("Diesel Migrations", run_migrations)) + .mount("/mysql", routes![list, read, create, delete, destroy]) }) }