diff --git a/src/admin/git_import.rs b/src/admin/git_import.rs
index 955bbd23438..e762a451559 100644
--- a/src/admin/git_import.rs
+++ b/src/admin/git_import.rs
@@ -28,7 +28,7 @@ pub struct Opts {
 }
 
 pub fn run(opts: Opts) -> anyhow::Result<()> {
-    let conn = db::oneoff_connection().unwrap();
+    let mut conn = db::oneoff_connection().unwrap();
     println!("fetching git repo");
     let config = RepositoryConfig::from_environment();
     let repo = Repository::open(&config)?;
@@ -54,8 +54,8 @@ pub fn run(opts: Opts) -> anyhow::Result<()> {
     let reader = BufReader::new(file);
     for line in reader.lines() {
         let krate: cargo_registry_index::Crate = serde_json::from_str(&line?)?;
-        conn.transaction(|| {
-            import_data(&conn, &krate)
+        conn.transaction(|conn| {
+            import_data(conn, &krate)
                 .with_context(|| format!("failed to update crate: {krate:?}"))
         })?;
     }
@@ -64,7 +64,7 @@ pub fn run(opts: Opts) -> anyhow::Result<()> {
     Ok(())
 }
 
-fn import_data(conn: &PgConnection, krate: &cargo_registry_index::Crate) -> QueryResult<()> {
+fn import_data(conn: &mut PgConnection, krate: &cargo_registry_index::Crate) -> QueryResult<()> {
     let version_id: i32 = versions::table
         .inner_join(crates::table)
         .filter(crates::name.eq(&krate.name))
diff --git a/src/admin/migrate.rs b/src/admin/migrate.rs
index fcb2cce7234..65481e08396 100644
--- a/src/admin/migrate.rs
+++ b/src/admin/migrate.rs
@@ -42,7 +42,9 @@ pub fn run(_opts: Opts) -> Result<(), Error> {
     // let migrations = ;
     let mut stdout = std::io::stdout();
     let mut harness = HarnessWithOutput::new(conn, &mut stdout);
-    harness.run_pending_migrations(MIGRATIONS);
+    harness
+        .run_pending_migrations(MIGRATIONS)
+        .expect("failed to run migrations");
 
     println!("==> synchronizing crate categories");
     crate::boot::categories::sync_with_connection(CATEGORIES_TOML, conn).unwrap();
diff --git a/src/admin/yank_version.rs b/src/admin/yank_version.rs
index 0443daf18ec..4b33ba3df29 100644
--- a/src/admin/yank_version.rs
+++ b/src/admin/yank_version.rs
@@ -24,15 +24,15 @@ pub struct Opts {
 }
 
 pub fn run(opts: Opts) {
-    let conn = db::oneoff_connection().unwrap();
-    conn.transaction::<_, diesel::result::Error, _>(|| {
-        yank(opts, &conn);
+    let mut conn = db::oneoff_connection().unwrap();
+    conn.transaction::<_, diesel::result::Error, _>(|conn| {
+        yank(opts, conn);
         Ok(())
     })
     .unwrap()
 }
 
-fn yank(opts: Opts, conn: &PgConnection) {
+fn yank(opts: Opts, conn: &mut PgConnection) {
     let Opts {
         crate_name,
         version,
diff --git a/src/bin/enqueue-job.rs b/src/bin/enqueue-job.rs
index 516c691d5d2..06be1769957 100644
--- a/src/bin/enqueue-job.rs
+++ b/src/bin/enqueue-job.rs
@@ -7,7 +7,7 @@ use swirl::schema::background_jobs::dsl::*;
 use swirl::Job;
 
 fn main() -> Result<()> {
-    let conn = db::oneoff_connection()?;
+    let conn = &mut db::oneoff_connection()?;
     let mut args = std::env::args().skip(1);
     let job = args.next().unwrap_or_default();
diff --git a/src/bin/monitor.rs b/src/bin/monitor.rs
index 151db774bb0..8c4494dc776 100644
--- a/src/bin/monitor.rs
+++ b/src/bin/monitor.rs
@@ -11,7 +11,7 @@ use cargo_registry::{admin::on_call, db, schema::*};
 use diesel::prelude::*;
 
 fn main() -> Result<()> {
-    let conn = db::oneoff_connection()?;
+    let conn = &mut db::oneoff_connection()?;
 
     check_failing_background_jobs(conn)?;
     check_stalled_update_downloads(conn)?;
@@ -108,7 +108,6 @@ fn check_stalled_update_downloads(conn: &mut PgConnection) -> Result<()> {
 /// Check for known spam patterns
 fn check_spam_attack(conn: &mut PgConnection) -> Result<()> {
     use cargo_registry::sql::canon_crate_name;
-    use diesel::dsl::*;
 
     const EVENT_KEY: &str = "spam_attack";
 
@@ -123,7 +122,7 @@ fn check_spam_attack(conn: &mut PgConnection) -> Result<()> {
     let mut event_description = None;
 
     let bad_crate: Option<String> = crates::table
-        .filter(canon_crate_name(crates::name).eq(any(bad_crate_names)))
+        .filter(canon_crate_name(crates::name).eq_any(bad_crate_names))
         .select(crates::name)
         .first(conn)
         .optional()?;
diff --git a/src/controllers/category.rs b/src/controllers/category.rs
index 4166734e454..f7a24f3bb2f 100644
--- a/src/controllers/category.rs
+++ b/src/controllers/category.rs
@@ -16,8 +16,7 @@ pub fn index(req: &mut dyn RequestExt) -> EndpointResult {
     let sort = query.get("sort").map_or("alpha", String::as_str);
 
     let conn = &mut req.db_read()?;
-    let categories =
-        Category::toplevel(conn, sort, i64::from(options.per_page), i64::from(offset))?;
+    let categories = Category::toplevel(conn, sort, options.per_page, offset)?;
     let categories = categories
         .into_iter()
         .map(Category::into)
diff --git a/src/models/krate.rs b/src/models/krate.rs
index 87ea2055e0e..4ab866c6f5e 100644
--- a/src/models/krate.rs
+++ b/src/models/krate.rs
@@ -434,8 +434,8 @@ impl Crate {
         let rows: Vec<WithCount<ReverseDependency>> =
             sql_query(include_str!("krate_reverse_dependencies.sql"))
                 .bind::<Integer, _>(self.id)
-                .bind::<BigInt, _>(i64::from(offset))
-                .bind::<BigInt, _>(i64::from(options.per_page))
+                .bind::<BigInt, _>(offset)
+                .bind::<BigInt, _>(options.per_page)
                 .load(conn)?;
 
         Ok(rows.records_and_total())
diff --git a/src/tests/all.rs b/src/tests/all.rs
index e149339d4f3..a754cece584 100644
--- a/src/tests/all.rs
+++ b/src/tests/all.rs
@@ -3,8 +3,6 @@
 #[macro_use]
 extern crate claim;
 #[macro_use]
-extern crate diesel;
-#[macro_use]
 extern crate serde;
 #[macro_use]
 extern crate serde_json;
diff --git a/src/tests/builders/krate.rs b/src/tests/builders/krate.rs
index 5669567587c..80890e28a94 100644
--- a/src/tests/builders/krate.rs
+++ b/src/tests/builders/krate.rs
@@ -111,7 +111,7 @@ impl<'a> CrateBuilder<'a> {
         self
     }
 
-    pub fn build(mut self, connection: &PgConnection) -> AppResult<Crate> {
+    pub fn build(mut self, connection: &mut PgConnection) -> AppResult<Crate> {
         use diesel::{insert_into, select, update};
 
         let mut krate = self
@@ -147,8 +147,8 @@ impl<'a> CrateBuilder<'a> {
                 ))
                 .execute(connection)?;
 
-            no_arg_sql_function!(refresh_recent_crate_downloads, ());
-            select(refresh_recent_crate_downloads).execute(connection)?;
+            sql_function!(fn refresh_recent_crate_downloads());
+            select(refresh_recent_crate_downloads()).execute(connection)?;
         }
 
         if !self.categories.is_empty() {
@@ -175,7 +175,7 @@ impl<'a> CrateBuilder<'a> {
     ///
     /// Panics (and fails the test) if any part of inserting the crate record fails.
     #[track_caller]
-    pub fn expect_build(self, connection: &PgConnection) -> Crate {
+    pub fn expect_build(self, connection: &mut PgConnection) -> Crate {
         let name = self.krate.name;
         self.build(connection).unwrap_or_else(|e| {
             panic!("Unable to create crate {}: {:?}", name, e);
diff --git a/src/tests/builders/version.rs b/src/tests/builders/version.rs
index 9f6164a09de..f119ca546a1 100644
--- a/src/tests/builders/version.rs
+++ b/src/tests/builders/version.rs
@@ -81,7 +81,7 @@ impl<'a> VersionBuilder<'a> {
         self,
         crate_id: i32,
         published_by: i32,
-        connection: &PgConnection,
+        connection: &mut PgConnection,
     ) -> AppResult<Version> {
         use diesel::{insert_into, update};
 
@@ -144,7 +144,7 @@ impl<'a> VersionBuilder<'a> {
         self,
         crate_id: i32,
         published_by: i32,
-        connection: &PgConnection,
+        connection: &mut PgConnection,
     ) -> Version {
         self.build(crate_id, published_by, connection)
             .unwrap_or_else(|e| {
diff --git a/src/tests/categories.rs b/src/tests/categories.rs
index 8516c0331f0..1f65df66fec 100644
--- a/src/tests/categories.rs
+++ b/src/tests/categories.rs
@@ -40,7 +40,7 @@ description = "Another category ho hum"
 fn pg_connection() -> PgConnection {
     let database_url =
         dotenv::var("TEST_DATABASE_URL").expect("TEST_DATABASE_URL must be set to run tests");
-    let conn = PgConnection::establish(&database_url).unwrap();
+    let mut conn = PgConnection::establish(&database_url).unwrap();
     conn.begin_test_transaction().unwrap();
     conn
 }
diff --git a/src/tests/server_binary.rs b/src/tests/server_binary.rs
index 5652cde237e..28932b0e47b 100644
--- a/src/tests/server_binary.rs
+++ b/src/tests/server_binary.rs
@@ -16,7 +16,7 @@ const SERVER_BOOT_TIMEOUT_SECONDS: u64 = 30;
 
 #[test]
 fn normal_startup() -> Result<(), Error> {
     let server_bin = ServerBin::prepare()?;
-    initialize_dummy_crate(&server_bin.db()?);
+    initialize_dummy_crate(&mut server_bin.db()?);
 
     let running_server = server_bin.start()?;
@@ -36,7 +36,7 @@
 #[test]
 fn startup_without_database() -> Result<(), Error> {
     let server_bin = ServerBin::prepare()?;
-    initialize_dummy_crate(&server_bin.db()?);
+    initialize_dummy_crate(&mut server_bin.db()?);
 
     // Break the networking *before* starting the binary, to ensure the binary can fully startup
     // without a database connection. Most of crates.io should not work when started without a
diff --git a/src/tests/util/fresh_schema.rs b/src/tests/util/fresh_schema.rs
index 9c14d6f7f74..08ba69a036d 100644
--- a/src/tests/util/fresh_schema.rs
+++ b/src/tests/util/fresh_schema.rs
@@ -1,6 +1,6 @@
 use diesel::connection::SimpleConnection;
 use diesel::prelude::*;
-use diesel_migrations::{find_migrations_directory, run_pending_migrations_in_directory};
+use diesel_migrations::{FileBasedMigrations, MigrationHarness};
 use rand::Rng;
 
 pub(crate) struct FreshSchema {
@@ -13,7 +13,7 @@ impl FreshSchema {
     pub(crate) fn new(database_url: &str) -> Self {
         let schema_name = generate_schema_name();
 
-        let conn = PgConnection::establish(database_url).expect("can't connect to the test db");
+        let mut conn = PgConnection::establish(database_url).expect("can't connect to the test db");
         conn.batch_execute(&format!(
             "
             DROP SCHEMA IF EXISTS {schema_name} CASCADE;
@@ -23,8 +23,9 @@ impl FreshSchema {
         ))
         .expect("failed to initialize schema");
 
-        let migrations_dir = find_migrations_directory().unwrap();
-        run_pending_migrations_in_directory(conn, &migrations_dir, &mut std::io::sink())
+        let migrations =
+            FileBasedMigrations::find_migrations_directory().expect("Could not find migrations");
+        conn.run_pending_migrations(migrations)
             .expect("failed to run migrations on the test schema");
 
         let database_url = url::Url::parse_with_params(
diff --git a/src/tests/util/test_app.rs b/src/tests/util/test_app.rs
index 773e4b7183a..3d0083b91c5 100644
--- a/src/tests/util/test_app.rs
+++ b/src/tests/util/test_app.rs
@@ -46,7 +46,7 @@ impl Drop for TestAppInner {
 
         // Manually verify that all jobs have completed successfully
        // This will catch any tests that enqueued a job but forgot to initialize the runner
-        let conn = self.app.primary_database.get().unwrap();
+        let conn = &mut *self.app.primary_database.get().unwrap();
         let job_count: i64 = background_jobs.count().get_result(conn).unwrap();
         assert_eq!(
             0, job_count,
@@ -92,8 +92,8 @@ impl TestApp {
     /// connection before making any API calls. Once the closure returns, the connection is
     /// dropped, ensuring it is returned to the pool and available for any future API calls.
     pub fn db<T, F: FnOnce(&mut PgConnection) -> T>(&self, f: F) -> T {
-        let conn = self.0.app.primary_database.get().unwrap();
-        f(&mut conn)
+        let conn = &mut self.0.app.primary_database.get().unwrap();
+        f(conn)
     }
 
     /// Create a new user with a verified email address in the database and return a mock user