Commit e89cb09

fix(macros): tell the compiler about external files/env vars to watch (#1332)
* fix(macros): tell the compiler about external files/env vars to watch
  closes #663
  closes #681
* feat(cli): add `migrate` subcommand for generating a build script
  suggest embedding migrations on `sqlx migrate add` in a new project
1 parent: a8544fd · commit: e89cb09

12 files changed: +259 −31 lines
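
The change is easier to follow with the end state in mind: after `sqlx migrate add` creates the first migration, the embedding it suggests looks roughly like the following. This is a minimal sketch, not code from this commit, assuming the tokio runtime feature, `anyhow`, a Postgres pool, and the default `migrations/` directory; the connection string is illustrative.

use sqlx::postgres::PgPoolOptions;

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    // illustrative connection string; any database supported by sqlx works the same way
    let pool = PgPoolOptions::new()
        .connect("postgres://postgres:password@localhost/app")
        .await?;

    // sqlx::migrate!() defaults to "./migrations"; the embedded Migrator applies any
    // migrations that have not yet been run against this database
    sqlx::migrate!().run(&pool).await?;

    Ok(())
}

The build-script subcommand added below exists because adding a new .sql file by itself changes no Rust source and therefore does not trigger a rebuild of the embedded migrator.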

sqlx-cli/src/lib.rs (+1)

@@ -36,6 +36,7 @@ pub async fn run(opt: Opt) -> anyhow::Result<()> {
                 ignore_missing,
             } => migrate::revert(&migrate.source, &database_url, dry_run, ignore_missing).await?,
             MigrateCommand::Info => migrate::info(&migrate.source, &database_url).await?,
+            MigrateCommand::BuildScript { force } => migrate::build_script(&migrate.source, force)?,
         },
 
         Command::Database(database) => match database.command {

sqlx-cli/src/migrate.rs (+57)

@@ -42,6 +42,11 @@ pub async fn add(
 ) -> anyhow::Result<()> {
     fs::create_dir_all(migration_source).context("Unable to create migrations directory")?;
 
+    // check whether the migrations directory already contains any migrations
+    let has_existing_migrations = fs::read_dir(migration_source)
+        .map(|mut dir| dir.next().is_some())
+        .unwrap_or(false);
+
     let migrator = Migrator::new(Path::new(migration_source)).await?;
     // This checks if all existing migrations are of the same type as the reversible flag passed
     for migration in migrator.iter() {
@@ -74,6 +79,31 @@ pub async fn add(
         )?;
     }
 
+    if !has_existing_migrations {
+        let quoted_source = if migration_source != "migrations" {
+            format!("{:?}", migration_source)
+        } else {
+            "".to_string()
+        };
+
+        print!(
+            r#"
+Congratulations on creating your first migration!
+
+Did you know you can embed your migrations in your application binary?
+On startup, after creating your database connection or pool, add:
+
+sqlx::migrate!({}).run(<&your_pool OR &mut your_connection>).await?;
+
+Note that the compiler won't pick up new migrations if no Rust source files have changed.
+You can create a Cargo build script to work around this with `sqlx migrate build-script`.
+
+See: https://docs.rs/sqlx/0.5/sqlx/macro.migrate.html
+"#,
+            quoted_source
+        );
+    }
+
     Ok(())
 }
 
@@ -245,3 +275,30 @@ pub async fn revert(
 
     Ok(())
 }
+
+pub fn build_script(migration_source: &str, force: bool) -> anyhow::Result<()> {
+    anyhow::ensure!(
+        Path::new("Cargo.toml").exists(),
+        "must be run in a Cargo project root"
+    );
+
+    anyhow::ensure!(
+        force || !Path::new("build.rs").exists(),
+        "build.rs already exists; use --force to overwrite"
+    );
+
+    let contents = format!(
+        r#"// generated by `sqlx migrate build-script`
+fn main() {{
+    // trigger recompilation when a new migration is added
+    println!("cargo:rerun-if-changed={}");
+}}"#,
+        migration_source
+    );
+
+    fs::write("build.rs", contents)?;
+
+    println!("Created `build.rs`; be sure to check it into version control!");
+
+    Ok(())
+}
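
For a project that keeps its migrations in the default `migrations/` directory, `sqlx migrate build-script` therefore writes a `build.rs` equivalent to the template above with the source filled in:

// generated by `sqlx migrate build-script`
fn main() {
    // trigger recompilation when a new migration is added
    println!("cargo:rerun-if-changed=migrations");
}

Run it as `sqlx migrate build-script`, or with `--force` to overwrite an existing `build.rs` (see the new `BuildScript` variant in sqlx-cli/src/opt.rs below).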

sqlx-cli/src/opt.rs (+9)

@@ -130,4 +130,13 @@ pub enum MigrateCommand {
 
     /// List all available migrations.
     Info,
+
+    /// Generate a `build.rs` to trigger recompilation when a new migration is added.
+    ///
+    /// Must be run in a Cargo project root.
+    BuildScript {
+        /// Overwrite the build script if it already exists.
+        #[clap(long)]
+        force: bool,
+    },
 }

sqlx-macros/src/lib.rs (+4)

@@ -2,6 +2,10 @@
     not(any(feature = "postgres", feature = "mysql", feature = "offline")),
     allow(dead_code, unused_macros, unused_imports)
 )]
+#![cfg_attr(
+    any(sqlx_macros_unstable, procmacro2_semver_exempt),
+    feature(track_path, proc_macro_tracked_env)
+)]
 extern crate proc_macro;
 
 use proc_macro::TokenStream;
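
The `cfg_attr` above only takes effect when the build opts in, e.g. on a nightly toolchain with `--cfg sqlx_macros_unstable` passed via RUSTFLAGS, or under proc-macro2's semver-exempt mode; on stable it expands to nothing. Below is a sketch, not code from this commit, of how an env-var lookup inside the proc-macro crate might be routed through the nightly API enabled by `proc_macro_tracked_env`, with a stable fallback; `macro_env_var` is an illustrative name.

// sketch only: relies on the crate-level feature gate above and on being called
// from within a proc-macro expansion when the unstable cfgs are active
fn macro_env_var(name: &str) -> Result<String, std::env::VarError> {
    // with the unstable cfgs active, the compiler records the variable, so
    // changing it invalidates cached macro output
    #[cfg(any(sqlx_macros_unstable, procmacro2_semver_exempt))]
    return proc_macro::tracked_env::var(name);

    // stable fallback: plain lookup, with no dependency tracking
    #[cfg(not(any(sqlx_macros_unstable, procmacro2_semver_exempt)))]
    return std::env::var(name);
}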

sqlx-macros/src/migrate.rs (+31 −5)

@@ -24,7 +24,7 @@ struct QuotedMigration {
     version: i64,
     description: String,
     migration_type: QuotedMigrationType,
-    sql: String,
+    path: String,
     checksum: Vec<u8>,
 }
 
@@ -34,7 +34,7 @@ impl ToTokens for QuotedMigration {
             version,
             description,
             migration_type,
-            sql,
+            path,
             checksum,
         } = &self;
 
@@ -43,7 +43,8 @@ impl ToTokens for QuotedMigration {
             version: #version,
             description: ::std::borrow::Cow::Borrowed(#description),
             migration_type: #migration_type,
-            sql: ::std::borrow::Cow::Borrowed(#sql),
+            // this tells the compiler to watch this path for changes
+            sql: ::std::borrow::Cow::Borrowed(include_str!(#path)),
             checksum: ::std::borrow::Cow::Borrowed(&[
                 #(#checksum),*
             ]),
@@ -59,7 +60,7 @@ pub(crate) fn expand_migrator_from_dir(dir: LitStr) -> crate::Result<TokenStream
     let path = crate::common::resolve_path(&dir.value(), dir.span())?;
     let mut migrations = Vec::new();
 
-    for entry in fs::read_dir(path)? {
+    for entry in fs::read_dir(&path)? {
         let entry = entry?;
         if !fs::metadata(entry.path())?.is_file() {
             // not a file; ignore
@@ -89,18 +90,43 @@ pub(crate) fn expand_migrator_from_dir(dir: LitStr) -> crate::Result<TokenStream
 
         let checksum = Vec::from(Sha384::digest(sql.as_bytes()).as_slice());
 
+        // canonicalize the path so we can pass it to `include_str!()`
+        let path = entry.path().canonicalize()?;
+        let path = path
+            .to_str()
+            .ok_or_else(|| {
+                format!(
+                    "migration path cannot be represented as a string: {:?}",
+                    path
+                )
+            })?
+            .to_owned();
+
         migrations.push(QuotedMigration {
             version,
             description,
             migration_type: QuotedMigrationType(migration_type),
-            sql,
+            path,
             checksum,
         })
     }
 
     // ensure that we are sorted by `VERSION ASC`
     migrations.sort_by_key(|m| m.version);
 
+    #[cfg(any(sqlx_macros_unstable, procmacro2_semver_exempt))]
+    {
+        let path = path.canonicalize()?;
+        let path = path.to_str().ok_or_else(|| {
+            format!(
+                "migration directory path cannot be represented as a string: {:?}",
+                path
+            )
+        })?;
+
+        proc_macro::tracked_path::path(path);
+    }
+
     Ok(quote! {
         ::sqlx::migrate::Migrator {
             migrations: ::std::borrow::Cow::Borrowed(&[
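
The key change here is emitting `include_str!(#path)` instead of a string literal the proc macro pre-read: `include_str!` both inlines the file's contents and registers the file as a compile-time dependency, so editing an existing migration now triggers a rebuild even on stable Rust (newly added files are covered by the tracked-path block in the same hunk on nightly, or by the generated build script). A stand-alone illustration of the effect, with a hypothetical file name; the path is resolved relative to this source file:

// editing example.sql triggers a rebuild of this crate, because files pulled in via
// include_str! are tracked by the compiler; a string literal produced by a proc macro
// that merely read the file at expansion time would not be tracked
const EXAMPLE_SQL: &str = include_str!("example.sql");

fn main() {
    println!("{}", EXAMPLE_SQL);
}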

sqlx-macros/src/query/data.rs (+17 −3)

@@ -61,16 +61,30 @@ pub mod offline {
     /// Find and deserialize the data table for this query from a shared `sqlx-data.json`
     /// file. The expected structure is a JSON map keyed by the SHA-256 hash of queries in hex.
     pub fn from_data_file(path: impl AsRef<Path>, query: &str) -> crate::Result<Self> {
-        serde_json::Deserializer::from_reader(BufReader::new(
+        let this = serde_json::Deserializer::from_reader(BufReader::new(
             File::open(path.as_ref()).map_err(|e| {
                 format!("failed to open path {}: {}", path.as_ref().display(), e)
             })?,
         ))
         .deserialize_map(DataFileVisitor {
             query,
             hash: hash_string(query),
-        })
-        .map_err(Into::into)
+        })?;
+
+        #[cfg(procmacro2_semver_exempt)]
+        {
+            let path = path.as_ref().canonicalize()?;
+            let path = path.to_str().ok_or_else(|| {
+                format!(
+                    "sqlx-data.json path cannot be represented as a string: {:?}",
+                    path
+                )
+            })?;
+
+            proc_macro::tracked_path::path(path);
+        }
+
+        Ok(this)
     }
 }
 

sqlx-macros/src/query/input.rs (+28 −2)

@@ -8,7 +8,7 @@ use syn::{ExprArray, Type};
 
 /// Macro input shared by `query!()` and `query_file!()`
 pub struct QueryMacroInput {
-    pub(super) src: String,
+    pub(super) sql: String,
 
     #[cfg_attr(not(feature = "offline"), allow(dead_code))]
     pub(super) src_span: Span,
@@ -18,6 +18,8 @@ pub struct QueryMacroInput {
     pub(super) arg_exprs: Vec<Expr>,
 
     pub(super) checked: bool,
+
+    pub(super) file_path: Option<String>,
 }
 
 enum QuerySrc {
@@ -94,12 +96,15 @@ impl Parse for QueryMacroInput {
 
         let arg_exprs = args.unwrap_or_default();
 
+        let file_path = src.file_path(src_span)?;
+
         Ok(QueryMacroInput {
-            src: src.resolve(src_span)?,
+            sql: src.resolve(src_span)?,
             src_span,
             record_type,
             arg_exprs,
             checked,
+            file_path,
        })
     }
 }
@@ -112,6 +117,27 @@ impl QuerySrc {
             QuerySrc::File(file) => read_file_src(&file, source_span),
         }
     }
+
+    fn file_path(&self, source_span: Span) -> syn::Result<Option<String>> {
+        if let QuerySrc::File(ref file) = *self {
+            let path = std::path::Path::new(file)
+                .canonicalize()
+                .map_err(|e| syn::Error::new(source_span, e))?;
+
+            Ok(Some(
+                path.to_str()
+                    .ok_or_else(|| {
+                        syn::Error::new(
+                            source_span,
+                            "query file path cannot be represented as a string",
+                        )
+                    })?
+                    .to_string(),
+            ))
+        } else {
+            Ok(None)
+        }
+    }
 }
 
 fn read_file_src(source: &str, source_span: Span) -> syn::Result<String> {
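
Recording the canonicalized file path here is what later lets the macro register the query file with the compiler, so edits to a `.sql` file used by `query_file!` are picked up. A usage sketch, not code from this commit, with a hypothetical queries/get_user.sql (resolved relative to the crate root) returning a non-null `name` column; `pool`, `user_id`, and the use of `anyhow` are assumptions, and compile-time checking still needs `DATABASE_URL` or offline data as usual:

use sqlx::postgres::PgPool;

async fn get_user_name(pool: &PgPool, user_id: i64) -> anyhow::Result<String> {
    // with this change, editing queries/get_user.sql causes the macro output to be
    // invalidated instead of silently reusing the previously compiled query
    let row = sqlx::query_file!("queries/get_user.sql", user_id)
        .fetch_one(pool)
        .await?;

    Ok(row.name)
}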
