#![recursion_limit = "128"]

#[macro_use]
extern crate quote;

use proc_macro::TokenStream;
use proc_macro2::TokenStream as TokenStream2;
use std::fs::{read_dir, File};
use std::io::Read;
use std::path::Path;
use std::str::FromStr;
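
/// Imports every migration found under the crate's `migrations` directory (or
/// a backend-specific subdirectory selected by the `postgres`/`sqlite`
/// features) at compile time, producing an `ImportedMigrations` value that
/// holds each migration's name together with its `up.sql` and `down.sql`
/// actions.
///
/// A hypothetical invocation; `ImportedMigrations`, `ComplexMigration`, and
/// `Action` are expected to be defined by the consuming crate:
///
/// ```ignore
/// let migrations = import_migrations!();
/// ```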
#[proc_macro]
pub fn import_migrations(input: TokenStream) -> TokenStream {
    assert!(input.is_empty());
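    // Pick the migration directory matching the enabled database backend.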
    let migration_dir = if cfg!(feature = "postgres") {
        "migrations/postgres"
    } else if cfg!(feature = "sqlite") {
        "migrations/sqlite"
    } else {
        "migrations"
    };
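    // Walk up from CARGO_MANIFEST_DIR until a directory containing the
    // migrations (or the repository root, marked by `.git`) is found.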
    let path = Path::new(env!("CARGO_MANIFEST_DIR"))
        .ancestors()
        .find(|path| path.join(migration_dir).is_dir() || path.join(".git").exists())
        .expect("migrations dir not found")
        .join(migration_dir);
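    // Collect the per-migration subdirectories and sort them so migrations
    // are emitted in name order.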
    let mut files = read_dir(path)
        .unwrap()
        .map(|dir| dir.unwrap())
        .filter(|dir| dir.file_type().unwrap().is_dir())
        .map(|dir| dir.path())
        .collect::<Vec<_>>();
    files.sort_unstable();
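    // Read each migration's `up.sql` and `down.sql`, and derive its name from
    // the first 14 digits of the directory name.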
    let migrations = files
        .into_iter()
        .map(|path| {
            let mut up = path.clone();
            let mut down = path.clone();
            up.push("up.sql");
            down.push("down.sql");
            let mut up_sql = String::new();
            let mut down_sql = String::new();
            File::open(up).unwrap().read_to_string(&mut up_sql).unwrap();
            File::open(down)
                .unwrap()
                .read_to_string(&mut down_sql)
                .unwrap();
            let name = path
                .file_name()
                .unwrap()
                .to_str()
                .unwrap()
                .chars()
                .filter(char::is_ascii_digit)
                .take(14)
                .collect::<String>();
            (name, up_sql, down_sql)
        })
        .collect::<Vec<_>>();
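    // Convert the raw SQL of each migration into `&[Action]` token streams.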
    let migrations_name = migrations.iter().map(|m| &m.0);
    let migrations_up = migrations
        .iter()
        .map(|m| m.1.as_str())
        .map(file_to_migration)
        .collect::<Vec<_>>();
    let migrations_down = migrations
        .iter()
        .map(|m| m.2.as_str())
        .map(file_to_migration)
        .collect::<Vec<_>>();

    /*
    enum Action {
        Sql(&'static str),
        Function(&'static Fn(&Connection, &Path) -> Result<()>)
    }*/

    quote!(
        ImportedMigrations(
            &[#(ComplexMigration{name: #migrations_name, up: #migrations_up, down: #migrations_down}),*]
        )
    ).into()
}
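
/// Splits a migration file into a list of `Action`s: plain SQL statements and
/// embedded Rust code marked by a leading `--#!` on each line. Other `--`
/// comment lines are skipped. Returns a token stream for a `&[Action]` slice.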
fn file_to_migration(file: &str) -> TokenStream2 {
    let mut sql = true;
    let mut acc = String::new();
    let mut actions = vec![];
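    // Walk the file line by line, accumulating either SQL text or
    // `--#!`-prefixed Rust code, and emit an `Action` whenever the mode
    // switches.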
    for line in file.lines() {
        if sql {
            if let Some(acc_str) = line.strip_prefix("--#!") {
                if !acc.trim().is_empty() {
                    actions.push(quote!(Action::Sql(#acc)));
                }
                sql = false;
                acc = acc_str.to_string();
                acc.push('\n');
            } else if line.starts_with("--") {
                continue;
            } else {
                acc.push_str(line);
                acc.push('\n');
            }
        } else if let Some(acc_str) = line.strip_prefix("--#!") {
            acc.push_str(acc_str);
            acc.push('\n');
        } else if line.starts_with("--") {
            continue;
        } else {
            let func: TokenStream2 = trampoline(TokenStream::from_str(&acc).unwrap().into());
            actions.push(quote!(Action::Function(&#func)));
            sql = true;
            acc = line.to_string();
            acc.push('\n');
        }
    }
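    // Flush whatever is left in the accumulator after the last line.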
    if !acc.trim().is_empty() {
        if sql {
            actions.push(quote!(Action::Sql(#acc)));
        } else {
            let func: TokenStream2 = trampoline(TokenStream::from_str(&acc).unwrap().into());
            actions.push(quote!(Action::Function(&#func)));
        }
    }

    quote!(
        &[#(#actions),*]
    )
}

/// Build a trampoline `fn` around the closure so it can be referenced from a
/// const context.
fn trampoline(closure: TokenStream2) -> TokenStream2 {
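    // Emits a block expression that defines a concrete `fn` wrapping the
    // closure and evaluates to that function, so the generated code can take
    // a `&'static` reference to it.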
    quote! {
        {
            fn trampoline<'a, 'b>(conn: &'a Connection, path: &'b Path) -> Result<()> {
                (#closure)(conn, path)
            }
            trampoline
        }
    }
}