Fix build errors

Run rustfmt

Fix some errors from rebasing

Fix some wrong imports

Remove unused embed_migrations macro from diesel_infer_schema

Add missing dev-dependency to migrations_internals

Add missing -- to .travis.yml

Add missing dev-dependencies for diesel_migrations

Restore more changes missing from the rebase

Run rustfmt

Remove unused dependency

Remove accidentally set default feature

Remove unused macro_use annotation

Sqlite tests need macro import for diesel_migrations
Georg Semmler 2017-10-11 12:23:25 +02:00
parent f71ef43de2
commit 84f9291dbc
25 changed files with 33 additions and 901 deletions


@@ -20,7 +20,7 @@ script:
(cd diesel && travis-cargo test -- --no-default-features --features "extras $BACKEND")
fi &&
(cd diesel && travis-cargo test -- --no-default-features --features "extras with-deprecated $BACKEND") &&
-(cd diesel_derives && travis-cargo test -- --features "$BACKEND") &&
+(cd diesel_derives && travis-cargo test -- --features "$BACKEND") &&
if [[ "$BACKEND" == postgres ]]; then
(cd examples/postgres && ./test_all)
fi &&
@@ -36,7 +36,7 @@ script:
(cd diesel_infer_schema && travis-cargo test -- --no-default-features --features "dotenv_macro $BACKEND") &&
(cd diesel_migrations/migrations_internals && travis-cargo test ) &&
(cd diesel_migrations/migrations_macros && travis-cargo test ) &&
-(cd diesel_migrations/ && travis-cargo test --features "$BACKEND" ) &&
+(cd diesel_migrations/ && travis-cargo test -- --features "$BACKEND" ) &&
if [[ "$TRAVIS_RUST_VERSION" == nightly* ]]; then
(cd diesel_tests && travis-cargo test -- --no-default-features --features "unstable $BACKEND")
else


@@ -14,8 +14,8 @@ extern crate chrono;
extern crate clap;
#[cfg_attr(any(feature = "mysql", feature = "postgres"), macro_use)]
extern crate diesel;
-extern crate infer_schema_internals;
extern crate dotenv;
+extern crate infer_schema_internals;
extern crate migrations_internals;
mod database_error;


@@ -2,7 +2,7 @@ use regex::Regex;
use chrono::prelude::*;
use support::project;
-use diesel::migrations::TIMESTAMP_FORMAT;
+use migrations_internals::TIMESTAMP_FORMAT;
#[test]
fn migration_generate_creates_a_migration_with_the_proper_name() {


@@ -4,7 +4,7 @@ use std::thread::sleep;
use std::time::Duration;
use support::{database, project};
-use diesel::migrations::TIMESTAMP_FORMAT;
+use migrations_internals::TIMESTAMP_FORMAT;
#[test]
fn migration_list_lists_pending_applied_migrations() {


@@ -4,6 +4,7 @@ extern crate chrono;
extern crate diesel;
#[macro_use]
extern crate difference;
+extern crate migrations_internals;
extern crate regex;
extern crate tempdir;


@@ -3,7 +3,7 @@ extern crate cfg_if;
#[macro_use]
extern crate diesel;
#[macro_use]
-extern crate diesel_codegen;
+extern crate diesel_derives;
mod queryable;
mod queryable_by_name;


@@ -10,9 +10,9 @@
unicode_not_nfc, if_not_else, items_after_statements, used_underscore_binding))]
extern crate diesel;
-extern crate infer_schema_internals;
#[cfg(all(feature = "dotenv"))]
extern crate dotenv;
+extern crate infer_schema_internals;
extern crate proc_macro;
#[macro_use]
extern crate quote;


@@ -44,7 +44,11 @@ pub fn derive_infer_schema(input: syn::DeriveInput) -> quote::Tokens {
quote!(joinable!(#child_table -> #parent_table (#foreign_key));)
});
-let tokens = quote!(#(#tables)* #(#joinables)*);
+let table_idents = table_names.iter().map(|t| syn::Ident::from(&*t.name));
+let multi_table_joins = quote!(allow_tables_to_appear_in_same_query!(#(#table_idents,)*););
+let tokens = quote!(#(#tables)* #(#joinables)* #multi_table_joins);
if let Some(schema_name) = schema_name {
let schema_ident = syn::Ident::new(schema_name);
quote!(pub mod #schema_ident { #tokens })
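For context: the three added lines make the derive emit one `allow_tables_to_appear_in_same_query!` call covering every inferred table. A sketch of the kind of code the expansion now produces, for two hypothetical tables `users` and `posts` (names illustrative, not taken from this diff):

```rust
#[macro_use]
extern crate diesel;

// Emitted per table, as before:
table! {
    users (id) {
        id -> Integer,
        name -> Varchar,
    }
}

table! {
    posts (id) {
        id -> Integer,
        user_id -> Integer,
    }
}

// Emitted per foreign-key constraint, as before:
joinable!(posts -> users (user_id));

// Newly emitted once over all tables, so they can be used
// together in a single query:
allow_tables_to_appear_in_same_query!(users, posts);

fn main() {}
```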


@@ -73,65 +73,3 @@ macro_rules! infer_table_from_schema {
struct __DieselInferTableFromSchema;
}
}
#[macro_export]
/// This macro will read your migrations at compile time, and embed a module you can use to execute
/// them at runtime without the migration files being present on the file system. This is useful if
/// you would like to use Diesel's migration infrastructure, but want to ship a single executable
/// file (such as for embedded applications). It can also be used to apply migrations to an in
/// memory database (Diesel does this for its own test suite).
///
/// You can optionally pass the path to the migrations directory to this macro. When left
/// unspecified, Diesel Codegen will search for the migrations directory in the same way that
/// Diesel CLI does. If specified, the path should be relative to the directory where `Cargo.toml`
/// resides.
///
/// # Examples
///
/// ```rust
/// # #[macro_use] extern crate diesel;
/// # #[macro_use] extern crate diesel_proc_macro;
/// # include!("../../diesel/src/doctest_setup.rs");
/// # table! {
/// # users {
/// # id -> Integer,
/// # name -> VarChar,
/// # }
/// # }
/// #
/// # #[cfg(feature = "postgres")]
/// # embed_migrations!("../migrations/postgresql");
/// # #[cfg(all(feature = "mysql", not(feature = "postgres")))]
/// # embed_migrations!("../migrations/mysql");
/// # #[cfg(all(feature = "sqlite", not(any(feature = "postgres", feature = "mysql"))))]
/// embed_migrations!("../migrations/sqlite");
///
/// fn main() {
/// let connection = establish_connection();
///
/// // This will run the necessary migrations.
/// embedded_migrations::run(&connection);
///
/// // By default the output is thrown out. If you want to redirect it to stdout, you
/// // should call embedded_migrations::run_with_output.
/// embedded_migrations::run_with_output(&connection, &mut std::io::stdout());
/// }
/// ```
macro_rules! embed_migrations {
() => {
#[allow(dead_code)]
mod embedded_migrations {
#[derive(EmbedMigrations)]
struct _Dummy;
}
};
($migrations_path: expr) => {
#[allow(dead_code)]
mod embedded_migrations {
#[derive(EmbedMigrations)]
#[embed_migrations_options(migrations_path=$migrations_path)]
struct _Dummy;
}
}
}
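The macro is removed here because it moves to the new `diesel_migrations` crate (re-exported from `migrations_macros`; see the `diesel_migrations` hunks below). A minimal usage sketch under that assumption, mirroring the doc-comment example with an in-memory SQLite database:

```rust
#[macro_use]
extern crate diesel;
#[macro_use]
extern crate diesel_migrations;

use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;

// Reads ./migrations (relative to Cargo.toml) at compile time and
// embeds the SQL into the binary.
embed_migrations!("migrations");

fn main() {
    let connection = SqliteConnection::establish(":memory:")
        .expect("failed to open in-memory database");

    // Applies any migrations that have not been run yet.
    embedded_migrations::run(&connection).expect("failed to run migrations");
}
```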


@@ -15,6 +15,8 @@ migrations_macros = { version = "0.16.0", path = "./migrations_macros" }
[dev-dependencies]
diesel = { version = "0.16.0", features = ["sqlite", "postgres", "mysql"] }
+dotenv = ">=0.8, <0.11"
+cfg-if = "0.1.0"
[features]
default = []


@@ -10,6 +10,9 @@ homepage = "http://diesel.rs"
clippy = { optional = true, version = "=0.0.162" }
diesel = "0.16.0"
+[dev-dependencies]
+tempdir = "0.3.4"
[features]
default = []
lint = ["clippy"]


@@ -94,7 +94,7 @@ use std::fs::DirEntry;
use std::io::{stdout, Write};
use diesel::expression_methods::*;
-use diesel::{FilterDsl, ExecuteDsl};
+use diesel::{ExecuteDsl, FilterDsl};
use self::schema::__diesel_schema_migrations::dsl::*;
use diesel::{Connection, QueryResult};
@@ -253,14 +253,12 @@ pub fn setup_database<Conn: Connection>(conn: &Conn) -> QueryResult<usize> {
}
fn create_schema_migrations_table_if_needed<Conn: Connection>(conn: &Conn) -> QueryResult<usize> {
-conn.silence_notices(|| {
-conn.execute(
-"CREATE TABLE IF NOT EXISTS __diesel_schema_migrations (
-version VARCHAR(50) PRIMARY KEY NOT NULL,
-run_on TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
-)",
-)
-})
+conn.execute(
+"CREATE TABLE IF NOT EXISTS __diesel_schema_migrations (\
+version VARCHAR(50) PRIMARY KEY NOT NULL,\
+run_on TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP\
+)",
+)
}
#[doc(hidden)]


@@ -1,407 +0,0 @@
//! Provides functions for maintaining database schema.
//!
//! A database migration always provides procedures to update the schema, as well as to revert
//! itself. Diesel's migrations are versioned, and run in order. Diesel also takes care of tracking
//! which migrations have already been run automatically. Your migrations don't need to be
//! idempotent, as Diesel will ensure no migration is run twice unless it has been reverted.
//!
//! Migrations should be placed in a `/migrations` directory at the root of your project (the same
//! directory as `Cargo.toml`). When any of these functions are run, Diesel will search for the
//! migrations directory in the current directory and its parents, stopping when it finds the
//! directory containing `Cargo.toml`.
//!
//! Individual migrations should be a folder containing exactly two files, `up.sql` and `down.sql`.
//! `up.sql` will be used to run the migration, while `down.sql` will be used for reverting it. The
//! folder itself should have the structure `{version}_{migration_name}`. It is recommended that
//! you use the timestamp of creation for the version.
//!
//! Migrations can either be run with the CLI or embedded into the compiled application
//! and executed with code, for example right after establishing a database connection.
//! For more information, consult the [`embed_migrations!`](../macro.embed_migrations.html) macro.
//!
//! ## Example
//!
//! ```text
//! # Directory Structure
//! - 20151219180527_create_users
//! - up.sql
//! - down.sql
//! - 20160107082941_create_posts
//! - up.sql
//! - down.sql
//! ```
//!
//! ```sql
//! -- 20151219180527_create_users/up.sql
//! CREATE TABLE users (
//! id SERIAL PRIMARY KEY,
//! name VARCHAR NOT NULL,
//! hair_color VARCHAR
//! );
//! ```
//!
//! ```sql
//! -- 20151219180527_create_users/down.sql
//! DROP TABLE users;
//! ```
//!
//! ```sql
//! -- 20160107082941_create_posts/up.sql
//! CREATE TABLE posts (
//! id SERIAL PRIMARY KEY,
//! user_id INTEGER NOT NULL,
//! title VARCHAR NOT NULL,
//! body TEXT
//! );
//! ```
//!
//! ```sql
//! -- 20160107082941_create_posts/down.sql
//! DROP TABLE posts;
//! ```
mod migration;
#[doc(hidden)]
pub mod connection;
mod migration_error;
#[doc(hidden)]
pub mod schema;
#[doc(inline)]
pub use self::connection::MigrationConnection;
#[doc(inline)]
pub use self::migration::*;
pub use self::migration_error::*;
use std::fs::DirEntry;
use std::io::{stdout, Write};
use expression_methods::*;
use query_dsl::*;
use self::schema::__diesel_schema_migrations::dsl::*;
use {Connection, QueryResult};
use std::env;
use std::path::{Path, PathBuf};
pub static TIMESTAMP_FORMAT: &'static str = "%Y-%m-%d-%H%M%S";
/// Runs all migrations that have not yet been run. This function will print all progress to
/// stdout. This function will return an `Err` if some error occurs reading the migrations, or if
/// any migration fails to run. Each migration is run in its own transaction, so some migrations
/// may be committed, even if a later migration fails to run.
///
/// It should be noted that this runs all migrations that have not already been run, regardless of
/// whether or not their version is later than the latest run migration. This is generally not a
/// problem, and eases the more common case of two developers generating independent migrations on
/// a branch. Whoever created the second one will eventually need to run the first when both
/// branches are merged.
///
/// See the [module level documentation](index.html) for information on how migrations should be
/// structured, and where Diesel will look for them by default.
pub fn run_pending_migrations<Conn>(conn: &Conn) -> Result<(), RunMigrationsError>
where
Conn: MigrationConnection,
{
let migrations_dir = try!(find_migrations_directory());
run_pending_migrations_in_directory(conn, &migrations_dir, &mut stdout())
}
#[doc(hidden)]
pub fn run_pending_migrations_in_directory<Conn>(
conn: &Conn,
migrations_dir: &Path,
output: &mut Write,
) -> Result<(), RunMigrationsError>
where
Conn: MigrationConnection,
{
let all_migrations = try!(migrations_in_directory(migrations_dir));
run_migrations(conn, all_migrations, output)
}
/// Compares migrations found in `migrations_dir` to those that have been applied.
/// Returns a list of pathbufs and whether they have been applied.
pub fn mark_migrations_in_directory<Conn>(
conn: &Conn,
migrations_dir: &Path,
) -> Result<Vec<(Option<PathBuf>, bool)>, RunMigrationsError>
where
Conn: MigrationConnection,
{
let migrations = migrations_in_directory(migrations_dir)?;
setup_database(conn)?;
let already_run = conn.previously_run_migration_versions()?;
let migrations = migrations
.into_iter()
.map(|m| {
let applied = already_run.contains(&m.version().to_string());
(m.file_path().map(|p| p.to_path_buf()), applied)
})
.collect();
Ok(migrations)
}
/// Returns true if there are outstanding migrations in the migrations directory, otherwise
/// returns false. Returns an `Err` if there are problems with migration setup.
///
/// See the [module level documentation](index.html) for information on how migrations should be
/// structured, and where Diesel will look for them by default.
pub fn any_pending_migrations<Conn>(conn: &Conn) -> Result<bool, RunMigrationsError>
where
Conn: MigrationConnection,
{
let migrations_dir = find_migrations_directory()?;
let all_migrations = migrations_in_directory(&migrations_dir)?;
let already_run = conn.previously_run_migration_versions()?;
let pending = all_migrations
.into_iter()
.any(|m| !already_run.contains(&m.version().to_string()));
Ok(pending)
}
/// Reverts the last migration that was run. Returns the version that was reverted. Returns an
/// `Err` if no migrations have ever been run.
///
/// See the [module level documentation](index.html) for information on how migrations should be
/// structured, and where Diesel will look for them by default.
pub fn revert_latest_migration<Conn>(conn: &Conn) -> Result<String, RunMigrationsError>
where
Conn: MigrationConnection,
{
let migrations_dir = try!(find_migrations_directory());
revert_latest_migration_in_directory(conn, &migrations_dir)
}
pub fn revert_latest_migration_in_directory<Conn>(
conn: &Conn,
path: &Path,
) -> Result<String, RunMigrationsError>
where
Conn: MigrationConnection,
{
try!(setup_database(conn));
let latest_migration_version = conn.latest_run_migration_version()?.ok_or_else(|| {
RunMigrationsError::MigrationError(MigrationError::NoMigrationRun)
})?;
revert_migration_with_version(conn, path, &latest_migration_version, &mut stdout())
.map(|_| latest_migration_version)
}
#[doc(hidden)]
pub fn revert_migration_with_version<Conn: Connection>(
conn: &Conn,
migrations_dir: &Path,
ver: &str,
output: &mut Write,
) -> Result<(), RunMigrationsError> {
migration_with_version(migrations_dir, ver)
.map_err(|e| e.into())
.and_then(|m| revert_migration(conn, m, output))
}
#[doc(hidden)]
pub fn run_migration_with_version<Conn>(
conn: &Conn,
migrations_dir: &Path,
ver: &str,
output: &mut Write,
) -> Result<(), RunMigrationsError>
where
Conn: MigrationConnection,
{
migration_with_version(migrations_dir, ver)
.map_err(|e| e.into())
.and_then(|m| run_migration(conn, &*m, output))
}
fn migration_with_version(
migrations_dir: &Path,
ver: &str,
) -> Result<Box<Migration>, MigrationError> {
let all_migrations = try!(migrations_in_directory(migrations_dir));
let migration = all_migrations.into_iter().find(|m| m.version() == ver);
match migration {
Some(m) => Ok(m),
None => Err(MigrationError::UnknownMigrationVersion(ver.into())),
}
}
#[doc(hidden)]
pub fn setup_database<Conn: Connection>(conn: &Conn) -> QueryResult<usize> {
create_schema_migrations_table_if_needed(conn)
}
fn create_schema_migrations_table_if_needed<Conn: Connection>(conn: &Conn) -> QueryResult<usize> {
conn.execute(
"CREATE TABLE IF NOT EXISTS __diesel_schema_migrations (\
version VARCHAR(50) PRIMARY KEY NOT NULL,\
run_on TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP\
)",
)
}
#[doc(hidden)]
pub fn migration_paths_in_directory(path: &Path) -> Result<Vec<DirEntry>, MigrationError> {
try!(path.read_dir())
.filter_map(|entry| {
let entry = match entry {
Ok(e) => e,
Err(e) => return Some(Err(e.into())),
};
if entry.file_name().to_string_lossy().starts_with('.') {
None
} else {
Some(Ok(entry))
}
})
.collect()
}
fn migrations_in_directory(path: &Path) -> Result<Vec<Box<Migration>>, MigrationError> {
use self::migration::migration_from;
try!(migration_paths_in_directory(path))
.iter()
.map(|e| migration_from(e.path()))
.collect()
}
/// Run all pending migrations in the given list. Apps should likely be calling
/// `run_pending_migrations` or `run_pending_migrations_in_directory` instead.
pub fn run_migrations<Conn, List>(
conn: &Conn,
migrations: List,
output: &mut Write,
) -> Result<(), RunMigrationsError>
where
Conn: MigrationConnection,
List: IntoIterator,
List::Item: Migration,
{
try!(setup_database(conn));
let already_run = try!(conn.previously_run_migration_versions());
let mut pending_migrations: Vec<_> = migrations
.into_iter()
.filter(|m| !already_run.contains(&m.version().to_string()))
.collect();
pending_migrations.sort_by(|a, b| a.version().cmp(b.version()));
for migration in pending_migrations {
try!(run_migration(conn, &migration, output));
}
Ok(())
}
fn run_migration<Conn>(
conn: &Conn,
migration: &Migration,
output: &mut Write,
) -> Result<(), RunMigrationsError>
where
Conn: MigrationConnection,
{
conn.transaction(|| {
if migration.version() != "00000000000000" {
try!(writeln!(
output,
"Running migration {}",
migration.version()
));
}
try!(migration.run(conn));
try!(conn.insert_new_migration(migration.version()));
Ok(())
})
}
fn revert_migration<Conn: Connection>(
conn: &Conn,
migration: Box<Migration>,
output: &mut Write,
) -> Result<(), RunMigrationsError> {
conn.transaction(|| {
try!(writeln!(
output,
"Rolling back migration {}",
migration.version()
));
try!(migration.revert(conn));
let target = __diesel_schema_migrations.filter(version.eq(migration.version()));
try!(::delete(target).execute(conn));
Ok(())
})
}
/// Returns the directory containing migrations. Will look for
/// $PWD/migrations. If it is not found, it will search the parents of the
/// current directory, until it reaches the root directory. Returns
/// `MigrationError::MigrationDirectoryNotFound` if no directory is found.
pub fn find_migrations_directory() -> Result<PathBuf, MigrationError> {
search_for_migrations_directory(&try!(env::current_dir()))
}
/// Searches for the migrations directory relative to the given path. See
/// `find_migrations_directory` for more details.
pub fn search_for_migrations_directory(path: &Path) -> Result<PathBuf, MigrationError> {
let migration_path = path.join("migrations");
if migration_path.is_dir() {
Ok(migration_path)
} else {
path.parent()
.map(search_for_migrations_directory)
.unwrap_or(Err(MigrationError::MigrationDirectoryNotFound))
}
}
#[cfg(test)]
mod tests {
extern crate tempdir;
use super::*;
use self::tempdir::TempDir;
use std::fs;
#[test]
fn migration_directory_not_found_if_no_migration_dir_exists() {
let dir = TempDir::new("diesel").unwrap();
assert_eq!(
Err(MigrationError::MigrationDirectoryNotFound),
search_for_migrations_directory(dir.path())
);
}
#[test]
fn migration_directory_defaults_to_pwd_slash_migrations() {
let dir = TempDir::new("diesel").unwrap();
let temp_path = dir.path().canonicalize().unwrap();
let migrations_path = temp_path.join("migrations");
fs::create_dir(&migrations_path).unwrap();
assert_eq!(
Ok(migrations_path),
search_for_migrations_directory(&temp_path)
);
}
#[test]
fn migration_directory_checks_parents() {
let dir = TempDir::new("diesel").unwrap();
let temp_path = dir.path().canonicalize().unwrap();
let migrations_path = temp_path.join("migrations");
let child_path = temp_path.join("child");
fs::create_dir(&child_path).unwrap();
fs::create_dir(&migrations_path).unwrap();
assert_eq!(
Ok(migrations_path),
search_for_migrations_directory(&child_path)
);
}
}
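The whole `diesel::migrations` module is deleted; per the import changes above (`use migrations_internals::TIMESTAMP_FORMAT;`), its contents now live in the `migrations_internals` crate. A sketch of driving the same API through the new crate, assuming the function signatures survive the move unchanged:

```rust
extern crate diesel;
extern crate migrations_internals;

use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;

fn main() {
    let connection = SqliteConnection::establish(":memory:")
        .expect("failed to open in-memory database");

    // Searches upward from $PWD for a `migrations` directory, as
    // documented above, then creates __diesel_schema_migrations if
    // needed and runs anything that has not been applied yet.
    if migrations_internals::any_pending_migrations(&connection).unwrap() {
        migrations_internals::run_pending_migrations(&connection).unwrap();
    }
}
```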


@@ -13,6 +13,9 @@ migrations_internals = { version = "0.16.0", path = "../migrations_internals" }
syn = { version = "0.11.4", features = ["aster"] }
quote = "0.3.12"
+[dev-dependencies]
+tempdir = "0.3.4"
[lib]
proc-macro = true


@@ -13,11 +13,11 @@
unicode_not_nfc, enum_glob_use, if_not_else, items_after_statements,
used_underscore_binding))]
#![cfg_attr(all(test, feature = "clippy"), allow(option_unwrap_used, result_unwrap_used))]
+extern crate migrations_internals;
extern crate proc_macro;
#[macro_use]
extern crate quote;
extern crate syn;
-extern crate migrations_internals;
mod migrations;
mod embed_migrations;


@@ -9,10 +9,10 @@
warn(wrong_pub_self_convention, mut_mut, non_ascii_literal, similar_names,
unicode_not_nfc, if_not_else, items_after_statements, used_underscore_binding))]
+extern crate migrations_internals;
#[allow(unused_imports)]
#[macro_use]
extern crate migrations_macros;
-extern crate migrations_internals;
#[doc(hidden)]
pub use migrations_macros::*;
#[doc(inline)]


@@ -1,83 +0,0 @@
// Built-in Lints
#![deny(warnings, missing_copy_implementations)]
// Clippy lints
#![cfg_attr(feature = "clippy", allow(needless_pass_by_value))]
#![cfg_attr(feature = "clippy", feature(plugin))]
#![cfg_attr(feature = "clippy", plugin(clippy(conf_file = "../clippy.toml")))]
#![cfg_attr(feature = "clippy", allow(option_map_unwrap_or_else, option_map_unwrap_or))]
#![cfg_attr(feature = "clippy",
warn(wrong_pub_self_convention, mut_mut, non_ascii_literal, similar_names,
unicode_not_nfc, if_not_else, items_after_statements, used_underscore_binding))]
#[allow(unused_imports)]
#[macro_use]
extern crate diesel_proc_macro_internal;
#[doc(hidden)]
pub use diesel_proc_macro_internal::*;
#[macro_export]
/// Queries the database for the names of all tables, and calls
/// [`infer_table_from_schema!`](macro.infer_table_from_schema.html) for each
/// one. A schema name can optionally be passed to load from schemas other than
/// the default. If a schema name is given, the inferred tables will be wrapped
/// in a module with the same name.
///
/// Attempting to use the `env!` or `dotenv!` macros here will not work due to
/// limitations of the Macros 1.1 system, but you can pass a string in the form
/// `"env:SOME_ENV_VAR"` or `"dotenv:SOME_ENV_VAR"` to achieve the same effect.
///
/// If any column name would collide with a rust keyword, a `_` will
/// automatically be placed at the end of the name. For example, a column called
/// `type` will be referenced as `type_` in the generated module.
///
/// Using the `infer_schema!` macro enables Diesel to automatically detect which
/// tables can be joined.
/// You must have foreign key constraints on the child table's foreign key column
/// for this feature to work.
macro_rules! infer_schema {
($database_url: expr) => {
mod __diesel_infer_schema {
#[derive(InferSchema)]
#[infer_schema_options(database_url=$database_url)]
struct _Dummy;
}
pub use self::__diesel_infer_schema::*;
};
($database_url: expr, $schema_name: expr) => {
mod __diesel_infer_schema {
#[derive(InferSchema)]
#[infer_schema_options(database_url=$database_url, schema_name=$schema_name)]
struct _Dummy;
}
pub use self::__diesel_infer_schema::*;
};
}
#[macro_export]
/// Establishes a database connection at compile time, loads the schema
/// information about a table's columns, and invokes
/// [`table!`](macro.table.html) for you automatically. For tables in a schema
/// other than the default, the table name should be given as
/// `"schema_name.table_name"`.
///
/// Attempting to use the `env!` or `dotenv!` macros here will not work due to
/// limitations of the Macros 1.1 system, but you can pass a string in the form
/// `"env:SOME_ENV_VAR"` or `"dotenv:SOME_ENV_VAR"` to achieve the same effect.
///
/// At this time, the schema inference macros do not support types from third
/// party crates, and having any columns with a type not supported by the diesel
/// core crate will result in a compiler error (please [open an
/// issue](https://github.com/diesel-rs/diesel/issues/new) if this happens
/// unexpectedly for a type listed in our docs.)
///
/// When using the `infer_table_from_schema!` macro,
/// Diesel can't automatically detect which tables can be joined.
/// Use the `joinable!` macro in your schema to specify how tables can be joined
macro_rules! infer_table_from_schema {
($database_url: expr, $table_name: expr) => {
#[derive(InferTableFromSchema)]
#[infer_table_from_schema_options(database_url=$database_url, table_name=$table_name)]
struct __DieselInferTableFromSchema;
}
}
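This copy of the schema-inference macros is deleted; the surviving definitions live in `diesel_infer_schema` (see the hunk above that keeps `infer_table_from_schema!`). A minimal invocation sketch, assuming `.env` provides `DATABASE_URL`:

```rust
#[macro_use]
extern crate diesel;
#[macro_use]
extern crate diesel_infer_schema;

// env!/dotenv! cannot be used here (a Macros 1.1 limitation, per the
// doc comment above); the "dotenv:" string form reads the variable
// from .env at compile time instead.
infer_schema!("dotenv:DATABASE_URL");

fn main() {}
```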


@@ -1,26 +0,0 @@
[package]
name = "diesel_proc_macro_internal"
version = "0.16.0"
authors = ["Sean Griffin <sean@seantheprogrammer.com>"]
license = "MIT OR Apache-2.0"
[dependencies]
syn = { version = "0.11.4", features = ["aster"] }
quote = "0.3.12"
dotenv = { version = ">=0.8, <0.11", optional = true, default-features = false }
diesel = { version = "0.16.0", default-features = false }
diesel_infer_schema = { version = "0.16.0", default-features = false }
clippy = { optional = true, version = "=0.0.162" }
[dev-dependencies]
tempdir = "^0.3.4"
[lib]
proc-macro = true
[features]
default = ["dotenv", "sqlite"]
lint = ["clippy"]
postgres = ["diesel/postgres", "diesel_infer_schema/postgres"]
sqlite = ["diesel/sqlite", "diesel_infer_schema/sqlite"]
mysql = ["diesel/mysql", "diesel_infer_schema/mysql"]


@@ -1,95 +0,0 @@
use syn;
use quote;
use migrations_internals::{migration_paths_in_directory, version_from_path};
use migrations::migration_directory_from_given_path;
use std::error::Error;
use std::path::Path;
use util::{get_option, get_options_from_input};
pub fn derive_embed_migrations(input: syn::DeriveInput) -> quote::Tokens {
fn bug() -> ! {
panic!(
"This is a bug. Please open a Github issue \
with your invocation of `embed_migrations!"
);
}
let options = get_options_from_input("embed_migrations_options", &input.attrs, bug);
let migrations_path_opt = options
.as_ref()
.map(|o| get_option(o, "migrations_path", bug));
let migrations_expr = migration_directory_from_given_path(migrations_path_opt)
.and_then(|path| migration_literals_from_path(&path));
let migrations_expr = match migrations_expr {
Ok(v) => v,
Err(e) => panic!("Error reading migrations: {}", e),
};
// These are split into multiple `quote!` calls to avoid recursion limit
let embedded_migration_def = quote!(
struct EmbeddedMigration {
version: &'static str,
up_sql: &'static str,
}
impl Migration for EmbeddedMigration {
fn version(&self) -> &str {
self.version
}
fn run(&self, conn: &SimpleConnection) -> Result<(), RunMigrationsError> {
conn.batch_execute(self.up_sql).map_err(Into::into)
}
fn revert(&self, _conn: &SimpleConnection) -> Result<(), RunMigrationsError> {
unreachable!()
}
}
);
let run_fns = quote!(
pub fn run<C: MigrationConnection>(conn: &C) -> Result<(), RunMigrationsError> {
run_with_output(conn, &mut io::sink())
}
pub fn run_with_output<C: MigrationConnection>(conn: &C, out: &mut io::Write)
-> Result<(), RunMigrationsError>
{
run_migrations(conn, ALL_MIGRATIONS.iter().map(|v| *v), out)
}
);
quote! {
extern crate diesel;
use self::diesel::migrations::*;
use self::diesel::connection::SimpleConnection;
use std::io;
const ALL_MIGRATIONS: &'static [&'static Migration] = &[#(#migrations_expr),*];
#embedded_migration_def
#run_fns
}
}
fn migration_literals_from_path(path: &Path) -> Result<Vec<quote::Tokens>, Box<Error>> {
try!(migration_paths_in_directory(path))
.into_iter()
.map(|e| migration_literal_from_path(&e.path()))
.collect()
}
fn migration_literal_from_path(path: &Path) -> Result<quote::Tokens, Box<Error>> {
let version = try!(version_from_path(path));
let sql_file = path.join("up.sql");
let sql_file_path = sql_file.to_str();
Ok(quote!(&EmbeddedMigration {
version: #version,
up_sql: include_str!(#sql_file_path),
}))
}


@@ -1,48 +0,0 @@
// Built-in Lints
#![deny(warnings, missing_copy_implementations)]
// Clippy lints
#![cfg_attr(feature = "clippy", allow(needless_pass_by_value))]
#![cfg_attr(feature = "clippy", feature(plugin))]
#![cfg_attr(feature = "clippy", plugin(clippy(conf_file = "../clippy.toml")))]
#![cfg_attr(feature = "clippy", allow(option_map_unwrap_or_else, option_map_unwrap_or))]
#![cfg_attr(feature = "clippy",
warn(wrong_pub_self_convention, mut_mut, non_ascii_literal, similar_names,
unicode_not_nfc, if_not_else, items_after_statements, used_underscore_binding))]
extern crate diesel;
extern crate diesel_infer_schema;
#[cfg(all(feature = "dotenv"))]
extern crate dotenv;
extern crate proc_macro;
#[macro_use]
extern crate quote;
extern crate syn;
mod database_url;
mod schema_inference;
mod migrations;
mod embed_migrations;
mod util;
use proc_macro::TokenStream;
use syn::parse_derive_input;
#[proc_macro_derive(InferSchema, attributes(infer_schema_options))]
pub fn derive_infer_schema(input: TokenStream) -> TokenStream {
expand_derive(input, schema_inference::derive_infer_schema)
}
#[proc_macro_derive(InferTableFromSchema, attributes(infer_table_from_schema_options))]
pub fn derive_infer_table_from_schema(input: TokenStream) -> TokenStream {
expand_derive(input, schema_inference::derive_infer_table_from_schema)
}
#[proc_macro_derive(EmbedMigrations, attributes(embed_migrations_options))]
pub fn derive_embed_migrations(input: TokenStream) -> TokenStream {
expand_derive(input, embed_migrations::derive_embed_migrations)
}
fn expand_derive(input: TokenStream, f: fn(syn::DeriveInput) -> quote::Tokens) -> TokenStream {
let item = parse_derive_input(&input.to_string()).unwrap();
f(item).to_string().parse().unwrap()
}


@@ -1,157 +0,0 @@
use quote;
use syn;
use database_url::extract_database_url;
use diesel_infer_schema::*;
use util::{get_option, get_optional_option, get_options_from_input};
pub fn derive_infer_schema(input: syn::DeriveInput) -> quote::Tokens {
fn bug() -> ! {
panic!(
"This is a bug. Please open a Github issue \
with your invocation of `infer_schema`!"
);
}
let options =
get_options_from_input("infer_schema_options", &input.attrs, bug).unwrap_or_else(|| bug());
let database_url = extract_database_url(get_option(&options, "database_url", bug)).unwrap();
let schema_name = get_optional_option(&options, "schema_name");
let schema_name = schema_name.as_ref().map(|s| &**s);
let table_names = load_table_names(&database_url, schema_name)
.expect(&error_message("table names", &database_url, schema_name));
let foreign_keys = load_foreign_key_constraints(&database_url, schema_name)
.expect(&error_message("foreign keys", &database_url, schema_name));
let foreign_keys =
remove_unsafe_foreign_keys_for_codegen(&database_url, &foreign_keys, &table_names);
let tables = table_names.iter().map(|table| {
let mod_ident = syn::Ident::new(format!("infer_{}", table.name));
let table_name = table.to_string();
quote! {
mod #mod_ident {
infer_table_from_schema!(#database_url, #table_name);
}
pub use self::#mod_ident::*;
}
});
let joinables = foreign_keys.into_iter().map(|fk| {
let child_table = syn::Ident::new(fk.child_table.name);
let parent_table = syn::Ident::new(fk.parent_table.name);
let foreign_key = syn::Ident::new(fk.foreign_key);
quote!(joinable!(#child_table -> #parent_table (#foreign_key));)
});
let table_idents = table_names.iter().map(|t| syn::Ident::from(&*t.name));
let multi_table_joins = quote!(allow_tables_to_appear_in_same_query!(#(#table_idents,)*););
let tokens = quote!(#(#tables)* #(#joinables)* #multi_table_joins);
if let Some(schema_name) = schema_name {
let schema_ident = syn::Ident::new(schema_name);
quote!(pub mod #schema_ident { #tokens })
} else {
tokens
}
}
pub fn derive_infer_table_from_schema(input: syn::DeriveInput) -> quote::Tokens {
fn bug() -> ! {
panic!(
"This is a bug. Please open a Github issue \
with your invocation of `infer_table_from_schema`!"
);
}
let options = get_options_from_input("infer_table_from_schema_options", &input.attrs, bug)
.unwrap_or_else(|| bug());
let database_url = extract_database_url(get_option(&options, "database_url", bug)).unwrap();
let table_name = get_option(&options, "table_name", bug);
let table_data = load_table_data(&database_url, table_name.parse().unwrap())
.expect(&error_message(table_name, &database_url, None));
table_data_to_tokens(table_data)
}
fn error_message(attempted_to_load: &str, database_url: &str, schema_name: Option<&str>) -> String {
let mut message = format!(
"Could not load {} from database `{}`",
attempted_to_load,
database_url
);
if let Some(name) = schema_name {
message += &format!(" with schema `{}`", name);
}
message
}
fn table_data_to_tokens(table_data: TableData) -> quote::Tokens {
let table_docs = to_doc_comment_tokens(&table_data.docs);
let table_name = table_name_to_tokens(table_data.name);
let primary_key = table_data.primary_key.into_iter().map(syn::Ident::new);
let column_definitions = table_data
.column_data
.into_iter()
.map(column_data_to_tokens);
quote! {
table! {
#(#table_docs)*
#table_name (#(#primary_key),*) {
#(#column_definitions),*,
}
}
}
}
fn table_name_to_tokens(table_name: TableName) -> quote::Tokens {
let name = syn::Ident::new(table_name.name);
if let Some(schema) = table_name.schema {
let schema = syn::Ident::new(schema);
quote!(#schema.#name)
} else {
quote!(#name)
}
}
fn column_data_to_tokens(column_data: ColumnDefinition) -> quote::Tokens {
let docs = to_doc_comment_tokens(&column_data.docs);
let ty = column_ty_to_tokens(column_data.ty);
if let Some(rust_name) = column_data.rust_name {
let rust_name = syn::Ident::new(rust_name);
let sql_name = column_data.sql_name;
quote!(
#(#docs)*
#[sql_name = #sql_name]
#rust_name -> #ty
)
} else {
let name = syn::Ident::new(column_data.sql_name);
quote!(
#(#docs)*
#name -> #ty
)
}
}
fn column_ty_to_tokens(column_ty: ColumnType) -> quote::Tokens {
let name = syn::Ident::new(column_ty.rust_name);
let mut tokens = quote!(#name);
if column_ty.is_array {
tokens = quote!(Array<#tokens>);
}
if column_ty.is_nullable {
tokens = quote!(Nullable<#tokens>);
}
tokens
}
fn to_doc_comment_tokens(docs: &str) -> Vec<syn::Token> {
docs.lines()
.map(|l| {
format!("///{}{}", if l.is_empty() { "" } else { " " }, l)
})
.map(|l| syn::Token::DocComment(l.into()))
.collect()
}


@@ -24,7 +24,7 @@ ipnetwork = "0.12.2"
bigdecimal = "0.0.10"
[features]
default = ["sqlite"]
default = []
unstable = ["diesel/unstable"]
postgres = ["diesel/postgres", "diesel_infer_schema/postgres"]
sqlite = ["diesel/sqlite", "diesel_infer_schema/sqlite"]


@@ -1,5 +1,5 @@
-extern crate diesel_migrations as migrations;
extern crate diesel;
+extern crate diesel_migrations as migrations;
extern crate dotenv;
use self::diesel::*;
use self::dotenv::dotenv;


@@ -7,7 +7,7 @@ extern crate assert_matches;
extern crate diesel;
#[macro_use]
extern crate diesel_infer_schema;
-#[macro_use]
+#[cfg_attr(feature = "sqlite", macro_use)]
extern crate diesel_migrations;
extern crate dotenv;
extern crate quickcheck;
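Why the `cfg_attr` (per the commit message "Sqlite tests need macro import for diesel_migrations"): only the SQLite tests invoke the migration macros, so an unconditional `#[macro_use]` would presumably be flagged as an unused import on other backends. A standalone sketch of the gated import, with a hypothetical migrations path:

```rust
#[macro_use]
extern crate diesel;
// The macro import is only pulled in when the sqlite feature is on:
#[cfg_attr(feature = "sqlite", macro_use)]
extern crate diesel_migrations;

// Only compiled (and only needing the macro) with the sqlite feature:
#[cfg(feature = "sqlite")]
mod sqlite_migrations {
    embed_migrations!("migrations/sqlite"); // path hypothetical
}

fn main() {}
```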


@@ -5,4 +5,3 @@ authors = ["Sean Griffin <sean@seantheprogrammer.com>"]
[dependencies]
diesel = { version = "0.16.0", features = ["postgres"] }
diesel_infer_schema = { path = "../../../diesel_infer_schema", version = "0.16.0" }