Delete diesel_codegen_syntex

Macros 1.1 is now in the 1.15 beta. We will not be releasing a new version
until after that beta has launched. Dropping the syntex support from our
own code base makes life easier for development.
This commit is contained in:
Sean Griffin 2017-01-08 11:23:17 -05:00
parent 54a99472ed
commit 41df4a6c9f
46 changed files with 37 additions and 1592 deletions

View File

@ -20,19 +20,14 @@ script:
(cd diesel && travis-cargo test -- --no-default-features --features "chrono $BACKEND")
fi &&
if [[ "$BACKEND" == postgres ]]; then
if [[ "$TRAVIS_RUST_VERSION" == nightly* ]]; then
(cd examples && ./test_nightly)
else
(cd examples && ./test_stable)
fi
(cd examples && ./test_all)
fi &&
(cd diesel_cli && travis-cargo test -- --no-default-features --features "$BACKEND") &&
(cd diesel_codegen_shared && travis-cargo test -- --no-default-features --features "dotenv $BACKEND") &&
(cd diesel_codegen_syntex && travis-cargo test -- --no-default-features --features "$BACKEND") &&
if [[ "$TRAVIS_RUST_VERSION" == nightly* ]]; then
(cd diesel_tests && travis-cargo test -- --no-default-features --features "unstable_$BACKEND")
(cd diesel_tests && travis-cargo test -- --no-default-features --features "unstable $BACKEND")
else
(cd diesel_tests && travis-cargo test -- --no-default-features --features "stable_$BACKEND")
(cd diesel_tests && travis-cargo test -- --no-default-features --features "$BACKEND")
fi &&
if [[ "$TRAVIS_RUST_VERSION" == nightly* ]]; then
(cd diesel_compile_tests && travis-cargo test)
@ -65,3 +60,6 @@ notifications:
on_success: change
on_failure: always
on_start: never
matrix:
allow_failures:
- rust: stable

View File

@ -22,6 +22,13 @@ for Rust libraries in [RFC #1105](https://github.com/rust-lang/rfcs/blob/master/
* `#[derive(Insertable)]` allows fields of type `Option<T>` to be used with
columns that are not null if they have a default value.
### Removed
* `diesel_codegen_syntex` is no longer supported. `diesel_codegen` can now be
used on stable Rust.
* Dropped support for Rust 1.14 and earlier
## [0.9.1] - 2016-12-09
### Fixed

View File

@ -4,18 +4,15 @@ members = [
"diesel_cli",
"diesel_codegen",
"diesel_codegen_shared",
"diesel_codegen_syntex",
# FIXME: getting "multiple matching crates for `diesel`" because of the shared build directory
# "diesel_compile_tests",
"diesel_tests",
"examples/getting_started_step_1",
"examples/getting_started_step_2",
"examples/getting_started_step_3",
"examples/getting_started_step_4",
]
[replace]
"diesel:0.9.1" = { path = "diesel" }
"diesel_codegen:0.9.0" = { path = "diesel_codegen" }
"diesel_codegen_shared:0.9.0" = { path = "diesel_codegen_shared" }
"diesel_codegen_syntex:0.9.0" = { path = "diesel_codegen_syntex" }

View File

@ -2,10 +2,10 @@
set -e
if [ "$1" == "sqlite" ]; then
(cd diesel_tests && DATABASE_URL=/tmp/test.db cargo bench --no-default-features --features "unstable_sqlite")
(cd diesel_tests && DATABASE_URL=/tmp/test.db cargo bench --no-default-features --features "unstable sqlite")
elif [ "$1" == "postgres" ]; then
(cd diesel_tests && cargo bench --no-default-features --features "unstable_postgres")
(cd diesel_tests && cargo bench --no-default-features --features "unstable postgres")
else
(cd diesel_tests && DATABASE_URL=/tmp/test.db cargo bench --no-default-features --features "unstable_sqlite")
(cd diesel_tests && cargo bench --no-default-features --features "unstable_postgres")
(cd diesel_tests && DATABASE_URL=/tmp/test.db cargo bench --no-default-features --features "unstable sqlite")
(cd diesel_tests && cargo bench --no-default-features --features "unstable postgres")
fi

View File

@ -2,18 +2,18 @@
set -e
if [ "$1" == "integration" ] && [ "$2" == "sqlite" ]; then
(cd diesel_tests && DATABASE_URL=/tmp/test.db cargo test --features "unstable_sqlite" --no-default-features)
(cd diesel_tests && DATABASE_URL=/tmp/test.db cargo test --features "sqlite" --no-default-features)
elif [ "$1" == "integration" ]; then
(cd diesel_tests && cargo test --features "unstable_postgres" --no-default-features)
(cd diesel_tests && cargo test --features "postgres" --no-default-features)
elif [ "$1" == "compile" ]; then
(cd diesel_compile_tests && cargo test)
else
(cd diesel && cargo test --no-default-features --features "unstable chrono sqlite postgres")
(cd diesel && cargo test --no-default-features --features "chrono sqlite postgres")
(cd diesel_cli && cargo test --features "sqlite" --no-default-features)
(cd diesel_tests && DATABASE_URL=/tmp/test.db cargo test --features "unstable_sqlite" --no-default-features)
(cd examples && ./test_nightly)
(cd diesel_tests && DATABASE_URL=/tmp/test.db cargo test --features "sqlite" --no-default-features)
(cd examples && ./test_all)
(cd diesel_cli && cargo test --features "postgres" --no-default-features)
(cd diesel_codegen_syntex && cargo test --no-default-features --features "postgres")
(cd diesel_tests && cargo test --features "unstable_postgres" --no-default-features)
(cd diesel_tests && cargo test --features "postgres" --no-default-features)
(cd diesel_compile_tests && cargo test)
fi;

View File

@ -1,13 +0,0 @@
#!/bin/sh
set -e
(cd diesel && cargo test --no-default-features --features "chrono sqlite postgres")
(cd examples && ./test_stable)
(cd diesel_cli && cargo test --no-default-features --features "postgres")
(cd diesel_cli && cargo test --no-default-features --features "sqlite")
(cd diesel_codegen_shared && cargo test --no-default-features --features "dotenv postgres")
(cd diesel_codegen_shared && cargo test --no-default-features --features "dotenv sqlite")
(cd diesel_codegen_syntex && cargo test --no-default-features --features "postgres")
(cd diesel_codegen_syntex && cargo test --no-default-features --features "sqlite")
(cd diesel_tests && cargo test --no-default-features --features "stable_postgres")
(cd diesel_tests && DATABASE_URL=/tmp/test.db cargo test --no-default-features --features "stable_sqlite")

View File

@ -1,4 +1,3 @@
#![feature(proc_macro, proc_macro_lib)]
#![deny(warnings)]
macro_rules! t {

View File

@ -1,27 +0,0 @@
[package]
name = "diesel_codegen_syntex"
version = "0.9.0"
authors = ["Sean Griffin <sean@seantheprogrammer.com>"]
license = "MIT OR Apache-2.0"
build = "build.rs"
description = "Allows use of `diesel_codegen` with `syntex`"
documentation = "https://github.com/diesel-rs/diesel/blob/master/diesel_codegen"
homepage = "http://diesel.rs"
repository = "https://github.com/diesel-rs/diesel/tree/master/diesel_codegen"
keywords = ["orm", "database", "postgres", "sql", "codegen"]
[build-dependencies]
syntex = { version = "0.44.0" }
syntex_syntax = { version = "0.44.0" }
[dependencies]
syntex = { version = "0.44.0" }
syntex_syntax = { version = "0.44.0" }
diesel = { version = "0.9.0", default-features = false }
diesel_codegen_shared = { version = "0.9.0", default-features = false }
[features]
default = ["dotenv"]
dotenv = ["diesel_codegen_shared/dotenv"]
postgres = ["diesel/postgres", "diesel_codegen_shared/postgres"]
sqlite = ["diesel/sqlite", "diesel_codegen_shared/sqlite"]

View File

@ -1,41 +0,0 @@
# Diesel Codegen Syntex
Provides the functionality of `diesel_codegen` using Syntex for usage on stable.
## Getting started
Add `diesel_codegen_syntex` to your `Cargo.toml`, specifying which backends you
use.
```toml
diesel_codegen_syntex = { version = "0.9.0", features = ["postgres"] }
```
Next, move the `mod` declarations of any modules that need codegen to a separate
file, such as `lib.in.rs`, like so:
```rust
// main.in.rs
mod schema;
mod models;
```
```rust
// main.rs
include!(concat!(env!("OUT_DIR"), "/main.rs"));
```
Finally, add a build file which calls `diesel_codegen_syntex::expand`
```rust
// build.rs
fn main() {
let out_dir = env::var("OUT_DIR").unwrap();
let src = Path::new("src/main.in.rs");
let dst = Path::new(&out_dir).join("main.rs");
diesel_codegen_syntex::expand(&src, &dst).unwrap();
}
```
For more examples, please see section 4 of the [getting started
guide](http://diesel.rs/guides/getting-started/)

View File

@ -1,46 +0,0 @@
extern crate syntex;
extern crate syntex_syntax as syntax;
use std::env;
use std::path::Path;
use std::thread;
use self::syntax::codemap::Span;
use self::syntax::ext::base::{self, ExtCtxt};
use self::syntax::tokenstream::TokenTree;
fn main() {
with_extra_stack(move || {
let out_dir = env::var_os("OUT_DIR").unwrap();
let mut registry = syntex::Registry::new();
macro_rules! register_quote_macro {
($macro_name: ident, $name: ident) => {
fn $name<'cx>(
cx: &'cx mut ExtCtxt,
sp: Span,
tts: &[TokenTree],
) -> Box<base::MacResult + 'cx> {
syntax::ext::quote::$name(cx, sp, tts)
}
registry.add_macro(stringify!($macro_name), $name);
}
}
register_quote_macro!(quote_ty, expand_quote_ty);
register_quote_macro!(quote_item, expand_quote_item);
register_quote_macro!(quote_tokens, expand_quote_tokens);
register_quote_macro!(quote_expr, expand_quote_expr);
let src = Path::new("src/lib.in.rs");
let dst = Path::new(&out_dir).join("lib.rs");
registry.expand("", &src, &dst).unwrap();
});
}
fn with_extra_stack<F: FnOnce() + Send + 'static>(f: F) {
env::set_var("RUST_MIN_STACK", "16777216"); // 16MB
thread::spawn(f).join().unwrap();
}

View File

@ -1,34 +0,0 @@
use syntax::ast::MetaItem;
use syntax::codemap::Span;
use syntax::ext::base::{Annotatable, ExtCtxt};
use super::{parse_association_options, to_foreign_key};
#[allow(unused_imports)]
pub fn expand_belongs_to(
cx: &mut ExtCtxt,
span: Span,
meta_item: &MetaItem,
annotatable: &Annotatable,
push: &mut FnMut(Annotatable),
) {
let options = parse_association_options("belongs_to", cx, span, meta_item, annotatable);
if let Some((model, options)) = options {
let parent_struct = options.name;
let struct_name = model.name;
let foreign_key_name = options.foreign_key_name.unwrap_or_else(||
to_foreign_key(&parent_struct.name.as_str()));
let child_table_name = model.table_name();
let fields = model.field_tokens_for_stable_macro(cx);
push(Annotatable::Item(quote_item!(cx, BelongsTo! {
(
struct_name = $struct_name,
parent_struct = $parent_struct,
foreign_key_name = $foreign_key_name,
child_table_name = $child_table_name,
),
fields = [$fields],
}).unwrap()));
}
}

View File

@ -1,30 +0,0 @@
use syntax::ast::MetaItem;
use syntax::codemap::Span;
use syntax::ext::base::{Annotatable, ExtCtxt};
use super::{parse_association_options, to_foreign_key};
pub fn expand_has_many(
cx: &mut ExtCtxt,
span: Span,
meta_item: &MetaItem,
annotatable: &Annotatable,
push: &mut FnMut(Annotatable)
) {
let options = parse_association_options("has_many", cx, span, meta_item, annotatable);
if let Some((model, options)) = options {
let parent_table_name = model.table_name();
let child_table_name = options.name;
let foreign_key_name = options.foreign_key_name.unwrap_or_else(||
to_foreign_key(&model.name.name.as_str()));
let fields = model.field_tokens_for_stable_macro(cx);
push(Annotatable::Item(quote_item!(cx, HasMany! {
(
parent_table_name = $parent_table_name,
child_table = $child_table_name::table,
foreign_key = $child_table_name::$foreign_key_name,
),
fields = [$fields],
}).unwrap()));
}
}

View File

@ -1,99 +0,0 @@
use syntax::ast::{self, MetaItem, MetaItemKind};
use syntax::attr::HasAttrs;
use syntax::codemap::Span;
use syntax::ext::base::{Annotatable, ExtCtxt};
use syntax::parse::token::str_to_ident;
use model::{infer_association_name, Model};
mod has_many;
mod belongs_to;
use self::has_many::expand_has_many;
use self::belongs_to::expand_belongs_to;
pub fn expand_derive_associations(
cx: &mut ExtCtxt,
span: Span,
_: &MetaItem,
annotatable: &Annotatable,
push: &mut FnMut(Annotatable)
) {
for attr in annotatable.attrs() {
if attr.check_name("has_many") {
expand_has_many(cx, span, &attr.node.value, annotatable, push);
}
if attr.check_name("belongs_to") {
expand_belongs_to(cx, span, &attr.node.value, annotatable, push);
}
}
}
fn parse_association_options(
association_kind: &str,
cx: &mut ExtCtxt,
span: Span,
meta_item: &MetaItem,
annotatable: &Annotatable,
) -> Option<(Model, AssociationOptions)> {
let model = match Model::from_annotable(cx, span, annotatable) {
Some(model) => model,
None => {
cx.span_err(span,
&format!("#[{}] can only be applied to structs or tuple structs",
association_kind));
return None;
}
};
build_association_options(association_kind, cx, span, meta_item).map(|options| {
(model, options)
})
}
struct AssociationOptions {
name: ast::Ident,
foreign_key_name: Option<ast::Ident>,
}
fn build_association_options(
association_kind: &str,
cx: &mut ExtCtxt,
span: Span,
meta_item: &MetaItem,
) -> Option<AssociationOptions> {
let usage_err = || {
cx.span_err(span,
&format!("`#[{}]` must be in the form `#[{}(table_name, option=value)]`",
association_kind, association_kind));
None
};
match meta_item.node {
MetaItemKind::List(_, ref options) => {
let association_name = match options[0].word() {
Some(word) => str_to_ident(&word.name()),
_ => return usage_err(),
};
let foreign_key_name = options.iter().find(|a| a.check_name("foreign_key"))
.and_then(|a| a.value_str()).map(|s| str_to_ident(&s));
Some(AssociationOptions {
name: association_name,
foreign_key_name: foreign_key_name,
})
}
_ => usage_err(),
}
}
fn to_foreign_key(model_name: &str) -> ast::Ident {
let lower_cased = infer_association_name(model_name);
str_to_ident(&format!("{}_id", &lower_cased))
}
#[test]
fn to_foreign_key_properly_handles_underscores() {
assert_eq!(str_to_ident("foo_bar_id"), to_foreign_key("FooBar"));
assert_eq!(str_to_ident("foo_bar_baz_id"), to_foreign_key("FooBarBaz"));
}

View File

@ -1,97 +0,0 @@
use syntax::ast;
use syntax::ast::ItemKind;
use syntax::ext::base::ExtCtxt;
use syntax::parse::token::str_to_ident;
use syntax::ptr::P;
use syntax::tokenstream::TokenTree;
use util::{ident_value_of_attr_with_name, ty_param_of_option};
#[derive(Debug, PartialEq, Eq)]
pub struct Attr {
pub column_name: ast::Ident,
pub field_name: Option<ast::Ident>,
pub ty: P<ast::Ty>,
}
impl Attr {
pub fn from_struct_field(cx: &mut ExtCtxt, field: &ast::StructField) -> Option<Self> {
let field_name = field.ident;
let column_name =
ident_value_of_attr_with_name(cx, &field.attrs, "column_name");
let ty = field.ty.clone();
match (column_name, field_name) {
(Some(column_name), f) => Some(Attr {
column_name: column_name,
field_name: f,
ty: ty,
}),
(None, Some(field_name)) => Some(Attr {
column_name: field_name.clone(),
field_name: Some(field_name),
ty: ty,
}),
(None, None) => {
cx.span_err(field.span,
r#"Field must be named or annotated with #[column_name(something)]"#);
None
}
}
}
pub fn from_struct_fields(cx: &mut ExtCtxt, fields: &[ast::StructField])
-> Option<Vec<Self>>
{
fields.iter().map(|f| Self::from_struct_field(cx, f)).collect()
}
pub fn from_item(cx: &mut ExtCtxt, item: &ast::Item)
-> Option<(ast::Generics, Vec<Self>)>
{
match item.node {
ItemKind::Struct(ref variant_data, ref generics) => {
let fields = match *variant_data {
ast::VariantData::Struct(ref fields, _) => fields,
ast::VariantData::Tuple(ref fields, _) => fields,
_ => return None,
};
Self::from_struct_fields(cx, fields).map(|f| (generics.clone(), f))
}
_ => None
}
}
pub fn to_stable_macro_tokens(&self, cx: &mut ExtCtxt) -> Vec<TokenTree> {
let field_kind;
let field_ty;
let inner_field_ty;
if let Some(option_ty) = ty_param_of_option(&self.ty) {
field_kind = str_to_ident("option");
field_ty = quote_tokens!(cx, Option<$option_ty>);
inner_field_ty = quote_tokens!(cx, $option_ty);
} else {
let ty = &self.ty;
field_kind = str_to_ident("regular");
field_ty = quote_tokens!(cx, $ty);
inner_field_ty = quote_tokens!(cx, $ty);
}
let column_name = self.column_name;
match self.field_name {
Some(field_name) => quote_tokens!(cx, {
field_name: $field_name,
column_name: $column_name,
field_ty: $field_ty,
field_kind: $field_kind,
inner_field_ty: $inner_field_ty,
}),
None => quote_tokens!(cx, {
column_name: $column_name,
field_ty: $field_ty,
field_kind: $field_kind,
inner_field_ty: $inner_field_ty,
}),
}
}
}

View File

@ -1,21 +0,0 @@
use syntax::codemap::Span;
use syntax::ext::base::*;
use syntax::tokenstream;
pub fn expand_load_table<'cx>(
cx: &'cx mut ExtCtxt,
sp: Span,
_tts: &[tokenstream::TokenTree]
) -> Box<MacResult+'cx> {
cx.span_warn(sp, "load_table_from_schema! is only supported on PostgreSQL");
DummyResult::any(sp)
}
pub fn expand_infer_schema<'cx>(
cx: &'cx mut ExtCtxt,
sp: Span,
_tts: &[tokenstream::TokenTree]
) -> Box<MacResult+'cx> {
cx.span_warn(sp, "infer_schema! is only supported on PostgreSQL");
DummyResult::any(sp)
}

View File

@ -1,41 +0,0 @@
use syntax::ast;
use syntax::codemap::Span;
use syntax::ext::base::{Annotatable, ExtCtxt};
use syntax::parse::token;
use model::Model;
use util::{lifetime_list_tokens, comma_delimited_tokens};
pub fn expand_derive_identifiable(
cx: &mut ExtCtxt,
span: Span,
_meta_item: &ast::MetaItem,
annotatable: &Annotatable,
push: &mut FnMut(Annotatable)
) {
if let Some(model) = Model::from_annotable(cx, span, annotatable) {
let table_name = model.table_name();
let struct_ty = &model.ty;
let lifetimes = lifetime_list_tokens(&model.generics.lifetimes, span);
let primary_key_names = model.primary_key_names();
let fields = model.field_tokens_for_stable_macro(cx);
for name in primary_key_names {
if model.attr_named(*name).is_none() {
cx.span_err(span, &format!("Could not find a field named `{}` on `{}`", name, model.name));
return;
}
}
let primary_key_names = comma_delimited_tokens(
primary_key_names.into_iter().map(|n| token::Ident(*n)), span);
push(Annotatable::Item(quote_item!(cx, impl_Identifiable! {
(
table_name = $table_name,
primary_key_names = ($primary_key_names),
struct_ty = $struct_ty,
lifetimes = ($lifetimes),
),
fields = [$fields],
}).unwrap()));
}
}

View File

@ -1,50 +0,0 @@
use syntax::ast::{self, MetaItem};
use syntax::codemap::Span;
use syntax::ext::base::{Annotatable, ExtCtxt};
use syntax::ptr::P;
use model::Model;
use util::{lifetime_list_tokens, struct_ty};
pub fn expand_derive_insertable(
cx: &mut ExtCtxt,
span: Span,
_meta_item: &MetaItem,
annotatable: &Annotatable,
push: &mut FnMut(Annotatable)
) {
if let Some(model) = Model::from_annotable(cx, span, annotatable) {
insertable_impl(cx, span, model.table_name(), &model)
.map(Annotatable::Item)
.map(push);
}
}
#[allow(unused_imports)]
fn insertable_impl(
cx: &mut ExtCtxt,
span: Span,
table_name: ast::Ident,
model: &Model,
) -> Option<P<ast::Item>> {
if !model.generics.ty_params.is_empty() {
cx.span_err(span, "#[derive(Insertable)] does not support generic types");
return None;
}
let struct_name = model.name;
let ty = struct_ty(cx, span, struct_name, &model.generics);
let lifetimes = lifetime_list_tokens(&model.generics.lifetimes, span);
let fields = model.attrs.iter().map(|a| a.to_stable_macro_tokens(cx)).collect::<Vec<_>>();
quote_item!(cx, impl_Insertable! {
(
struct_name = $struct_name,
table_name = $table_name,
struct_ty = $ty,
lifetimes = ($lifetimes),
),
fields = [$fields],
})
}

View File

@ -1,9 +0,0 @@
pub mod associations;
mod attr;
pub mod identifiable;
pub mod insertable;
pub mod migrations;
mod model;
pub mod queryable;
pub mod schema_inference;
pub mod update;

View File

@ -1,31 +0,0 @@
#![deny(warnings)]
#[macro_use] extern crate diesel;
extern crate diesel_codegen_shared;
extern crate syntex;
extern crate syntex_syntax as syntax;
include!(concat!(env!("OUT_DIR"), "/lib.rs"));
mod util;
use std::path::Path;
pub fn expand(input: &Path, output: &Path) -> Result<(), syntex::Error> {
let mut reg = syntex::Registry::new();
reg.add_attr("feature(custom_derive)");
reg.add_attr("feature(custom_attribute)");
reg.add_decorator("derive_AsChangeset", update::expand_derive_as_changeset);
reg.add_decorator("derive_Associations", associations::expand_derive_associations);
reg.add_decorator("derive_Identifiable", identifiable::expand_derive_identifiable);
reg.add_decorator("derive_Insertable", insertable::expand_derive_insertable);
reg.add_decorator("derive_Queryable", queryable::expand_derive_queryable);
reg.add_macro("embed_migrations", migrations::expand_embed_migrations);
reg.add_macro("infer_table_from_schema", schema_inference::expand_load_table);
reg.add_macro("infer_schema", schema_inference::expand_infer_schema);
reg.add_post_expansion_pass(util::strip_attributes);
reg.expand("", input, output)
}

View File

@ -1,111 +0,0 @@
use diesel_codegen_shared::migration_directory_from_given_path;
use std::error::Error;
use std::path::{Path, PathBuf};
use syntax::ast;
use syntax::codemap::Span;
use syntax::ext::base::*;
use syntax::util::small_vector::SmallVector;
use syntax::ptr::P;
use syntax::ext::build::AstBuilder;
use syntax::tokenstream;
pub fn expand_embed_migrations<'cx>(
cx: &'cx mut ExtCtxt,
sp: Span,
tts: &[tokenstream::TokenTree]
) -> Box<MacResult+'cx> {
let migrations_expr = migrations_directory_from_args(cx, sp, tts)
.and_then(|d| migration_literals_from_path(cx, sp, &d));
let migrations_expr = match migrations_expr {
Err(e) => {
cx.span_err(sp, &format!("Error reading migrations: {}", e));
return DummyResult::expr(sp);
}
Ok(v) => v,
};
let item = quote_item!(cx, mod embedded_migrations {
extern crate diesel;
use self::diesel::migrations::*;
use self::diesel::connection::SimpleConnection;
use std::io;
struct EmbeddedMigration {
version: &'static str,
up_sql: &'static str,
}
impl Migration for EmbeddedMigration {
fn version(&self) -> &str {
self.version
}
fn run(&self, conn: &SimpleConnection) -> Result<(), RunMigrationsError> {
conn.batch_execute(self.up_sql).map_err(Into::into)
}
fn revert(&self, _conn: &SimpleConnection) -> Result<(), RunMigrationsError> {
unreachable!()
}
}
const ALL_MIGRATIONS: &'static [&'static Migration] = $migrations_expr;
pub fn run<C: MigrationConnection>(conn: &C) -> Result<(), RunMigrationsError> {
run_with_output(conn, &mut io::sink())
}
pub fn run_with_output<C: MigrationConnection>(conn: &C, out: &mut io::Write)
-> Result<(), RunMigrationsError>
{
run_migrations(conn, ALL_MIGRATIONS.iter().map(|v| *v), out)
}
}).unwrap();
MacEager::items(SmallVector::one(item))
}
fn migrations_directory_from_args(
cx: &mut ExtCtxt,
sp: Span,
tts: &[tokenstream::TokenTree],
) -> Result<PathBuf, Box<Error>> {
let relative_path_to_migrations = if tts.is_empty() {
None
} else {
match get_single_str_from_tts(cx, sp, tts, "embed_migrations!") {
None => return Err("Usage error".into()),
value => value,
}
};
migration_directory_from_given_path(relative_path_to_migrations.as_ref().map(|v| &**v))
}
fn migration_literals_from_path(
cx: &ExtCtxt,
sp: Span,
path: &Path,
) -> Result<P<ast::Expr>, Box<Error>> {
use diesel::migrations::migration_paths_in_directory;
let exprs = try!(migration_paths_in_directory(&path)).into_iter()
.map(|e| migration_literal_from_path(cx, &e.path()))
.collect();
Ok(cx.expr_vec_slice(sp, try!(exprs)))
}
fn migration_literal_from_path(
cx: &ExtCtxt,
path: &Path,
) -> Result<P<ast::Expr>, Box<Error>> {
use diesel::migrations::version_from_path;
let version = try!(version_from_path(path));
let sql_file = path.join("up.sql");
let sql_file_path = sql_file.to_string_lossy();
Ok(quote_expr!(cx, &EmbeddedMigration {
version: $version,
up_sql: include_str!($sql_file_path),
}))
}

View File

@ -1,101 +0,0 @@
use syntax::ast;
use syntax::codemap::Span;
use syntax::ext::base::{Annotatable, ExtCtxt};
use syntax::ptr::P;
use syntax::parse::token::str_to_ident;
use syntax::tokenstream::TokenTree;
use attr::Attr;
use util::*;
pub struct Model {
pub ty: P<ast::Ty>,
pub attrs: Vec<Attr>,
pub name: ast::Ident,
pub generics: ast::Generics,
pub primary_key_names: Vec<ast::Ident>,
table_name_from_annotation: Option<ast::Ident>,
}
impl Model {
pub fn from_annotable(
cx: &mut ExtCtxt,
span: Span,
annotatable: &Annotatable,
) -> Option<Self> {
if let Annotatable::Item(ref item) = *annotatable {
let table_name_from_annotation =
str_value_of_attr_with_name(cx, &item.attrs, "table_name");
let primary_key_names =
list_value_of_attr_with_name(cx, &item.attrs, "primary_key")
.unwrap_or_else(|| vec![str_to_ident("id")]);
Attr::from_item(cx, item).map(|(generics, attrs)| {
let ty = struct_ty(cx, span, item.ident, &generics);
Model {
ty: ty,
attrs: attrs,
name: item.ident,
generics: generics,
primary_key_names: primary_key_names,
table_name_from_annotation: table_name_from_annotation,
}
})
} else {
None
}
}
pub fn primary_key_names(&self) -> &[ast::Ident] {
&self.primary_key_names
}
pub fn table_name(&self) -> ast::Ident {
self.table_name_from_annotation.unwrap_or_else(|| {
str_to_ident(&infer_table_name(&self.name.name.as_str()))
})
}
pub fn attr_named(&self, name: ast::Ident) -> Option<&Attr> {
self.attrs.iter().find(|attr| {
attr.column_name.name == name.name
})
}
pub fn field_tokens_for_stable_macro(&self, cx: &mut ExtCtxt) -> Vec<Vec<TokenTree>> {
self.attrs.iter().map(|a| a.to_stable_macro_tokens(cx)).collect()
}
}
pub fn infer_association_name(name: &str) -> String {
let mut result = String::with_capacity(name.len());
result.push_str(&name[..1].to_lowercase());
for character in name[1..].chars() {
if character.is_uppercase() {
result.push('_');
for lowercase in character.to_lowercase() {
result.push(lowercase);
}
} else {
result.push(character);
}
}
result
}
fn infer_table_name(name: &str) -> String {
let mut result = infer_association_name(name);
result.push('s');
result
}
#[test]
fn infer_table_name_pluralizes_and_downcases() {
assert_eq!("foos", &infer_table_name("Foo"));
assert_eq!("bars", &infer_table_name("Bar"));
}
#[test]
fn infer_table_name_properly_handles_underscores() {
assert_eq!("foo_bars", &infer_table_name("FooBar"));
assert_eq!("foo_bar_bazs", &infer_table_name("FooBarBaz"));
}

View File

@ -1,82 +0,0 @@
use syntax::ast::{
self,
Item,
MetaItem,
TyKind,
};
use syntax::codemap::Span;
use syntax::ext::base::{Annotatable, ExtCtxt};
use syntax::ext::build::AstBuilder;
use syntax::parse::token::*;
use syntax::ptr::P;
use attr::Attr;
use util::struct_ty;
pub fn expand_derive_queryable(
cx: &mut ExtCtxt,
span: Span,
meta_item: &MetaItem,
annotatable: &Annotatable,
push: &mut FnMut(Annotatable)
) {
if let Annotatable::Item(ref item) = *annotatable {
let (mut generics, attrs) = match Attr::from_item(cx, item) {
Some((generics, attrs)) => (generics, attrs),
None => {
cx.span_err(span, "`#[derive(Queryable)]` can only be applied to structs or tuple structs");
return;
}
};
let ty = struct_ty(cx, span, item.ident, &generics);
let row_type = cx.ty(span, TyKind::Tup(attrs.iter().map(|f| f.ty.clone()).collect()));
let build_impl = struct_literal_with_fields_assigned_to_row_elements(
span, &item, cx, &attrs);
let mut params = generics.ty_params.into_vec();
params.push(ty_param_with_name(cx, span, "__ST"));
params.push(ty_param_with_name(cx, span, "__DB"));
generics.ty_params = params.into();
let impl_item = quote_item!(cx,
impl$generics ::diesel::Queryable<__ST, __DB> for $ty where
__DB: ::diesel::backend::Backend + ::diesel::types::HasSqlType<__ST>,
$row_type: ::diesel::types::FromSqlRow<__ST, __DB>,
{
type Row = $row_type;
fn build(row: Self::Row) -> Self {
$build_impl
}
}
).unwrap();
push(Annotatable::Item(impl_item));
} else {
cx.span_err(meta_item.span,
"`derive` may only be applied to enums and structs");
};
}
fn ty_param_with_name(cx: &mut ExtCtxt, span: Span, name: &str) -> ast::TyParam {
cx.typaram(span, str_to_ident(name), P::new(), None)
}
fn struct_literal_with_fields_assigned_to_row_elements(
span: Span,
item: &Item,
cx: &mut ExtCtxt,
fields: &[Attr],
) -> P<ast::Expr> {
let tup = cx.expr_ident(span, str_to_ident("row"));
let fields = fields.iter().enumerate().map(|(i, field)| {
cx.field_imm(
span,
field.field_name.unwrap(),
cx.expr_tup_field_access(span, tup.clone(), i),
)
}).collect();
cx.expr_struct_ident(span, item.ident, fields)
}

View File

@ -1,197 +0,0 @@
use diesel_codegen_shared::*;
use syntax::ast;
use syntax::codemap::Span;
use syntax::ext::base::*;
use syntax::ext::build::AstBuilder;
use syntax::parse::token::{self, InternedString, str_to_ident};
use syntax::ptr::P;
use syntax::util::small_vector::SmallVector;
use syntax::tokenstream::TokenTree;
use util::comma_delimited_tokens;
pub fn expand_load_table<'cx>(
cx: &'cx mut ExtCtxt,
sp: Span,
tts: &[TokenTree]
) -> Box<MacResult+'cx> {
let mut exprs = match get_exprs_from_tts(cx, sp, tts) {
Some(ref exprs) if exprs.is_empty() => {
cx.span_err(sp, "load_table_from_schema! takes 2 arguments");
return DummyResult::any(sp);
}
None => return DummyResult::any(sp),
Some(exprs) => exprs.into_iter()
};
match load_table_body(cx, sp, &mut exprs) {
Ok(res) => res,
Err(res) => res,
}
}
pub fn load_table_body<T: Iterator<Item=P<ast::Expr>>>(
cx: &mut ExtCtxt,
sp: Span,
exprs: &mut T,
) -> Result<Box<MacResult>, Box<MacResult>> {
let database_url = try!(database_url(cx, sp, exprs));
let table_name = try!(next_str_lit(cx, sp, exprs));
let connection = match establish_connection(&database_url) {
Ok(conn) => conn,
Err(e) => {
cx.span_err(sp, &e.to_string());
return Err(DummyResult::any(sp));
}
};
table_macro_call(cx, sp, &connection, &table_name)
.map(|item| MacEager::items(SmallVector::one(item)))
}
pub fn expand_infer_schema<'cx>(
cx: &'cx mut ExtCtxt,
sp: Span,
tts: &[TokenTree]
) -> Box<MacResult+'cx> {
let exprs = match get_exprs_from_tts(cx, sp, tts) {
Some(exprs) => exprs.into_iter(),
None => return DummyResult::any(sp),
};
match infer_schema_body(cx, sp, exprs) {
Ok(res) => res,
Err(res) => res,
}
}
pub fn infer_schema_body<T: Iterator<Item=P<ast::Expr>>>(
cx: &mut ExtCtxt,
sp: Span,
exprs: T,
) -> Result<Box<MacResult>, Box<MacResult>> {
let mut exprs = exprs.peekable();
let database_url = try!(next_str_lit(cx, sp, &mut exprs));
let schema_name = if exprs.peek().is_some() {
Some(try!(next_str_lit(cx, sp, &mut exprs)))
} else {
None
};
let schema_inferences = infer_schema_for_schema_name(
cx,
&database_url,
schema_name.as_ref().map(|s| &**s),
);
Ok(MacEager::items(SmallVector::many(schema_inferences)))
}
fn infer_schema_for_schema_name(
cx: &mut ExtCtxt,
database_url: &str,
schema_name: Option<&str>,
) -> Vec<P<ast::Item>> {
let table_names = load_table_names(&database_url, schema_name).unwrap();
let impls = table_names.into_iter()
.map(|table_name| {
let table_name = match schema_name {
Some(name) => format!("{}.{}", name, table_name),
None => table_name,
};
quote_item!(cx, infer_table_from_schema!($database_url, $table_name);).unwrap()
})
.collect::<Vec<_>>();
match schema_name {
Some(name) => {
let schema_ident = str_to_ident(name);
let item = quote_item!(cx, pub mod $schema_ident { $impls }).unwrap();
vec![item]
}
None => impls,
}
}
fn table_macro_call(
cx: &mut ExtCtxt,
sp: Span,
connection: &InferConnection,
table_name: &str,
) -> Result<P<ast::Item>, Box<MacResult>> {
match get_table_data(connection, table_name) {
Err(e) => {
cx.span_err(sp, &e.to_string());
Err(DummyResult::any(sp))
}
Ok(data) => {
let primary_keys = match get_primary_keys(connection, table_name) {
Ok(keys) => keys,
Err(e) => {
cx.span_err(sp, &e.to_string());
return Err(DummyResult::any(sp));
}
};
let tokens = data.iter().map(|a| column_def_tokens(cx, sp, a, &connection))
.collect::<Vec<_>>();
let table_name = str_to_ident(table_name);
let primary_key_tokens = primary_keys.iter()
.map(|s| str_to_ident(&s))
.map(token::Ident);
let primary_key = comma_delimited_tokens(primary_key_tokens, sp);
let item = quote_item!(cx, table! {
$table_name ($primary_key) {
$tokens
}
}).unwrap();
Ok(item)
}
}
}
fn next_str_lit<T: Iterator<Item=P<ast::Expr>>>(
cx: &mut ExtCtxt,
sp: Span,
exprs: &mut T,
) -> Result<InternedString, Box<MacResult>> {
match expr_to_string(cx, exprs.next().unwrap(), "expected string literal") {
Some((s, _)) => Ok(s),
None => Err(DummyResult::any(sp)),
}
}
/// Builds the `column_name -> Type,` tokens for one column, wrapping the
/// base SQL type in `Array<_>` and/or `Nullable<_>` as reported by the
/// database. On a type-resolution error a diagnostic is emitted and the
/// unit type is substituted so expansion can proceed.
fn column_def_tokens(cx: &mut ExtCtxt, span: Span, attr: &ColumnInformation, conn: &InferConnection)
    -> Vec<TokenTree>
{
    let column_name = str_to_ident(&attr.column_name);
    let tpe = match determine_column_type(attr, conn) {
        Ok(ty) => {
            // Build a fully-qualified path (`::a::b::C`) from the segments.
            let idents = ty.path.iter().map(|a| str_to_ident(&a)).collect();
            let path = cx.path_global(span, idents);
            let mut path = quote_ty!(cx, $path);
            // Wrapping order matters: a nullable array column becomes
            // `Nullable<Array<T>>`, never `Array<Nullable<T>>`.
            if ty.is_array {
                path = quote_ty!(cx, Array<$path>);
            }
            if ty.is_nullable {
                path = quote_ty!(cx, Nullable<$path>);
            }
            path
        }
        Err(e) => {
            cx.span_err(span, &e.to_string());
            quote_ty!(cx, ())
        }
    };
    quote_tokens!(cx, $column_name -> $tpe,)
}
/// Reads the next string-literal macro argument and resolves it to a
/// database URL, reporting failures as macro-expansion errors.
fn database_url<T: Iterator<Item=P<ast::Expr>>>(
    cx: &mut ExtCtxt,
    sp: Span,
    exprs: &mut T,
) -> Result<String, Box<MacResult>> {
    let url_lit = try!(next_str_lit(cx, sp, exprs));
    extract_database_url(&url_lit)
        .map(|url| url.into_owned())
        .map_err(|msg| {
            cx.span_err(sp, &msg);
            DummyResult::any(sp)
        })
}

View File

@ -1,118 +0,0 @@
use syntax::ast::{self, MetaItem, NestedMetaItem, MetaItemKind};
use syntax::attr::HasAttrs;
use syntax::codemap::Span;
use syntax::ext::base::{Annotatable, ExtCtxt};
use syntax::ptr::P;
use model::Model;
use util::{lifetime_list_tokens, str_value_of_attr_with_name};
/// Entry point for `#[derive(AsChangeset)]`: builds the changeset impl for
/// the annotated struct and hands it to `push`, or reports an error when
/// the annotated item is not a struct or enum.
pub fn expand_derive_as_changeset(
    cx: &mut ExtCtxt,
    span: Span,
    meta_item: &MetaItem,
    annotatable: &Annotatable,
    push: &mut FnMut(Annotatable),
) {
    match Model::from_annotable(cx, span, annotatable) {
        Some(model) => {
            let options = changeset_options(cx, span, annotatable.attrs()).unwrap();
            let item = changeset_impl(cx, span, &options, &model).unwrap();
            push(Annotatable::Item(item));
        }
        None => {
            cx.span_err(meta_item.span,
                "`#[derive(AsChangeset)]` may only be applied to enums and structs");
        }
    }
}
/// Options parsed from the attributes on a `#[derive(AsChangeset)]` struct.
struct ChangesetOptions {
    // Value of the mandatory `#[table_name="..."]` attribute.
    table_name: ast::Ident,
    // From `#[changeset_options(treat_none_as_null = "...")]`; defaults to false.
    treat_none_as_null: bool,
}
/// Collects `ChangesetOptions` from the struct's attributes.
///
/// `treat_none_as_null` defaults to `false` when the `changeset_options`
/// attribute is absent; a missing `table_name` attribute is an error.
fn changeset_options(
    cx: &mut ExtCtxt,
    span: Span,
    attributes: &[ast::Attribute]
) -> Result<ChangesetOptions, ()> {
    let treat_none_as_null = match attributes.iter().find(|a| a.check_name("changeset_options")) {
        Some(attr) => try!(extract_treat_none_as_null(cx, attr)),
        None => false,
    };
    match str_value_of_attr_with_name(cx, attributes, "table_name") {
        Some(table_name) => Ok(ChangesetOptions {
            table_name: table_name,
            treat_none_as_null: treat_none_as_null,
        }),
        None => missing_table_name_error(cx, span),
    }
}
/// Parses `#[changeset_options(treat_none_as_null = "true"/"false")]`.
/// Any other shape of the attribute is reported as a usage error.
fn extract_treat_none_as_null(cx: &mut ExtCtxt, attr: &ast::Attribute) -> Result<bool, ()>{
    match attr.node.value.node {
        // Exactly one nested item is accepted, and it must be the
        // `treat_none_as_null` key.
        MetaItemKind::List(_, ref items) if items.len() == 1 => {
            if items[0].check_name("treat_none_as_null") {
                boolean_option(cx, &items[0])
            } else {
                options_usage_error(cx, attr.span)
            }
        }
        _ => options_usage_error(cx, attr.span),
    }
}
/// Interprets a meta item's string value as a boolean: only the literal
/// strings `"true"` and `"false"` are accepted; anything else is a usage
/// error.
fn boolean_option(cx: &mut ExtCtxt, item: &NestedMetaItem) -> Result<bool, ()> {
    if let Some(ref value) = item.value_str() {
        if *value == "true" {
            return Ok(true);
        }
        if *value == "false" {
            return Ok(false);
        }
    }
    options_usage_error(cx, item.span())
}
/// Reports the canonical usage message for `#[changeset_options]` and
/// returns `Err` so callers can bail out with `try!`.
fn options_usage_error<T>(cx: &mut ExtCtxt, span: Span) -> Result<T, ()> {
    // NOTE: this must not be a raw string. `\` line continuations are only
    // processed in ordinary string literals, so the previous `r#"..."#`
    // form embedded a literal backslash, newline, and indentation in the
    // diagnostic shown to the user.
    cx.span_err(span,
        "`changeset_options` must be used in the form \
         `#[changeset_options(treat_none_as_null = \"true\")]`");
    Err(())
}
/// Reports the missing `#[table_name]` diagnostic for
/// `#[derive(AsChangeset)]` and returns `Err` so callers can bail out.
fn missing_table_name_error<T>(cx: &mut ExtCtxt, span: Span) -> Result<T, ()> {
    // A raw string keeps the `\` and the following indentation verbatim;
    // use an ordinary literal so the line continuation collapses and the
    // user sees a single clean sentence.
    cx.span_err(span, "Structs annotated with `#[derive(AsChangeset)]` must \
        also be annotated with `#[table_name=\"something\"]`");
    Err(())
}
/// Builds the stable-macro `impl_AsChangeset!` invocation for a struct
/// annotated with `#[derive(AsChangeset)]`.
///
/// Primary-key fields are excluded from the generated changeset: they
/// identify the row being updated rather than data to write.
#[allow(unused_imports)] // quote_tokens! generates warnings
fn changeset_impl(
    cx: &mut ExtCtxt,
    span: Span,
    options: &ChangesetOptions,
    model: &Model,
) -> Option<P<ast::Item>> {
    let struct_name = model.name;
    let table_name = options.table_name;
    // The flag is forwarded to the macro as a string token, mirroring the
    // attribute syntax `treat_none_as_null = "true"`.
    let treat_none_as_null = if options.treat_none_as_null {
        quote_tokens!(cx, "true")
    } else {
        quote_tokens!(cx, "false")
    };
    let struct_ty = &model.ty;
    let lifetimes = lifetime_list_tokens(&model.generics.lifetimes, span);
    let pks = model.primary_key_names();
    // Keep only non-primary-key fields, rendered as stable-macro tokens.
    let fields = model.attrs.iter()
        .filter(|a| pks.iter().all(|pk| a.column_name.name != pk.name))
        .map(|a| a.to_stable_macro_tokens(cx))
        .collect::<Vec<_>>();
    quote_item!(cx, impl_AsChangeset! {
        (
            struct_name = $struct_name,
            table_name = $table_name,
            treat_none_as_null = $treat_none_as_null,
            struct_ty = $struct_ty,
            lifetimes = ($lifetimes),
        ),
        fields = [$fields],
    })
}

View File

@ -1,155 +0,0 @@
use syntax::ast::TyKind;
use syntax::ast;
use syntax::codemap::Span;
use syntax::ext::base::ExtCtxt;
use syntax::ext::build::AstBuilder;
use syntax::parse::token::{self, str_to_ident, intern_and_get_ident};
use syntax::ptr::P;
use syntax::tokenstream::TokenTree;
/// Extracts the string value of `#[name="value"]` as an identifier,
/// emitting a usage error when the attribute is not in key/value form.
fn str_value_of_attr(
    cx: &mut ExtCtxt,
    attr: &ast::Attribute,
    name: &str,
) -> Option<ast::Ident> {
    match attr.value_str() {
        Some(value) => Some(str_to_ident(&value)),
        None => {
            cx.span_err(attr.span(),
                &format!(r#"`{}` must be in the form `#[{}="something"]`"#, name, name));
            None
        }
    }
}
/// Parses `#[name(a, b, ...)]` into the list of identifiers it contains.
///
/// Items that are not bare words, or an attribute not in list form,
/// produce an error diagnostic; malformed items are skipped, so the
/// result may be shorter than the attribute's item list (possibly empty).
fn list_value_of_attr(
    cx: &mut ExtCtxt,
    attr: &ast::Attribute,
    name: &str,
) -> Vec<ast::Ident> {
    match attr.node.value.node {
        ast::MetaItemKind::List(_, ref items) => {
            items.iter().filter_map(|item| match item.word() {
                Some(word) => Some(str_to_ident(&word.name())),
                _ => {
                    cx.span_err(attr.span(),
                        &format!(r#"`{}` must be in the form `#[{}(something)]`"#, name, name));
                    None
                }
            }).collect()
        }
        _ => {
            cx.span_err(attr.span(),
                &format!(r#"`{}` must be in the form `#[{}(something)]`"#, name, name));
            Vec::new()
        },
    }
}
/// Finds the first attribute named `name` and extracts its string value
/// as an identifier; returns `None` when the attribute is absent.
pub fn str_value_of_attr_with_name(
    cx: &mut ExtCtxt,
    attrs: &[ast::Attribute],
    name: &str,
) -> Option<ast::Ident> {
    for attr in attrs {
        if attr.check_name(name) {
            return str_value_of_attr(cx, attr, name);
        }
    }
    None
}
/// Returns the single identifier from `#[name(ident)]`, if the attribute
/// is present.
///
/// Emits an error diagnostic when the attribute does not contain exactly
/// one identifier. In the zero-identifier case (malformed attribute whose
/// items were all rejected) `None` is returned; the previous `list[0]`
/// indexing panicked out of bounds after the error had been reported.
pub fn ident_value_of_attr_with_name(
    cx: &mut ExtCtxt,
    attrs: &[ast::Attribute],
    name: &str,
) -> Option<ast::Ident> {
    attrs.iter()
        .find(|a| a.check_name(name))
        .and_then(|a| {
            let list = list_value_of_attr(cx, &a, name);
            if list.len() != 1 {
                cx.span_err(a.span(),
                    &format!(r#"`{}` must be in the form `#[{}(something)]`"#, name, name));
            }
            // Still return the first identifier when more than one was
            // given (matching the old behavior); `None` when none were.
            list.first().cloned()
        })
}
/// Finds the first attribute named `name` and parses it as an identifier
/// list; returns `None` when the attribute is absent.
pub fn list_value_of_attr_with_name(
    cx: &mut ExtCtxt,
    attrs: &[ast::Attribute],
    name: &str,
) -> Option<Vec<ast::Ident>> {
    for attr in attrs {
        if attr.check_name(name) {
            return Some(list_value_of_attr(cx, attr, name));
        }
    }
    None
}
// Diesel's custom attributes. These must be stripped from the expanded
// crate (see `strip_attributes`) because plain rustc rejects unknown
// attributes.
const KNOWN_ATTRIBUTES: &'static [&'static str] = &[
    "belongs_to",
    "changeset_options",
    "column_name",
    "has_many",
    "table_name",
    "primary_key",
];
/// Removes every Diesel-specific attribute (see `KNOWN_ATTRIBUTES`) from
/// the crate so that the expanded source compiles under plain rustc,
/// which rejects unknown attributes.
pub fn strip_attributes(krate: ast::Crate) -> ast::Crate {
    use syntax::fold;
    struct StripAttributeFolder;
    impl fold::Folder for StripAttributeFolder {
        fn fold_attribute(&mut self, attr: ast::Attribute) -> Option<ast::Attribute> {
            // Returning `None` drops the attribute from the AST.
            if KNOWN_ATTRIBUTES.iter().any(|name| attr.check_name(name)) {
                None
            } else {
                Some(attr)
            }
        }
        // Required override: walk macro invocations unchanged instead of
        // the default behavior for unhandled macros.
        fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac {
            fold::noop_fold_mac(mac, self)
        }
    }
    fold::Folder::fold_crate(&mut StripAttributeFolder, krate)
}
/// Builds the type `Name<'lifetimes..., TyParams...>` for the annotated
/// struct so generated code can refer to it with its full generics.
pub fn struct_ty(
    cx: &mut ExtCtxt,
    span: Span,
    name: ast::Ident,
    generics: &ast::Generics,
) -> P<ast::Ty> {
    let lifetimes = generics.lifetimes.iter().map(|lt| lt.lifetime).collect();
    let ty_params = generics.ty_params.iter()
        .map(|param| cx.ty_ident(span, param.ident))
        .collect();
    // Final `Vec::new()`: no associated-type bindings on the path.
    cx.ty_path(cx.path_all(span, false, vec![name], lifetimes, ty_params, Vec::new()))
}
/// If `ty` is written as `Option<T>`, returns the inner type `T`.
///
/// NOTE(review): this is a purely syntactic check -- it tests whether the
/// first path segment is literally named `Option`, so aliases or renamed
/// re-exports are not recognized.
pub fn ty_param_of_option(ty: &ast::Ty) -> Option<&P<ast::Ty>> {
    match ty.node {
        TyKind::Path(_, ref path) => {
            path.segments.first().iter()
                .filter(|s| s.identifier.name.as_str() == intern_and_get_ident("Option"))
                .flat_map(|s| s.parameters.types().first().map(|p| *p))
                .next()
        }
        _ => None,
    }
}
/// Produces the token sequence `'a, 'b, ...` for the given lifetime
/// definitions, for splicing into generated macro invocations.
pub fn lifetime_list_tokens(lifetimes: &[ast::LifetimeDef], span: Span) -> Vec<TokenTree> {
    let lifetime_tokens = lifetimes.iter().map(|ld| {
        let name = ld.lifetime.name;
        token::Lifetime(ast::Ident::with_empty_ctxt(name))
    });
    comma_delimited_tokens(lifetime_tokens, span)
}
/// Interleaves the given tokens with commas, producing e.g. `a, b, c`.
pub fn comma_delimited_tokens<T>(tokens: T, span: Span) -> Vec<TokenTree> where
    T: IntoIterator<Item=token::Token>,
{
    // Wrap each token in a one-element array so slice `join` can splice a
    // comma token between consecutive elements.
    tokens.into_iter().map(|token| [TokenTree::Token(span, token)])
        .collect::<Vec<_>>()
        .join(&TokenTree::Token(span, token::Comma))
}

View File

@ -6,7 +6,6 @@ license = "MIT OR Apache-2.0"
build = "build.rs"
[build-dependencies]
diesel_codegen_syntex = { path = "../diesel_codegen_syntex", optional = true }
diesel = { path = "../diesel", default-features = false }
dotenv = "0.8.0"
@ -14,21 +13,16 @@ dotenv = "0.8.0"
assert_matches = "1.0.1"
chrono = { version = "^0.2.17" }
diesel = { path = "../diesel", default-features = false, features = ["quickcheck", "chrono", "uuid"] }
diesel_codegen = { path = "../diesel_codegen", optional = true }
diesel_codegen = { path = "../diesel_codegen" }
dotenv = "0.8.0"
quickcheck = { version = "0.3.1", features = ["unstable"] }
uuid = { version = ">=0.2.0, <0.4.0" }
[features]
default = ["with-syntex"]
unstable = ["diesel/unstable", "diesel_codegen"]
with-syntex = ["diesel_codegen_syntex"]
postgres = ["diesel/postgres"]
sqlite = ["diesel/sqlite"]
stable_postgres = ["with-syntex", "postgres", "diesel_codegen_syntex/postgres"]
stable_sqlite = ["with-syntex", "sqlite", "diesel_codegen_syntex/sqlite"]
unstable_postgres = ["unstable", "postgres", "diesel_codegen/postgres"]
unstable_sqlite = ["unstable", "sqlite", "diesel_codegen/sqlite"]
default = []
unstable = ["diesel/unstable"]
postgres = ["diesel/postgres", "diesel_codegen/postgres"]
sqlite = ["diesel/sqlite", "diesel_codegen/sqlite"]
[[test]]
name = "integration_tests"

View File

@ -1,25 +1,3 @@
// Stable builds pre-expand Diesel's annotations with syntex: the annotated
// source in tests/lib.in.rs is expanded into $OUT_DIR/lib.rs at build time.
#[cfg(not(feature = "unstable"))]
mod inner {
    extern crate diesel_codegen_syntex as diesel_codegen;
    use std::env;
    use std::path::Path;
    pub fn main() {
        let out_dir = env::var_os("OUT_DIR").unwrap();
        let src = Path::new("tests/lib.in.rs");
        let dst = Path::new(&out_dir).join("lib.rs");
        diesel_codegen::expand(&src, &dst).unwrap();
    }
}
// On nightly the proc-macro codegen runs inside rustc itself, so the
// build script has nothing to do.
#[cfg(feature = "unstable")]
mod inner {
    pub fn main() {}
}
extern crate diesel;
extern crate dotenv;
use self::diesel::*;
@ -56,5 +34,4 @@ const MIGRATION_SUBDIR: &'static str = "sqlite";
fn main() {
let migrations_dir = migrations::find_migrations_directory().unwrap().join(MIGRATION_SUBDIR);
migrations::run_pending_migrations_in_directory(&connection(), &migrations_dir, &mut io::sink()).unwrap();
::inner::main();
}

View File

@ -1,8 +0,0 @@
#[cfg(not(feature = "sqlite"))]
mod annotations;
mod associations;
mod deserialization;
mod insert;
mod schema;
mod schema_inference;
mod update;

View File

@ -1,40 +1,36 @@
#![cfg_attr(feature = "unstable", feature(proc_macro))]
extern crate quickcheck;
#[macro_use] extern crate assert_matches;
#[macro_use] extern crate diesel;
#[cfg(feature = "unstable")]
#[macro_use] extern crate diesel_codegen;
extern crate dotenv;
#[cfg(feature = "unstable")]
include!("lib.in.rs");
#[cfg(not(feature = "unstable"))]
include!(concat!(env!("OUT_DIR"), "/lib.rs"));
#[cfg(not(feature = "sqlite"))]
mod annotations;
mod associations;
mod boxed_queries;
mod connection;
// This should be in lib.in.rs restricted to PG, but
// syntex compiles the file even if the feature is unset,
// and the macro call is invalid on SQLite.
#[cfg(all(feature = "unstable", feature = "postgres"))]
#[cfg(feature = "postgres")]
mod custom_schemas;
mod debug;
mod delete;
mod deserialization;
mod errors;
mod expressions;
mod filter;
mod filter_operators;
mod find;
mod group_by;
mod insert;
mod internal_details;
mod joins;
mod macros;
mod order;
mod perf_details;
mod schema;
mod schema_dsl;
mod schema_inference;
mod select;
mod transactions;
mod types;
mod types_roundtrip;
mod update;

View File

@ -1,5 +1,3 @@
#![feature(proc_macro)]
#[macro_use] extern crate diesel;
#[macro_use] extern crate diesel_codegen;
extern crate dotenv;

View File

@ -1,5 +1,3 @@
#![feature(proc_macro)]
#[macro_use] extern crate diesel;
#[macro_use] extern crate diesel_codegen;
extern crate dotenv;

View File

@ -1,5 +1,3 @@
#![feature(proc_macro)]
#[macro_use] extern crate diesel;
#[macro_use] extern crate diesel_codegen;
extern crate dotenv;

View File

@ -1,18 +0,0 @@
# Demo crate for step 4 of the getting-started guide. Builds on stable
# Rust via syntex pre-expansion (`with-syntex`) or on nightly via proc
# macros (`nightly`, the default).
[package]
name = "diesel_demo_step_4"
version = "0.1.0"
authors = ["Sean Griffin <sean@seantheprogrammer.com>"]
# Pre-expansion entry point used by the syntex (stable) build.
build = "build.rs"
[build-dependencies]
diesel_codegen_syntex = { version = "0.9.0", features = ["postgres"], optional = true }
[dependencies]
diesel = { version = "0.9.0", features = ["postgres"] }
diesel_codegen = { version = "0.9.0", features = ["postgres"], optional = true }
dotenv = "0.8.0"
[features]
default = ["nightly"]
with-syntex = ["diesel_codegen_syntex"]
nightly = ["diesel/unstable", "diesel_codegen"]

View File

@ -1,17 +0,0 @@
// Stable builds: pre-expand src/lib.in.rs with syntex into $OUT_DIR/lib.rs,
// which src/lib.rs then includes.
#[cfg(feature = "with-syntex")]
fn main() {
    extern crate diesel_codegen_syntex as diesel_codegen;
    use std::env;
    use std::path::Path;
    let out_dir = env::var_os("OUT_DIR").unwrap();
    let src = Path::new("src/lib.in.rs");
    let dst = Path::new(&out_dir).join("lib.rs");
    diesel_codegen::expand(&src, &dst).unwrap();
}
// Nightly builds use proc-macro codegen inside rustc; nothing to do here.
#[cfg(feature = "nightly")]
fn main() {}

View File

@ -1,6 +0,0 @@
-- Blog posts for the getting-started demo.
CREATE TABLE posts (
  id SERIAL PRIMARY KEY,
  title VARCHAR NOT NULL,
  body TEXT NOT NULL,
  -- New posts start as unpublished drafts.
  published BOOLEAN NOT NULL DEFAULT FALSE
)

View File

@ -1,20 +0,0 @@
extern crate diesel_demo_step_4;
extern crate diesel;
use diesel::prelude::*;
use diesel_demo_step_4::*;
use std::env::args;
/// Deletes every post whose title contains the pattern given as the
/// first CLI argument, then reports how many rows were removed.
fn main() {
    use diesel_demo_step_4::schema::posts::dsl::*;
    let target = args().nth(1).expect("Expected a target to match against");
    // SQL LIKE with surrounding wildcards: substring match on the title.
    let pattern = format!("%{}%", target);
    let connection = establish_connection();
    let num_deleted = diesel::delete(posts.filter(title.like(pattern)))
        .execute(&connection)
        .expect("Error deleting posts");
    println!("Deleted {} posts", num_deleted);
}

View File

@ -1,21 +0,0 @@
extern crate diesel_demo_step_4;
extern crate diesel;
use diesel::prelude::*;
use diesel_demo_step_4::*;
use diesel_demo_step_4::models::Post;
use std::env::args;
/// Marks the post whose id is given as the first CLI argument as
/// published, then prints its title.
fn main() {
    use diesel_demo_step_4::schema::posts::dsl::{posts, published};
    let id = args().nth(1).expect("publish_post requires a post id")
        .parse::<i32>().expect("Invalid ID");
    let connection = establish_connection();
    // `get_result` returns the updated row, so we can show the title.
    let post = diesel::update(posts.find(id))
        .set(published.eq(true))
        .get_result::<Post>(&connection)
        .expect(&format!("Unable to find post {}", id));
    println!("Published post {}", post.title);
}

View File

@ -1,23 +0,0 @@
extern crate diesel_demo_step_4;
extern crate diesel;
use diesel_demo_step_4::*;
use diesel_demo_step_4::models::*;
use diesel::prelude::*;
/// Lists up to five published posts, printing each title and body.
fn main() {
    use diesel_demo_step_4::schema::posts::dsl::*;
    let connection = establish_connection();
    let results = posts.filter(published.eq(true))
        .limit(5)
        .load::<Post>(&connection)
        .expect("Error loading posts");
    println!("Displaying {} posts", results.len());
    for post in results {
        println!("{}", post.title);
        println!("-----------\n");
        println!("{}", post.body);
    }
}

View File

@ -1,28 +0,0 @@
extern crate diesel_demo_step_4;
extern crate diesel;
use self::diesel_demo_step_4::*;
use std::io::{stdin, Read};
/// Interactively creates a draft post: reads the title from the first
/// line of stdin and the body until EOF, then saves it (unpublished).
fn main() {
    let connection = establish_connection();
    let mut title = String::new();
    let mut body = String::new();
    println!("What would you like your title to be?");
    stdin().read_line(&mut title).unwrap();
    let title = title.trim_right(); // Remove the trailing newline
    println!("\nOk! Let's write {} (Press {} when finished)\n", title, EOF);
    stdin().read_to_string(&mut body).unwrap();
    let post = create_post(&connection, title, &body);
    println!("\nSaved draft {} with id {}", title, post.id);
}
// Platform-specific key chord that sends end-of-file on a terminal,
// shown to the user in the prompt above.
#[cfg(not(windows))]
const EOF: &'static str = "CTRL+D";
#[cfg(windows)]
const EOF: &'static str = "CTRL+Z";

View File

@ -1,2 +0,0 @@
// Public modules of the demo crate: the generated schema plus the model
// structs that map onto it.
pub mod schema;
pub mod models;

View File

@ -1,41 +0,0 @@
#![cfg_attr(feature = "nightly", feature(proc_macro))]
#[macro_use] extern crate diesel;
#[cfg(feature = "nightly")]
#[macro_use] extern crate diesel_codegen;
extern crate dotenv;
#[cfg(feature = "nightly")]
include!("lib.in.rs");
#[cfg(feature = "with-syntex")]
include!(concat!(env!("OUT_DIR"), "/lib.rs"));
use diesel::prelude::*;
use diesel::pg::PgConnection;
use dotenv::dotenv;
use std::env;
use self::models::{Post, NewPost};
/// Connects to the PostgreSQL database named by the `DATABASE_URL`
/// environment variable (loaded from a `.env` file when present).
///
/// Panics with a descriptive message if the variable is unset or the
/// connection fails -- acceptable for a demo binary.
pub fn establish_connection() -> PgConnection {
    dotenv().ok();
    let database_url = env::var("DATABASE_URL")
        .expect("DATABASE_URL must be set");
    PgConnection::establish(&database_url)
        .expect(&format!("Error connecting to {}", database_url))
}
/// Inserts a new post with the given title and body and returns the
/// freshly created row (including its database-assigned id).
pub fn create_post(conn: &PgConnection, title: &str, body: &str) -> Post {
    use schema::posts;
    let new_post = NewPost {
        title: title,
        body: body,
    };
    diesel::insert(&new_post).into(posts::table)
        .get_result(conn)
        .expect("Error saving new post")
}

View File

@ -1,16 +0,0 @@
use schema::posts;
/// A persisted blog post loaded from the `posts` table.
/// Field order must match the column order of the table's schema.
#[derive(Queryable)]
pub struct Post {
    pub id: i32,
    pub title: String,
    pub body: String,
    pub published: bool,
}
/// Borrowed data for inserting a new row into `posts`; the `id` and
/// `published` columns take their database defaults.
#[derive(Insertable)]
#[table_name="posts"]
pub struct NewPost<'a> {
    pub title: &'a str,
    pub body: &'a str,
}

View File

@ -1 +0,0 @@
// Generate the `schema` module by introspecting the database pointed to
// by DATABASE_URL (resolved through dotenv at compile time).
infer_schema!("dotenv:DATABASE_URL");

View File

@ -1,12 +0,0 @@
#!/bin/sh
# Smoke-test the example that builds on stable Rust (syntex-based codegen).
set -e
# Build the diesel CLI used for `diesel setup` below.
cd ../diesel_cli
cargo build
cd ../examples
# Only step 4 can be run on stable
cd getting_started_step_4
export DATABASE_URL=postgres://localhost/diesel_getting_started_step_4
../../target/debug/diesel setup
# Build with syntex pre-expansion instead of the default nightly features.
cargo build --no-default-features --features "with-syntex"