bjam for people who don't have it

This commit is contained in:
Kenneth Heafield 2011-11-18 13:14:09 -05:00
parent f15eee0abf
commit 9102477a07
243 changed files with 67532 additions and 0 deletions

3
.gitignore vendored
View File

@ -12,6 +12,9 @@ config.h
config.log
config.status
configure
jam-files/bjam
jam-files/engine/bootstrap
jam-files/engine/bin.*
lm/.deps/
lm/.libs/
util/.deps/

13
bjam Executable file
View File

@ -0,0 +1,13 @@
#!/usr/bin/env bash
# Wrapper that runs Boost.Build's bjam, bootstrapping a bundled copy from
# jam-files/engine when no system-wide bjam is installed.
#
# Fixes: '#!/bin/env bash' is not a valid interpreter path on most systems
# (env lives in /usr/bin); 'command -v' replaces the non-portable 'which';
# stderr redirect merged with '2>&1'.
set -e
# Prefer a bjam already on PATH; exec replaces this process entirely.
command -v bjam >/dev/null 2>&1 && exec bjam "$@"
top="$(dirname "$0")"
# Build the bundled engine once and cache the binary at jam-files/bjam.
if [ ! -x "$top"/jam-files/bjam ]; then
  pushd "$top/jam-files/engine"
  ./build.sh
  cp -f bin.*/bjam ../bjam
  popd
fi
# Point the bundled bjam at the bundled Boost.Build rules and hand over.
BOOST_BUILD_PATH="$top"/jam-files/boost-build exec "$top"/jam-files/bjam "$@"

23
jam-files/LICENSE_1_0.txt Normal file
View File

@ -0,0 +1,23 @@
Boost Software License - Version 1.0 - August 17th, 2003
Permission is hereby granted, free of charge, to any person or organization
obtaining a copy of the software and accompanying documentation covered by
this license (the "Software") to use, reproduce, display, distribute,
execute, and transmit the Software, and to prepare derivative works of the
Software, and to permit third-parties to whom the Software is furnished to
do so, all subject to the following:
The copyright notices in the Software and this entire statement, including
the above license grant, this restriction and the following disclaimer,
must be included in all copies of the Software, in whole or in part, and
all derivative works of the Software, unless such copies or derivative
works are solely in the form of machine-executable object code generated by
a source language processor.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT
SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE
FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

View File

@ -0,0 +1,8 @@
# Copyright 2001, 2002 Dave Abrahams
# Copyright 2002 Rene Rivera
# Copyright 2003 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Tell the bjam engine where the Boost.Build kernel lives: the built-in
# boost-build rule registers the named subdirectory (relative to this file)
# as the place from which bootstrap.jam is loaded.
boost-build kernel ;

View File

@ -0,0 +1,18 @@
# Copyright (c) 2003 Vladimir Prus.
#
# Use, modification and distribution is subject to the Boost Software
# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
# http://www.boost.org/LICENSE_1_0.txt)
# This file handles the initial phase of Boost.Build loading.
# Boost.Jam has already figured out where Boost.Build is
# and loads this file, which is responsible for initialization
# of basic facilities such as the module system and loading the
# main Boost.Build module, build-system.jam.
#
# Exact operation of this module is not interesting, it makes
# sense to look at build-system.jam right away.
# Load the kernel/bootstrap.jam, which does all the work.
# $(.bootstrap-file) is set by the engine to the path of this file;
# the :D modifier takes its directory part, so the include below loads
# <this-dir>/kernel/bootstrap.jam.
.bootstrap-file = $(.bootstrap-file:D)/kernel/bootstrap.jam ;
include $(.bootstrap-file) ;

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,198 @@
# Copyright (c) 2010 Vladimir Prus.
#
# Use, modification and distribution is subject to the Boost Software
# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
# http://www.boost.org/LICENSE_1_0.txt)
import property-set ;
import path ;
import modules ;
import "class" ;
import errors ;
import configure ;
# Locate the directory containing 'header'.  Search order: the explicit
# 'provided-path' argument, the global variable named by 'variable'
# (e.g. set from the command line), any <include> properties, and finally
# /usr/include on non-Windows targets.  The result is cached per
# (variable, property-set) pair in the module global
# $(variable)-$(properties).  Returns the directory found, or nothing.
#
# Fix: removed the leftover debug 'ECHO "Checking " ...' that printed a
# line for every candidate directory on every configuration check.
#
rule find-include-path ( variable : properties : header
    : provided-path ? )
{
    # FIXME: document which properties affect this function by
    # default.
    local target-os = [ $(properties).get <target-os> ] ;
    # NOTE(review): $(toolset) is not bound in this rule, so this builds a
    # property set with an empty <target-os> value -- presumably
    # $(target-os) was intended; confirm against upstream before changing.
    properties = [ property-set.create <target-os>$(toolset) ] ;
    if $($(variable)-$(properties))
    {
        # Cache hit: reuse the previously computed directory.
        return $($(variable)-$(properties)) ;
    }
    else
    {
        # Fall back to the module-global variable of the same name.
        provided-path ?= [ modules.peek : $(variable) ] ;
        includes = $(provided-path) ;
        includes += [ $(properties).get <include> ] ;
        if [ $(properties).get <target-os> ] != windows
        {
            # FIXME: use sysroot
            includes += /usr/include ;
        }
        local result ;
        while ! $(result) && $(includes)
        {
            local f = [ path.root $(header) $(includes[1]) ] ;
            if [ path.exists $(f) ]
            {
                result = $(includes[1]) ;
            }
            else if $(provided-path)
            {
                # The user named a directory explicitly; not finding the
                # header there is a hard error rather than a fallthrough.
                errors.user-error "Could not find header" $(header)
                    : "in the user-specified directory" $(provided-path) ;
            }
            includes = $(includes[2-]) ;
        }
        # Cache and return the search result (possibly empty = not found).
        $(variable)-$(properties) = $(result) ;
        return $(result) ;
    }
}
# Locate one of the libraries named in 'names' as a file.  Search order:
# the explicit 'provided-path', the global variable named by 'variable',
# any <library-path> properties, and standard system library directories
# on non-Windows targets.  On Windows '<name>.lib' is probed; elsewhere
# 'lib<name>.so'.  The result is cached per (variable, property-set)
# pair.  Returns the full path of the library found, or nothing.
#
# Fix: removed the leftover debug 'ECHO "CHECKING ..."' that printed a
# line for every candidate file on every configuration check.
#
rule find-library ( variable : properties : names + : provided-path ? )
{
    local target-os = [ $(properties).get <target-os> ] ;
    # NOTE(review): $(toolset) is unbound here, exactly as in
    # find-include-path -- presumably $(target-os) was intended; confirm.
    properties = [ property-set.create <target-os>$(toolset) ] ;
    if $($(variable)-$(properties))
    {
        # Cache hit.
        return $($(variable)-$(properties)) ;
    }
    else
    {
        provided-path ?= [ modules.peek : $(variable) ] ;
        paths = $(provided-path) ;
        paths += [ $(properties).get <library-path> ] ;
        if [ $(properties).get <target-os> ] != windows
        {
            paths += /usr/lib /usr/lib32 /usr/lib64 ;
        }
        local result ;
        # NOTE(review): the inner loop consumes 'names', so iterations of
        # the outer loop after the first see an empty name list; verify the
        # intended search order (all names per path) before relying on it.
        while ! $(result) && $(paths)
        {
            while ! $(result) && $(names)
            {
                local f ;
                if $(target-os) = windows
                {
                    f = $(paths[1])/$(names[1]).lib ;
                    if [ path.exists $(f) ]
                    {
                        result = $(f) ;
                    }
                }
                else
                {
                    # FIXME: check for .a as well, depending on
                    # the 'link' feature.
                    f = $(paths[1])/lib$(names[1]).so ;
                    if [ path.exists $(f) ]
                    {
                        result = $(f) ;
                    }
                }
                if ! $(result) && $(provided-path)
                {
                    # NOTE(review): this fires as soon as the first name is
                    # missing, before the remaining names are tried --
                    # confirm that is the intended behaviour.
                    errors.user-error "Could not find either of: " $(names)
                        : "in the user-specified directory" $(provided-path) ;
                }
                names = $(names[2-]) ;
            }
            paths = $(paths[2-]) ;
        }
        # Cache and return the result (possibly empty = not found).
        $(variable)-$(properties) = $(result) ;
        return $(result) ;
    }
}
# Metatarget representing an external library whose location is discovered
# at build time via ac.find-include-path / ac.find-library.  Subclasses
# call set-header and set-default-names to describe what to look for;
# named parameters (root, include-path, library-path, library-name,
# condition) passed through 'using' override the search.
#
# Fixes: removed the leftover debug 'ECHO "XXX" ...' in reconfigure, and
# corrected the log message "no found" -> "not found".
#
class ac-library : basic-target
{
    import errors ;
    import indirect ;
    import virtual-target ;
    import ac ;
    import configure ;

    rule __init__ ( name : project : * : * )
    {
        basic-target.__init__ $(name) : $(project) : $(sources)
            : $(requirements) ;

        # Forward the remaining positional argument groups as named
        # parameters of the form "<name> <value>...".
        reconfigure $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
    }

    # Record the header whose presence identifies a usable include dir.
    rule set-header ( header )
    {
        self.header = $(header) ;
    }

    # Record the library base names tried when none are given explicitly.
    rule set-default-names ( names + )
    {
        self.default-names = $(names) ;
    }

    # Parse named parameter groups; each group is "<name> <value>..." with
    # <name> one of: root include-path library-path library-name condition.
    # include-path and library-path default to subdirectories of root.
    rule reconfigure ( * : * )
    {
        if ! $(1)
        {
            # This is 'using xxx ;'. Nothing to configure, really.
        }
        else
        {
            for i in 1 2 3 4 5 6 7 8 9
            {
                # FIXME: this naming is inconsistent with XXX_INCLUDE/XXX_LIBRARY
                if ! ( $($(i)[1]) in root include-path library-path library-name condition )
                {
                    errors.user-error "Invalid named parameter" $($(i)[1]) ;
                }
                local name = $($(i)[1]) ;
                local value = $($(i)[2-]) ;
                # A parameter may be given once; re-specifying with a
                # different value is an error.
                if $($(name)) && $($(name)) != $(value)
                {
                    errors.user-error "Attempt to change value of '$(name)'" ;
                }
                $(name) = $(value) ;
            }
            include-path ?= $(root)/include ;
            library-path ?= $(root)/lib ;
        }
    }

    rule construct ( name : sources * : property-set )
    {
        # FIXME: log results.
        local libnames = $(library-name) ;
        if ! $(libnames) && ! $(include-path) && ! $(library-path)
        {
            # Legacy global variables, e.g. FOO_NAME.
            libnames = [ modules.peek : $(name:U)_NAME ] ;
            # Backward compatibility only.
            libnames ?= [ modules.peek : $(name:U)_BINARY ] ;
        }
        libnames ?= $(self.default-names) ;

        local includes = [
            ac.find-include-path $(name:U)_INCLUDE : $(property-set) : $(self.header) : $(include-path) ] ;
        local library = [ ac.find-library $(name:U)_LIBRARY : $(property-set) : $(libnames) : $(library-path) ] ;
        if $(includes) && $(library)
        {
            library = [ virtual-target.from-file $(library) : . : $(self.project) ] ;
            configure.log-library-search-result $(name) : "found" ;
            # Expose the include directory and library file to dependents.
            return [ property-set.create <include>$(includes) <source>$(library) ] ;
        }
        else
        {
            configure.log-library-search-result $(name) : "not found" ;
        }
    }
}

View File

@ -0,0 +1,73 @@
# Copyright 2003, 2004, 2006 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# This module defines the 'alias' rule and the associated target class.
#
# Alias is just a main target which returns its source targets without any
# processing. For example:
#
# alias bin : hello test_hello ;
# alias lib : helpers xml_parser ;
#
# Another important use of 'alias' is to conveniently group source files:
#
# alias platform-src : win.cpp : <os>NT ;
# alias platform-src : linux.cpp : <os>LINUX ;
# exe main : main.cpp platform-src ;
#
# Lastly, it is possible to create a local alias for some target, with different
# properties:
#
# alias big_lib : : @/external_project/big_lib/<link>static ;
#
import "class" : new ;
import project ;
import property-set ;
import targets ;
# Target class backing the 'alias' rule: passes its source targets through
# unchanged and forwards their usage requirements to dependents.
class alias-target-class : basic-target
{
rule __init__ ( name : project : sources * : requirements *
: default-build * : usage-requirements * )
{
basic-target.__init__ $(name) : $(project) : $(sources) :
$(requirements) : $(default-build) : $(usage-requirements) ;
}
# No processing: return an empty property set plus the sources themselves.
rule construct ( name : source-targets * : property-set )
{
return [ property-set.empty ] $(source-targets) ;
}
# Usage requirements are the base ones plus whatever the sources export.
rule compute-usage-requirements ( subvariant )
{
local base = [ basic-target.compute-usage-requirements $(subvariant) ] ;
return [ $(base).add [ $(subvariant).sources-usage-requirements ] ] ;
}
}
# Declares the 'alias' target. It will process its sources virtual-targets by
# returning them unaltered as its own constructed virtual-targets.
#
rule alias ( name : sources * : requirements * : default-build * :
usage-requirements * )
{
local project = [ project.current ] ;
# Register one alternative of the main target 'name' backed by
# alias-target-class; the targets.* helpers normalize each argument list
# in the context of the current project.
targets.main-target-alternative
[ new alias-target-class $(name) : $(project)
: [ targets.main-target-sources $(sources) : $(name) : no-renaming ]
: [ targets.main-target-requirements $(requirements) : $(project) ]
: [ targets.main-target-default-build $(default-build) : $(project)
]
: [ targets.main-target-usage-requirements $(usage-requirements) :
$(project) ]
] ;
}
# Make 'alias' visible as a global rule in Jamfiles.
IMPORT $(__name__) : alias : : alias ;

View File

@ -0,0 +1,322 @@
# Copyright 2002 Dave Abrahams
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
import "class" : new ;
import sequence ;
import set ;
import regex ;
import feature ;
import property ;
import container ;
import string ;
# Transform property-set by applying f to each component property.
# 'f' is the name of a rule taking a property list and returning a
# transformed list; the result is re-joined with '/'.
#
local rule apply-to-property-set ( f property-set )
{
local properties = [ feature.split $(property-set) ] ;
return [ string.join [ $(f) $(properties) ] : / ] ;
}
# Expand the given build request by combining all property-sets which do not
# specify conflicting non-free features. Expects all the project files to
# already be loaded.
#
rule expand-no-defaults ( property-sets * )
{
# First make all features and subfeatures explicit.
local expanded-property-sets = [ sequence.transform apply-to-property-set
feature.expand-subfeatures : $(property-sets) ] ;
# Now combine all of the expanded property-sets
# NOTE(review): $(feature-space) is not bound in this rule, so the second
# argument below expands to nothing -- apparently vestigial; confirm.
local product = [ x-product $(expanded-property-sets) : $(feature-space) ] ;
return $(product) ;
}
# Implementation of x-product, below. Expects all the project files to already
# be loaded.
#
# Relies on Jam's dynamic scoping: 'x-product-used' and 'x-product-seen'
# are locals of the caller (x-product / recursive invocations) that this
# rule reads and appends to.  'x-product-used' holds the non-free feature
# grammars claimed at outer recursion levels; 'x-product-seen' collects
# grammars that caused a conflict lower down, telling outer levels to
# recurse again without them.
#
local rule x-product-aux ( property-sets + )
{
local result ;
# Non-free feature grammars present in the first property-set.
local p = [ feature.split $(property-sets[1]) ] ;
local f = [ set.difference $(p:G) : [ feature.free-features ] ] ;
local seen ;
# No conflict with things used at a higher level?
if ! [ set.intersection $(f) : $(x-product-used) ]
{
local x-product-seen ;
{
# Do not mix in any conflicting features.
local x-product-used = $(x-product-used) $(f) ;
if $(property-sets[2])
{
local rest = [ x-product-aux $(property-sets[2-]) : $(feature-space) ] ;
result = $(property-sets[1])/$(rest) ;
}
result ?= $(property-sets[1]) ;
}
# If we did not encounter a conflicting feature lower down, do not
# recurse again.
if ! [ set.intersection $(f) : $(x-product-seen) ]
{
property-sets = ;
}
seen = $(x-product-seen) ;
}
if $(property-sets[2])
{
# Also produce combinations that omit the first property-set.
result += [ x-product-aux $(property-sets[2-]) : $(feature-space) ] ;
}
# Note that we have seen these features so that higher levels will recurse
# again without them set.
x-product-seen += $(f) $(seen) ;
return $(result) ;
}
# Return the cross-product of all elements of property-sets, less any that would
# contain conflicting values for single-valued features. Expects all the project
# files to already be loaded.
#
local rule x-product ( property-sets * )
{
# Appending the literal '.non-empty' to each element yields a non-empty
# list exactly when $(property-sets) itself is non-empty.
if $(property-sets).non-empty
{
# Prepare some "scoped globals" that can be used by the implementation
# function, x-product-aux.
local x-product-seen x-product-used ;
return [ x-product-aux $(property-sets) : $(feature-space) ] ;
}
# Otherwise return empty.
}
# Returns true if either 'v' or the part of 'v' before the first '-' symbol is
# an implicit value. Expects all the project files to already be loaded.
# E.g. 'gcc-3.0.1' qualifies because 'gcc' is an implicit toolset value.
#
local rule looks-like-implicit-value ( v )
{
if [ feature.is-implicit-value $(v) ]
{
return true ;
}
else
{
# Try the prefix before the first dash, e.g. 'gcc' in 'gcc-3.0.1'.
local split = [ regex.split $(v) - ] ;
if [ feature.is-implicit-value $(split[1]) ]
{
return true ;
}
}
}
# Takes the command line tokens (such as taken from the ARGV rule) and
# constructs a build request from them. Returns a vector of two vectors (where
# "vector" means container.jam's "vector"). First is the set of targets
# specified in the command line, and second is the set of requested build
# properties. Expects all the project files to already be loaded.
#
rule from-command-line ( command-line * )
{
local targets ;
local properties ;
# Skip argv[0], the program name.
command-line = $(command-line[2-]) ;
local skip-next = ;
for local e in $(command-line)
{
if $(skip-next)
{
# Previous token was an option that consumes a separate value.
skip-next = ;
}
else if ! [ MATCH "^(-).*" : $(e) ]
{
# Build request spec either has "=" in it or completely consists of
# implicit feature values.
# NOTE(review): 'fs' below is assigned but never used.
local fs = feature-space ;
if [ MATCH "(.*=.*)" : $(e) ]
|| [ looks-like-implicit-value $(e:D=) : $(feature-space) ]
{
properties += [ convert-command-line-element $(e) :
$(feature-space) ] ;
}
else
{
# Anything else that is not an option is a target name.
targets += $(e) ;
}
}
else if [ MATCH "^(-[-ldjfsto])$" : $(e) ]
{
# Option whose value is the next token (e.g. -d 2): skip it.
skip-next = true ;
}
}
return [ new vector
[ new vector $(targets) ]
[ new vector $(properties) ] ] ;
}
# Converts one element of command line build request specification into internal
# form. Expects all the project files to already be loaded.
# E.g. "gcc/debug" -> gcc/debug, "include=a,b" -> <include>a <include>b,
# joined back with '/' between the slash-separated parts.
#
local rule convert-command-line-element ( e )
{
local result ;
local parts = [ regex.split $(e) "/" ] ;
while $(parts)
{
local p = $(parts[1]) ;
# "feature=value[,value...]" form?
local m = [ MATCH "([^=]*)=(.*)" : $(p) ] ;
local lresult ;
local feature ;
local values ;
if $(m)
{
feature = $(m[1]) ;
values = [ regex.split $(m[2]) "," ] ;
lresult = <$(feature)>$(values) ;
}
else
{
# Bare implicit values, possibly comma-separated.
lresult = [ regex.split $(p) "," ] ;
}
if $(feature) && free in [ feature.attributes $(feature) ]
{
# If we have free feature, then the value is everything
# until the end of the command line token. Slashes in
# the following string are not taken to mean separation
# of properties. Commas are also not interpreted specially.
values = $(values:J=,) ;
values = $(values) $(parts[2-]) ;
values = $(values:J=/) ;
lresult = <$(feature)>$(values) ;
parts = ;
}
if ! [ MATCH (.*-.*) : $(p) ]
{
# property.validate cannot handle subfeatures, so we avoid the check
# here.
for local p in $(lresult)
{
property.validate $(p) : $(feature-space) ;
}
}
if ! $(result)
{
result = $(lresult) ;
}
else
{
# Join successive parts back with '/'.
result = $(result)/$(lresult) ;
}
parts = $(parts[2-]) ;
}
return $(result) ;
}
# Self-test for this module, run by Boost.Build's unit-test machinery.
# Sets up a throw-away feature space, then checks expand-no-defaults and
# from-command-line against known inputs.
rule __test__ ( )
{
import assert ;
import feature ;
feature.prepare-test build-request-test-temp ;
import build-request ;
import build-request : expand-no-defaults : build-request.expand-no-defaults ;
import errors : try catch ;
import feature : feature subfeature ;
# A small synthetic feature space used by all the assertions below.
feature toolset : gcc msvc borland : implicit ;
subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4
3.0 3.0.1 3.0.2 : optional ;
feature variant : debug release : implicit composite ;
feature inlining : on off ;
feature "include" : : free ;
feature stdlib : native stlport : implicit ;
feature runtime-link : dynamic static : symmetric ;
# Empty build requests should expand to empty.
assert.result
: build-request.expand-no-defaults ;
assert.result
<toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>debug
<toolset>msvc/<stdlib>stlport/<variant>debug
<toolset>msvc/<variant>debug
: build-request.expand-no-defaults gcc-3.0.1/stlport msvc/stlport msvc debug ;
assert.result
<toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>debug
<toolset>msvc/<variant>debug
<variant>debug/<toolset>msvc/<stdlib>stlport
: build-request.expand-no-defaults gcc-3.0.1/stlport msvc debug msvc/stlport ;
assert.result
<toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>debug/<inlining>off
<toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>release/<inlining>off
: build-request.expand-no-defaults gcc-3.0.1/stlport debug release <inlining>off ;
assert.result
<include>a/b/c/<toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>debug/<include>x/y/z
<include>a/b/c/<toolset>msvc/<stdlib>stlport/<variant>debug/<include>x/y/z
<include>a/b/c/<toolset>msvc/<variant>debug/<include>x/y/z
: build-request.expand-no-defaults <include>a/b/c gcc-3.0.1/stlport msvc/stlport msvc debug <include>x/y/z ;
# Command-line parsing: targets land in slot 1, properties in slot 2.
local r ;
r = [ build-request.from-command-line bjam debug runtime-link=dynamic ] ;
assert.equal [ $(r).get-at 1 ] : ;
assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic ;
# '/' inside a non-free feature value must be rejected.
try ;
{
build-request.from-command-line bjam gcc/debug runtime-link=dynamic/static ;
}
catch \"static\" is not a value of an implicit feature ;
r = [ build-request.from-command-line bjam -d2 --debug debug target runtime-link=dynamic ] ;
assert.equal [ $(r).get-at 1 ] : target ;
assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic ;
r = [ build-request.from-command-line bjam debug runtime-link=dynamic,static ] ;
assert.equal [ $(r).get-at 1 ] : ;
assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic <runtime-link>static ;
r = [ build-request.from-command-line bjam debug gcc/runtime-link=dynamic,static ] ;
assert.equal [ $(r).get-at 1 ] : ;
assert.equal [ $(r).get-at 2 ] : debug gcc/<runtime-link>dynamic
gcc/<runtime-link>static ;
r = [ build-request.from-command-line bjam msvc gcc,borland/runtime-link=static ] ;
assert.equal [ $(r).get-at 1 ] : ;
assert.equal [ $(r).get-at 2 ] : msvc gcc/<runtime-link>static
borland/<runtime-link>static ;
r = [ build-request.from-command-line bjam gcc-3.0 ] ;
assert.equal [ $(r).get-at 1 ] : ;
assert.equal [ $(r).get-at 2 ] : gcc-3.0 ;
feature.finish-test build-request-test-temp ;
}

View File

@ -0,0 +1,237 @@
# Copyright (c) 2010 Vladimir Prus.
#
# Use, modification and distribution is subject to the Boost Software
# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
# http://www.boost.org/LICENSE_1_0.txt)
# This module defines function to help with two main tasks:
#
# - Discovering build-time configuration for the purposes of adjusting
# build process.
# - Reporting what is built, and how it is configured.
import targets ;
import errors ;
import targets ;
import sequence ;
import property ;
import property-set ;
import "class" : new ;
import common ;
import path ;
# Placeholder; intentionally does nothing.
rule log-summary ( )
{
}
# Column width used when padding report lines.
.width = 30 ;
rule set-width ( width )
{
.width = $(width) ;
}
# Declare that the components specified by the parameter exist.
rule register-components ( components * )
{
.components += $(components) ;
}
# Declare that the components specified by the parameters will
# be built.
rule components-building ( components * )
{
.built-components += $(components) ;
}
# Report something about component configuration that the
# user should better know.
rule log-component-configuration ( component : message )
{
# FIXME: implement per-property-set logs
.component-logs.$(component) += $(message) ;
}
# Echo one check result, printing a banner before the first one.
rule log-check-result ( result )
{
if ! $(.announced-checks)
{
ECHO "Performing configuration checks\n" ;
.announced-checks = 1 ;
}
ECHO $(result) ;
#.check-results += $(result) ;
}
# Format and log a "library : found / not found" line, padded to .width.
rule log-library-search-result ( library : result )
{
local x = [ PAD "    - $(library) : $(result)" : $(.width) ] ;
log-check-result "$(x)" ;
}
# Print a human-readable summary of which registered components will and
# will not be built, together with any per-component log messages.
#
# Fix: the de-duplicated list produced by sequence.unique was stored in
# 'c' but the loop iterated the raw $(.components) (clobbering 'c'), so
# components registered more than once were printed more than once.
rule print-component-configuration ( )
{
    # Components may be registered repeatedly; report each only once.
    local c = [ sequence.unique $(.components) ] ;

    ECHO "\nComponent configuration:\n" ;
    for local component in $(c)
    {
        local s ;
        if $(component) in $(.built-components)
        {
            s = "building" ;
        }
        else
        {
            s = "not building" ;
        }
        ECHO [ PAD " - $(component)" : $(.width) ] ": $(s)" ;
        # Any extra messages recorded via log-component-configuration.
        for local m in $(.component-logs.$(component))
        {
            ECHO " -" $(m) ;
        }
    }
    ECHO ;
}
# Print all recorded check results in one batch.  Currently a no-op in
# practice: log-check-result leaves .check-results commented out, so the
# list below is always empty.
rule print-configure-checks-summary ( )
{
# FIXME: the problem with that approach is that
# the user sees checks summary when all checks are
# done, and has no progress reporting while the
# checks are being executed.
if $(.check-results)
{
ECHO "Configuration checks summary\n" ;
for local r in $(.check-results)
{
ECHO $(r) ;
}
ECHO ;
}
}
# Attempt to build a metatarget named by 'metatarget-reference'
# in context of 'project' with properties 'ps'.
# Returns non-empty value if build is OK.
# Results are cached per (what, ps); 'retry' forces a re-test.
rule builds-raw ( metatarget-reference : project : ps : what : retry ? )
{
local result ;
if ! $(retry) && ! $(.$(what)-tested.$(ps))
{
.$(what)-tested.$(ps) = true ;
# Generate the probe's virtual targets and actualize them into
# low-level jam targets we can try to update.
local targets = [ targets.generate-from-reference
$(metatarget-reference) : $(project) : $(ps) ] ;
local jam-targets ;
for local t in $(targets[2-])
{
jam-targets += [ $(t).actualize ] ;
}
if ! UPDATE_NOW in [ RULENAMES ]
{
# Cannot determine. Assume existence.
# NOTE(review): 'result' stays empty on this path, so the check is
# effectively reported as failing despite the comment -- confirm.
}
else
{
local x = [ PAD "    - $(what)" : $(.width) ] ;
# Build the probe now, sending output to the configure log.
if [ UPDATE_NOW $(jam-targets) :
$(.log-fd) : ignore-minus-n : ignore-minus-q ]
{
.$(what)-supported.$(ps) = yes ;
result = true ;
log-check-result "$(x) : yes" ;
}
else
{
log-check-result "$(x) : no" ;
}
}
return $(result) ;
}
else
{
# Cached: return the previously recorded verdict.
return $(.$(what)-supported.$(ps)) ;
}
}
# Convenience front end for builds-raw: derives the relevant property
# subset and current project from context, then delegates.
rule builds ( metatarget-reference : properties * : what ? : retry ? )
{
what ?= "$(metatarget-reference) builds" ;
# FIXME: this should not be hardcoded. Other checks might
# want to consider different set of features as relevant.
local toolset = [ property.select <toolset> : $(properties) ] ;
local toolset-version-property = "<toolset-$(toolset:G=):version>" ;
# Only these features participate in the cache key / probe context.
local relevant = [ property.select <target-os> <toolset> $(toolset-version-property)
<address-model> <architecture>
: $(properties) ] ;
local ps = [ property-set.create $(relevant) ] ;
local t = [ targets.current ] ;
local p = [ $(t).project ] ;
return [ builds-raw $(metatarget-reference) : $(p) : $(ps) : $(what) : $(retry) ] ;
}
# Called by Boost.Build startup code to specify name of a file
# that will receive results of configure checks. This
# should never be called by users.
rule set-log-file ( log-file )
{
# Ensure the containing directory exists, then keep the file
# descriptor open for UPDATE_NOW output redirection.
path.makedirs [ path.parent $(log-file) ] ;
.log-fd = [ FILE_OPEN $(log-file) : "w" ] ;
}
# Frontend rules
# Helper object behind check-target-builds: holds the probe target, an
# optional log message, and the two alternative property lists.
class check-target-builds-worker
{
    import configure ;
    import property-set ;
    import targets ;
    import property ;

    rule __init__ ( target message ? : true-properties * : false-properties * )
    {
        self.target = $(target) ;
        self.message = $(message) ;
        self.true-properties = $(true-properties) ;
        self.false-properties = $(false-properties) ;
    }

    # Evaluate the check in the context of 'properties': choose the
    # true-properties when the probe target builds, the false-properties
    # otherwise, then expand conditionals in the chosen list.
    rule check ( properties * )
    {
        local chosen = $(self.false-properties) ;
        if [ configure.builds $(self.target) : $(properties) : $(self.message) ]
        {
            chosen = $(self.true-properties) ;
        }
        return [ property.evaluate-conditionals-in-context $(chosen) : $(properties) ] ;
    }
}
# Public entry point: returns a <conditional> property that, when
# evaluated, expands to true-properties if 'target' builds and to
# false-properties otherwise.
rule check-target-builds ( target message ? : true-properties * : false-properties * )
{
local instance = [ new check-target-builds-worker $(target) $(message) : $(true-properties)
: $(false-properties) ] ;
# The '@' syntax makes property evaluation call $(instance).check.
return <conditional>@$(instance).check ;
}
# Make the rule available globally in Jamfiles.
IMPORT $(__name__) : check-target-builds : : check-target-builds ;

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,232 @@
# Copyright 2003 Rene Rivera
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Modifiers are generalized generators that mutate targets in specific ways.
# This structure allows for grouping a variety of functionality in an
# orthogonal way to the functionality in toolsets, and without specifying
# more target variations. In turn the modifiers can be used as building
# blocks to implement simple requests, like the <version> feature.
import modules ;
import feature ;
import errors ;
import type ;
import "class" : new ;
import generators ;
import property ;
import virtual-target ;
import numbers ;
import sequence ;
import symlink ;
import property-set ;
# Base generator for creating targets that are modifications of existing
# targets.
#
class modifier : generator
{
rule __init__ (
id
composing ?
: source-types *
: target-types-and-names +
: requirements *
)
{
generator.__init__ $(id) $(composing)
: $(source-types)
: $(target-types-and-names)
: $(requirements) ;
# Guards against infinite recursion when we re-enter generators.construct
# for the same project^name below.
self.targets-in-progress = ;
}
# Wraps the generation of the target to call before and after rules to
# affect the real target.
#
rule run ( project name ? : property-set : sources + )
{
local result ;
local current-target = $(project)^$(name) ;
if ! $(current-target) in $(self.targets-in-progress)
{
# Before modifications...
# Each hook may replace its respective piece of the request.
local project_ =
[ modify-project-before
$(project) $(name) : $(property-set) : $(sources) ] ;
local name_ =
[ modify-name-before
$(project) $(name) : $(property-set) : $(sources) ] ;
local property-set_ =
[ modify-properties-before
$(project) $(name) : $(property-set) : $(sources) ] ;
local sources_ =
[ modify-sources-before
$(project) $(name) : $(property-set) : $(sources) ] ;
project = $(project_) ;
name = $(name_) ;
property-set = $(property-set_) ;
sources = $(sources_) ;
# Generate the real target...
local target-type-p =
[ property.select <main-target-type> : [ $(property-set).raw ] ] ;
# Mark in-progress so the nested construct call does not re-enter us.
self.targets-in-progress += $(current-target) ;
result =
[ generators.construct $(project) $(name)
: $(target-type-p:G=)
: $(property-set)
: $(sources) ] ;
# Pop the in-progress marker (drop the last element).
self.targets-in-progress = $(self.targets-in-progress[1--2]) ;
# After modifications...
result =
[ modify-target-after $(result)
: $(project) $(name)
: $(property-set)
: $(sources) ] ;
}
return $(result) ;
}
# The default hooks below are identity transforms; subclasses override
# the ones they care about.
rule modify-project-before ( project name ? : property-set : sources + )
{
return $(project) ;
}
rule modify-name-before ( project name ? : property-set : sources + )
{
return $(name) ;
}
rule modify-properties-before ( project name ? : property-set : sources + )
{
return $(property-set) ;
}
rule modify-sources-before ( project name ? : property-set : sources + )
{
return $(sources) ;
}
rule modify-target-after ( target : project name ? : property-set : sources + )
{
return $(target) ;
}
# Utility, clones a file-target with optional changes to the name, type and
# project of the target.
# NOTE: This functionality should be moved, and generalized, to
# virtual-targets.
#
rule clone-file-target ( target : new-name ? : new-type ? : new-project ? )
{
# Need a MUCH better way to clone a target...
new-name ?= [ $(target).name ] ;
new-type ?= [ $(target).type ] ;
new-project ?= [ $(target).project ] ;
local result = [ new file-target $(new-name) : $(new-type) : $(new-project) ] ;
# Copy over dependencies, root status and usage requirements.
if [ $(target).dependencies ] { $(result).depends [ $(target).dependencies ] ; }
$(result).root [ $(target).root ] ;
$(result).set-usage-requirements [ $(target).usage-requirements ] ;
# Re-create the originating action against the clone, preserving its
# concrete action class, sources, action name and properties.
local action = [ $(target).action ] ;
local action-class = [ modules.peek $(action) : __class__ ] ;
local ps = [ $(action).properties ] ;
local cloned-action = [ new $(action-class) $(result) :
[ $(action).sources ] : [ $(action).action-name ] : $(ps) ] ;
$(result).action $(cloned-action) ;
return $(result) ;
}
}
# A modifier that changes the name of a target, after it's generated, given a
# regular expression to split the name, and a set of token to insert between the
# split tokens of the name. This also exposes the target for other uses with a
# symlink to the original name (optionally).
#
class name-modifier : modifier
{
rule __init__ ( )
{
# Apply ourselves to EXE targets, for now.
modifier.__init__ name.modifier : : EXE LIB : <name-modify>yes ;
}
# Modifies the name, by cloning the target with the new name.
#
rule modify-target-after ( target : project name ? : property-set : sources + )
{
local result = $(target) ;
local name-mod-p = [ property.select <name-modifier> : [ $(property-set).raw ] ] ;
if $(name-mod-p)
{
local new-name = [ modify-name [ $(target).name ] : $(name-mod-p:G=) ] ;
if $(new-name) != [ $(target).name ]
{
# Only clone when the name actually changed.
result = [ clone-file-target $(target) : $(new-name) ] ;
}
# Optionally also publish the renamed target under its original
# name via a symlink.
local expose-original-as-symlink = [ MATCH "<symlink>(.*)" : $(name-mod-p) ] ;
if $(expose-original-as-symlink)
{
local symlink-t = [ new symlink-targets $(project) : $(name) : [ $(result).name ] ] ;
result = [ $(symlink-t).construct $(result)
: [ property-set.create [ $(property-set).raw ] <symlink-location>build-relative ] ] ;
}
}
return $(result) ;
}
# Do the transformation of the name.
# 'modifier-spec' carries a <match> regex that splits the name into
# groups, plus <N>token entries inserted before the N-th group.
#
rule modify-name ( name : modifier-spec + )
{
local match = [ MATCH "<match>(.*)" : $(modifier-spec) ] ;
local name-parts = [ MATCH $(match) : $(name) ] ;
local insertions = [ sequence.insertion-sort [ MATCH "(<[0123456789]+>.*)" : $(modifier-spec) ] ] ;
local new-name-parts ;
local insert-position = 1 ;
# Interleave insertions with the matched name groups, in order.
while $(insertions)
{
local insertion = [ MATCH "<$(insert-position)>(.*)" : $(insertions[1]) ] ;
if $(insertion)
{
new-name-parts += $(insertion) ;
insertions = $(insertions[2-]) ;
}
new-name-parts += $(name-parts[1]) ;
name-parts = $(name-parts[2-]) ;
insert-position = [ numbers.increment $(insert-position) ] ;
}
# Append any remaining groups, then re-join into a single name.
new-name-parts += $(name-parts) ;
return [ sequence.join $(new-name-parts) ] ;
}
rule optional-properties ( )
{
return <name-modify>yes ;
}
}
# Features consumed by the name-modifier generator above.
feature.feature name-modifier : : free ;
feature.feature name-modify : no yes : incidental optional ;
# Register an instance so the generator participates in target construction.
generators.register [ new name-modifier ] ;
# Translates <version> property to a set of modification properties
# that are applied by the name-modifier, and symlink-modifier.
# E.g. <version>1.2 appends ".1.2" after the first dot-free portion of
# the target name and exposes the original name as a symlink.
#
rule version-to-modifier ( property : properties * )
{
return
<name-modify>yes
<name-modifier><match>"^([^.]*)(.*)" <name-modifier><2>.$(property:G=)
<name-modifier><symlink>yes
;
}
# Hook the translation into feature processing for <version>.
feature.action <version> : version-to-modifier ;

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,481 @@
# Copyright 2003 Dave Abrahams
# Copyright 2003, 2004, 2005, 2006 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
import "class" : new ;
import feature ;
import path ;
import project ;
import property ;
import sequence ;
import set ;
import option ;
# Class for storing a set of properties.
#
# There is 1<->1 correspondence between identity and value. No two instances
# of the class are equal. To maintain this property, the 'property-set.create'
# rule should be used to create new instances. Instances are immutable.
#
# Each property is classified with regard to its effect on build results.
# Incidental properties have no effect on build results, from Boost.Build's
# point of view. Others are either free, or non-free and we refer to non-free
# ones as 'base'. Each property belongs to exactly one of those categories.
#
# It is possible to get a list of properties belonging to each category as
# well as a list of properties with a specific attribute.
#
# Several operations, like and refine and as-path are provided. They all use
# caching whenever possible.
#
class property-set
{
    import errors ;
    import feature ;
    import path ;
    import property ;
    import property-set ;
    import set ;
    # Stores the raw properties and classifies each one into the
    # incidental/free/base, dependency/non-dependency,
    # conditional/non-conditional, propagated and link-incompatible buckets
    # that the accessor rules below report.
    rule __init__ ( raw-properties * )
    {
        self.raw = $(raw-properties) ;
        for local p in $(raw-properties)
        {
            # Every property must be gristed, i.e. '<feature>value'.
            if ! $(p:G)
            {
                errors.error "Invalid property: '$(p)'" ;
            }
            local att = [ feature.attributes $(p:G) ] ;
            # A feature can be both incidental and free, in which case we add it
            # to incidental.
            if incidental in $(att)
            {
                self.incidental += $(p) ;
            }
            else if free in $(att)
            {
                self.free += $(p) ;
            }
            else
            {
                self.base += $(p) ;
            }
            if dependency in $(att)
            {
                self.dependency += $(p) ;
            }
            else
            {
                self.non-dependency += $(p) ;
            }
            # A ':' in the value marks a conditional property.
            if [ MATCH (:) : $(p:G=) ]
            {
                self.conditional += $(p) ;
            }
            else
            {
                self.non-conditional += $(p) ;
            }
            if propagated in $(att)
            {
                self.propagated += $(p) ;
            }
            if link-incompatible in $(att)
            {
                self.link-incompatible += $(p) ;
            }
        }
    }
    # Returns Jam list of stored properties.
    #
    rule raw ( )
    {
        return $(self.raw) ;
    }
    # Returns a bracketed string representation, for diagnostics.
    #
    rule str ( )
    {
        return "[" $(self.raw) "]" ;
    }
    # Returns properties that are neither incidental nor free.
    #
    rule base ( )
    {
        return $(self.base) ;
    }
    # Returns free properties which are not incidental.
    #
    rule free ( )
    {
        return $(self.free) ;
    }
    # Returns dependency properties.
    #
    rule dependency ( )
    {
        return $(self.dependency) ;
    }
    # Returns properties that are not dependency properties.
    #
    rule non-dependency ( )
    {
        return $(self.non-dependency) ;
    }
    # Returns conditional properties (value contains ':').
    #
    rule conditional ( )
    {
        return $(self.conditional) ;
    }
    # Returns non-conditional properties.
    #
    rule non-conditional ( )
    {
        return $(self.non-conditional) ;
    }
    # Returns incidental properties.
    #
    rule incidental ( )
    {
        return $(self.incidental) ;
    }
    # Refines this set with the property-set 'ps' via property.refine and
    # returns the resulting property-set. On conflict the "@error"-headed
    # list from property.refine is returned as-is. Cached per 'ps'.
    #
    rule refine ( ps )
    {
        if ! $(self.refined.$(ps))
        {
            local r = [ property.refine $(self.raw) : [ $(ps).raw ] ] ;
            if $(r[1]) != "@error"
            {
                self.refined.$(ps) = [ property-set.create $(r) ] ;
            }
            else
            {
                self.refined.$(ps) = $(r) ;
            }
        }
        return $(self.refined.$(ps)) ;
    }
    # Returns a property-set with features expanded (cached).
    #
    rule expand ( )
    {
        if ! $(self.expanded)
        {
            self.expanded = [ property-set.create [ feature.expand $(self.raw) ] ] ;
        }
        return $(self.expanded) ;
    }
    # Returns a property-set with composite properties expanded (cached).
    #
    rule expand-composites ( )
    {
        if ! $(self.composites)
        {
            self.composites = [ property-set.create
                [ feature.expand-composites $(self.raw) ] ] ;
        }
        return $(self.composites) ;
    }
    # Evaluates conditional properties in the given 'context' property-set
    # (defaults to this instance) and returns the result (cached per context).
    #
    rule evaluate-conditionals ( context ? )
    {
        context ?= $(__name__) ;
        if ! $(self.evaluated.$(context))
        {
            self.evaluated.$(context) = [ property-set.create
                [ property.evaluate-conditionals-in-context $(self.raw) : [ $(context).raw ] ] ] ;
        }
        return $(self.evaluated.$(context)) ;
    }
    # Returns a property-set holding only the propagated properties (cached).
    #
    rule propagated ( )
    {
        if ! $(self.propagated-ps)
        {
            self.propagated-ps = [ property-set.create $(self.propagated) ] ;
        }
        return $(self.propagated-ps) ;
    }
    # Returns a property-set holding only the link-incompatible properties
    # (cached).
    #
    rule link-incompatible ( )
    {
        if ! $(self.link-incompatible-ps)
        {
            self.link-incompatible-ps =
                [ property-set.create $(self.link-incompatible) ] ;
        }
        return $(self.link-incompatible-ps) ;
    }
    # Returns a property-set with feature.run-actions applied (cached).
    #
    rule run-actions ( )
    {
        if ! $(self.run)
        {
            self.run = [ property-set.create [ feature.run-actions $(self.raw) ] ] ;
        }
        return $(self.run) ;
    }
    # Returns a property-set with default feature values added (cached).
    #
    rule add-defaults ( )
    {
        if ! $(self.defaults)
        {
            self.defaults = [ property-set.create
                [ feature.add-defaults $(self.raw) ] ] ;
        }
        return $(self.defaults) ;
    }
    # Returns the path representation of the base properties (cached).
    #
    rule as-path ( )
    {
        if ! $(self.as-path)
        {
            self.as-path = [ property.as-path $(self.base) ] ;
        }
        return $(self.as-path) ;
    }
    # Computes the path to be used for a target with the given properties.
    # Returns a list of
    # - the computed path
    # - if the path is relative to the build directory, a value of 'true'.
    #
    rule target-path ( )
    {
        if ! $(self.target-path)
        {
            # The <location> feature can be used to explicitly change the
            # location of generated targets.
            local l = [ get <location> ] ;
            if $(l)
            {
                self.target-path = $(l) ;
            }
            else
            {
                local p = [ as-path ] ;
                # Optionally replaced by an MD5 hash (see hash-maybe).
                p = [ property-set.hash-maybe $(p) ] ;
                # A real ugly hack. Boost regression test system requires
                # specific target paths, and it seems that changing it to handle
                # other directory layout is really hard. For that reason, we
                # teach V2 to do the things regression system requires. The
                # value of '<location-prefix>' is prepended to the path.
                local prefix = [ get <location-prefix> ] ;
                if $(prefix)
                {
                    self.target-path = [ path.join $(prefix) $(p) ] ;
                }
                else
                {
                    self.target-path = $(p) ;
                }
                if ! $(self.target-path)
                {
                    self.target-path = . ;
                }
                # The path is relative to build dir.
                self.target-path += true ;
            }
        }
        return $(self.target-path) ;
    }
    # Returns the union of this set's properties and those of 'ps'
    # (cached per 'ps').
    #
    rule add ( ps )
    {
        if ! $(self.added.$(ps))
        {
            self.added.$(ps) = [ property-set.create $(self.raw) [ $(ps).raw ] ] ;
        }
        return $(self.added.$(ps)) ;
    }
    # Convenience wrapper over 'add' accepting raw properties.
    #
    rule add-raw ( properties * )
    {
        return [ add [ property-set.create $(properties) ] ] ;
    }
    # Returns 'true' if this set's link-incompatible properties differ from
    # those of 'ps', and nothing otherwise (cached per 'ps').
    #
    rule link-incompatible-with ( ps )
    {
        if ! $(.li.$(ps))
        {
            local li1 = [ $(__name__).link-incompatible ] ;
            local li2 = [ $(ps).link-incompatible ] ;
            if [ set.equal $(li1) : $(li2) ]
            {
                .li.$(ps) = false ;
            }
            else
            {
                .li.$(ps) = true ;
            }
        }
        if $(.li.$(ps)) = true
        {
            return true ;
        }
        else
        {
            return ;
        }
    }
    # Returns all values of 'feature'.
    #
    rule get ( feature )
    {
        if ! $(self.map-built)
        {
            # For each feature, create a member var and assign all values to it.
            # Since all regular member vars start with 'self', there will be no
            # conflicts between names.
            self.map-built = true ;
            for local v in $(self.raw)
            {
                $(v:G) += $(v:G=) ;
            }
        }
        return $($(feature)) ;
    }
}
# Creates a new 'property-set' instance for the given raw properties or returns
# an already existing one.
#
rule create ( raw-properties * )
{
    # Sort and de-duplicate so that any two equal property sets produce the
    # same cache key, and therefore the same instance.
    raw-properties = [ sequence.unique
        [ sequence.insertion-sort $(raw-properties) ] ] ;
    # ':E=' makes the key well-defined for an empty property list.
    local key = $(raw-properties:J=-:E=) ;
    if ! $(.ps.$(key))
    {
        .ps.$(key) = [ new property-set $(raw-properties) ] ;
    }
    return $(.ps.$(key)) ;
}
NATIVE_RULE property-set : create ;
# Creates a new 'property-set' instance after checking that all properties are
# valid and converting incidental properties into gristed form.
#
rule create-with-validation ( raw-properties * )
{
    # Reject invalid properties up front, then grist implicit values.
    property.validate $(raw-properties) ;
    return [ create [ property.make $(raw-properties) ] ] ;
}
# Creates a property-set from the input given by the user, in the context of
# 'jamfile-module' at 'location'.
#
rule create-from-user-input ( raw-properties * : jamfile-module location )
{
    # Interpret path properties relative to 'location'.
    local specification = [ property.translate-paths $(raw-properties)
        : $(location) ] ;
    # Convert '@rule' values into the portable indirect-rule format.
    specification = [ property.translate-indirect $(specification)
        : $(jamfile-module) ] ;
    local project-id = [ project.attribute $(jamfile-module) id ] ;
    project-id ?= [ path.root $(location) [ path.pwd ] ] ;
    # Bind dependency properties to the enclosing project.
    specification = [ property.translate-dependencies
        $(specification) : $(project-id) : $(location) ] ;
    specification =
        [ property.expand-subfeatures-in-conditions $(specification) ] ;
    specification = [ property.make $(specification) ] ;
    return [ property-set.create $(specification) ] ;
}
# Refines requirements with requirements provided by the user. Specially handles
# "-<property>value" syntax in specification to remove given requirements.
# - parent-requirements -- property-set object with requirements to refine.
# - specification -- string list of requirements provided by the user.
# - project-module -- module to which context indirect features will be
# bound.
# - location -- path to which path features are relative.
#
rule refine-from-user-input ( parent-requirements : specification * :
    project-module : location )
{
    if ! $(specification)
    {
        return $(parent-requirements) ;
    }
    else
    {
        local add-requirements ;
        local remove-requirements ;
        # Split the specification into plain additions and '-'-prefixed
        # removals.
        for local r in $(specification)
        {
            local m = [ MATCH "^-(.*)" : $(r) ] ;
            if $(m)
            {
                remove-requirements += $(m) ;
            }
            else
            {
                add-requirements += $(r) ;
            }
        }
        if $(remove-requirements)
        {
            # Need to create a property set, so that path features and indirect
            # features are translated just like they are in project
            # requirements.
            local ps = [ property-set.create-from-user-input
                $(remove-requirements) : $(project-module) $(location) ] ;
            parent-requirements = [ property-set.create
                [ set.difference [ $(parent-requirements).raw ]
                    : [ $(ps).raw ] ] ] ;
            specification = $(add-requirements) ;
        }
        local requirements = [ property-set.create-from-user-input
            $(specification) : $(project-module) $(location) ] ;
        return [ $(parent-requirements).refine $(requirements) ] ;
    }
}
# Returns a property-set with an empty set of properties.
#
rule empty ( )
{
    # Lazily create and cache the single empty property-set instance.
    if ! $(.empty)
    {
        .empty = [ create ] ;
    }
    return $(.empty) ;
}
# When the 'hash' option is enabled, target paths derived from properties are
# replaced by their MD5 hash (keeps paths short); otherwise they pass through
# unchanged. The rule variant is chosen once, at module load time.
if [ option.get hash : : yes ] = yes
{
    rule hash-maybe ( path ? )
    {
        path ?= "" ;
        return [ MD5 $(path) ] ;
    }
}
else
{
    rule hash-maybe ( path ? )
    {
        return $(path) ;
    }
}

View File

@ -0,0 +1,788 @@
# Copyright 2001, 2002, 2003 Dave Abrahams
# Copyright 2006 Rene Rivera
# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
import errors ;
import feature ;
import indirect ;
import path ;
import regex ;
import string ;
import sequence ;
import set ;
import utility ;
# Refines 'properties' by overriding any non-free and non-conditional properties
# for which a different value is specified in 'requirements'. Returns the
# resulting list of properties.
#
rule refine ( properties * : requirements * )
{
    # Refines 'properties' with 'requirements': for every non-free,
    # non-conditional property whose feature also appears in 'requirements',
    # the requirement's value wins. All requirements are present in the
    # result. Returns the refined, de-duplicated property list.
    local result ;
    # All the 'requirements' elements should be present in the result. Record
    # them so that we can handle 'properties'.
    for local r in $(requirements)
    {
        # Do not consider conditional requirements.
        if ! [ MATCH (:) : $(r:G=) ]
        {
            # Note: cannot use a local variable here, so use an ugly name.
            __require__$(r:G) = $(r:G=) ;
        }
    }
    for local p in $(properties)
    {
        if [ MATCH (:) : $(p:G=) ]
        {
            # Do not modify conditional properties.
            result += $(p) ;
        }
        else if free in [ feature.attributes $(p:G) ]
        {
            # Do not modify free properties.
            result += $(p) ;
        }
        else
        {
            local required-value = $(__require__$(p:G)) ;
            if $(required-value)
            {
                if $(p:G=) != $(required-value)
                {
                    # Override with the required value.
                    result += $(p:G)$(required-value) ;
                }
                else
                {
                    result += $(p) ;
                }
            }
            else
            {
                result += $(p) ;
            }
        }
    }
    # Unset our ugly map.
    for local r in $(requirements)
    {
        __require__$(r:G) = ;
    }
    # NOTE(review): the original declared a 'local error' that was never
    # assigned, making its error-return branch unreachable; the dead code has
    # been removed.
    return [ sequence.unique $(result) $(requirements) ] ;
}
# Removes all conditional properties whose conditions are not met. For those
# with met conditions, removes the condition. Properties in conditions are
# looked up in 'context'.
#
rule evaluate-conditionals-in-context ( properties * : context * )
{
    # Partition the input into conditional properties (containing ':<') and
    # unconditional ones, which are passed through untouched.
    local base ;
    local conditionals ;
    for local p in $(properties)
    {
        if [ MATCH (:<) : $(p) ]
        {
            conditionals += $(p) ;
        }
        else
        {
            base += $(p) ;
        }
    }
    local result = $(base) ;
    for local p in $(conditionals)
    {
        # Separate condition and property.
        local s = [ MATCH (.*):(<.*) : $(p) ] ;
        # Split condition into individual properties.
        local condition = [ regex.split $(s[1]) "," ] ;
        # Evaluate condition.
        if ! [ MATCH (!).* : $(condition:G=) ]
        {
            # Only positive checks
            if $(condition) in $(context)
            {
                result += $(s[2]) ;
            }
        }
        else
        {
            # Have negative checks
            local fail ;
            while $(condition)
            {
                local c = $(condition[1]) ;
                local m = [ MATCH !(.*) : $(c) ] ;
                if $(m)
                {
                    # Negated element: fails if the property IS in context.
                    local p = $(m:G=$(c:G)) ;
                    if $(p) in $(context)
                    {
                        fail = true ;
                        c = ;
                    }
                }
                else
                {
                    # Plain element: fails if the property is NOT in context.
                    if ! $(c) in $(context)
                    {
                        fail = true ;
                        c = ;
                    }
                }
                condition = $(condition[2-]) ;
            }
            if ! $(fail)
            {
                result += $(s[2]) ;
            }
        }
    }
    return $(result) ;
}
# Expands subfeatures inside the condition part of every conditional property
# in 'properties'; unconditional properties are passed through unchanged.
#
rule expand-subfeatures-in-conditions ( properties * )
{
    local result ;
    for local p in $(properties)
    {
        # Split into condition ('s[1]') and property value ('s[2]').
        local s = [ MATCH (.*):(<.*) : $(p) ] ;
        if ! $(s)
        {
            result += $(p) ;
        }
        else
        {
            local condition = $(s[1]) ;
            local value = $(s[2]) ;
            # Condition might include several elements.
            condition = [ regex.split $(condition) "," ] ;
            local e ;
            for local c in $(condition)
            {
                # It is common for a condition to include a toolset or
                # subfeatures that have not been defined. In that case we want
                # the condition to simply 'never be satisfied' and validation
                # would only produce a spurious error so we prevent it by
                # passing 'true' as the second parameter.
                e += [ feature.expand-subfeatures $(c) : true ] ;
            }
            if $(e) = $(condition)
            {
                # (todo)
                # This is just an optimization and possibly a premature one at
                # that.
                # (todo) (12.07.2008.) (Jurko)
                result += $(p) ;
            }
            else
            {
                # Re-join the expanded condition elements with ','.
                result += $(e:J=,):$(value) ;
            }
        }
    }
    return $(result) ;
}
# Helper for as-path, below. Orders properties with the implicit ones first, and
# within the two sections in alphabetical order of feature name.
#
local rule path-order ( x y )
{
    # Implicit (ungristed) values sort before explicit (gristed) ones.
    if $(y:G) && ! $(x:G)
    {
        return true ;
    }
    else if $(x:G) && ! $(y:G)
    {
        return ;
    }
    else
    {
        if ! $(x:G)
        {
            # Both implicit: compare their gristed, expanded forms so the
            # ordering is by feature name.
            x = [ feature.expand-subfeatures $(x) ] ;
            y = [ feature.expand-subfeatures $(y) ] ;
        }
        # Alphabetical comparison of the first elements decides.
        if $(x[1]) < $(y[1])
        {
            return true ;
        }
    }
}
# Abbreviates each dash-separated segment of 'string' and joins the
# shortened segments back together with dashes.
#
local rule abbreviate-dashed ( string )
{
    local shortened ;
    for local segment in [ regex.split $(string) - ]
    {
        shortened += [ string.abbreviate $(segment) ] ;
    }
    return $(shortened:J=-) ;
}
# Returns its argument unchanged; used when path abbreviation is disabled.
local rule identity ( string )
{
    return $(string) ;
}
# Choose the path-component transform once, at load time: abbreviate when
# --abbreviate-paths was given on the command line, pass through otherwise.
if --abbreviate-paths in [ modules.peek : ARGV ]
{
    .abbrev = abbreviate-dashed ;
}
else
{
    .abbrev = identity ;
}
# Returns a path representing the given expanded property set.
#
rule as-path ( properties * )
{
    # Cache key: the properties joined with '-'.
    local entry = .result.$(properties:J=-) ;
    if ! $($(entry))
    {
        # Trim redundancy.
        properties = [ feature.minimize $(properties) ] ;
        # Sort according to path-order.
        properties = [ sequence.insertion-sort $(properties) : path-order ] ;
        local components ;
        for local p in $(properties)
        {
            if $(p:G)
            {
                # Explicit property: render as 'feature-value'.
                local f = [ utility.ungrist $(p:G) ] ;
                p = $(f)-$(p:G=) ;
            }
            # '.abbrev' is either 'identity' or 'abbreviate-dashed'.
            components += [ $(.abbrev) $(p) ] ;
        }
        $(entry) = $(components:J=/) ;
    }
    return $($(entry)) ;
}
# Exit with error if property is not valid.
#
local rule validate1 ( property )
{
    local msg ;
    if $(property:G)
    {
        local feature = $(property:G) ;
        local value = $(property:G=) ;
        if ! [ feature.valid $(feature) ]
        {
            # Ungrist for better error messages.
            feature = [ utility.ungrist $(property:G) ] ;
            msg = "unknown feature '$(feature)'" ;
        }
        else if $(value) && ! free in [ feature.attributes $(feature) ]
        {
            # Non-free features have a fixed value set; check membership.
            feature.validate-value-string $(feature) $(value) ;
        }
        else if ! ( $(value) || ( optional in [ feature.attributes $(feature) ] ) )
        {
            # Ungrist for better error messages.
            feature = [ utility.ungrist $(property:G) ] ;
            msg = "No value specified for feature '$(feature)'" ;
        }
    }
    else
    {
        # Ungristed property: must be a value of some implicit feature.
        local feature = [ feature.implied-feature $(property) ] ;
        feature.validate-value-string $(feature) $(property) ;
    }
    if $(msg)
    {
        errors.error "Invalid property "'$(property:J=" ")'": "$(msg:J=" "). ;
    }
}
# Checks every property in the list; exits with an error on the first
# invalid one.
#
rule validate ( properties * )
{
    for local property in $(properties)
    {
        validate1 $(property) ;
    }
}
# Validates each property set after splitting it into individual properties.
#
rule validate-property-sets ( property-sets * )
{
    for local set in $(property-sets)
    {
        validate [ feature.split $(set) ] ;
    }
}
# Expands any implicit property values in the given property 'specification' so
# they explicitly state their feature.
#
rule make ( specification * )
{
    local result ;
    for local e in $(specification)
    {
        if $(e:G)
        {
            # Already explicit.
            result += $(e) ;
        }
        else if [ feature.is-implicit-value $(e) ]
        {
            # Prefix the value with the grist of its implied feature.
            local feature = [ feature.implied-feature $(e) ] ;
            result += $(feature)$(e) ;
        }
        else
        {
            errors.error "'$(e)' is not a valid property specification" ;
        }
    }
    return $(result) ;
}
# Returns a property set containing all the elements in 'properties' that do not
# have their attributes listed in 'attributes'.
#
# Keeps only those of 'properties' whose feature has none of the given
# 'attributes'.
#
rule remove ( attributes + : properties * )
{
    local kept ;
    for local property in $(properties)
    {
        if ! [ set.intersection $(attributes) : [ feature.attributes $(property:G) ] ]
        {
            kept += $(property) ;
        }
    }
    return $(kept) ;
}
# Returns a property set containing all the elements in 'properties' that have
# their attributes listed in 'attributes'.
#
# Keeps only those of 'properties' whose feature has at least one of the
# given 'attributes'.
#
rule take ( attributes + : properties * )
{
    local chosen ;
    for local property in $(properties)
    {
        if [ set.intersection $(attributes) : [ feature.attributes $(property:G) ] ]
        {
            chosen += $(property) ;
        }
    }
    return $(chosen) ;
}
# Selects properties corresponding to any of the given features.
#
# Returns the subset of 'properties' whose feature is one of 'features'.
#
rule select ( features * : properties * )
{
    local picked ;
    # Grist any feature names given without angle brackets.
    local bare = "" ;
    features = $(bare:G=$(features)) ;
    for local property in $(properties)
    {
        if $(property:G) in $(features)
        {
            picked += $(property) ;
        }
    }
    return $(picked) ;
}
# Returns a modified version of properties with all values of the given feature
# replaced by the given value. If 'value' is empty the feature will be removed.
#
# Replaces every value of 'feature' in 'properties' by 'value'; when 'value'
# is empty the feature's properties are dropped.
#
rule change ( properties * : feature value ? )
{
    local updated ;
    for local property in $(properties)
    {
        if $(property:G) = $(feature)
        {
            # An empty 'value' expands to nothing, removing the property.
            updated += $(value:G=$(feature)) ;
        }
        else
        {
            updated += $(property) ;
        }
    }
    return $(updated) ;
}
# If 'property' is a conditional property, returns the condition and the
# property. E.g. <variant>debug,<toolset>gcc:<inlining>full will become
# <variant>debug,<toolset>gcc <inlining>full. Otherwise, returns an empty
# string.
#
# Splits a conditional property into its condition and property parts;
# returns nothing for an unconditional property.
#
rule split-conditional ( property )
{
    local parts = [ MATCH "(.+):<(.+)" : $(property) ] ;
    if $(parts)
    {
        return $(parts[1]) <$(parts[2]) ;
    }
}
# Interpret all path properties in 'properties' as relative to 'path'. The
# property values are assumed to be in system-specific form, and will be
# translated into normalized form.
#
rule translate-paths ( properties * : path )
{
    local result ;
    for local p in $(properties)
    {
        # Preserve any condition prefix untouched.
        local split = [ split-conditional $(p) ] ;
        local condition = "" ;
        if $(split)
        {
            condition = $(split[1]): ;
            p = $(split[2]) ;
        }
        if path in [ feature.attributes $(p:G) ]
        {
            # A path value may hold several paths joined with '&&'; root each
            # one at 'path' after normalizing it.
            local values = [ regex.split $(p:TG=) "&&" ] ;
            local t ;
            for local v in $(values)
            {
                t += [ path.root [ path.make $(v) ] $(path) ] ;
            }
            t = $(t:J="&&") ;
            result += $(condition)$(t:TG=$(p:G)) ;
        }
        else
        {
            result += $(condition)$(p) ;
        }
    }
    return $(result) ;
}
# Assumes that all feature values that start with '@' are names of rules, used
# in 'context-module'. Such rules can be either local to the module or global.
# Converts such values into 'indirect-rule' format (see indirect.jam), so they
# can be called from other modules. Does nothing for such values that are
# already in the 'indirect-rule' format.
#
rule translate-indirect ( specification * : context-module )
{
    local result ;
    for local p in $(specification)
    {
        # Values starting with '@' name rules.
        local m = [ MATCH ^@(.+) : $(p:G=) ] ;
        if $(m)
        {
            local v ;
            if [ MATCH "^([^%]*)%([^%]+)$" : $(m) ]
            {
                # Rule is already in the 'indirect-rule' format.
                v = $(m) ;
            }
            else
            {
                if ! [ MATCH ".*([.]).*" : $(m) ]
                {
                    # This is an unqualified rule name. The user might want to
                    # set flags on this rule name and toolset.flag
                    # auto-qualifies it. Need to do the same here so flag
                    # setting works. We can arrange for toolset.flag to *not*
                    # auto-qualify the argument but then two rules defined in
                    # two Jamfiles would conflict.
                    m = $(context-module).$(m) ;
                }
                v = [ indirect.make $(m) : $(context-module) ] ;
            }
            # Restore the '@' prefix and the original grist.
            v = @$(v) ;
            result += $(v:G=$(p:G)) ;
        }
        else
        {
            result += $(p) ;
        }
    }
    return $(result) ;
}
# Binds all dependency properties in a list relative to the given project.
# Targets with absolute paths will be left unchanged and targets which have a
# project specified will have the path to the project interpreted relative to
# the specified location.
#
rule translate-dependencies ( specification * : project-id : location )
{
    local result ;
    for local p in $(specification)
    {
        # Preserve any condition prefix untouched.
        local split = [ split-conditional $(p) ] ;
        local condition = "" ;
        if $(split)
        {
            condition = $(split[1]): ;
            p = $(split[2]) ;
        }
        if dependency in [ feature.attributes $(p:G) ]
        {
            # Targets of the form 'project//target'.
            local split-target = [ regex.match (.*)//(.*) : $(p:G=) ] ;
            if $(split-target)
            {
                # Interpret the project part relative to 'location'.
                local rooted = [ path.root [ path.make $(split-target[1]) ]
                    [ path.root $(location) [ path.pwd ] ] ] ;
                result += $(condition)$(p:G)$(rooted)//$(split-target[2]) ;
            }
            else if [ path.is-rooted $(p:G=) ]
            {
                # Absolute targets are left unchanged.
                result += $(condition)$(p) ;
            }
            else
            {
                # Plain target names are bound to the enclosing project.
                result += $(condition)$(p:G)$(project-id)//$(p:G=) ;
            }
        }
        else
        {
            result += $(condition)$(p) ;
        }
    }
    return $(result) ;
}
# Class maintaining a property set -> string mapping.
#
class property-map
{
    import errors ;
    import numbers ;
    import sequence ;
    # Flag ids start at 1; each 'insert' consumes the next id.
    rule __init__ ( )
    {
        self.next-flag = 1 ;
    }
    # Associate 'value' with 'properties'.
    #
    rule insert ( properties + : value )
    {
        self.all-flags += $(self.next-flag) ;
        self.properties.$(self.next-flag) = $(properties) ;
        self.value.$(self.next-flag) = $(value) ;
        self.next-flag = [ numbers.increment $(self.next-flag) ] ;
    }
    # Returns the value associated with 'properties' or any subset of it. If
    # more than one subset has a value assigned to it, returns the value for the
    # longest subset, if it is unique.
    #
    rule find ( properties + )
    {
        return [ find-replace $(properties) ] ;
    }
    # Returns the value associated with 'properties'. If 'value' parameter is
    # given, replaces the found value.
    #
    rule find-replace ( properties + : value ? )
    {
        # First find all matches.
        local matches ;
        local match-ranks ;
        for local i in $(self.all-flags)
        {
            if $(self.properties.$(i)) in $(properties)
            {
                matches += $(i) ;
                match-ranks += [ sequence.length $(self.properties.$(i)) ] ;
            }
        }
        # Prefer the match with the most properties; a tie is an error.
        local best = [ sequence.select-highest-ranked $(matches)
            : $(match-ranks) ] ;
        if $(best[2])
        {
            errors.error "Ambiguous key $(properties:J= :E=)" ;
        }
        local original = $(self.value.$(best)) ;
        if $(value)
        {
            self.value.$(best) = $(value) ;
        }
        return $(original) ;
    }
}
# Unit tests for this module (run via the build system's test mode).
rule __test__ ( )
{
    import assert ;
    import "class" : new ;
    import errors : try catch ;
    import feature ;
    # Local rules must be explicitly re-imported.
    import property : path-order abbreviate-dashed ;
    feature.prepare-test property-test-temp ;
    # Set up a small feature universe for the assertions below.
    feature.feature toolset : gcc : implicit symmetric ;
    feature.subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4 3.0 3.0.1
        3.0.2 : optional ;
    feature.feature define : : free ;
    feature.feature runtime-link : dynamic static : symmetric link-incompatible ;
    feature.feature optimization : on off ;
    feature.feature variant : debug release : implicit composite symmetric ;
    feature.feature rtti : on off : link-incompatible ;
    feature.compose <variant>debug : <define>_DEBUG <optimization>off ;
    feature.compose <variant>release : <define>NDEBUG <optimization>on ;
    validate <toolset>gcc <toolset>gcc-3.0.1 : $(test-space) ;
    assert.true path-order $(test-space) debug <define>foo ;
    assert.false path-order $(test-space) <define>foo debug ;
    assert.true path-order $(test-space) gcc debug ;
    assert.false path-order $(test-space) debug gcc ;
    assert.true path-order $(test-space) <optimization>on <rtti>on ;
    assert.false path-order $(test-space) <rtti>on <optimization>on ;
    assert.result-set-equal <toolset>gcc <rtti>off <define>FOO
        : refine <toolset>gcc <rtti>off
        : <define>FOO
        : $(test-space) ;
    assert.result-set-equal <toolset>gcc <optimization>on
        : refine <toolset>gcc <optimization>off
        : <optimization>on
        : $(test-space) ;
    assert.result-set-equal <toolset>gcc <rtti>off
        : refine <toolset>gcc : <rtti>off : $(test-space) ;
    assert.result-set-equal <toolset>gcc <rtti>off <rtti>off:<define>FOO
        : refine <toolset>gcc : <rtti>off <rtti>off:<define>FOO
        : $(test-space) ;
    assert.result-set-equal <toolset>gcc:<define>foo <toolset>gcc:<define>bar
        : refine <toolset>gcc:<define>foo : <toolset>gcc:<define>bar
        : $(test-space) ;
    assert.result <define>MY_RELEASE
        : evaluate-conditionals-in-context
            <variant>release,<rtti>off:<define>MY_RELEASE
        : <toolset>gcc <variant>release <rtti>off ;
    assert.result debug
        : as-path <optimization>off <variant>debug
        : $(test-space) ;
    assert.result gcc/debug/rtti-off
        : as-path <toolset>gcc <optimization>off <rtti>off <variant>debug
        : $(test-space) ;
    assert.result optmz-off : abbreviate-dashed optimization-off ;
    assert.result rntm-lnk-sttc : abbreviate-dashed runtime-link-static ;
    try ;
        validate <feature>value : $(test-space) ;
    catch "Invalid property '<feature>value': unknown feature 'feature'." ;
    try ;
        validate <rtti>default : $(test-space) ;
    catch \"default\" is not a known value of feature <rtti> ;
    validate <define>WHATEVER : $(test-space) ;
    try ;
        validate <rtti> : $(test-space) ;
    catch "Invalid property '<rtti>': No value specified for feature 'rtti'." ;
    try ;
        validate value : $(test-space) ;
    catch "value" is not a value of an implicit feature ;
    assert.result-set-equal <rtti>on
        : remove free implicit : <toolset>gcc <define>foo <rtti>on : $(test-space) ;
    assert.result-set-equal <include>a
        : select include : <include>a <toolset>gcc ;
    assert.result-set-equal <include>a
        : select include bar : <include>a <toolset>gcc ;
    assert.result-set-equal <include>a <toolset>gcc
        : select include <bar> <toolset> : <include>a <toolset>gcc ;
    assert.result-set-equal <toolset>kylix <include>a
        : change <toolset>gcc <include>a : <toolset> kylix ;
    pm = [ new property-map ] ;
    $(pm).insert <toolset>gcc : o ;
    $(pm).insert <toolset>gcc <os>NT : obj ;
    $(pm).insert <toolset>gcc <os>CYGWIN : obj ;
    assert.equal o : [ $(pm).find <toolset>gcc ] ;
    assert.equal obj : [ $(pm).find <toolset>gcc <os>NT ] ;
    try ;
        $(pm).find <toolset>gcc <os>NT <os>CYGWIN ;
    catch "Ambiguous key <toolset>gcc <os>NT <os>CYGWIN" ;
    # Test ordinary properties.
    assert.result : split-conditional <toolset>gcc ;
    # Test properties with ":".
    assert.result : split-conditional <define>FOO=A::B ;
    # Test conditional feature.
    assert.result-set-equal <toolset>gcc,<toolset-gcc:version>3.0 <define>FOO
        : split-conditional <toolset>gcc,<toolset-gcc:version>3.0:<define>FOO ;
    feature.finish-test property-test-temp ;
}

View File

@ -0,0 +1,13 @@
Copyright 2001, 2002 Dave Abrahams
Copyright 2002 Vladimir Prus
Distributed under the Boost Software License, Version 1.0.
(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
Development code for new build system. To run unit tests for jam code, execute:
bjam --debug --build-system=test
Comprehensive tests require Python. See ../test/readme.txt

View File

@ -0,0 +1,153 @@
# Copyright 2003 Dave Abrahams
# Copyright 2002, 2003, 2004, 2005 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Implements scanners: objects that compute implicit dependencies for
# files, such as includes in C++.
#
# Scanner has a regular expression used to find dependencies, some
# data needed to interpret those dependencies (for example, include
# paths), and a code which actually established needed relationship
# between actual jam targets.
#
# Scanner objects are created by actions, when they try to actualize
# virtual targets, passed to 'virtual-target.actualize' method and are
# then associated with actual targets. It is possible to use
# several scanners for a virtual-target. For example, a single source
# might be used by two compile actions, with different include paths.
# In this case, two different actual targets will be created, each
# having scanner of its own.
#
# Typically, scanners are created from target type and action's
# properties, using the rule 'get' in this module. Directly creating
# scanners is not recommended, because it might create many equivalent
# but different instances, and lead to unneeded duplication of
# actual targets. However, actions can also create scanners in a special
# way, instead of relying on just target type.
import "class" : new ;
import property virtual-target property-set ;
import errors : error ;
# Base scanner class.
class scanner
{
    rule __init__ ( )
    {
    }
    # Returns a pattern to use for scanning. Must be overridden.
    rule pattern ( )
    {
        error "method must be overriden" ;
    }
    # Establish necessary relationship between targets,
    # given the actual target being scanned, and a list of
    # pattern matches in that file. Must be overridden.
    rule process ( target : matches * )
    {
        error "method must be overriden" ;
    }
}
# Registers a new generator class, specifying a set of
# properties relevant to this scanner. Ctor for that class
# should have one parameter: list of properties.
rule register ( scanner-class : relevant-properties * )
{
    # Remember the class and the properties distinguishing its instances;
    # both are consulted by 'get' when creating/caching scanner objects.
    .registered += $(scanner-class) ;
    .relevant-properties.$(scanner-class) = $(relevant-properties) ;
}
# Common scanner class, which can be used when there's only one
# kind of includes (unlike C, where "" and <> includes have different
# search paths).
class common-scanner : scanner
{
    import scanner ;
    rule __init__ ( includes * )
    {
        scanner.__init__ ;
        self.includes = $(includes) ;
    }
    # Registers the found 'matches' as dependencies of 'target' and sets up
    # search paths and scanner propagation for them.
    rule process ( target : matches * : binding )
    {
        local target_path = [ NORMALIZE_PATH $(binding:D) ] ;
        # Included files may not exist; do not complain when unresolved.
        NOCARE $(matches) ;
        INCLUDES $(target) : $(matches) ;
        # Search next to the including file, then on the configured paths.
        SEARCH on $(matches) = $(target_path) $(self.includes:G=) ;
        ISFILE $(matches) ;
        # Included files are scanned with the same scanner settings.
        scanner.propagate $(__name__) : $(matches) : $(target) ;
    }
}
# Returns an instance of previously registered scanner,
# with the specified properties.
rule get ( scanner-class : property-set )
{
    # Returns (creating and caching it on first use) the instance of
    # 'scanner-class' configured with the subset of 'property-set' relevant
    # to that class. Caching on the relevant subset ensures equivalent
    # scanners are shared.
    if ! $(scanner-class) in $(.registered)
    {
        # Typo fix: original message read "unregisted".
        error "attempt to get unregistered scanner" ;
    }
    # Compute (and cache) the relevant property subset. The cache is keyed
    # by both scanner class and property-set: different classes may declare
    # different relevant properties, so a shared per-property-set cache
    # could return the wrong subset.
    local r = $(.rv-cache.$(scanner-class).$(property-set)) ;
    if ! $(r)
    {
        r = [ property-set.create
            [ property.select $(.relevant-properties.$(scanner-class)) :
                [ $(property-set).raw ] ] ] ;
        .rv-cache.$(scanner-class).$(property-set) = $(r) ;
    }
    if ! $(scanner.$(scanner-class).$(r:J=-))
    {
        scanner.$(scanner-class).$(r:J=-) = [ new $(scanner-class) [ $(r).raw ] ] ;
    }
    return $(scanner.$(scanner-class).$(r:J=-)) ;
}
# Installs the specified scanner on actual target 'target'.
rule install ( scanner : target
    vtarget # virtual target from which 'target' was actualized
    )
{
    # Configure bjam's header scanning: the pattern to look for, the object
    # handling matches, and the rule invoked with them.
    HDRSCAN on $(target) = [ $(scanner).pattern ] ;
    SCANNER on $(target) = $(scanner) ;
    HDRRULE on $(target) = scanner.hdrrule ;
    # scanner reflects difference in properties affecting
    # binding of 'target', which will be known when processing
    # includes for it, will give information on how to
    # interpret quoted includes.
    HDRGRIST on $(target) = $(scanner) ;
}
# Propagate scanner setting from 'including-target' to 'targets'.
rule propagate ( scanner : targets * : including-target )
{
    # Copy the header-scanning settings of 'including-target' onto
    # 'targets' so included files are scanned the same way.
    HDRSCAN on $(targets) = [ on $(including-target) return $(HDRSCAN) ] ;
    SCANNER on $(targets) = $(scanner) ;
    HDRRULE on $(targets) = scanner.hdrrule ;
    HDRGRIST on $(targets) = [ on $(including-target) return $(HDRGRIST) ] ;
}
rule hdrrule ( target : matches * : binding )
{
    # Dispatch to the scanner object previously installed on 'target'.
    local scanner = [ on $(target) return $(SCANNER) ] ;
    $(scanner).process $(target) : $(matches) : $(binding) ;
}
# hdrrule must be available at global scope so that it can be invoked
# by header scanning
IMPORT scanner : hdrrule : : scanner.hdrrule ;

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,502 @@
# Copyright 2003 Dave Abrahams
# Copyright 2005 Rene Rivera
# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Support for toolset definition.
import errors ;
import feature ;
import generators ;
import numbers ;
import path ;
import property ;
import regex ;
import sequence ;
import set ;
# Next id for flag settings (presumably consumed by the 'flags' machinery
# below -- confirm against the rest of this module).
.flag-no = 1 ;
.ignore-requirements = ;
# This is used only for testing, to make sure we do not get random extra
# elements in paths.
if --ignore-toolset-requirements in [ modules.peek : ARGV ]
{
    .ignore-requirements = 1 ;
}
# Initializes an additional toolset-like module. First load the 'toolset-module'
# and then calls its 'init' rule with trailing arguments.
#
rule using ( toolset-module : * )
{
    # Load the module, then forward up to eight trailing argument groups to
    # its 'init' rule.
    import $(toolset-module) ;
    $(toolset-module).init $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
}
# Expands subfeatures in each property set, e.g. '<toolset>gcc-3.2' will be
# converted to '<toolset>gcc/<toolset-version>3.2'.
#
local rule normalize-condition ( property-sets * )
{
    local result ;
    for local p in $(property-sets)
    {
        # Split the property set into individual properties once, expand
        # subfeatures, then rejoin with '/'. (The original computed
        # 'split' and then redundantly re-split $(p) on the next line.)
        local split = [ feature.split $(p) ] ;
        local expanded = [ feature.expand-subfeatures $(split) ] ;
        result += $(expanded:J=/) ;
    }
    return $(result) ;
}
# Specifies if the 'flags' rule should check that the invoking module is the
# same as the module we are setting the flag for. 'v' can be either 'checked'
# or 'unchecked'. A subsequent call to 'pop-checking-for-flags-module' will
# restore the setting that was in effect before calling this rule.
#
rule push-checking-for-flags-module ( v )
{
    # Maintained as a stack; the first element is the current setting.
    .flags-module-checking = $(v) $(.flags-module-checking) ;
}

rule pop-checking-for-flags-module ( )
{
    # Drop the most recent setting, restoring the previous one.
    .flags-module-checking = $(.flags-module-checking[2-]) ;
}
# Specifies the flags (variables) that must be set on targets under certain
# conditions, described by arguments.
#
rule flags (
    rule-or-module  # If it contains a dot, should be a rule name. The flags
                    # will be applied when that rule is used to set up build
                    # actions.
                    #
                    # If it does not contain a dot, should be a module name.
                    # The flag will be applied for all rules in that module.
                    # If the module for a rule is different from the calling
                    # module, an error is issued.

    variable-name   # Variable that should be set on target.
    condition * :   # A condition when this flag should be applied. Should be
                    # a set of property sets. If one of those property sets
                    # is contained in the build properties, the flag will be
                    # used. Implied values are not allowed: "<toolset>gcc"
                    # should be used, not just "gcc". Subfeatures, like in
                    # "<toolset>gcc-3.2" are allowed. If left empty, the flag
                    # will be used unconditionally.
                    #
                    # Property sets may use value-less properties ('<a>' vs.
                    # '<a>value') to match absent properties. This allows us
                    # to separately match:
                    #
                    #   <architecture>/<address-model>64
                    #   <architecture>ia64/<address-model>
                    #
                    # where both features are optional. Without this syntax
                    # we would be forced to define "default" values.

    values * :      # The value to add to variable. If <feature> is
                    # specified, then the value of 'feature' will be added.
    unchecked ?     # If value 'unchecked' is passed, will not test that
                    # flags are set for the calling module.
    : hack-hack ?   # For
                    #   flags rule OPTIONS <cxx-abi> : -model ansi
                    # treat <cxx-abi> as condition.
                    # FIXME: ugly hack.
)
{
    local caller = [ CALLER_MODULE ] ;
    if ! [ MATCH ".*([.]).*" : $(rule-or-module) ]
        && [ MATCH "(Jamfile<.*)" : $(caller) ]
    {
        # Unqualified rule name, used inside Jamfile. Most likely used with
        # 'make' or 'notfile' rules. This prevents setting flags on the
        # entire Jamfile module (this will be considered as a rule), but who
        # cares? Probably, the 'flags' rule should be split into 'flags' and
        # 'flags-on-module'.
        rule-or-module = $(caller).$(rule-or-module) ;
    }
    else
    {
        local module_ = [ MATCH "([^.]*).*" : $(rule-or-module) ] ;
        if $(unchecked) != unchecked
            && $(.flags-module-checking[1]) != unchecked
            && $(module_) != $(caller)
        {
            errors.error "Module $(caller) attempted to set flags for module $(module_)" ;
        }
    }

    if $(condition) && ! $(condition:G=) && ! $(hack-hack)
    {
        # We have a condition in the form '<feature>', that is, without a
        # value. That is an older syntax:
        #   flags gcc.link RPATH <dll-path> ;
        # for compatibility, convert it to
        #   flags gcc.link RPATH : <dll-path> ;
        values = $(condition) ;
        condition = ;
    }

    if $(condition)
    {
        property.validate-property-sets $(condition) ;
        condition = [ normalize-condition $(condition) ] ;
    }

    add-flag $(rule-or-module) : $(variable-name) : $(condition) : $(values) ;
}
# Adds a new flag setting with the specified values. Does no checking.
#
local rule add-flag ( rule-or-module : variable-name : condition * : values * )
{
    .$(rule-or-module).flags += $(.flag-no) ;

    # Store all flags for a module. The module is everything before the
    # first dot of 'rule-or-module'.
    local module_ = [ MATCH "([^.]*).*" : $(rule-or-module) ] ;
    .module-flags.$(module_) += $(.flag-no) ;
    # Store flag-no -> rule-or-module mapping.
    .rule-or-module.$(.flag-no) = $(rule-or-module) ;

    .$(rule-or-module).variable.$(.flag-no) += $(variable-name) ;
    .$(rule-or-module).values.$(.flag-no) += $(values) ;
    .$(rule-or-module).condition.$(.flag-no) += $(condition) ;

    .flag-no = [ numbers.increment $(.flag-no) ] ;
}
# Returns the first element of 'property-sets' which is a subset of
# 'properties', or an empty list if no such element exists.
#
rule find-property-subset ( property-sets * : properties * )
{
    # Cut property values off, leaving just the feature names present in the
    # build properties.
    local prop-keys = $(properties:G) ;

    local result ;
    for local s in $(property-sets)
    {
        if ! $(result)
        {
            # Handle value-less properties like '<architecture>' (compare
            # with '<architecture>x86').
            local set = [ feature.split $(s) ] ;

            # Find the set of features that
            # - have no property specified in required property set
            # - are omitted in the build property set.
            local default-props ;
            for local i in $(set)
            {
                # If $(i) is a value-less property it should match the
                # default value of an optional property. See the first line
                # in the example below:
                #
                #   property set    properties      result
                #   <a> <b>foo      <b>foo          match
                #   <a> <b>foo      <a>foo <b>foo   no match
                #   <a>foo <b>foo   <b>foo          no match
                #   <a>foo <b>foo   <a>foo <b>foo   match
                if ! ( $(i:G=) || ( $(i:G) in $(prop-keys) ) )
                {
                    default-props += $(i) ;
                }
            }

            if $(set) in $(properties) $(default-props)
            {
                result = $(s) ;
            }
        }
    }
    return $(result) ;
}
# Returns a value to be added to some flag for some target based on the
# flag's value definition and the given target's property set.
#
rule handle-flag-value ( value * : properties * )
{
    local result ;
    if $(value:G)
    {
        # The value is a feature reference like '<feature>' -- pick up the
        # matching properties from the build property set.
        local matches = [ property.select $(value) : $(properties) ] ;
        for local p in $(matches)
        {
            local att = [ feature.attributes $(p:G) ] ;
            if dependency in $(att)
            {
                # The value of a dependency feature is a target and needs to
                # be actualized.
                result += [ $(p:G=).actualize ] ;
            }
            else if path in $(att) || free in $(att)
            {
                local values ;
                # Treat features with && in the value specially -- each
                # &&-separated element is considered a separate value. This
                # is needed to handle searched libraries or include paths,
                # which may need to be in a specific order.
                if ! [ MATCH (&&) : $(p:G=) ]
                {
                    values = $(p:G=) ;
                }
                else
                {
                    values = [ regex.split $(p:G=) "&&" ] ;
                }
                if path in $(att)
                {
                    # Convert path values to their native (OS-specific)
                    # form.
                    result += [ sequence.transform path.native : $(values) ] ;
                }
                else
                {
                    result += $(values) ;
                }
            }
            else
            {
                result += $(p:G=) ;
            }
        }
    }
    else
    {
        # A literal value -- used as is.
        result += $(value) ;
    }
    return $(result) ;
}
# Given a rule name and a property set, returns a list of interleaved
# variable names and values which must be set on targets for that
# rule/property-set combination.
#
rule set-target-variables-aux ( rule-or-module : property-set )
{
    local result ;
    # Scope the raw property list locally so it does not leak into module
    # scope (the original left this assignment unscoped).
    local properties = [ $(property-set).raw ] ;
    for local f in $(.$(rule-or-module).flags)
    {
        local variable = $(.$(rule-or-module).variable.$(f)) ;
        local condition = $(.$(rule-or-module).condition.$(f)) ;
        local values = $(.$(rule-or-module).values.$(f)) ;
        # A flag applies when it is unconditional or when one of its
        # condition property sets is a subset of the build properties.
        if ! $(condition) ||
            [ find-property-subset $(condition) : $(properties) ]
        {
            local processed ;
            for local v in $(values)
            {
                # The value might be <feature-name> so needs special
                # treatment.
                processed += [ handle-flag-value $(v) : $(properties) ] ;
            }
            for local r in $(processed)
            {
                result += $(variable) $(r) ;
            }
        }
    }

    # Strip away the last dot-separated part and recurse, so flags set on
    # the enclosing module also apply.
    local next = [ MATCH ^(.+)\\.([^\\.])* : $(rule-or-module) ] ;
    if $(next)
    {
        result += [ set-target-variables-aux $(next[1]) : $(property-set) ] ;
    }
    return $(result) ;
}
# Sets, on the given targets, all variable/value pairs computed by
# set-target-variables-aux for the rule/property-set combination. Results
# are memoized per (rule-or-module, property-set) key in .stv.* variables.
#
rule set-target-variables ( rule-or-module targets + : property-set )
{
    # NOTE: the original assigned 'properties = [ $(property-set).raw ]'
    # here without 'local'; the value was never read in this rule (the aux
    # rule computes its own), so the dead assignment has been removed.
    local key = $(rule-or-module).$(property-set) ;
    local settings = $(.stv.$(key)) ;
    if ! $(settings)
    {
        settings = [ set-target-variables-aux $(rule-or-module) :
            $(property-set) ] ;
        # Cache 'none' rather than nothing so an empty result is memoized
        # too.
        if ! $(settings)
        {
            settings = none ;
        }
        .stv.$(key) = $(settings) ;
    }

    if $(settings) != none
    {
        # 'settings' alternates variable names and values.
        local var-name = ;
        for local name-or-value in $(settings)
        {
            if $(var-name)
            {
                $(var-name) on $(targets) += $(name-or-value) ;
                var-name = ;
            }
            else
            {
                var-name = $(name-or-value) ;
            }
        }
    }
}
# Make toolset 'toolset', defined in a module of the same name, inherit from
# 'base'.
# 1. The 'init' rule from 'base' is imported into 'toolset' with full name.
#    Another 'init' is called, which forwards to the base one.
# 2. All generators from 'base' are cloned. The ids are adjusted and the
#    <toolset> property in requires is adjusted too.
# 3. All flags are inherited.
# 4. All rules are imported.
#
rule inherit ( toolset : base )
{
    import $(base) ;
    inherit-generators $(toolset) : $(base) ;
    inherit-flags $(toolset) : $(base) ;
    inherit-rules $(toolset) : $(base) ;
}
# Clones all generators of 'base' for 'toolset', rewriting their ids and
# registering the clones with the given 'properties' (defaulting to
# <toolset>$(toolset)). Generators whose id is in 'generators-to-ignore' are
# skipped.
rule inherit-generators ( toolset properties * : base : generators-to-ignore * )
{
    properties ?= <toolset>$(toolset) ;
    local base-generators = [ generators.generators-for-toolset $(base) ] ;
    for local g in $(base-generators)
    {
        local id = [ $(g).id ] ;
        if ! $(id) in $(generators-to-ignore)
        {
            # Some generator names have multiple periods in their name, so
            # $(id:B=$(toolset)) does not generate the right new-id name.
            # E.g. if id = gcc.compile.c++ then $(id:B=darwin) = darwin.c++,
            # which is not what we want. Manually parse the base and suffix.
            # If there is a better way to do this, I would love to see it.
            # See also the register() rule in the generators module.
            local base = $(id) ;
            local suffix = "" ;
            while $(base:S)
            {
                suffix = $(base:S)$(suffix) ;
                base = $(base:B) ;
            }
            local new-id = $(toolset)$(suffix) ;
            generators.register [ $(g).clone $(new-id) : $(properties) ] ;
        }
    }
}
# Brings all flag definitions from the 'base' toolset into the 'toolset'
# toolset. Flag definitions whose conditions make use of properties in
# 'prohibited-properties' are ignored. Do not confuse property and feature,
# for example <debug-symbols>on and <debug-symbols>off, so blocking one of
# them does not block the other one.
#
# The flag conditions are not altered at all, so if a condition includes a
# name, or version of a base toolset, it will not ever match the inheriting
# toolset. When such flag settings must be inherited, define a rule in base
# toolset module and call it as needed.
#
rule inherit-flags ( toolset : base : prohibited-properties * : prohibited-vars * )
{
    for local f in $(.module-flags.$(base))
    {
        local rule-or-module = $(.rule-or-module.$(f)) ;
        # Keep the flag only when its condition uses none of the prohibited
        # properties (or has no condition) and its variable is not
        # prohibited.
        if ( [ set.difference
                $(.$(rule-or-module).condition.$(f)) :
                $(prohibited-properties) ]
            || ! $(.$(rule-or-module).condition.$(f))
            ) && ( ! $(.$(rule-or-module).variable.$(f)) in $(prohibited-vars) )
        {
            # Rebase the flag's rule (if any) onto the inheriting toolset.
            local rule_ = [ MATCH "[^.]*\.(.*)" : $(rule-or-module) ] ;
            local new-rule-or-module ;
            if $(rule_)
            {
                new-rule-or-module = $(toolset).$(rule_) ;
            }
            else
            {
                new-rule-or-module = $(toolset) ;
            }

            add-flag
                $(new-rule-or-module)
                : $(.$(rule-or-module).variable.$(f))
                : $(.$(rule-or-module).condition.$(f))
                : $(.$(rule-or-module).values.$(f)) ;
        }
    }
}
# Imports, into 'toolset', every rule used by a generator of 'base', under
# both its short name and its toolset-qualified name.
rule inherit-rules ( toolset : base : localize ? )
{
    # It appears that "action" creates a local rule.
    local base-generators = [ generators.generators-for-toolset $(base) ] ;
    local rules ;
    for local g in $(base-generators)
    {
        # Strip the base toolset prefix from each generator's rule name.
        rules += [ MATCH "[^.]*\.(.*)" : [ $(g).rule-name ] ] ;
    }
    rules = [ sequence.unique $(rules) ] ;
    IMPORT $(base) : $(rules) : $(toolset) : $(rules) : $(localize) ;
    IMPORT $(toolset) : $(rules) : : $(toolset).$(rules) ;
}
# Returns the list of global 'toolset requirements'. Those requirements will
# be automatically added to the requirements of any main target.
#
rule requirements ( )
{
    return $(.requirements) ;
}

# Adds elements to the list of global 'toolset requirements'. The
# requirements will be automatically added to the requirements for all main
# targets, as if they were specified literally. For best results, all
# requirements added should be conditional or indirect conditional.
#
rule add-requirements ( requirements * )
{
    # .ignore-requirements is set by the --ignore-toolset-requirements
    # command-line option, used only for testing.
    if ! $(.ignore-requirements)
    {
        .requirements += $(requirements) ;
    }
}
# Unit tests for find-property-subset, including value-less property
# matching.
rule __test__ ( )
{
    import assert ;

    local p = <b>0 <c>1 <d>2 <e>3 <f>4 ;
    assert.result <c>1/<d>2/<e>3 : find-property-subset <c>1/<d>2/<e>3 <a>0/<b>0/<c>1 <d>2/<e>5 <a>9 : $(p) ;
    assert.result : find-property-subset <a>0/<b>0/<c>9/<d>9/<e>5 <a>9 : $(p) ;

    # Value-less properties match only when the feature is absent from the
    # build properties.
    local p-set = <a>/<b> <a>0/<b> <a>/<b>1 <a>0/<b>1 ;
    assert.result <a>/<b> : find-property-subset $(p-set) : ;
    assert.result <a>0/<b> : find-property-subset $(p-set) : <a>0 <c>2 ;
    assert.result <a>/<b>1 : find-property-subset $(p-set) : <b>1 <c>2 ;
    assert.result <a>0/<b>1 : find-property-subset $(p-set) : <a>0 <b>1 ;
}

View File

@ -0,0 +1,425 @@
# Copyright 2002, 2003 Dave Abrahams
# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Deals with target type declaration and defines target class which supports
# typed targets.
import "class" : new ;
import errors ;
import feature ;
import generators : * ;
import project ;
import property ;
import scanner ;
import os ;
# The following import would create a circular dependency:
# project -> project-root -> builtin -> type -> targets -> project
# import targets ;
# These features are optional so they would never get added implicitly. They
# are used only for internal purposes and in all cases we want to use them
# explicitly.
feature.feature target-type : : composite optional ;
feature.feature main-target-type : : optional incidental ;
feature.feature base-target-type : : composite optional free ;
# Registers a target type, possibly derived from a 'base-type'. Providing a
# list of 'suffixes' here is a shortcut for separately calling the
# register-suffixes rule with the given suffixes and the
# set-generated-target-suffix rule with the first given suffix.
#
rule register ( type : suffixes * : base-type ? )
{
    # Type names cannot contain hyphens, because when used as feature-values
    # they would be interpreted as composite features which need to be
    # decomposed.
    switch $(type)
    {
        case *-* : errors.error "type name \"$(type)\" contains a hyphen" ;
    }

    if $(type) in $(.types)
    {
        errors.error "Type $(type) is already registered." ;
    }
    else
    {
        .types += $(type) ;
        .base.$(type) = $(base-type) ;
        .derived.$(base-type) += $(type) ;

        if $(suffixes)-is-not-empty
        {
            # Specify mapping from suffixes to type.
            register-suffixes $(suffixes) : $(type) ;
            # By default generated targets of 'type' will use the first of
            # 'suffixes'. This may be overridden.
            set-generated-target-suffix $(type) : : $(suffixes[1]) ;
        }

        feature.extend target-type : $(type) ;
        feature.extend main-target-type : $(type) ;
        feature.extend base-target-type : $(type) ;

        feature.compose <target-type>$(type) : $(base-type:G=<base-target-type>) ;
        feature.compose <base-target-type>$(type) : <base-target-type>$(base-type) ;

        # We used to declare the main target rule only when a 'main'
        # parameter has been specified. However, it is hard to decide that a
        # type will *never* need a main target rule and so from time to time
        # we needed to make yet another type 'main'. So now a main target
        # rule is defined for each type.
        #
        # Declared 'local' here -- the original assignment was unscoped and
        # leaked into module scope.
        local main-rule-name = [ type-to-rule-name $(type) ] ;
        .main-target-type.$(main-rule-name) = $(type) ;
        IMPORT $(__name__) : main-target-rule : : $(main-rule-name) ;

        # Adding a new derived type affects generator selection so we need
        # to make the generator selection module update any of its cached
        # information related to a new derived type being defined.
        generators.update-cached-information-with-a-new-type $(type) ;
    }
}
# Given a type, returns the name of the main target rule which creates
# targets of that type.
#
rule type-to-rule-name ( type )
{
    # Lowercase everything. Convert underscores to dashes.
    import regex ;
    local n = [ regex.split $(type:L) "_" ] ;
    return $(n:J=-) ;
}

# Given a main target rule name, returns the type for which it creates
# targets (the inverse of type-to-rule-name, as recorded by register).
#
rule type-from-rule-name ( rule-name )
{
    return $(.main-target-type.$(rule-name)) ;
}
# Specifies that files with suffix from 'suffixes' be recognized as targets
# of type 'type'. Issues an error if a different type is already specified
# for any of the suffixes.
#
rule register-suffixes ( suffixes + : type )
{
    for local s in $(suffixes)
    {
        if ! $(.type.$(s))
        {
            .type.$(s) = $(type) ;
        }
        else if $(.type.$(s)) != $(type)
        {
            errors.error Attempting to specify multiple types for suffix
                \"$(s)\" : "Old type $(.type.$(s)), New type $(type)" ;
        }
    }
}
# Returns true iff the given type has been registered.
#
rule registered ( type )
{
    if $(type) in $(.types)
    {
        return true ;
    }
}

# Issues an error if 'type' is unknown.
#
rule validate ( type )
{
    if ! [ registered $(type) ]
    {
        errors.error "Unknown target type $(type)" ;
    }
}
# Sets a scanner class that will be used for this 'type'.
#
rule set-scanner ( type : scanner )
{
    validate $(type) ;
    .scanner.$(type) = $(scanner) ;
}

# Returns a scanner instance appropriate to 'type' and 'properties', or
# nothing when no scanner class has been set for 'type'.
#
rule get-scanner ( type : property-set )
{
    if $(.scanner.$(type))
    {
        return [ scanner.get $(.scanner.$(type)) : $(property-set) ] ;
    }
}
# Returns a base type for the given type or nothing in case the given type
# is not derived.
#
rule base ( type )
{
    return $(.base.$(type)) ;
}

# Returns the given type and all of its base types in order of their
# distance from type.
#
rule all-bases ( type )
{
    local result = $(type) ;
    while $(type)
    {
        type = [ base $(type) ] ;
        result += $(type) ;
    }
    return $(result) ;
}
# Returns the given type and all of its derived types in order of their
# distance from type.
#
rule all-derived ( type )
{
    local result = $(type) ;
    # Depth-first walk of the .derived.* tree recorded by register.
    for local d in $(.derived.$(type))
    {
        result += [ all-derived $(d) ] ;
    }
    return $(result) ;
}

# Returns true if 'type' is equal to 'base' or has 'base' as its direct or
# indirect base.
#
rule is-derived ( type base )
{
    if $(base) in [ all-bases $(type) ]
    {
        return true ;
    }
}

# Returns true if 'type' is either derived from or is equal to 'base'.
#
# TODO: It might be that is-derived and is-subtype were meant to be
# different rules - one returning true for type = base and one not, but as
# currently implemented they are actually the same. Clean this up.
#
rule is-subtype ( type base )
{
    return [ is-derived $(type) $(base) ] ;
}
# Store suffixes for generated targets.
.suffixes = [ new property-map ] ;
# Store prefixes for generated targets (e.g. "lib" for library).
.prefixes = [ new property-map ] ;
# Sets a file suffix to be used when generating a target of 'type' with the
# specified properties. Can be called with no properties if no suffix has
# already been specified for the 'type'. The 'suffix' parameter can be an
# empty string ("") to indicate that no suffix should be used.
#
# Note that this does not cause files with 'suffix' to be automatically
# recognized as being of 'type'. Two different types can use the same suffix
# for their generated files but only one type can be auto-detected for a
# file with that suffix. User should explicitly specify which one using the
# register-suffixes rule.
#
rule set-generated-target-suffix ( type : properties * : suffix )
{
    set-generated-target-ps suffix : $(type) : $(properties) : $(suffix) ;
}

# Change the suffix previously registered for this type/properties
# combination. If suffix is not yet specified, sets it.
#
rule change-generated-target-suffix ( type : properties * : suffix )
{
    change-generated-target-ps suffix : $(type) : $(properties) : $(suffix) ;
}

# Returns the suffix used when generating a file of 'type' with the given
# properties.
#
rule generated-target-suffix ( type : property-set )
{
    return [ generated-target-ps suffix : $(type) : $(property-set) ] ;
}
# Sets a target prefix that should be used when generating targets of 'type'
# with the specified properties. Can be called with empty properties if no
# prefix for 'type' has been specified yet.
#
# The 'prefix' parameter can be empty string ("") to indicate that no prefix
# should be used.
#
# Usage example: library names use the "lib" prefix on unix.
#
rule set-generated-target-prefix ( type : properties * : prefix )
{
    set-generated-target-ps prefix : $(type) : $(properties) : $(prefix) ;
}

# Change the prefix previously registered for this type/properties
# combination. If prefix is not yet specified, sets it.
#
rule change-generated-target-prefix ( type : properties * : prefix )
{
    change-generated-target-ps prefix : $(type) : $(properties) : $(prefix) ;
}

# Returns the prefix used when generating a file of 'type' with the given
# properties.
rule generated-target-prefix ( type : property-set )
{
    return [ generated-target-ps prefix : $(type) : $(property-set) ] ;
}
# Common rules for prefix/suffix provisioning follow.

# Stores 'psval' in the property-map selected by 'ps' ("prefix" or
# "suffix"), keyed on the target type plus the given properties.
local rule set-generated-target-ps ( ps : type : properties * : psval )
{
    properties = <target-type>$(type) $(properties) ;
    # $(.$(ps)es) expands to .prefixes or .suffixes.
    $(.$(ps)es).insert $(properties) : $(psval) ;
}

# Replaces the value previously registered for this type/properties
# combination; if none was registered yet, inserts it.
local rule change-generated-target-ps ( ps : type : properties * : psval )
{
    # Build the lookup key without clobbering 'properties' -- the original
    # overwrote the parameter and then passed it back to
    # set-generated-target-ps, which prepended <target-type>$(type) a second
    # time, storing a key with a duplicated property.
    local key = <target-type>$(type) $(properties) ;
    local prev = [ $(.$(ps)es).find-replace $(key) : $(psval) ] ;
    if ! $(prev)
    {
        set-generated-target-ps $(ps) : $(type) : $(properties) : $(psval) ;
    }
}
# Returns either prefix or suffix (as indicated by 'ps') that should be used
# when generating a target of 'type' with the specified properties.
# Parameter 'ps' can be either "prefix" or "suffix". If no prefix/suffix is
# specified for 'type', returns prefix/suffix for base type, if any.
#
local rule generated-target-ps-real ( ps : type : properties * )
{
    local result ;
    local found ;
    # Walk up the type hierarchy until a prefix/suffix is found.
    while $(type) && ! $(found)
    {
        result = [ $(.$(ps)es).find <target-type>$(type) $(properties) ] ;
        # If the prefix/suffix is explicitly set to an empty string, we
        # consider prefix/suffix to be found. If we were not to compare with
        # "", there would be no way to specify an empty prefix/suffix.
        if $(result)-is-not-empty
        {
            found = true ;
        }
        type = $(.base.$(type)) ;
    }
    # Normalize an explicit empty string ("") to an empty list.
    if $(result) = ""
    {
        result = ;
    }
    return $(result) ;
}

# Memoizing wrapper around generated-target-ps-real, keyed on
# (ps, type, property-set). 'none' marks a cached empty result.
local rule generated-target-ps ( ps : type : property-set )
{
    local key = .$(ps).$(type).$(property-set) ;
    local v = $($(key)) ;
    if ! $(v)
    {
        v = [ generated-target-ps-real $(ps) : $(type) : [ $(property-set).raw ]
            ] ;
        if ! $(v)
        {
            v = none ;
        }
        $(key) = $(v) ;
    }
    if $(v) != none
    {
        return $(v) ;
    }
}
# Returns file type given its name. If there are several dots in filename,
# tries each suffix. E.g. for name of "file.so.1.2" suffixes "2", "1", and
# "so" will be tried.
#
rule type ( filename )
{
    # Case-insensitive filesystems: lowercase the name so suffix lookup
    # matches the registered suffixes.
    if [ os.name ] in NT CYGWIN
    {
        filename = $(filename:L) ;
    }
    local type ;
    while ! $(type) && $(filename:S)
    {
        local suffix = $(filename:S) ;
        # Note: $(suffix) includes the leading dot, so '.type$(suffix)'
        # expands to the '.type.<suffix>' variable set by register-suffixes.
        type = $(.type$(suffix)) ;
        filename = $(filename:S=) ;
    }
    return $(type) ;
}
# Rule used to construct all main targets. Note that this rule gets imported
# into the global namespace under different alias names and the exact target
# type to construct is selected based on the alias used to actually invoke
# this rule.
#
rule main-target-rule ( name : sources * : requirements * : default-build * :
    usage-requirements * )
{
    # First discover the required target type based on the exact alias used
    # to invoke this rule; element 4 of the BACKTRACE frame is treated as
    # the invoking rule's name.
    local bt = [ BACKTRACE 1 ] ;
    local rulename = $(bt[4]) ;

    local target-type = [ type-from-rule-name $(rulename) ] ;

    # This is a circular module dependency and so must be imported here.
    import targets ;
    return [ targets.create-typed-target $(target-type) : [ project.current ] :
        $(name) : $(sources) : $(requirements) : $(default-build) :
        $(usage-requirements) ] ;
}
# Placeholder unit tests for the type module.
rule __test__ ( )
{
    import assert ;

    # TODO: Add tests for all the is-derived, is-base & related type
    # relation checking rules.
}

View File

@ -0,0 +1,161 @@
# Copyright 2002, 2003, 2004, 2006 Vladimir Prus
# Copyright 2008 Jurko Gospodnetic
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
import errors ;
import numbers ;
# Boost.Build core version; verify-engine-version compares these against the
# engine's JAM_VERSION.
major = "2011" ;
minor = "04" ;

# Returns the core version as a display string.
rule boost-build ( )
{
    return "$(major).$(minor)-svn" ;
}
# Prints the Boost.Build version banner after checking that core and engine
# versions match.
rule print ( )
{
    if [ verify-engine-version ]
    {
        ECHO "Boost.Build" [ boost-build ] ;
    }
}
# Compares the engine's JAM_VERSION (major, minor) against this module's
# version, printing warnings on mismatch. Returns true only when they match.
rule verify-engine-version ( )
{
    local v = [ modules.peek : JAM_VERSION ] ;
    if $(v[1]) != $(major) || $(v[2]) != $(minor)
    {
        local argv = [ modules.peek : ARGV ] ;
        local e = $(argv[1]) ;
        # The binding of this module locates the core; strip two path
        # levels to get its root directory.
        local l = [ modules.binding version ] ;
        l = $(l:D) ;
        l = $(l:D) ;
        ECHO "warning: mismatched versions of Boost.Build engine and core" ;
        ECHO "warning: Boost.Build engine ($(e)) is $(v:J=.)" ;
        ECHO "warning: Boost.Build core (at $(l)) is" [ boost-build ] ;
    }
    else
    {
        return true ;
    }
}
# Utility rule for testing whether all elements in a sequence are equal to
# "0". Returns "true" for an empty sequence.
#
local rule is-all-zeroes ( sequence * )
{
    local result = "true" ;
    for local e in $(sequence)
    {
        if $(e) != "0"
        {
            result = "" ;
        }
    }
    return $(result) ;
}
# Returns "true" if the first version is less than the second one.
#
rule version-less ( lhs + : rhs + )
{
    numbers.check $(lhs) ;
    numbers.check $(rhs) ;

    local done ;
    local result ;
    # Compare component by component until one differs or a list runs out.
    while ! $(done) && $(lhs) && $(rhs)
    {
        if [ numbers.less $(lhs[1]) $(rhs[1]) ]
        {
            done = "true" ;
            result = "true" ;
        }
        else if [ numbers.less $(rhs[1]) $(lhs[1]) ]
        {
            done = "true" ;
        }
        else
        {
            lhs = $(lhs[2-]) ;
            rhs = $(rhs[2-]) ;
        }
    }

    # If 'lhs' ran out first, it is less only when the remaining 'rhs'
    # components are not all zeroes (so 3.1.10 is not less than 3.1.10.0.0).
    if ( ! $(done) && ! $(lhs) && ! [ is-all-zeroes $(rhs) ] )
    {
        result = "true" ;
    }

    return $(result) ;
}
# Returns "true" if the current JAM version is at least the given version.
# Results are memoized per joined version tag.
#
rule check-jam-version ( version + )
{
    local version-tag = $(version:J=.) ;
    if ! $(version-tag)
    {
        errors.error Invalid version specifier: : $(version:E="(undefined)") ;
    }

    # The cached value may be "true" or "" -- the -is-not-empty suffix
    # distinguishes "not yet computed" from a cached negative result.
    if ! $(.jam-version-check.$(version-tag))-is-not-empty
    {
        local jam-version = [ modules.peek : JAM_VERSION ] ;
        if ! $(jam-version)
        {
            errors.error "Unable to deduce Boost Jam version. Your Boost Jam"
                "installation is most likely terribly outdated." ;
        }
        .jam-version-check.$(version-tag) = "true" ;
        if [ version-less [ modules.peek : JAM_VERSION ] : $(version) ]
        {
            .jam-version-check.$(version-tag) = "" ;
        }
    }
    return $(.jam-version-check.$(version-tag)) ;
}
# Unit tests for check-jam-version and version-less, covering unequal
# lengths and trailing-zero equivalence.
rule __test__ ( )
{
    import assert ;

    local jam-version = [ modules.peek : JAM_VERSION ] ;
    local future-version = $(jam-version) ;
    future-version += "1" ;

    assert.true check-jam-version $(jam-version) ;
    assert.false check-jam-version $(future-version) ;

    assert.true version-less 0 : 1 ;
    assert.false version-less 0 : 0 ;
    assert.true version-less 1 : 2 ;
    assert.false version-less 1 : 1 ;
    assert.false version-less 2 : 1 ;
    assert.true version-less 3 1 20 : 3 4 10 ;
    assert.false version-less 3 1 10 : 3 1 10 ;
    assert.false version-less 3 4 10 : 3 1 20 ;
    assert.true version-less 3 1 20 5 1 : 3 4 10 ;
    assert.false version-less 3 1 10 5 1 : 3 1 10 ;
    assert.false version-less 3 4 10 5 1 : 3 1 20 ;
    assert.true version-less 3 1 20 : 3 4 10 5 1 ;
    assert.true version-less 3 1 10 : 3 1 10 5 1 ;
    assert.false version-less 3 4 10 : 3 1 20 5 1 ;
    assert.false version-less 3 1 10 : 3 1 10 0 0 ;
    assert.false version-less 3 1 10 0 0 : 3 1 10 ;
    assert.false version-less 3 1 10 0 : 3 1 10 0 0 ;
    assert.false version-less 3 1 10 0 : 03 1 10 0 0 ;
    assert.false version-less 03 1 10 0 : 3 1 10 0 0 ;

    # TODO: Add tests for invalid input data being sent to version-less.
}

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,5 @@
# Copyright 2003 Dave Abrahams
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)

# Register the Boost.Build core located in the current directory.
boost-build . ;

View File

@ -0,0 +1,263 @@
# Copyright 2003 Dave Abrahams
# Copyright 2003, 2005, 2006 Rene Rivera
# Copyright 2003, 2005, 2006 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# First of all, check the jam version.
if $(JAM_VERSION:J="") < 030112
{
    ECHO "error: Boost.Jam version 3.1.12 or later required" ;
    EXIT ;
}

# Verify that the builtin rules we depend on are provided by this engine.
local required-rules = GLOB-RECURSIVELY HAS_NATIVE_RULE ;
for local r in $(required-rules)
{
    if ! $(r) in [ RULENAMES ]
    {
        ECHO "error: builtin rule '$(r)' is not present" ;
        ECHO "error: your version of bjam is likely out of date" ;
        ECHO "error: please get a fresh version from SVN." ;
        EXIT ;
    }
}
# Required native rules, as (module, rule, minimum interface version)
# triples.
local native =
    regex transform 2
    ;
while $(native)
{
    if ! [ HAS_NATIVE_RULE $(native[1]) :
        $(native[2]) :
        $(native[3]) ]
    {
        ECHO "error: missing native rule '$(native[1]).$(native[2])'" ;
        ECHO "error: or interface version of that rule is too low" ;
        ECHO "error: your version of bjam is likely out of date" ;
        ECHO "error: please get a fresh version from SVN." ;
        EXIT ;
    }
    # Advance to the next triple.
    native = $(native[4-]) ;
}
# Check that the builtin .ENVIRON module is present. We don't have a builtin
# to check that a module is present, so we assume that the PATH environment
# variable is always set and verify that the .ENVIRON module has a non-empty
# value of that variable.
module .ENVIRON
{
    local p = $(PATH) $(Path) $(path) ;
    if ! $(p)
    {
        ECHO "error: no builtin module .ENVIRON is found" ;
        ECHO "error: your version of bjam is likely out of date" ;
        ECHO "error: please get a fresh version from SVN." ;
        EXIT ;
    }
}

# Check that @() functionality is present. Similarly to modules, we don't
# have a way to test that directly. Instead we check that $(TMPNAME)
# functionality is present which was added at roughly the same time (more
# precisely it was added just before).
{
    if ! $(TMPNAME)
    {
        ECHO "error: no @() functionality found" ;
        ECHO "error: your version of bjam is likely out of date" ;
        ECHO "error: please get a fresh version from SVN." ;
        EXIT ;
    }
}
# Make sure that the \n escape is available; engines without escape support
# leave "\n" equal to the literal "n".
if "\n" = "n"
{
    if $(OS) = CYGWIN
    {
        ECHO "warning: escape sequences are not supported" ;
        ECHO "warning: this will cause major misbehaviour on cygwin" ;
        ECHO "warning: your version of bjam is likely out of date" ;
        ECHO "warning: please get a fresh version from SVN." ;
    }
}
# Bootstrap the module system. Then bring the import rule into the global
# module.
#
SEARCH on <module@>modules.jam = $(.bootstrap-file:D) ;
module modules { include <module@>modules.jam ; }
IMPORT modules : import : : import ;

{
    # Add module subdirectories to the BOOST_BUILD_PATH, which allows us to
    # make an incremental refactoring step by moving modules to the
    # appropriate subdirectories, thereby achieving some physical separation
    # of different layers without changing all of our code to specify
    # subdirectories in import statements or use an extra level of
    # qualification on imported names.
    local subdirs =
        kernel   # only the most-intrinsic modules: modules, errors
        util     # low-level substrate: string/number handling, etc.
        build    # essential elements of the build system architecture
        tools    # toolsets for handling specific build jobs and targets.
        contrib  # user contributed (unreviewed) modules
        .        # build-system.jam lives here
        ;
    local whereami = [ NORMALIZE_PATH $(.bootstrap-file:DT) ] ;
    BOOST_BUILD_PATH += $(whereami:D)/$(subdirs) ;

    modules.poke .ENVIRON : BOOST_BUILD_PATH : $(BOOST_BUILD_PATH) ;

    modules.poke : EXTRA_PYTHONPATH : $(whereami) ;
}
# Reload the modules, to clean up things. The modules module can tolerate
# being included twice.
#
import modules ;

# Process option plugins first to allow them to prevent loading the rest of
# the build system.
#
import option ;
local dont-build = [ option.process ] ;
# Should we skip building, i.e. loading the build system, according to the
# options processed?
#
if ! $(dont-build)
{
    if ! --python in $(ARGV)
    {
        # Allow users to override the build system file from the
        # command-line (mostly for testing).
        local build-system = [ MATCH --build-system=(.*) : $(ARGV) ] ;
        build-system ?= build-system ;
        # Use the last element in case of multiple command-line options.
        import $(build-system[-1]) ;
    }
    else
    {
        ECHO "Boost.Build V2 Python port (experimental)" ;

        # Define additional interface that is exposed to Python code. Python
        # code will also have access to select bjam builtins in the 'bjam'
        # module, but some things are easier to define outside C.
        module python_interface
        {
            # Loads 'location' as module 'module-name' and exposes its rules
            # globally.
            rule load ( module-name : location )
            {
                USER_MODULE $(module-name) ;
                # Make all rules in the loaded module available in the
                # global namespace, so that we don't have to bother
                # specifying the "right" module when calling from Python.
                module $(module-name)
                {
                    __name__ = $(1) ;
                    include $(2) ;
                    local rules = [ RULENAMES $(1) ] ;
                    IMPORT $(1) : $(rules) : $(1) : $(1).$(rules) ;
                }
            }

            # Reads 'variables' from the given module. $(<) and $(>) are the
            # first and second argument lists.
            rule peek ( module-name ? : variables + )
            {
                module $(<)
                {
                    return $($(>)) ;
                }
            }

            # Sets 'name' to 'value' inside 'module-name'.
            rule set-variable ( module-name : name : value * )
            {
                module $(<)
                {
                    $(>) = $(3) ;
                }
            }

            rule set-top-level-targets ( targets * )
            {
                DEPENDS all : $(targets) ;
            }

            # Invokes 'rulename' with up to seven arguments inside module
            # 'm'.
            rule call-in-module ( m : rulename : * )
            {
                module $(m)
                {
                    return [ $(2) $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ] ;
                }
            }

            rule set-update-action ( action : targets * : sources * : properties * )
            {
                $(action) $(targets) : $(sources) : $(properties) ;
            }

            rule set-update-action-in-module ( m : action : targets * : sources * : properties * )
            {
                module $(m)
                {
                    $(2) $(3) : $(4) : $(5) ;
                }
            }

            # Sets (or appends to) 'variable' on the given targets.
            rule set-target-variable ( targets + : variable : value * : append ? )
            {
                if $(append)
                {
                    $(variable) on $(targets) += $(value) ;
                }
                else
                {
                    $(variable) on $(targets) = $(value) ;
                }
            }

            rule get-target-variable ( targets + : variable )
            {
                return [ on $(targets) return $($(variable)) ] ;
            }

            rule import-rules-from-parent ( parent-module : this-module : user-rules * )
            {
                IMPORT $(parent-module) : $(user-rules) : $(this-module) : $(user-rules) ;
                EXPORT $(this-module) : $(user-rules) ;
            }

            # Records 'includes' as included by 'targets' without requiring
            # the included files to exist.
            rule mark-included ( targets * : includes * )
            {
                NOCARE $(includes) ;
                INCLUDES $(targets) : $(includes) ;
                ISFILE $(includes) ;
            }
        }

        PYTHON_IMPORT_RULE bootstrap : bootstrap : PyBB : bootstrap ;
        modules.poke PyBB : root : [ NORMALIZE_PATH $(.bootstrap-file:DT)/.. ] ;
        module PyBB
        {
            local ok = [ bootstrap $(root) ] ;
            if ! $(ok)
            {
                EXIT ;
            }
        }

        #PYTHON_IMPORT_RULE boost.build.build_system : main : PyBB : main ;
        #module PyBB
        #{
        #    main ;
        #}
    }
}

View File

@ -0,0 +1,420 @@
# Copyright 2001, 2002, 2003 Dave Abrahams
# Copyright 2002, 2005 Rene Rivera
# Copyright 2002, 2003 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Polymorphic class system built on top of core Jam facilities.
#
# Classes are defined by 'class' keywords::
#
# class myclass
# {
# rule __init__ ( arg1 ) # constructor
# {
# self.attribute = $(arg1) ;
# }
#
# rule method1 ( ) # method
# {
# return [ method2 ] ;
# }
#
# rule method2 ( ) # method
# {
# return $(self.attribute) ;
# }
# }
#
# The __init__ rule is the constructor, and sets member variables.
#
# New instances are created by invoking [ new <class> <args...> ]:
#
# local x = [ new myclass foo ] ; # x is a new myclass object
# assert.result foo : [ $(x).method1 ] ; # $(x).method1 returns "foo"
#
# Derived class are created by mentioning base classes in the declaration::
#
# class derived : myclass
# {
# rule __init__ ( arg )
# {
# myclass.__init__ $(arg) ; # call base __init__
#
# }
#
# rule method2 ( ) # method override
# {
# return $(self.attribute)XXX ;
# }
# }
#
# All methods operate virtually, replacing behavior in the base classes. For
# example::
#
# local y = [ new derived foo ] ; # y is a new derived object
# assert.result fooXXX : [ $(y).method1 ] ; # $(y).method1 returns "foo"
#
# Each class instance is its own core Jam module. All instance attributes and
# methods are accessible without additional qualification from within the class
# instance. All rules imported in class declaration, or visible in base classses
# are also visible. Base methods are available in qualified form:
# base-name.method-name. By convention, attribute names are prefixed with
# "self.".
import modules ;
import numbers ;
# Initialize the module that backs a new class instance: record the
# instance's class and its own name as the __class__ / __name__ globals
# of the instance module. $(1)/$(2) are the positional forms of
# 'instance' and 'class'.
rule xinit ( instance : class )
{
module $(instance)
{
__class__ = $(2) ;
__name__ = $(1) ;
}
}
# Create a new instance of 'class'. Allocates a unique module named
# "object(<class>)@<counter>", initializes its __class__/__name__ via
# xinit, links it to the class module with INSTANCE, and invokes the
# class's __init__ with the remaining argument lists. Returns the
# instance (module) name.
rule new ( class args * : * )
{
.next-instance ?= 1 ;
local id = object($(class))@$(.next-instance) ;
xinit $(id) : $(class) ;
INSTANCE $(id) : class@$(class) ;
IMPORT_MODULE $(id) ;
$(id).__init__ $(args) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
# Bump the next unique object name.
.next-instance = [ numbers.increment $(.next-instance) ] ;
# Return the name of the new instance.
return $(id) ;
}
# Return the direct base classes of 'class', as recorded in the
# __bases__ variable of the class@<class> module.
rule bases ( class )
{
module class@$(class)
{
return $(__bases__) ;
}
}
# Return non-empty iff 'class' is derived (directly or transitively)
# from ALL of the given 'bases'. Walks the base-class graph breadth-
# first, guarding against cycles with the 'visited' list.
rule is-derived ( class : bases + )
{
local stack = $(class) ;
local visited found ;
while ! $(found) && $(stack)
{
local top = $(stack[1]) ;
stack = $(stack[2-]) ;
if ! ( $(top) in $(visited) )
{
visited += $(top) ;
stack += [ bases $(top) ] ;
# 'in' with a list on the left succeeds only when every
# element of $(bases) has been visited.
if $(bases) in $(visited)
{
found = true ;
}
}
}
return $(found) ;
}
# Returns true if the 'value' is a class instance.
# Recognizes the "object(<class>)@<counter>" naming scheme produced by
# the 'new' rule above.
#
rule is-instance ( value )
{
return [ MATCH "^(object\\()[^@]+\\)@.*" : $(value) ] ;
}
# Check if the given value is of the given type.
# Non-instances yield an empty (false) result; instances are checked by
# reading their __class__ and testing derivation from 'type'.
#
rule is-a (
instance # The value to check.
: type # The type to test for.
)
{
if [ is-instance $(instance) ]
{
return [ class.is-derived [ modules.peek $(instance) : __class__ ] : $(type) ] ;
}
}
# Argument-type checker invoked by the engine for parameters declared as
# [classname]. Recovers the expected class name from the immediate
# backtrace frame (the rule is registered under the name "[classname]")
# and returns an error message when 'x' is not an instance of it.
local rule typecheck ( x )
{
local class-name = [ MATCH "^\\[(.*)\\]$" : [ BACKTRACE 1 ] ] ;
if ! [ is-a $(x) : $(class-name) ]
{
return "Expected an instance of "$(class-name)" but got \""$(x)"\" for argument" ;
}
}
# Unit tests for the class system: defines a small class hierarchy
# (myclass, derived1, derived2, derived2a, derived3) and exercises
# construction, attribute get/set, virtual dispatch, qualified base
# calls, bases/is-derived/is-instance/is-a, and the typecheck hook.
# Run via --debug / --debug-module=class (see modules.jam).
rule __test__ ( )
{
import assert ;
import "class" : new ;
# This will be the construction function for a class called 'myclass'.
#
class myclass
{
import assert ;
rule __init__ ( x_ * : y_ * )
{
# Set some instance variables.
x = $(x_) ;
y = $(y_) ;
foo += 10 ;
}
rule set-x ( newx * )
{
x = $(newx) ;
}
rule get-x ( )
{
return $(x) ;
}
rule set-y ( newy * )
{
y = $(newy) ;
}
rule get-y ( )
{
return $(y) ;
}
rule f ( )
{
return [ g $(x) ] ;
}
rule g ( args * )
{
if $(x) in $(y)
{
return $(x) ;
}
else if $(y) in $(x)
{
return $(y) ;
}
else
{
return ;
}
}
rule get-class ( )
{
return $(__class__) ;
}
rule get-instance ( )
{
return $(__name__) ;
}
rule invariant ( )
{
assert.equal 1 : 1 ;
}
rule get-foo ( )
{
return $(foo) ;
}
}
# class myclass ;
class derived1 : myclass
{
rule __init__ ( z_ )
{
myclass.__init__ $(z_) : X ;
z = $(z_) ;
}
# Override g.
#
rule g ( args * )
{
return derived1.g ;
}
rule h ( )
{
return derived1.h ;
}
rule get-z ( )
{
return $(z) ;
}
# Check that 'assert.equal' visible in base class is visible here.
#
rule invariant2 ( )
{
assert.equal 2 : 2 ;
}
# Check that 'assert.variable-not-empty' visible in base class is
# visible here.
#
rule invariant3 ( )
{
local v = 10 ;
assert.variable-not-empty v ;
}
}
# class derived1 : myclass ;
class derived2 : myclass
{
rule __init__ ( )
{
myclass.__init__ 1 : 2 ;
}
# Override g.
#
rule g ( args * )
{
return derived2.g ;
}
# Test the ability to call base class functions with qualification.
#
rule get-x ( )
{
return [ myclass.get-x ] ;
}
}
# class derived2 : myclass ;
class derived2a : derived2
{
rule __init__
{
derived2.__init__ ;
}
}
# class derived2a : derived2 ;
# The [derived2] annotation triggers the 'typecheck' hook above.
local rule expect_derived2 ( [derived2] x ) { }
local a = [ new myclass 3 4 5 : 4 5 ] ;
local b = [ new derived1 4 ] ;
local b2 = [ new derived1 4 ] ;
local c = [ new derived2 ] ;
local d = [ new derived2 ] ;
local e = [ new derived2a ] ;
expect_derived2 $(d) ;
expect_derived2 $(e) ;
# Argument checking is set up to call exit(1) directly on failure, and we
# can not hijack that with try, so we should better not do this test by
# default. We could fix this by having errors look up and invoke the EXIT
# rule instead; EXIT can be hijacked (;-)
if --fail-typecheck in [ modules.peek : ARGV ]
{
try ;
{
expect_derived2 $(a) ;
}
catch
"Expected an instance of derived2 but got" instead
;
}
#try ;
#{
# new bad_subclass ;
#}
#catch
# bad_subclass.bad_subclass failed to call base class constructor myclass.__init__
# ;
#try ;
#{
# class bad_subclass ;
#}
#catch bad_subclass has already been declared ;
assert.result 3 4 5 : $(a).get-x ;
assert.result 4 5 : $(a).get-y ;
assert.result 4 : $(b).get-x ;
assert.result X : $(b).get-y ;
assert.result 4 : $(b).get-z ;
assert.result 1 : $(c).get-x ;
assert.result 2 : $(c).get-y ;
assert.result 4 5 : $(a).f ;
assert.result derived1.g : $(b).f ;
assert.result derived2.g : $(c).f ;
assert.result derived2.g : $(d).f ;
assert.result 10 : $(b).get-foo ;
$(a).invariant ;
$(b).invariant2 ;
$(b).invariant3 ;
# Check that the __class__ attribute is getting properly set.
assert.result myclass : $(a).get-class ;
assert.result derived1 : $(b).get-class ;
assert.result $(a) : $(a).get-instance ;
$(a).set-x a.x ;
$(b).set-x b.x ;
$(c).set-x c.x ;
$(d).set-x d.x ;
assert.result a.x : $(a).get-x ;
assert.result b.x : $(b).get-x ;
assert.result c.x : $(c).get-x ;
assert.result d.x : $(d).get-x ;
# Multiple inheritance.
class derived3 : derived1 derived2
{
rule __init__ ( )
{
}
}
assert.result : bases myclass ;
assert.result myclass : bases derived1 ;
assert.result myclass : bases derived2 ;
assert.result derived1 derived2 : bases derived3 ;
assert.true is-derived derived1 : myclass ;
assert.true is-derived derived2 : myclass ;
assert.true is-derived derived3 : derived1 ;
assert.true is-derived derived3 : derived2 ;
assert.true is-derived derived3 : derived1 derived2 myclass ;
assert.true is-derived derived3 : myclass ;
assert.false is-derived myclass : derived1 ;
assert.true is-instance $(a) ;
assert.false is-instance bar ;
assert.true is-a $(a) : myclass ;
assert.true is-a $(c) : derived2 ;
assert.true is-a $(d) : myclass ;
assert.false is-a literal : myclass ;
}

View File

@ -0,0 +1,274 @@
# Copyright 2003 Dave Abrahams
# Copyright 2004 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Print a stack backtrace leading to this rule's caller. Each argument
# represents a line of output to be printed after the first line of the
# backtrace.
#
rule backtrace ( skip-frames prefix messages * : * )
{
# BACKTRACE yields 4 elements per frame (file line module rulename);
# jam list indices are 1-based, so skipping k frames means keeping
# elements from position 4k+1 on. frame-skips[k] = 4k+1.
local frame-skips = 5 9 13 17 21 25 29 33 37 41 45 49 53 57 61 65 69 73 77 81 ;
local drop-elements = $(frame-skips[$(skip-frames)]) ;
if ! ( $(skip-frames) in 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 )
{
ECHO "warning: backtrace doesn't support skipping $(skip-frames) frames;"
"using 1 instead." ;
# 5 corresponds to skipping 1 frame.
drop-elements = 5 ;
}
local args = $(.args) ;
if $(.user-modules-only)
{
# Condensed form: just the nearest user-code location.
local bt = [ nearest-user-location ] ;
ECHO "$(prefix) at $(bt) " ;
for local n in $(args)
{
# The "-is-not-empty" suffix makes the condition expand to
# nothing (false) when $($(n)) is an empty list.
if $($(n))-is-not-empty
{
ECHO $(prefix) $($(n)) ;
}
}
}
else
{
# Get the whole backtrace, then drop the initial quadruples
# corresponding to the frames that must be skipped.
local bt = [ BACKTRACE ] ;
bt = $(bt[$(drop-elements)-]) ;
while $(bt)
{
# Strip the trailing "." from the module name.
local m = [ MATCH ^(.+)\\.$ : $(bt[3]) ] ;
ECHO $(bt[1]):$(bt[2]): "in" $(bt[4]) "from module" $(m) ;
# The first time through, print each argument on a separate line.
for local n in $(args)
{
if $($(n))-is-not-empty
{
ECHO $(prefix) $($(n)) ;
}
}
args = ; # Kill args so that this never happens again.
# Move on to the next quadruple.
bt = $(bt[5-]) ;
}
}
}
# Names of the argument-list variables of the 'error' rule; also used
# as suffixes for the per-argument .last-error-* variables. Note that
# ".last-error-$(.args)" below expands to one variable name per element
# (.last-error-messages, .last-error-2, ...), so a single statement
# touches all of them.
.args ?= messages 2 3 4 5 6 7 8 9 ;
# Stack of "true" markers; non-empty while inside a try block.
.disabled ?= ;
.last-error-$(.args) ?= ;
# try-catch --
#
# This is not really an exception-handling mechanism, but it does allow us to
# perform some error-checking on our error-checking. Errors are suppressed after
# a try, and the first one is recorded. Use catch to check that the error
# message matched expectations.
# Begin looking for error messages.
#
rule try ( )
{
.disabled += true ;
.last-error-$(.args) = ;
}
# Stop looking for error messages; generate an error if an argument of messages
# is not found in the corresponding argument in the error call.
#
rule catch ( messages * : * )
{
.disabled = $(.disabled[2-]) ; # Pop the stack.
import sequence ;
if ! $(.last-error-$(.args))-is-not-empty
{
# No error was recorded since the matching 'try'.
error-skip-frames 3 expected an error, but none occurred ;
}
else
{
# Compare each argument list of this rule against the recorded
# argument list of the same position in the error call.
for local n in $(.args)
{
if ! $($(n)) in $(.last-error-$(n))
{
local v = [ sequence.join $($(n)) : " " ] ;
v ?= "" ;
local joined = [ sequence.join $(.last-error-$(n)) : " " ] ;
.last-error-$(.args) = ;
error-skip-frames 3 expected \"$(v)\" in argument $(n) of error
: got \"$(joined)\" instead ;
}
}
}
}
# Core error reporting: if not suppressed by a 'try', print a backtrace
# (dropping 'skip-frames' internal frames) and terminate via EXIT.
# When suppressed, record the first error's argument lists into the
# .last-error-* variables for a later 'catch' to inspect.
rule error-skip-frames ( skip-frames messages * : * )
{
if ! $(.disabled)
{
backtrace $(skip-frames) error: $(messages) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
EXIT ;
}
else if ! $(.last-error-$(.args))
{
# Only the FIRST error inside a try block is recorded.
for local n in $(.args)
{
# Add an extra empty string so that we always have
# something in the event of an error
.last-error-$(n) = $($(n)) "" ;
}
}
}
# Honor the --no-error-backtrace command-line option: when set, 'error'
# prints plain messages without a stack trace.
if --no-error-backtrace in [ modules.peek : ARGV ]
{
.no-error-backtrace = true ;
}
# Print an error message with a stack backtrace and exit.
# With --no-error-backtrace, prints the raw message lines and exits
# without a trace; otherwise defers to error-skip-frames.
#
rule error ( messages * : * )
{
if $(.no-error-backtrace)
{
# Print each argument on a separate line.
# NOTE(review): 'first-printed' is not declared local here, so it
# leaks into the enclosing dynamic scope — confirm intentional.
for local n in $(.args)
{
if $($(n))-is-not-empty
{
if ! $(first-printed)
{
ECHO error: $($(n)) ;
first-printed = true ;
}
else
{
ECHO $($(n)) ;
}
}
}
EXIT ;
}
else
{
error-skip-frames 3 $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
}
}
# Same as 'error', but the generated backtrace will include only user files.
# NOTE(review): .user-modules-only stays set for the rest of the run —
# subsequent backtraces from this module will also be condensed.
#
rule user-error ( messages * : * )
{
.user-modules-only = 1 ;
error-skip-frames 3 $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
}
# Print a warning message with a stack backtrace. Unlike 'error', this
# does not exit and is not suppressed by try/catch.
#
rule warning
{
backtrace 2 warning: $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
}
# Convert an arbitrary argument list into a list with ":" separators and quoted
# elements representing the same information. This is mostly useful for
# formatting descriptions of arguments with which a rule was called when
# reporting an error.
#
rule lol->list ( * )
{
local result ;
local remaining = 1 2 3 4 5 6 7 8 9 ;
# $($(remaining)) expands every still-pending positional list at
# once; the loop stops when all remaining argument lists are empty.
while $($(remaining))
{
local n = $(remaining[1]) ;
remaining = $(remaining[2-]) ;
if $(n) != 1
{
result += ":" ;
}
result += \"$($(n))\" ;
}
return $(result) ;
}
# Return the file:line for the nearest entry in backtrace which correspond to a
# user module.
#
rule nearest-user-location ( )
{
local bt = [ BACKTRACE ] ;
local result ;
# Walk the frames (4 elements each) until one whose file name looks
# like a user project file is found.
while $(bt) && ! $(result)
{
local m = [ MATCH ^(.+)\\.$ : $(bt[3]) ] ;
# NOTE(review): this pattern appears to have unbalanced parentheses
# (4 opens, 3 closes) — it matches the upstream Boost.Build source
# byte-for-byte, so presumably jam's MATCH tolerates it; verify
# before "fixing".
local user-modules = ([Jj]amroot(.jam|.v2|)|([Jj]amfile(.jam|.v2|)|user-config.jam|site-config.jam|project-root.jam) ;
if [ MATCH $(user-modules) : $(bt[1]:D=) ]
{
result = $(bt[1]):$(bt[2]) ;
}
bt = $(bt[5-]) ;
}
return $(result) ;
}
# If optimized rule is available in Jam, use it.
# Redefines nearest-user-location to delegate to the builtin, which
# returns a (file line) pair.
if NEAREST_USER_LOCATION in [ RULENAMES ]
{
rule nearest-user-location ( )
{
local r = [ NEAREST_USER_LOCATION ] ;
return $(r[1]):$(r[2]) ;
}
}
# Unit tests for the try/catch/error machinery itself.
rule __test__ ( )
{
# Show that we can correctly catch an expected error.
try ;
{
error an error occurred : somewhere ;
}
catch an error occurred : somewhere ;
# Show that unexpected errors generate real errors.
try ;
{
try ;
{
error an error occurred : somewhere ;
}
catch an error occurred : nowhere ;
}
catch expected \"nowhere\" in argument 2 ;
# Show that not catching an error where one was expected is an error.
try ;
{
try ;
{
}
catch ;
}
catch expected an error, but none occurred ;
}

View File

@ -0,0 +1,354 @@
# Copyright 2003 Dave Abrahams
# Copyright 2003, 2005 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Module-global bookkeeping. '?=' only assigns when unset, so these
# survive the module being included twice (see bootstrap).
# Essentially an include guard; ensures that no module is loaded multiple times.
.loaded ?= ;
# A list of modules currently being loaded for error reporting of circular
# dependencies.
.loading ?= ;
# A list of modules needing to be tested using their __test__ rule.
.untested ?= ;
# A list of modules which have been tested using their __test__ rule.
.tested ?= ;
# Runs internal Boost Build unit tests for the specified module. The module's
# __test__ rule is executed in its own module to eliminate any inadvertent
# effects of testing module dependencies (such as assert) on the module itself.
#
local rule run-module-test ( m )
{
local tested-modules = [ modules.peek modules : .tested ] ;
# NOTE(review): $(argv) in the next condition is not set locally yet;
# it is read from the caller ('load' defines a local argv before
# calling us) via jam's dynamic scoping — confirm all callers do so.
if ( ! $(m) in $(tested-modules) ) # Avoid recursive test invocations.
&& ( ( --debug in $(argv) ) || ( --debug-module=$(m) in $(argv) ) )
{
modules.poke modules : .tested : $(tested-modules) $(m) ;
if ! ( __test__ in [ RULENAMES $(m) ] )
{
local argv = [ peek : ARGV ] ;
if ! ( --quiet in $(argv) ) && ( --debug-tests in $(argv) )
{
ECHO warning: no __test__ rule defined in module $(m) ;
}
}
else
{
if ! ( --quiet in $(argv) )
{
ECHO testing module $(m)... ;
}
# Run __test__ in a scratch module that sees all of $(m)'s
# rules; LOCALIZE makes __test__ behave as if defined there.
local test-module = __test-$(m)__ ;
IMPORT $(m) : [ RULENAMES $(m) ] : $(test-module) : [ RULENAMES $(m) ] ;
IMPORT $(m) : __test__ : $(test-module) : __test__ : LOCALIZE ;
module $(test-module)
{
__test__ ;
}
}
}
}
# Return the binding of the given module.
# The binding (the path the module was loaded from) is recorded by
# record-binding into the $(module).__binding__ variable.
#
rule binding ( module )
{
return $($(module).__binding__) ;
}
# Sets the module-local value of a variable. This is the most reliable way to
# set a module-local variable in a different module; it eliminates issues of
# name shadowing due to dynamic scoping.
# $(<)/$(>)/$(3) are the positional forms of the three argument lists;
# an absent module-name targets the global module.
#
rule poke ( module-name ? : variables + : value * )
{
module $(<)
{
$(>) = $(3) ;
}
}
# Returns the module-local value of a variable. This is the most reliable way to
# examine a module-local variable in a different module; it eliminates issues of
# name shadowing due to dynamic scoping.
# An absent module-name reads from the global module.
#
rule peek ( module-name ? : variables + )
{
module $(<)
{
return $($(>)) ;
}
}
# Call the given rule locally in the given module. Use this for rules accepting
# rule names as arguments, so that the passed rule may be invoked in the context
# of the rule's caller (for example, if the rule accesses module globals or is a
# local rule). Note that rules called this way may accept at most 8 parameters.
#
rule call-in ( module-name ? : rule-name args * : * )
{
module $(module-name)
{
# $(2) is "rule-name args...": the first element names the rule,
# the rest become its first argument list.
return [ $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ] ;
}
}
# Given a possibly qualified rule name and arguments, remove any initial module
# qualification from the rule and invoke it in that module. If there is no
# module qualification, the rule is invoked in the global module. Note that
# rules called this way may accept at most 8 parameters.
#
rule call-locally ( qualified-rule-name args * : * )
{
# Split "module.rule" into its two parts; MATCH yields nothing for
# unqualified names, in which case the whole name is the rule.
local module-rule = [ MATCH (.*)\\.(.*) : $(qualified-rule-name) ] ;
local rule-name = $(module-rule[2]) ;
rule-name ?= $(qualified-rule-name) ;
# We pass only 8 parameters here since Boost Jam allows at most 9 rule
# parameter positions and the call-in rule already uses up the initial
# position for the module name.
return [ call-in $(module-rule[1]) : $(rule-name) $(args) : $(2) : $(3) :
$(4) : $(5) : $(6) : $(7) : $(8) ] ;
}
# Load the indicated module if it is not already loaded.
#
rule load (
module-name # Name of module to load. Rules will be defined in this
# module.
: filename ? # (partial) path to file; Defaults to $(module-name).jam.
: search * # Directories in which to search for filename. Defaults to
# $(BOOST_BUILD_PATH).
)
{
# Avoid loading modules twice.
if ! ( $(module-name) in $(.loaded) )
{
filename ?= $(module-name).jam ;
# Mark the module loaded so we do not try to load it recursively.
.loaded += $(module-name) ;
# Suppress tests if any module loads are already in progress.
local suppress-test = $(.loading[1]) ;
# Push this module on the loading stack.
.loading += $(module-name) ;
# Remember that it is untested.
.untested += $(module-name) ;
# Insert the new module's __name__ and __file__ globals.
poke $(module-name) : __name__ : $(module-name) ;
poke $(module-name) : __file__ : $(filename) ;
module $(module-name)
{
# Add some grist so that the module will have a unique target name.
local module-target = $(__file__:G=module@) ;
local search = $(3) ;
search ?= [ modules.peek : BOOST_BUILD_PATH ] ;
SEARCH on $(module-target) = $(search) ;
# BINDRULE lets us capture the path the file was found at.
BINDRULE on $(module-target) = modules.record-binding ;
include $(module-target) ;
# Allow the module to see its own names with full qualification.
local rules = [ RULENAMES $(__name__) ] ;
IMPORT $(__name__) : $(rules) : $(__name__) : $(__name__).$(rules) ;
}
# No binding recorded means the include found nothing. The modules
# module itself is exempt since it is loaded specially.
if $(module-name) != modules && ! [ binding $(module-name) ]
{
import errors ;
errors.error "Could not find module" $(module-name) in $(search) ;
}
# Pop the loading stack. Must happen before testing or we will run into
# a circular loading dependency.
.loading = $(.loading[1--2]) ;
# Run any pending tests if this is an outer load.
if ! $(suppress-test)
{
# 'argv' is also read by run-module-test via dynamic scoping.
local argv = [ peek : ARGV ] ;
for local m in $(.untested)
{
run-module-test $(m) ;
}
.untested = ;
}
}
else if $(module-name) in $(.loading)
{
import errors ;
errors.error loading \"$(module-name)\"
: circular module loading dependency:
: $(.loading)" ->" $(module-name) ;
}
}
# This helper is used by load (above) to record the binding (path) of each
# loaded module.
# Invoked by the engine via BINDRULE; stores the path in the
# <module>.__binding__ variable of the module currently on top of the
# loading stack, which is what the 'binding' rule reads back.
#
rule record-binding ( module-target : binding )
{
$(.loading[-1]).__binding__ = $(binding) ;
}
# Transform each path in the list, with all backslashes converted to forward
# slashes and all detectable redundancy removed. Something like this is probably
# needed in path.jam, but I am not sure of that, I do not understand it, and I
# am not ready to move all of path.jam into the kernel.
# The :T modifier converts backslashes; NORMALIZE_PATH collapses
# "." and ".." components.
#
local rule normalize-raw-paths ( paths * )
{
local result ;
for p in $(paths:T)
{
result += [ NORMALIZE_PATH $(p) ] ;
}
return $(result) ;
}
# Current working directory, used to root relative search paths.
.cwd = [ PWD ] ;
# Load the indicated module and import rule names into the current module. Any
# members of rules-opt will be available without qualification in the caller's
# module. Any members of rename-opt will be taken as the names of the rules in
# the caller's module, in place of the names they have in the imported module.
# If rules-opt = '*', all rules from the indicated module are imported into the
# caller's module. If rename-opt is supplied, it must have the same number of
# elements as rules-opt.
#
rule import ( module-names + : rules-opt * : rename-opt * )
{
# Renaming only makes sense when specific rules are named.
if ( $(rules-opt) = * || ! $(rules-opt) ) && $(rename-opt)
{
import errors ;
errors.error "Rule aliasing is only available for explicit imports." ;
}
if $(module-names[2]) && ( $(rules-opt) || $(rename-opt) )
{
import errors ;
errors.error "When loading multiple modules, no specific rules or"
"renaming is allowed" ;
}
local caller = [ CALLER_MODULE ] ;
# Import each specified module
for local m in $(module-names)
{
if ! $(m) in $(.loaded)
{
# If the importing module isn't already in the BOOST_BUILD_PATH,
# prepend it to the path. We don't want to invert the search order
# of modules that are already there.
local caller-location ;
if $(caller)
{
caller-location = [ binding $(caller) ] ;
caller-location = $(caller-location:D) ;
caller-location = [ normalize-raw-paths $(caller-location:R=$(.cwd)) ] ;
}
local search = [ peek : BOOST_BUILD_PATH ] ;
search = [ normalize-raw-paths $(search:R=$(.cwd)) ] ;
if $(caller-location) && ! $(caller-location) in $(search)
{
search = $(caller-location) $(search) ;
}
load $(m) : : $(search) ;
}
# Make qualified names ($(m).rule) visible in the caller.
IMPORT_MODULE $(m) : $(caller) ;
if $(rules-opt)
{
local source-names ;
if $(rules-opt) = *
{
local all-rules = [ RULENAMES $(m) ] ;
source-names = $(all-rules) ;
}
else
{
source-names = $(rules-opt) ;
}
local target-names = $(rename-opt) ;
target-names ?= $(source-names) ;
IMPORT $(m) : $(source-names) : $(caller) : $(target-names) ;
}
}
}
# Define exported copies in $(target-module) of all rules exported from
# $(source-module). Also make them available in the global module with
# qualification, so that it is just as though the rules were defined originally
# in $(target-module).
# LOCALIZE makes the copies behave as if defined in the target module.
#
rule clone-rules ( source-module target-module )
{
local rules = [ RULENAMES $(source-module) ] ;
IMPORT $(source-module) : $(rules) : $(target-module) : $(rules) : LOCALIZE ;
EXPORT $(target-module) : $(rules) ;
IMPORT $(target-module) : $(rules) : : $(target-module).$(rules) ;
}
# These rules need to be available in all modules to implement module loading
# itself and other fundamental operations.
# They become visible everywhere under their qualified modules.* names.
local globalize = peek poke record-binding ;
IMPORT modules : $(globalize) : : modules.$(globalize) ;
# Unit tests for peek/poke and normalize-raw-paths.
rule __test__ ( )
{
import assert ;
import modules : normalize-raw-paths ;
module modules.__test__
{
foo = bar ;
}
assert.result bar : peek modules.__test__ : foo ;
poke modules.__test__ : foo : bar baz ;
assert.result bar baz : peek modules.__test__ : foo ;
assert.result c:/foo/bar : normalize-raw-paths c:/x/../foo/./xx/yy/../../bar ;
assert.result . : normalize-raw-paths . ;
assert.result .. : normalize-raw-paths .. ;
assert.result ../.. : normalize-raw-paths ../.. ;
assert.result .. : normalize-raw-paths ./.. ;
assert.result / / : normalize-raw-paths / \\ ;
assert.result a : normalize-raw-paths a ;
assert.result a : normalize-raw-paths a/ ;
assert.result /a : normalize-raw-paths /a/ ;
assert.result / : normalize-raw-paths /a/.. ;
}

View File

@ -0,0 +1,212 @@
# Copyright 2003 Dave Abrahams
# Copyright 2003, 2006 Rene Rivera
# Copyright 2003, 2006 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# This module is the plug-in handler for the --help and --help-.*
# command-line options
import modules ;
import assert ;
import doc : do-scan set-option set-output set-output-file print-help-usage print-help-top ;
import sequence ;
import set ;
import project ;
import print ;
import os ;
import version ;
import path ;
# List of possible modules, but which really aren't.
# Files with these base names are excluded from --help-internal's scan
# of BOOST_BUILD_PATH for *.jam modules.
#
.not-modules =
boost-build bootstrap site-config test user-config
-tools allyourbase boost-base features python stlport testing unit-tests ;
# The help system options are parsed here and handed off to the doc
# module to translate into documentation requests and actions. The
# understood options are:
#
# --help-disable-<option>
# --help-doc-options
# --help-enable-<option>
# --help-internal
# --help-options
# --help-usage
# --help-output <type>
# --help-output-file <file>
# --help [<module-or-class>]
#
# Returns non-empty iff a help option was handled; option.process uses
# that to skip loading the rest of the build system (see bootstrap).
rule process (
command # The option.
: values * # The values, starting after the "=".
)
{
# This plugin is only registered for --help* options.
assert.result --help : MATCH ^(--help).* : $(command) ;
local did-help = ;
switch $(command)
{
case --help-internal :
# Scan every module on BOOST_BUILD_PATH, minus known non-modules.
local path-to-modules = [ modules.peek : BOOST_BUILD_PATH ] ;
path-to-modules ?= . ;
local possible-modules = [ GLOB $(path-to-modules) : *\\.jam ] ;
local not-modules = [ GLOB $(path-to-modules) : *$(.not-modules)\\.jam ] ;
local modules-to-list =
[ sequence.insertion-sort
[ set.difference $(possible-modules:D=:S=) : $(not-modules:D=:S=) ] ] ;
local modules-to-scan ;
for local m in $(modules-to-list)
{
local module-files = [ GLOB $(path-to-modules) : $(m)\\.jam ] ;
modules-to-scan += $(module-files[1]) ;
}
do-scan $(modules-to-scan) : print-help-all ;
did-help = true ;
case --help-enable-* :
local option = [ MATCH --help-enable-(.*) : $(command) ] ; option = $(option:L) ;
set-option $(option) : enabled ;
did-help = true ;
case --help-disable-* :
local option = [ MATCH --help-disable-(.*) : $(command) ] ; option = $(option:L) ;
set-option $(option) ;
did-help = true ;
case --help-output :
set-output $(values[1]) ;
did-help = true ;
case --help-output-file :
set-output-file $(values[1]) ;
did-help = true ;
case --help-doc-options :
local doc-module-spec = [ split-symbol doc ] ;
do-scan $(doc-module-spec[1]) : print-help-options ;
did-help = true ;
case --help-options :
print-help-usage ;
did-help = true ;
case --help :
local spec = $(values[1]) ;
if $(spec)
{
# Help on a specific module or module.symbol.
local spec-parts = [ split-symbol $(spec) ] ;
if $(spec-parts)
{
if $(spec-parts[2])
{
do-scan $(spec-parts[1]) : print-help-classes $(spec-parts[2]) ;
do-scan $(spec-parts[1]) : print-help-rules $(spec-parts[2]) ;
do-scan $(spec-parts[1]) : print-help-variables $(spec-parts[2]) ;
}
else
{
do-scan $(spec-parts[1]) : print-help-module ;
}
}
else
{
EXIT "Unrecognized help option '"$(command)" "$(spec)"'." ;
}
}
else
{
# Bare --help: version, project docs, config docs, top help.
version.print ;
ECHO ;
# First print documentation from the current Jamfile, if any.
# FIXME: Generally, this duplication of project.jam logic is bad.
local names = [ modules.peek project : JAMROOT ]
[ modules.peek project : JAMFILE ] ;
local project-file = [ path.glob . : $(names) ] ;
if ! $(project-file)
{
project-file = [ path.glob-in-parents . : $(names) ] ;
}
for local p in $(project-file)
{
do-scan $(p) : print-help-project $(p) ;
}
# Next any user-config help.
local user-path = [ os.home-directories ] [ os.environ BOOST_BUILD_PATH ] ;
local user-config = [ GLOB $(user-path) : user-config.jam ] ;
if $(user-config)
{
do-scan $(user-config[1]) : print-help-config user $(user-config[1]) ;
}
# Next any site-config help.
local site-config = [ GLOB $(user-path) : site-config.jam ] ;
if $(site-config)
{
do-scan $(site-config[1]) : print-help-config site $(site-config[1]) ;
}
# Then the overall help.
print-help-top ;
}
did-help = true ;
}
if $(did-help)
{
# Make "all" trivially up to date so no build is attempted.
UPDATE all ;
NOCARE all ;
}
return $(did-help) ;
}
# Split a reference to a symbol into module and symbol parts.
# Returns "<module-path> <symbol>" (symbol part absent when the whole
# input names a module), or nothing when no module could be resolved.
# Works by repeatedly stripping the last dotted component off the name
# and re-testing whether the remainder is a module on BOOST_BUILD_PATH.
#
local rule split-symbol (
symbol # The symbol to split.
)
{
local path-to-modules = [ modules.peek : BOOST_BUILD_PATH ] ;
path-to-modules ?= . ;
local module-name = $(symbol) ;
local symbol-name = ;
local result = ;
while ! $(result)
{
local module-path = [ GLOB $(path-to-modules) : $(module-name)\\.jam ] ;
if $(module-path)
{
# The 'module-name' in fact refers to module. Return the full
# module path and a symbol within it. If 'symbol' passed to this
# rule is already module, 'symbol-name' will be empty. Otherwise,
# it's initialized on the previous loop iteration.
# In case there are several modules by this name,
# use the first one.
result = $(module-path[1]) $(symbol-name) ;
}
else
{
if ! $(module-name:S)
{
# Nothing left to strip; mark failure with the sentinel "-".
result = - ;
}
else
{
# Move the last ".component" onto the symbol side.
local next-symbol-part = [ MATCH ^.(.*) : $(module-name:S) ] ;
if $(symbol-name)
{
symbol-name = $(next-symbol-part).$(symbol-name) ;
}
else
{
symbol-name = $(next-symbol-part) ;
}
module-name = $(module-name:B) ;
}
}
}
if $(result) != -
{
return $(result) ;
}
}

View File

@ -0,0 +1,4 @@
# Copyright 2002, 2003 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)

View File

@ -0,0 +1,118 @@
# Copyright Vladimir Prus 2004.
# Copyright Toon Knapen 2004.
# Copyright Boris Gubenko 2007.
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt
# or copy at http://www.boost.org/LICENSE_1_0.txt)
#
# Boost.Build V2 toolset for the HP aC++ compiler.
#
import toolset : flags ;
import feature ;
import generators ;
import common ;
# Register 'acc' as a toolset value and derive it from the generic
# unix toolset; adjust generator overrides for prebuilt/searched libs.
feature.extend toolset : acc ;
toolset.inherit acc : unix ;
generators.override builtin.lib-generator : acc.prebuilt ;
generators.override acc.searched-lib-generator : searched-lib-generator ;
# Configures the acc toolset.
# 'version' distinguishes multiple configured versions; the compiler
# command defaults to 'aCC' unless user-provided.
rule init ( version ? : user-provided-command * : options * )
{
local condition = [ common.check-init-parameters acc
: version $(version) ] ;
local command = [ common.get-invocation-command acc : aCC
: $(user-provided-command) ] ;
common.handle-options acc : $(condition) : $(command) : $(options) ;
}
# Declare generators
generators.register-c-compiler acc.compile.c : C : OBJ : <toolset>acc ;
generators.register-c-compiler acc.compile.c++ : CPP : OBJ : <toolset>acc ;
# Declare flags.
# Each 'flags' call maps a feature/value pair to compiler or linker
# flag variables used by the actions below.
flags acc CFLAGS <optimization>off : ;
flags acc CFLAGS <optimization>speed : -O3 ;
flags acc CFLAGS <optimization>space : -O2 ;
flags acc CFLAGS <inlining>off : +d ;
flags acc CFLAGS <inlining>on : ;
flags acc CFLAGS <inlining>full : ;
flags acc C++FLAGS <exception-handling>off : ;
flags acc C++FLAGS <exception-handling>on : ;
flags acc C++FLAGS <rtti>off : ;
flags acc C++FLAGS <rtti>on : ;
# We want the full path to the sources in the debug symbols because otherwise
# the debugger won't find the sources when we use boost.build.
flags acc CFLAGS <debug-symbols>on : -g ;
flags acc LINKFLAGS <debug-symbols>on : -g ;
flags acc LINKFLAGS <debug-symbols>off : -s ;
# V2 does not have <shared-linkable>, not sure what this meant in V1.
# flags acc CFLAGS <shared-linkable>true : +Z ;
flags acc CFLAGS <profiling>on : -pg ;
flags acc LINKFLAGS <profiling>on : -pg ;
flags acc CFLAGS <address-model>64 : +DD64 ;
flags acc LINKFLAGS <address-model>64 : +DD64 ;
# It is unknown if there's separate option for rpath used only
# at link time, similar to -rpath-link in GNU. We'll use -L.
flags acc RPATH_LINK : <xdll-path> ;
# Free-form features are forwarded straight into the flag variables.
flags acc CFLAGS <cflags> ;
flags acc C++FLAGS <cxxflags> ;
flags acc DEFINES <define> ;
flags acc UNDEFS <undef> ;
flags acc HDRS <include> ;
flags acc STDHDRS <sysinclude> ;
flags acc LINKFLAGS <linkflags> ;
flags acc ARFLAGS <arflags> ;
flags acc LIBPATH <library-path> ;
flags acc NEEDLIBS <library-file> ;
flags acc FINDLIBS <find-shared-library> ;
flags acc FINDLIBS <find-static-library> ;
# Select the compiler name according to the threading model.
flags acc CFLAGS <threading>multi : -mt ;
flags acc LINKFLAGS <threading>multi : -mt ;
flags acc.compile.c++ TEMPLATE_DEPTH <c++-template-depth> ;
# Link an executable. 'bind NEEDLIBS' makes jam bind the library-file
# targets to real paths. NEEDLIBS appears twice on the command line to
# satisfy circular inter-library references with a single-pass linker.
actions acc.link bind NEEDLIBS
{
$(CONFIG_COMMAND) -AA $(LINKFLAGS) -o "$(<[1])" -L"$(RPATH_LINK)" -L$(LIBPATH) -L$(STDLIBPATH) "$(>)" "$(NEEDLIBS)" "$(NEEDLIBS)" -l$(FINDLIBS) $(OPTIONS)
}
SPACE = " " ;
# Link a shared library (-b) and set its internal name with -Wl,+h.
actions acc.link.dll bind NEEDLIBS
{
$(CONFIG_COMMAND) -AA -b $(LINKFLAGS) -o "$(<[1])" -L"$(RPATH_LINK)" -Wl,+h$(<[-1]:D=) -L$(LIBPATH) -L$(STDLIBPATH) "$(>)" "$(NEEDLIBS)" "$(NEEDLIBS)" -l$(FINDLIBS) $(OPTIONS)
}
# Compile C with the plain 'cc' driver (not the configured aCC command).
actions acc.compile.c
{
cc -c -I$(BOOST_ROOT) -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)" $(OPTIONS)
}
# Compile C++ in ANSI mode (-AA), passing the template depth through.
actions acc.compile.c++
{
$(CONFIG_COMMAND) -AA -c -Wc,--pending_instantiations=$(TEMPLATE_DEPTH) -I$(BOOST_ROOT) -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) $(C++FLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)" $(OPTIONS)
}
# Archive objects; 'updated together piecemeal' feeds only changed
# sources, in batches, to a single archive target.
actions updated together piecemeal acc.archive
{
ar ru$(ARFLAGS:E="") "$(<)" "$(>)"
}

View File

@ -0,0 +1,32 @@
# Copyright 2003 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Bison (parser generator) support: .y -> C/H and .yy -> CPP/HPP.
import generators ;
import feature ;
import type ;
import property ;
# Free feature carrying the value for bison's -p (symbol prefix) option.
feature.feature bison.prefix : : free ;
type.register Y : y ;
type.register YY : yy ;
generators.register-standard bison.bison : Y : C H ;
generators.register-standard bison.bison : YY : CPP HPP ;
rule init ( )
{
}
# Set per-target variables for the 'bison' action.
# dst dst_header -- the generated source and header targets.
# src            -- the grammar file.
# properties     -- build properties; a bison.prefix value, if present,
#                   becomes the -p option.
rule bison ( dst dst_header : src : properties * )
{
local r = [ property.select bison.prefix : $(properties) ] ;
if $(r)
{
PREFIX_OPT on $(<) = -p $(r:G=) ;
}
}
actions bison
{
bison $(PREFIX_OPT) -d -o $(<[1]) $(>)
}

View File

@ -0,0 +1,13 @@
#~ Copyright 2005 Rene Rivera.
#~ Distributed under the Boost Software License, Version 1.0.
#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Automatic configuration for BoostBook tools. To use, just import this module.
#
# This module is deprecated.
# using boostbook ;
# with no arguments now suffices.
import toolset : using ;
# Configure boostbook with auto-detected (default) tool locations.
using boostbook ;

View File

@ -0,0 +1,727 @@
# Copyright 2003, 2004, 2005 Dave Abrahams
# Copyright 2003, 2004, 2005 Douglas Gregor
# Copyright 2005, 2006, 2007 Rene Rivera
# Copyright 2003, 2004, 2005 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# This module defines rules to handle generation of documentation
# from BoostBook sources.
#
# The type of output is controlled by the <format> feature which can
# have the following values::
#
# * html: Generates html documentation. This is the default.
# * xhtml: Generates xhtml documentation
# * htmlhelp: Generates html help output.
# * onehtml: Generates a single html page.
# * man: Generates man pages.
# * pdf: Generates pdf documentation.
# * ps: Generates postscript output.
# * docbook: Generates docbook XML.
# * fo: Generates XSL formatting objects.
# * tests: Extracts test cases from the boostbook XML.
#
# format is an implicit feature, so typing pdf on the command
# line (for example) is a short-cut for format=pdf.
import "class" : new ;
import common ;
import errors ;
import targets ;
import feature ;
import generators ;
import print ;
import property ;
import project ;
import property-set ;
import regex ;
import scanner ;
import sequence ;
import make ;
import os ;
import type ;
import modules path project ;
import build-system ;
import xsltproc : xslt xslt-dir ;
# Make this module into a project.
project.initialize $(__name__) ;
project boostbook ;
feature.feature format : html xhtml htmlhelp onehtml man pdf ps docbook fo tests
: incidental implicit composite propagated ;
# Register the file types this module produces and consumes.
type.register DTDXML : dtdxml ;
type.register XML : xml ;
type.register BOOSTBOOK : boostbook : XML ;
type.register DOCBOOK : docbook : XML ;
type.register FO : fo : XML ;
type.register PDF : pdf ;
type.register PS : ps ;
type.register XSLT : xsl : XML ;
type.register HTMLDIR ;
type.register XHTMLDIR ;
type.register HTMLHELP ;
type.register MANPAGES ;
type.register TESTS : tests ;
# Artificial target type, used to require invocation of top-level
# BoostBook generator.
type.register BOOSTBOOK_MAIN ;
# Initialize BoostBook support. The first call registers all generators
# and the 'boostbook' main-target rule; subsequent calls may only adjust
# the configured directories, and only while the configuration has not
# yet been locked (see lock-config / modify-config).
rule init (
docbook-xsl-dir ? # The DocBook XSL stylesheet directory. If not
# provided, we use DOCBOOK_XSL_DIR from the environment
# (if available) or look in standard locations.
# Otherwise, we let the XML processor load the
# stylesheets remotely.
: docbook-dtd-dir ? # The DocBook DTD directory. If not provided, we use
# DOCBOOK_DTD_DIR From the environment (if available) or
# look in standard locations. Otherwise, we let the XML
# processor load the DTD remotely.
: boostbook-dir ? # The BoostBook directory with the DTD and XSL subdirs.
)
{
if ! $(.initialized)
{
.initialized = true ;
check-boostbook-dir $(boostbook-dir) ;
find-tools $(docbook-xsl-dir) : $(docbook-dtd-dir) : $(boostbook-dir) ;
# Register generators only if we were called via "using boostbook ; "
generators.register-standard boostbook.dtdxml-to-boostbook : DTDXML : XML ;
generators.register-standard boostbook.boostbook-to-docbook : XML : DOCBOOK ;
generators.register-standard boostbook.boostbook-to-tests : XML : TESTS ;
generators.register-standard boostbook.docbook-to-onehtml : DOCBOOK : HTML ;
generators.register-standard boostbook.docbook-to-htmldir : DOCBOOK : HTMLDIR ;
generators.register-standard boostbook.docbook-to-xhtmldir : DOCBOOK : XHTMLDIR ;
generators.register-standard boostbook.docbook-to-htmlhelp : DOCBOOK : HTMLHELP ;
generators.register-standard boostbook.docbook-to-manpages : DOCBOOK : MANPAGES ;
generators.register-standard boostbook.docbook-to-fo : DOCBOOK : FO ;
# The same about Jamfile main target rules.
IMPORT $(__name__) : boostbook : : boostbook ;
}
else
{
# Reconfiguration: each directory change first verifies the
# configuration is still modifiable.
if $(docbook-xsl-dir)
{
modify-config ;
.docbook-xsl-dir = [ path.make $(docbook-xsl-dir) ] ;
check-docbook-xsl-dir ;
}
if $(docbook-dtd-dir)
{
modify-config ;
.docbook-dtd-dir = [ path.make $(docbook-dtd-dir) ] ;
check-docbook-dtd-dir ;
}
if $(boostbook-dir)
{
modify-config ;
check-boostbook-dir $(boostbook-dir) ;
local boostbook-xsl-dir = [ path.glob $(boostbook-dir) : xsl ] ;
local boostbook-dtd-dir = [ path.glob $(boostbook-dir) : dtd ] ;
.boostbook-xsl-dir = $(boostbook-xsl-dir[1]) ;
.boostbook-dtd-dir = $(boostbook-dtd-dir[1]) ;
check-boostbook-xsl-dir ;
check-boostbook-dtd-dir ;
}
}
}
# Mark the configuration as used. Requires that BoostBook has been
# initialized; afterwards, any attempt to change the configured
# directories is rejected by modify-config.
rule lock-config ( )
{
    if ! $(.initialized)
    {
        errors.user-error "BoostBook has not been configured." ;
    }
    # Default-assign: set the lock flag only if it is not set already.
    .config-locked ?= true ;
}
# Guard called before any configuration change; fails once the
# configuration has been locked by lock-config.
rule modify-config ( )
{
    if $(.config-locked)
    {
        errors.user-error
            "BoostBook configuration cannot be changed after it has been used." ;
    }
}
# Search the Windows registry (HKLM\SOFTWARE\<key>, value "InstallRoot")
# for Boost installations and return the roots found as portable paths.
rule find-boost-in-registry ( keys * )
{
local boost-root = ;
for local R in $(keys)
{
local installed-boost = [ W32_GETREG
"HKEY_LOCAL_MACHINE\\SOFTWARE\\$(R)"
: "InstallRoot" ] ;
if $(installed-boost)
{
boost-root += [ path.make $(installed-boost) ] ;
}
}
return $(boost-root) ;
}
# Verify that the configured DocBook XSL directory really contains the
# stylesheets (probed via common/common.xsl). No-op when unset.
rule check-docbook-xsl-dir ( )
{
if $(.docbook-xsl-dir)
{
if ! [ path.glob $(.docbook-xsl-dir) : common/common.xsl ]
{
errors.user-error "BoostBook: could not find docbook XSL stylesheets in:" [ path.native $(.docbook-xsl-dir) ] ;
}
else
{
if --debug-configuration in [ modules.peek : ARGV ]
{
ECHO "notice: BoostBook: found docbook XSL stylesheets in:" [ path.native $(.docbook-xsl-dir) ] ;
}
}
}
}
# Verify that the configured DocBook DTD directory really contains the
# DTD (probed via docbookx.dtd). No-op when unset.
rule check-docbook-dtd-dir ( )
{
if $(.docbook-dtd-dir)
{
if ! [ path.glob $(.docbook-dtd-dir) : docbookx.dtd ]
{
errors.user-error "error: BoostBook: could not find docbook DTD in:" [ path.native $(.docbook-dtd-dir) ] ;
}
else
{
if --debug-configuration in [ modules.peek : ARGV ]
{
ECHO "notice: BoostBook: found docbook DTD in:" [ path.native $(.docbook-dtd-dir) ] ;
}
}
}
}
# Verify that the selected boostbook XSL directory exists and contains
# docbook.xsl; unlike the docbook checks above, an unset directory is an
# error here.
rule check-boostbook-xsl-dir ( )
{
if ! $(.boostbook-xsl-dir)
{
errors.user-error "error: BoostBook: could not find boostbook XSL stylesheets." ;
}
else if ! [ path.glob $(.boostbook-xsl-dir) : docbook.xsl ]
{
errors.user-error "error: BoostBook: could not find docbook XSL stylesheets in:" [ path.native $(.boostbook-xsl-dir) ] ;
}
else
{
if --debug-configuration in [ modules.peek : ARGV ]
{
ECHO "notice: BoostBook: found boostbook XSL stylesheets in:" [ path.native $(.boostbook-xsl-dir) ] ;
}
}
}
# Verify that the selected boostbook DTD directory exists and contains
# boostbook.dtd.
rule check-boostbook-dtd-dir ( )
{
if ! $(.boostbook-dtd-dir)
{
errors.user-error "error: BoostBook: could not find boostbook DTD." ;
}
else if ! [ path.glob $(.boostbook-dtd-dir) : boostbook.dtd ]
{
errors.user-error "error: BoostBook: could not find boostbook DTD in:" [ path.native $(.boostbook-dtd-dir) ] ;
}
else
{
if --debug-configuration in [ modules.peek : ARGV ]
{
ECHO "notice: BoostBook: found boostbook DTD in:" [ path.native $(.boostbook-dtd-dir) ] ;
}
}
}
# Sanity-check a user-supplied boostbook directory: it must contain an
# 'xsl' subdirectory.
rule check-boostbook-dir ( boostbook-dir ? )
{
if $(boostbook-dir) && ! [ path.glob $(boostbook-dir) : xsl ]
{
errors.user-error "error: BoostBook: could not find boostbook in:" [ path.native $(boostbook-dir) ] ;
}
}
# Locate the DocBook XSL/DTD and BoostBook directories. Precedence:
# explicit arguments, then the DOCBOOK_XSL_DIR / DOCBOOK_DTD_DIR /
# BOOSTBOOK_DIR environment variables, then platform-specific standard
# locations (Windows registry entries left by Boost installers on NT;
# common share/ directories elsewhere). Results are stored in the
# module-level .docbook-*/.boostbook-* variables and then validated.
rule find-tools ( docbook-xsl-dir ? : docbook-dtd-dir ? : boostbook-dir ? )
{
docbook-xsl-dir ?= [ modules.peek : DOCBOOK_XSL_DIR ] ;
docbook-dtd-dir ?= [ modules.peek : DOCBOOK_DTD_DIR ] ;
boostbook-dir ?= [ modules.peek : BOOSTBOOK_DIR ] ;
# Look for the boostbook stylesheets relative to BOOST_ROOT
# and Boost.Build.
local boost-build-root = [ path.make [ build-system.location ] ] ;
local boostbook-search-dirs = [ path.join $(boost-build-root) .. .. ] ;
local boost-root = [ modules.peek : BOOST_ROOT ] ;
if $(boost-root)
{
boostbook-search-dirs += [ path.join [ path.make $(boost-root) ] tools ] ;
}
boostbook-dir ?= [ path.glob $(boostbook-search-dirs) : boostbook* ] ;
# Try to find the tools in platform specific locations
if [ os.name ] = NT
{
# If installed by the Boost installer.
local boost-root = ;
local boost-installer-versions = snapshot cvs 1.33.0 ;
local boost-consulting-installer-versions = 1.33.1 1.34.0 1.34.1 ;
local boostpro-installer-versions =
1.35.0 1.36.0 1.37.0 1.38.0 1.39.0 1.40.0 1.41.0 1.42.0
1.43.0 1.44.0 1.45.0 1.46.0 1.47.0 1.48.0 1.49.0 1.50.0 ;
local old-installer-root = [ find-boost-in-registry Boost.org\\$(boost-installer-versions) ] ;
# Make sure that the most recent version is searched for first
boost-root += [ sequence.reverse
[ find-boost-in-registry
Boost-Consulting.com\\$(boost-consulting-installer-versions)
boostpro.com\\$(boostpro-installer-versions) ] ] ;
# Plausible locations.
# Strip path components until only the filesystem root remains.
local root = [ PWD ] ;
while $(root) != $(root:D) { root = $(root:D) ; }
root = [ path.make $(root) ] ;
local search-dirs = ;
local docbook-search-dirs = ;
for local p in $(boost-root) {
search-dirs += [ path.join $(p) tools ] ;
}
for local p in $(old-installer-root)
{
search-dirs += [ path.join $(p) share ] ;
docbook-search-dirs += [ path.join $(p) share ] ;
}
search-dirs += [ path.join $(root) Boost tools ] ;
search-dirs += [ path.join $(root) Boost share ] ;
docbook-search-dirs += [ path.join $(root) Boost share ] ;
docbook-xsl-dir ?= [ path.glob $(docbook-search-dirs) : docbook-xsl* ] ;
docbook-dtd-dir ?= [ path.glob $(docbook-search-dirs) : docbook-xml* ] ;
boostbook-dir ?= [ path.glob $(search-dirs) : boostbook* ] ;
}
else
{
# Plausible locations.
local share = /usr/local/share /usr/share /opt/share /opt/local/share ;
local dtd-versions = 4.2 ;
docbook-xsl-dir ?= [ path.glob $(share) : docbook-xsl* ] ;
docbook-xsl-dir ?= [ path.glob $(share)/sgml/docbook : xsl-stylesheets ] ;
docbook-xsl-dir ?= [ path.glob $(share)/xsl : docbook* ] ;
docbook-dtd-dir ?= [ path.glob $(share) : docbook-xml* ] ;
docbook-dtd-dir ?= [ path.glob $(share)/sgml/docbook : xml-dtd-$(dtd-versions)* ] ;
docbook-dtd-dir ?= [ path.glob $(share)/xml/docbook : $(dtd-versions) ] ;
boostbook-dir ?= [ path.glob $(share) : boostbook* ] ;
# Ubuntu Linux
docbook-xsl-dir ?= [ path.glob /usr/share/xml/docbook/stylesheet : nwalsh ] ;
docbook-dtd-dir ?= [ path.glob /usr/share/xml/docbook/schema/dtd : $(dtd-versions) ] ;
}
# Only the first match of each glob is kept.
if $(docbook-xsl-dir)
{
.docbook-xsl-dir = [ path.make $(docbook-xsl-dir[1]) ] ;
}
if $(docbook-dtd-dir)
{
.docbook-dtd-dir = [ path.make $(docbook-dtd-dir[1]) ] ;
}
if --debug-configuration in [ modules.peek : ARGV ]
{
ECHO "notice: Boost.Book: searching XSL/DTD in" ;
ECHO "notice:" [ sequence.transform path.native : $(boostbook-dir) ] ;
}
local boostbook-xsl-dir ;
for local dir in $(boostbook-dir) {
boostbook-xsl-dir += [ path.glob $(dir) : xsl ] ;
}
local boostbook-dtd-dir ;
for local dir in $(boostbook-dir) {
boostbook-dtd-dir += [ path.glob $(dir) : dtd ] ;
}
.boostbook-xsl-dir = $(boostbook-xsl-dir[1]) ;
.boostbook-dtd-dir = $(boostbook-dtd-dir[1]) ;
check-docbook-xsl-dir ;
check-docbook-dtd-dir ;
check-boostbook-xsl-dir ;
check-boostbook-dtd-dir ;
}
# Accessors for the configured directories. Each locks the
# configuration first, so later attempts to reconfigure fail loudly
# instead of silently producing inconsistent output.
rule xsl-dir
{
lock-config ;
return $(.boostbook-xsl-dir) ;
}
rule dtd-dir
{
lock-config ;
return $(.boostbook-dtd-dir) ;
}
rule docbook-xsl-dir
{
lock-config ;
return $(.docbook-xsl-dir) ;
}
rule docbook-dtd-dir
{
lock-config ;
return $(.docbook-dtd-dir) ;
}
# Generator actions: each rule below runs one xslt / xslt-dir
# transformation with the matching BoostBook stylesheet. lock-config
# guarantees the stylesheet directories cannot change afterwards.
rule dtdxml-to-boostbook ( target : source : properties * )
{
lock-config ;
xslt $(target) : $(source) "$(.boostbook-xsl-dir)/dtd/dtd2boostbook.xsl"
: $(properties) ;
}
rule boostbook-to-docbook ( target : source : properties * )
{
lock-config ;
local stylesheet = [ path.native $(.boostbook-xsl-dir)/docbook.xsl ] ;
xslt $(target) : $(source) $(stylesheet) : $(properties) ;
}
rule docbook-to-onehtml ( target : source : properties * )
{
lock-config ;
local stylesheet = [ path.native $(.boostbook-xsl-dir)/html-single.xsl ] ;
xslt $(target) : $(source) $(stylesheet) : $(properties) ;
}
# The *-dir rules produce a directory of output; the last argument is
# the directory suffix passed to xslt-dir.
rule docbook-to-htmldir ( target : source : properties * )
{
lock-config ;
local stylesheet = [ path.native $(.boostbook-xsl-dir)/html.xsl ] ;
xslt-dir $(target) : $(source) $(stylesheet) : $(properties) : html ;
}
rule docbook-to-xhtmldir ( target : source : properties * )
{
lock-config ;
local stylesheet = [ path.native $(.boostbook-xsl-dir)/xhtml.xsl ] ;
xslt-dir $(target) : $(source) $(stylesheet) : $(properties) : xhtml ;
}
rule docbook-to-htmlhelp ( target : source : properties * )
{
lock-config ;
local stylesheet = [ path.native $(.boostbook-xsl-dir)/html-help.xsl ] ;
xslt-dir $(target) : $(source) $(stylesheet) : $(properties) : htmlhelp ;
}
rule docbook-to-manpages ( target : source : properties * )
{
lock-config ;
local stylesheet = [ path.native $(.boostbook-xsl-dir)/manpages.xsl ] ;
xslt-dir $(target) : $(source) $(stylesheet) : $(properties) : man ;
}
rule docbook-to-fo ( target : source : properties * )
{
lock-config ;
local stylesheet = [ path.native $(.boostbook-xsl-dir)/fo.xsl ] ;
xslt $(target) : $(source) $(stylesheet) : $(properties) ;
}
# Convert a filesystem path into the form required for use inside the
# generated XML catalog: translate between cygwin and native Windows
# path styles as needed, and URL-encode spaces.
#
# path -- the path to convert.
# Returns the converted path.
rule format-catalog-path ( path )
{
    local result = $(path) ;
    if [ xsltproc.is-cygwin ]
    {
        if [ os.name ] = NT
        {
            # Native bjam driving a cygwin xsltproc: rewrite "/c:/..."
            # into "/cygdrive/c/...".
            # FIX: 'drive' is now declared local; previously the MATCH
            # result leaked into the enclosing (module/global) scope.
            local drive = [ MATCH ^/(.):(.*)$ : $(path) ] ;
            result = /cygdrive/$(drive[1])$(drive[2]) ;
        }
    }
    else
    {
        if [ os.name ] = CYGWIN
        {
            # Cygwin bjam driving a native xsltproc: use the native
            # (Windows-style) form of the path.
            local native-path = [ path.native $(path) ] ;
            result = [ path.make $(native-path:W) ] ;
        }
    }
    # Spaces are not valid in the URIs written to the catalog.
    return [ regex.replace $(result) " " "%20" ] ;
}
# Write an XML catalog that redirects the canonical BoostBook/DocBook
# URLs to the locally configured directories. If a DocBook location is
# unknown, print a warning -- processing then falls back to fetching the
# files over the network (very slow).
rule generate-xml-catalog ( target : sources * : properties * )
{
print.output $(target) ;
# BoostBook DTD catalog entry
local boostbook-dtd-dir = [ boostbook.dtd-dir ] ;
if $(boostbook-dtd-dir)
{
boostbook-dtd-dir = [ format-catalog-path $(boostbook-dtd-dir) ] ;
}
print.text
"<?xml version=\"1.0\"?>"
"<!DOCTYPE catalog "
" PUBLIC \"-//OASIS/DTD Entity Resolution XML Catalog V1.0//EN\""
" \"http://www.oasis-open.org/committees/entity/release/1.0/catalog.dtd\">"
"<catalog xmlns=\"urn:oasis:names:tc:entity:xmlns:xml:catalog\">"
" <rewriteURI uriStartString=\"http://www.boost.org/tools/boostbook/dtd/\" rewritePrefix=\"file://$(boostbook-dtd-dir)/\"/>"
: true ;
local docbook-xsl-dir = [ boostbook.docbook-xsl-dir ] ;
if ! $(docbook-xsl-dir)
{
ECHO "BoostBook warning: no DocBook XSL directory specified." ;
ECHO " If you have the DocBook XSL stylesheets installed, please " ;
ECHO " set DOCBOOK_XSL_DIR to the stylesheet directory on either " ;
ECHO " the command line (via -sDOCBOOK_XSL_DIR=...) or in a " ;
ECHO " Boost.Jam configuration file. The DocBook XSL stylesheets " ;
ECHO " are available here: http://docbook.sourceforge.net/ " ;
ECHO " Stylesheets will be downloaded on-the-fly (very slow!) " ;
}
else
{
docbook-xsl-dir = [ format-catalog-path $(docbook-xsl-dir) ] ;
print.text " <rewriteURI uriStartString=\"http://docbook.sourceforge.net/release/xsl/current/\" rewritePrefix=\"file://$(docbook-xsl-dir)/\"/>" ;
}
local docbook-dtd-dir = [ boostbook.docbook-dtd-dir ] ;
if ! $(docbook-dtd-dir)
{
ECHO "BoostBook warning: no DocBook DTD directory specified." ;
ECHO " If you have the DocBook DTD installed, please set " ;
ECHO " DOCBOOK_DTD_DIR to the DTD directory on either " ;
ECHO " the command line (via -sDOCBOOK_DTD_DIR=...) or in a " ;
ECHO " Boost.Jam configuration file. The DocBook DTD is available " ;
ECHO " here: http://www.oasis-open.org/docbook/xml/4.2/index.shtml" ;
ECHO " The DTD will be downloaded on-the-fly (very slow!) " ;
}
else
{
docbook-dtd-dir = [ format-catalog-path $(docbook-dtd-dir) ] ;
print.text " <rewriteURI uriStartString=\"http://www.oasis-open.org/docbook/xml/4.2/\" rewritePrefix=\"file://$(docbook-dtd-dir)/\"/>" ;
}
print.text "</catalog>" ;
}
# Return the catalog virtual target and the path of the catalog file,
# creating both on first use (memoized in .xml-catalog).
rule xml-catalog ( )
{
if ! $(.xml-catalog)
{
# The target is created as part of the root project. But ideally
# it would be created as part of the boostbook project. This is not
# currently possible as such global projects don't inherit things like
# the build directory.
# Find the root project.
local root-project = [ project.current ] ;
root-project = [ $(root-project).project-module ] ;
while
[ project.attribute $(root-project) parent-module ] &&
[ project.attribute $(root-project) parent-module ] != user-config &&
[ project.attribute $(root-project) parent-module ] != project-config
{
root-project = [ project.attribute $(root-project) parent-module ] ;
}
.xml-catalog = [ new file-target boostbook_catalog
: XML
: [ project.target $(root-project) ]
: [ new action : boostbook.generate-xml-catalog ]
:
] ;
# Join the target's path and name into the full file path.
.xml-catalog-file = [ $(.xml-catalog).path ] [ $(.xml-catalog).name ] ;
.xml-catalog-file = $(.xml-catalog-file:J=/) ;
}
return $(.xml-catalog) $(.xml-catalog-file) ;
}
# Top-level generator: builds DOCBOOK from BOOSTBOOK sources and then
# the output requested via <format>, with every step depending on the
# generated XML catalog.
class boostbook-generator : generator
{
import feature ;
import virtual-target ;
import generators ;
import boostbook ;
rule __init__ ( * : * )
{
generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
}
rule run ( project name ? : property-set : sources * )
{
# Generate the catalog, but only once...
local global-catalog = [ boostbook.xml-catalog ] ;
local catalog = $(global-catalog[1]) ;
local catalog-file = $(global-catalog[2]) ;
local targets ;
# Add the catalog to the property set
property-set = [ $(property-set).add-raw <catalog>$(catalog-file) ] ;
local type = none ;
local manifest ;
local format = [ $(property-set).get <format> ] ;
# Map the requested format onto a target type; formats producing a
# directory of files also get a manifest file name.
switch $(format)
{
case html :
{
type = HTMLDIR ;
manifest = HTML.manifest ;
}
case xhtml :
{
type = XHTMLDIR ;
manifest = HTML.manifest ;
}
case htmlhelp :
{
type = HTMLHELP ;
manifest = HTML.manifest ;
}
case onehtml : type = HTML ;
case man :
{
type = MANPAGES ;
manifest = man.manifest ;
}
case docbook : type = DOCBOOK ;
case fo : type = FO ;
case pdf : type = PDF ;
case ps : type = PS ;
case tests : type = TESTS ;
}
if $(manifest)
{
# Create DOCBOOK file from BOOSTBOOK sources.
local base-target = [ generators.construct $(project)
: DOCBOOK : $(property-set) : $(sources) ] ;
base-target = $(base-target[2]) ;
$(base-target).depends $(catalog) ;
# Generate HTML/PDF/PS from DOCBOOK.
local target = [ generators.construct $(project) $(name)_$(manifest)
: $(type)
: [ $(property-set).add-raw
<xsl:param>manifest=$(name)_$(manifest) ]
: $(base-target) ] ;
# Output directory name: the <name> property, or the format name.
local name = [ $(property-set).get <name> ] ;
name ?= $(format) ;
$(target[2]).set-path $(name) ;
$(target[2]).depends $(catalog) ;
targets += $(target[2]) ;
}
else {
local target = [ generators.construct $(project)
: $(type) : $(property-set) : $(sources) ] ;
if ! $(target)
{
errors.error "Cannot build documentation type '$(format)'" ;
}
else
{
$(target[2]).depends $(catalog) ;
targets += $(target[2]) ;
}
}
return $(targets) ;
}
}
generators.register [ new boostbook-generator boostbook.main : : BOOSTBOOK_MAIN ] ;
# Creates a boostbook target.
# Main-target rule exported to Jamfiles: declares a BOOSTBOOK_MAIN
# target, whose construction is handled by boostbook-generator above.
rule boostbook ( target-name : sources * : requirements * : default-build * )
{
local project = [ project.current ] ;
targets.main-target-alternative
[ new typed-target $(target-name) : $(project) : BOOSTBOOK_MAIN
: [ targets.main-target-sources $(sources) : $(target-name) ]
: [ targets.main-target-requirements $(requirements) : $(project) ]
: [ targets.main-target-default-build $(default-build) : $(project) ]
] ;
}
#############################################################################
# Dependency scanners
#############################################################################
# XInclude scanner. Mostly stolen from c-scanner :)
# Note that this assumes an "xi" prefix for XIncludes. This isn't always the
# case for XML documents, but we'll assume it's true for anything we encounter.
class xinclude-scanner : scanner
{
import virtual-target ;
import path ;
import scanner ;
rule __init__ ( includes * )
{
scanner.__init__ ;
self.includes = $(includes) ;
}
# Regex matching xi:include elements; group 1 is the href value.
rule pattern ( )
{
return "xi:include[ ]*href=\"([^\"]*)\"" ;
}
# Register each matched href as a dependency of 'target'. NOCARE makes
# missing includes non-fatal; SEARCH looks next to the including file
# and in self.includes.
rule process ( target : matches * : binding )
{
local target_path = [ NORMALIZE_PATH $(binding:D) ] ;
NOCARE $(matches) ;
INCLUDES $(target) : $(matches) ;
SEARCH on $(matches) = $(target_path) $(self.includes:G=) ;
scanner.propagate $(__name__) : $(matches) : $(target) ;
}
}
# The scanner's extra search paths come from the <xsl:path> feature.
scanner.register xinclude-scanner : xsl:path ;
type.set-scanner XML : xinclude-scanner ;
# Extract a Jamfile of test cases from boostbook XML by applying the
# testing/Jamfile.xsl stylesheet.
rule boostbook-to-tests ( target : source : properties * )
{
lock-config ;
local boost_root = [ modules.peek : BOOST_ROOT ] ;
local native-path =
[ path.native [ path.join $(.boostbook-xsl-dir) testing Jamfile ] ] ;
local stylesheet = $(native-path:S=.xsl) ;
xslt $(target) : $(source) $(stylesheet)
: $(properties) <xsl:param>boost.root=$(boost_root)
;
}

View File

@ -0,0 +1,220 @@
# Copyright 2005 Dave Abrahams
# Copyright 2003 Rene Rivera
# Copyright 2003, 2004, 2005 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Support for the Borland's command line compiler
import property ;
import generators ;
import os ;
import toolset : flags ;
import feature : get-values ;
import type ;
import common ;
feature.extend toolset : borland ;
# Configure the borland toolset.
# version -- optional version, used to distinguish multiple configured
#            borland toolsets.
# command -- optional invocation command; bcc32.exe is searched for on
#            PATH otherwise.
# options -- extra options passed to common.handle-options.
rule init ( version ? : command * : options * )
{
local condition = [ common.check-init-parameters borland :
version $(version) ] ;
local command = [ common.get-invocation-command borland : bcc32.exe
: $(command) ] ;
common.handle-options borland : $(condition) : $(command) : $(options) ;
if $(command)
{
command = [ common.get-absolute-tool-path $(command[-1]) ] ;
}
# NOTE(review): 'root' is not declared local, so it persists at module
# scope after init; it is only read in the flags lines below -- confirm
# before changing.
root = $(command:D) ;
flags borland.compile STDHDRS $(condition) : $(root)/include/ ;
flags borland.link STDLIBPATH $(condition) : $(root)/lib ;
flags borland.link RUN_PATH $(condition) : $(root)/bin ;
flags borland .root $(condition) : $(root)/bin/ ;
}
# A borland-specific target type
type.register BORLAND.TDS : tds ;
# Declare generators
generators.register-linker borland.link : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : EXE : <toolset>borland ;
generators.register-linker borland.link.dll : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : SHARED_LIB IMPORT_LIB : <toolset>borland ;
generators.register-archiver borland.archive : OBJ : STATIC_LIB : <toolset>borland ;
generators.register-c-compiler borland.compile.c++ : CPP : OBJ : <toolset>borland ;
generators.register-c-compiler borland.compile.c : C : OBJ : <toolset>borland ;
generators.register-standard borland.asm : ASM : OBJ : <toolset>borland ;
# Declare flags
flags borland.compile OPTIONS <debug-symbols>on : -v ;
flags borland.link OPTIONS <debug-symbols>on : -v ;
flags borland.compile OPTIONS <optimization>off : -Od ;
flags borland.compile OPTIONS <optimization>speed : -O2 ;
flags borland.compile OPTIONS <optimization>space : -O1 ;
# Inlining is honoured only for compiler versions known to have working
# inlining; otherwise it is forced off with -vi-.
if $(.BORLAND_HAS_FIXED_INLINING_BUGS)
{
flags borland CFLAGS <inlining>off : -vi- ;
flags borland CFLAGS <inlining>on : -vi -w-inl ;
flags borland CFLAGS <inlining>full : -vi -w-inl ;
}
else
{
flags borland CFLAGS : -vi- ;
}
flags borland.compile OPTIONS <warnings>off : -w- ;
flags borland.compile OPTIONS <warnings>all : -w ;
flags borland.compile OPTIONS <warnings-as-errors>on : -w! ;
# Deal with various runtime configs...
# This should be not for DLL
flags borland OPTIONS <user-interface>console : -tWC ;
# -tWR sets -tW as well, so we turn it off here and then turn it
# on again later if we need it:
flags borland OPTIONS <runtime-link>shared : -tWR -tWC ;
flags borland OPTIONS <user-interface>gui : -tW ;
flags borland OPTIONS <main-target-type>LIB/<link>shared : -tWD ;
# Hmm.. not sure what's going on here.
flags borland OPTIONS : -WM- ;
flags borland OPTIONS <threading>multi : -tWM ;
flags borland.compile OPTIONS <cxxflags> ;
flags borland.compile DEFINES <define> ;
flags borland.compile INCLUDES <include> ;
flags borland NEED_IMPLIB <main-target-type>LIB/<link>shared : "" ;
#
# for C++ compiles the following options are turned on by default:
#
# -j5 stops after 5 errors
# -g255 allow an unlimited number of warnings
# -q no banner
# -c compile to object
# -P C++ code regardless of file extension
# -a8 8 byte alignment, this option is on in the IDE by default
# and effects binary compatibility.
#
# -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) $(C++FLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o"$(<)" "$(>)"
actions compile.c++
{
"$(CONFIG_COMMAND)" -j5 -g255 -q -c -P -a8 -Vx- -Ve- -b- $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -I"$(STDHDRS)" -o"$(<)" "$(>)"
}
# For C, we don't pass -P flag
actions compile.c
{
"$(CONFIG_COMMAND)" -j5 -g255 -q -c -a8 -Vx- -Ve- -b- $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -I"$(STDHDRS)" -o"$(<)" "$(>)"
}
# Declare flags and action for linking
toolset.flags borland.link OPTIONS <debug-symbols>on : -v ;
toolset.flags borland.link LIBRARY_PATH <library-path> ;
toolset.flags borland.link FINDLIBS_ST <find-static-library> ;
toolset.flags borland.link FINDLIBS_SA <find-shared-library> ;
toolset.flags borland.link LIBRARIES <library-file> ;
flags borland.link OPTIONS <linkflags> ;
flags borland.link OPTIONS <link>shared : -tWD ;
flags borland.link LIBRARY_PATH_OPTION <toolset>borland : -L : unchecked ;
flags borland.link LIBRARY_OPTION <toolset>borland : "" : unchecked ;
# bcc32 needs to have ilink32 in the path in order to invoke it, so explicitly
# specifying $(BCC_TOOL_PATH)bcc32 doesn't help. You need to add
# $(BCC_TOOL_PATH) to the path
# The NEED_IMPLIB variable controls whether we need to invoke implib.
flags borland.archive AROPTIONS <archiveflags> ;
# Declare action for archives. We don't use response file
# since it's hard to get "+-" there.
# The /P256 increases 'page' size -- with too low
# values tlib fails when building large applications.
# CONSIDER: don't know what 'together' is for...
actions updated together piecemeal archive
{
$(.set-path)$(.root:W)$(.old-path)
tlib $(AROPTIONS) /P256 /u /a /C "$(<:W)" +-"$(>:W)"
}
# Platform-specific overrides: on CYGWIN a different archive action is
# needed, and the PATH-prefixing snippets differ per shell.
if [ os.name ] = CYGWIN
{
.set-path = "cmd /S /C set \"PATH=" ;
.old-path = ";%PATH%\" \"&&\"" ;
# Couldn't get TLIB to stop being confused about pathnames
# containing dashes (it seemed to treat them as option separators
# when passed through from bash), so we explicitly write the
# command into a .bat file and execute that. TLIB is also finicky
# about pathname style! Forward slashes, too, are treated as
# options.
actions updated together piecemeal archive
{
chdir $(<:D)
echo +-$(>:BS) > $(<:BS).rsp
$(.set-path)$(.root)$(.old-path) "tlib.exe" $(AROPTIONS) /P256 /C $(<:BS) @$(<:BS).rsp && $(RM) $(<:BS).rsp
}
}
else if [ os.name ] = NT
{
.set-path = "set \"PATH=" ;
.old-path = ";%PATH%\"
" ;
}
else
{
.set-path = "PATH=\"" ;
.old-path = "\":$PATH
export PATH
" ;
}
RM = [ common.rm-command ] ;
# Literal newline, used to separate entries inside the inline response
# file written by the link actions below.
nl = "
" ;
actions link
{
$(.set-path)$(.root:W)$(.old-path) "$(CONFIG_COMMAND)" -v -q $(OPTIONS) -L"$(LIBRARY_PATH:W)" -L"$(STDLIBPATH:W)" -e"$(<[1]:W)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")"
}
# Same as 'link', but additionally runs implib to produce the import
# library for the just-linked DLL.
actions link.dll bind LIBRARIES RSP
{
$(.set-path)$(.root:W)$(.old-path) "$(CONFIG_COMMAND)" -v -q $(OPTIONS) -L"$(LIBRARY_PATH:W)" -L"$(STDLIBPATH:W)" -e"$(<[1]:W)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")" && "$(.root)implib" "$(<[2]:W)" "$(<[1]:W)"
}
# It seems impossible to specify output file with directory when compiling
# asm files using bcc32, so use tasm32 directly.
# /ml makes all symbol names case-sensitive
actions asm
{
$(.set-path)$(.root:W)$(.old-path) tasm32.exe /ml "$(>)" "$(<)"
}

View File

@ -0,0 +1,960 @@
# Copyright 2002, 2003, 2004, 2005 Dave Abrahams
# Copyright 2002, 2005, 2006, 2007, 2010 Rene Rivera
# Copyright 2006 Juergen Hunold
# Copyright 2005 Toon Knapen
# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Defines standard features and rules.
import alias ;
import "class" : new ;
import errors ;
import feature ;
import generators ;
import numbers ;
import os ;
import path ;
import print ;
import project ;
import property ;
import regex ;
import scanner ;
import sequence ;
import stage ;
import symlink ;
import toolset ;
import type ;
import targets ;
import types/register ;
import utility ;
import virtual-target ;
import message ;
import convert ;
# FIXME: the following generate module import is not needed here but removing it
# too hastly will break using code (e.g. the main Boost library Jamroot file)
# that forgot to import the generate module before calling the generate rule.
import generate ;
# The set of abstract OS tags used as values of the <host-os> and <target-os>
# features declared below.
.os-names = aix bsd cygwin darwin freebsd hpux iphone linux netbsd
    openbsd osf qnx qnxnto sgi solaris unix unixware windows
    elf # Not actually an OS -- used for targeting bare metal where
        # object format is ELF. This catches both -elf and -eabi gcc
        # targets as well as other compilers targeting ELF. It is not
        # clear how often do we need to key of ELF specifically as opposed
        # to other bare metal targets, but let's stick with gcc naming.
    ;

# Feature used to determine which OS we're on. New <target-os> and <host-os>
# features should be used instead.
local os = [ modules.peek : OS ] ;
feature.feature os : $(os) : propagated link-incompatible ;
# Translates from bjam current OS to the os tags used in host-os and target-os,
# i.e. returns the running host-os.
#
local rule default-host-os ( )
{
    local host-os ;
    # If bjam's own OS name already matches one of the known tags (compared
    # case-insensitively via the :U upcase modifier), use it as is.
    if [ os.name ] in $(.os-names:U)
    {
        host-os = [ os.name ] ;
    }
    else
    {
        # Otherwise map bjam's internal OS name onto one of our abstract
        # tags; anything unrecognized is assumed to be some Unix flavour.
        switch [ os.name ]
        {
            case NT : host-os = windows ;
            case AS400 : host-os = unix ;
            case MINGW : host-os = windows ;
            case BSDI : host-os = bsd ;
            case COHERENT : host-os = unix ;
            case DRAGONFLYBSD : host-os = bsd ;
            case IRIX : host-os = sgi ;
            case MACOSX : host-os = darwin ;
            case KFREEBSD : host-os = freebsd ;
            case LINUX : host-os = linux ;
            case SUNOS :
                ECHO "SunOS is not a supported operating system." ;
                ECHO "We believe last version of SunOS was released in 1992, " ;
                ECHO "so if you get this message, something is very wrong with configuration logic. " ;
                ECHO "Please report this as a bug. " ;
                EXIT ;
            case * : host-os = unix ;
        }
    }
    # Feature values are lowercase, so lowercase the chosen tag via :L.
    return $(host-os:L) ;
}
# The two OS features define a known set of abstract OS names. The host-os is
# the OS under which bjam is running. Even though this should really be a fixed
# property we need to list all the values to prevent unknown value errors. Both
# set the default value to the current OS to account for the default use case of
# building on the target OS.
feature.feature host-os : $(.os-names) ;
feature.set-default host-os : [ default-host-os ] ;

feature.feature target-os : $(.os-names) : propagated link-incompatible ;
feature.set-default target-os : [ default-host-os ] ;

feature.feature toolset : : implicit propagated symmetric ;
feature.feature stdlib : native : propagated composite ;
feature.feature link : shared static : propagated ;
feature.feature runtime-link : shared static : propagated ;
feature.feature runtime-debugging : on off : propagated ;
feature.feature optimization : off speed space none : propagated ;
feature.feature profiling : off on : propagated ;
feature.feature inlining : off on full : propagated ;
feature.feature threading : single multi : propagated ;
feature.feature rtti : on off : propagated ;
feature.feature exception-handling : on off : propagated ;

# Whether there is support for asynchronous EH (e.g. catching SEGVs).
feature.feature asynch-exceptions : off on : propagated ;

# Whether all extern "C" functions are considered nothrow by default.
feature.feature extern-c-nothrow : off on : propagated ;

feature.feature debug-symbols : on off none : propagated ;
# Controls whether the binary should be stripped -- that is have
# everything not necessary to running removed. This option should
# not be very often needed. Also, this feature will show up in
# target paths of everything, not just binaries. Should fix that
# when implementing feature relevance.
feature.feature strip : off on : propagated ;
feature.feature define : : free ;
feature.feature undef : : free ;
feature.feature "include" : : free path ; #order-sensitive ;
feature.feature cflags : : free ;
feature.feature cxxflags : : free ;
feature.feature fflags : : free ;
feature.feature asmflags : : free ;
feature.feature linkflags : : free ;
feature.feature archiveflags : : free ;
feature.feature version : : free ;

# Generic, i.e. non-language specific, flags for tools.
feature.feature flags : : free ;
feature.feature location-prefix : : free ;

# The following features are incidental since they have no effect on built
# products. Not making them incidental will result in problems in corner cases,
# e.g.:
#
#    unit-test a : a.cpp : <use>b ;
#    lib b : a.cpp b ;
#
# Here, if <use> is not incidental, we would decide we have two targets for
# a.obj with different properties and complain about it.
#
# Note that making a feature incidental does not mean it is ignored. It may be
# ignored when creating a virtual target, but the rest of build process will use
# them.
feature.feature use : : free dependency incidental ;
feature.feature dependency : : free dependency incidental ;
feature.feature implicit-dependency : : free dependency incidental ;

feature.feature warnings :
    on  # Enable default/"reasonable" warning level for the tool.
    all # Enable all possible warnings issued by the tool.
    off # Disable all warnings issued by the tool.
    : incidental propagated ;

feature.feature warnings-as-errors :
    off # Do not fail the compilation if there are warnings.
    on  # Fail the compilation if there are warnings.
    : incidental propagated ;

# Feature that allows us to configure the maximal template instantiation depth
# level allowed by a C++ compiler. Applies only to C++ toolsets whose compilers
# actually support this configuration setting.
#
# Note that Boost Build currently does not allow defining features that take any
# positive integral value as a parameter, which is what we need here, so we just
# define some of the values here and leave it up to the user to extend this set
# as he needs using the feature.extend rule.
#
# TODO: This should be upgraded as soon as Boost Build adds support for custom
# validated feature values or at least features allowing any positive integral
# value. See related Boost Build related trac ticket #194.
#
feature.feature c++-template-depth
    :
        [ numbers.range 64 1024 : 64 ]
        [ numbers.range 20 1000 : 10 ]
        # Maximum template instantiation depth guaranteed for ANSI/ISO C++
        # conforming programs.
        17
    :
        incidental optional propagated ;

feature.feature source : : free dependency incidental ;
feature.feature library : : free dependency incidental ;
feature.feature file : : free dependency incidental ;
feature.feature find-shared-library : : free ; #order-sensitive ;
feature.feature find-static-library : : free ; #order-sensitive ;
feature.feature library-path : : free path ; #order-sensitive ;

# Internal feature.
feature.feature library-file : : free dependency ;

feature.feature name : : free ;
feature.feature tag : : free ;
feature.feature search : : free path ; #order-sensitive ;
feature.feature location : : free path ;
feature.feature dll-path : : free path ;
feature.feature hardcode-dll-paths : true false : incidental ;

# An internal feature that holds the paths of all dependency shared libraries.
# On Windows, it is needed so that we can add all those paths to PATH when
# running applications. On Linux, it is needed to add proper -rpath-link command
# line options.
feature.feature xdll-path : : free path ;

# Provides means to specify def-file for windows DLLs.
feature.feature def-file : : free dependency ;

feature.feature suppress-import-lib : false true : incidental ;

# Internal feature used to store the name of a bjam action to call when building
# a target.
feature.feature action : : free ;

# This feature is used to allow specific generators to run. For example, QT
# tools can only be invoked when QT library is used. In that case, <allow>qt
# will be in usage requirement of the library.
feature.feature allow : : free ;

# The addressing model to generate code for. Currently a limited set only
# specifying the bit size of pointers.
feature.feature address-model : 16 32 64 32_64 : propagated optional ;

# Type of CPU architecture to compile for.
feature.feature architecture :
    # x86 and x86-64
    x86

    # ia64
    ia64

    # Sparc
    sparc

    # RS/6000 & PowerPC
    power

    # MIPS/SGI
    mips1 mips2 mips3 mips4 mips32 mips32r2 mips64

    # HP/PA-RISC
    parisc

    # Advanced RISC Machines
    arm

    # Combined architectures for platforms/toolsets that support building for
    # multiple architectures at once. "combined" would be the default multi-arch
    # for the toolset.
    combined
    combined-x86-power

    : propagated optional ;

# The specific instruction set in an architecture to compile.
feature.feature instruction-set :
    # x86 and x86-64
    native i386 i486 i586 i686 pentium pentium-mmx pentiumpro pentium2 pentium3
    pentium3m pentium-m pentium4 pentium4m prescott nocona core2 conroe conroe-xe
    conroe-l allendale mermon mermon-xe kentsfield kentsfield-xe penryn wolfdale
    yorksfield nehalem k6 k6-2 k6-3 athlon athlon-tbird athlon-4 athlon-xp
    athlon-mp k8 opteron athlon64 athlon-fx winchip-c6 winchip2 c3 c3-2

    # ia64
    itanium itanium1 merced itanium2 mckinley

    # Sparc
    v7 cypress v8 supersparc sparclite hypersparc sparclite86x f930 f934
    sparclet tsc701 v9 ultrasparc ultrasparc3

    # RS/6000 & PowerPC
    401 403 405 405fp 440 440fp 505 601 602 603 603e 604 604e 620 630 740 7400
    7450 750 801 821 823 860 970 8540 power-common ec603e g3 g4 g5 power power2
    power3 power4 power5 powerpc powerpc64 rios rios1 rsc rios2 rs64a

    # MIPS
    4kc 4kp 5kc 20kc m4k r2000 r3000 r3900 r4000 r4100 r4300 r4400 r4600 r4650
    r6000 r8000 rm7000 rm9000 orion sb1 vr4100 vr4111 vr4120 vr4130 vr4300
    vr5000 vr5400 vr5500

    # HP/PA-RISC
    700 7100 7100lc 7200 7300 8000

    # Advanced RISC Machines
    armv2 armv2a armv3 armv3m armv4 armv4t armv5 armv5t armv5te armv6 armv6j iwmmxt ep9312

    : propagated optional ;

# Used to select a specific variant of C++ ABI if the compiler supports several.
feature.feature c++abi : : propagated optional ;

feature.feature conditional : : incidental free ;

# The value of 'no' prevents building of a target.
feature.feature build : yes no : optional ;

# Windows-specific features
feature.feature user-interface : console gui wince native auto ;
feature.feature variant : : implicit composite propagated symmetric ;

# Declares a new variant.
#
# First determines explicit properties for this variant, by refining parents'
# explicit properties with the passed explicit properties. The result is
# remembered and will be used if this variant is used as parent.
#
# Second, determines the full property set for this variant by adding to the
# explicit properties default values for all missing non-symmetric properties.
#
# Lastly, makes appropriate value of 'variant' property expand to the full
# property set.
#
rule variant ( name                    # Name of the variant
    : parents-or-properties *          # Specifies parent variants, if
                                       # 'explicit-properties' are given, and
                                       # explicit-properties or parents otherwise.
    : explicit-properties *            # Explicit properties.
    )
{
    local parents ;
    if ! $(explicit-properties)
    {
        # With only two arguments we have to guess: grist on the first
        # element means the list is properties, not parent variant names.
        if $(parents-or-properties[1]:G)
        {
            explicit-properties = $(parents-or-properties) ;
        }
        else
        {
            parents = $(parents-or-properties) ;
        }
    }
    else
    {
        parents = $(parents-or-properties) ;
    }

    # The problem is that we have to check for conflicts between base variants.
    if $(parents[2])
    {
        errors.error "multiple base variants are not yet supported" ;
    }

    local inherited ;
    # Add explicitly specified properties for parents.
    for local p in $(parents)
    {
        # TODO: This check may be made stricter.
        if ! [ feature.is-implicit-value $(p) ]
        {
            errors.error "Invalid base variant" $(p) ;
        }
        inherited += $(.explicit-properties.$(p)) ;
    }
    property.validate $(explicit-properties) ;
    explicit-properties = [ property.refine $(inherited)
        : $(explicit-properties) ] ;

    # Record explicitly specified properties for this variant. We do this after
    # inheriting parents' properties so they affect other variants derived from
    # this one.
    .explicit-properties.$(name) = $(explicit-properties) ;

    feature.extend variant : $(name) ;
    feature.compose <variant>$(name) : $(explicit-properties) ;
}
IMPORT $(__name__) : variant : : variant ;

# The standard variants.
variant debug : <optimization>off <debug-symbols>on <inlining>off
    <runtime-debugging>on ;
variant release : <optimization>speed <debug-symbols>off <inlining>full
    <runtime-debugging>off <define>NDEBUG ;
variant profile : release : <profiling>on <debug-symbols>on ;
# A virtual target representing a library that is searched for by the linker
# (via -l or equivalent) rather than built or referenced by file name.
class searched-lib-target : abstract-file-target
{
    rule __init__ ( name
        : project
        : shared ?    # Non-empty if the library is shared.
        : search *    # Directories the linker should search.
        : action
    )
    {
        abstract-file-target.__init__ $(name) : SEARCHED_LIB : $(project)
            : $(action) : ;

        self.shared = $(shared) ;
        self.search = $(search) ;
    }

    # Whether the library is shared.
    rule shared ( )
    {
        return $(self.shared) ;
    }

    # The search paths associated with this library.
    rule search ( )
    {
        return $(self.search) ;
    }

    rule actualize-location ( target )
    {
        # The target is never produced on disk; mark it as a pseudo-target.
        NOTFILE $(target) ;
    }

    # A searched library has no filesystem path of its own.
    rule path ( )
    {
    }
}
# The generator class for libraries (target type LIB). Depending on properties
# it will request building of the appropriate specific library type --
# -- SHARED_LIB, STATIC_LIB or SEARCHED_LIB.
#
class lib-generator : generator
{
    rule __init__ ( * : * )
    {
        generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
    }

    rule run ( project name ? : property-set : sources * )
    {
        # The lib generator is composing, and can be only invoked with an
        # explicit name. This check is present in generator.run (and so in
        # builtin.linking-generator) but duplicated here to avoid doing extra
        # work.
        if $(name)
        {
            local properties = [ $(property-set).raw ] ;
            # Determine the needed target type.
            local actual-type ;
                # <source>files can be generated by <conditional>@rule feature
                # in which case we do not consider it a SEARCHED_LIB type.
            if ! <source> in $(properties:G) &&
               ( <search> in $(properties:G) || <name> in $(properties:G) )
            {
                actual-type = SEARCHED_LIB ;
            }
            else if <file> in $(properties:G)
            {
                # A prebuilt library specified by file name.
                actual-type = LIB ;
            }
            else if <link>shared in $(properties)
            {
                actual-type = SHARED_LIB ;
            }
            else
            {
                actual-type = STATIC_LIB ;
            }
            property-set = [ $(property-set).add-raw <main-target-type>LIB ] ;
            # Construct the target.
            return [ generators.construct $(project) $(name) : $(actual-type)
                : $(property-set) : $(sources) ] ;
        }
    }

    rule viable-source-types ( )
    {
        # A composing generator: accept any source type.
        return * ;
    }
}
generators.register [ new lib-generator builtin.lib-generator : : LIB ] ;
# The implementation of the 'lib' rule. Beyond standard syntax that rule allows
# simplified: "lib a b c ;".
#
rule lib ( names + : sources * : requirements * : default-build * :
    usage-requirements * )
{
    if $(names[2])
    {
        # With multiple names a single declaration is shared, so a per-target
        # <name> or explicit sources would be ambiguous -- reject them.
        if <name> in $(requirements:G)
        {
            errors.user-error "When several names are given to the 'lib' rule" :
                "it is not allowed to specify the <name> feature." ;
        }
        if $(sources)
        {
            errors.user-error "When several names are given to the 'lib' rule" :
                "it is not allowed to specify sources." ;
        }
    }

    # This is a circular module dependency so it must be imported here.
    import targets ;

    local project = [ project.current ] ;
    local result ;
    for local name in $(names)
    {
        local r = $(requirements) ;
        # Support " lib a ; " and " lib a b c ; " syntax.
        if ! $(sources) && ! <name> in $(requirements:G)
            && ! <file> in $(requirements:G)
        {
            r += <name>$(name) ;
        }
        result += [ targets.main-target-alternative
            [ new typed-target $(name) : $(project) : LIB
                : [ targets.main-target-sources $(sources) : $(name) ]
                : [ targets.main-target-requirements $(r) : $(project) ]
                : [ targets.main-target-default-build $(default-build) : $(project) ]
                : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ]
            ] ] ;
    }
    return $(result) ;
}
IMPORT $(__name__) : lib : : lib ;
# Generator that "builds" a SEARCHED_LIB target by creating a
# searched-lib-target virtual target with a null action.
class searched-lib-generator : generator
{
    import property-set ;

    rule __init__ ( )
    {
        # The requirements cause the generators to be tried *only* when we're
        # building a lib target with a 'search' feature. This seems ugly --- all
        # we want is to make sure searched-lib-generator is not invoked deep
        # inside transformation search to produce intermediate targets.
        generator.__init__ searched-lib-generator : : SEARCHED_LIB ;
    }

    rule run ( project name ? : property-set : sources * )
    {
        if $(name)
        {
            # If 'name' is empty, it means we have not been called to build a
            # top-level target. In this case, we just fail immediately, because
            # searched-lib-generator cannot be used to produce intermediate
            # targets.

            local properties = [ $(property-set).raw ] ;
            local shared ;
            if <link>shared in $(properties)
            {
                shared = true ;
            }

            local search = [ feature.get-values <search> : $(properties) ] ;

            # A null action: nothing is actually built for a searched library.
            local a = [ new null-action $(property-set) ] ;
            # <name> overrides the target name as the library name to search.
            local lib-name = [ feature.get-values <name> : $(properties) ] ;
            lib-name ?= $(name) ;
            local t = [ new searched-lib-target $(lib-name) : $(project)
                : $(shared) : $(search) : $(a) ] ;
            # We return sources for a simple reason. If there is
            #    lib png : z : <name>png ;
            # the 'z' target should be returned, so that apps linking to 'png'
            # will link to 'z', too.
            return [ property-set.create <xdll-path>$(search) ]
                [ virtual-target.register $(t) ] $(sources) ;
        }
    }
}
generators.register [ new searched-lib-generator ] ;
# Generator for prebuilt libraries: a LIB target whose file is named with the
# <file> property is simply passed through, nothing is built.
class prebuilt-lib-generator : generator
{
    rule __init__ ( * : * )
    {
        generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
    }

    rule run ( project name ? : property-set : sources * )
    {
        # Return the file(s) named by the <file> property followed by the
        # sources, so dependents link against both.
        return [ $(property-set).get <file> ] $(sources) ;
    }
}

generators.register
    [ new prebuilt-lib-generator builtin.prebuilt : : LIB : <file> ] ;
generators.override builtin.prebuilt : builtin.lib-generator ;
# Target class for the 'preprocessed' main target rule: constructs a
# preprocessed source file, trying C++ first and then C.
class preprocessed-target-class : basic-target
{
    # FIX: 'errors' was used below without being imported into this class
    # module; import it so the error path works instead of failing on an
    # unknown rule.
    import errors ;
    import generators ;

    # Tries to construct the sources as PREPROCESSED_CPP, falling back to
    # PREPROCESSED_C; raises a user error when neither construction works.
    rule construct ( name : sources * : property-set )
    {
        local result = [ generators.construct [ project ]
            $(name) : PREPROCESSED_CPP : $(property-set) : $(sources) ] ;
        if ! $(result)
        {
            result = [ generators.construct [ project ]
                $(name) : PREPROCESSED_C : $(property-set) : $(sources) ] ;
        }
        if ! $(result)
        {
            # Collect the source names for the diagnostic.
            local s ;
            # FIX: the loop variable was not declared local and leaked into
            # the enclosing module scope.
            for local x in $(sources)
            {
                s += [ $(x).name ] ;
            }
            local p = [ project ] ;
            errors.user-error
                "In project" [ $(p).name ] :
                "Could not construct preprocessed file \"$(name)\" from $(s:J=, )." ;
        }
        return $(result) ;
    }
}
# Declares a main target producing a preprocessed source file from the given
# sources, built via preprocessed-target-class.
#
# name                : name of the created main target.
# sources             : sources to preprocess.
# requirements        : requirements applied to the target.
# default-build       : default build properties.
# usage-requirements  : usage requirements propagated to dependents.
#
rule preprocessed ( name : sources * : requirements * : default-build * :
    usage-requirements * )
{
    local project = [ project.current ] ;
    return [ targets.main-target-alternative
        [ new preprocessed-target-class $(name) : $(project)
            : [ targets.main-target-sources $(sources) : $(name) ]
            # FIX: was $(r), an undefined variable -- the rule's
            # 'requirements' parameter was being silently dropped.
            : [ targets.main-target-requirements $(requirements) : $(project) ]
            : [ targets.main-target-default-build $(default-build) : $(project) ]
            : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ]
        ] ] ;
}
IMPORT $(__name__) : preprocessed : : preprocessed ;
# An action class used for compilation steps; extends the plain action with
# implicit include paths derived from the target's own dependency graph.
class compile-action : action
{
    import sequence ;

    rule __init__ ( targets * : sources * : action-name : properties * )
    {
        action.__init__ $(targets) : $(sources) : $(action-name) : $(properties) ;
    }

    # For all virtual targets for the same dependency graph as self, i.e. which
    # belong to the same main target, add their directories to the include path.
    #
    rule adjust-properties ( property-set )
    {
        local s = [ $(self.targets[1]).creating-subvariant ] ;
        return [ $(property-set).add-raw
            [ $(s).implicit-includes "include" : H ] ] ;
    }
}
# Declare a special compiler generator. The only thing it does is changing the
# type used to represent 'action' in the constructed dependency graph to
# 'compile-action'. That class in turn adds additional include paths to handle
# cases when a source file includes headers which are generated themselves.
#
class C-compiling-generator : generator
{
    rule __init__ ( id : source-types + : target-types + : requirements *
        : optional-properties * )
    {
        generator.__init__ $(id) : $(source-types) : $(target-types) :
            $(requirements) : $(optional-properties) ;
    }

    # Use compile-action instead of the default action class.
    rule action-class ( )
    {
        return compile-action ;
    }
}
# Convenience rule: create a C-compiling-generator with the given parameters
# and register it with the generator registry.
rule register-c-compiler ( id : source-types + : target-types + : requirements *
    : optional-properties * )
{
    local g = [ new C-compiling-generator $(id) : $(source-types)
        : $(target-types) : $(requirements) : $(optional-properties) ] ;
    generators.register $(g) ;
}

# FIXME: this is ugly, should find a better way (we would like client code to
# register all generators as "generators.some-rule" instead of
# "some-module.some-rule".)
#
IMPORT $(__name__) : register-c-compiler : : generators.register-c-compiler ;
# The generator class for handling EXE and SHARED_LIB creation.
#
class linking-generator : generator
{
    import path ;
    import project ;
    import property-set ;
    import type ;

    rule __init__ ( id
        composing ? :   # The generator will be composing if a non-empty
                        # string is passed or the parameter is not given. To
                        # make the generator non-composing, pass an empty
                        # string ("").
        source-types + :
        target-types + :
        requirements * )
    {
        composing ?= true ;
        generator.__init__ $(id) $(composing) : $(source-types)
            : $(target-types) : $(requirements) ;
    }

    rule run ( project name ? : property-set : sources + )
    {
        sources += [ $(property-set).get <library> ] ;

        # Add <library-path> properties for all searched libraries.
        local extra ;
        for local s in $(sources)
        {
            if [ $(s).type ] = SEARCHED_LIB
            {
                local search = [ $(s).search ] ;
                extra += <library-path>$(search) ;
            }
        }

        # It is possible that sources include shared libraries that did not
        # come from 'lib' targets, e.g. .so files specified as sources. In this
        # case we have to add extra dll-path properties and propagate extra
        # xdll-path properties so that application linking to us will get
        # xdll-path to those libraries.
        local extra-xdll-paths ;
        for local s in $(sources)
        {
            if [ type.is-derived [ $(s).type ] SHARED_LIB ] && ! [ $(s).action ]
            {
                # Unfortunately, we do not have a good way to find the path to a
                # file, so use this nasty approach.
                #
                # TODO: This needs to be done better. One thing that is really
                # broken with this is that it does not work correctly with
                # projects having multiple source locations.
                local p = [ $(s).project ] ;
                local location = [ path.root [ $(s).name ]
                    [ $(p).get source-location ] ] ;
                extra-xdll-paths += [ path.parent $(location) ] ;
            }
        }

        # Hardcode DLL paths only when linking executables.
        # Pros: do not need to relink libraries when installing.
        # Cons: "standalone" libraries (plugins, python extensions) can not
        # hardcode paths to dependent libraries.
        if [ $(property-set).get <hardcode-dll-paths> ] = true
            && [ type.is-derived $(self.target-types[1]) EXE ]
        {
            local xdll-path = [ $(property-set).get <xdll-path> ] ;
            extra += <dll-path>$(xdll-path) <dll-path>$(extra-xdll-paths) ;
        }

        if $(extra)
        {
            property-set = [ $(property-set).add-raw $(extra) ] ;
        }

        local result = [ generator.run $(project) $(name) : $(property-set)
            : $(sources) ] ;

        local ur ;
        if $(result)
        {
            ur = [ extra-usage-requirements $(result) : $(property-set) ] ;
            ur = [ $(ur).add
                [ property-set.create <xdll-path>$(extra-xdll-paths) ] ] ;
        }
        return $(ur) $(result) ;
    }

    rule extra-usage-requirements ( created-targets * : property-set )
    {
        local result = [ property-set.empty ] ;
        local extra ;

        # Add appropriate <xdll-path> usage requirements.
        local raw = [ $(property-set).raw ] ;
        if <link>shared in $(raw)
        {
            local paths ;
            local pwd = [ path.pwd ] ;
            for local t in $(created-targets)
            {
                if [ type.is-derived [ $(t).type ] SHARED_LIB ]
                {
                    paths += [ path.root [ path.make [ $(t).path ] ] $(pwd) ] ;
                }
            }
            extra += $(paths:G=<xdll-path>) ;
        }

        # We need to pass <xdll-path> features that we've got from sources,
        # because if a shared library is built, exe using it needs to know paths
        # to other shared libraries this one depends on in order to be able to
        # find them all at runtime.
        #
        # Just pass all features in property-set, it is theoretically possible
        # that we will propagate <xdll-path> features explicitly specified by
        # the user, but then the user is to blame for using an internal feature.
        local values = [ $(property-set).get <xdll-path> ] ;
        extra += $(values:G=<xdll-path>) ;

        if $(extra)
        {
            result = [ property-set.create $(extra) ] ;
        }
        return $(result) ;
    }

    rule generated-targets ( sources + : property-set : project name ? )
    {
        local sources2 ;     # Sources to pass to inherited rule.
        local properties2 ;  # Properties to pass to inherited rule.
        local libraries ;    # Library sources.

        # Searched libraries are not passed as arguments to the linker but via
        # some option. So, we pass them to the action using a property.
        properties2 = [ $(property-set).raw ] ;
        local fsa ;
        local fst ;
        for local s in $(sources)
        {
            if [ type.is-derived [ $(s).type ] SEARCHED_LIB ]
            {
                local name = [ $(s).name ] ;
                if [ $(s).shared ]
                {
                    fsa += $(name) ;
                }
                else
                {
                    fst += $(name) ;
                }
            }
            else
            {
                sources2 += $(s) ;
            }
        }
        # Join the names with && since a property value can not contain spaces.
        properties2 += <find-shared-library>$(fsa:J=&&)
            <find-static-library>$(fst:J=&&) ;

        return [ generator.generated-targets $(sources2)
            : [ property-set.create $(properties2) ] : $(project) $(name) ] ;
    }
}
# Convenience rule: create a linking-generator from the given parameters and
# add it to the generator registry.
rule register-linker ( id composing ? : source-types + : target-types +
    : requirements * )
{
    local g = [ new linking-generator $(id) $(composing)
        : $(source-types) : $(target-types) : $(requirements) ] ;
    generators.register $(g) ;
}
# The generator class for handling STATIC_LIB creation.
#
class archive-generator : generator
{
    import property-set ;

    rule __init__ ( id composing ? : source-types + : target-types +
        : requirements * )
    {
        # Archive generators compose by default.
        composing ?= true ;
        generator.__init__ $(id) $(composing) : $(source-types)
            : $(target-types) : $(requirements) ;
    }

    rule run ( project name ? : property-set : sources + )
    {
        sources += [ $(property-set).get <library> ] ;

        local result = [ generator.run $(project) $(name) : $(property-set)
            : $(sources) ] ;

        # For static linking, if we get a library in source, we can not directly
        # link to it so we need to cause our dependencies to link to that
        # library. There are two approaches:
        # - adding the library to the list of returned targets.
        # - using the <library> usage requirements.
        # The problem with the first is:
        #
        #     lib a1 : : <file>liba1.a ;
        #     lib a2 : a2.cpp a1 : <link>static ;
        #     install dist : a2 ;
        #
        # here we will try to install 'a1', even though it is not necessary in
        # the general case. With the second approach, even indirect dependants
        # will link to the library, but it should not cause any harm. So, return
        # all LIB sources together with created targets, so that dependants link
        # to them.
        local usage-requirements ;
        if [ $(property-set).get <link> ] = static
        {
            for local t in $(sources)
            {
                if [ type.is-derived [ $(t).type ] LIB ]
                {
                    usage-requirements += <library>$(t) ;
                }
            }
        }

        usage-requirements = [ property-set.create $(usage-requirements) ] ;

        return $(usage-requirements) $(result) ;
    }
}
# Convenience rule: create an archive-generator from the given parameters and
# add it to the generator registry.
rule register-archiver ( id composing ? : source-types + : target-types +
    : requirements * )
{
    local g = [ new archive-generator $(id) $(composing)
        : $(source-types) : $(target-types) : $(requirements) ] ;
    generators.register $(g) ;
}
# Generator that accepts everything and produces nothing. Useful as a general
# fallback for toolset-specific actions like PCH generation.
#
class dummy-generator : generator
{
    import property-set ;

    # Produce no targets and an empty usage-requirements property set.
    rule run ( project name ? : property-set : sources + )
    {
        return [ property-set.empty ] ;
    }
}

IMPORT $(__name__) : register-linker register-archiver
    : : generators.register-linker generators.register-archiver ;

View File

@ -0,0 +1,91 @@
# Copyright 2005 Vladimir Prus.
# Distributed under the Boost Software License, Version 1.0. (See
# accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
# Defines main target 'cast', used to change type for target. For example, in Qt
# library one wants two kinds of CPP files -- those that just compiled and those
# that are passed via the MOC tool.
#
# This is done with:
#
# exe main : main.cpp [ cast _ moccable-cpp : widget.cpp ] ;
#
# Boost.Build will assign target type CPP to both main.cpp and widget.cpp. Then,
# the cast rule will change target type of widget.cpp to MOCCABLE-CPP, and Qt
# support will run the MOC tool as part of the build process.
#
# At the moment, the 'cast' rule only works for non-derived (source) targets.
#
# TODO: The following comment is unclear or incorrect. Clean it up.
# > Another solution would be to add a separate main target 'moc-them' that
# > would moc all the passed sources, no matter what their type is, but I prefer
# > cast, as defining a new target type + generator for that type is somewhat
# > simpler than defining a main target rule.
import "class" : new ;
import errors ;
import project ;
import property-set ;
import targets ;
import type ;
# A typed-target that, instead of building its sources, clones each one with a
# different target type.
class cast-target-class : typed-target
{
    import type ;

    rule __init__ ( name : project : type : sources * : requirements * :
        default-build * : usage-requirements * )
    {
        typed-target.__init__ $(name) : $(project) : $(type) : $(sources) :
            $(requirements) : $(default-build) : $(usage-requirements) ;
    }

    # Clones every source target with the new type. Only non-derived file
    # targets (sources with no build action) are accepted.
    rule construct ( name : source-targets * : property-set )
    {
        local result ;
        for local s in $(source-targets)
        {
            if ! [ class.is-a $(s) : file-target ]
            {
                import errors ;
                errors.user-error Source to the 'cast' rule is not a file! ;
            }
            if [ $(s).action ]
            {
                import errors ;
                errors.user-error Only non-derived target are allowed for
                    'cast'. : when building [ full-name ] ;
            }
            local r = [ $(s).clone-with-different-type $(self.type) ] ;
            result += [ virtual-target.register $(r) ] ;
        }
        return [ property-set.empty ] $(result) ;
    }
}
# Main target rule changing the type of its sources, e.g.
#     [ cast _ moccable-cpp : widget.cpp ]
# 'type' is given in main-target-rule-name form (lowercase) and is mapped back
# to the real type via type.type-from-rule-name.
rule cast ( name type : sources * : requirements * : default-build * :
    usage-requirements * )
{
    local project = [ project.current ] ;

    local real-type = [ type.type-from-rule-name $(type) ] ;
    if ! $(real-type)
    {
        errors.user-error No type corresponds to the main target rule name
            '$(type)' : "Hint: try a lowercase name" ;
    }

    targets.main-target-alternative [ new cast-target-class $(name) : $(project)
        : $(real-type)
        : [ targets.main-target-sources $(sources) : $(name) ]
        : [ targets.main-target-requirements $(requirements) : $(project) ]
        : [ targets.main-target-default-build $(default-build) : $(project) ]
        : [ targets.main-target-usage-requirements $(usage-requirements) :
            $(project) ] ] ;
}
IMPORT $(__name__) : cast : : cast ;

View File

@ -0,0 +1,170 @@
# Copyright Vladimir Prus 2004.
# Copyright Noel Belcourt 2007.
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt
# or copy at http://www.boost.org/LICENSE_1_0.txt)
import clang ;
import feature : feature ;
import os ;
import toolset ;
import toolset : flags ;
import gcc ;
import common ;
import errors ;
import generators ;
# Register "darwin" as a value of the clang toolset's "platform" subfeature.
feature.extend-subfeature toolset clang : platform : darwin ;
# Inherit gcc's generators, minus the MinGW linkers and PCH generators.
toolset.inherit-generators clang-darwin
 <toolset>clang <toolset-clang:platform>darwin
: gcc
# Don't inherit PCH generators. They were not tested, and probably
# don't work for this compiler.
: gcc.mingw.link gcc.mingw.link.dll gcc.compile.c.pch gcc.compile.c++.pch
;
# Prefer this toolset's handling of prebuilt/searched libraries over the
# generic built-in generators.
generators.override clang-darwin.prebuilt : builtin.lib-generator ;
generators.override clang-darwin.prebuilt : builtin.prebuilt ;
generators.override clang-darwin.searched-lib-generator : searched-lib-generator ;
toolset.inherit-rules clang-darwin : gcc ;
# Inherit gcc's flags except those re-declared for clang below.
toolset.inherit-flags clang-darwin : gcc
: <inlining>off <inlining>on <inlining>full <optimization>space
<warnings>off <warnings>all <warnings>on
<architecture>x86/<address-model>32
<architecture>x86/<address-model>64
;
# Remember whether --debug-configuration was given on the command line.
if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
{
.debug-configuration = true ;
}
# vectorization diagnostics
feature vectorize : off on full ;
# Initializes the clang-darwin toolset.
# version is optional
# name (default clang++) is used to invoke the specified clang compiler
# compile and link options allow you to specify additional command line options for each version
rule init ( version ? : command * : options * )
{
command = [ common.get-invocation-command clang-darwin : clang++
: $(command) ] ;
# Determine the version
local command-string = $(command:J=" ") ;
if $(command)
{
# Ask the compiler itself when no version was given explicitly.
version ?= [ MATCH "^([0-9.]+)"
: [ SHELL "$(command-string) -dumpversion" ] ] ;
}
local condition = [ common.check-init-parameters clang-darwin
: version $(version) ] ;
common.handle-options clang-darwin : $(condition) : $(command) : $(options) ;
gcc.init-link-flags clang-darwin darwin $(condition) ;
}
SPACE = " " ;
# Pass user <cflags>/<cxxflags> through to the compile actions' OPTIONS.
flags clang-darwin.compile OPTIONS <cflags> ;
flags clang-darwin.compile OPTIONS <cxxflags> ;
# flags clang-darwin.compile INCLUDES <include> ;
# Declare flags and action for compilation.
toolset.flags clang-darwin.compile OPTIONS <optimization>off : -O0 ;
toolset.flags clang-darwin.compile OPTIONS <optimization>speed : -O3 ;
toolset.flags clang-darwin.compile OPTIONS <optimization>space : -Os ;
toolset.flags clang-darwin.compile OPTIONS <inlining>off : -fno-inline ;
toolset.flags clang-darwin.compile OPTIONS <inlining>on : -Wno-inline ;
toolset.flags clang-darwin.compile OPTIONS <inlining>full : -finline-functions -Wno-inline ;
toolset.flags clang-darwin.compile OPTIONS <warnings>off : -w ;
toolset.flags clang-darwin.compile OPTIONS <warnings>on : -Wall ;
toolset.flags clang-darwin.compile OPTIONS <warnings>all : -Wall -pedantic ;
toolset.flags clang-darwin.compile OPTIONS <warnings-as-errors>on : -Werror ;
toolset.flags clang-darwin.compile OPTIONS <debug-symbols>on : -g ;
toolset.flags clang-darwin.compile OPTIONS <profiling>on : -pg ;
toolset.flags clang-darwin.compile OPTIONS <rtti>off : -fno-rtti ;
# Compile a C source; -x c forces the language regardless of file extension.
actions compile.c
{
"$(CONFIG_COMMAND)" -x c $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
}
# Compile a C++ source.
actions compile.c++
{
"$(CONFIG_COMMAND)" -x c++ $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
}
# NOTE(review): this declares ARFLAGS, but the 'archive' action below expands
# $(AROPTIONS) -- ARFLAGS looks unused here. AROPTIONS may be inherited from
# gcc via toolset.inherit-flags; confirm, or change this line to set AROPTIONS.
flags clang-darwin ARFLAGS <archiveflags> ;
# Default value. Mostly for the sake of clang-linux
# that inherits from gcc, but does not have the same
# logic to set the .AR variable. We can put the same
# logic in clang-linux, but that's hardly worth the trouble
# as on Linux, 'ar' is always available.
.AR = ar ;
# Creates a static archive. The old archive file is removed first (via an
# auxiliary "(clean)" target) and then rebuilt from scratch.
rule archive ( targets * : sources * : properties * )
{
# Always remove archive and start again. Here's rationale from
# Andre Hentz:
#
# I had a file, say a1.c, that was included into liba.a.
# I moved a1.c to a2.c, updated my Jamfiles and rebuilt.
# My program was crashing with absurd errors.
# After some debugging I traced it back to the fact that a1.o was *still*
# in liba.a
#
# Rene Rivera:
#
# Originally removing the archive was done by splicing an RM
# onto the archive action. That makes archives fail to build on NT
# when they have many files because it will no longer execute the
# action directly and blow the line length limit. Instead we
# remove the file in a different action, just before the building
# of the archive.
#
# Auxiliary target (same name gristed with "(clean)") whose action removes
# the existing archive; it is located next to the real archive and ordered
# before it via the DEPENDS edges below.
local clean.a = $(targets[1])(clean) ;
TEMPORARY $(clean.a) ;
NOCARE $(clean.a) ;
LOCATE on $(clean.a) = [ on $(targets[1]) return $(LOCATE) ] ;
DEPENDS $(clean.a) : $(sources) ;
DEPENDS $(targets) : $(clean.a) ;
common.RmTemps $(clean.a) : $(targets) ;
}
# 'piecemeal' lets bjam split long source lists over several ar invocations.
actions piecemeal archive
{
"$(.AR)" $(AROPTIONS) rc "$(<)" "$(>)"
"ranlib" -cs "$(<)"
}
# User <linkflags> flow into USER_OPTIONS for the link actions below.
flags clang-darwin.link USER_OPTIONS <linkflags> ;
# Declare actions for linking
rule link ( targets * : sources * : properties * )
{
SPACE on $(targets) = " " ;
# Serialize execution of the 'link' action, since
# running N links in parallel is just slower.
JAM_SEMAPHORE on $(targets) = <s>clang-darwin-link-semaphore ;
}
actions link bind LIBRARIES
{
"$(CONFIG_COMMAND)" $(USER_OPTIONS) -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS)
}
# Shared library link: Darwin-specific -dynamiclib with an install_name taken
# from the output file's base name.
actions link.dll bind LIBRARIES
{
"$(CONFIG_COMMAND)" $(USER_OPTIONS) -L"$(LINKPATH)" -o "$(<)" -single_module -dynamiclib -install_name "$(<[1]:D=)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS)
}

View File

@ -0,0 +1,196 @@
# Copyright (c) 2003 Michael Stevens
# Copyright (c) 2010-2011 Bryce Lelbach (blelbach@cct.lsu.edu, maintainer)
#
# Use, modification and distribution is subject to the Boost Software
# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
# http://www.boost.org/LICENSE_1_0.txt)
import toolset ;
import feature ;
import toolset : flags ;
import clang ;
import gcc ;
import common ;
import errors ;
import generators ;
import type ;
import numbers ;
# Register "linux" as a value of the clang toolset's "platform" subfeature.
feature.extend-subfeature toolset clang : platform : linux ;
# Inherit gcc's generators except the Windows-specific linkers.
toolset.inherit-generators clang-linux
 <toolset>clang <toolset-clang:platform>linux : gcc
: gcc.mingw.link gcc.mingw.link.dll gcc.cygwin.link gcc.cygwin.link.dll ;
generators.override clang-linux.prebuilt : builtin.lib-generator ;
generators.override clang-linux.prebuilt : builtin.prebuilt ;
generators.override clang-linux.searched-lib-generator : searched-lib-generator ;
# Override default do-nothing generators.
generators.override clang-linux.compile.c.pch : pch.default-c-pch-generator ;
generators.override clang-linux.compile.c++.pch : pch.default-cpp-pch-generator ;
# clang emits pre-tokenized headers with a .pth suffix (see the PCH actions).
type.set-generated-target-suffix PCH
: <toolset>clang <toolset-clang:platform>linux : pth ;
toolset.inherit-rules clang-linux : gcc ;
# Inherit gcc's flags except those re-declared for clang below.
toolset.inherit-flags clang-linux : gcc
: <inlining>off <inlining>on <inlining>full
<optimization>space <optimization>speed
<warnings>off <warnings>all <warnings>on ;
# Remember whether --debug-configuration was given on the command line.
if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] {
.debug-configuration = true ;
}
# Initializes the clang-linux toolset. 'version' is optional; when omitted it
# is extracted from the "version X.Y" line of `clang++ --version` output.
rule init ( version ? : command * : options * ) {
command = [ common.get-invocation-command clang-linux : clang++
: $(command) ] ;
# Determine the version
local command-string = $(command:J=" ") ;
if $(command) {
version ?= [ MATCH "version ([0-9.]+)"
: [ SHELL "$(command-string) --version" ] ] ;
}
local condition = [ common.check-init-parameters clang-linux
: version $(version) ] ;
common.handle-options clang-linux : $(condition) : $(command) : $(options) ;
gcc.init-link-flags clang-linux gnu $(condition) ;
}
###############################################################################
# Flags
# User <cflags>/<cxxflags> flow into OPTIONS for the compile actions.
toolset.flags clang-linux.compile OPTIONS <cflags> ;
toolset.flags clang-linux.compile OPTIONS <cxxflags> ;
# <optimization>off intentionally maps to no flag at all.
toolset.flags clang-linux.compile OPTIONS <optimization>off : ;
toolset.flags clang-linux.compile OPTIONS <optimization>speed : -O3 ;
toolset.flags clang-linux.compile OPTIONS <optimization>space : -Os ;
# note: clang silently ignores some of these inlining options
toolset.flags clang-linux.compile OPTIONS <inlining>off : -fno-inline ;
toolset.flags clang-linux.compile OPTIONS <inlining>on : -Wno-inline ;
toolset.flags clang-linux.compile OPTIONS <inlining>full : -finline-functions -Wno-inline ;
toolset.flags clang-linux.compile OPTIONS <warnings>off : -w ;
toolset.flags clang-linux.compile OPTIONS <warnings>on : -Wall ;
toolset.flags clang-linux.compile OPTIONS <warnings>all : -Wall -pedantic ;
toolset.flags clang-linux.compile OPTIONS <warnings-as-errors>on : -Werror ;
toolset.flags clang-linux.compile OPTIONS <debug-symbols>on : -g ;
toolset.flags clang-linux.compile OPTIONS <profiling>on : -pg ;
toolset.flags clang-linux.compile OPTIONS <rtti>off : -fno-rtti ;
###############################################################################
# C and C++ compilation
# Compile a C++ source. If a pre-tokenized header (PCH_FILE) is bound to the
# target, depend on it and use the with-pch action; otherwise compile plainly.
rule compile.c++ ( targets * : sources * : properties * ) {
gcc.setup-threading $(targets) : $(sources) : $(properties) ;
gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
local pth-file = [ on $(<) return $(PCH_FILE) ] ;
if $(pth-file) {
DEPENDS $(<) : $(pth-file) ;
compile.c++.with-pch $(targets) : $(sources) ;
}
else {
compile.c++.without-pth $(targets) : $(sources) ;
}
}
actions compile.c++.without-pth {
"$(CONFIG_COMMAND)" -c -x c++ $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -o "$(<)" "$(>)"
}
# The pre-tokenized header is handed to the clang frontend via -Xclang.
actions compile.c++.with-pch bind PCH_FILE
{
"$(CONFIG_COMMAND)" -c -x c++ $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -Xclang -include-pth -Xclang "$(PCH_FILE)" -o "$(<)" "$(>)"
}
# Compile a C source. If a pre-tokenized header (PCH_FILE) is bound to the
# target, depend on it and use the with-pch action; otherwise compile plainly.
rule compile.c ( targets * : sources * : properties * )
{
gcc.setup-threading $(targets) : $(sources) : $(properties) ;
gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
local pth-file = [ on $(<) return $(PCH_FILE) ] ;
if $(pth-file) {
DEPENDS $(<) : $(pth-file) ;
compile.c.with-pch $(targets) : $(sources) ;
}
else {
compile.c.without-pth $(targets) : $(sources) ;
}
}
# NOTE(review): '-c' appears twice on the command lines of both C actions
# below (once after CONFIG_COMMAND, once before -o) -- a harmless duplicate.
actions compile.c.without-pth
{
"$(CONFIG_COMMAND)" -c -x c $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
}
actions compile.c.with-pch bind PCH_FILE
{
"$(CONFIG_COMMAND)" -c -x c $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -Xclang -include-pth -Xclang "$(PCH_FILE)" -c -o "$(<)" "$(>)"
}
###############################################################################
# PCH emission
# Emit a pre-tokenized header (.pth) from a C++ header.
rule compile.c++.pch ( targets * : sources * : properties * ) {
gcc.setup-threading $(targets) : $(sources) : $(properties) ;
gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
}
# Remove any stale output first, then ask clang to emit the .pth file.
actions compile.c++.pch {
rm -f "$(<)" && "$(CONFIG_COMMAND)" -x c++-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -Xclang -emit-pth -o "$(<)" "$(>)"
}
# Emit a pre-tokenized header (.pth) from a C header.
rule compile.c.pch ( targets * : sources * : properties * ) {
gcc.setup-threading $(targets) : $(sources) : $(properties) ;
gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
}
actions compile.c.pch
{
rm -f "$(<)" && "$(CONFIG_COMMAND)" -x c-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -Xclang -emit-pth -o "$(<)" "$(>)"
}
###############################################################################
# Linking
SPACE = " " ;
# Serialize execution of the link actions through a semaphore, since running
# N links in parallel is just slower.
rule link ( targets * : sources * : properties * ) {
gcc.setup-threading $(targets) : $(sources) : $(properties) ;
gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
SPACE on $(targets) = " " ;
JAM_SEMAPHORE on $(targets) = <s>clang-linux-link-semaphore ;
}
actions link bind LIBRARIES {
"$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS) $(USER_OPTIONS)
}
rule link.dll ( targets * : sources * : properties * ) {
gcc.setup-threading $(targets) : $(sources) : $(properties) ;
gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
SPACE on $(targets) = " " ;
JAM_SEMAPHORE on $(targets) = <s>clang-linux-link-semaphore ;
}
# Differ from 'link' above only by -shared.
actions link.dll bind LIBRARIES {
"$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -o "$(<)" -Wl,-soname$(SPACE)-Wl,$(<[1]:D=) -shared "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS) $(USER_OPTIONS)
}

View File

@ -0,0 +1,27 @@
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt
# or copy at http://www.boost.org/LICENSE_1_0.txt)
# This is a generic 'clang' toolset. Depending on the current system, it
# forwards either to the 'clang-linux' or 'clang-darwin' modules.
import feature ;
import os ;
import toolset ;
# Declare the 'clang' toolset and its 'platform' subfeature; platform values
# are marked link-incompatible so darwin/linux build results are never mixed.
feature.extend toolset : clang ;
feature.subfeature toolset clang : platform : : propagated link-incompatible ;
# Forwards all initialization arguments to the platform-specific clang
# module: clang-darwin on Mac OS X, clang-linux everywhere else.
rule init ( * : * )
{
    local subtool = clang-linux ;
    if [ os.name ] = MACOSX
    {
        subtool = clang-darwin ;
    }
    toolset.using $(subtool) :
        $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
}

View File

@ -0,0 +1,986 @@
# Copyright 2003, 2005 Dave Abrahams
# Copyright 2005, 2006 Rene Rivera
# Copyright 2005 Toon Knapen
# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Provides actions common to all toolsets, such as creating directories and
# removing files.
import os ;
import modules ;
import utility ;
import print ;
import type ;
import feature ;
import errors ;
import path ;
import sequence ;
import toolset ;
import virtual-target ;
# Cache the presence of the --debug-configuration and --show-configuration
# command line options; the rules below use them to decide how verbose to be.
if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
{
.debug-configuration = true ;
}
if [ MATCH (--show-configuration) : [ modules.peek : ARGV ] ]
{
.show-configuration = true ;
}
# Configurations
#
# The following class helps to manage toolset configurations. Each configuration
# has a unique ID and one or more parameters. A typical example of a unique ID
# is a condition generated by 'common.check-init-parameters' rule. Other kinds
# of IDs can be used. Parameters may include any details about the configuration
# like 'command', 'path', etc.
#
# A toolset configuration may be in one of the following states:
#
# - registered
# Configuration has been registered (e.g. explicitly or by auto-detection
# code) but has not yet been marked as used, i.e. 'toolset.using' rule has
# not yet been called for it.
# - used
# Once called 'toolset.using' rule marks the configuration as 'used'.
#
# The main difference between the states above is that while a configuration is
# 'registered' its options can be freely changed. This is useful in particular
# for autodetection code - all detected configurations may be safely overwritten
# by user code.
class configurations
{
import errors ;
rule __init__ ( )
{
}
# Registers a configuration.
#
# Returns 'true' if the configuration has been added and an empty value if
# it already exists. Reports an error if the configuration is 'used'.
#
rule register ( id )
{
if $(id) in $(self.used)
{
errors.error "common: the configuration '$(id)' is in use" ;
}
local retval ;
if ! $(id) in $(self.all)
{
self.all += $(id) ;
# Indicate that a new configuration has been added.
retval = true ;
}
return $(retval) ;
}
# Mark a configuration as 'used'.
#
# Returns 'true' if the state of the configuration has been changed to
# 'used' and an empty value if it the state has not been changed. Reports an
# error if the configuration is not known.
#
rule use ( id )
{
if ! $(id) in $(self.all)
{
errors.error "common: the configuration '$(id)' is not known" ;
}
local retval ;
if ! $(id) in $(self.used)
{
self.used += $(id) ;
# Indicate that the configuration has been marked as 'used'.
retval = true ;
}
return $(retval) ;
}
# Return all registered configurations.
#
rule all ( )
{
return $(self.all) ;
}
# Return all used configurations.
#
rule used ( )
{
return $(self.used) ;
}
# Returns the value of a configuration parameter.
#
# Parameters are stored in instance variables named self.<param>.<id>.
rule get ( id : param )
{
return $(self.$(param).$(id)) ;
}
# Sets the value of a configuration parameter.
#
rule set ( id : param : value * )
{
self.$(param).$(id) = $(value) ;
}
}
# The rule for checking toolset parameters. Trailing parameters should all be
# parameter name/value pairs. The rule will check that each parameter either has
# a value in each invocation or has no value in each invocation. Also, the rule
# will check that the combination of all parameter values is unique in all
# invocations.
#
# Each parameter name corresponds to a subfeature. This rule will declare a
# subfeature the first time a non-empty parameter value is passed and will
# extend it with all the values.
#
# The return value from this rule is a condition to be used for flags settings.
#
rule check-init-parameters ( toolset requirement * : * )
{
local sig = $(toolset) ;
local condition = <toolset>$(toolset) ;
local subcondition ;
# Arguments 2..9 hold the trailing name/value pairs.
for local index in 2 3 4 5 6 7 8 9
{
local name = $($(index)[1]) ;
local value = $($(index)[2]) ;
# The '-is-not-empty' suffix makes the test well-formed even when
# $(value) is an empty list.
if $(value)-is-not-empty
{
condition = $(condition)-$(value) ;
# A value given now, after an earlier valueless initialization, is
# an inconsistency.
if $(.had-unspecified-value.$(toolset).$(name))
{
errors.user-error
"$(toolset) initialization: parameter '$(name)'"
"inconsistent" : "no value was specified in earlier"
"initialization" : "an explicit value is specified now" ;
}
# The below logic is for intel compiler. It calls this rule with
# 'intel-linux' and 'intel-win' as toolset, so we need to get the
# base part of toolset name. We can not pass 'intel' as toolset
# because in that case it will be impossible to register versionless
# intel-linux and intel-win toolsets of a specific version.
local t = $(toolset) ;
local m = [ MATCH ([^-]*)- : $(toolset) ] ;
if $(m)
{
t = $(m[1]) ;
}
# Declare the subfeature lazily, on first non-empty value.
if ! $(.had-value.$(toolset).$(name))
{
if ! $(.declared-subfeature.$(t).$(name))
{
feature.subfeature toolset $(t) : $(name) : : propagated ;
.declared-subfeature.$(t).$(name) = true ;
}
.had-value.$(toolset).$(name) = true ;
}
feature.extend-subfeature toolset $(t) : $(name) : $(value) ;
subcondition += <toolset-$(t):$(name)>$(value) ;
}
else
{
if $(.had-value.$(toolset).$(name))
{
errors.user-error
"$(toolset) initialization: parameter '$(name)'"
"inconsistent" : "an explicit value was specified in an"
"earlier initialization" : "no value is specified now" ;
}
.had-unspecified-value.$(toolset).$(name) = true ;
}
# Extend the uniqueness signature with this value (or "" when empty).
sig = $(sig)$(value:E="")- ;
}
# Reject a second initialization with the exact same parameter values.
if $(sig) in $(.all-signatures)
{
local message =
"duplicate initialization of $(toolset) with the following parameters: " ;
for local index in 2 3 4 5 6 7 8 9
{
local p = $($(index)) ;
if $(p)
{
message += "$(p[1]) = $(p[2]:E=<unspecified>)" ;
}
}
message += "previous initialization at $(.init-loc.$(sig))" ;
errors.user-error
$(message[1]) : $(message[2]) : $(message[3]) : $(message[4]) :
$(message[5]) : $(message[6]) : $(message[7]) : $(message[8]) ;
}
.all-signatures += $(sig) ;
.init-loc.$(sig) = [ errors.nearest-user-location ] ;
# If we have a requirement, this version should only be applied under that
# condition. To accomplish this we add a toolset requirement that imposes
# the toolset subcondition, which encodes the version.
if $(requirement)
{
local r = <toolset>$(toolset) $(requirement) ;
r = $(r:J=,) ;
toolset.add-requirements $(r):$(subcondition) ;
}
# We add the requirements, if any, to the condition to scope the toolset
# variables and options to this specific version.
condition += $(requirement) ;
if $(.show-configuration)
{
ECHO notice: $(condition) ;
}
return $(condition:J=/) ;
}
# A helper rule to get the command to invoke some tool. If
# 'user-provided-command' is not given, tries to find binary named 'tool' in
# PATH and in the passed 'additional-path'. Otherwise, verifies that the first
# element of 'user-provided-command' is an existing program.
#
# This rule returns the command to be used when invoking the tool. If we can not
# find the tool, a warning is issued. If 'path-last' is specified, PATH is
# checked after 'additional-paths' when searching for 'tool'.
#
rule get-invocation-command-nodefault ( toolset : tool :
user-provided-command * : additional-paths * : path-last ? )
{
local command ;
if ! $(user-provided-command)
{
command = [ find-tool $(tool) : $(additional-paths) : $(path-last) ] ;
# Warnings are only emitted when --debug-configuration was given.
if ! $(command) && $(.debug-configuration)
{
ECHO "warning: toolset $(toolset) initialization: can not find tool $(tool)" ;
ECHO "warning: initialized from" [ errors.nearest-user-location ] ;
}
}
else
{
command = [ check-tool $(user-provided-command) ] ;
if ! $(command) && $(.debug-configuration)
{
ECHO "warning: toolset $(toolset) initialization: " ;
ECHO "warning: can not find user-provided command " '$(user-provided-command)' ;
ECHO "warning: initialized from" [ errors.nearest-user-location ] ;
}
}
return $(command) ;
}
# Same as get-invocation-command-nodefault, except that if no tool is found,
# returns either the user-provided-command, if present, or the 'tool'
# parameter.
#
rule get-invocation-command ( toolset : tool : user-provided-command * :
    additional-paths * : path-last ? )
{
    # Try the normal lookup first.
    local found = [ get-invocation-command-nodefault $(toolset) : $(tool) :
        $(user-provided-command) : $(additional-paths) : $(path-last) ] ;
    # Fall back to the explicit user command, then to the bare tool name.
    found ?= $(user-provided-command) ;
    found ?= $(tool) ;
    return $(found) ;
}
# Given an invocation command return the absolute path to the command. This
# works even if command has no path element and was found on the PATH.
#
rule get-absolute-tool-path ( command )
{
    local result = $(command:D) ;
    if ! $(result)
    {
        # Bare command name: locate it on PATH (also trying the .exe variant
        # for Windows) and take the directory part of the first hit.
        local hits = [ GLOB [ modules.peek : PATH Path path ] : $(command) $(command).exe ] ;
        result = $(hits[1]:D) ;
    }
    return $(result) ;
}
# Attempts to find tool (binary) named 'name' in PATH and in 'additional-paths'.
# If found in PATH, returns 'name' and if found in additional paths, returns
# absolute name. If the tool is found in several directories, returns the
# first path found. Otherwise, returns an empty string. If 'path-last' is
# specified, PATH is searched after 'additional-paths'.
#
local rule find-tool ( name : additional-paths * : path-last ? )
{
local path = [ path.programs-path ] ;
# Also try a .exe variant so the lookup works on Windows.
local match = [ path.glob $(path) : $(name) $(name).exe ] ;
local additional-match = [ path.glob $(additional-paths) : $(name) $(name).exe ] ;
local result ;
if $(path-last)
{
# Prefer 'additional-paths'; fall back to the bare name when on PATH.
result = $(additional-match) ;
if ! $(result) && $(match)
{
result = $(name) ;
}
}
else
{
# Prefer PATH: return just the name when found there.
if $(match)
{
result = $(name) ;
}
else
{
result = $(additional-match) ;
}
}
if $(result)
{
return [ path.native $(result[1]) ] ;
}
}
# Checks if 'command' can be found either in path or is a full name to an
# existing file.
#
local rule check-tool-aux ( command )
{
if $(command:D)
{
# The command has a directory part: accept it only if the file itself,
# or a Windows-runnable variant of it, actually exists.
if [ path.exists $(command) ]
# Both NT and Cygwin will run .exe files by their unqualified names.
|| ( [ os.on-windows ] && [ path.exists $(command).exe ] )
# Only NT will run .bat & .cmd files by their unqualified names.
|| ( ( [ os.name ] = NT ) && ( [ path.exists $(command).bat ] ||
[ path.exists $(command).cmd ] ) )
{
return $(command) ;
}
}
else
{
# Bare name: accept it if it can be found on PATH.
if [ GLOB [ modules.peek : PATH Path path ] : $(command) ]
{
return $(command) ;
}
}
}
# Checks that a tool can be invoked by 'command'. If command is not an absolute
# path, checks if it can be found in 'path'. If command is an absolute path,
# check that it exists. Returns 'command' if ok or empty string otherwise.
#
local rule check-tool ( xcommand + )
{
    # Accept the command when either its first or its last element resolves
    # to an existing/runnable tool.
    local ok = [ check-tool-aux $(xcommand[1]) ] ;
    ok ?= [ check-tool-aux $(xcommand[-1]) ] ;
    if $(ok)
    {
        return $(xcommand) ;
    }
}
# Handle common options for toolset, specifically sets the following flag
# variables:
# - CONFIG_COMMAND to $(command)
# - OPTIONS for compile to the value of <compileflags> in $(options)
# - OPTIONS for compile.c to the value of <cflags> in $(options)
# - OPTIONS for compile.c++ to the value of <cxxflags> in $(options)
# - OPTIONS for compile.fortran to the value of <fflags> in $(options)
# - OPTIONS for link to the value of <linkflags> in $(options)
#
rule handle-options ( toolset : condition * : command * : options * )
{
# Announce the chosen command when --debug-configuration is active.
if $(.debug-configuration)
{
ECHO "notice: will use '$(command)' for $(toolset), condition $(condition:E=(empty))" ;
}
# The last parameter ('unchecked') says it is OK to set flags for another
# module.
toolset.flags $(toolset) CONFIG_COMMAND $(condition) : $(command)
: unchecked ;
toolset.flags $(toolset).compile OPTIONS $(condition) :
[ feature.get-values <compileflags> : $(options) ] : unchecked ;
toolset.flags $(toolset).compile.c OPTIONS $(condition) :
[ feature.get-values <cflags> : $(options) ] : unchecked ;
toolset.flags $(toolset).compile.c++ OPTIONS $(condition) :
[ feature.get-values <cxxflags> : $(options) ] : unchecked ;
toolset.flags $(toolset).compile.fortran OPTIONS $(condition) :
[ feature.get-values <fflags> : $(options) ] : unchecked ;
toolset.flags $(toolset).link OPTIONS $(condition) :
[ feature.get-values <linkflags> : $(options) ] : unchecked ;
}
# Returns the location of the "program files" directory on a Windows platform,
# falling back to the conventional default when the environment variable is
# not set.
#
rule get-program-files-dir ( )
{
    local pf = [ modules.peek : ProgramFiles ] ;
    if $(pf)
    {
        # The value may have been split on spaces; glue it back together.
        pf = "$(pf:J= )" ;
    }
    else
    {
        pf = "c:\\Program Files" ;
    }
    return $(pf) ;
}
# Basic file-manipulation commands for the current platform: RM (delete),
# CP (copy) and LN (link, falling back to copy on NT).
if [ os.name ] = NT
{
RM = del /f /q ;
CP = copy /b ;
IGNORE = "2>nul >nul & setlocal" ;
LN ?= $(CP) ;
# Ugly hack to convince copy to set the timestamp of the
# destination to the current time by concatenating the
# source with a nonexistent file. Note that this requires
# /b (binary) as the default when concatenating files is /a (ascii).
WINDOWS-CP-HACK = "+ this-file-does-not-exist-A698EE7806899E69" ;
}
else
{
RM = rm -f ;
CP = cp ;
LN = ln ;
}
rule rm-command ( )
{
return $(RM) ;
}
rule copy-command ( )
{
return $(CP) ;
}
# Probe whether this jam binary interprets escape sequences: if "\n" collapses
# to "n", escapes are unsupported and a literal newline must be embedded.
if "\n" = "n"
{
# Escape characters are not supported. Use ugly hacks that won't work,
# see below.
nl = "
" ;
q = "" ;
}
else
{
nl = "\n" ;
q = "\"" ;
}
# Returns the command needed to set an environment variable on the current
# platform. The variable setting persists through all following commands and is
# visible in the environment seen by subsequently executed commands. In other
# words, on Unix systems, the variable is exported, which is consistent with the
# only possible behavior on Windows systems.
#
# $(nl) and $(q) come from the escape-support probe earlier in this file.
rule variable-setting-command ( variable : value )
{
if [ os.name ] = NT
{
return "set $(variable)=$(value)$(nl)" ;
}
else
{
# If we don't have escape characters support in bjam, the below blows
# up on CYGWIN, since the $(nl) variable holds a Windows new-line \r\n
# sequence that messes up the executed export command which then reports
# that the passed variable name is incorrect.
# But we have a check for cygwin in kernel/bootstrap.jam already.
return "$(variable)=$(q)$(value)$(q)$(nl)export $(variable)$(nl)" ;
}
}
# Returns a command to set a named shell path variable to the given NATIVE
# paths on the current platform.
#
rule path-variable-setting-command ( variable : paths * )
{
# Join the paths with the platform's path separator.
local sep = [ os.path-separator ] ;
return [ variable-setting-command $(variable) : $(paths:J=$(sep)) ] ;
}
# Returns a command that prepends the given paths to the named path variable on
# the current platform.
#
rule prepend-path-variable-command ( variable : paths * )
{
# The current value of the variable is appended after the new paths.
return [ path-variable-setting-command $(variable)
: $(paths) [ os.expand-variable $(variable) ] ] ;
}
# Return a command which can create a file. If 'r' is result of invocation, then
# 'r foobar' will create foobar with unspecified content. What happens if file
# already exists is unspecified.
#
rule file-creation-command ( )
{
if [ os.name ] = NT
{
# A few alternative implementations on Windows:
#
# 'type NUL >> '
# That would construct an empty file instead of a file containing
# a space and an end-of-line marker but it would also not change
# the target's timestamp in case the file already exists.
#
# 'type NUL > '
# That would construct an empty file instead of a file containing
# a space and an end-of-line marker but it would also destroy an
# already existing file by overwriting it with an empty one.
#
# I guess the best solution would be to allow Boost Jam to define
# built-in functions such as 'create a file', 'touch a file' or 'copy a
# file' which could be used from inside action code. That would allow
# completely portable operations without this kind of kludge.
# (22.02.2009.) (Jurko)
return "echo. > " ;
}
else
{
# On POSIX systems plain 'touch' does the job.
return "touch " ;
}
}
# Returns a command that may be used for 'touching' files. It is not a real
# 'touch' command on NT because it adds an empty line at the end of file but it
# works with source files.
#
rule file-touch-command ( )
{
    local cmd = "touch " ;
    if [ os.name ] = NT
    {
        cmd = "echo. >> " ;
    }
    return $(cmd) ;
}
# Ensures that the directory target $(<) -- and, recursively, all of its
# parents -- gets a mkdir action scheduled at most once, with DEPENDS edges
# from each child directory to its parent.
rule MkDir
{
# If dir exists, do not update it. Do this even for $(DOT).
NOUPDATE $(<) ;
if $(<) != $(DOT) && ! $($(<)-mkdir)
{
# Cheesy gate to prevent multiple invocations on same dir.
$(<)-mkdir = true ;
# Schedule the mkdir build action.
common.mkdir $(<) ;
# Prepare a Jam 'dirs' target that can be used to make the build only
# construct all the target directories.
DEPENDS dirs : $(<) ;
# Recursively create parent directories. $(<:P) = $(<)'s parent & we
# recurse until root.
local s = $(<:P) ;
if [ os.name ] = NT
{
# Stop recursing at a drive root such as "c:" or "c:\".
switch $(s)
{
case *: : s = ;
case *:\\ : s = ;
}
}
if $(s)
{
if $(s) != $(<)
{
DEPENDS $(<) : $(s) ;
MkDir $(s) ;
}
else
{
# $(s) equals $(<): we reached the root, which is not a file.
NOTFILE $(s) ;
}
}
}
}
#actions MkDir1
#{
# mkdir "$(<)"
#}
# The following quick-fix actions should be replaced using the original MkDir1
# action once Boost Jam gets updated to correctly detect different paths leading
# up to the same filesystem target and triggers their build action only once.
# (todo) (04.07.2008.) (Jurko)
# Platform-specific 'mkdir' action; both variants tolerate an existing dir
# (NT via the 'if not exist' guard, POSIX via 'mkdir -p').
if [ os.name ] = NT
{
actions mkdir
{
if not exist "$(<)\\" mkdir "$(<)"
}
}
else
{
actions mkdir
{
mkdir -p "$(<)"
}
}
# Deletes build products; 'existing' limits $(>) to files actually present.
actions piecemeal together existing Clean
{
$(RM) "$(>)"
}
# Empty rule so the 'copy' action can be attached to targets.
rule copy
{
}
actions copy
{
$(CP) "$(>)" $(WINDOWS-CP-HACK) "$(<)"
}
# Empty rule so the 'RmTemps' action can be attached to targets.
rule RmTemps
{
}
actions quietly updated piecemeal together RmTemps
{
$(RM) "$(>)" $(IGNORE)
}
# Removes the target, then (hard-)links the source to it.
# NOTE(review): NULL_OUT is not set anywhere in this file -- presumably
# defined elsewhere; confirm before relying on this action.
actions hard-link
{
$(RM) "$(<)" 2$(NULL_OUT) $(NULL_OUT)
$(LN) "$(>)" "$(<)" $(NULL_OUT)
}
# Given a target, as given to a custom tag rule, returns a string formatted
# according to the passed format. Format is a list of properties that is
# represented in the result. For each element of format the corresponding target
# information is obtained and added to the result string. For all, but the
# literal, the format value is taken as the as string to prepend to the output
# to join the item to the rest of the result. If not given "-" is used as a
# joiner.
#
# The format options can be:
#
# <base>[joiner]
# :: The basename of the target name.
# <toolset>[joiner]
# :: The abbreviated toolset tag being used to build the target.
# <threading>[joiner]
# :: Indication of a multi-threaded build.
# <runtime>[joiner]
# :: Collective tag of the build runtime.
# <version:/version-feature | X.Y[.Z]/>[joiner]
# :: Short version tag taken from the given "version-feature" in the
# build properties. Or if not present, the literal value as the
# version number.
# <property:/property-name/>[joiner]
# :: Direct lookup of the given property-name value in the build
# properties. /property-name/ is a regular expression. E.g.
# <property:toolset-.*:flavor> will match every toolset.
# /otherwise/
# :: The literal value of the format argument.
#
# For example this format:
#
# boost_ <base> <toolset> <threading> <runtime> <version:boost-version>
#
# Might return:
#
# boost_thread-vc80-mt-gd-1_33.dll, or
# boost_regex-vc80-gd-1_33.dll
#
# The returned name also has the target type specific prefix and suffix which
# puts it in a ready form to use as the value from a custom tag rule.
#
rule format-name ( format * : name : type ? : property-set )
{
local result = "" ;
for local f in $(format)
{
# Dispatch on the grist of each format element.
switch $(f:G)
{
case <base> :
result += $(name:B) ;
case <toolset> :
result += [ join-tag $(f:G=) : [ toolset-tag $(name) : $(type) :
$(property-set) ] ] ;
case <threading> :
result += [ join-tag $(f:G=) : [ threading-tag $(name) : $(type)
: $(property-set) ] ] ;
case <runtime> :
result += [ join-tag $(f:G=) : [ runtime-tag $(name) : $(type) :
$(property-set) ] ] ;
case <qt> :
result += [ join-tag $(f:G=) : [ qt-tag $(name) : $(type) :
$(property-set) ] ] ;
case <address-model> :
result += [ join-tag $(f:G=) : [ address-model-tag $(name) : $(type) :
$(property-set) ] ] ;
case <version:*> :
# Use the named version feature when present, else the literal key,
# then keep only the major/minor components.
local key = [ MATCH <version:(.*)> : $(f:G) ] ;
local version = [ $(property-set).get <$(key)> ] ;
version ?= $(key) ;
version = [ MATCH "^([^.]+)[.]([^.]+)[.]?([^.]*)" : $(version) ] ;
result += [ join-tag $(f:G=) : $(version[1])_$(version[2]) ] ;
case <property:*> :
# Look up the first property whose name matches the regex.
local key = [ MATCH <property:(.*)> : $(f:G) ] ;
local p0 = [ MATCH <($(key))> : [ $(property-set).raw ] ] ;
if $(p0)
{
local p = [ $(property-set).get <$(p0)> ] ;
if $(p)
{
result += [ join-tag $(f:G=) : $(p) ] ;
}
}
case * :
result += $(f:G=) ;
}
}
# Add the target-type-specific prefix and suffix to the joined name.
result = [ virtual-target.add-prefix-and-suffix $(result:J=) : $(type) :
$(property-set) ] ;
return $(result) ;
}
# Glues 'tag' onto 'joiner', defaulting the joiner to "-".
local rule join-tag ( joiner ? : tag ? )
{
    joiner ?= - ;
    return $(joiner)$(tag) ;
}
# Maps the <toolset> property (and its version) to the abbreviated tag used
# in mangled library names (e.g. msvc -> vc, borland -> bcb).
local rule toolset-tag ( name : type ? : property-set )
{
local tag = ;
local properties = [ $(property-set).raw ] ;
switch [ $(property-set).get <toolset> ]
{
case borland* : tag += bcb ;
case clang* :
{
# clang's tag depends on the platform subfeature.
switch [ $(property-set).get <toolset-clang:platform> ]
{
case darwin : tag += clang-darwin ;
case linux : tag += clang ;
}
}
case como* : tag += como ;
case cw : tag += cw ;
case darwin* : tag += xgcc ;
case edg* : tag += edg ;
case gcc* :
{
switch [ $(property-set).get <toolset-gcc:flavor> ]
{
case *mingw* : tag += mgw ;
case * : tag += gcc ;
}
}
case intel :
if [ $(property-set).get <toolset-intel:platform> ] = win
{
tag += iw ;
}
else
{
tag += il ;
}
case kcc* : tag += kcc ;
case kylix* : tag += bck ;
#case metrowerks* : tag += cw ;
#case mingw* : tag += mgw ;
case mipspro* : tag += mp ;
case msvc* : tag += vc ;
case qcc* : tag += qcc ;
case sun* : tag += sw ;
case tru64cxx* : tag += tru ;
case vacpp* : tag += xlc ;
}
# Extract major/minor toolset version from the raw properties.
local version = [ MATCH "<toolset.*version>([0123456789]+)[.]([0123456789]*)"
: $(properties) ] ;
# For historical reasons, vc6.0 and vc7.0 use different naming.
if $(tag) = vc
{
if $(version[1]) = 6
{
# Cancel minor version.
version = 6 ;
}
else if $(version[1]) = 7 && $(version[2]) = 0
{
version = 7 ;
}
}
# On intel, version is not added, because it does not matter and it is the
# version of vc used as backend that matters. Ideally, we should encode the
# backend version but that would break compatibility with V1.
if $(tag) = iw
{
version = ;
}
# On borland, version is not added for compatibility with V1.
if $(tag) = bcb
{
version = ;
}
tag += $(version) ;
return $(tag:J=) ;
}
# Produce the threading component of a decorated name: "mt" when the target
# is built with <threading>multi, nothing otherwise.
local rule threading-tag ( name : type ? : property-set )
{
    local result ;
    if <threading>multi in [ $(property-set).raw ]
    {
        result = mt ;
    }
    return $(result:J=) ;
}
# Produce the runtime component of a decorated name: a string of
# single-letter markers (in a fixed order) describing runtime link mode,
# runtime debugging, python debugging, variant and stdlib choices.
local rule runtime-tag ( name : type ? : property-set )
{
    local tag = ;

    local properties = [ $(property-set).raw ] ;
    if <runtime-link>static in $(properties) { tag += s ; }

    # This is an ugly thing. In V1, there is code to automatically detect which
    # properties affect a target. So, if <runtime-debugging> does not affect gcc
    # toolset, the tag rules will not even see <runtime-debugging>. Similar
    # functionality in V2 is not implemented yet, so we just check for toolsets
    # known to care about runtime debugging.
    if ( <toolset>msvc in $(properties) ) ||
       ( <stdlib>stlport in $(properties) ) ||
       ( <toolset-intel:platform>win in $(properties) )
    {
        if <runtime-debugging>on in $(properties) { tag += g ; }
    }

    if <python-debugging>on in $(properties) { tag += y ; }
    if <variant>debug in $(properties) { tag += d ; }
    if <stdlib>stlport in $(properties) { tag += p ; }
    if <stdlib-stlport:iostream>hostios in $(properties) { tag += n ; }

    return $(tag:J=) ;
}
# Create a tag for the Qt library version
# "<qt>4.6.0" will result in tag "qt460"
local rule qt-tag ( name : type ? : property-set )
{
    local qt-version = [ $(property-set).get <qt> ] ;
    # Split up to three numeric components; the separating dots are dropped.
    local digits = [ MATCH "([0123456789]+)[.]?([0123456789]*)[.]?([0123456789]*)"
        : $(qt-version) ] ;
    return "qt"$(digits:J=) ;
}
# Create a tag for the address-model
# <address-model>64 will simply generate "64"
local rule address-model-tag ( name : type ? : property-set )
{
    # The raw <address-model> value is the tag itself (empty when unset).
    # The unused 'tag' local from the original implementation was removed.
    return [ $(property-set).get <address-model> ] ;
}
# Unit tests for the path-variable helper rules; exercises both the POSIX
# and NT code paths by temporarily poking the os module's .name variable.
rule __test__ ( )
{
    import assert ;

    local nl = "
" ;

    local save-os = [ modules.peek os : .name ] ;

    modules.poke os : .name : LINUX ;
    assert.result "PATH=\"foo:bar:baz\"$(nl)export PATH$(nl)"
        : path-variable-setting-command PATH : foo bar baz ;
    assert.result "PATH=\"foo:bar:$PATH\"$(nl)export PATH$(nl)"
        : prepend-path-variable-command PATH : foo bar ;

    modules.poke os : .name : NT ;
    assert.result "set PATH=foo;bar;baz$(nl)"
        : path-variable-setting-command PATH : foo bar baz ;
    assert.result "set PATH=foo;bar;%PATH%$(nl)"
        : prepend-path-variable-command PATH : foo bar ;

    # Restore the real OS name so later code is unaffected.
    modules.poke os : .name : $(save-os) ;
}

View File

@ -0,0 +1,103 @@
# Copyright 2004, 2005, 2006 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# The following #// line will be used by the regression test table generation
# program as the column heading for HTML tables. Must not include a version
# number.
#//<a href="http://www.comeaucomputing.com/">Comeau<br>C++</a>
import toolset ;
import feature ;
import toolset : flags ;
import common ;
import generators ;
import unix ;
import como ;
# Register 'linux' as a platform subfeature of the como toolset.
feature.extend-subfeature toolset como : platform : linux ;

# como-linux inherits generator machinery from the generic unix toolset,
# and its rules from gcc.
toolset.inherit-generators como-linux
     <toolset>como <toolset-como:platform>linux : unix ;
generators.override como-linux.prebuilt : builtin.lib-generator ;
generators.override como-linux.searched-lib-generator : searched-lib-generator ;
toolset.inherit-flags como-linux : unix ;
toolset.inherit-rules como-linux : gcc ;

# C and C++ compilers producing OBJ, conditioned on como/linux.
generators.register-c-compiler como-linux.compile.c++ : CPP : OBJ
    : <toolset>como <toolset-como:platform>linux ;
generators.register-c-compiler como-linux.compile.c : C : OBJ
    : <toolset>como <toolset-como:platform>linux ;
# Initializes the como-linux toolset instance.
#   version - optional version requirement
#   command - compiler invocation override; 'como' is searched for by default
#   options - generic toolset options handled by common.handle-options
rule init ( version ? : command * : options * )
{
    local condition = [ common.check-init-parameters como-linux
        : version $(version) ] ;

    command = [ common.get-invocation-command como-linux : como
        : $(command) ] ;

    common.handle-options como-linux : $(condition) : $(command) : $(options) ;
}
# Feature-to-option mappings for the Comeau compiler on linux.
flags como-linux C++FLAGS <exception-handling>off : --no_exceptions ;
flags como-linux C++FLAGS <exception-handling>on : --exceptions ;

flags como-linux CFLAGS <inlining>off : --no_inlining ;
flags como-linux CFLAGS <inlining>on <inlining>full : --inlining ;

flags como-linux CFLAGS <optimization>off : -O0 ;
flags como-linux CFLAGS <optimization>speed : -O3 ;
flags como-linux CFLAGS <optimization>space : -Os ;

flags como-linux CFLAGS <debug-symbols>on : -g ;
flags como-linux LINKFLAGS <debug-symbols>on : -g ;

# Always link the math and realtime libraries.
flags como-linux FINDLIBS : m ;
flags como-linux FINDLIBS : rt ;

# Free-form user properties are passed straight through.
flags como-linux CFLAGS <cflags> ;
flags como-linux C++FLAGS <cxxflags> ;
flags como-linux DEFINES <define> ;
flags como-linux UNDEFS <undef> ;
flags como-linux HDRS <include> ;
flags como-linux STDHDRS <sysinclude> ;
flags como-linux LINKFLAGS <linkflags> ;
flags como-linux ARFLAGS <arflags> ;

# Link-stage inputs.
flags como-linux.link LIBRARIES <library-file> ;
flags como-linux.link LINKPATH <library-path> ;
flags como-linux.link FINDLIBS-ST <find-static-library> ;
flags como-linux.link FINDLIBS-SA <find-shared-library> ;
flags como-linux.link RPATH <dll-path> ;
flags como-linux.link RPATH_LINK <xdll-path> ;
# Link an executable.  "$(LIBRARIES)" appears twice so that inter-library
# dependencies resolve regardless of order (same trick as in como-win.jam).
actions link bind LIBRARIES
{
    $(CONFIG_COMMAND) $(LINKFLAGS) -o "$(<[1])" "$(>)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" "$(LIBRARIES)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) 2>&1
}

# Link a shared library (same inputs as 'link' plus -shared).
actions link.dll bind LIBRARIES
{
    $(CONFIG_COMMAND) $(LINKFLAGS) -shared -o "$(<[1])" "$(>)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" "$(LIBRARIES)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) 2>&1
}

# Compile C in C99 mode.
actions compile.c
{
    $(CONFIG_COMMAND) -c --c99 --long_long -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)" 2>&1
}

# Compile C++.
actions compile.c++
{
    $(CONFIG_COMMAND) -tused -c --long_long -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) $(C++FLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)" 2>&1
}

# Build a static archive with the system 'ar'.
actions archive
{
    ar rcu $(<) $(>)
}

View File

@ -0,0 +1,117 @@
# (C) Copyright David Abrahams 2001.
# (C) Copyright MetaCommunications, Inc. 2004.
# Distributed under the Boost Software License, Version 1.0. (See
# accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
# The following #// line will be used by the regression test table generation
# program as the column heading for HTML tables. Must not include a version
# number.
#//<a href="http://www.comeaucomputing.com/">Comeau<br>C++</a>
import common ;
import como ;
import feature ;
import generators ;
import toolset : flags ;
# Register 'win' as a platform subfeature of the como toolset.
feature.extend-subfeature toolset como : platform : win ;

# Initializes the Comeau toolset for windows. The command is the command which
# invokes the compiler. You should either set environment variable
# COMO_XXX_INCLUDE where XXX is the used backend (as described in the
# documentation), or pass that as part of command, e.g:
#
#    using como-win : 4.3 : "set COMO_BCC_INCLUDE=C:/include &&" como.exe ;
#
rule init ( version ? : command * : options * )
{
    local condition = [ common.check-init-parameters como-win
        : version $(version) ] ;

    command = [ common.get-invocation-command como-win : como.exe :
        $(command) ] ;

    common.handle-options como-win : $(condition) : $(command) : $(options) ;
}
# Compiler, linker and archiver generators for como on windows.
generators.register-c-compiler como-win.compile.c++ : CPP : OBJ
    : <toolset>como <toolset-como:platform>win ;
generators.register-c-compiler como-win.compile.c : C : OBJ
    : <toolset>como <toolset-como:platform>win ;

generators.register-linker como-win.link
    : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB
    : EXE
    : <toolset>como <toolset-como:platform>win ;

# Note that status of shared libraries support is not clear, so we do not define
# the link.dll generator.

generators.register-archiver como-win.archive
    : OBJ : STATIC_LIB
    : <toolset>como <toolset-como:platform>win ;

# Feature-to-option mappings.
flags como-win C++FLAGS <exception-handling>off : --no_exceptions ;
flags como-win C++FLAGS <exception-handling>on : --exceptions ;

flags como-win CFLAGS <inlining>off : --no_inlining ;
flags como-win CFLAGS <inlining>on <inlining>full : --inlining ;

# The following seems to be VC-specific options. At least, when I uncomment
# then, Comeau with bcc as backend reports that bcc32 invocation failed.
#
#flags como-win CFLAGS <debug-symbols>on : /Zi ;
#flags como-win CFLAGS <optimization>off : /Od ;

flags como-win CFLAGS <cflags> ;
flags como-win CFLAGS : -D_WIN32 ; # Make sure that we get the Boost Win32 platform config header.
flags como-win CFLAGS <threading>multi : -D_MT ; # Make sure that our config knows that threading is on.
flags como-win C++FLAGS <cxxflags> ;
flags como-win DEFINES <define> ;
flags como-win UNDEFS <undef> ;
flags como-win HDRS <include> ;
flags como-win SYSHDRS <sysinclude> ;
flags como-win LINKFLAGS <linkflags> ;
flags como-win ARFLAGS <arflags> ;
flags como-win NO_WARN <no-warn> ;

#flags como-win STDHDRS : $(COMO_INCLUDE_PATH) ;
#flags como-win STDLIB_PATH : $(COMO_STDLIB_PATH)$(SLASH) ;

flags como-win LIBPATH <library-path> ;
flags como-win LIBRARIES <library-file> ;
flags como-win FINDLIBS <find-shared-library> ;
flags como-win FINDLIBS <find-static-library> ;

# Literal newline, used to put one entry per line in response files below.
nl = "
" ;
# For como, we repeat all libraries so that dependencies are always resolved.
#
# Inputs are passed via a generated .rsp response file; $(nl) separates the
# entries onto individual lines.
actions link bind LIBRARIES
{
    $(CONFIG_COMMAND) --no_version --no_prelink_verbose $(LINKFLAGS) -o "$(<[1]:S=)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)")" "$(LIBRARIES)" "$(FINDLIBS:S=.lib)"
}

# Compile C in C99 mode with a fixed set of suppressed diagnostics.
actions compile.c
{
    $(CONFIG_COMMAND) -c --c99 -e5 --no_version --display_error_number --diag_suppress=9,21,161,748,940,962 -U$(UNDEFS) -D$(DEFINES) $(WARN) $(CFLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -I"$(SYSHDRS)" -o "$(<:D=)" "$(>)"
}

# Compile C++ with the same suppressions plus diag 461 promoted to an error.
actions compile.c++
{
    $(CONFIG_COMMAND) -c -e5 --no_version --no_prelink_verbose --display_error_number --long_long --diag_suppress=9,21,161,748,940,962 --diag_error=461 -D__STL_LONG_LONG -U$(UNDEFS) -D$(DEFINES) $(WARN) $(CFLAGS) $(C++FLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -I"$(SYSHDRS)" -o "$(<)" "$(>)"
}

# Archive: prelink the objects, then build the .lib with the 'lib' tool.
actions archive
{
    $(CONFIG_COMMAND) --no_version --no_prelink_verbose --prelink_object @"@($(<[1]:W).rsp:E=$(nl)"$(>)")"
    lib $(ARFLAGS) /nologo /out:"$(<:S=.lib)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)")"
}

View File

@ -0,0 +1,29 @@
# Copyright Vladimir Prus 2004.
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt
# or copy at http://www.boost.org/LICENSE_1_0.txt)
# This is a generic 'como' toolset. Depending on the current system, it
# forwards either to 'como-linux' or 'como-win' modules.
import feature ;
import os ;
import toolset ;
# Declare the 'como' toolset with a propagated, link-incompatible platform
# subfeature (its value is added by the como-linux / como-win modules).
feature.extend toolset : como ;
feature.subfeature toolset como : platform : : propagated link-incompatible ;

# Forward initialization to the platform-specific module, passing all
# arguments through unchanged.
rule init ( * : * )
{
    if [ os.name ] = LINUX
    {
        toolset.using como-linux :
            $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
    }
    else
    {
        toolset.using como-win :
            $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
    }
}

View File

@ -0,0 +1,62 @@
# Copyright (c) 2009 Vladimir Prus
#
# Use, modification and distribution is subject to the Boost Software
# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
# http://www.boost.org/LICENSE_1_0.txt)
# Implements 'convert' target that takes a bunch of source and
# tries to convert each one to the specified type.
#
# For example:
#
# convert objects obj : a.cpp b.cpp ;
#
import targets ;
import generators ;
import project ;
import type ;
import "class" : new ;
# A typed-target whose 'construct' asks the generators module for a
# transformation chain from the sources to the requested target type.
class convert-target-class : typed-target
{
    rule __init__ ( name : project : type
        : sources * : requirements * : default-build * : usage-requirements * )
    {
        typed-target.__init__ $(name) : $(project) : $(type)
            : $(sources) : $(requirements) : $(default-build) : $(usage-requirements) ;
    }

    # Builds virtual targets of $(self.type) from the given sources, tagging
    # the property set with <main-target-type>.
    rule construct ( name : source-targets * : property-set )
    {
        local r = [ generators.construct $(self.project) : $(self.type)
            : [ property-set.create [ $(property-set).raw ] # [ feature.expand
                <main-target-type>$(self.type) ]
                # ]
            : $(source-targets) ] ;
        if ! $(r)
        {
            # NOTE(review): 'errors' is not imported at the top of this module;
            # presumably it is reachable from the class environment -- verify.
            errors.error "unable to construct" [ full-name ] ;
        }
        return $(r) ;
    }
}
# Declares a 'convert' main target, e.g.:
#
#    convert objects obj : a.cpp b.cpp ;
#
# The second element of the first argument names the target type via
# type.type-from-rule-name.
rule convert ( name type : sources * : requirements * : default-build *
    : usage-requirements * )
{
    local project = [ project.current ] ;

    # This is a circular module dependency, so it must be imported here
    modules.import targets ;
    targets.main-target-alternative
        [ new convert-target-class $(name) : $(project) : [ type.type-from-rule-name $(type) ]
            : [ targets.main-target-sources $(sources) : $(name) ]
            : [ targets.main-target-requirements $(requirements) : $(project) ]
            : [ targets.main-target-default-build $(default-build) : $(project) ]
            : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ]
        ] ;
}

# Make 'convert' callable from Jamfiles.
IMPORT $(__name__) : convert : : convert ;

View File

@ -0,0 +1,34 @@
#~ Copyright 2005 Rene Rivera.
#~ Distributed under the Boost Software License, Version 1.0.
#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Automatic configuration for CodeWarrior toolset. To use, just import this module.
import os ;
import toolset : using ;
# On Windows, probe the registry for installed CodeWarrior versions (newest
# first) and register each one found with 'using cw'.
if [ os.name ] = NT
{
    for local R in 9 8 7
    {
        # Newer installers record PATH and VERSION under "Product Versions".
        local cw-path = [ W32_GETREG
            "HKEY_LOCAL_MACHINE\\SOFTWARE\\Metrowerks\\CodeWarrior\\Product Versions\\CodeWarrior for Windows R$(R)"
            : "PATH" ] ;
        local cw-version = [ W32_GETREG
            "HKEY_LOCAL_MACHINE\\SOFTWARE\\Metrowerks\\CodeWarrior\\Product Versions\\CodeWarrior for Windows R$(R)"
            : "VERSION" ] ;
        # Older installers use a per-version key; default the version to R.0.
        cw-path ?= [ W32_GETREG
            "HKEY_LOCAL_MACHINE\\SOFTWARE\\Metrowerks\\CodeWarrior for Windows\\$(R).0"
            : "PATH" ] ;
        cw-version ?= $(R).0 ;

        if $(cw-path)
        {
            if --debug-configuration in [ modules.peek : ARGV ]
            {
                ECHO "notice:" using cw ":" $(cw-version) ":" "$(cw-path)\\Other Metrowerks Tools\\Command Line Tools\\mwcc.exe" ;
            }
            using cw : $(cw-version) : "$(cw-path)\\Other Metrowerks Tools\\Command Line Tools\\mwcc.exe" ;
        }
    }
}

View File

@ -0,0 +1,246 @@
# Copyright (C) Reece H Dunn 2004
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
# based on the msvc.jam toolset
import property ;
import generators ;
import os ;
import type ;
import toolset : flags ;
import errors : error ;
import feature : feature get-values ;
import path ;
import sequence : unique ;
import common ;
# Remember whether --debug-configuration was passed on the command line.
if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
{
    .debug-configuration = true ;
}

feature.extend toolset : cw ;

# Linking against the shared runtime requires the multi-threaded runtime.
toolset.add-requirements <toolset>cw,<runtime-link>shared:<threading>multi ;

# Literal newline, used to put one entry per line in response files below.
nl = "
" ;
# Initializes the cw (CodeWarrior) toolset.
#   version - compiler version; 8.x/9.x additionally get runtime-library flags
#   command - override for locating mwcc.exe
#   options - may carry <setup>, <compiler>, <linker> and <root>
rule init ( version ? : command * : options * )
{
    # TODO: fix the $(command[1]) = $(compiler) issue

    # NOTE(review): setup/compiler/linker/other-tools are assigned without
    # 'local', making them module-level variables -- confirm this is intended.
    setup = [ get-values <setup> : $(options) ] ;
    setup ?= cwenv.bat ;
    compiler = [ get-values <compiler> : $(options) ] ;
    compiler ?= mwcc ;
    linker = [ get-values <linker> : $(options) ] ;
    linker ?= mwld ;

    local condition = [ common.check-init-parameters cw :
        version $(version) ] ;

    command = [ common.get-invocation-command cw : mwcc.exe : $(command) :
        [ default-paths $(version) ] ] ;

    common.handle-options cw : $(condition) : $(command) : $(options) ;

    local root = [ feature.get-values <root> : $(options) ] ;
    if $(command)
    {
        command = [ common.get-absolute-tool-path $(command[-1]) ] ;
    }
    local tool-root = $(command) ;

    setup = $(tool-root)\\$(setup) ;

    # map the batch file in setup so it can be executed

    other-tools = $(tool-root:D) ;
    root ?= $(other-tools:D) ;

    # Runtime DLL directories must be on the run path of produced binaries.
    flags cw.link RUN_PATH $(condition) :
        "$(root)\\Win32-x86 Support\\Libraries\\Runtime"
        "$(root)\\Win32-x86 Support\\Libraries\\Runtime\\Libs\\MSL_All-DLLs" ;

    setup = "set \"CWFOLDER="$(root)"\" && call \""$(setup)"\" > nul " ;

    if [ os.name ] = NT
    {
        setup = $(setup)"
" ;
    }
    else
    {
        setup = "cmd /S /C "$(setup)" \"&&\" " ;
    }

    # bind the setup command to the tool so it can be executed before the
    # command
    local prefix = $(setup) ;
    flags cw.compile .CC $(condition) : $(prefix)$(compiler) ;
    flags cw.link .LD $(condition) : $(prefix)$(linker) ;
    flags cw.archive .LD $(condition) : $(prefix)$(linker) ;

    if [ MATCH ^([89]\\.) : $(version) ]
    {
        if [ os.name ] = NT
        {
            # The runtime libraries
            flags cw.compile CFLAGS <runtime-link>static/<threading>single/<runtime-debugging>off : -runtime ss ;
            flags cw.compile CFLAGS <runtime-link>static/<threading>single/<runtime-debugging>on : -runtime ssd ;
            flags cw.compile CFLAGS <runtime-link>static/<threading>multi/<runtime-debugging>off : -runtime sm ;
            flags cw.compile CFLAGS <runtime-link>static/<threading>multi/<runtime-debugging>on : -runtime smd ;
            flags cw.compile CFLAGS <runtime-link>shared/<runtime-debugging>off : -runtime dm ;
            flags cw.compile CFLAGS <runtime-link>shared/<runtime-debugging>on : -runtime dmd ;
        }
    }
}
# Returns candidate installation directories used to locate mwcc.exe,
# plus everything on PATH.
local rule default-paths ( version ? ) # FIXME
{
    local possible-paths ;
    local ProgramFiles = [ common.get-program-files-dir ] ;

    # TODO: add support for cw8 and cw9 detection

    local version-6-path = $(ProgramFiles)"\\Metrowerks\\CodeWarrior" ;
    possible-paths += $(version-6-path) ;

    # perform post-processing

    possible-paths
        = $(possible-paths)"\\Other Metrowerks Tools\\Command Line Tools" ;

    possible-paths += [ modules.peek : PATH Path path ] ;

    return $(possible-paths) ;
}
## declare generators

generators.register-c-compiler cw.compile.c++ : CPP : OBJ : <toolset>cw ;
generators.register-c-compiler cw.compile.c : C : OBJ : <toolset>cw ;

generators.register-linker cw.link
    : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB
    : EXE
    : <toolset>cw
    ;
generators.register-linker cw.link.dll
    : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB
    : SHARED_LIB IMPORT_LIB
    : <toolset>cw
    ;

generators.register-archiver cw.archive
    : OBJ
    : STATIC_LIB
    : <toolset>cw
    ;

## compilation phase

flags cw WHATEVER <toolset-cw:version> ;

flags cw.compile CFLAGS <debug-symbols>on : -g ;
flags cw.compile CFLAGS <optimization>off : -O0 ;
flags cw.compile CFLAGS <optimization>speed : -O4,p ;
flags cw.compile CFLAGS <optimization>space : -O4,s ;

flags cw.compile CFLAGS <inlining>off : -inline off ;
flags cw.compile CFLAGS <inlining>on : -inline on ;
flags cw.compile CFLAGS <inlining>full : -inline all ;

flags cw.compile CFLAGS <exception-handling>off : -Cpp_exceptions off ;

flags cw.compile CFLAGS <rtti>on : -RTTI on ;
flags cw.compile CFLAGS <rtti>off : -RTTI off ;

flags cw.compile CFLAGS <warnings>on : -w on ;
flags cw.compile CFLAGS <warnings>off : -w off ;
flags cw.compile CFLAGS <warnings>all : -w all ;
flags cw.compile CFLAGS <warnings-as-errors>on : -w error ;

# Free-form user flags and preprocessor settings.
flags cw.compile USER_CFLAGS <cflags> : ;
flags cw.compile.c++ USER_CFLAGS <cxxflags> : ;

flags cw.compile DEFINES <define> ;
flags cw.compile UNDEFS <undef> ;
flags cw.compile INCLUDES <include> ;
# Compile via a response file; sources, defines and includes are written one
# per line using the $(nl) literal newline defined above.
actions compile.c
{
    $(.CC) -c -cwd include -lang c -U$(UNDEFS) $(CFLAGS) $(USER_CFLAGS) -I- -o "$(<)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)-D$(DEFINES) $(nl)"-I$(INCLUDES)")"
}

actions compile.c++
{
    $(.CC) -c -cwd include -lang c++ -U$(UNDEFS) $(CFLAGS) $(USER_CFLAGS) -I- -o "$(<)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)-D$(DEFINES) $(nl)"-I$(INCLUDES)")"
}
## linking phase

flags cw.link DEF_FILE <def-file> ;

flags cw LINKFLAGS : -search ;
flags cw LINKFLAGS <debug-symbols>on : -g ;

# Subsystem selection for the produced binary.
flags cw LINKFLAGS <user-interface>console : -subsystem console ;
flags cw LINKFLAGS <user-interface>gui : -subsystem windows ;
flags cw LINKFLAGS <user-interface>wince : -subsystem wince ;
flags cw LINKFLAGS <user-interface>native : -subsystem native ;
flags cw LINKFLAGS <user-interface>auto : -subsystem auto ;

flags cw LINKFLAGS <main-target-type>LIB/<link>static : -library ;

flags cw.link USER_LINKFLAGS <linkflags> ;
flags cw.link LINKPATH <library-path> ;

flags cw.link FINDLIBS_ST <find-static-library> ;
flags cw.link FINDLIBS_SA <find-shared-library> ;
flags cw.link LIBRARY_OPTION <toolset>cw : "" : unchecked ;
flags cw.link LIBRARIES_MENTIONED_BY_FILE : <library-file> ;
# Make the DLL target depend on its .def file (bound per-target as DEF_FILE).
rule link.dll ( targets + : sources * : properties * )
{
    DEPENDS $(<) : [ on $(<) return $(DEF_FILE) ] ;
}
# Archiving: remove any stale archive first, then rebuild from a response
# file.  The NT and cygwin variants differ only in shell syntax.
if [ os.name ] in NT
{
    actions archive
    {
        if exist "$(<[1])" DEL "$(<[1])"
        $(.LD) -library -o "$(<[1])" @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES_MENTIONED_BY_FILE) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")"
    }
}
else # cygwin
{
    actions archive
    {
        _bbv2_out_="$(<)"
        if test -f "$_bbv2_out_" ; then
            _bbv2_existing_="$(<:W)"
        fi
        $(.LD) -library -o "$(<:W)" $_bbv2_existing_ @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES_MENTIONED_BY_FILE) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")"
    }
}

# Link an executable.
actions link bind DEF_FILE
{
    $(.LD) -o "$(<[1]:W)" -L"$(LINKPATH)" $(LINKFLAGS) $(USER_LINKFLAGS) @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES_MENTIONED_BY_FILE) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")"
}

# Link a DLL and its import library; the .def file is passed with -f.
actions link.dll bind DEF_FILE
{
    $(.LD) -shared -o "$(<[1]:W)" -implib "$(<[2]:W)" -L"$(LINKPATH)" $(LINKFLAGS) -f"$(DEF_FILE)" $(USER_LINKFLAGS) @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES_MENTIONED_BY_FILE) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")"
}

View File

@ -0,0 +1,568 @@
# Copyright 2003 Christopher Currie
# Copyright 2006 Dave Abrahams
# Copyright 2003, 2004, 2005, 2006 Vladimir Prus
# Copyright 2005-2007 Mat Marcus
# Copyright 2005-2007 Adobe Systems Incorporated
# Copyright 2007-2010 Rene Rivera
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Please see http://article.gmane.org/gmane.comp.lib.boost.build/3389/
# for explanation why it's a separate toolset.
import feature : feature ;
import toolset : flags ;
import type ;
import common ;
import generators ;
import path : basename ;
import version ;
import property-set ;
import regex ;
import errors ;
## Use a framework.
feature framework : : free ;

## The MacOSX version to compile for, which maps to the SDK to use (sysroot).
feature macosx-version : : propagated link-incompatible symmetric optional ;

## The minimal MacOSX version to target.
feature macosx-version-min : : propagated optional ;

## A dependency, that is forced to be included in the link.
feature force-load : : free dependency incidental ;

#############################################################################

# Remember whether --debug-configuration was given on the command line.
if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
{
    .debug-configuration = true ;
}

feature.extend toolset : darwin ;
import gcc ;

# darwin is gcc-based: inherit its generators except the mingw linkers.
toolset.inherit-generators darwin : gcc : gcc.mingw.link gcc.mingw.link.dll ;
generators.override darwin.prebuilt : builtin.prebuilt ;
generators.override darwin.searched-lib-generator : searched-lib-generator ;

# Override default do-nothing generators.
generators.override darwin.compile.c.pch : pch.default-c-pch-generator ;
generators.override darwin.compile.c++.pch : pch.default-cpp-pch-generator ;

type.set-generated-target-suffix PCH : <toolset>darwin : gch ;

toolset.inherit-rules darwin : gcc : localize ;
# Exclude flags darwin configures itself (architecture / address-model are
# handled by setup-address-model below).
toolset.inherit-flags darwin : gcc
    : <runtime-link>static
      <architecture>arm/<address-model>32
      <architecture>arm/<address-model>64
      <architecture>arm/<instruction-set>
      <architecture>x86/<address-model>32
      <architecture>x86/<address-model>64
      <architecture>x86/<instruction-set>
      <architecture>power/<address-model>32
      <architecture>power/<address-model>64
      <architecture>power/<instruction-set> ;
# Options:
#
# <root>PATH
#   Platform root path. The common autodetection will set this to
#   "/Developer". And when a command is given it will be set to
#   the corresponding "*.platform/Developer" directory.
#
# Initializes a darwin toolset instance: detects the host OSX version, the
# compiler command and version, strip and libtool commands, and the SDKs
# installed under the root.
rule init ( version ? : command * : options * : requirement * )
{
    # First time around, figure what is host OSX version
    if ! $(.host-osx-version)
    {
        .host-osx-version = [ MATCH "^([0-9.]+)"
            : [ SHELL "/usr/bin/sw_vers -productVersion" ] ] ;
        if $(.debug-configuration)
        {
            ECHO notice: OSX version on this machine is $(.host-osx-version) ;
        }
    }

    # - The root directory of the tool install.
    local root = [ feature.get-values <root> : $(options) ] ;

    # - The bin directory where to find the commands to execute.
    local bin ;

    # - The configured compile driver command.
    local command = [ common.get-invocation-command darwin : g++ : $(command) ] ;

    # The version as reported by the compiler
    local real-version ;

    # - Autodetect the root and bin dir if not given.
    if $(command)
    {
        bin ?= [ common.get-absolute-tool-path $(command[1]) ] ;
        if $(bin) = "/usr/bin"
        {
            root ?= /Developer ;
        }
        else
        {
            local r = $(bin:D) ;
            r = $(r:D) ;
            root ?= $(r) ;
        }
    }

    # - Autodetect the version if not given.
    if $(command)
    {
        # - The 'command' variable can have multiple elements. When calling
        #   the SHELL builtin we need a single string.
        local command-string = $(command:J=" ") ;
        real-version = [ MATCH "^([0-9.]+)"
            : [ SHELL "$(command-string) -dumpversion" ] ] ;
        version ?= $(real-version) ;
    }

    # Remember the real compiler version keyed by the configured version
    # (read back by setup-address-model).
    .real-version.$(version) = $(real-version) ;

    # - Define the condition for this toolset instance.
    local condition =
        [ common.check-init-parameters darwin $(requirement) : version $(version) ] ;

    # - Set the toolset generic common options.
    common.handle-options darwin : $(condition) : $(command) : $(options) ;

    # - GCC 4.0 and higher in Darwin does not have -fcoalesce-templates.
    if $(real-version) < "4.0.0"
    {
        flags darwin.compile.c++ OPTIONS $(condition) : -fcoalesce-templates ;
    }
    # - GCC 4.2 and higher in Darwin does not have -Wno-long-double.
    if $(real-version) < "4.2.0"
    {
        flags darwin.compile OPTIONS $(condition) : -Wno-long-double ;
    }

    # - Set the link flags common with the GCC toolset.
    gcc.init-link-flags darwin darwin $(condition) ;

    # - The symbol strip program.
    local strip ;
    if <striper> in $(options)
    {
        # We can turn off strip by specifying it as empty. In which
        # case we switch to using the linker to do the strip.
        flags darwin.link.dll OPTIONS
            $(condition)/<main-target-type>LIB/<link>shared/<address-model>32/<strip>on : -Wl,-x ;
        flags darwin.link.dll OPTIONS
            $(condition)/<main-target-type>LIB/<link>shared/<address-model>/<strip>on : -Wl,-x ;
        flags darwin.link OPTIONS
            $(condition)/<main-target-type>EXE/<address-model>32/<strip>on : -s ;
        flags darwin.link OPTIONS
            $(condition)/<main-target-type>EXE/<address-model>/<strip>on : -s ;
    }
    else
    {
        # Otherwise we need to find a strip program to use. And hence
        # also tell the link action that we need to use a strip
        # post-process.
        flags darwin.link NEED_STRIP $(condition)/<strip>on : "" ;
        strip =
            [ common.get-invocation-command darwin
                : strip : [ feature.get-values <striper> : $(options) ] : $(bin) : search-path ] ;
        flags darwin.link .STRIP $(condition) : $(strip[1]) ;
        if $(.debug-configuration)
        {
            ECHO notice: using strip for $(condition) at $(strip[1]) ;
        }
    }

    # - The archive builder (libtool is the default as creating
    #   archives in darwin is complicated.
    local archiver =
        [ common.get-invocation-command darwin
            : libtool : [ feature.get-values <archiver> : $(options) ] : $(bin) : search-path ] ;
    flags darwin.archive .LIBTOOL $(condition) : $(archiver[1]) ;
    if $(.debug-configuration)
    {
        ECHO notice: using archiver for $(condition) at $(archiver[1]) ;
    }

    # - Initialize the SDKs available in the root for this tool.
    local sdks = [ init-available-sdk-versions $(condition) : $(root) ] ;

    #~ ECHO --- ;
    #~ ECHO --- bin :: $(bin) ;
    #~ ECHO --- root :: $(root) ;
    #~ ECHO --- version :: $(version) ;
    #~ ECHO --- condition :: $(condition) ;
    #~ ECHO --- strip :: $(strip) ;
    #~ ECHO --- archiver :: $(archiver) ;
    #~ ECHO --- sdks :: $(sdks) ;
    #~ ECHO --- ;
    #~ EXIT ;
}
# Add and set options for a discovered SDK version.
# NOTE(review): $(sdk) used below is not a parameter of this rule -- it is
# resolved through jam's dynamic scoping from the caller's loop variable in
# init-available-sdk-versions (the SDK directory path); verify before refactoring.
local rule init-sdk ( condition * : root ? : version + : version-feature ? )
{
    # Map a version list like "mac 10 5" or "iphone 3 0" to the string used
    # as the <macosx-version> feature value.
    local rule version-to-feature ( version + )
    {
        switch $(version[1])
        {
            case iphone* :
            {
                return $(version[1])-$(version[2-]:J=.) ;
            }
            case mac* :
            {
                return $(version[2-]:J=.) ;
            }
            case * :
            {
                return $(version:J=.) ;
            }
        }
    }

    if $(version-feature)
    {
        if $(.debug-configuration)
        {
            ECHO notice: available sdk for $(condition)/<macosx-version>$(version-feature) at $(sdk) ;
        }

        # Add the version to the features for specifying them.
        if ! $(version-feature) in [ feature.values macosx-version ]
        {
            feature.extend macosx-version : $(version-feature) ;
        }
        if ! $(version-feature) in [ feature.values macosx-version-min ]
        {
            feature.extend macosx-version-min : $(version-feature) ;
        }

        # Set the flags the version needs to compile with, first
        # generic options.
        flags darwin.compile OPTIONS $(condition)/<macosx-version>$(version-feature)
            : -isysroot $(sdk) ;
        flags darwin.link OPTIONS $(condition)/<macosx-version>$(version-feature)
            : -isysroot $(sdk) ;

        # Then device variation options.
        switch $(version[1])
        {
            case iphonesim* :
            {
                # Build a zero-padded numeric macro value: each of the minor
                # and patch components contributes two digits (missing -> 00).
                local N = $(version[2]) ;
                if ! $(version[3]) { N += 00 ; }
                else if [ regex.match (..) : $(version[3]) ] { N += $(version[3]) ; }
                else { N += 0$(version[3]) ; }
                if ! $(version[4]) { N += 00 ; }
                else if [ regex.match (..) : $(version[4]) ] { N += $(version[4]) ; }
                else { N += 0$(version[4]) ; }
                N = $(N:J=) ;
                flags darwin.compile OPTIONS <macosx-version-min>$(version-feature)
                    : -D__IPHONE_OS_VERSION_MIN_REQUIRED=$(N) ;
                flags darwin.link OPTIONS <macosx-version-min>$(version-feature)
                    : -D__IPHONE_OS_VERSION_MIN_REQUIRED=$(N) ;
            }
            case iphone* :
            {
                flags darwin.compile OPTIONS <macosx-version-min>$(version-feature)
                    : -miphoneos-version-min=$(version[2-]:J=.) ;
                flags darwin.link OPTIONS <macosx-version-min>$(version-feature)
                    : -miphoneos-version-min=$(version[2-]:J=.) ;
            }
            case mac* :
            {
                flags darwin.compile OPTIONS <macosx-version-min>$(version-feature)
                    : -mmacosx-version-min=$(version[2-]:J=.) ;
                flags darwin.link OPTIONS <macosx-version-min>$(version-feature)
                    : -mmacosx-version-min=$(version[2-]:J=.) ;
            }
        }

        return $(version-feature) ;
    }
    else if $(version[4])
    {
        # We have a patch version of an SDK. We want to set up
        # both the specific patch version, and the minor version.
        # So we recurse to set up the minor version. Plus the minor version.
        return
            [ init-sdk $(condition) : $(root)
                : $(version[1-3]) : [ version-to-feature $(version[1-3]) ] ]
            [ init-sdk $(condition) : $(root)
                : $(version) : [ version-to-feature $(version) ] ] ;
    }
    else
    {
        # Yes, this is intentionally recursive.
        return
            [ init-sdk $(condition) : $(root)
                : $(version) : [ version-to-feature $(version) ] ] ;
    }
}
# Determine the MacOSX SDK versions installed and their locations.
# Scans $(root)/SDKs for MacOSX/iPhoneOS/iPhoneSimulator SDK directories,
# registers each via init-sdk, and returns the resulting version features.
local rule init-available-sdk-versions ( condition * : root ? )
{
    root ?= /Developer ;
    local sdks-root = $(root)/SDKs ;
    local sdks = [ GLOB $(sdks-root) : MacOSX*.sdk iPhoneOS*.sdk iPhoneSimulator*.sdk ] ;
    local result ;
    for local sdk in $(sdks)
    {
        # Split "<platform><major>.<minor>[.<patch>]" out of the directory name.
        local sdk-match = [ MATCH ([^0-9]+)([0-9]+)[.]([0-9x]+)[.]?([0-9x]+)? : $(sdk:D=) ] ;
        local sdk-platform = $(sdk-match[1]:L) ;
        local sdk-version = $(sdk-match[2-]) ;
        if $(sdk-version)
        {
            # Normalize the platform prefix expected by init-sdk.
            switch $(sdk-platform)
            {
                case macosx :
                {
                    sdk-version = mac $(sdk-version) ;
                }
                case iphoneos :
                {
                    sdk-version = iphone $(sdk-version) ;
                }
                case iphonesimulator :
                {
                    sdk-version = iphonesim $(sdk-version) ;
                }
                case * :
                {
                    sdk-version = $(sdk-version:J=-) ;
                }
            }
            result += [ init-sdk $(condition) : $(sdk) : $(sdk-version) ] ;
        }
    }
    return $(result) ;
}
# Generic options.
flags darwin.compile OPTIONS <flags> ;

# The following adds objective-c support to darwin.
# Thanks to http://thread.gmane.org/gmane.comp.lib.boost.build/13759
generators.register-c-compiler darwin.compile.m : OBJECTIVE_C : OBJ : <toolset>darwin ;
generators.register-c-compiler darwin.compile.mm : OBJECTIVE_CPP : OBJ : <toolset>darwin ;
# Appends the -arch options matching <architecture> and <address-model> to
# OPTIONS on the given targets.  Multi-arch ("fat") builds are expressed as
# several -arch flags; ppc64 is rejected when targeting OSX 10.6 or later.
rule setup-address-model ( targets * : sources * : properties * )
{
    local ps = [ property-set.create $(properties) ] ;
    local arch = [ $(ps).get <architecture> ] ;
    local address-model = [ $(ps).get <address-model> ] ;
    local osx-version = [ $(ps).get <macosx-version> ] ;
    local gcc-version = [ $(ps).get <toolset-darwin:version> ] ;
    gcc-version = $(.real-version.$(gcc-version)) ;
    local options ;

    local support-ppc64 = 1 ;

    # Without an explicit target version, assume the host's.
    osx-version ?= $(.host-osx-version) ;

    switch $(osx-version)
    {
        case iphone* :
        {
            support-ppc64 = ;
        }
        case * :
            if $(osx-version) && ! [ version.version-less [ regex.split $(osx-version) \\. ] : 10 6 ]
            {
                # When targeting 10.6:
                # - gcc 4.2 will give a compiler error if ppc64 compilation is requested
                # - gcc 4.0 will compile fine, somehow, but then fail at link time
                support-ppc64 = ;
            }
    }

    switch $(arch)
    {
        case combined :
        {
            if $(address-model) = 32_64 {
                if $(support-ppc64) {
                    options = -arch i386 -arch ppc -arch x86_64 -arch ppc64 ;
                } else {
                    # Build 3-way binary
                    options = -arch i386 -arch ppc -arch x86_64 ;
                }
            } else if $(address-model) = 64 {
                if $(support-ppc64) {
                    options = -arch x86_64 -arch ppc64 ;
                } else {
                    errors.user-error "64-bit PPC compilation is not supported when targeting OSX 10.6 or later" ;
                }
            } else {
                options = -arch i386 -arch ppc ;
            }
        }
        case x86 :
        {
            if $(address-model) = 32_64 {
                options = -arch i386 -arch x86_64 ;
            } else if $(address-model) = 64 {
                options = -arch x86_64 ;
            } else {
                options = -arch i386 ;
            }
        }
        case power :
        {
            if ! $(support-ppc64)
                && ( $(address-model) = 32_64 || $(address-model) = 64 )
            {
                errors.user-error "64-bit PPC compilation is not supported when targeting OSX 10.6 or later" ;
            }
            if $(address-model) = 32_64 {
                options = -arch ppc -arch ppc64 ;
            } else if $(address-model) = 64 {
                options = -arch ppc64 ;
            } else {
                options = -arch ppc ;
            }
        }
        case arm :
        {
            options = -arch armv6 ;
        }
    }

    if $(options)
    {
        OPTIONS on $(targets) += $(options) ;
    }
}
# Delegate threading option setup to the base gcc toolset.
rule setup-threading ( targets * : sources * : properties * )
{
gcc.setup-threading $(targets) : $(sources) : $(properties) ;
}
# Delegate position-independent-code option setup to the base gcc toolset.
rule setup-fpic ( targets * : sources * : properties * )
{
gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
}
# Objective-C compilation: force the language with "-x objective-c" and
# reuse the gcc -fPIC and darwin -arch setup.
rule compile.m ( targets * : sources * : properties * )
{
LANG on $(<) = "-x objective-c" ;
gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
setup-address-model $(targets) : $(sources) : $(properties) ;
}
actions compile.m
{
"$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
}
# Objective-C++ compilation: same as compile.m but with "-x objective-c++".
rule compile.mm ( targets * : sources * : properties * )
{
LANG on $(<) = "-x objective-c++" ;
gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
setup-address-model $(targets) : $(sources) : $(properties) ;
}
actions compile.mm
{
"$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
}
# Set the max header padding to allow renaming of libs for installation.
flags darwin.link.dll OPTIONS : -headerpad_max_install_names ;
# To link the static runtime we need to link to all the core runtime libraries.
flags darwin.link OPTIONS <runtime-link>static
: -nodefaultlibs -shared-libgcc -lstdc++-static -lgcc_eh -lgcc -lSystem ;
# Strip as much as possible when optimizing.
flags darwin.link OPTIONS <optimization>speed : -Wl,-dead_strip -no_dead_strip_inits_and_terms ;
flags darwin.link OPTIONS <optimization>space : -Wl,-dead_strip -no_dead_strip_inits_and_terms ;
# Dynamic/shared linking.
flags darwin.compile OPTIONS <link>shared : -dynamic ;
# Misc options.
flags darwin.compile OPTIONS : -no-cpp-precomp -gdwarf-2 -fexceptions ;
#~ flags darwin.link OPTIONS : -fexceptions ;
# Add the framework names to use.
flags darwin.link FRAMEWORK <framework> ;
# Libraries whose members must all be loaded, via -Wl,-force_load.
flags darwin.link FORCE_LOAD <force-load> ;
# This is flag is useful for debugging the link step
# uncomment to see what libtool is doing under the hood
#~ flags darwin.link.dll OPTIONS : -Wl,-v ;
# A single literal space, used in actions to glue an option to its
# argument with a space between them (e.g. "-framework Foo").
_ = " " ;
# Derive -F framework search-path options for the given link targets.
# The -framework option itself accepts only a framework's basename, so
# when a <framework> value carries a directory component we must emit a
# matching -F option pointing the linker at that directory.
local rule prepare-framework-path ( target + )
{
    # Directory parts of every FRAMEWORK value bound to the target.
    local fw-dirs = [ on $(target) return $(FRAMEWORK:D) ] ;
    for local fw-dir in $(fw-dirs)
    {
        # Frameworks given by bare name have an empty directory part;
        # those need no -F option at all.
        if $(fw-dir) != ""
        {
            FRAMEWORK_PATH on $(target) += -F$(fw-dir) ;
        }
    }
}
# Prepare an executable link: make the target depend on any force-load
# libraries, then set up -arch slices and -F framework paths.
rule link ( targets * : sources * : properties * )
{
DEPENDS $(targets) : [ on $(targets) return $(FORCE_LOAD) ] ;
setup-address-model $(targets) : $(sources) : $(properties) ;
prepare-framework-path $(<) ;
}
# Note that using strip without any options was reported to result in broken
# binaries, at least on OS X 10.5.5, see:
# http://svn.boost.org/trac/boost/ticket/2347
# So we pass -S -x.
# NEED_STRIP is empty unless stripping was requested, which blanks out the
# whole second command line.
actions link bind LIBRARIES FORCE_LOAD
{
"$(CONFIG_COMMAND)" -L"$(LINKPATH)" -o "$(<)" "$(>)" -Wl,-force_load$(_)"$(FORCE_LOAD)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(FRAMEWORK_PATH) -framework$(_)$(FRAMEWORK:D=:S=) $(OPTIONS) $(USER_OPTIONS)
$(NEED_STRIP)"$(.STRIP)" $(NEED_STRIP)-S $(NEED_STRIP)-x $(NEED_STRIP)"$(<)"
}
# Prepare a shared-library link: set up -arch slices and -F framework paths.
rule link.dll ( targets * : sources * : properties * )
{
setup-address-model $(targets) : $(sources) : $(properties) ;
prepare-framework-path $(<) ;
}
# Link a shared library; the install_name is set to the bare file name so
# it can be relocated later (see -headerpad_max_install_names above).
actions link.dll bind LIBRARIES
{
"$(CONFIG_COMMAND)" -dynamiclib -Wl,-single_module -install_name "$(<:B)$(<:S)" -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(FRAMEWORK_PATH) -framework$(_)$(FRAMEWORK:D=:S=) $(OPTIONS) $(USER_OPTIONS)
}
# We use libtool instead of ar to support universal binary linking
# TODO: Find a way to use the underlying tools, i.e. lipo, to do this.
actions piecemeal archive
{
"$(.LIBTOOL)" -static -o "$(<:T)" $(ARFLAGS) "$(>:T)"
}

View File

@ -0,0 +1,134 @@
# Digital Mars C++
# (C) Copyright Christof Meerwald 2003.
# (C) Copyright Aleksey Gurtovoy 2004.
# (C) Copyright Arjan Knepper 2006.
#
# Distributed under the Boost Software License, Version 1.0. (See
# accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
# The following #// line will be used by the regression test table generation
# program as the column heading for HTML tables. Must not include version number.
#//<a href="http://www.digitalmars.com/">Digital<br>Mars C++</a>
import feature generators common ;
import toolset : flags ;
import sequence regex ;
feature.extend toolset : dmc ;
# Configure the Digital Mars C++ toolset.
#
# version: optional version used to build the toolset condition.
# command: invocation command for dmc; defaults to "dmc" on PATH.
# options: extra toolset options forwarded to common.handle-options.
rule init ( version ? : command * : options * )
{
local condition = [ common.check-init-parameters dmc : version $(version) ] ;
local command = [ common.get-invocation-command dmc : dmc : $(command) ] ;
command ?= dmc ;
common.handle-options dmc : $(condition) : $(command) : $(options) ;
if $(command)
{
command = [ common.get-absolute-tool-path $(command[-1]) ] ;
}
# NOTE(review): 'root' is not declared local, so it leaks into the
# enclosing dynamic scope — confirm nothing relies on that.
root = $(command:D) ;
if $(root)
{
# DMC linker is sensitive to the direction of slashes, and
# won't link if forward slashes are used in command.
root = [ sequence.join [ regex.split $(root) "/" ] : "\\" ] ;
flags dmc .root $(condition) : $(root)\\bin\\ ;
}
else
{
# Tool found on PATH: invoke it without an explicit directory.
flags dmc .root $(condition) : "" ;
}
}
# Declare generators
# Hook the dmc link/archive/compile actions into the generator graph.
generators.register-linker dmc.link : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : EXE : <toolset>dmc ;
generators.register-linker dmc.link.dll : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : SHARED_LIB IMPORT_LIB : <toolset>dmc ;
generators.register-archiver dmc.archive : OBJ : STATIC_LIB : <toolset>dmc ;
generators.register-c-compiler dmc.compile.c++ : CPP : OBJ : <toolset>dmc ;
generators.register-c-compiler dmc.compile.c : C : OBJ : <toolset>dmc ;
# Declare flags
# dmc optlink has some limitation on the amount of debug-info included. Therefore only linenumbers are enabled in debug builds.
# flags dmc.compile OPTIONS <debug-symbols>on : -g ;
flags dmc.compile OPTIONS <debug-symbols>on : -gl ;
flags dmc.link OPTIONS <debug-symbols>on : /CO /NOPACKF /DEBUGLI ;
flags dmc.link OPTIONS <debug-symbols>off : /PACKF ;
flags dmc.compile OPTIONS <optimization>off : -S -o+none ;
flags dmc.compile OPTIONS <optimization>speed : -o+time ;
flags dmc.compile OPTIONS <optimization>space : -o+space ;
flags dmc.compile OPTIONS <exception-handling>on : -Ae ;
flags dmc.compile OPTIONS <rtti>on : -Ar ;
# FIXME:
# Compiling sources to be linked into a shared lib (dll) the -WD cflag should be used
# Compiling sources to be linked into a static lib (lib) or executable the -WA cflag should be used
# But for some reason the -WD cflag is always in use.
# flags dmc.compile OPTIONS <link>shared : -WD ;
# flags dmc.compile OPTIONS <link>static : -WA ;
# Note that these two options actually imply multithreading support on DMC
# because there is no single-threaded dynamic runtime library. Specifying
# <threading>multi would be a bad idea, though, because no option would be
# matched when the build uses the default settings of <runtime-link>dynamic
# and <threading>single.
flags dmc.compile OPTIONS <runtime-debugging>off/<runtime-link>shared : -ND ;
flags dmc.compile OPTIONS <runtime-debugging>on/<runtime-link>shared : -ND ;
flags dmc.compile OPTIONS <runtime-debugging>off/<runtime-link>static/<threading>single : ;
flags dmc.compile OPTIONS <runtime-debugging>on/<runtime-link>static/<threading>single : ;
flags dmc.compile OPTIONS <runtime-debugging>off/<runtime-link>static/<threading>multi : -D_MT ;
flags dmc.compile OPTIONS <runtime-debugging>on/<runtime-link>static/<threading>multi : -D_MT ;
# Free-feature passthroughs (no explicit condition/value pair).
flags dmc.compile OPTIONS <cflags> ;
flags dmc.compile.c++ OPTIONS <cxxflags> ;
flags dmc.compile DEFINES <define> ;
flags dmc.compile INCLUDES <include> ;
# NOTE(review): no variable name given here — this relies on
# toolset.flags' implicit short form; confirm against toolset.jam.
flags dmc.link <linkflags> ;
flags dmc.archive OPTIONS <arflags> ;
flags dmc LIBPATH <library-path> ;
flags dmc LIBRARIES <library-file> ;
flags dmc FINDLIBS <find-library-sa> ;
flags dmc FINDLIBS <find-library-st> ;
# Link an executable with optlink; commas separate optlink's positional
# argument groups (objects , exe ,, libs , def).
actions together link bind LIBRARIES
{
"$(.root)link" $(OPTIONS) /NOI /DE /XN "$(>)" , "$(<[1])" ,, $(LIBRARIES) user32.lib kernel32.lib "$(FINDLIBS:S=.lib)" , "$(<[2]:B).def"
}
# Link a DLL: generate a minimal .def file on the fly, then link with an
# import library as the second target.
actions together link.dll bind LIBRARIES
{
echo LIBRARY "$(<[1])" > $(<[2]:B).def
echo DESCRIPTION 'A Library' >> $(<[2]:B).def
echo EXETYPE NT >> $(<[2]:B).def
echo SUBSYSTEM WINDOWS >> $(<[2]:B).def
echo CODE EXECUTE READ >> $(<[2]:B).def
echo DATA READ WRITE >> $(<[2]:B).def
"$(.root)link" $(OPTIONS) /NOI /DE /XN /ENTRY:_DllMainCRTStartup /IMPLIB:"$(<[2])" "$(>)" $(LIBRARIES) , "$(<[1])" ,, user32.lib kernel32.lib "$(FINDLIBS:S=.lib)" , "$(<[2]:B).def"
}
actions compile.c
{
"$(.root)dmc" -c $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -o"$(<)" "$(>)"
}
# -cpp selects C++ mode; -Ab enables bool.
actions compile.c++
{
"$(.root)dmc" -cpp -c -Ab $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -o"$(<)" "$(>)"
}
actions together piecemeal archive
{
"$(.root)lib" $(OPTIONS) -c -n -p256 "$(<)" "$(>)"
}

View File

@ -0,0 +1,84 @@
# Copyright David Abrahams 2004. Distributed under the Boost
# Software License, Version 1.0. (See accompanying
# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
# Support for docutils ReStructuredText processing.
import type ;
import scanner ;
import generators ;
import os ;
import common ;
import toolset ;
import path ;
import feature : feature ;
import property ;
# Module state: set once init has run.
.initialized = ;
# ReStructuredText source type.
type.register ReST : rst ;
# Scanner that discovers implicit dependencies of .rst files: any file
# pulled in via the include::, image:: or figure:: directives.
class rst-scanner : common-scanner
{
rule __init__ ( paths * )
{
common-scanner.__init__ . $(paths) ;
}
# The regexes intentionally contain a literal newline inside the
# character class to stop matching at end of line.
rule pattern ( )
{
return "^[ ]*\\.\\.[ ]+include::[ ]+([^
]+)"
"^[ ]*\\.\\.[ ]+image::[ ]+([^
]+)"
"^[ ]*\\.\\.[ ]+figure::[ ]+([^
]+)"
;
}
}
scanner.register rst-scanner : include ;
type.set-scanner ReST : rst-scanner ;
# ReST -> HTML via the docutils.html action below.
generators.register-standard docutils.html : ReST : HTML ;
# Configure the docutils toolset.
#
# docutils-dir: docutils installation root; falls back to the global
#               DOCUTILS_DIR variable.
# tools-dir:    directory holding rst2html.py etc.; defaults to
#               $(docutils-dir)/tools.
# NOTE(review): uses modules.peek but this file has no visible
# 'import modules ;' — confirm the modules module is globally available.
rule init ( docutils-dir ? : tools-dir ? )
{
docutils-dir ?= [ modules.peek : DOCUTILS_DIR ] ;
tools-dir ?= $(docutils-dir)/tools ;
if ! $(.initialized)
{
.initialized = true ;
.docutils-dir = $(docutils-dir) ;
.tools-dir = $(tools-dir:R="") ;
# Shell fragment that puts docutils on PYTHONPATH before running.
.setup = [
common.prepend-path-variable-command PYTHONPATH
: $(.docutils-dir) $(.docutils-dir)/extras ] ;
}
}
# Set up the rst2html command for the target unless <docutils-cmd>
# already provided one via the RST2XXX flag below.
rule html ( target : source : properties * )
{
if ! [ on $(target) return $(RST2XXX) ]
{
local python-cmd = [ property.select <python.interpreter> : $(properties) ] ;
RST2XXX on $(target) = $(python-cmd:G=:E="python") $(.tools-dir)/rst2html.py ;
}
}
# Free features for passing extra options through to the tool.
feature docutils : : free ;
feature docutils-html : : free ;
feature docutils-cmd : : free ;
toolset.flags docutils COMMON-FLAGS : <docutils> ;
toolset.flags docutils HTML-FLAGS : <docutils-html> ;
toolset.flags docutils RST2XXX : <docutils-cmd> ;
actions html
{
$(.setup)
"$(RST2XXX)" $(COMMON-FLAGS) $(HTML-FLAGS) $(>) $(<)
}

View File

@ -0,0 +1,11 @@
#~ Copyright 2005, 2006 Rene Rivera.
#~ Distributed under the Boost Software License, Version 1.0.
#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Automatic configuration for Doxygen tools. To use, just import this module.
# Deprecated shim: importing it simply forwards to 'using doxygen ;'.
import toolset : using ;
ECHO "warning: doxygen-config.jam is deprecated. Use 'using doxygen ;' instead." ;
using doxygen ;

View File

@ -0,0 +1,776 @@
# Copyright 2003, 2004 Douglas Gregor
# Copyright 2003, 2004, 2005 Vladimir Prus
# Copyright 2006 Rene Rivera
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# This module defines rules to handle generation of various outputs from source
# files documented with doxygen comments. The supported transformations are:
#
# * Source -> Doxygen XML -> BoostBook XML
# * Source -> Doxygen HTML
#
# The type of transformation is selected based on the target requested. For
# BoostBook XML, the default, specifying a target with an ".xml" suffix, or an
# empty suffix, will produce a <target>.xml and <target>.boostbook. For Doxygen
# HTML specifying a target with an ".html" suffix will produce a directory
# <target> with the Doxygen html files, and a <target>.html file redirecting to
# that directory.
import "class" : new ;
import targets ;
import feature ;
import property ;
import generators ;
import boostbook ;
import type ;
import path ;
import print ;
import regex ;
import stage ;
import project ;
import xsltproc ;
import make ;
import os ;
import toolset : flags ;
import alias ;
import common ;
import modules ;
import project ;
import utility ;
import errors ;
# Use to specify extra configuration parameters. These get translated
# into a doxyfile which configures the building of the docs.
feature.feature doxygen:param : : free ;
# Specify the "<xsl:param>boost.doxygen.header.prefix" XSLT option.
feature.feature prefix : : free ;
# Specify the "<xsl:param>boost.doxygen.reftitle" XSLT option.
feature.feature reftitle : : free ;
# Which processor to use for various translations from Doxygen.
feature.feature doxygen.processor : xsltproc doxproc : propagated implicit ;
# To generate, or not, index sections.
feature.feature doxygen.doxproc.index : no yes : propagated incidental ;
# The ID for the resulting BoostBook reference section.
feature.feature doxygen.doxproc.id : : free ;
# The title for the resulting BoostBook reference section.
feature.feature doxygen.doxproc.title : : free ;
# Location for images when generating XML
feature.feature doxygen:xml-imagedir : : free ;
# Indicates whether the entire directory should be deleted
feature.feature doxygen.rmdir : off on : optional incidental ;
# Doxygen configuration input file.
type.register DOXYFILE : doxyfile ;
# Doxygen XML multi-file output.
type.register DOXYGEN_XML_MULTIFILE : xml-dir : XML ;
# Doxygen XML coalesced output.
type.register DOXYGEN_XML : doxygen : XML ;
# Doxygen HTML multifile directory.
type.register DOXYGEN_HTML_MULTIFILE : html-dir : HTML ;
# Redirection HTML file to HTML multifile directory.
type.register DOXYGEN_HTML : : HTML ;
# Stamp type for the copied LaTeX formula images.
type.register DOXYGEN_XML_IMAGES : doxygen-xml-images ;
# Initialize the Doxygen module. Parameters are:
# name: the name of the 'doxygen' executable. If not specified, the name
# 'doxygen' will be used
#
# Initialize the doxygen module: register all generators once, then
# record (and validate) the doxygen executable name.
rule init ( name ? )
{
if ! $(.initialized)
{
.initialized = true ;
# Locate the bundled doxproc.py next to this module.
.doxproc = [ modules.binding $(__name__) ] ;
.doxproc = $(.doxproc:D)/doxproc.py ;
generators.register-composing doxygen.headers-to-doxyfile
: H HPP CPP : DOXYFILE ;
generators.register-standard doxygen.run
: DOXYFILE : DOXYGEN_XML_MULTIFILE ;
generators.register-standard doxygen.xml-dir-to-boostbook
: DOXYGEN_XML_MULTIFILE : BOOSTBOOK : <doxygen.processor>doxproc ;
generators.register-standard doxygen.xml-to-boostbook
: DOXYGEN_XML : BOOSTBOOK : <doxygen.processor>xsltproc ;
generators.register-standard doxygen.collect
: DOXYGEN_XML_MULTIFILE : DOXYGEN_XML ;
generators.register-standard doxygen.run
: DOXYFILE : DOXYGEN_HTML_MULTIFILE ;
generators.register-standard doxygen.html-redirect
: DOXYGEN_HTML_MULTIFILE : DOXYGEN_HTML ;
generators.register-standard doxygen.copy-latex-pngs
: DOXYGEN_HTML : DOXYGEN_XML_IMAGES ;
# Export the user-level 'doxygen' rule into the global module.
IMPORT $(__name__) : doxygen : : doxygen ;
}
if $(name)
{
# Re-initialization with an explicit name is only allowed before
# the configuration has been frozen by first use.
modify-config ;
.doxygen = $(name) ;
check-doxygen ;
}
if ! $(.doxygen)
{
check-doxygen ;
}
}
# Mark the configuration as in-use: after the first freeze, further
# configuration changes are rejected by modify-config. Also caches
# whether the configured doxygen is a Cygwin build.
rule freeze-config ( )
{
if ! $(.initialized)
{
errors.user-error "doxygen must be initialized before it can be used." ;
}
if ! $(.config-frozen)
{
.config-frozen = true ;
if [ .is-cygwin ]
{
.is-cygwin = true ;
}
}
}
# Guard called before any configuration change; errors out once the
# configuration has been frozen.
rule modify-config ( )
{
if $(.config-frozen)
{
errors.user-error "Cannot change doxygen after it has been used." ;
}
}
# Resolve the doxygen executable to an invocation command, searching
# Program Files on Windows in addition to PATH.
rule check-doxygen ( )
{
if --debug-configuration in [ modules.peek : ARGV ]
{
ECHO "notice:" using doxygen ":" $(.doxygen) ;
}
local extra-paths ;
if [ os.name ] = NT
{
local ProgramFiles = [ modules.peek : ProgramFiles ] ;
if $(ProgramFiles)
{
extra-paths = "$(ProgramFiles:J= )" ;
}
else
{
extra-paths = "C:\\Program Files" ;
}
}
.doxygen = [ common.get-invocation-command doxygen :
doxygen : $(.doxygen) : $(extra-paths) ] ;
}
# Return the configured doxygen command, freezing the configuration.
rule name ( )
{
freeze-config ;
return $(.doxygen) ;
}
# Detect whether the configured doxygen executable is a Cygwin build.
# Runs doxygen on the bundled windows-paths-check doxyfile, whose input
# is given by POSIX path; only a Cygwin doxygen manages to parse it.
# Returns "true" when so, empty otherwise. Only meaningful on Windows.
rule .is-cygwin ( )
{
    if [ os.on-windows ]
    {
        local file = [ path.make [ modules.binding $(__name__) ] ] ;
        local dir = [ path.native
            [ path.join [ path.parent $(file) ] doxygen ] ] ;
        local command =
            "cd \"$(dir)\" && \"$(.doxygen)\" windows-paths-check.doxyfile 2>&1" ;
        # Fix: 'result' was not declared local and leaked into the
        # enclosing dynamic scope.
        local result = [ SHELL $(command) ] ;
        if [ MATCH "(Parsing file /)" : $(result) ]
        {
            return true ;
        }
    }
}
# Runs Doxygen on the given Doxygen configuration file (the source) to generate
# the Doxygen files. The output is dumped according to the settings in the
# Doxygen configuration file, not according to the target! Because of this, we
# essentially "touch" the target file, in effect making it look like we have
# really written something useful to it. Anyone that uses this action must deal
# with this behavior.
#
actions doxygen-action
{
$(RM) "$(*.XML)" & "$(NAME:E=doxygen)" "$(>)" && echo "Stamped" > "$(<)"
}
# Runs the Python doxproc XML processor.
#
actions doxproc
{
python "$(DOXPROC)" "--xmldir=$(>)" "--output=$(<)" "$(OPTIONS)" "--id=$(ID)" "--title=$(TITLE)"
}
# Translate a path for consumption by the configured doxygen binary.
# - Cygwin doxygen on a Cygwin bjam: paths pass through unchanged.
# - Native doxygen on a Cygwin bjam: convert to a Windows path (:W).
# - Cygwin doxygen on a native Windows bjam: convert a drive-letter
#   path into the corresponding /cygdrive form.
# - Anything else (including non-Windows hosts): unchanged.
rule translate-path ( path )
{
    freeze-config ;
    if [ os.on-windows ]
    {
        if [ os.name ] = CYGWIN
        {
            if $(.is-cygwin)
            {
                return $(path) ;
            }
            else
            {
                return $(path:W) ;
            }
        }
        else
        {
            if $(.is-cygwin)
            {
                # Fix: 'match' was not declared local and leaked into
                # the enclosing dynamic scope.
                local match = [ MATCH ^(.):(.*) : $(path) ] ;
                if $(match)
                {
                    return /cygdrive/$(match[1])$(match[2]:T) ;
                }
                else
                {
                    return $(path:T) ;
                }
            }
            else
            {
                return $(path) ;
            }
        }
    }
    else
    {
        return $(path) ;
    }
}
# Generates a doxygen configuration file (doxyfile) given a set of C++ sources
# and a property list that may contain <doxygen:param> features.
#
# Write a doxygen configuration file for the given header/source list,
# translating each <doxygen:param> property into a doxyfile setting.
# OUTPUT_DIRECTORY defaults to the target's build location, and LaTeX
# output is disabled unconditionally.
rule headers-to-doxyfile ( target : sources * : properties * )
{
    # Fix: the original read 'local text "# Generated..." ;' which
    # declared the comment string as a second (empty) local variable
    # instead of making it the first line of the generated file.
    local text = "# Generated by Boost.Build version 2" ;
    local output-dir ;
    # Translate <doxygen:param> into command line flags.
    for local param in [ feature.get-values <doxygen:param> : $(properties) ]
    {
        local namevalue = [ regex.match ([^=]*)=(.*) : $(param) ] ;
        if $(namevalue[1]) = OUTPUT_DIRECTORY
        {
            # The output directory must be usable by the doxygen
            # binary, so run it through translate-path.
            output-dir = [ translate-path
                [ utility.unquote $(namevalue[2]) ] ] ;
            text += "OUTPUT_DIRECTORY = \"$(output-dir)\"" ;
        }
        else
        {
            text += "$(namevalue[1]) = $(namevalue[2])" ;
        }
    }
    if ! $(output-dir)
    {
        output-dir = [ translate-path [ on $(target) return $(LOCATE) ] ] ;
        text += "OUTPUT_DIRECTORY = \"$(output-dir)\"" ;
    }
    local headers = ;
    for local header in $(sources:G=)
    {
        header = [ translate-path $(header) ] ;
        headers += \"$(header)\" ;
    }
    # Doxygen generates LaTex by default. So disable it unconditionally, or at
    # least until someone needs, and hence writes support for, LaTex output.
    text += "GENERATE_LATEX = NO" ;
    text += "INPUT = $(headers:J= )" ;
    print.output $(target) plain ;
    print.text $(text) : true ;
}
# Run Doxygen. See doxygen-action for a description of the strange properties of
# this rule.
#
# Set up the doxygen-action invocation for the target. When
# <doxygen.rmdir>on is in effect and an existing HTML output directory
# can be located from the properties, schedule its removal first.
rule run ( target : source : properties * )
{
freeze-config ;
if <doxygen.rmdir>on in $(properties)
{
local output-dir =
[ path.make
[ MATCH <doxygen:param>OUTPUT_DIRECTORY=\"?([^\"]*) :
$(properties) ] ] ;
local html-dir =
[ path.make
[ MATCH <doxygen:param>HTML_OUTPUT=(.*) :
$(properties) ] ] ;
if $(output-dir) && $(html-dir) &&
[ path.glob $(output-dir) : $(html-dir) ]
{
HTMLDIR on $(target) =
[ path.native [ path.join $(output-dir) $(html-dir) ] ] ;
rm-htmldir $(target) ;
}
}
doxygen-action $(target) : $(source) ;
NAME on $(target) = $(.doxygen) ;
RM on $(target) = [ modules.peek common : RM ] ;
# Glob used by doxygen-action to clear stale XML output before a run.
*.XML on $(target) =
[ path.native
[ path.join
[ path.make [ on $(target) return $(LOCATE) ] ]
$(target:B:S=)
*.xml ] ] ;
}
# Platform-appropriate recursive directory removal command.
if [ os.name ] = NT
{
RMDIR = rmdir /s /q ;
}
else
{
RMDIR = rm -rf ;
}
actions quietly rm-htmldir
{
$(RMDIR) $(HTMLDIR)
}
# The rules below require Boost.Book stylesheets, so we need some code to check
# that the boostbook module has actualy been initialized.
#
# Abort with a helpful message when the boostbook module has not been
# initialized; the XSLT-based rules below depend on its stylesheets.
rule check-boostbook ( )
{
if ! [ modules.peek boostbook : .initialized ]
{
ECHO "error: the boostbook module is not initialized" ;
ECHO "error: you've attempted to use the 'doxygen' toolset, " ;
ECHO "error: which requires Boost.Book," ;
ECHO "error: but never initialized Boost.Book." ;
EXIT "error: Hint: add 'using boostbook ;' to your user-config.jam" ;
}
}
# Collect the set of Doxygen XML files into a single XML source file that can be
# handled by an XSLT processor. The source is completely ignored (see
# doxygen-action), because this action picks up the Doxygen XML index file
# xml/index.xml. This is because we can not teach Doxygen to act like a NORMAL
# program and take a "-o output.xml" argument (grrrr). The target of the
# collection will be a single Doxygen XML file.
#
# Collect the multi-file Doxygen XML output into a single XML file by
# running the collect.xsl stylesheet over the generated index.xml (the
# nominal source is ignored; see the comment above doxygen-action).
rule collect ( target : source : properties * )
{
check-boostbook ;
local collect-xsl-dir
= [ path.native [ path.join [ boostbook.xsl-dir ] doxygen collect ] ] ;
local source-path
= [ path.make [ on $(source) return $(LOCATE) ] ] ;
local collect-path
= [ path.root [ path.join $(source-path) $(source:B) ] [ path.pwd ] ] ;
local native-path
= [ path.native $(collect-path) ] ;
local real-source
= [ path.native [ path.join $(collect-path) index.xml ] ] ;
xsltproc.xslt $(target) : $(real-source) $(collect-xsl-dir:S=.xsl)
: <xsl:param>doxygen.xml.path=$(native-path) ;
}
# Translate Doxygen XML into BoostBook.
#
# Run the doxygen2boostbook.xsl stylesheet over collected Doxygen XML,
# forwarding <prefix> and <reftitle> as the corresponding XSLT params.
rule xml-to-boostbook ( target : source : properties * )
{
check-boostbook ;
local xsl-dir = [ boostbook.xsl-dir ] ;
local d2b-xsl = [ path.native [ path.join [ boostbook.xsl-dir ] doxygen
doxygen2boostbook.xsl ] ] ;
local xslt-properties = $(properties) ;
for local prefix in [ feature.get-values <prefix> : $(properties) ]
{
xslt-properties += "<xsl:param>boost.doxygen.header.prefix=$(prefix)" ;
}
for local title in [ feature.get-values <reftitle> : $(properties) ]
{
xslt-properties += "<xsl:param>boost.doxygen.reftitle=$(title)" ;
}
xsltproc.xslt $(target) : $(source) $(d2b-xsl) : $(xslt-properties) ;
}
# Options for the doxproc.py-based conversion path.
flags doxygen.xml-dir-to-boostbook OPTIONS <doxygen.doxproc.index>yes : --enable-index ;
flags doxygen.xml-dir-to-boostbook ID <doxygen.doxproc.id> ;
flags doxygen.xml-dir-to-boostbook TITLE <doxygen.doxproc.title> ;
# Convert the Doxygen XML directory to BoostBook with doxproc.py; the
# directory name is the source with its suffix stripped.
rule xml-dir-to-boostbook ( target : source : properties * )
{
DOXPROC on $(target) = $(.doxproc) ;
LOCATE on $(source:S=) = [ on $(source) return $(LOCATE) ] ;
doxygen.doxproc $(target) : $(source:S=) ;
}
# Generate the HTML redirect to HTML dir index.html file.
#
# Write a small XHTML page that meta-refreshes to the generated HTML
# directory's index.html.
rule html-redirect ( target : source : properties * )
{
local uri = "$(target:B)/index.html" ;
print.output $(target) plain ;
print.text
"<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Strict//EN\"
\"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd\">
<html xmlns=\"http://www.w3.org/1999/xhtml\">
<head>
<meta http-equiv=\"refresh\" content=\"0; URL=$(uri)\" />
<title></title>
</head>
<body>
Automatic redirection failed, please go to <a href=
\"$(uri)\">$(uri)</a>.
</body>
</html>
"
: true ;
}
# Copy the LaTeX-formula PNGs produced by doxygen into both the HTML
# output and the <doxygen:xml-imagedir> location, then stamp the target.
rule copy-latex-pngs ( target : source : requirements * )
{
local directory = [ path.native
[ feature.get-values <doxygen:xml-imagedir> :
$(requirements) ] ] ;
local location = [ on $(target) return $(LOCATE) ] ;
local pdf-location =
[ path.native
[ path.join
[ path.make $(location) ]
[ path.make $(directory) ] ] ] ;
local html-location =
[ path.native
[ path.join
.
html
[ path.make $(directory) ] ] ] ;
common.MkDir $(pdf-location) ;
common.MkDir $(html-location) ;
DEPENDS $(target) : $(pdf-location) $(html-location) ;
# Platform-specific copy command and path separators for the action.
if [ os.name ] = NT
{
CP on $(target) = copy /y ;
FROM on $(target) = \\*.png ;
TOHTML on $(target) = .\\html\\$(directory) ;
TOPDF on $(target) = \\$(directory) ;
}
else
{
CP on $(target) = cp ;
FROM on $(target) = /*.png ;
TOHTML on $(target) = ./html/$(directory) ;
TOPDF on $(target) = $(target:D)/$(directory) ;
}
}
actions copy-latex-pngs
{
$(CP) $(>:S=)$(FROM) $(TOHTML)
$(CP) $(>:S=)$(FROM) $(<:D)$(TOPDF)
echo "Stamped" > "$(<)"
}
# building latex images for doxygen XML depends
# on latex, dvips, and ps being in your PATH.
# This is true for most Unix installs, but
# not on Win32, where you will need to install
# MkTex and Ghostscript and add these tools
# to your path.
# Each action probes for a LaTeX-toolchain program by asking it for its
# version and capturing the output into the check target.
actions check-latex
{
latex -version >$(<)
}
actions check-dvips
{
dvips -version >$(<)
}
# Ghostscript has a different executable name on Windows.
if [ os.name ] = "NT"
{
actions check-gs
{
gswin32c -version >$(<)
}
}
else
{
actions check-gs
{
gs -version >$(<)
}
}
# Lazily create (once) the three virtual file targets that run the
# latex/dvips/gs version checks, rooted at the top-most project, and
# return them for use as dependencies.
rule check-tools ( )
{
if ! $(.check-tools-targets)
{
# Find the root project.
local root-project = [ project.current ] ;
root-project = [ $(root-project).project-module ] ;
while
[ project.attribute $(root-project) parent-module ] &&
[ project.attribute $(root-project) parent-module ] != user-config
{
root-project =
[ project.attribute $(root-project) parent-module ] ;
}
.latex.check = [ new file-target latex.check
:
: [ project.target $(root-project) ]
: [ new action : doxygen.check-latex ]
:
] ;
.dvips.check = [ new file-target dvips.check
:
: [ project.target $(root-project) ]
: [ new action : doxygen.check-dvips ]
:
] ;
.gs.check = [ new file-target gs.check
:
: [ project.target $(root-project) ]
: [ new action : doxygen.check-gs ]
:
] ;
.check-tools-targets = $(.latex.check) $(.dvips.check) $(.gs.check) ;
}
return $(.check-tools-targets) ;
}
# Turn this module into a project so the /doxygen//check-tools target
# below can be referenced from requirements.
project.initialize $(__name__) ;
project doxygen ;
# Main-target class whose construction just yields the tool-check
# targets from check-tools above.
class doxygen-check-tools-target-class : basic-target
{
import doxygen ;
rule construct ( name : sources * : property-set )
{
return [ property-set.empty ] [ doxygen.check-tools ] ;
}
}
local project = [ project.current ] ;
targets.main-target-alternative
[ new doxygen-check-tools-target-class check-tools : $(project)
: [ targets.main-target-sources : check-tools : no-renaming ]
: [ targets.main-target-requirements : $(project) ]
: [ targets.main-target-default-build : $(project) ]
: [ targets.main-target-usage-requirements : $(project) ]
] ;
# User-level rule to generate BoostBook XML from a set of headers via Doxygen.
#
# User-level rule. Depending on the requested target suffix it builds
# either a Doxygen HTML directory plus redirect page (".html" suffix)
# or a BoostBook XML file (any other/empty suffix), wiring up the whole
# doxyfile -> doxygen -> post-processing target chain.
rule doxygen ( target : sources * : requirements * : default-build * : usage-requirements * )
{
freeze-config ;
local project = [ project.current ] ;
if $(target:S) = .html
{
# Build an HTML directory from the sources.
local html-location = [ feature.get-values <location> : $(requirements) ] ;
local output-dir ;
if [ $(project).get build-dir ]
{
# Explicitly specified build dir. Add html at the end.
output-dir = [ path.join [ $(project).build-dir ] $(html-location:E=html) ] ;
}
else
{
# Trim 'bin' from implicit build dir, for no other reason that backward
# compatibility.
output-dir = [ path.join [ path.parent [ $(project).build-dir ] ]
$(html-location:E=html) ] ;
}
output-dir = [ path.root $(output-dir) [ path.pwd ] ] ;
local output-dir-native = [ path.native $(output-dir) ] ;
requirements = [ property.change $(requirements) : <location> ] ;
## The doxygen configuration file.
targets.main-target-alternative
[ new typed-target $(target:S=.tag) : $(project) : DOXYFILE
: [ targets.main-target-sources $(sources) : $(target:S=.tag) ]
: [ targets.main-target-requirements $(requirements)
<doxygen:param>GENERATE_HTML=YES
<doxygen:param>GENERATE_XML=NO
<doxygen:param>"OUTPUT_DIRECTORY=\"$(output-dir-native)\""
<doxygen:param>HTML_OUTPUT=$(target:B)
: $(project) ]
: [ targets.main-target-default-build $(default-build) : $(project) ]
] ;
$(project).mark-target-as-explicit $(target:S=.tag) ;
## The html directory to generate by running doxygen.
targets.main-target-alternative
[ new typed-target $(target:S=.dir) : $(project) : DOXYGEN_HTML_MULTIFILE
: $(target:S=.tag)
: [ targets.main-target-requirements $(requirements)
<doxygen:param>"OUTPUT_DIRECTORY=\"$(output-dir-native)\""
<doxygen:param>HTML_OUTPUT=$(target:B)
: $(project) ]
: [ targets.main-target-default-build $(default-build) : $(project) ]
] ;
$(project).mark-target-as-explicit $(target:S=.dir) ;
## The redirect html file into the generated html.
targets.main-target-alternative
[ new typed-target $(target) : $(project) : DOXYGEN_HTML
: $(target:S=.dir)
: [ targets.main-target-requirements $(requirements)
<location>$(output-dir)
: $(project) ]
: [ targets.main-target-default-build $(default-build) : $(project) ]
] ;
}
else
{
# Build a BoostBook XML file from the sources.
local location-xml = [ feature.get-values <location> : $(requirements) ] ;
requirements = [ property.change $(requirements) : <location> ] ;
local target-xml = $(target:B=$(target:B)-xml) ;
# Check whether we need to build images
local images-location =
[ feature.get-values <doxygen:xml-imagedir> : $(requirements) ] ;
if $(images-location)
{
# Nested call builds an HTML variant solely to obtain the
# LaTeX formula PNGs, which are then copied for the XML docs.
doxygen $(target).doxygen-xml-images.html : $(sources)
: $(requirements)
<doxygen.rmdir>on
<doxygen:param>QUIET=YES
<doxygen:param>WARNINGS=NO
<doxygen:param>WARN_IF_UNDOCUMENTED=NO
<dependency>/doxygen//check-tools ;
$(project).mark-target-as-explicit
$(target).doxygen-xml-images.html ;
targets.main-target-alternative
[ new typed-target $(target).doxygen-xml-images
: $(project) : DOXYGEN_XML_IMAGES
: $(target).doxygen-xml-images.html
: [ targets.main-target-requirements $(requirements)
: $(project) ]
: [ targets.main-target-default-build $(default-build)
: $(project) ]
] ;
$(project).mark-target-as-explicit
$(target).doxygen-xml-images ;
# The formuladir XSLT param must end in a slash.
if ! [ regex.match "^(.*/)$" : $(images-location) ]
{
images-location = $(images-location)/ ;
}
requirements +=
<dependency>$(target).doxygen-xml-images
<xsl:param>boost.doxygen.formuladir=$(images-location) ;
}
## The doxygen configuration file.
targets.main-target-alternative
[ new typed-target $(target-xml:S=.tag) : $(project) : DOXYFILE
: [ targets.main-target-sources $(sources) : $(target-xml:S=.tag) ]
: [ targets.main-target-requirements $(requirements)
<doxygen:param>GENERATE_HTML=NO
<doxygen:param>GENERATE_XML=YES
<doxygen:param>XML_OUTPUT=$(target-xml)
: $(project) ]
: [ targets.main-target-default-build $(default-build) : $(project) ]
] ;
$(project).mark-target-as-explicit $(target-xml:S=.tag) ;
## The Doxygen XML directory of the processed source files.
targets.main-target-alternative
[ new typed-target $(target-xml:S=.dir) : $(project) : DOXYGEN_XML_MULTIFILE
: $(target-xml:S=.tag)
: [ targets.main-target-requirements $(requirements)
: $(project) ]
: [ targets.main-target-default-build $(default-build) : $(project) ]
] ;
$(project).mark-target-as-explicit $(target-xml:S=.dir) ;
## The resulting BoostBook file is generated by the processor tool. The
## tool can be either the xsltproc plus accompanying XSL scripts. Or it
## can be the python doxproc.py script.
targets.main-target-alternative
[ new typed-target $(target-xml) : $(project) : BOOSTBOOK
: $(target-xml:S=.dir)
: [ targets.main-target-requirements $(requirements)
: $(project) ]
: [ targets.main-target-default-build $(default-build) : $(project) ]
] ;
$(project).mark-target-as-explicit $(target-xml) ;
targets.main-target-alternative
[ new install-target-class $(target:S=.xml) : $(project)
: $(target-xml)
: [ targets.main-target-requirements $(requirements)
<location>$(location-xml:E=.)
<name>$(target:S=.xml)
: $(project) ]
: [ targets.main-target-default-build $(default-build) : $(project) ]
] ;
$(project).mark-target-as-explicit $(target:S=.xml) ;
targets.main-target-alternative
[ new alias-target-class $(target) : $(project)
:
: [ targets.main-target-requirements $(requirements)
: $(project) ]
: [ targets.main-target-default-build $(default-build) : $(project) ]
: [ targets.main-target-usage-requirements $(usage-requirements)
<dependency>$(target:S=.xml)
: $(project) ]
] ;
}
}

View File

@ -0,0 +1,3 @@
# Minimal Doxygen configuration fixture — presumably used by the doxygen
# toolset's Windows-path handling check (TODO confirm against caller).
# Processes a single header and disables HTML/LaTeX output so the run
# produces no documentation artifacts.
INPUT = windows-paths-check.hpp
GENERATE_HTML = NO
GENERATE_LATEX = NO

View File

@ -0,0 +1,69 @@
# Copyright (C) 2003-2004 Doug Gregor and Dave Abrahams. Distributed
# under the Boost Software License, Version 1.0. (See accompanying
# file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
#
# This module defines rules to handle generation of PDF and
# PostScript files from XSL Formatting Objects via Apache FOP
import generators ;
import common ;
import boostbook ;
# Register the two converters: FO -> PDF and FO -> PS. The action names
# ('fop.render.pdf' / 'fop.render.ps') match the 'actions' blocks below.
generators.register-standard fop.render.pdf : FO : PDF ;
generators.register-standard fop.render.ps : FO : PS ;
# Initializes the fop toolset.
#
# fop-command - path to the 'fop' wrapper script; auto-detected via
#               common.get-invocation-command when omitted.
# java-home   - optional JAVA_HOME to export before running fop.
# java        - optional explicit path to the java executable (JAVACMD).
rule init ( fop-command ? : java-home ? : java ? )
{
# Remember whether a previous init call already located a command, so a
# later parameterless init does not repeat the search.
local has-command = $(.has-command) ;
if $(fop-command)
{
.has-command = true ;
}
if $(fop-command) || ! $(has-command)
{
# Search for 'fop', also honouring a user-set FOP_DIR global variable.
fop-command = [ common.get-invocation-command fop : fop : $(fop-command)
: [ modules.peek : FOP_DIR ] ] ;
}
if $(fop-command)
{
.FOP_COMMAND = $(fop-command) ;
}
if $(java-home) || $(java)
{
# .FOP_SETUP accumulates shell commands that set environment variables
# and is prefixed to every fop invocation (see the actions below).
.FOP_SETUP = ;
# JAVA_HOME is the location that java was installed to.
if $(java-home)
{
.FOP_SETUP += [ common.variable-setting-command JAVA_HOME : $(java-home) ] ;
}
# JAVACMD is the location of the java executable, useful for a
# non-standard java installation, where the executable isn't at
# $JAVA_HOME/bin/java.
if $(java)
{
.FOP_SETUP += [ common.variable-setting-command JAVACMD : $(java) ] ;
}
}
}
# Render an FO source ($(>)) to PDF ($(<)). .FOP_COMMAND defaults to plain
# 'fop' when init was never given an explicit command.
actions render.pdf
{
    $(.FOP_SETUP) $(.FOP_COMMAND:E=fop) $(>) $(<)
}
# Render an FO source to PostScript; fop selects PS output via '-ps'.
actions render.ps
{
    $(.FOP_SETUP) $(.FOP_COMMAND:E=fop) $(>) -ps $(<)
}

View File

@ -0,0 +1,55 @@
# Copyright (C) 2004 Toon Knapen
#
# Use, modification and distribution is subject to the Boost Software
# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
# http://www.boost.org/LICENSE_1_0.txt)
#
# This file contains common settings for all fortran tools
#
import "class" : new ;
import feature : feature ;
import type ;
import generators ;
import common ;
# Fixed-form Fortran source suffixes: .f, .F, .for, .f77
type.register FORTRAN : f F for f77 ;
# Free-form Fortran 90 source suffixes: .f90, .F90
type.register FORTRAN90 : f90 F90 ;
# Free-form features so build requests can pass arbitrary values through.
feature fortran : : free ;
feature fortran90 : : free ;
# Generator subclass for Fortran 77 compilers. Adds no behaviour of its own;
# exists so Fortran generators form a distinct class, registered via
# register-fortran-compiler below.
class fortran-compiling-generator : generator
{
rule __init__ ( id : source-types + : target-types + : requirements * : optional-properties * )
{
generator.__init__ $(id) : $(source-types) : $(target-types) : $(requirements) : $(optional-properties) ;
}
}
# Convenience wrapper: create a fortran-compiling-generator with the given
# parameters and add it to the global generator registry in one step.
rule register-fortran-compiler ( id : source-types + : target-types + : requirements * : optional-properties * )
{
generators.register [ new fortran-compiling-generator $(id) : $(source-types) : $(target-types) : $(requirements) : $(optional-properties) ] ;
}
# Generator subclass for Fortran 90 compilers; mirrors
# fortran-compiling-generator above, adding no extra behaviour.
class fortran90-compiling-generator : generator
{
rule __init__ ( id : source-types + : target-types + : requirements * : optional-properties * )
{
generator.__init__ $(id) : $(source-types) : $(target-types) : $(requirements) : $(optional-properties) ;
}
}
# Convenience wrapper: create a fortran90-compiling-generator with the given
# parameters and add it to the global generator registry in one step.
rule register-fortran90-compiler ( id : source-types + : target-types + : requirements * : optional-properties * )
{
generators.register [ new fortran90-compiling-generator $(id) : $(source-types) : $(target-types) : $(requirements) : $(optional-properties) ] ;
}
# FIXME: this is ugly, should find a better way (we'd want client code to
# register all generators as "generator.some-rule", not with "some-module.some-rule".)
# Export the two registration rules into the global namespace under the
# 'generators.' prefix so toolsets (gfortran, hpfortran, ifort) can call
# e.g. 'generators.register-fortran-compiler' after importing this module.
IMPORT $(__name__) : register-fortran-compiler : : generators.register-fortran-compiler ;
IMPORT $(__name__) : register-fortran90-compiler : : generators.register-fortran90-compiler ;

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,108 @@
# Copyright 2006 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Declares main target 'generate' used to produce targets by calling a
# user-provided rule that takes and produces virtual targets.
import "class" : new ;
import errors ;
import feature ;
import project ;
import property ;
import property-set ;
import targets ;
import regex ;
# Free feature carrying an indirect (@rule) reference to the user's
# generating rule; required on every 'generate' target (checked in __init__).
feature.feature generating-rule : : free ;
# Main-target class backing the 'generate' rule: instead of running
# generators, construction dispatches to the user rule named by the
# <generating-rule> property.
class generated-target-class : basic-target
{
import errors ;
import indirect ;
import virtual-target ;
rule __init__ ( name : project : sources * : requirements *
: default-build * : usage-requirements * )
{
basic-target.__init__ $(name) : $(project) : $(sources)
: $(requirements) : $(default-build) : $(usage-requirements) ;
# The generating rule is mandatory; fail early rather than at build time.
if ! [ $(self.requirements).get <generating-rule> ]
{
errors.user-error "The generate rule requires the <generating-rule>"
"property to be set" ;
}
}
# Invokes the user rule and returns: usage-requirements property-set
# followed by the registered virtual targets it produced.
rule construct ( name : sources * : property-set )
{
local result ;
local gr = [ $(property-set).get <generating-rule> ] ;
# FIXME: this is a copy-paste from virtual-target.jam. We should add a
# utility rule to call a rule like this.
# Strip the leading '@' from the indirect rule reference.
local rule-name = [ MATCH ^@(.*) : $(gr) ] ;
if $(rule-name)
{
# Exactly one generating rule may apply to a target.
if $(gr[2])
{
local target-name = [ full-name ] ;
errors.user-error "Multiple <generating-rule> properties"
"encountered for target $(target-name)." ;
}
result = [ indirect.call $(rule-name) $(self.project) $(name)
: $(property-set) : $(sources) ] ;
if ! $(result)
{
ECHO "warning: Unable to construct" [ full-name ] ;
}
}
local ur ;
local targets ;
if $(result)
{
# The user rule may optionally return a property-set of usage
# requirements as its first element; otherwise all elements are targets.
if [ class.is-a $(result[1]) : property-set ]
{
ur = $(result[1]) ;
targets = $(result[2-]) ;
}
else
{
ur = [ property-set.empty ] ;
targets = $(result) ;
}
}
# FIXME: the following loop should be doable using sequence.transform or
# some similar utility rule.
# Register each produced virtual target so it is shared/deduplicated.
local rt ;
for local t in $(targets)
{
rt += [ virtual-target.register $(t) ] ;
}
return $(ur) $(rt) ;
}
}
# Main-target rule: declares a target of class generated-target-class in the
# current project. The <generating-rule> property must appear in
# 'requirements' (enforced by the class's __init__).
rule generate ( name : sources * : requirements * : default-build *
: usage-requirements * )
{
local project = [ project.current ] ;
targets.main-target-alternative
[ new generated-target-class $(name) : $(project)
: [ targets.main-target-sources $(sources) : $(name) ]
: [ targets.main-target-requirements $(requirements) : $(project) ]
: [ targets.main-target-default-build $(default-build) : $(project) ]
: [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ]
] ;
}
# Make 'generate' callable from Jamfiles without module qualification.
IMPORT $(__name__) : generate : : generate ;

View File

@ -0,0 +1,230 @@
# Copyright 2003, 2004, 2005, 2006 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# This module support GNU gettext internationalization utilities.
#
# It provides two main target rules: 'gettext.catalog', used for
# creating machine-readable catalogs from translations files, and
# 'gettext.update', used for update translation files from modified
# sources.
#
# To add i18n support to your application you should follow these
# steps.
#
# - Decide on a file name which will contain translations and
# what main target name will be used to update it. For example::
#
# gettext.update update-russian : russian.po a.cpp my_app ;
#
# - Create the initial translation file by running::
#
# bjam update-russian
#
# - Edit russian.po. For example, you might change fields like LastTranslator.
#
# - Create a main target for final message catalog::
#
# gettext.catalog russian : russian.po ;
#
# The machine-readable catalog will be updated whenever you update
# "russian.po". The "russian.po" file will be updated only on explicit
# request. When you're ready to update translations, you should
#
# - Run::
#
# bjam update-russian
#
# - Edit "russian.po" in appropriate editor.
#
# The next bjam run will convert "russian.po" into machine-readable form.
#
# By default, translations are marked by 'i18n' call. The 'gettext.keyword'
# feature can be used to alter this.
import targets ;
import property-set ;
import virtual-target ;
import "class" : new ;
import project ;
import type ;
import generators ;
import errors ;
import feature : feature ;
import toolset : flags ;
import regex ;
# Directory prefix for the gettext tools (xgettext/msgmerge/msgfmt);
# empty means "find them on PATH". Set by init below.
.path = "" ;
# Initializes the gettext module.
rule init ( path ? # Path where all tools are located. If not specified,
# they should be in PATH.
)
{
# Re-initialization is only allowed with the same path.
if $(.initialized) && $(.path) != $(path)
{
errors.error "Attempt to reconfigure with different path" ;
}
.initialized = true ;
if $(path)
{
# Trailing slash so .path can be prepended directly to tool names.
.path = $(path)/ ;
}
}
# Creates a main target 'name', which, when updated, will cause
# file 'existing-translation' to be updated with translations
# extracted from 'sources'. It's possible to specify main target
# in sources --- in which case all targets from the dependency graph
# of those main targets will be scanned, provided they are of
# appropriate type. The 'gettext.types' feature can be used to
# control the types.
#
# The target will be updated only if explicitly requested on the
# command line.
rule update ( name : existing-translation sources + : requirements * )
{
local project = [ project.current ] ;
# gettext.UPDATE is a pseudo-type handled by update-translations-generator.
targets.main-target-alternative
[ new typed-target $(name) : $(project) : gettext.UPDATE :
$(existing-translation) $(sources)
: [ targets.main-target-requirements $(requirements) : $(project) ]
] ;
# Never build implicitly; only when named on the command line.
$(project).mark-target-as-explicit $(name) ;
}
# The human editable source, containing translation.
type.register gettext.PO : po ;
# The machine readable message catalog.
type.register gettext.catalog : mo ;
# Intermediate type produce by extracting translations from
# sources.
type.register gettext.POT : pot ;
# Pseudo type used to invoke update-translations generator
type.register gettext.UPDATE ;
# Identifies the keyword that should be used when scanning sources.
# Default: i18n
feature gettext.keyword : : free ;
# Contains space-separated list of sources types which should be scanned.
# Default: "C CPP"
feature gettext.types : : free ;
# .po -> .mo conversion via msgfmt (see 'actions gettext.compile' below).
generators.register-standard gettext.compile : gettext.PO : gettext.catalog ;
# Generator producing gettext.UPDATE pseudo-targets: scans the dependency
# graphs of the source main targets, extracts messages with xgettext, and
# arranges for the existing .po file to be merged with them.
class update-translations-generator : generator
{
import regex : split ;
import property-set ;
rule __init__ ( * : * )
{
generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
}
# The rule should be called with at least two sources. The first source
# is the translation (.po) file to update. The remaining sources are targets
# which should be scanned for new messages. All sources files for those targets
# will be found and passed to the 'xgettext' utility, which extracts the
# messages for localization. Those messages will be merged to the .po file.
rule run ( project name ? : property-set : sources * : multiple ? )
{
local types = [ $(property-set).get <gettext.types> ] ;
types ?= "C CPP" ;
types = [ regex.split $(types) " " ] ;
local keywords = [ $(property-set).get <gettext.keyword> ] ;
# Reduce the property set to just the keyword, which is all the extract
# action needs (it feeds the KEYWORD flag below).
property-set = [ property-set.create $(keywords:G=<gettext.keyword>) ] ;
# First determine the list of sources that must be scanned for
# messages.
local all-sources ;
# CONSIDER: I'm not sure if the logic should be the same as for 'stage':
# i.e. following dependency properties as well.
for local s in $(sources[2-])
{
all-sources += [ virtual-target.traverse $(s) : : include-sources ] ;
}
# Keep only sources whose type is in the requested list.
local right-sources ;
for local s in $(all-sources)
{
if [ $(s).type ] in $(types)
{
right-sources += $(s) ;
}
}
local .constructed ;
if $(right-sources)
{
# Create the POT file, which will contain the list of messages extracted
# from the sources.
local extract =
[ new action $(right-sources) : gettext.extract : $(property-set) ] ;
local new-messages = [ new file-target $(name) : gettext.POT
: $(project) : $(extract) ] ;
# Create a notfile target which will update the existing translation file
# with new messages.
local a = [ new action $(sources[1]) $(new-messages)
: gettext.update-po-dispatch ] ;
local r = [ new notfile-target $(name) : $(project) : $(a) ] ;
.constructed = [ virtual-target.register $(r) ] ;
}
else
{
errors.error "No source could be scanned by gettext tools" ;
}
return $(.constructed) ;
}
}
# Hook the generator above to the gettext.UPDATE pseudo-type (no sources
# declared: the generator inspects them itself in 'run').
generators.register [ new update-translations-generator gettext.update : : gettext.UPDATE ] ;
# Feed <gettext.keyword> property values into the KEYWORD action variable.
flags gettext.extract KEYWORD <gettext.keyword> ;
# Extract translatable strings from all scanned sources into a .pot file;
# the marker keyword defaults to 'i18n' when none was requested.
actions extract
{
    $(.path)xgettext -k$(KEYWORD:E=i18n) -o $(<) $(>)
}
# Does the real updating of the po file. The tricky part is that
# we're actually updating one of the sources:
# $(<) is the NOTFILE target we're updating
# $(>[1]) is the PO file to be really updated.
# $(>[2]) is the PO file created from sources.
#
# When file to be updated does not exist (during the
# first run), we need to copy the file created from sources.
# In all other cases, we need to update the file.
rule update-po-dispatch
{
# Don't fail if the existing .po is missing; the create-po action
# handles that case by copying the freshly extracted file.
NOCARE $(>[1]) ;
gettext.create-po $(<) : $(>) ;
gettext.update-po $(<) : $(>) ;
# '_' expands to a space and 'ok' to nothing inside the action bodies
# below; they drive the existing/updated action-selection trick.
_ on $(<) = " " ;
ok on $(<) = "" ;
EXISTING_PO on $(<) = $(>[1]) ;
}
# Due to fancy interaction of existing and updated, this rule can be called with
# one source, in which case we copy the lonely source into EXISTING_PO, or with
# two sources, in which case the action body expands to nothing. I'd really like
# to have "missing" action modifier.
actions quietly existing updated create-po bind EXISTING_PO
{
    cp$(_)"$(>[1])"$(_)"$(EXISTING_PO)"$($(>[2]:E=ok))
}
# Merge newly extracted messages ($(>[1])) into the existing .po in place.
actions updated update-po bind EXISTING_PO
{
    $(.path)msgmerge$(_)-U$(_)"$(EXISTING_PO)"$(_)"$(>[1])"
}
# Compile a .po source into a binary .mo message catalog.
actions gettext.compile
{
    $(.path)msgfmt -o $(<) $(>)
}
# Expose 'update' to Jamfiles as 'gettext.update'.
IMPORT $(__name__) : update : : gettext.update ;

View File

@ -0,0 +1,39 @@
# Copyright (C) 2004 Toon Knapen
#
# Use, modification and distribution is subject to the Boost Software
# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
# http://www.boost.org/LICENSE_1_0.txt)
import toolset : flags ;
import feature ;
import fortran ;
# No per-version configuration is needed; the rule exists so user-config
# 'using gfortran ;' statements are accepted.
rule init ( version ? : command * : options * )
{
}
# Declare flags and action for compilation
flags gfortran OPTIONS <fflags> ;
flags gfortran OPTIONS <optimization>off : -O0 ;
flags gfortran OPTIONS <optimization>speed : -O3 ;
flags gfortran OPTIONS <optimization>space : -Os ;
flags gfortran OPTIONS <debug-symbols>on : -g ;
flags gfortran OPTIONS <profiling>on : -pg ;
flags gfortran OPTIONS <link>shared/<main-target-type>LIB : -fPIC ;
flags gfortran DEFINES <define> ;
flags gfortran INCLUDES <include> ;
# Empty rule body: nothing to precompute, but the rule must exist so the
# action of the same name is dispatched through it.
rule compile.fortran
{
}
actions compile.fortran
{
    gcc -Wall $(OPTIONS) -D$(DEFINES) -I$(INCLUDES) -c -o "$(<)" "$(>)"
}
# NOTE(review): the action invokes the 'gcc' driver rather than 'gfortran';
# presumably gcc dispatches on the .f/.f90 suffix — confirm Fortran support
# is built into the installed gcc.
# Handles both fixed-form and free-form sources.
generators.register-fortran-compiler gfortran.compile.fortran : FORTRAN FORTRAN90 : OBJ ;

View File

@ -0,0 +1,181 @@
# Copyright 2001 David Abrahams.
# Copyright 2004, 2005 Markus Schoepflin.
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
#
# HP CXX compiler
# See http://h30097.www3.hp.com/cplus/?jumpid=reg_R1002_USEN
#
#
# Notes on this toolset:
#
# - Because of very subtle issues with the default ansi mode, strict_ansi mode
# is used for compilation. One example of things that don't work correctly in
# the default ansi mode is overload resolution of function templates when
# mixed with non-template functions.
#
# - For template instantiation "-timplicit_local" is used. Previously,
# "-tlocal" has been tried to avoid the need for a template repository
# but this doesn't work with manually instantiated templates. "-tweak"
# has not been used to avoid the stream of warning messages issued by
# ar or ld when creating a library or linking an application.
#
# - Debug symbols are generated with "-g3", as this works both in debug and
# release mode. When compiling C++ code without optimization, we additionally
# use "-gall", which generates full symbol table information for all classes,
# structs, and unions. As this turns off optimization, it can't be used when
# optimization is needed.
#
import feature generators common ;
import toolset : flags ;
feature.extend toolset : hp_cxx ;
# 'cxxarm' selects the ARM (Annotated Reference Manual) object model;
# the default is the ANSI model (see the -model flags below).
feature.extend c++abi : cxxarm ;
# Inherit from Unix toolset to get library ordering magic.
toolset.inherit hp_cxx : unix ;
generators.override hp_cxx.prebuilt : builtin.lib-generator ;
generators.override hp_cxx.prebuilt : builtin.prebuilt ;
generators.override hp_cxx.searched-lib-generator : searched-lib-generator ;
# Configures the HP C++ toolset. 'command' may name the cxx driver
# explicitly; otherwise it is searched for on PATH.
rule init ( version ? : command * : options * )
{
local condition = [ common.check-init-parameters hp_cxx : version $(version) ] ;
local command = [ common.get-invocation-command hp_cxx : cxx : $(command) ] ;
if $(command)
{
# Record the tool directory so actions (e.g. compile.c) can invoke
# sibling tools via $(.root).
local root = [ common.get-absolute-tool-path $(command[-1]) ] ;
if $(root)
{
flags hp_cxx .root $(condition) : "\"$(root)\"/" ;
}
}
# If we can't find 'cxx' anyway, at least show 'cxx' in the commands
command ?= cxx ;
common.handle-options hp_cxx : $(condition) : $(command) : $(options) ;
}
generators.register-c-compiler hp_cxx.compile.c++ : CPP : OBJ : <toolset>hp_cxx ;
generators.register-c-compiler hp_cxx.compile.c : C : OBJ : <toolset>hp_cxx ;
# No static linking as far as I can tell.
# flags cxx LINKFLAGS <runtime-link>static : -bstatic ;
# -g3 works in both debug and release mode (see file header notes).
flags hp_cxx.compile OPTIONS <debug-symbols>on : -g3 ;
# -gall emits full symbol tables but disables optimization, so it is only
# added when optimization is off anyway.
flags hp_cxx.compile OPTIONS <optimization>off/<debug-symbols>on : -gall ;
flags hp_cxx.link OPTIONS <debug-symbols>on : -g ;
flags hp_cxx.link OPTIONS <debug-symbols>off : -s ;
flags hp_cxx.compile OPTIONS <optimization>off : -O0 ;
flags hp_cxx.compile OPTIONS <optimization>speed/<inlining>on : -O2 ;
flags hp_cxx.compile OPTIONS <optimization>speed : -O2 ;
# This (undocumented) macro needs to be defined to get all C function
# overloads required by the C++ standard.
flags hp_cxx.compile.c++ OPTIONS : -D__CNAME_OVERLOADS ;
# Added for threading support
flags hp_cxx.compile OPTIONS <threading>multi : -pthread ;
flags hp_cxx.link OPTIONS <threading>multi : -pthread ;
# NOTE(review): the value '<inlining>size' below looks like a feature value
# rather than a compiler flag — verify this line's intent upstream.
flags hp_cxx.compile OPTIONS <optimization>space/<inlining>on : <inlining>size ;
flags hp_cxx.compile OPTIONS <optimization>space : -O1 ;
flags hp_cxx.compile OPTIONS <inlining>off : -inline none ;
# The compiler versions tried (up to V6.5-040) hang when compiling Boost code
# with full inlining enabled. So leave it at the default level for now.
#
# flags hp_cxx.compile OPTIONS <inlining>full : -inline all ;
flags hp_cxx.compile OPTIONS <profiling>on : -pg ;
flags hp_cxx.link OPTIONS <profiling>on : -pg ;
# Selection of the object model. This flag is needed on both the C++ compiler
# and linker command line.
# Unspecified ABI translates to '-model ansi' as most
# standard-conforming.
flags hp_cxx.compile.c++ OPTIONS <c++abi> : -model ansi : : hack-hack ;
flags hp_cxx.compile.c++ OPTIONS <c++abi>cxxarm : -model arm ;
flags hp_cxx.link OPTIONS <c++abi> : -model ansi : : hack-hack ;
flags hp_cxx.link OPTIONS <c++abi>cxxarm : -model arm ;
# Display a descriptive tag together with each compiler message. This tag can
# be used by the user to explicitly suppress the compiler message.
flags hp_cxx.compile OPTIONS : -msg_display_tag ;
# Pass user-specified free-form flags and paths through to the actions.
flags hp_cxx.compile OPTIONS <cflags> ;
flags hp_cxx.compile.c++ OPTIONS <cxxflags> ;
flags hp_cxx.compile DEFINES <define> ;
flags hp_cxx.compile INCLUDES <include> ;
flags hp_cxx.link OPTIONS <linkflags> ;
flags hp_cxx.link LIBPATH <library-path> ;
flags hp_cxx.link LIBRARIES <library-file> ;
flags hp_cxx.link FINDLIBS-ST <find-static-library> ;
flags hp_cxx.link FINDLIBS-SA <find-shared-library> ;
flags hp_cxx.compile.c++ TEMPLATE_DEPTH <c++-template-depth> ;
# Link an executable; 'bind LIBRARIES' makes library-file dependencies
# visible to the command as real paths.
actions link bind LIBRARIES
{
    $(CONFIG_COMMAND) -noimplicit_include $(OPTIONS) -o "$(<)" -L$(LIBPATH) "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA) -lrt -lm
}
# When creating dynamic libraries, we don't want to be warned about unresolved
# symbols, therefore all unresolved symbols are marked as expected by
# '-expect_unresolved *'. This also mirrors the behaviour of the GNU tool
# chain.
actions link.dll bind LIBRARIES
{
    $(CONFIG_COMMAND) -shared -expect_unresolved \* -noimplicit_include $(OPTIONS) -o "$(<[1])" -L$(LIBPATH) "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA) -lm
}
# Note: Relaxed ANSI mode (-std) is used for compilation because in strict ANSI
# C89 mode (-std1) the compiler doesn't accept C++ comments in C files. As -std
# is the default, no special flag is needed.
actions compile.c
{
    $(.root:E=)cc -c $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -o "$(<)" "$(>)"
}
# Note: The compiler is forced to compile the files as C++ (-x cxx) because
# otherwise it will silently ignore files with no file extension.
#
# Note: We deliberately don't suppress any warnings on the compiler command
# line, the user can always do this in a customized toolset later on.
rule compile.c++
{
    # We preprocess the TEMPLATE_DEPTH command line option here because we found
    # no way to do it correctly in the actual action code. There we either get
    # the -pending_instantiations parameter when no c++-template-depth property
    # has been specified or we get additional quotes around
    # "-pending_instantiations ".
    local template-depth = [ on $(1) return $(TEMPLATE_DEPTH) ] ;
    TEMPLATE_DEPTH on $(1) = "-pending_instantiations "$(template-depth) ;
}
actions compile.c++
{
    $(CONFIG_COMMAND) -x cxx -c -std strict_ansi -nopure_cname -noimplicit_include -timplicit_local -ptr "$(<[1]:D)/cxx_repository" $(OPTIONS) $(TEMPLATE_DEPTH) -D$(DEFINES) -I"$(INCLUDES)" -o "$(<)" "$(>)"
}
# Always create archive from scratch. See the gcc toolet for rationale.
RM = [ common.rm-command ] ;
actions together piecemeal archive
{
    $(RM) "$(<)"
    ar rc $(<) $(>)
}

View File

@ -0,0 +1,35 @@
# Copyright (C) 2004 Toon Knapen
#
# Use, modification and distribution is subject to the Boost Software
# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
# http://www.boost.org/LICENSE_1_0.txt)
import toolset : flags ;
import feature ;
import fortran ;
# No per-version configuration is needed; the rule exists so user-config
# 'using hpfortran ;' statements are accepted.
rule init ( version ? : command * : options * )
{
}
# Declare flags and action for compilation
flags hpfortran OPTIONS <optimization>off : -O0 ;
flags hpfortran OPTIONS <optimization>speed : -O3 ;
flags hpfortran OPTIONS <optimization>space : -O1 ;
flags hpfortran OPTIONS <debug-symbols>on : -g ;
flags hpfortran OPTIONS <profiling>on : -pg ;
flags hpfortran DEFINES <define> ;
flags hpfortran INCLUDES <include> ;
# Empty rule body: nothing to precompute, but the rule must exist so the
# action of the same name is dispatched through it.
rule compile.fortran
{
}
# +DD64 requests 64-bit code generation from the HP f77 driver.
actions compile.fortran
{
    f77 +DD64 $(OPTIONS) -D$(DEFINES) -I$(INCLUDES) -c -o "$(<)" "$(>)"
}
# Fixed-form Fortran only (no FORTRAN90 registration here).
generators.register-fortran-compiler hpfortran.compile.fortran : FORTRAN : OBJ ;

View File

@ -0,0 +1,44 @@
# Copyright (C) 2004 Toon Knapen
#
# Use, modification and distribution is subject to the Boost Software
# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
# http://www.boost.org/LICENSE_1_0.txt)
import toolset : flags ;
import feature ;
import fortran ;
# No per-version configuration is needed; the rule exists so user-config
# 'using ifort ;' statements are accepted.
rule init ( version ? : command * : options * )
{
}
# Declare flags and action for compilation (Windows-style /X options:
# this is the Intel Fortran compiler driver on Windows).
flags ifort OPTIONS <fflags> ;
flags ifort OPTIONS <optimization>off : /Od ;
flags ifort OPTIONS <optimization>speed : /O3 ;
flags ifort OPTIONS <optimization>space : /O1 ;
flags ifort OPTIONS <debug-symbols>on : /debug:full ;
flags ifort OPTIONS <profiling>on : /Qprof_gen ;
# Select the MSVC runtime library variant matching runtime-link,
# runtime-debugging and threading (same matrix as the msvc toolset).
flags ifort.compile FFLAGS <runtime-debugging>off/<runtime-link>shared : /MD ;
flags ifort.compile FFLAGS <runtime-debugging>on/<runtime-link>shared : /MDd ;
flags ifort.compile FFLAGS <runtime-debugging>off/<runtime-link>static/<threading>single : /ML ;
flags ifort.compile FFLAGS <runtime-debugging>on/<runtime-link>static/<threading>single : /MLd ;
flags ifort.compile FFLAGS <runtime-debugging>off/<runtime-link>static/<threading>multi : /MT ;
flags ifort.compile FFLAGS <runtime-debugging>on/<runtime-link>static/<threading>multi : /MTd ;
flags ifort DEFINES <define> ;
flags ifort INCLUDES <include> ;
# Empty rule body: nothing to precompute, but the rule must exist so the
# action of the same name is dispatched through it.
rule compile.fortran
{
}
actions compile.fortran
{
    ifort $(FFLAGS) $(OPTIONS) /names:lowercase /D$(DEFINES) /I"$(INCLUDES)" /c /object:"$(<)" "$(>)"
}
# Fixed-form Fortran only (no FORTRAN90 registration here).
generators.register-fortran-compiler ifort.compile.fortran : FORTRAN : OBJ ;

View File

@ -0,0 +1,220 @@
# Copyright Vladimir Prus 2004.
# Copyright Noel Belcourt 2007.
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt
# or copy at http://www.boost.org/LICENSE_1_0.txt)
import intel ;
import feature : feature ;
import os ;
import toolset ;
import toolset : flags ;
import gcc ;
import common ;
import errors ;
import generators ;
feature.extend-subfeature toolset intel : platform : darwin ;
# Inherit gcc's generators, excluding the mingw link variants (not relevant
# on Darwin) and the PCH generators.
toolset.inherit-generators intel-darwin
<toolset>intel <toolset-intel:platform>darwin
: gcc
# Don't inherit PCH generators. They were not tested, and probably
# don't work for this compiler.
: gcc.mingw.link gcc.mingw.link.dll gcc.compile.c.pch gcc.compile.c++.pch
;
generators.override intel-darwin.prebuilt : builtin.lib-generator ;
generators.override intel-darwin.prebuilt : builtin.prebuilt ;
generators.override intel-darwin.searched-lib-generator : searched-lib-generator ;
toolset.inherit-rules intel-darwin : gcc ;
# Inherit gcc's flags except the ones redefined below for icc.
toolset.inherit-flags intel-darwin : gcc
: <inlining>off <inlining>on <inlining>full <optimization>space
<warnings>off <warnings>all <warnings>on
<architecture>x86/<address-model>32
<architecture>x86/<address-model>64
;
if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
{
.debug-configuration = true ;
}
# vectorization diagnostics
feature vectorize : off on full ;
# Initializes the intel-darwin toolset
# version in mandatory
# name (default icc) is used to invoke the specified intel complier
# compile and link options allow you to specify addition command line options for each version
rule init ( version ? : command * : options * )
{
local condition = [ common.check-init-parameters intel-darwin
: version $(version) ] ;
command = [ common.get-invocation-command intel-darwin : icc
: $(command) : /opt/intel_cc_80/bin ] ;
common.handle-options intel-darwin : $(condition) : $(command) : $(options) ;
gcc.init-link-flags intel-darwin darwin $(condition) ;
# handle <library-path>
# local library-path = [ feature.get-values <library-path> : $(options) ] ;
# flags intel-darwin.link USER_OPTIONS $(condition) : [ feature.get-values <dll-path> : $(options) ] ;
local root = [ feature.get-values <root> : $(options) ] ;
local bin ;
if $(command) || $(root)
{
bin ?= [ common.get-absolute-tool-path $(command[-1]) ] ;
root ?= $(bin:D) ;
if $(root)
{
# Libraries required to run the executable may be in either
# $(root)/lib (10.1 and earlier)
# or
# $(root)/lib/architecture-name (11.0 and later:
local lib_path = $(root)/lib $(root:P)/lib/$(bin:B) ;
if $(.debug-configuration)
{
ECHO notice: using intel libraries :: $(condition) :: $(lib_path) ;
}
flags intel-darwin.link RUN_PATH $(condition) : $(lib_path) ;
}
}
# Extract the major version number: first two characters, reduced to one
# when the second character is the dot (e.g. "9.1" -> "9", "11.0" -> "11").
local m = [ MATCH (..).* : $(version) ] ;
local n = [ MATCH (.)\\. : $(m) ] ;
if $(n) {
m = $(n) ;
}
local major = $(m) ;
# icc 9.x and 10+/11+ use different spellings for the inlining and
# runtime-link options.
if $(major) = "9" {
flags intel-darwin.compile OPTIONS $(condition)/<inlining>off : -Ob0 ;
flags intel-darwin.compile OPTIONS $(condition)/<inlining>on : -Ob1 ;
flags intel-darwin.compile OPTIONS $(condition)/<inlining>full : -Ob2 ;
flags intel-darwin.compile OPTIONS $(condition)/<vectorize>off : -vec-report0 ;
flags intel-darwin.compile OPTIONS $(condition)/<vectorize>on : -vec-report1 ;
flags intel-darwin.compile OPTIONS $(condition)/<vectorize>full : -vec-report5 ;
flags intel-darwin.link OPTIONS $(condition)/<runtime-link>static : -static -static-libcxa -lstdc++ -lpthread ;
flags intel-darwin.link OPTIONS $(condition)/<runtime-link>shared : -shared-libcxa -lstdc++ -lpthread ;
}
else {
flags intel-darwin.compile OPTIONS $(condition)/<inlining>off : -inline-level=0 ;
flags intel-darwin.compile OPTIONS $(condition)/<inlining>on : -inline-level=1 ;
flags intel-darwin.compile OPTIONS $(condition)/<inlining>full : -inline-level=2 ;
flags intel-darwin.compile OPTIONS $(condition)/<vectorize>off : -vec-report0 ;
flags intel-darwin.compile OPTIONS $(condition)/<vectorize>on : -vec-report1 ;
flags intel-darwin.compile OPTIONS $(condition)/<vectorize>full : -vec-report5 ;
flags intel-darwin.link OPTIONS $(condition)/<runtime-link>static : -static -static-intel -lstdc++ -lpthread ;
flags intel-darwin.link OPTIONS $(condition)/<runtime-link>shared : -shared-intel -lstdc++ -lpthread ;
}
local minor = [ MATCH ".*\\.(.).*" : $(version) ] ;
# wchar_t char_traits workaround for compilers older than 10.2
if $(major) = "9" || ( $(major) = "10" && ( $(minor) = "0" || $(minor) = "1" ) ) {
flags intel-darwin.compile DEFINES $(condition) : __WINT_TYPE__=int : unchecked ;
}
}
SPACE = " " ;
flags intel-darwin.compile OPTIONS <cflags> ;
flags intel-darwin.compile OPTIONS <cxxflags> ;
# flags intel-darwin.compile INCLUDES <include> ;
flags intel-darwin.compile OPTIONS <optimization>space : -O1 ; # no specific space optimization flag in icc
#
# CPU types that support EM64T (64-bit) code generation.
cpu-type-em64t = prescott nocona ;
flags intel-darwin.compile OPTIONS <instruction-set>$(cpu-type-em64t)/<address-model>32 : -m32 ; # -mcmodel=small ;
flags intel-darwin.compile OPTIONS <instruction-set>$(cpu-type-em64t)/<address-model>64 : -m64 ; # -mcmodel=large ;
# icc warning levels: -w0 (off) .. -w2 (all).
flags intel-darwin.compile.c OPTIONS <warnings>off : -w0 ;
flags intel-darwin.compile.c OPTIONS <warnings>on : -w1 ;
flags intel-darwin.compile.c OPTIONS <warnings>all : -w2 ;
flags intel-darwin.compile.c++ OPTIONS <warnings>off : -w0 ;
flags intel-darwin.compile.c++ OPTIONS <warnings>on : -w1 ;
flags intel-darwin.compile.c++ OPTIONS <warnings>all : -w2 ;
actions compile.c
{
    "$(CONFIG_COMMAND)" -xc $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
}
actions compile.c++
{
    "$(CONFIG_COMMAND)" -xc++ $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
}
flags intel-darwin ARFLAGS <archiveflags> ;
# Default value. Mostly for the sake of intel-linux
# that inherits from gcc, but does not has the same
# logic to set the .AR variable. We can put the same
# logic in intel-linux, but that's hardly worth the trouble
# as on Linux, 'ar' is always available.
.AR = ar ;
# Sets up removal of any stale archive before the archive action runs, so
# the library is always rebuilt from scratch (rationale quoted below).
rule archive ( targets * : sources * : properties * )
{
# Always remove archive and start again. Here's rationale from
# Andre Hentz:
#
# I had a file, say a1.c, that was included into liba.a.
# I moved a1.c to a2.c, updated my Jamfiles and rebuilt.
# My program was crashing with absurd errors.
# After some debugging I traced it back to the fact that a1.o was *still*
# in liba.a
#
# Rene Rivera:
#
# Originally removing the archive was done by splicing an RM
# onto the archive action. That makes archives fail to build on NT
# when they have many files because it will no longer execute the
# action directly and blow the line length limit. Instead we
# remove the file in a different action, just before the building
# of the archive.
#
local clean.a = $(targets[1])(clean) ;
TEMPORARY $(clean.a) ;
NOCARE $(clean.a) ;
LOCATE on $(clean.a) = [ on $(targets[1]) return $(LOCATE) ] ;
DEPENDS $(clean.a) : $(sources) ;
DEPENDS $(targets) : $(clean.a) ;
common.RmTemps $(clean.a) : $(targets) ;
}
# 'piecemeal' lets the command be split across invocations when the source
# list exceeds the command-line length limit.
actions piecemeal archive
{
    "$(.AR)" $(AROPTIONS) rc "$(<)" "$(>)"
    "ranlib" -cs "$(<)"
}
flags intel-darwin.link USER_OPTIONS <linkflags> ;
# Declare actions for linking
rule link ( targets * : sources * : properties * )
{
SPACE on $(targets) = " " ;
# Serialize execution of the 'link' action, since
# running N links in parallel is just slower.
JAM_SEMAPHORE on $(targets) = <s>intel-darwin-link-semaphore ;
}
actions link bind LIBRARIES
{
    "$(CONFIG_COMMAND)" $(USER_OPTIONS) -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS)
}
# Shared-library link: -dynamiclib with an install_name of just the
# file name, matching Darwin conventions.
actions link.dll bind LIBRARIES
{
    "$(CONFIG_COMMAND)" $(USER_OPTIONS) -L"$(LINKPATH)" -o "$(<)" -single_module -dynamiclib -install_name "$(<[1]:D=)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS)
}

View File

@ -0,0 +1,250 @@
# Copyright (c) 2003 Michael Stevens
# Copyright (c) 2011 Bryce Lelbach
#
# Use, modification and distribution is subject to the Boost Software
# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
# http://www.boost.org/LICENSE_1_0.txt)
import toolset ;
import feature ;
import toolset : flags ;
import intel ;
import gcc ;
import common ;
import errors ;
import generators ;
import type ;
import numbers ;
feature.extend-subfeature toolset intel : platform : linux ;
# Inherit gcc's generators except the mingw link variants (not relevant
# on Linux).
toolset.inherit-generators intel-linux
<toolset>intel <toolset-intel:platform>linux : gcc : gcc.mingw.link gcc.mingw.link.dll ;
generators.override intel-linux.prebuilt : builtin.lib-generator ;
generators.override intel-linux.prebuilt : builtin.prebuilt ;
generators.override intel-linux.searched-lib-generator : searched-lib-generator ;
# Override default do-nothing generators.
generators.override intel-linux.compile.c.pch : pch.default-c-pch-generator ;
generators.override intel-linux.compile.c++.pch : pch.default-cpp-pch-generator ;
# icc's precompiled headers use the .pchi suffix.
type.set-generated-target-suffix PCH : <toolset>intel <toolset-intel:platform>linux : pchi ;
toolset.inherit-rules intel-linux : gcc ;
# Inherit gcc's flags except the ones redefined below for icc.
toolset.inherit-flags intel-linux : gcc
: <inlining>off <inlining>on <inlining>full
<optimization>space <optimization>speed
<warnings>off <warnings>all <warnings>on
;
if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
{
.debug-configuration = true ;
}
# Initializes the intel-linux toolset.
# version is mandatory.
# name (default icpc) is used to invoke the specified intel-linux compiler.
# compile and link options allow you to specify additional command line options for each version.
rule init ( version ? : command * : options * )
{
local condition = [ common.check-init-parameters intel-linux
: version $(version) ] ;
if $(.debug-configuration)
{
ECHO "notice: intel-linux version is" $(version) ;
}
# Map the requested version onto the default install location of that
# compiler release; used when no explicit command is given.
local default_path ;
# Intel C++ Composer XE 2011 for Linux, aka Intel C++ Compiler XE 12.0,
# aka intel-linux-12.0. In this version, Intel thankfully decides to install
# to a sane 'intel' folder in /opt.
if [ MATCH "(12[.]0|12)" : $(version) ]
{ default_path = /opt/intel/bin ; }
# Intel C++ Compiler 11.1.
else if [ MATCH "(11[.]1)" : $(version) ]
{ default_path = /opt/intel_cce_11.1.064.x86_64/bin ; }
# Intel C++ Compiler 11.0.
else if [ MATCH "(11[.]0|11)" : $(version) ]
{ default_path = /opt/intel_cce_11.0.074.x86_64/bin ; }
# Intel C++ Compiler 10.1.
else if [ MATCH "(10[.]1)" : $(version) ]
{ default_path = /opt/intel_cce_10.1.013_x64/bin ; }
# Intel C++ Compiler 9.1.
else if [ MATCH "(9[.]1)" : $(version) ]
{ default_path = /opt/intel_cc_91/bin ; }
# Intel C++ Compiler 9.0.
else if [ MATCH "(9[.]0|9)" : $(version) ]
{ default_path = /opt/intel_cc_90/bin ; }
# Intel C++ Compiler 8.1.
else if [ MATCH "(8[.]1)" : $(version) ]
{ default_path = /opt/intel_cc_81/bin ; }
# Intel C++ Compiler 8.0 - this used to be the default, so now it's the
# fallback.
else
{ default_path = /opt/intel_cc_80/bin ; }
if $(.debug-configuration)
{
ECHO "notice: default search path for intel-linux is" $(default_path) ;
}
command = [ common.get-invocation-command intel-linux : icpc
: $(command) : $(default_path) ] ;
common.handle-options intel-linux : $(condition) : $(command) : $(options) ;
gcc.init-link-flags intel-linux gnu $(condition) ;
local root = [ feature.get-values <root> : $(options) ] ;
local bin ;
if $(command) || $(root)
{
bin ?= [ common.get-absolute-tool-path $(command[-1]) ] ;
root ?= $(bin:D) ;
# Ask the compiler for its real version; option spellings changed
# across major releases, so we key the flags below on it.
local command-string = $(command:J=" ") ;
local version-output = [ SHELL "$(command-string) --version" ] ;
local real-version = [ MATCH "([0-9.]+)" : $(version-output) ] ;
local major = [ MATCH "([0-9]+).*" : $(real-version) ] ;
# If we failed to determine major version, use the behaviour for
# the current compiler.
if $(major) && [ numbers.less $(major) 10 ]
{
# icc < 10 uses -Ob* for inlining control.
flags intel-linux.compile OPTIONS $(condition)/<inlining>off : "-Ob0" ;
flags intel-linux.compile OPTIONS $(condition)/<inlining>on : "-Ob1" ;
flags intel-linux.compile OPTIONS $(condition)/<inlining>full : "-Ob2" ;
flags intel-linux.compile OPTIONS $(condition)/<optimization>space : "-O1" ;
flags intel-linux.compile OPTIONS $(condition)/<optimization>speed : "-O3 -ip" ;
}
else if $(major) && [ numbers.less $(major) 11 ]
{
# icc 10.x switched to -inline-level=N, but has no -Os.
flags intel-linux.compile OPTIONS $(condition)/<inlining>off : "-inline-level=0" ;
flags intel-linux.compile OPTIONS $(condition)/<inlining>on : "-inline-level=1" ;
flags intel-linux.compile OPTIONS $(condition)/<inlining>full : "-inline-level=2" ;
flags intel-linux.compile OPTIONS $(condition)/<optimization>space : "-O1" ;
flags intel-linux.compile OPTIONS $(condition)/<optimization>speed : "-O3 -ip" ;
}
else # newer version of intel do have -Os (at least 11+, don't know about 10)
{
flags intel-linux.compile OPTIONS $(condition)/<inlining>off : "-inline-level=0" ;
flags intel-linux.compile OPTIONS $(condition)/<inlining>on : "-inline-level=1" ;
flags intel-linux.compile OPTIONS $(condition)/<inlining>full : "-inline-level=2" ;
flags intel-linux.compile OPTIONS $(condition)/<optimization>space : "-Os" ;
flags intel-linux.compile OPTIONS $(condition)/<optimization>speed : "-O3 -ip" ;
}
if $(root)
{
# Libraries required to run the executable may be in either
# $(root)/lib (10.1 and earlier)
# or
# $(root)/lib/architecture-name (11.0 and later:
local lib_path = $(root)/lib $(root:P)/lib/$(bin:B) ;
if $(.debug-configuration)
{
ECHO notice: using intel libraries :: $(condition) :: $(lib_path) ;
}
flags intel-linux.link RUN_PATH $(condition) : $(lib_path) ;
}
}
}
# Literal space for splicing multi-part options in actions below.
SPACE = " " ;
# Map the generic 'warnings' feature onto icpc warning levels.
flags intel-linux.compile OPTIONS <warnings>off : -w0 ;
flags intel-linux.compile OPTIONS <warnings>on : -w1 ;
flags intel-linux.compile OPTIONS <warnings>all : -w2 ;
# Set target-specific variables for C++ compilation (threading, PIC and
# address-model handling is inherited from gcc) and make the object file
# depend on the precompiled header, if one is bound to it.
rule compile.c++ ( targets * : sources * : properties * )
{
gcc.setup-threading $(targets) : $(sources) : $(properties) ;
gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ;
}
# NOTE(review): '-c' appears twice on this command line; looks redundant --
# TODO confirm it can be dropped.
actions compile.c++ bind PCH_FILE
{
"$(CONFIG_COMMAND)" -c -xc++ $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -use-pch"$(PCH_FILE)" -c -o "$(<)" "$(>)"
}
# Same setup as compile.c++, but the action compiles as C (-xc).
rule compile.c ( targets * : sources * : properties * )
{
gcc.setup-threading $(targets) : $(sources) : $(properties) ;
gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ;
}
# NOTE(review): '-c' appears twice on this command line; looks redundant --
# TODO confirm it can be dropped.
actions compile.c bind PCH_FILE
{
"$(CONFIG_COMMAND)" -c -xc $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -use-pch"$(PCH_FILE)" -c -o "$(<)" "$(>)"
}
# Target-specific setup for building a C++ precompiled header.
rule compile.c++.pch ( targets * : sources * : properties * )
{
gcc.setup-threading $(targets) : $(sources) : $(properties) ;
gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
}
#
# Compiling a pch first deletes any existing *.pchi file, as Intel's compiler
# won't over-write an existing pch: instead it creates filename$1.pchi, filename$2.pchi
# etc - which appear not to do anything except take up disk space :-(
#
actions compile.c++.pch
{
rm -f "$(<)" && "$(CONFIG_COMMAND)" -x c++-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -pch-create "$(<)" "$(>)"
}
# Fortran compilation. The Intel Fortran driver 'ifort' is hard-coded here
# and is assumed to be found on PATH.
actions compile.fortran
{
"ifort" -c $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
}
# Target-specific setup for building a C precompiled header; the action
# deletes any stale pch first, same as compile.c++.pch above.
rule compile.c.pch ( targets * : sources * : properties * )
{
gcc.setup-threading $(targets) : $(sources) : $(properties) ;
gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
}
actions compile.c.pch
{
rm -f "$(<)" && "$(CONFIG_COMMAND)" -x c-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -pch-create "$(<)" "$(>)"
}
# Link an executable. Links are serialized through a semaphore since
# running several in parallel is slower overall.
rule link ( targets * : sources * : properties * )
{
gcc.setup-threading $(targets) : $(sources) : $(properties) ;
gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
SPACE on $(targets) = " " ;
JAM_SEMAPHORE on $(targets) = <s>intel-linux-link-semaphore ;
}
actions link bind LIBRARIES
{
"$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS) $(USER_OPTIONS)
}
# Link a shared library; serialized through the same semaphore as 'link'.
rule link.dll ( targets * : sources * : properties * )
{
gcc.setup-threading $(targets) : $(sources) : $(properties) ;
gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
SPACE on $(targets) = " " ;
JAM_SEMAPHORE on $(targets) = <s>intel-linux-link-semaphore ;
}
# Differ from 'link' above only by -shared (plus an -soname set to the
# output's basename, $(<[1]:D=)).
actions link.dll bind LIBRARIES
{
"$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -o "$(<)" -Wl,-soname$(SPACE)-Wl,$(<[1]:D=) -shared "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS) $(USER_OPTIONS)
}

View File

@ -0,0 +1,184 @@
# Copyright Vladimir Prus 2004.
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt
# or copy at http://www.boost.org/LICENSE_1_0.txt)
#
# intel-win toolset: drives the Intel C++ compiler (icl.exe) on Windows,
# inheriting most behaviour from the msvc toolset.
# Importing common is needed because the rules we inherit here depend on it.
# That is nasty.
import common ;
import errors ;
import feature ;
import intel ;
import msvc ;
import os ;
import toolset ;
import generators ;
import type ;
# Register 'win' as a value of the intel toolset's 'platform' subfeature.
feature.extend-subfeature toolset intel : platform : win ;
toolset.inherit-generators intel-win <toolset>intel <toolset-intel:platform>win : msvc ;
# Inherit msvc's flags except YLOPTION (re-declared at the end of this module).
toolset.inherit-flags intel-win : msvc : : YLOPTION ;
toolset.inherit-rules intel-win : msvc ;
# Override default do-nothing generators.
generators.override intel-win.compile.c.pch : pch.default-c-pch-generator ;
generators.override intel-win.compile.c++.pch : pch.default-cpp-pch-generator ;
generators.override intel-win.compile.rc : rc.compile.resource ;
generators.override intel-win.compile.mc : mc.compile ;
toolset.flags intel-win.compile PCH_SOURCE <pch>on : <pch-source> ;
# Linking to the shared runtime implies multi-threading on this toolset.
toolset.add-requirements <toolset>intel-win,<runtime-link>shared:<threading>multi ;
# Initializes the intel toolset for windows
rule init ( version ? : # the compiler version
command * : # the command to invoke the compiler itself
options * # Additional option: <compatibility>
# either 'vc6', 'vc7', 'vc7.1'
# or 'native'(default).
)
{
local compatibility =
[ feature.get-values <compatibility> : $(options) ] ;
local condition = [ common.check-init-parameters intel-win
: version $(version) : compatibility $(compatibility) ] ;
command = [ common.get-invocation-command intel-win : icl.exe :
$(command) ] ;
common.handle-options intel-win : $(condition) : $(command) : $(options) ;
local root ;
if $(command)
{
root = [ common.get-absolute-tool-path $(command[-1]) ] ;
root = $(root)/ ;
}
# Locate the iclvars batch file that sets up the compiler environment;
# every tool invocation below is prefixed with a call to it.
local setup ;
setup = [ GLOB $(root) : iclvars_*.bat ] ;
if ! $(setup)
{
setup = $(root)/iclvars.bat ;
}
setup = "call \""$(setup)"\" > nul " ;
if [ os.name ] = NT
{
setup = $(setup)"
" ;
}
else
{
# Not a native Windows shell (e.g. Cygwin): run the setup via cmd.
setup = "cmd /S /C "$(setup)" \"&&\" " ;
}
toolset.flags intel-win.compile .CC $(condition) : $(setup)icl ;
toolset.flags intel-win.link .LD $(condition) : $(setup)xilink ;
toolset.flags intel-win.archive .LD $(condition) : $(setup)xilink /lib ;
toolset.flags intel-win.link .MT $(condition) : $(setup)mt -nologo ;
toolset.flags intel-win.compile .MC $(condition) : $(setup)mc ;
toolset.flags intel-win.compile .RC $(condition) : $(setup)rc ;
# First character of the version string is taken as the major release.
local m = [ MATCH (.).* : $(version) ] ;
local major = $(m[1]) ;
local C++FLAGS ;
C++FLAGS += /nologo ;
# Reduce the number of spurious error messages
C++FLAGS += /Qwn5 /Qwd985 ;
# Enable ADL
C++FLAGS += -Qoption,c,--arg_dep_lookup ; #"c" works for C++, too
# Disable Microsoft "secure" overloads in Dinkumware libraries since they
# cause compile errors with Intel versions 9 and 10.
C++FLAGS += -D_SECURE_SCL=0 ;
if $(major) > 5
{
C++FLAGS += /Zc:forScope ; # Add support for correct for loop scoping.
}
# Add options recognized only by intel7 and above.
if $(major) >= 7
{
C++FLAGS += /Qansi_alias ;
}
if $(compatibility) = vc6
{
C++FLAGS +=
# Emulate VC6
/Qvc6
# No wchar_t support in vc6 dinkum library. Furthermore, in vc6
# compatibility-mode, wchar_t is not a distinct type from unsigned
# short.
-DBOOST_NO_INTRINSIC_WCHAR_T
;
}
else
{
if $(major) > 5
{
# Add support for wchar_t
C++FLAGS += /Zc:wchar_t
# Tell the dinkumware library about it.
-D_NATIVE_WCHAR_T_DEFINED
;
}
}
if $(compatibility) && $(compatibility) != native
{
# NOTE(review): $(base-vc) is not defined anywhere in this module, so
# this expands to nothing and no /Qvc* flag is emitted here --
# presumably it was meant to carry the vc* compatibility value.
# TODO confirm intent before changing (the vc6 branch above already
# adds /Qvc6, so a naive fix would emit it twice).
C++FLAGS += /Q$(base-vc) ;
}
else
{
C++FLAGS +=
-Qoption,cpp,--arg_dep_lookup
# The following options were intended to disable the Intel compiler's
# 'bug-emulation' mode, but were later reported to be causing ICE with
# Intel-Win 9.0. It is not yet clear which options can be safely used.
# -Qoption,cpp,--const_string_literals
# -Qoption,cpp,--new_for_init
# -Qoption,cpp,--no_implicit_typename
# -Qoption,cpp,--no_friend_injection
# -Qoption,cpp,--no_microsoft_bugs
;
}
toolset.flags intel-win CFLAGS $(condition) : $(C++FLAGS) ;
# By default, when creating PCH, intel adds 'i' to the explicitly
# specified name of the PCH file. Of course, Boost.Build is not
# happy when compiler produces not the file it was asked for.
# The option below stops this behaviour.
toolset.flags intel-win CFLAGS : -Qpchi- ;
if ! $(compatibility)
{
# If there's no backend version, assume 7.1.
compatibility = vc7.1 ;
}
# Strip the 'vc' prefix; the remainder selects msvc's version-specific setup.
local extract-version = [ MATCH ^vc(.*) : $(compatibility) ] ;
if ! $(extract-version)
{
errors.user-error "Invalid value for compatibility option:"
$(compatibility) ;
}
# Depending on the settings, running of tests require some runtime DLLs.
toolset.flags intel-win RUN_PATH $(condition) : $(root) ;
msvc.configure-version-specific intel-win : $(extract-version[1]) : $(condition) ;
}
# Libraries are referenced on the link line with no extra prefix option.
toolset.flags intel-win.link LIBRARY_OPTION <toolset>intel : "" ;
# NOTE(review): declared with no values -- presumably to neutralize the
# inherited msvc YLOPTION (excluded in inherit-flags above); TODO confirm.
toolset.flags intel-win YLOPTION ;

View File

@ -0,0 +1,34 @@
# Copyright Vladimir Prus 2004.
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt
# or copy at http://www.boost.org/LICENSE_1_0.txt)
# This is a generic 'intel' toolset. Depending on the current
# system, it forwards to the 'intel-linux', 'intel-darwin' or
# 'intel-win' modules.
import feature ;
import os ;
import toolset ;
feature.extend toolset : intel ;
feature.subfeature toolset intel : platform : : propagated link-incompatible ;
# Forward all arguments (up to nine colon-separated lists) to the
# platform-specific toolset: intel-linux on Linux, intel-darwin on
# Mac OS X, intel-win everywhere else.
rule init ( * : * )
{
if [ os.name ] = LINUX
{
toolset.using intel-linux :
$(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
}
else if [ os.name ] = MACOSX
{
toolset.using intel-darwin :
$(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
}
else
{
toolset.using intel-win :
$(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
}
}

View File

@ -0,0 +1,33 @@
# Copyright 2003 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
#
# Support for the flex lexical analyser generator: .l sources produce C,
# .ll sources produce C++.
import type ;
import generators ;
import feature ;
import property ;
# Optional symbol prefix, passed to flex as -P<prefix> by the action below.
feature.feature flex.prefix : : free ;
type.register LEX : l ;
type.register LEX++ : ll ;
generators.register-standard lex.lex : LEX : C ;
generators.register-standard lex.lex : LEX++ : CPP ;
# Nothing to configure; declared so that 'using lex ;' works.
rule init ( )
{
}
# If the build request carries a flex.prefix property, record its value
# (with the grist stripped) on the generated target so that the 'lex'
# action can pass it to flex via -P.
rule lex ( target : source : properties * )
{
local prefix-property = [ property.select flex.prefix : $(properties) ] ;
if $(prefix-property)
{
# $(target) is the first argument list, i.e. identical to $(<).
PREFIX on $(target) = $(prefix-property:G=) ;
}
}
# Run flex; PREFIX is the target-specific value set by the 'lex' rule and
# may be empty, in which case -P gets no argument text appended.
actions lex
{
flex -P$(PREFIX) -o$(<) $(>)
}

View File

@ -0,0 +1,72 @@
# Copyright 2003 Dave Abrahams
# Copyright 2003 Douglas Gregor
# Copyright 2006 Rene Rivera
# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# This module defines the 'make' main target rule, which builds a single
# output file by invoking a user-supplied jam rule as the action.
import "class" : new ;
import errors : error ;
import project ;
import property ;
import property-set ;
import regex ;
import targets ;
# Main target class backing the 'make' rule: instead of running a
# registered generator, it creates an action from the rule named by the
# <action> property and a single file target built by that action.
class make-target-class : basic-target
{
import type regex virtual-target ;
import "class" : new ;
rule __init__ ( name : project : sources * : requirements *
: default-build * : usage-requirements * )
{
basic-target.__init__ $(name) : $(project) : $(sources) :
$(requirements) : $(default-build) : $(usage-requirements) ;
}
# Returns empty usage requirements plus the registered file target,
# whose type is deduced from the target name's extension.
rule construct ( name : source-targets * : property-set )
{
local action-name = [ $(property-set).get <action> ] ;
# 'm' will always be set -- we add '@' ourselves in the 'make' rule
# below.
local m = [ MATCH ^@(.*) : $(action-name) ] ;
local a = [ new action $(source-targets) : $(m[1]) : $(property-set) ] ;
local t = [ new file-target $(self.name) exact : [ type.type
$(self.name) ] : $(self.project) : $(a) ] ;
return [ property-set.empty ] [ virtual-target.register $(t) ] ;
}
}
# Declares the 'make' main target.
#
# The generating rule is stored in the requirements as <action>@rule-name
# and recovered by make-target-class.construct above.
rule make ( target-name : sources * : generating-rule + : requirements * :
usage-requirements * )
{
local project = [ project.current ] ;
# The '@' sign causes the feature.jam module to qualify rule name with the
# module name of current project, if needed.
local m = [ MATCH ^(@).* : $(generating-rule) ] ;
if ! $(m)
{
generating-rule = @$(generating-rule) ;
}
requirements += <action>$(generating-rule) ;
targets.main-target-alternative
[ new make-target-class $(target-name) : $(project)
: [ targets.main-target-sources $(sources) : $(target-name) ]
: [ targets.main-target-requirements $(requirements) : $(project) ]
: [ targets.main-target-default-build : $(project) ]
: [ targets.main-target-usage-requirements $(usage-requirements) :
$(project) ] ] ;
}
# Make 'make' available in every Jamfile.
IMPORT $(__name__) : make : : make ;

View File

@ -0,0 +1,44 @@
#~ Copyright 2005 Alexey Pakhunov.
#~ Distributed under the Boost Software License, Version 1.0.
#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Support for the Microsoft message compiler tool.
# Notes:
# - there is just the message compiler tool; there is no tool for
# extracting message strings from sources
# - This module allows using the Microsoft message compiler
# with any toolset. In msvc.jam, there's a more specific
# message compiling action.
import common ;
import generators ;
import feature : feature get-values ;
import toolset : flags ;
import type ;
import rc ;
# Nothing to configure; declared so that 'using mc ;' works.
rule init ( )
{
}
type.register MC : mc ;
# Command line options
feature mc-input-encoding : ansi unicode : free ;
feature mc-output-encoding : unicode ansi : free ;
feature mc-set-customer-bit : no yes : free ;
flags mc.compile MCFLAGS <mc-input-encoding>ansi : -a ;
flags mc.compile MCFLAGS <mc-input-encoding>unicode : -u ;
flags mc.compile MCFLAGS <mc-output-encoding>ansi : -A ;
flags mc.compile MCFLAGS <mc-output-encoding>unicode : -U ;
flags mc.compile MCFLAGS <mc-set-customer-bit>no : ;
flags mc.compile MCFLAGS <mc-set-customer-bit>yes : -c ;
# One .mc source yields a header and a resource script.
generators.register-standard mc.compile : MC : H RC ;
# Run mc: -h and -r set the output directories for the generated header
# and .rc respectively (:DW = directory part in Windows path format).
actions compile
{
mc $(MCFLAGS) -h "$(<[1]:DW)" -r "$(<[2]:DW)" "$(>:W)"
}

View File

@ -0,0 +1,55 @@
# Copyright 2008 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Defines main target type 'message', that prints a message when built for the
# first time.
import project ;
import "class" : new ;
import targets ;
import property-set ;
class message-target-class : basic-target
{
rule __init__ ( name-and-dir : project : * )
{
basic-target.__init__ $(name-and-dir) : $(project) ;
self.3 = $(3) ;
self.4 = $(4) ;
self.5 = $(5) ;
self.6 = $(6) ;
self.7 = $(7) ;
self.8 = $(8) ;
self.9 = $(9) ;
self.built = ;
}
rule construct ( name : source-targets * : property-set )
{
if ! $(self.built)
{
for i in 3 4 5 6 7 8 9
{
if $(self.$(i))
{
ECHO $(self.$(i)) ;
}
}
self.built = 1 ;
}
return [ property-set.empty ] ;
}
}
rule message ( name : * )
{
local project = [ project.current ] ;
targets.main-target-alternative
[ new message-target-class $(name) : $(project)
: $(2) : $(3) : $(4) : $(5) : $(6) : $(7) ] ;
}
IMPORT $(__name__) : message : : message ;

View File

@ -0,0 +1,142 @@
# Copyright (c) 2005 Alexey Pakhunov.
#
# Use, modification and distribution is subject to the Boost Software
# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
# http://www.boost.org/LICENSE_1_0.txt)
# Microsoft Interface Definition Language (MIDL) related routines
import common ;
import generators ;
import feature : feature get-values ;
import os ;
import scanner ;
import toolset : flags ;
import type ;
# Nothing to configure; declared so that 'using midl ;' works.
rule init ( )
{
}
type.register IDL : idl ;
# A type library (.tlb) is generated by MIDL compiler and can be included
# to resources of an application (.rc). In order to be found by a resource
# compiler its target type should be derived from 'H' - otherwise
# the property '<implicit-dependency>' will be ignored.
type.register MSTYPELIB : tlb : H ;
# Register scanner for MIDL files: discovers '#include', 'import' and
# 'importlib' dependencies so that included IDL/headers and referenced
# type libraries are (re)built and tracked.
class midl-scanner : scanner
{
import path property-set regex scanner type virtual-target ;
rule __init__ ( includes * )
{
scanner.__init__ ;
# <include> search paths from the build request.
self.includes = $(includes) ;
# List of quoted strings
self.re-strings = "[ \t]*\"([^\"]*)\"([ \t]*,[ \t]*\"([^\"]*)\")*[ \t]*" ;
# 'import' and 'importlib' directives
self.re-import = "import"$(self.re-strings)"[ \t]*;" ;
self.re-importlib = "importlib[ \t]*[(]"$(self.re-strings)"[)][ \t]*;" ;
# C preprocessor 'include' directive
self.re-include-angle = "#[ \t]*include[ \t]*<(.*)>" ;
self.re-include-quoted = "#[ \t]*include[ \t]*\"(.*)\"" ;
}
rule pattern ( )
{
# Match '#include', 'import' and 'importlib' directives
return "((#[ \t]*include|import(lib)?).+(<(.*)>|\"(.*)\").+)" ;
}
# Classify each matched line and register the resulting dependencies.
rule process ( target : matches * : binding )
{
local included-angle = [ regex.transform $(matches) : $(self.re-include-angle) : 1 ] ;
local included-quoted = [ regex.transform $(matches) : $(self.re-include-quoted) : 1 ] ;
local imported = [ regex.transform $(matches) : $(self.re-import) : 1 3 ] ;
local imported_tlbs = [ regex.transform $(matches) : $(self.re-importlib) : 1 3 ] ;
# CONSIDER: the new scoping rule seem to defeat "on target" variables.
local g = [ on $(target) return $(HDRGRIST) ] ;
local b = [ NORMALIZE_PATH $(binding:D) ] ;
# Attach binding of including file to included targets.
# When target is directly created from virtual target
# this extra information is unnecessary. But in other
# cases, it allows to distinguish between two headers of the
# same name included from different places.
local g2 = $(g)"#"$(b) ;
included-angle = $(included-angle:G=$(g)) ;
included-quoted = $(included-quoted:G=$(g2)) ;
imported = $(imported:G=$(g2)) ;
imported_tlbs = $(imported_tlbs:G=$(g2)) ;
local all = $(included-angle) $(included-quoted) $(imported) ;
INCLUDES $(target) : $(all) ;
DEPENDS $(target) : $(imported_tlbs) ;
# Missing includes are not fatal to the scan.
NOCARE $(all) $(imported_tlbs) ;
# Quoted forms are searched relative to the including file first.
SEARCH on $(included-angle) = $(self.includes:G=) ;
SEARCH on $(included-quoted) = $(b) $(self.includes:G=) ;
SEARCH on $(imported) = $(b) $(self.includes:G=) ;
SEARCH on $(imported_tlbs) = $(b) $(self.includes:G=) ;
# Included C/C++ headers are scanned with the CPP scanner; imported
# IDL files are scanned recursively with this scanner.
scanner.propagate
[ type.get-scanner CPP : [ property-set.create $(self.includes) ] ] :
$(included-angle) $(included-quoted) : $(target) ;
scanner.propagate $(__name__) : $(imported) : $(target) ;
}
}
scanner.register midl-scanner : include ;
type.set-scanner IDL : midl-scanner ;
# Command line options
feature midl-stubless-proxy : yes no : propagated ;
feature midl-robust : yes no : propagated ;
flags midl.compile.idl MIDLFLAGS <midl-stubless-proxy>yes : /Oicf ;
flags midl.compile.idl MIDLFLAGS <midl-stubless-proxy>no : /Oic ;
flags midl.compile.idl MIDLFLAGS <midl-robust>yes : /robust ;
flags midl.compile.idl MIDLFLAGS <midl-robust>no : /no_robust ;
# Architecture-specific options; the bare <architecture>/<address-model>
# values cover the defaults (feature unset).
architecture-x86 = <architecture> <architecture>x86 ;
address-model-32 = <address-model> <address-model>32 ;
address-model-64 = <address-model> <address-model>64 ;
flags midl.compile.idl MIDLFLAGS $(architecture-x86)/$(address-model-32) : /win32 ;
# NOTE(review): unlike the /win32 and /ia64 lines, this one uses the explicit
# <address-model>64 value rather than $(address-model-64) -- presumably so
# /x64 is only chosen when 64 bits are requested explicitly; TODO confirm.
flags midl.compile.idl MIDLFLAGS $(architecture-x86)/<address-model>64 : /x64 ;
flags midl.compile.idl MIDLFLAGS <architecture>ia64/$(address-model-64) : /ia64 ;
flags midl.compile.idl DEFINES <define> ;
flags midl.compile.idl UNDEFS <undef> ;
flags midl.compile.idl INCLUDES <include> ;
# One IDL source yields a type library plus generated headers/sources.
generators.register-c-compiler midl.compile.idl : IDL : MSTYPELIB H C(%_i) C(%_proxy) C(%_dlldata) ;
# MIDL does not always generate '%_proxy.c' and '%_dlldata.c'. This behavior
# depends on contents of the source IDL file. Calling TOUCH_FILE below ensures
# that both files will be created so bjam will not try to recreate them
# constantly.
TOUCH_FILE = [ common.file-touch-command ] ;
# Compile an IDL file. The @(...) construct writes a response file so the
# command line stays short; $(nl) separates the response-file entries
# (presumably a newline defined elsewhere -- TODO confirm).
actions compile.idl
{
midl /nologo @"@($(<[1]:W).rsp:E=$(nl)"$(>:W)" $(nl)-D$(DEFINES) $(nl)"-I$(INCLUDES)" $(nl)-U$(UNDEFS) $(nl)$(MIDLFLAGS) $(nl)/tlb "$(<[1]:W)" $(nl)/h "$(<[2]:W)" $(nl)/iid "$(<[3]:W)" $(nl)/proxy "$(<[4]:W)" $(nl)/dlldata "$(<[5]:W)")"
$(TOUCH_FILE) "$(<[4]:W)"
$(TOUCH_FILE) "$(<[5]:W)"
}

View File

@ -0,0 +1,145 @@
# Copyright Noel Belcourt 2007.
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
#
# Toolset for the SGI MIPSpro compilers (IRIX).
import property ;
import generators ;
import os ;
import toolset : flags ;
import feature ;
import fortran ;
import type ;
import common ;
feature.extend toolset : mipspro ;
# Inherit generic unix behaviour (archiving, searched libs, etc.).
toolset.inherit mipspro : unix ;
generators.override mipspro.prebuilt : builtin.lib-generator ;
generators.override mipspro.searched-lib-generator : searched-lib-generator ;
# Documentation and toolchain description located
# http://www.sgi.com/products/software/irix/tools/
# Configure the mipspro toolset: 'CC' is the C++ driver; the C and
# Fortran drivers (cc, f77) are derived from its location.
rule init ( version ? : command * : options * )
{
local condition = [
common.check-init-parameters mipspro : version $(version) ] ;
command = [ common.get-invocation-command mipspro : CC : $(command) ] ;
common.handle-options mipspro : $(condition) : $(command) : $(options) ;
# NOTE(review): command_c is referenced before it is ever assigned (so
# $(command_c[1--2]) expands to nothing) and is not declared 'local' --
# presumably copied from another toolset; TODO confirm intent.
command_c = $(command_c[1--2]) $(command[-1]:B=cc) ;
toolset.flags mipspro CONFIG_C_COMMAND $(condition) : $(command_c) ;
# fortran support
# NOTE(review): $(install_dir) is never set in this module, so it expands
# to nothing here; 'local command' also shadows the outer variable.
local command = [
common.get-invocation-command mipspro : f77 : $(command) : $(install_dir) ] ;
command_f = $(command_f[1--2]) $(command[-1]:B=f77) ;
toolset.flags mipspro CONFIG_F_COMMAND $(condition) : $(command_f) ;
# set link flags
flags mipspro.link FINDLIBS-ST : [
feature.get-values <find-static-library> : $(options) ] : unchecked ;
flags mipspro.link FINDLIBS-SA : [
feature.get-values <find-shared-library> : $(options) ] : unchecked ;
}
# Declare generators
generators.register-c-compiler mipspro.compile.c : C : OBJ : <toolset>mipspro ;
generators.register-c-compiler mipspro.compile.c++ : CPP : OBJ : <toolset>mipspro ;
generators.register-fortran-compiler mipspro.compile.fortran : FORTRAN : OBJ : <toolset>mipspro ;
# ABI selection: -n32 for default/explicit 32-bit, -64 for 64-bit.
cpu-arch-32 =
<architecture>/<address-model>
<architecture>/<address-model>32 ;
cpu-arch-64 =
<architecture>/<address-model>64 ;
flags mipspro.compile OPTIONS $(cpu-arch-32) : -n32 ;
flags mipspro.compile OPTIONS $(cpu-arch-64) : -64 ;
# Declare flags and actions for compilation
flags mipspro.compile OPTIONS <debug-symbols>on : -g ;
# flags mipspro.compile OPTIONS <profiling>on : -xprofile=tcov ;
flags mipspro.compile OPTIONS <warnings>off : -w ;
flags mipspro.compile OPTIONS <warnings>on : -ansiW -diag_suppress 1429 ; # suppress long long is nonstandard warning
flags mipspro.compile OPTIONS <warnings>all : -fullwarn ;
flags mipspro.compile OPTIONS <optimization>speed : -Ofast ;
flags mipspro.compile OPTIONS <optimization>space : -O2 ;
# NOTE(review): this line pairs the free <cflags> feature with a fixed
# -LANG:std value, unlike the plain pass-through used for <cxxflags>
# below -- TODO confirm this is intended.
flags mipspro.compile OPTIONS <cflags> : -LANG:std ;
flags mipspro.compile.c++ OPTIONS <inlining>off : -INLINE:none ;
flags mipspro.compile.c++ OPTIONS <cxxflags> ;
flags mipspro.compile DEFINES <define> ;
flags mipspro.compile INCLUDES <include> ;
flags mipspro.compile.fortran OPTIONS <fflags> ;
# Compile C with the derived cc driver.
actions compile.c
{
"$(CONFIG_C_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
}
# Compile C++. NOTE(review): -FE:template_in_elf_section and -ptused are
# presumably SGI CC template-instantiation controls -- TODO confirm.
actions compile.c++
{
"$(CONFIG_COMMAND)" -FE:template_in_elf_section -ptused $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
}
# Compile Fortran with the derived f77 driver.
actions compile.fortran
{
"$(CONFIG_F_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
}
# Declare flags and actions for linking
flags mipspro.link OPTIONS <debug-symbols>on : -g ;
# Strip the binary when no debugging is needed
# flags mipspro.link OPTIONS <debug-symbols>off : -s ;
# flags mipspro.link OPTIONS <profiling>on : -xprofile=tcov ;
# flags mipspro.link OPTIONS <threading>multi : -mt ;
flags mipspro.link OPTIONS $(cpu-arch-32) : -n32 ;
flags mipspro.link OPTIONS $(cpu-arch-64) : -64 ;
flags mipspro.link OPTIONS <optimization>speed : -Ofast ;
flags mipspro.link OPTIONS <optimization>space : -O2 ;
flags mipspro.link OPTIONS <linkflags> ;
flags mipspro.link LINKPATH <library-path> ;
flags mipspro.link FINDLIBS-ST <find-static-library> ;
flags mipspro.link FINDLIBS-SA <find-shared-library> ;
# Multi-threaded builds link against pthread.
flags mipspro.link FINDLIBS-SA <threading>multi : pthread ;
flags mipspro.link LIBRARIES <library-file> ;
# Selects -Bstatic/-Bdynamic runtime binding in the link actions below.
flags mipspro.link LINK-RUNTIME <runtime-link>static : static ;
flags mipspro.link LINK-RUNTIME <runtime-link>shared : dynamic ;
flags mipspro.link RPATH <dll-path> ;
# Record a literal space on the targets for use in the command line.
rule link ( targets * : sources * : properties * )
{
SPACE on $(targets) = " " ;
}
actions link bind LIBRARIES
{
"$(CONFIG_COMMAND)" -FE:template_in_elf_section -ptused $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME) -lm
}
# Slight mods for dlls
rule link.dll ( targets * : sources * : properties * )
{
SPACE on $(targets) = " " ;
}
actions link.dll bind LIBRARIES
{
"$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME)
}
# Declare action for creating static libraries
# NOTE(review): uses a hard-coded 'ar' rather than a configurable archiver
# variable -- TODO confirm this is intended for this platform.
actions piecemeal archive
{
ar -cr "$(<)" "$(>)"
}

View File

@ -0,0 +1,583 @@
# Support for the Message Passing Interface (MPI)
#
# (C) Copyright 2005, 2006 Trustees of Indiana University
# (C) Copyright 2005 Douglas Gregor
#
# Distributed under the Boost Software License, Version 1.0. (See accompanying
# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt.)
#
# Authors: Douglas Gregor
# Andrew Lumsdaine
#
# ==== MPI Configuration ====
#
# For many users, MPI support can be enabled simply by adding the following
# line to your user-config.jam file:
#
# using mpi ;
#
# This should auto-detect MPI settings based on the MPI wrapper compiler in
# your path, e.g., "mpic++". If the wrapper compiler is not in your path, or
# has a different name, you can pass the name of the wrapper compiler as the
# first argument to the mpi module:
#
# using mpi : /opt/mpich2-1.0.4/bin/mpiCC ;
#
# If your MPI implementation does not have a wrapper compiler, or the MPI
# auto-detection code does not work with your MPI's wrapper compiler,
# you can pass MPI-related options explicitly via the second parameter to the
# mpi module:
#
# using mpi : : <find-shared-library>lammpio <find-shared-library>lammpi++
# <find-shared-library>mpi <find-shared-library>lam
# <find-shared-library>dl ;
#
# To see the results of MPI auto-detection, pass "--debug-configuration" on
# the bjam command line.
#
# The (optional) fourth argument configures Boost.MPI for running
# regression tests. These parameters specify the executable used to
# launch jobs (default: "mpirun") followed by any necessary arguments
# to this to run tests and tell the program to expect the number of
# processors to follow (default: "-np"). With the default parameters,
# for instance, the test harness will execute, e.g.,
#
# mpirun -np 4 all_gather_test
#
# ==== Linking Against the MPI Libraries ===
#
# To link against the MPI libraries, import the "mpi" module and add the
# following requirement to your target:
#
# <library>/mpi//mpi
#
# Since MPI support is not always available, you should check
# "mpi.configured" before trying to link against the MPI libraries.
import "class" : new ;
import common ;
import feature : feature ;
import generators ;
import os ;
import project ;
import property ;
import testing ;
import toolset ;
import type ;
import path ;
# Make this module a project
project.initialize $(__name__) ;
project mpi ;
# Remember whether --debug-configuration was passed on the command line so
# the detection code below can report what it found.
if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
{
.debug-configuration = true ;
}
# Assuming the first part of the command line is the given prefix
# followed by some non-empty value, remove the first argument. Returns
# either nothing (if there was no prefix or no value) or a pair
#
# <name>value rest-of-cmdline
#
# This is a subroutine of cmdline_to_features
rule add_feature ( prefix name cmdline )
{
local match = [ MATCH "^$(prefix)([^\" ]+|\"[^\"]+\") *(.*)$" : $(cmdline) ] ;
# If there was no value associated with the prefix, abort
if ! $(match) {
return ;
}
local value = $(match[1]) ;
# Re-quote a value containing spaces so it survives later tokenization.
if [ MATCH " +" : $(value) ] {
value = "\"$(value)\"" ;
}
# Return the feature plus the unconsumed remainder of the command line.
return "<$(name)>$(value)" $(match[2]) ;
}
# Strip any end-of-line characters off the given string and return the
# result.
# Drop trailing end-of-line (and other non-printable) characters from the
# given string; when the pattern does not match, hand the input back
# unchanged.
rule strip-eol ( string )
{
local cleaned = [ MATCH "^(([A-Za-z0-9~`\.!@#$%^&*()_+={};:'\",.<>/?\\| -]|[|])*).*$" : $(string) ] ;
if ! $(cleaned)
{
return $(string) ;
}
else
{
return $(cleaned[1]) ;
}
}
# Split a command-line into a set of features. Certain kinds of
# compiler flags are recognized (e.g., -I, -D, -L, -l) and replaced
# with their Boost.Build equivalents (e.g., <include>, <define>,
# <library-path>, <find-library>). All other arguments are introduced
# using the features in the unknown-features parameter, because we
# don't know how to deal with them.
# The incoming command line should be a string starting with
# an executable (e.g., "g++ -I/include/path") and may contain any
# number of command-line arguments thereafter. The result is a list of
# features corresponding to the given command line, ignoring the
# executable.
rule cmdline_to_features ( cmdline : unknown-features ? )
{
local executable ;
# NOTE(review): 'features' appears to be unused in this rule.
local features ;
local otherflags ;
local result ;
# Arguments we cannot classify are attached to each of these features.
unknown-features ?= <cxxflags> <linkflags> ;
# Pull the executable out of the command line. At this point, the
# executable is just thrown away.
local match = [ MATCH "^([^\" ]+|\"[^\"]+\") *(.*)$" : $(cmdline) ] ;
executable = $(match[1]) ;
cmdline = $(match[2]) ;
# List the prefix/feature pairs that we will be able to transform.
# Every kind of parameter not mentioned here will be placed in both
# cxxflags and linkflags, because we don't know where they should go.
local feature_kinds-D = "define" ;
local feature_kinds-I = "include" ;
local feature_kinds-L = "library-path" ;
local feature_kinds-l = "find-shared-library" ;
while $(cmdline) {
# Check for one of the feature prefixes we know about. If we
# find one (and the associated value is nonempty), convert it
# into a feature.
local match = [ MATCH "^(-.)(.*)" : $(cmdline) ] ;
local matched ;
if $(match) && $(match[2]) {
local prefix = $(match[1]) ;
# feature_kinds-X (computed name) holds the feature for prefix "-X".
if $(feature_kinds$(prefix)) {
local name = $(feature_kinds$(prefix)) ;
local add = [ add_feature $(prefix) $(name) $(cmdline) ] ;
if $(add) {
if $(add[1]) = <find-shared-library>pthread
{
# Uhm. It's not really nice that this MPI implementation
# uses -lpthread as opposed to -pthread. We do want to
# set <threading>multi, instead of -lpthread.
result += "<threading>multi" ;
MPI_EXTRA_REQUIREMENTS += "<threading>multi" ;
}
else
{
result += $(add[1]) ;
}
cmdline = $(add[2]) ;
matched = yes ;
}
}
}
# If we haven't matched a feature prefix, just grab the command-line
# argument itself. If we can map this argument to a feature
# (e.g., -pthread -> <threading>multi), then do so; otherwise,
# add it to the list of "other" flags that we don't
# understand.
if ! $(matched) {
match = [ MATCH "^([^\" ]+|\"[^\"]+\") *(.*)$" : $(cmdline) ] ;
local value = $(match[1]) ;
cmdline = $(match[2]) ;
# Check for multithreading support
if $(value) = "-pthread" || $(value) = "-pthreads"
{
result += "<threading>multi" ;
# DPG: This is a hack intended to work around a BBv2 bug where
# requirements propagated from libraries are not checked for
# conflicts when BBv2 determines which "common" properties to
# apply to a target. In our case, the <threading>single property
# gets propagated from the common properties to Boost.MPI
# targets, even though <threading>multi is in the usage
# requirements of <library>/mpi//mpi.
MPI_EXTRA_REQUIREMENTS += "<threading>multi" ;
}
# Keep only arguments containing at least one "interesting"
# character; pure punctuation/whitespace leftovers are dropped.
else if [ MATCH "(.*[a-zA-Z0-9<>?-].*)" : $(value) ] {
otherflags += $(value) ;
}
}
}
# If there are other flags that we don't understand, add them to the
# result as both <cxxflags> and <linkflags>
if $(otherflags) {
for unknown in $(unknown-features)
{
result += "$(unknown)$(otherflags:J= )" ;
}
}
return $(result) ;
}
# Determine if it is safe to execute the given shell command by trying
# to execute it and determining whether the exit code is zero or
# not. Returns true for an exit code of zero, false otherwise.
local rule safe-shell-command ( cmdline )
{
# SSCOK is echoed only when the command exits 0; MATCH turns its presence
# into a non-empty (true) return value.
local result = [ SHELL "$(cmdline) > /dev/null 2>/dev/null; if [ "$?" -eq "0" ]; then echo SSCOK; fi" ] ;
return [ MATCH ".*(SSCOK).*" : $(result) ] ;
}
# Initialize the MPI module.
#
# mpicxx              - optional name or path of the MPI wrapper compiler.
# options             - explicit build properties; when supplied,
#                       auto-detection is skipped entirely.
# mpirun-with-options - job launcher executable followed by its flags
#                       (defaults: "mpirun" and "-np").
rule init ( mpicxx ? : options * : mpirun-with-options * )
{
if ! $(options) && $(.debug-configuration)
{
ECHO "===============MPI Auto-configuration===============" ;
}
if ! $(mpicxx) && [ os.on-windows ]
{
# Try to auto-configure to the Microsoft Compute Cluster Pack
local cluster_pack_path_native = "C:\\Program Files\\Microsoft Compute Cluster Pack" ;
local cluster_pack_path = [ path.make $(cluster_pack_path_native) ] ;
if [ GLOB $(cluster_pack_path_native)\\Include : mpi.h ]
{
if $(.debug-configuration)
{
ECHO "Found Microsoft Compute Cluster Pack: $(cluster_pack_path_native)" ;
}
# Pick up either the 32-bit or 64-bit library, depending on which address
# model the user has selected. Default to 32-bit.
options = <include>$(cluster_pack_path)/Include
<address-model>64:<library-path>$(cluster_pack_path)/Lib/amd64
<library-path>$(cluster_pack_path)/Lib/i386
<find-static-library>msmpi
<toolset>msvc:<define>_SECURE_SCL=0
;
# Setup the "mpirun" equivalent (mpiexec)
.mpirun = "\"$(cluster_pack_path_native)\\Bin\\mpiexec.exe"\" ;
.mpirun_flags = -n ;
}
else if $(.debug-configuration)
{
ECHO "Did not find Microsoft Compute Cluster Pack in $(cluster_pack_path_native)." ;
}
}
if ! $(options)
{
# Try to auto-detect options based on the wrapper compiler
local command = [ common.get-invocation-command mpi : mpic++ : $(mpicxx) ] ;
if ! $(mpicxx) && ! $(command)
{
# Try "mpiCC", which is used by MPICH
command = [ common.get-invocation-command mpi : mpiCC ] ;
}
if ! $(mpicxx) && ! $(command)
{
# Try "mpicxx", which is used by OpenMPI and MPICH2
command = [ common.get-invocation-command mpi : mpicxx ] ;
}
# 'result' holds a single command line; compile_flags/link_flags hold
# separate compile and link command lines, depending on which probe
# below succeeds.
local result ;
local compile_flags ;
local link_flags ;
if ! $(command)
{
# Do nothing: we'll complain later
}
# OpenMPI and newer versions of LAM-MPI have -showme:compile and
# -showme:link.
else if [ safe-shell-command "$(command) -showme:compile" ] &&
[ safe-shell-command "$(command) -showme:link" ]
{
if $(.debug-configuration)
{
ECHO "Found recent LAM-MPI or Open MPI wrapper compiler: $(command)" ;
}
compile_flags = [ SHELL "$(command) -showme:compile" ] ;
link_flags = [ SHELL "$(command) -showme:link" ] ;
# Prepend COMPILER as the executable name, to match the format of
# other compilation commands.
compile_flags = "COMPILER $(compile_flags)" ;
link_flags = "COMPILER $(link_flags)" ;
}
# Look for LAM-MPI's -showme
else if [ safe-shell-command "$(command) -showme" ]
{
if $(.debug-configuration)
{
ECHO "Found older LAM-MPI wrapper compiler: $(command)" ;
}
result = [ SHELL "$(command) -showme" ] ;
}
# Look for MPICH
else if [ safe-shell-command "$(command) -show" ]
{
if $(.debug-configuration)
{
ECHO "Found MPICH wrapper compiler: $(command)" ;
}
compile_flags = [ SHELL "$(command) -compile_info" ] ;
link_flags = [ SHELL "$(command) -link_info" ] ;
}
# Sun HPC and Ibm POE
else if [ SHELL "$(command) -v 2>/dev/null" ]
{
compile_flags = [ SHELL "$(command) -c -v -xtarget=native64 2>/dev/null" ] ;
# Sun's wrapper prints a dashed separator before the real command
# line; its presence distinguishes Sun HPC from IBM POE.
local back = [ MATCH "--------------------(.*)" : $(compile_flags) ] ;
if $(back)
{
# Sun HPC
if $(.debug-configuration)
{
ECHO "Found Sun MPI wrapper compiler: $(command)" ;
}
compile_flags = [ MATCH "(.*)--------------------" : $(back) ] ;
compile_flags = [ MATCH "(.*)-v" : $(compile_flags) ] ;
link_flags = [ SHELL "$(command) -v -xtarget=native64 2>/dev/null" ] ;
link_flags = [ MATCH "--------------------(.*)" : $(link_flags) ] ;
link_flags = [ MATCH "(.*)--------------------" : $(link_flags) ] ;
# strip out -v from compile options
local front = [ MATCH "(.*)-v" : $(link_flags) ] ;
local back = [ MATCH "-v(.*)" : $(link_flags) ] ;
link_flags = "$(front) $(back)" ;
front = [ MATCH "(.*)-xtarget=native64" : $(link_flags) ] ;
back = [ MATCH "-xtarget=native64(.*)" : $(link_flags) ] ;
link_flags = "$(front) $(back)" ;
}
else
{
# Ibm POE
if $(.debug-configuration)
{
ECHO "Found IBM MPI wrapper compiler: $(command)" ;
}
#
# Strip the probe-only switches (-v, -c) out of the reported
# command line before reusing it as compile/link flags.
compile_flags = [ SHELL "$(command) -c -v 2>/dev/null" ] ;
compile_flags = [ MATCH "(.*)exec: export.*" : $(compile_flags) ] ;
local front = [ MATCH "(.*)-v" : $(compile_flags) ] ;
local back = [ MATCH "-v(.*)" : $(compile_flags) ] ;
compile_flags = "$(front) $(back)" ;
front = [ MATCH "(.*)-c" : $(compile_flags) ] ;
back = [ MATCH "-c(.*)" : $(compile_flags) ] ;
compile_flags = "$(front) $(back)" ;
link_flags = $(compile_flags) ;
# get location of mpif.h from mpxlf
local f_flags = [ SHELL "mpxlf -v 2>/dev/null" ] ;
f_flags = [ MATCH "(.*)exec: export.*" : $(f_flags) ] ;
front = [ MATCH "(.*)-v" : $(f_flags) ] ;
back = [ MATCH "-v(.*)" : $(f_flags) ] ;
f_flags = "$(front) $(back)" ;
f_flags = [ MATCH "xlf_r(.*)" : $(f_flags) ] ;
f_flags = [ MATCH "-F:mpxlf_r(.*)" : $(f_flags) ] ;
compile_flags = [ strip-eol $(compile_flags) ] ;
compile_flags = "$(compile_flags) $(f_flags)" ;
}
}
# Proceed if detection produced either a single command line (result)
# or a compile/link pair.
if $(result) || $(compile_flags) && $(link_flags)
{
if $(result)
{
result = [ strip-eol $(result) ] ;
options = [ cmdline_to_features $(result) ] ;
}
else
{
compile_flags = [ strip-eol $(compile_flags) ] ;
link_flags = [ strip-eol $(link_flags) ] ;
# Separately process compilation and link features, then combine
# them at the end.
local compile_features = [ cmdline_to_features $(compile_flags)
: "<cxxflags>" ] ;
local link_features = [ cmdline_to_features $(link_flags)
: "<linkflags>" ] ;
options = $(compile_features) $(link_features) ;
}
# If requested, display MPI configuration information.
if $(.debug-configuration)
{
if $(result)
{
ECHO " Wrapper compiler command line: $(result)" ;
}
else
{
local match = [ MATCH "^([^\" ]+|\"[^\"]+\") *(.*)$"
: $(compile_flags) ] ;
ECHO "MPI compilation flags: $(match[2])" ;
local match = [ MATCH "^([^\" ]+|\"[^\"]+\") *(.*)$"
: $(link_flags) ] ;
ECHO "MPI link flags: $(match[2])" ;
}
}
}
else
{
if $(command)
{
ECHO "MPI auto-detection failed: unknown wrapper compiler $(command)" ;
ECHO "Please report this error to the Boost mailing list: http://www.boost.org" ;
}
else if $(mpicxx)
{
ECHO "MPI auto-detection failed: unable to find wrapper compiler $(mpicxx)" ;
}
else
{
ECHO "MPI auto-detection failed: unable to find wrapper compiler `mpic++' or `mpiCC'" ;
}
ECHO "You will need to manually configure MPI support." ;
}
}
# Find mpirun (or its equivalent) and its flags
if ! $(.mpirun)
{
.mpirun =
[ common.get-invocation-command mpi : mpirun : $(mpirun-with-options[1]) ] ;
.mpirun_flags = $(mpirun-with-options[2-]) ;
.mpirun_flags ?= -np ;
}
if $(.debug-configuration)
{
if $(options)
{
# NOTE(review): lowercase 'echo' here (vs. ECHO elsewhere) -- bjam
# appears to accept it as an alias; confirm before "fixing".
echo "MPI build features: " ;
ECHO $(options) ;
}
if $(.mpirun)
{
echo "MPI launcher: $(.mpirun) $(.mpirun_flags)" ;
}
ECHO "====================================================" ;
}
if $(options)
{
.configured = true ;
# Set up the "mpi" alias
alias mpi : : : : $(options) ;
}
}
# States whether MPI has been configured
rule configured ( )
{
# Non-empty (true) only after init has successfully set up the 'mpi' alias.
return $(.configured) ;
}
# Returns the "extra" requirements needed to build MPI. These requirements are
# part of the /mpi//mpi library target, but they need to be added to anything
# that uses MPI directly to work around bugs in BBv2's propagation of
# requirements.
rule extra-requirements ( )
{
# MPI_EXTRA_REQUIREMENTS is accumulated by cmdline_to_features (e.g.
# <threading>multi when the wrapper compiler links against pthread).
return $(MPI_EXTRA_REQUIREMENTS) ;
}
# Support for testing; borrowed from Python
# RUN_MPI_OUTPUT holds the captured output of a test run; RUN_MPI is the
# TEST-derived target type produced from that output.
type.register RUN_MPI_OUTPUT ;
type.register RUN_MPI : : TEST ;
# Generator that builds an executable from the test sources and wraps it in
# a target that runs it under the MPI launcher.
class mpi-test-generator : generator
{
import property-set ;
rule __init__ ( * : * )
{
generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
# Composing generator: may consume multiple sources in one invocation.
self.composing = true ;
}
rule run ( project name ? : property-set : sources * : multiple ? )
{
# Generate an executable from the sources. This is the executable we will run.
local executable =
[ generators.construct $(project) $(name) : EXE : $(property-set) : $(sources) ] ;
# Skip the first element of construct's result -- NOTE(review):
# presumably the usage-requirement property set; confirm.
result =
[ construct-result $(executable[2-]) : $(project) $(name)-run : $(property-set) ] ;
}
}
# Use mpi-test-generator to generate MPI tests from sources
generators.register
[ new mpi-test-generator mpi.capture-output : : RUN_MPI_OUTPUT ] ;
# A RUN_MPI target passes when the captured output indicates success.
generators.register-standard testing.expect-success
: RUN_MPI_OUTPUT : RUN_MPI ;
# The number of processes to spawn when executing an MPI test.
feature mpi:processes : : free incidental ;
# The flag settings on testing.capture-output do not
# apply to mpi.capture output at the moment.
# Redo this explicitly.
toolset.flags mpi.capture-output ARGS <testing.arg> ;
rule capture-output ( target : sources * : properties * )
{
# Use the standard capture-output rule to run the tests
testing.capture-output $(target) : $(sources[1]) : $(properties) ;
# Determine the number of processes we should run on.
local num_processes = [ property.select <mpi:processes> : $(properties) ] ;
# Strip the grist, leaving just the requested process count.
num_processes = $(num_processes:G=)
;
# serialize the MPI tests to avoid overloading systems
JAM_SEMAPHORE on $(target) = <s>mpi-run-semaphore ;
# We launch MPI processes using the "mpirun" equivalent specified by the user.
LAUNCHER on $(target) =
[ on $(target) return $(.mpirun) $(.mpirun_flags) $(num_processes) ] ;
}
# Creates a set of test cases to be run through the MPI launcher. The name, sources,
# and requirements are the same as for any other test generator. However, schedule is
# a list of numbers, which indicates how many processes each test run will use. For
# example, passing 1 2 7 will run the test with 1 process, then 2 processes,
# then 7 processes. The name provided is just the base name: the actual tests
# will be the name followed by a hyphen, then the number of processes.
# Declare one RUN_MPI test per entry in 'schedule'; each test is named
# "<name>-<processes>" and executes under that many MPI processes.
rule mpi-test ( name : sources * : requirements * : schedule * )
{
# Default to building "<name>.cpp" and exercising a spread of process
# counts.
sources ?= $(name).cpp ;
schedule ?= 1 2 3 4 7 8 13 17 ;
local tests ;
for processes in $(schedule)
{
tests += [ testing.make-test run-mpi
: $(sources) /boost/mpi//boost_mpi
: $(requirements) <toolset>msvc:<link>static <mpi:processes>$(processes)
: $(name)-$(processes) ] ;
}
return $(tests) ;
}

View File

@ -0,0 +1,12 @@
#~ Copyright 2005 Rene Rivera.
#~ Distributed under the Boost Software License, Version 1.0.
#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Automatic configuration for VisualStudio toolset. To use, just import this module.
import toolset : using ;
# Deprecation shim: warn, then delegate to the modern auto-configuration.
ECHO "warning: msvc-config.jam is deprecated. Use 'using msvc : all ;' instead." ;
using msvc : all ;

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,74 @@
# Copyright (c) 2005 Vladimir Prus.
#
# Use, modification and distribution is subject to the Boost Software
# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
# http://www.boost.org/LICENSE_1_0.txt)
import "class" : new ;
import generators ;
import project ;
import targets ;
import toolset ;
import type ;
# NOTFILE_MAIN targets produce no file on disk; they exist only to run an
# arbitrary action.
type.register NOTFILE_MAIN ;
class notfile-generator : generator
{
rule __init__ ( * : * )
{
generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
}
rule run ( project name ? : property-set : sources * : multiple ? )
{
local action ;
local action-name = [ $(property-set).get <action> ] ;
# An <action> of the form "@rulename" invokes the named rule as the
# build action; any other value is run through the notfile.run action.
local m = [ MATCH ^@(.*) : $(action-name) ] ;
if $(m)
{
action = [ new action $(sources) : $(m[1])
: $(property-set) ] ;
}
else
{
action = [ new action $(sources) : notfile.run
: $(property-set) ] ;
}
return [ virtual-target.register
[ new notfile-target $(name) : $(project) : $(action) ] ] ;
}
}
generators.register [ new notfile-generator notfile.main : : NOTFILE_MAIN ] ;
# Copy the <action> property value into ACTION for use by the action body.
toolset.flags notfile.run ACTION : <action> ;
# Default action: execute the user-supplied command line verbatim.
actions run
{
$(ACTION)
}
# Declare a main target that runs 'action' (a command line, or "@rule"
# naming a rule) whenever it is built; no file is produced.
#
# target-name   - name of the target to declare.
# action        - command line to run, or "@rule" to invoke a rule.
# sources, requirements, default-build - as for any main target.
rule notfile ( target-name : action + : sources * : requirements * : default-build * )
{
local project = [ project.current ] ;
requirements += <action>$(action) ;
targets.main-target-alternative
[ new typed-target $(target-name) : $(project) : NOTFILE_MAIN
: [ targets.main-target-sources $(sources) : $(target-name) ]
: [ targets.main-target-requirements $(requirements) : $(project) ]
: [ targets.main-target-default-build $(default-build) : $(project) ]
] ;
}
IMPORT $(__name__) : notfile : : notfile ;

View File

@ -0,0 +1,165 @@
# Copyright (c) 2005 Vladimir Prus.
# Copyright 2006 Rene Rivera.
#
# Use, modification and distribution is subject to the Boost Software
# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
# http://www.boost.org/LICENSE_1_0.txt)
# Provides mechanism for installing whole packages into a specific directory
# structure. This is opposed to the 'install' rule, that installs a number of
# targets to a single directory, and does not care about directory structure at
# all.
# Example usage:
#
# package.install boost : <properties>
# : <binaries>
# : <libraries>
# : <headers>
# ;
#
# This will install binaries, libraries and headers to the 'proper' location,
# given by command line options --prefix, --exec-prefix, --bindir, --libdir and
# --includedir.
#
# The rule is just a convenient wrapper, avoiding the need to define several
# 'install' targets.
#
# The only install-related feature is <install-source-root>. It will apply to
# headers only and if present, paths of headers relatively to source root will
# be retained after installing. If it is not specified, then "." is assumed, so
# relative paths in headers are always preserved.
import "class" : new ;
import option ;
import project ;
import feature ;
import property ;
import stage ;
import targets ;
import modules ;
feature.feature install-default-prefix : : free incidental ;
# Install a whole package (binaries, libraries, headers) into the standard
# directory layout under --prefix (and --exec-prefix/--bindir/--libdir/
# --includedir overrides).
#
# name         - base name for the generated install targets.
# package-name - defaults to 'name'; used for the default prefix lookup.
rule install ( name package-name ? : requirements * : binaries * : libraries * : headers * )
{
package-name ?= $(name) ;
if [ MATCH --prefix=(.*) : [ modules.peek : ARGV ] ]
{
# If --prefix is explicitly specified on the command line,
# then we need to wipe away any settings of libdir/includedir that
# are specified via options in config files.
option.set bindir : ;
option.set libdir : ;
option.set includedir : ;
}
# If <install-source-root> is not specified, all headers are installed to
# prefix/include, no matter what their relative path is. Sometimes that is
# what is needed.
local install-source-root = [ property.select <install-source-root> :
$(requirements) ] ;
install-source-root = $(install-source-root:G=) ;
requirements = [ property.change $(requirements) : <install-source-root> ] ;
local install-header-subdir = [ property.select <install-header-subdir> :
$(requirements) ] ;
install-header-subdir = /$(install-header-subdir:G=) ;
install-header-subdir ?= "" ;
requirements = [ property.change $(requirements) : <install-header-subdir> ]
;
# First, figure out all locations. Use the default if no prefix option
# given.
local prefix = [ get-prefix $(name) : $(requirements) ] ;
# Architecture dependent files.
local exec-locate = [ option.get exec-prefix : $(prefix) ] ;
# Binaries.
local bin-locate = [ option.get bindir : $(prefix)/bin ] ;
# Object code libraries.
local lib-locate = [ option.get libdir : $(prefix)/lib ] ;
# Source header files.
local include-locate = [ option.get includedir : $(prefix)/include ] ;
stage.install $(name)-bin : $(binaries) : $(requirements)
<location>$(bin-locate) ;
alias $(name)-lib : $(name)-lib-shared $(name)-lib-static ;
# Since the install location of shared libraries differs on universe
# and cygwin, use target alternatives to make different targets.
# We should have used indirect conditional requirements, but it's
# awkward to pass bin-locate and lib-locate from there to another rule.
alias $(name)-lib-shared : $(name)-lib-shared-universe ;
alias $(name)-lib-shared : $(name)-lib-shared-cygwin : <target-os>cygwin ;
# For shared libraries, we install both the explicitly specified ones and
# the shared libraries that the installed executables depend on.
stage.install $(name)-lib-shared-universe : $(binaries) $(libraries) : $(requirements)
<location>$(lib-locate) <install-dependencies>on <install-type>SHARED_LIB ;
stage.install $(name)-lib-shared-cygwin : $(binaries) $(libraries) : $(requirements)
<location>$(bin-locate) <install-dependencies>on <install-type>SHARED_LIB ;
# For static libraries, we do not care about executable dependencies, since
# static libraries are already incorporated into them.
stage.install $(name)-lib-static : $(libraries) : $(requirements)
<location>$(lib-locate) <install-dependencies>on <install-type>STATIC_LIB ;
stage.install $(name)-headers : $(headers) : $(requirements)
<location>$(include-locate)$(install-header-subdir)
<install-source-root>$(install-source-root) ;
alias $(name) : $(name)-bin $(name)-lib $(name)-headers ;
# Mark all generated targets 'explicit' in the declaring project so they
# are only built when requested by name.
local c = [ project.current ] ;
local project-module = [ $(c).project-module ] ;
module $(project-module)
{
explicit $(1)-bin $(1)-lib $(1)-headers $(1) $(1)-lib-shared $(1)-lib-static
$(1)-lib-shared-universe $(1)-lib-shared-cygwin ;
}
}
# Install data files for a package under <datarootdir>/<package-name>
# (default: <prefix>/share/<package-name>).
#
# target-name  - name of the install target to declare.
# package-name - name of the package; defaults to target-name.
# data         - the data files/targets to install.
# requirements - extra requirements for the install target.
rule install-data ( target-name : package-name ? : data * : requirements * )
{
# Fix: default to the *value* of target-name, not the literal string
# "target-name"; the parameter is now optional so the default can actually
# apply (mirrors the 'install' rule above).
package-name ?= $(target-name) ;
if [ MATCH --prefix=(.*) : [ modules.peek : ARGV ] ]
{
# If --prefix is explicitly specified on the command line,
# then we need to wipe away any settings of datarootdir
# picked up from configuration files.
option.set datarootdir : ;
}
local prefix = [ get-prefix $(package-name) : $(requirements) ] ;
local datadir = [ option.get datarootdir : $(prefix)/share ] ;
stage.install $(target-name)
: $(data)
: $(requirements) <location>$(datadir)/$(package-name)
;
# Mark the generated target 'explicit' in the declaring project so it is
# only built when requested by name.
local c = [ project.current ] ;
local project-module = [ $(c).project-module ] ;
module $(project-module)
{
explicit $(1) ;
}
}
# Compute the installation prefix: --prefix on the command line wins, then
# the project's <install-default-prefix> requirement, then a per-OS default.
local rule get-prefix ( package-name : requirements * )
{
local prefix = [ option.get prefix : [ property.select
<install-default-prefix> : $(requirements) ] ] ;
prefix = $(prefix:G=) ;
# NOTE(review): this strips <install-default-prefix> from a local copy of
# 'requirements' that is never used afterwards -- appears to be dead code.
requirements = [ property.change $(requirements) : <install-default-prefix>
] ;
# Or some likely defaults if neither is given.
if ! $(prefix)
{
if [ modules.peek : NT ] { prefix = C:\\$(package-name) ; }
else if [ modules.peek : UNIX ] { prefix = /usr/local ; }
}
return $(prefix) ;
}

View File

@ -0,0 +1,168 @@
# Copyright 2006 Noel Belcourt
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
import property ;
import generators ;
import toolset : flags ;
import feature ;
import type ;
import common ;
import fortran ;
# Register the pathscale toolset, inheriting generic unix behavior; reuse
# the builtin generators for prebuilt and searched libraries.
feature.extend toolset : pathscale ;
toolset.inherit pathscale : unix ;
generators.override pathscale.prebuilt : builtin.prebuilt ;
generators.override pathscale.searched-lib-generator : searched-lib-generator ;
# Documentation and toolchain description located
# http://www.pathscale.com/docs.html
# Configure the PathScale toolset.
#
# version - toolset version; auto-detected via -dumpversion when omitted.
# command - C++ compiler command (pathCC); searched in PATH and
#           /opt/ekopath/bin when omitted.
# options - extra properties (e.g. <fflags>) applied to this configuration.
rule init ( version ? : command * : options * )
{
command = [ common.get-invocation-command pathscale : pathCC : $(command)
: /opt/ekopath/bin ] ;
# Determine the version
local command-string = $(command:J=" ") ;
if $(command)
{
version ?= [ MATCH "^([0-9.]+)"
: [ SHELL "$(command-string) -dumpversion" ] ] ;
}
local condition = [ common.check-init-parameters pathscale
: version $(version) ] ;
common.handle-options pathscale : $(condition) : $(command) : $(options) ;
toolset.flags pathscale.compile.fortran90 OPTIONS $(condition) :
[ feature.get-values <fflags> : $(options) ] : unchecked ;
# Derive the C compiler by replacing the command's basename with pathcc.
# NOTE(review): command_c references itself before being assigned (so the
# first part expands to empty) -- same idiom appears in sibling toolsets;
# confirm intent before changing.
command_c = $(command_c[1--2]) $(command[-1]:B=pathcc) ;
toolset.flags pathscale CONFIG_C_COMMAND $(condition) : $(command_c) ;
# fortran support
local f-command = [ common.get-invocation-command pathscale : pathf90 : $(command) ] ;
local command_f = $(command_f[1--2]) $(f-command[-1]:B=pathf90) ;
local command_f90 = $(command_f[1--2]) $(f-command[-1]:B=pathf90) ;
toolset.flags pathscale CONFIG_F_COMMAND $(condition) : $(command_f) ;
toolset.flags pathscale CONFIG_F90_COMMAND $(condition) : $(command_f90) ;
# always link lib rt to resolve clock_gettime()
flags pathscale.link FINDLIBS-SA : rt : unchecked ;
}
# Declare generators
generators.register-c-compiler pathscale.compile.c : C : OBJ : <toolset>pathscale ;
generators.register-c-compiler pathscale.compile.c++ : CPP : OBJ : <toolset>pathscale ;
generators.register-fortran-compiler pathscale.compile.fortran : FORTRAN : OBJ : <toolset>pathscale ;
generators.register-fortran90-compiler pathscale.compile.fortran90 : FORTRAN90 : OBJ : <toolset>pathscale ;
# Declare flags and actions for compilation
# Map Boost.Build features onto PathScale compiler switches.
flags pathscale.compile OPTIONS <optimization>off : -O0 ;
flags pathscale.compile OPTIONS <optimization>speed : -O3 ;
flags pathscale.compile OPTIONS <optimization>space : -Os ;
flags pathscale.compile OPTIONS <inlining>off : -noinline ;
flags pathscale.compile OPTIONS <inlining>on : -inline ;
flags pathscale.compile OPTIONS <inlining>full : -inline ;
flags pathscale.compile OPTIONS <warnings>off : -woffall ;
flags pathscale.compile OPTIONS <warnings>on : -Wall ;
flags pathscale.compile OPTIONS <warnings>all : -Wall -pedantic ;
flags pathscale.compile OPTIONS <warnings-as-errors>on : -Werror ;
flags pathscale.compile OPTIONS <debug-symbols>on : -ggdb ;
flags pathscale.compile OPTIONS <profiling>on : -pg ;
flags pathscale.compile OPTIONS <link>shared : -fPIC ;
flags pathscale.compile OPTIONS <address-model>32 : -m32 ;
flags pathscale.compile OPTIONS <address-model>64 : -m64 ;
# Pass raw user flags through untranslated.
flags pathscale.compile USER_OPTIONS <cflags> ;
flags pathscale.compile.c++ USER_OPTIONS <cxxflags> ;
flags pathscale.compile DEFINES <define> ;
flags pathscale.compile INCLUDES <include> ;
flags pathscale.compile.fortran USER_OPTIONS <fflags> ;
flags pathscale.compile.fortran90 USER_OPTIONS <fflags> ;
# Compile C sources with the derived pathcc command.
actions compile.c
{
"$(CONFIG_C_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
}
# Compile C++ sources with the configured pathCC command.
actions compile.c++
{
"$(CONFIG_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
}
# Compile fixed-form Fortran sources.
actions compile.fortran
{
"$(CONFIG_F_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
}
rule compile.fortran90 ( targets * : sources * : properties * )
{
# the space rule inserts spaces between targets and it's necessary
SPACE on $(targets) = " " ;
# Serialize execution of the compile.fortran90 action
# F90 source must be compiled in a particular order so we
# serialize the build as a parallel F90 compile might fail
JAM_SEMAPHORE on $(targets) = <s>pathscale-f90-semaphore ;
}
# -module places generated .mod files next to the first target.
actions compile.fortran90
{
"$(CONFIG_F90_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -module $(<[1]:D) -c -o "$(<)" "$(>)"
}
# Declare flags and actions for linking
flags pathscale.link OPTIONS <debug-symbols>on : -ggdb -rdynamic ;
# Strip the binary when no debugging is needed
flags pathscale.link OPTIONS <debug-symbols>off : -g0 ;
flags pathscale.link OPTIONS <profiling>on : -pg ;
flags pathscale.link USER_OPTIONS <linkflags> ;
flags pathscale.link LINKPATH <library-path> ;
flags pathscale.link FINDLIBS-ST <find-static-library> ;
flags pathscale.link FINDLIBS-SA <find-shared-library> ;
flags pathscale.link FINDLIBS-SA <threading>multi : pthread ;
flags pathscale.link LIBRARIES <library-file> ;
flags pathscale.link LINK-RUNTIME <runtime-link>static : static ;
flags pathscale.link LINK-RUNTIME <runtime-link>shared : dynamic ;
flags pathscale.link RPATH <dll-path> ;
# On gcc, there are separate options for dll path at runtime and
# link time. On Solaris, there's only one: -R, so we have to use
# it, even though it's bad idea.
flags pathscale.link RPATH <xdll-path> ;
rule link ( targets * : sources * : properties * )
{
# SPACE is used in the action to separate -Wl tokens.
SPACE on $(targets) = " " ;
}
# Link an executable; 'bind LIBRARIES' makes library files dependencies.
actions link bind LIBRARIES
{
"$(CONFIG_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -L"$(LINKPATH)" -Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST)
}
# Slight mods for dlls
rule link.dll ( targets * : sources * : properties * )
{
# SPACE is used in the action to separate -Wl tokens.
SPACE on $(targets) = " " ;
}
# Link a shared library, setting its soname to the output basename.
actions link.dll bind LIBRARIES
{
"$(CONFIG_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -L"$(LINKPATH)" -Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,"$(RPATH)" -o "$(<)" -Wl,-soname$(SPACE)-Wl,$(<[1]:D=) -shared "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST)
}
# Declare action for creating static libraries
# "$(CONFIG_COMMAND)" -ar -o "$(<)" "$(>)"
# 'piecemeal' splits overly long member lists across multiple ar invocations.
actions piecemeal archive
{
    ar $(ARFLAGS) ru "$(<)" "$(>)"
}

View File

@ -0,0 +1,95 @@
# Copyright (c) 2005 Reece H. Dunn.
# Copyright 2006 Ilya Sokolov
#
# Use, modification and distribution is subject to the Boost Software
# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
# http://www.boost.org/LICENSE_1_0.txt)
##### Using Precompiled Headers (Quick Guide) #####
#
# Make precompiled mypch.hpp:
#
# import pch ;
#
# cpp-pch mypch
# : # sources
# mypch.hpp
# : # requiremnts
# <toolset>msvc:<source>mypch.cpp
# ;
#
# Add cpp-pch to sources:
#
# exe hello
# : main.cpp hello.cpp mypch
# ;
import "class" : new ;
import type ;
import feature ;
import generators ;
type.register PCH : pch ;
type.register C_PCH : : PCH ;
type.register CPP_PCH : : PCH ;
# Control precompiled header (PCH) generation.
feature.feature pch :
on
off
: propagated ;
feature.feature pch-header : : free dependency ;
feature.feature pch-file : : free dependency ;
# Base PCH generator. The 'run' method has the logic to prevent this generator
# from being run unless it's being used for a top-level PCH target.
class pch-generator : generator
{
import property-set ;
rule action-class ( )
{
return compile-action ;
}
rule run ( project name ? : property-set : sources + )
{
if ! $(name)
{
# Unless this generator is invoked as the top-most generator for a
# main target, fail. This allows using 'H' type as input type for
# this generator, while preventing Boost.Build to try this generator
# when not explicitly asked for.
#
# One bad example is msvc, where pch generator produces both PCH
# target and OBJ target, so if there's any header generated (like by
# bison, or by msidl), we'd try to use pch generator to get OBJ from
# that H, which is completely wrong. By restricting this generator
# only to pch main target, such problem is solved.
}
else
{
local r = [ run-pch $(project) $(name)
: [ $(property-set).add-raw <define>BOOST_BUILD_PCH_ENABLED ]
: $(sources) ] ;
return [ generators.add-usage-requirements $(r)
: <define>BOOST_BUILD_PCH_ENABLED ] ;
}
}
# This rule must be overridden by the derived classes.
rule run-pch ( project name ? : property-set : sources + )
{
}
}
# NOTE: requirements are empty, default pch generator can be applied when
# pch=off.
generators.register
[ new dummy-generator pch.default-c-pch-generator : : C_PCH ] ;
generators.register
[ new dummy-generator pch.default-cpp-pch-generator : : CPP_PCH ] ;

View File

@ -0,0 +1,147 @@
# Copyright Noel Belcourt 2007.
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
# Toolset support for the PGI (Portland Group) compilers, layered on top of
# the generic 'unix' toolset.
import property ;
import generators ;
import os ;
import toolset : flags ;
import feature ;
import fortran ;
import type ;
import common ;
import gcc ;
feature.extend toolset : pgi ;
toolset.inherit pgi : unix ;
# Prefer pgi's handling of prebuilt and searched libraries over the builtin
# generators.
generators.override pgi.prebuilt : builtin.lib-generator ;
generators.override pgi.searched-lib-generator : searched-lib-generator ;
# Documentation and toolchain description located
# http://www.pgroup.com/resources/docs.htm
# Configures the pgi toolset: locates the pgCC driver, derives the C driver
# from it, and records user options under the matching condition.
rule init ( version ? : command * : options * )
{
local condition = [ common.check-init-parameters pgi : version $(version) ] ;
local l_command = [ common.get-invocation-command pgi : pgCC : $(command) ] ;
common.handle-options pgi : $(condition) : $(l_command) : $(options) ;
# Derive the C compiler command by replacing the basename of the last
# element of the C++ command with "cc" (pgCC -> pgcc).
# NOTE(review): 'command_c' is not declared local and is empty on its first
# use here, so $(command_c[1--2]) expands to nothing -- verify this
# used-before-set pattern is intentional.
command_c = $(command_c[1--2]) $(l_command[-1]:B=cc) ;
toolset.flags pgi CONFIG_C_COMMAND $(condition) : $(command_c) ;
flags pgi.compile DEFINES $(condition) :
[ feature.get-values <define> : $(options) ] : unchecked ;
# IOV_MAX support
flags pgi.compile DEFINES $(condition) : __need_IOV_MAX : unchecked ;
# set link flags
flags pgi.link FINDLIBS-ST : [
feature.get-values <find-static-library> : $(options) ] : unchecked ;
# always link lib rt to resolve clock_gettime()
flags pgi.link FINDLIBS-SA : rt [
feature.get-values <find-shared-library> : $(options) ] : unchecked ;
gcc.init-link-flags pgi gnu $(condition) ;
}
# Declare generators
generators.register-c-compiler pgi.compile.c : C : OBJ : <toolset>pgi ;
generators.register-c-compiler pgi.compile.c++ : CPP : OBJ : <toolset>pgi ;
generators.register-fortran-compiler pgi.compile.fortran : FORTRAN : OBJ : <toolset>pgi ;
# Declare flags and actions for compilation
# -Kieee requests strict IEEE 754 floating point behavior.
flags pgi.compile OPTIONS : -Kieee ;
flags pgi.compile OPTIONS <link>shared : -fpic -fPIC ;
flags pgi.compile OPTIONS <debug-symbols>on : -gopt ;
flags pgi.compile OPTIONS <profiling>on : -xprofile=tcov ;
flags pgi.compile OPTIONS <optimization>speed : -fast -Mx,8,0x10000000 ;
flags pgi.compile OPTIONS <optimization>space : -xO2 -xspace ;
# flags pgi.compile OPTIONS <threading>multi : -mt ;
# -Minform controls the minimum severity of compiler diagnostics printed.
flags pgi.compile OPTIONS <warnings>off : -Minform=severe ;
flags pgi.compile OPTIONS <warnings>on : -Minform=warn ;
flags pgi.compile.c++ OPTIONS <inlining>off : -INLINE:none ;
# Pass user-supplied flags, defines and include paths straight through.
flags pgi.compile OPTIONS <cflags> ;
flags pgi.compile.c++ OPTIONS <cxxflags> ;
flags pgi.compile DEFINES <define> ;
flags pgi.compile INCLUDES <include> ;
flags pgi.compile.fortran OPTIONS <fflags> ;
# C, C++ and Fortran compiles differ only in the configured driver command.
actions compile.c
{
"$(CONFIG_C_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
}
actions compile.c++
{
"$(CONFIG_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
}
actions compile.fortran
{
"$(CONFIG_F_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
}
# Declare flags and actions for linking
flags pgi.link OPTIONS <debug-symbols>on : -gopt ;
# Strip the binary when no debugging is needed
flags pgi.link OPTIONS <debug-symbols>off : -s ;
flags pgi.link OPTIONS <profiling>on : -xprofile=tcov ;
flags pgi.link OPTIONS <linkflags> ;
flags pgi.link OPTIONS <link>shared : -fpic -fPIC ;
flags pgi.link LINKPATH <library-path> ;
flags pgi.link FINDLIBS-ST <find-static-library> ;
flags pgi.link FINDLIBS-SA <find-shared-library> ;
flags pgi.link FINDLIBS-SA <threading>multi : pthread rt ;
flags pgi.link LIBRARIES <library-file> ;
# -B$(LINK-RUNTIME) selects static vs dynamic runtime binding below.
flags pgi.link LINK-RUNTIME <runtime-link>static : static ;
flags pgi.link LINK-RUNTIME <runtime-link>shared : dynamic ;
flags pgi.link RPATH <dll-path> ;
# On gcc, there are separate options for dll path at runtime and
# link time. On Solaris, there's only one: -R, so we have to use
# it, even though it's bad idea.
flags pgi.link RPATH <xdll-path> ;
# SPACE is set on the targets for use inside action text expansion.
rule link ( targets * : sources * : properties * )
{
SPACE on $(targets) = " " ;
}
# reddish can only link statically and, somehow, the presence of -Bdynamic on the link line
# marks the executable as a dynamically linked exec even though no dynamic libraries are supplied.
# Yod on redstorm refuses to load an executable that is dynamically linked.
# removing the dynamic link options should get us where we need to be on redstorm.
# "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME)
actions link bind LIBRARIES
{
"$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -Bstatic -l$(FINDLIBS-ST) -Bdynamic -l$(FINDLIBS-SA) -B$(LINK-RUNTIME)
}
# Slight mods for dlls
rule link.dll ( targets * : sources * : properties * )
{
SPACE on $(targets) = " " ;
}
# "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" -h$(<[1]:D=) -G "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME)
actions link.dll bind LIBRARIES
{
"$(CONFIG_COMMAND)" $(OPTIONS) -shared -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" -Wl,-h -Wl,$(<[1]:D=) "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME)
}
# Archive creation: 'updated together piecemeal' reruns ar only for updated
# members and splits overly long command lines.
actions updated together piecemeal pgi.archive
{
ar -rc$(ARFLAGS:E=) "$(<)" "$(>)"
}

View File

@ -0,0 +1,27 @@
#~ Copyright 2005 Rene Rivera.
#~ Distributed under the Boost Software License, Version 1.0.
#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Automatic configuration for Python tools and libraries. To use, just import this module.
import os ;
import toolset : using ;
# Windows only: probe the registry for installed Python versions, newest
# first, and configure the first matches found. W32_GETREG is a bjam builtin
# available on Windows builds, consistent with the NT guard here.
if [ os.name ] = NT
{
for local R in 2.4 2.3 2.2
{
local python-path = [ W32_GETREG
"HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\$(R)\\InstallPath" ] ;
local python-version = $(R) ;
if $(python-path)
{
if --debug-configuration in [ modules.peek : ARGV ]
{
ECHO "notice:" using python ":" $(python-version) ":" $(python-path) ;
}
using python : $(python-version) : $(python-path) ;
}
}
}

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,236 @@
# Copyright (c) 2001 David Abrahams.
# Copyright (c) 2002-2003 Rene Rivera.
# Copyright (c) 2002-2003 Vladimir Prus.
#
# Use, modification and distribution is subject to the Boost Software
# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
# http://www.boost.org/LICENSE_1_0.txt)
# Toolset support for the QNX qcc compiler driver, based on 'unix'.
import "class" : new ;
import common ;
import errors ;
import feature ;
import generators ;
import os ;
import property ;
import set ;
import toolset ;
import type ;
import unix ;
feature.extend toolset : qcc ;
# Inherit everything from unix except linking, which is replaced below by the
# qcc-linking-generator registrations.
toolset.inherit-generators qcc : unix : unix.link unix.link.dll ;
# NOTE(review): argument order here is reversed relative to the
# 'generators.override qcc.prebuilt : builtin.prebuilt' call further down --
# confirm the intended (overriding : overridden) order.
generators.override builtin.lib-generator : qcc.prebuilt ;
generators.override qcc.searched-lib-generator : searched-lib-generator ;
toolset.inherit-flags qcc : unix ;
toolset.inherit-rules qcc : unix ;
# Initializes the qcc toolset for the given version. If necessary, command may
# be used to specify where the compiler is located. The parameter 'options' is a
# space-delimited list of options, each one being specified as
# <option-name>option-value. Valid option names are: cxxflags, linkflags and
# linker-type. Accepted values for linker-type are gnu and sun, gnu being the
# default.
#
# Example:
# using qcc : 3.4 : : <cxxflags>foo <linkflags>bar <linker-type>sun ;
#
rule init ( version ? : command * : options * )
{
local condition = [ common.check-init-parameters qcc : version $(version) ] ;
local command = [ common.get-invocation-command qcc : QCC : $(command) ] ;
common.handle-options qcc : $(condition) : $(command) : $(options) ;
}
generators.register-c-compiler qcc.compile.c++ : CPP : OBJ : <toolset>qcc ;
generators.register-c-compiler qcc.compile.c : C : OBJ : <toolset>qcc ;
generators.register-c-compiler qcc.compile.asm : ASM : OBJ : <toolset>qcc ;
# Declare flags for compilation.
toolset.flags qcc.compile OPTIONS <debug-symbols>on : -gstabs+ ;
# Declare flags and action for compilation.
toolset.flags qcc.compile OPTIONS <optimization>off : -O0 ;
toolset.flags qcc.compile OPTIONS <optimization>speed : -O3 ;
toolset.flags qcc.compile OPTIONS <optimization>space : -Os ;
# -Wc, forwards the following option to the underlying compiler stage.
toolset.flags qcc.compile OPTIONS <inlining>off : -Wc,-fno-inline ;
toolset.flags qcc.compile OPTIONS <inlining>on : -Wc,-Wno-inline ;
toolset.flags qcc.compile OPTIONS <inlining>full : -Wc,-finline-functions -Wc,-Wno-inline ;
toolset.flags qcc.compile OPTIONS <warnings>off : -w ;
toolset.flags qcc.compile OPTIONS <warnings>all : -Wc,-Wall ;
toolset.flags qcc.compile OPTIONS <warnings-as-errors>on : -Wc,-Werror ;
toolset.flags qcc.compile OPTIONS <profiling>on : -p ;
toolset.flags qcc.compile OPTIONS <cflags> ;
toolset.flags qcc.compile.c++ OPTIONS <cxxflags> ;
toolset.flags qcc.compile DEFINES <define> ;
toolset.flags qcc.compile INCLUDES <include> ;
toolset.flags qcc.compile OPTIONS <link>shared : -shared ;
toolset.flags qcc.compile.c++ TEMPLATE_DEPTH <c++-template-depth> ;
rule compile.c++
{
# Here we want to raise the template-depth parameter value to something
# higher than the default value of 17. Note that we could do this using the
# feature.set-default rule but we do not want to set the default value for
# all toolsets as well.
#
# TODO: This 'modified default' has been inherited from some 'older Boost
# Build implementation' and has most likely been added to make some Boost
# library parts compile correctly. We should see what exactly prompted this
# and whether we can get around the problem more locally.
local template-depth = [ on $(1) return $(TEMPLATE_DEPTH) ] ;
if ! $(template-depth)
{
TEMPLATE_DEPTH on $(1) = 128 ;
}
}
actions compile.c++
{
"$(CONFIG_COMMAND)" -Wc,-ftemplate-depth-$(TEMPLATE_DEPTH) $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
}
actions compile.c
{
"$(CONFIG_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
}
actions compile.asm
{
"$(CONFIG_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
}
# The class checking that we do not try to use the <runtime-link>static property
# while creating or using a shared library, since it is not supported by qcc/
# /libc.
#
class qcc-linking-generator : unix-linking-generator
{
    # Validates the property set before delegating to the base generator:
    # rejects <runtime-link>static combined with building a DLL or with any
    # shared-library source.
    rule generated-targets ( sources + : property-set : project name ? )
    {
        if <runtime-link>static in [ $(property-set).raw ]
        {
            local m ;
            if [ id ] = "qcc.link.dll"
            {
                # BUGFIX: corrected error message grammar ("can't be build").
                m = "on qcc, DLL can't be built with <runtime-link>static" ;
            }
            if ! $(m)
            {
                # Reject any source that is (derived from) a shared library.
                for local s in $(sources)
                {
                    local type = [ $(s).type ] ;
                    if $(type) && [ type.is-derived $(type) SHARED_LIB ]
                    {
                        # BUGFIX: corrected grammar and stray trailing space.
                        m = "on qcc, using DLLs together with the <runtime-link>static option is not possible" ;
                    }
                }
            }
            if $(m)
            {
                errors.user-error $(m) : "It is suggested to use"
                    "<runtime-link>static together with <link>static." ;
            }
        }
        return [ unix-linking-generator.generated-targets
            $(sources) : $(property-set) : $(project) $(name) ] ;
    }
}
# Register the validating link generators declared above for EXE and DLL.
generators.register [ new qcc-linking-generator qcc.link : LIB OBJ : EXE
: <toolset>qcc ] ;
generators.register [ new qcc-linking-generator qcc.link.dll : LIB OBJ
: SHARED_LIB : <toolset>qcc ] ;
generators.override qcc.prebuilt : builtin.prebuilt ;
generators.override qcc.searched-lib-generator : searched-lib-generator ;
# Declare flags for linking.
# First, the common flags.
toolset.flags qcc.link OPTIONS <debug-symbols>on : -gstabs+ ;
toolset.flags qcc.link OPTIONS <profiling>on : -p ;
toolset.flags qcc.link OPTIONS <linkflags> ;
toolset.flags qcc.link LINKPATH <library-path> ;
toolset.flags qcc.link FINDLIBS-ST <find-static-library> ;
toolset.flags qcc.link FINDLIBS-SA <find-shared-library> ;
toolset.flags qcc.link LIBRARIES <library-file> ;
# Always link against the math library.
toolset.flags qcc.link FINDLIBS-SA : m ;
# For <runtime-link>static we made sure there are no dynamic libraries in the
# link.
toolset.flags qcc.link OPTIONS <runtime-link>static : -static ;
# Assuming this is just like with gcc.
toolset.flags qcc.link RPATH : <dll-path> : unchecked ;
toolset.flags qcc.link RPATH_LINK : <xdll-path> : unchecked ;
# Declare actions for linking.
#
rule link ( targets * : sources * : properties * )
{
SPACE on $(targets) = " " ;
# Serialize execution of the 'link' action, since running N links in
# parallel is just slower. For now, serialize only qcc links while it might
# be a good idea to serialize all links.
JAM_SEMAPHORE on $(targets) = <s>qcc-link-semaphore ;
}
actions link bind LIBRARIES
{
"$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA) $(OPTIONS)
}
# Always remove archive and start again. Here is the rationale from Andre Hentz:
# I had a file, say a1.c, that was included into liba.a. I moved a1.c to a2.c,
# updated my Jamfiles and rebuilt. My program was crashing with absurd errors.
# After some debugging I traced it back to the fact that a1.o was *still* in
# liba.a
RM = [ common.rm-command ] ;
if [ os.name ] = NT
{
RM = "if exist \"$(<[1])\" DEL \"$(<[1])\"" ;
}
# Declare action for creating static libraries. The 'r' letter means to add
# files to the archive with replacement. Since we remove the archive, we do not
# care about replacement, but there is no option to "add without replacement".
# The 'c' letter suppresses warnings in case the archive does not exists yet.
# That warning is produced only on some platforms, for whatever reasons.
#
actions piecemeal archive
{
$(RM) "$(<)"
ar rc "$(<)" "$(>)"
}
# DLL links share the EXE link's semaphore so all qcc links are serialized.
rule link.dll ( targets * : sources * : properties * )
{
SPACE on $(targets) = " " ;
JAM_SEMAPHORE on $(targets) = <s>qcc-link-semaphore ;
}
# Differ from 'link' above only by -shared.
#
actions link.dll bind LIBRARIES
{
"$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -o "$(<)" $(HAVE_SONAME)-Wl,-h$(SPACE)-Wl,$(<[1]:D=) -shared "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA) $(OPTIONS)
}

View File

@ -0,0 +1,17 @@
# Copyright (c) 2006 Vladimir Prus.
#
# Use, modification and distribution is subject to the Boost Software
# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
# http://www.boost.org/LICENSE_1_0.txt)
# Forwarding toolset file to Qt GUI library. Forwards to the toolset file
# for the current version of Qt.
import qt4 ;
# Delegates initialization to the qt4 module with identical arguments.
rule init ( prefix : full_bin ? : full_inc ? : full_lib ? : version ? : condition * )
{
qt4.init $(prefix) : $(full_bin) : $(full_inc) : $(full_lib) : $(version) : $(condition) ;
}

View File

@ -0,0 +1,209 @@
# Copyright 2006 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Support for the Qt GUI library version 3
# (http://www.trolltech.com/products/qt3/index.html).
# For new developments, it is recommended to use Qt4 via the qt4 Boost.Build
# module.
import modules ;
import feature ;
import errors ;
import type ;
import "class" : new ;
import generators ;
import project ;
import toolset : flags ;
# Convert this module into a project, so that we can declare targets here.
project.initialize $(__name__) ;
project qt3 ;
# Initializes the QT support module. The 'prefix' parameter tells where QT is
# installed. When not given, environmental variable QTDIR should be set.
#
rule init ( prefix ? )
{
if ! $(prefix)
{
prefix = [ modules.peek : QTDIR ] ;
if ! $(prefix)
{
errors.error
"QT installation prefix not given and QTDIR variable is empty" ;
}
}
# Re-initialization with the same prefix is a no-op; a different prefix is
# an error.
if $(.initialized)
{
if $(prefix) != $(.prefix)
{
errors.error
"Attempt the reinitialize QT with different installation prefix" ;
}
}
else
{
.initialized = true ;
.prefix = $(prefix) ;
# moc runs on headers, producing moc_<name>.cpp sources.
generators.register-standard qt3.moc : H : CPP(moc_%) : <allow>qt3 ;
# Note: the OBJ target type here is fake, take a look at
# qt4.jam/uic-h-generator for explanations that apply in this case as
# well.
generators.register [ new moc-h-generator-qt3
qt3.moc.cpp : MOCCABLE_CPP : OBJ : <allow>qt3 ] ;
# The UI type is defined in types/qt.jam, and UIC_H is only used in
# qt.jam, but not in qt4.jam, so define it here.
type.register UIC_H : : H ;
generators.register-standard qt3.uic-h : UI : UIC_H : <allow>qt3 ;
# The following generator is used to convert UI files to CPP. It creates
# UIC_H from UI, and constructs CPP from UI/UIC_H. In addition, it also
# returns UIC_H target, so that it can be mocced.
class qt::uic-cpp-generator : generator
{
rule __init__ ( )
{
generator.__init__ qt3.uic-cpp : UI UIC_H : CPP : <allow>qt3 ;
}
rule run ( project name ? : properties * : sources + )
{
# Consider this:
# obj test : test_a.cpp : <optimization>off ;
#
# This generator will somehow be called in this case, and,
# will fail -- which is okay. However, if there are <library>
# properties they will be converted to sources, so the size of
# 'sources' will be more than 1. In this case, the base generator
# will just crash -- and that's not good. Just use a quick test
# here.
local result ;
if ! $(sources[2])
{
# Construct CPP as usual
result = [ generator.run $(project) $(name)
: $(properties) : $(sources) ] ;
# If OK, process UIC_H with moc. It's pretty clear that
# the object generated with UIC will have Q_OBJECT macro.
if $(result)
{
local action = [ $(result[1]).action ] ;
local sources = [ $(action).sources ] ;
local mocced = [ generators.construct $(project) $(name)
: CPP : $(properties) : $(sources[2]) ] ;
result += $(mocced[2-]) ;
}
}
return $(result) ;
}
}
generators.register [ new qt::uic-cpp-generator ] ;
# Finally, declare prebuilt target for QT library.
local usage-requirements =
<include>$(.prefix)/include
<dll-path>$(.prefix)/lib
<library-path>$(.prefix)/lib
<allow>qt3
;
# Multi-threaded Qt is named qt-mt; single-threaded builds link plain qt.
lib qt : : <name>qt-mt <threading>multi : : $(usage-requirements) ;
lib qt : : <name>qt <threading>single : : $(usage-requirements) ;
}
}
# Runs moc on a single moccable .cpp source, producing a MOC-typed target
# that is registered with the dependency graph but not compiled directly.
class moc-h-generator-qt3 : generator
{
rule __init__ ( * : * )
{
generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
}
rule run ( project name ? : property-set : sources * )
{
# Only handle the single-source MOCCABLE_CPP case; anything else falls
# through and returns nothing.
if ! $(sources[2]) && [ $(sources[1]).type ] = MOCCABLE_CPP
{
name = [ $(sources[1]).name ] ;
name = $(name:B) ;
local a = [ new action $(sources[1]) : qt3.moc.cpp :
$(property-set) ] ;
local target = [
new file-target $(name) : MOC : $(project) : $(a) ] ;
local r = [ virtual-target.register $(target) ] ;
# Since this generator will return a H target, the linking generator
# won't use it at all, and won't set any dependency on it. However,
# we need the target to be seen by bjam, so that the dependency from
# sources to this generated header is detected -- if Jam does not
# know about this target, it won't do anything.
DEPENDS all : [ $(r).actualize ] ;
return $(r) ;
}
}
}
# Query the installation directory. This is needed in at least two scenarios.
# First, when re-using sources from the Qt-Tree. Second, to "install" custom Qt
# plugins to the Qt-Tree.
#
rule directory
{
return $(.prefix) ;
}
# -f forces moc to include the processed source file. Without it, it would think
# that .qpp is not a header and would not include it from the generated file.
#
actions moc
{
$(.prefix)/bin/moc -f $(>) -o $(<)
}
# When moccing .cpp files, we don't need -f, otherwise generated code will
# include .cpp and we'll get duplicated symbols.
#
actions moc.cpp
{
$(.prefix)/bin/moc $(>) -o $(<)
}
# A literal space token, used to separate -L from each library path in the
# uic invocations below.
space = " " ;
# Sometimes it's required to make 'plugins' available during uic invocation. To
# help with this we add paths to all dependency libraries to the uic command
# line.
# The intention is that it's possible to write
#
# exe a : ... a.ui ... : <uses>some_plugin ;
#
# and have everything work. We'd add quite a bunch of unrelated paths but it
# won't hurt.
#
flags qt3.uic-h LIBRARY_PATH <xdll-path> ;
actions uic-h
{
$(.prefix)/bin/uic $(>) -o $(<) -L$(space)$(LIBRARY_PATH)
}
flags qt3.uic-cpp LIBRARY_PATH <xdll-path> ;
# The second target is uic-generated header name. It's placed in build dir, but
# we want to include it using only basename.
actions uic-cpp
{
$(.prefix)/bin/uic $(>[1]) -i $(>[2]:D=) -o $(<) -L$(space)$(LIBRARY_PATH)
}

View File

@ -0,0 +1,713 @@
# Copyright 2002-2006 Vladimir Prus
# Copyright 2005 Alo Sarv
# Copyright 2005-2009 Juergen Hunold
#
# Distributed under the Boost Software License, Version 1.0. (See
# accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
# Qt4 library support module
#
# The module attempts to auto-detect QT installation location from QTDIR
# environment variable; failing that, installation location can be passed as
# argument:
#
# toolset.using qt4 : /usr/local/Trolltech/Qt-4.0.0 ;
#
# The module supports code generation from .ui and .qrc files, as well as
# running the moc preprocessor on headers. Note that you must list all your
# moc-able headers in sources.
#
# Example:
#
# exe myapp : myapp.cpp myapp.h myapp.ui myapp.qrc
# /qt4//QtGui /qt4//QtNetwork ;
#
# It's also possible to run moc on cpp sources:
#
# import cast ;
#
# exe myapp : myapp.cpp [ cast _ moccable-cpp : myapp.cpp ] /qt4//QtGui ;
#
# When moccing source file myapp.cpp you need to include "myapp.moc" from
# myapp.cpp. When moccing .h files, the output of moc will be automatically
# compiled and linked in, you don't need any includes.
#
# This is consistent with Qt guidelines:
# http://doc.trolltech.com/4.0/moc.html
import modules ;
import feature ;
import errors ;
import type ;
import "class" : new ;
import generators ;
import project ;
import toolset : flags ;
import os ;
import virtual-target ;
import scanner ;
# Qt3Support control feature
#
# Qt4 configure defaults to build Qt4 libraries with Qt3Support.
# The autodetection is missing, so we default to disable Qt3Support.
# This prevents the user from inadvertently using a deprecated API.
#
# The Qt3Support library can be activated by adding
# "<qt3support>on" to requirements
#
# Use "<qt3support>on:<define>QT3_SUPPORT_WARNINGS"
# to get warnings about deprecated Qt3 support functions and classes.
# Files ported by the "qt3to4" conversion tool contain _tons_ of
# warnings, so this define is not set as default.
#
# Todo: Detect Qt3Support from Qt's configure data.
# Or add more auto-configuration (like python).
feature.feature qt3support : off on : propagated link-incompatible ;
# The Qt version used for requirements
# Valid are <qt>4.4 or <qt>4.5.0
# Auto-detection via qmake sets '<qt>major.minor.patch'
feature.feature qt : : propagated ;
# Convert this module into a project so targets can be declared here.
project.initialize $(__name__) ;
project qt ;
# Save the project so that we tolerate 'import + using' combo.
.project = [ project.current ] ;
# Helper utils for easy debug output
if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
{
.debug-configuration = TRUE ;
}
# Prints 'message' prefixed with "notice: [qt4-cfg]" only when
# --debug-configuration was passed on the command line.
local rule debug-message ( message * )
{
if $(.debug-configuration) = TRUE
{
ECHO notice: [qt4-cfg] $(message) ;
}
}
# Split captured shell-command output (e.g. from qmake) into a list of lines.
local rule read-output ( content )
{
    local result ;
    # A literal newline character; jam strings have no escape sequence for it.
    local nl = "
" ;
    # Matches the first line and everything following its newline.
    local line-re = "([^$(nl)]*)[$(nl)](.*)" ;
    local parsed = [ MATCH "$(line-re)" : "$(content)" ] ;
    while $(parsed)
    {
        result += $(parsed[1]) ;
        parsed = [ MATCH "$(line-re)" : "$(parsed[2])" ] ;
    }
    return $(result) ;
}
# Capture Qt version from qmake
# Runs '<bin_prefix>/qmake -v' and returns the (major minor patch) list parsed
# from its output, or nothing if no version line matched.
local rule check-version ( bin_prefix )
{
# NOTE(review): 'full-cmd', 'temp' and the loop variable 'line' are not
# declared local and therefore leak into module scope -- verify intended.
full-cmd = $(bin_prefix)"/qmake -v" ;
debug-message Running '$(full-cmd)' ;
local output = [ SHELL $(full-cmd) ] ;
for line in [ read-output $(output) ]
{
# Parse the output to get all the results.
if [ MATCH "QMake" : $(line) ]
{
# Skip first line of output
}
else
{
temp = [ MATCH "([0-9]*)\\.([0-9]*)\\.([0-9]*)" : $(line) ] ;
}
}
return $(temp) ;
}
# Validate the version string and extract the major/minor part we care about.
#
local rule split-version ( version )
{
    # Capture major, minor and any trailing text from the version string.
    local parts = [ MATCH ^([0-9]+)\.([0-9]+)(.*)$ : $(version) : 1 2 3 ] ;
    local malformed ;
    if $(parts[3])
    {
        # Trailing text after major.minor (e.g. a patch component).
        malformed = true ;
    }
    if ! $(parts[2])
    {
        # No minor component could be extracted at all.
        malformed = true ;
    }
    if $(malformed)
    {
        ECHO "Warning: 'using qt' expects a two part (major, minor) version number; got" $(version) instead ;
        # Add a zero to account for the missing digit if necessary.
        parts += 0 ;
    }
    return $(parts[1]) $(parts[2]) ;
}
# Initialize the QT support module.
# Parameters:
# - 'prefix'    parameter tells where Qt is installed.
# - 'full_bin'  optional full path to Qt binaries (qmake,moc,uic,rcc)
# - 'full_inc'  optional full path to Qt top-level include directory
# - 'full_lib'  optional full path to Qt library directory
# - 'version'   optional version of Qt, else autodetected via 'qmake -v'
# - 'condition' optional requirements
rule init ( prefix : full_bin ? : full_inc ? : full_lib ? : version ? : condition * )
{
    project.push-current $(.project) ;
    debug-message "==== Configuring Qt ... ====" ;
    # Report the user-supplied parameters.
    # BUGFIX: the original iterated over 'cmd-or-prefix includes libraries',
    # variables that do not exist in this rule (copied from another module),
    # so most user-specified values were never reported.
    for local v in prefix full_bin full_inc full_lib version condition
    {
        if $($(v))
        {
            debug-message " user-specified "$(v): '$($(v))' ;
        }
    }
    # Needed as default value
    .prefix = $(prefix) ;
    # pre-build paths to detect reinitializations changes
    local inc_prefix lib_prefix bin_prefix ;
    if $(full_inc)
    {
        inc_prefix = $(full_inc) ;
    }
    else
    {
        inc_prefix = $(prefix)/include ;
    }
    if $(full_lib)
    {
        lib_prefix = $(full_lib) ;
    }
    else
    {
        lib_prefix = $(prefix)/lib ;
    }
    if $(full_bin)
    {
        bin_prefix = $(full_bin) ;
    }
    else
    {
        bin_prefix = $(prefix)/bin ;
    }
    # Globally needed variables
    .incprefix = $(inc_prefix) ;
    .libprefix = $(lib_prefix) ;
    .binprefix = $(bin_prefix) ;
    if ! $(.initialized)
    {
        # Make sure this is initialised only once
        .initialized = true ;
        # Generates cpp files from header files using "moc" tool
        generators.register-standard qt4.moc : H : CPP(moc_%) : <allow>qt4 ;
        # The OBJ result type is a fake, 'H' will be really produced. See
        # comments on the generator class, defined below the 'init' function.
        generators.register [ new uic-generator qt4.uic : UI : OBJ :
            <allow>qt4 ] ;
        # The OBJ result type is a fake here too.
        generators.register [ new moc-h-generator
            qt4.moc.inc : MOCCABLE_CPP : OBJ : <allow>qt4 ] ;
        generators.register [ new moc-inc-generator
            qt4.moc.inc : MOCCABLE_H : OBJ : <allow>qt4 ] ;
        # Generates .cpp files from .qrc files.
        generators.register-standard qt4.rcc : QRC : CPP(qrc_%) ;
        # dependency scanner for wrapped files.
        type.set-scanner QRC : qrc-scanner ;
        # Save value of first occurring prefix
        .PREFIX = $(prefix) ;
    }
    if $(version)
    {
        local major-minor = [ split-version $(version) ] ;
        version = $(major-minor:J=.) ;
    }
    else
    {
        version = [ check-version $(bin_prefix) ] ;
        if $(version)
        {
            version = $(version:J=.) ;
        }
        debug-message Detected version '$(version)' ;
    }
    local target-requirements = $(condition) ;
    # Add the version, if any, to the target requirements.
    if $(version)
    {
        if ! $(version) in [ feature.values qt ]
        {
            feature.extend qt : $(version) ;
        }
        target-requirements += <qt>$(version:E=default) ;
    }
    local target-os = [ feature.get-values target-os : $(condition) ] ;
    if ! $(target-os)
    {
        target-os ?= [ feature.defaults target-os ] ;
        target-os = $(target-os:G=) ;
        target-requirements += <target-os>$(target-os) ;
    }
    # Build exact requirements for the tools
    local tools-requirements = $(target-requirements:J=/) ;
    debug-message "Details of this Qt configuration:" ;
    debug-message " prefix: " '$(prefix:E=<empty>)' ;
    debug-message " binary path: " '$(bin_prefix:E=<empty>)' ;
    debug-message " include path:" '$(inc_prefix:E=<empty>)' ;
    debug-message " library path:" '$(lib_prefix:E=<empty>)' ;
    debug-message " target requirements:" '$(target-requirements)' ;
    debug-message " tool requirements: " '$(tools-requirements)' ;
    # setup the paths for the tools
    toolset.flags qt4.moc .BINPREFIX $(tools-requirements) : $(bin_prefix) ;
    toolset.flags qt4.rcc .BINPREFIX $(tools-requirements) : $(bin_prefix) ;
    toolset.flags qt4.uic .BINPREFIX $(tools-requirements) : $(bin_prefix) ;
    # TODO: 2009-02-12: Better support for directories
    # Most likely needed are separate getters for: include,libraries,binaries and sources.
    toolset.flags qt4.directory .PREFIX $(tools-requirements) : $(prefix) ;
    # Test for a buildable Qt.
    if [ glob $(.prefix)/Jamroot ]
    {
        # BUGFIX: the original assignment lacked the terminating ';', so it
        # swallowed the following 'add-shared-library QtCore' tokens into the
        # value of .bjam-qt and QtCore was never declared on this path.
        .bjam-qt = true ;
        # this will declare QtCore (and qtmain on <target-os>windows)
        add-shared-library QtCore ;
    }
    else
    # Setup common pre-built Qt.
    # Special setup for QtCore on which everything depends
    {
        local usage-requirements =
            <include>$(.incprefix)
            <library-path>$(.libprefix)
            <dll-path>$(.libprefix)
            <threading>multi
            <allow>qt4 ;
        # Since Qt-4.2, debug versions on unix have to be built
        # separately and therefore have no suffix.
        .suffix_version = "" ;
        .suffix_debug = "" ;
        # Control flag for auto-configuration of the debug libraries.
        # This setup requires Qt 'configure -debug-and-release'.
        # Only available on some platforms.
        # ToDo: 2009-02-12: Maybe throw this away and
        # require separate setup with <variant>debug as condition.
        .have_separate_debug = FALSE ;
        # Setup other platforms
        if $(target-os) in windows cygwin
        {
            .have_separate_debug = TRUE ;
            # On NT, the libs have "4" suffix, and "d" suffix in debug builds.
            .suffix_version = "4" ;
            .suffix_debug = "d" ;
            # On Windows we must link against the qtmain library
            lib qtmain
                : # sources
                : # requirements
                  <name>qtmain$(.suffix_debug)
                  <variant>debug
                  $(target-requirements)
                ;
            lib qtmain
                : # sources
                : # requirements
                  <name>qtmain
                  $(target-requirements)
                ;
        }
        else if $(target-os) = darwin
        {
            # On MacOS X, both debug and release libraries are available.
            .suffix_debug = "_debug" ;
            .have_separate_debug = TRUE ;
            alias qtmain ;
        }
        else
        {
            alias qtmain : : $(target-requirements) ;
        }
        lib QtCore : qtmain
            : # requirements
              <name>QtCore$(.suffix_version)
              $(target-requirements)
            : # default-build
            : # usage-requirements
              <define>QT_CORE_LIB
              <define>QT_NO_DEBUG
              <include>$(.incprefix)/QtCore
              $(usage-requirements)
            ;
        if $(.have_separate_debug) = TRUE
        {
            debug-message Configure debug libraries with suffix '$(.suffix_debug)' ;
            # BUGFIX: the original used the never-set variable '$(main)' as
            # the source, so the debug QtCore lacked the qtmain dependency
            # that the release variant above declares.
            lib QtCore : qtmain
                : # requirements
                  <name>QtCore$(.suffix_debug)$(.suffix_version)
                  <variant>debug
                  $(target-requirements)
                : # default-build
                : # usage-requirements
                  <define>QT_CORE_LIB
                  <include>$(.incprefix)/QtCore
                  $(usage-requirements)
                ;
        }
    }
    # Initialising the remaining libraries is canonical
    # parameters 'module' : 'depends-on' : 'usage-define' : 'requirements' : 'include'
    # 'include' only for non-canonical include paths.
    add-shared-library QtGui : QtCore : QT_GUI_LIB : $(target-requirements) ;
    add-shared-library QtNetwork : QtCore : QT_NETWORK_LIB : $(target-requirements) ;
    add-shared-library QtSql : QtCore : QT_SQL_LIB : $(target-requirements) ;
    add-shared-library QtXml : QtCore : QT_XML_LIB : $(target-requirements) ;
    add-shared-library Qt3Support : QtGui QtNetwork QtXml QtSql
        : QT_QT3SUPPORT_LIB QT3_SUPPORT
        : <qt3support>on $(target-requirements) ;
    # Dummy target to enable "<qt3support>off" and
    # "<library>/qt//Qt3Support" at the same time. This enables quick
    # switching from one to the other for test/porting purposes.
    alias Qt3Support : : <qt3support>off $(target-requirements) ;
    # OpenGl Support
    add-shared-library QtOpenGL : QtGui : QT_OPENGL_LIB : $(target-requirements) ;
    # SVG-Support (Qt 4.1)
    add-shared-library QtSvg : QtXml QtOpenGL : QT_SVG_LIB : $(target-requirements) ;
    # Test-Support (Qt 4.1)
    add-shared-library QtTest : QtCore : : $(target-requirements) ;
    # Qt designer library
    add-shared-library QtDesigner : QtGui QtXml : : $(target-requirements) ;
    # Support for dynamic Widgets (Qt 4.1)
    add-static-library QtUiTools : QtGui QtXml : $(target-requirements) ;
    # DBus-Support (Qt 4.2)
    add-shared-library QtDBus : QtXml : : $(target-requirements) ;
    # Script-Engine (Qt 4.3)
    add-shared-library QtScript : QtGui QtXml : QT_SCRIPT_LIB : $(target-requirements) ;
    # Tools for the Script-Engine (Qt 4.5)
    add-shared-library QtScriptTools : QtScript : QT_SCRIPTTOOLS_LIB : $(target-requirements) ;
    # WebKit (Qt 4.4)
    add-shared-library QtWebKit : QtGui : QT_WEBKIT_LIB : $(target-requirements) ;
    # Phonon Multimedia (Qt 4.4)
    add-shared-library phonon : QtGui QtXml : QT_PHONON_LIB : $(target-requirements) ;
    # Multimedia engine (Qt 4.6)
    add-shared-library QtMultimedia : QtGui : QT_MULTIMEDIA_LIB : $(target-requirements) ;
    # XmlPatterns-Engine (Qt 4.4)
    add-shared-library QtXmlPatterns : QtNetwork : QT_XMLPATTERNS_LIB : $(target-requirements) ;
    # Help-Engine (Qt 4.4)
    add-shared-library QtHelp : QtGui QtSql QtXml : : $(target-requirements) ;
    # AssistantClient Support
    # Compat library
    # Pre-4.4 help system, use QtHelp for new programs
    add-shared-library QtAssistantClient : QtGui : : $(target-requirements) : QtAssistant ;
    debug-message "==== Configured Qt-$(version) ====" ;
    project.pop-current ;
}
# Returns a non-empty value once 'init' has run at least once.
rule initialized ( )
{
return $(.initialized) ;
}
# This custom generator is needed because in QT4, UI files are translated only
# into H files, and no C++ files are created. Further, the H files need not be
# passed via MOC. The header is used only via inclusion. If we define a standard
# UI -> H generator, Boost.Build will run MOC on H, and then compile the
# resulting cpp. It will give a warning, since output from moc will be empty.
#
# This generator is declared with a UI -> OBJ signature, so it gets invoked when
# linking generator tries to convert sources to OBJ, but it produces target of
# type H. This is non-standard, but allowed. That header won't be mocced.
#
# Custom UI -> OBJ generator that really produces an H target (see the
# rationale in the comment block above): the generated ui_*.h is consumed
# only via #include, so it must never be mocced or compiled on its own.
class uic-generator : generator
{
rule __init__ ( * : * )
{
generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
}
# Standard generator.run protocol. Returns the registered ui_<name>.h
# file-target built by the qt4.uic action.
rule run ( project name ? : property-set : sources * )
{
if ! $(name)
{
# Derive the target name from the source's basename.
# NOTE(review): bjam lists are 1-based, yet this reads $(sources[0])
# while the action below uses $(sources[1]) -- matches upstream
# qt4.jam, but confirm before changing.
name = [ $(sources[0]).name ] ;
name = $(name:B) ;
}
local a = [ new action $(sources[1]) : qt4.uic : $(property-set) ] ;
# The 'ui_' prefix is to match qmake's default behavior.
local target = [ new file-target ui_$(name) : H : $(project) : $(a) ] ;
local r = [ virtual-target.register $(target) ] ;
# Since this generator will return a H target, the linking generator
# won't use it at all, and won't set any dependency on it. However, we
# need the target to be seen by bjam, so that dependency from sources to
# this generated header is detected -- if jam does not know about this
# target, it won't do anything.
DEPENDS all : [ $(r).actualize ] ;
return $(r) ;
}
}
# Runs moc over a MOCCABLE_CPP source, producing a MOC target whose output
# is #included by the corresponding .cpp (hence the qt4.moc.inc action).
class moc-h-generator : generator
{
rule __init__ ( * : * )
{
generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
}
# Fires only for exactly one source of type MOCCABLE_CPP; otherwise
# returns nothing so other generators may be tried.
rule run ( project name ? : property-set : sources * )
{
if ! $(sources[2]) && [ $(sources[1]).type ] = MOCCABLE_CPP
{
name = [ $(sources[0]).name ] ;
name = $(name:B) ;
local a = [ new action $(sources[1]) : qt4.moc.inc :
$(property-set) ] ;
local target = [ new file-target $(name) : MOC : $(project) : $(a)
] ;
local r = [ virtual-target.register $(target) ] ;
# The linking generator will not consume the MOC target directly,
# so make it visible to bjam explicitly: otherwise the dependency
# from sources on this generated file would go unnoticed and it
# would never be built.
DEPENDS all : [ $(r).actualize ] ;
return $(r) ;
}
}
}
# Runs moc over a MOCCABLE_H source, producing a compilable moc_<name>.cpp
# (CPP target) via the qt4.moc.inc action.
class moc-inc-generator : generator
{
rule __init__ ( * : * )
{
generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
}
# Fires only for exactly one source of type MOCCABLE_H; otherwise
# returns nothing so other generators may be tried.
rule run ( project name ? : property-set : sources * )
{
if ! $(sources[2]) && [ $(sources[1]).type ] = MOCCABLE_H
{
name = [ $(sources[0]).name ] ;
name = $(name:B) ;
local a = [ new action $(sources[1]) : qt4.moc.inc :
$(property-set) ] ;
local target = [ new file-target moc_$(name) : CPP : $(project) :
$(a) ] ;
# Make the generated file known to bjam so the dependency from
# sources on it is detected (see moc-h-generator for rationale).
# NOTE(review): unlike the sibling generators, this actualizes
# before registering -- presumably equivalent; verify.
DEPENDS all : [ $(target).actualize ] ;
return [ virtual-target.register $(target) ] ;
}
}
}
# Query the installation directory. This is needed in at least two scenarios.
# First, when re-using sources from the Qt-Tree. Second, to "install" custom Qt
# plugins to the Qt-Tree.
#
# Returns $(.PREFIX) as recorded during module initialization.
rule directory
{
return $(.PREFIX) ;
}
# Declare a shared Qt library. Shared Qt libraries carry the configured
# major-version suffix in their on-disk name (e.g. QtCore4.so), so the
# stored suffix is forwarded as the version argument of add-library.
rule add-shared-library ( name : dependencies * : defines * : reqs * : inc ? )
{
add-library $(name) : $(.suffix_version) : $(dependencies) : $(defines) : $(reqs) : $(inc) ;
}
# Declare a static Qt library. Static Qt libraries are unversioned, so the
# version argument of add-library is deliberately left empty.
rule add-static-library ( name : dependencies * : defines * : reqs * : inc ? )
{
add-library $(name) : : $(dependencies) : $(defines) : $(reqs) : $(inc) ;
}
# Add a Qt library.
# Static libs are unversioned, whereas shared libs have the major number as suffix.
# Creates both release and debug versions on platforms where both are enabled by Qt configure.
# Flags:
# - lib-name Qt library Name
# - version Qt major number used as shared library suffix (QtCore4.so)
# - depends-on other Qt libraries
# - usage-defines those are set by qmake, so set them when using this library
# - requirements additional requirements
# - include non-canonical include path. The canonical path is $(.incprefix)/$(lib-name).
rule add-library ( lib-name : version ? : depends-on * : usage-defines * : requirements * : include ? )
{
if $(.bjam-qt)
{
# Import Qt module
# Everything will be setup there
alias $(lib-name)
: $(.prefix)//$(lib-name)
:
:
: <allow>qt4 ;
}
else
{
# Pick the first non-empty of: explicit include override, lib name.
local real_include ;
real_include ?= $(include) ;
real_include ?= $(lib-name) ;
lib $(lib-name)
: # sources
$(depends-on)
: # requirements
<name>$(lib-name)$(version)
$(requirements)
: # default-build
: # usage-requirements
<define>$(usage-defines)
<include>$(.incprefix)/$(real_include)
;
# Second alternative for the debug variant when Qt was configured
# with separate debug libraries (name carries $(.suffix_debug)).
if $(.have_separate_debug) = TRUE
{
lib $(lib-name)
: # sources
$(depends-on)
: # requirements
<name>$(lib-name)$(.suffix_debug)$(version)
$(requirements)
<variant>debug
: # default-build
: # usage-requirements
<define>$(usage-defines)
<include>$(.incprefix)/$(real_include)
;
}
}
# Make library explicit so that a simple <use>qt4 will not bring in everything.
# And some components like QtDBus/Phonon may not be available on all platforms.
explicit $(lib-name) ;
}
# Use $(.BINPREFIX[-1]) for the paths as several tools-requirements can match.
# The exact match is the last one.
# Get <include> and <defines> from current toolset.
flags qt4.moc INCLUDES <include> ;
flags qt4.moc DEFINES <define> ;
# Processes headers to create Qt MetaObject information. Qt4-moc has its
# c++-parser, so pass INCLUDES and DEFINES.
# The -f flag forces moc to generate an #include of the input file.
actions moc
{
$(.BINPREFIX[-1])/moc -I"$(INCLUDES)" -D$(DEFINES) -f $(>) -o $(<)
}
# When moccing files for include only, we don't need -f, otherwise the generated
# code will include the .cpp and we'll get duplicated symbols.
#
actions moc.inc
{
$(.BINPREFIX[-1])/moc -I"$(INCLUDES)" -D$(DEFINES) $(>) -o $(<)
}
# Generates source files from resource files.
# -name sets the initializer name to the .qrc basename.
actions rcc
{
$(.BINPREFIX[-1])/rcc $(>) -name $(>:B) -o $(<)
}
# Generates user-interface source from .ui files.
#
actions uic
{
$(.BINPREFIX[-1])/uic $(>) -o $(<)
}
# Scanner for .qrc files. Look for the CDATA section of the <file> tag. Ignore
# the "alias" attribute. See http://doc.trolltech.com/qt/resources.html for
# detailed documentation of the Qt Resource System.
#
class qrc-scanner : common-scanner
{
# Regex whose first capture group is the referenced resource file path.
rule pattern ( )
{
return "<file.*>(.*)</file>" ;
}
}
# Wrapped files are "included".
scanner.register qrc-scanner : include ;

View File

@ -0,0 +1,44 @@
#~ Copyright 2005 Rene Rivera.
#~ Distributed under the Boost Software License, Version 1.0.
#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Automatic configuration for the QuickBook tool. To use, just import this
# module; it locates a quickbook executable and calls 'using quickbook' on it.
import os ;
import toolset : using ;
if [ os.name ] = NT
{
# On Windows, look up Boost installation roots in the registry for a few
# known releases, then glob for quickbook.exe under their bin directories.
local boost-dir = ;
for local R in snapshot cvs 1.33.0
{
boost-dir += [ W32_GETREG
"HKEY_LOCAL_MACHINE\\SOFTWARE\\Boost.org\\$(R)"
: "InstallRoot" ] ;
}
local quickbook-path = [ GLOB "$(boost-dir)\\bin" "\\Boost\\bin" : quickbook.exe ] ;
quickbook-path = $(quickbook-path[1]) ;
if $(quickbook-path)
{
if --debug-configuration in [ modules.peek : ARGV ]
{
ECHO "notice:" using quickbook ":" $(quickbook-path) ;
}
using quickbook : $(quickbook-path) ;
}
}
else
{
# Elsewhere, probe a fixed set of conventional installation prefixes.
local quickbook-path = [ GLOB "/usr/local/bin" "/usr/bin" "/opt/bin" : quickbook ] ;
quickbook-path = $(quickbook-path[1]) ;
if $(quickbook-path)
{
if --debug-configuration in [ modules.peek : ARGV ]
{
ECHO "notice:" using quickbook ":" $(quickbook-path) ;
}
using quickbook : $(quickbook-path) ;
}
}

View File

@ -0,0 +1,361 @@
#
# Copyright (c) 2005 João Abecasis
# Copyright (c) 2005 Vladimir Prus
# Copyright (c) 2006 Rene Rivera
#
# Distributed under the Boost Software License, Version 1.0. (See
# accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
#
# This toolset defines a generator to translate QuickBook to BoostBook. It can
# be used to generate nice (!) user documentation in different formats
# (pdf/html/...), from a single text file with simple markup.
#
# The toolset defines the QUICKBOOK type (file extension 'qbk') and
# a QUICKBOOK to XML (BOOSTBOOK) generator.
#
#
# ===========================================================================
# Q & A
# ===========================================================================
#
# If you don't know what this is all about, some Q & A will hopefully get you
# up to speed with QuickBook and this toolset.
#
#
# What is QuickBook ?
#
# QuickBook is a WikiWiki style documentation tool geared towards C++
# documentation using simple rules and markup for simple formatting tasks.
# QuickBook extends the WikiWiki concept. Like the WikiWiki, QuickBook
# documents are simple text files. A single QuickBook document can
# generate a fully linked set of nice HTML and PostScript/PDF documents
# complete with images and syntax-colorized source code.
#
#
# Where can I get QuickBook ?
#
# Quickbook can be found in Boost's repository, under the tools/quickbook
# directory. It was added there in Jan 2005, some time after the release of
# Boost v1.32.0, and has been an integral part of the Boost distribution
# since v1.33.
#
# Here's a link to the SVN repository:
# https://svn.boost.org/svn/boost/trunk/tools/quickbook
#
# And to QuickBook's QuickBook-generated docs:
# http://www.boost.org/doc/libs/release/tools/quickbook/index.html
#
#
# How do I use QuickBook and this toolset in my projects ?
#
# The minimal example is:
#
# using boostbook ;
# import quickbook ;
#
# boostbook my_docs : my_docs_source.qbk ;
#
# where my_docs is a target name and my_docs_source.qbk is a QuickBook
# file. The documentation format to be generated is determined by the
# boostbook toolset. By default html documentation should be generated,
# but you should check BoostBook's docs to be sure.
#
#
# What do I need ?
#
# You should start by setting up the BoostBook toolset. Please refer to
# boostbook.jam and the BoostBook documentation for information on how to
# do this.
#
# A QuickBook executable is also needed. The toolset will generate this
# executable if it can find the QuickBook sources. The following
# directories will be searched:
#
# BOOST_ROOT/tools/quickbook/
# BOOST_BUILD_PATH/../../quickbook/
#
# (BOOST_ROOT and BOOST_BUILD_PATH are environment variables)
#
# If QuickBook sources are not found the toolset will then try to use
# the shell command 'quickbook'.
#
#
# How do I provide a custom QuickBook executable ?
#
# You may put the following in your user-config.jam or site-config.jam:
#
# using quickbook : /path/to/quickbook ;
#
# or, if 'quickbook' can be found in your PATH,
#
# using quickbook : quickbook ;
#
#
# For convenience three alternatives are tried to get a QuickBook executable:
#
# 1. If the user points us to a QuickBook executable, that is used.
#
# 2. Otherwise, we search for the QuickBook sources and compile QuickBook
# using the default toolset.
#
# 3. As a last resort, we rely on the shell for finding 'quickbook'.
#
import boostbook ;
import "class" : new ;
import feature ;
import generators ;
import toolset ;
import type ;
import scanner ;
import project ;
import targets ;
import build-system ;
import path ;
import common ;
import errors ;
# The one and only QUICKBOOK type!
type.register QUICKBOOK : qbk ;
# <quickbook-binary> shell command to run QuickBook
# <quickbook-binary-dependencies> targets to build QuickBook from sources.
feature.feature <quickbook-binary> : : free ;
feature.feature <quickbook-binary-dependencies> : : free dependency ;
# Pass-through knobs for the quickbook invocation (see the actions below):
# -D defines, --indent and --linewidth respectively.
feature.feature <quickbook-define> : : free ;
feature.feature <quickbook-indent> : : free ;
feature.feature <quickbook-line-width> : : free ;
# quickbook-binary-generator handles generation of the QuickBook executable, by
# marking it as a dependency for QuickBook docs.
#
# If the user supplied the QuickBook command that will be used.
#
# Otherwise we search some sensible places for the QuickBook sources and compile
# from scratch using the default toolset.
#
# As a last resort we rely on the shell to find 'quickbook'.
#
class quickbook-binary-generator : generator
{
import modules path targets quickbook ;
# Standard generator.run protocol: injects <quickbook-binary> and
# <quickbook-binary-dependencies> into the property set, then delegates
# to the base generator.
rule run ( project name ? : property-set : sources * : multiple ? )
{
# Lock the module configuration so later 'using quickbook' calls fail.
quickbook.freeze-config ;
# QuickBook invocation command and dependencies.
local quickbook-binary = [ modules.peek quickbook : .quickbook-binary ] ;
local quickbook-binary-dependencies ;
if ! $(quickbook-binary)
{
# If the QuickBook source directory was found, mark its main target
# as a dependency for the current project. Otherwise, try to find
# 'quickbook' in user's PATH
local quickbook-dir = [ modules.peek quickbook : .quickbook-dir ] ;
if $(quickbook-dir)
{
# Get the main-target in QuickBook directory.
local quickbook-main-target = [ targets.resolve-reference $(quickbook-dir) : $(project) ] ;
# The first element are actual targets, the second are
# properties found in target-id. We do not care about these
# since we have passed the id ourselves.
quickbook-main-target =
[ $(quickbook-main-target[1]).main-target quickbook ] ;
quickbook-binary-dependencies =
[ $(quickbook-main-target).generate [ $(property-set).propagated ] ] ;
# Ignore usage-requirements returned as first element.
quickbook-binary-dependencies = $(quickbook-binary-dependencies[2-]) ;
# Some toolsets generate extra targets (e.g. RSP). We must mark
# all targets as dependencies for the project, but we will only
# use the EXE target for quickbook-to-boostbook translation.
for local target in $(quickbook-binary-dependencies)
{
if [ $(target).type ] = EXE
{
quickbook-binary =
[ path.native
[ path.join
[ $(target).path ]
[ $(target).name ]
]
] ;
}
}
}
}
# Add $(quickbook-binary-dependencies) as a dependency of the current
# project and set it as the <quickbook-binary> feature for the
# quickbook-to-boostbook rule, below.
property-set = [ $(property-set).add-raw
<dependency>$(quickbook-binary-dependencies)
<quickbook-binary>$(quickbook-binary)
<quickbook-binary-dependencies>$(quickbook-binary-dependencies)
] ;
return [ generator.run $(project) $(name) : $(property-set) : $(sources) : $(multiple) ] ;
}
}
# Define a scanner for tracking QBK include dependencies.
#
class qbk-scanner : common-scanner
{
# Three patterns: [include file], [include:id file] and [import file];
# the capture group is the referenced file in each case.
rule pattern ( )
{
return "\\[[ ]*include[ ]+([^]]+)\\]"
"\\[[ ]*include:[a-zA-Z0-9_]+[ ]+([^]]+)\\]"
"\\[[ ]*import[ ]+([^]]+)\\]" ;
}
}
scanner.register qbk-scanner : include ;
type.set-scanner QUICKBOOK : qbk-scanner ;
# Initialization of toolset.
#
# Parameters:
# command ? -> path to QuickBook executable.
#
# When command is not supplied toolset will search for QuickBook directory and
# compile the executable from source. If that fails we still search the path for
# 'quickbook'.
#
# Raises a user error if called with a command after the configuration has
# been frozen (i.e. after the first doc build used it).
rule init (
command ? # path to the QuickBook executable.
)
{
if $(command)
{
if $(.config-frozen)
{
errors.user-error "quickbook: configuration cannot be changed after it has been used." ;
}
.command = $(command) ;
}
}
# Resolves the quickbook command exactly once and locks the configuration.
# Sets module globals: .config-frozen, .quickbook-binary, .quickbook-dir.
# Resolution order: user-supplied .command, then QuickBook sources found
# under BOOST_ROOT / BOOST_BUILD_PATH (built from source by the generator
# above), then a plain 'quickbook' from PATH as a last resort.
rule freeze-config ( )
{
if ! $(.config-frozen)
{
.config-frozen = true ;
# QuickBook invocation command and dependencies.
.quickbook-binary = $(.command) ;
if $(.quickbook-binary)
{
# Use user-supplied command.
.quickbook-binary = [ common.get-invocation-command quickbook : quickbook : $(.quickbook-binary) ] ;
}
else
{
# Search for QuickBook sources in sensible places, like
# $(BOOST_ROOT)/tools/quickbook
# $(BOOST_BUILD_PATH)/../../quickbook
# And build quickbook executable from sources.
local boost-root = [ modules.peek : BOOST_ROOT ] ;
local boost-build-path = [ build-system.location ] ;
if $(boost-root)
{
.quickbook-dir += [ path.join $(boost-root) tools ] ;
}
if $(boost-build-path)
{
.quickbook-dir += $(boost-build-path)/../.. ;
}
.quickbook-dir = [ path.glob $(.quickbook-dir) : quickbook ] ;
# If the QuickBook source directory was found, mark its main target
# as a dependency for the current project. Otherwise, try to find
# 'quickbook' in user's PATH
if $(.quickbook-dir)
{
.quickbook-dir = [ path.make $(.quickbook-dir[1]) ] ;
}
else
{
ECHO "QuickBook warning: The path to the quickbook executable was" ;
ECHO " not provided. Additionally, couldn't find QuickBook" ;
ECHO " sources searching in" ;
ECHO " * BOOST_ROOT/tools/quickbook" ;
ECHO " * BOOST_BUILD_PATH/../../quickbook" ;
ECHO " Will now try to find a precompiled executable by searching" ;
ECHO " the PATH for 'quickbook'." ;
ECHO " To disable this warning in the future, or to completely" ;
ECHO " avoid compilation of quickbook, you can explicitly set the" ;
ECHO " path to a quickbook executable command in user-config.jam" ;
ECHO " or site-config.jam with the call" ;
ECHO " using quickbook : /path/to/quickbook ;" ;
# As a last resort, search for 'quickbook' command in path. Note
# that even if the 'quickbook' command is not found,
# get-invocation-command will still return 'quickbook' and might
# generate an error while generating the virtual-target.
.quickbook-binary = [ common.get-invocation-command quickbook : quickbook ] ;
}
}
}
}
# Register the QUICKBOOK -> XML (BoostBook) translation generator.
generators.register [ new quickbook-binary-generator quickbook.quickbook-to-boostbook : QUICKBOOK : XML ] ;
# <quickbook-binary> shell command to run QuickBook
# <quickbook-binary-dependencies> targets to build QuickBook from sources.
toolset.flags quickbook.quickbook-to-boostbook QB-COMMAND <quickbook-binary> ;
toolset.flags quickbook.quickbook-to-boostbook QB-DEPENDENCIES <quickbook-binary-dependencies> ;
toolset.flags quickbook.quickbook-to-boostbook INCLUDES <include> ;
toolset.flags quickbook.quickbook-to-boostbook QB-DEFINES <quickbook-define> ;
toolset.flags quickbook.quickbook-to-boostbook QB-INDENT <quickbook-indent> ;
toolset.flags quickbook.quickbook-to-boostbook QB-LINE-WIDTH <quickbook-line-width> ;
rule quickbook-to-boostbook ( target : source : properties * )
{
# Signal dependency of quickbook sources on <quickbook-binary-dependencies>
# upon invocation of quickbook-to-boostbook.
DEPENDS $(target) : [ on $(target) return $(QB-DEPENDENCIES) ] ;
}
# Invoke quickbook with the flags gathered above; $(1) is the output XML,
# $(2) the .qbk source.
actions quickbook-to-boostbook
{
"$(QB-COMMAND)" -I"$(INCLUDES)" -D"$(QB-DEFINES)" --indent="$(QB-INDENT)" --linewidth="$(QB-LINE-WIDTH)" --output-file="$(1)" "$(2)"
}
# Declare a main target to convert a quickbook source into a boostbook XML file.
#
# Standard main-target boilerplate: wraps the sources in a typed-target of
# type XML registered with the current project.
rule to-boostbook ( target-name : sources * : requirements * : default-build * )
{
local project = [ project.current ] ;
targets.main-target-alternative
[ new typed-target $(target-name) : $(project) : XML
: [ targets.main-target-sources $(sources) : $(target-name) ]
: [ targets.main-target-requirements $(requirements) : $(project) ]
: [ targets.main-target-default-build $(default-build) : $(project) ]
] ;
}

View File

@ -0,0 +1,156 @@
# Copyright (C) Andre Hentz 2003. Permission to copy, use, modify, sell and
# distribute this software is granted provided this copyright notice appears in
# all copies. This software is provided "as is" without express or implied
# warranty, and with no claim as to its suitability for any purpose.
#
# Copyright (c) 2006 Rene Rivera.
#
# Use, modification and distribution is subject to the Boost Software
# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
# http://www.boost.org/LICENSE_1_0.txt)
import type ;
import generators ;
import feature ;
import errors ;
import scanner ;
import toolset : flags ;
# Remember whether --debug-configuration was passed on the command line.
if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
{
.debug-configuration = true ;
}
type.register RC : rc ;
# Nothing to do at import time; per-toolset setup happens in 'configure'.
rule init ( )
{
}
# Configures a new resource compilation command specific to a condition,
# usually a toolset selection condition. The possible options are:
#
# * <rc-type>(rc|windres) - Indicates the type of options the command
# accepts.
#
# Even though the arguments are all optional, only when a command, condition,
# and at minimum the rc-type option are given will the command be configured.
# This is so that callers don't have to check auto-configuration values
# before calling this. And still get the functionality of build failures when
# the resource compiler can't be found.
#
# See the comment block above: binds a resource-compiler command and its
# dialect (<rc-type> rc or windres) to a toolset condition. A no-op unless
# command, condition and rc-type are all supplied.
rule configure ( command ? : condition ? : options * )
{
local rc-type = [ feature.get-values <rc-type> : $(options) ] ;
if $(command) && $(condition) && $(rc-type)
{
# :L lowercases the rc-type so it matches the action suffixes below.
flags rc.compile.resource .RC $(condition) : $(command) ;
flags rc.compile.resource .RC_TYPE $(condition) : $(rc-type:L) ;
flags rc.compile.resource DEFINES <define> ;
flags rc.compile.resource INCLUDES <include> ;
if $(.debug-configuration)
{
ECHO notice: using rc compiler :: $(condition) :: $(command) ;
}
}
}
# Dispatches to the action matching the configured .RC_TYPE for the target
# (rc, windres, or the 'null' fallback when no compiler was configured).
rule compile.resource ( target : sources * : properties * )
{
local rc-type = [ on $(target) return $(.RC_TYPE) ] ;
rc-type ?= null ;
compile.resource.$(rc-type) $(target) : $(sources[1]) ;
}
# Microsoft rc.exe style invocation (-fo output, 0x409 = US English).
actions compile.resource.rc
{
"$(.RC)" -l 0x409 "-U$(UNDEFS)" "-D$(DEFINES)" -I"$(>:D)" -I"$(<:D)" -I"$(INCLUDES)" -fo "$(<)" "$(>)"
}
# GNU windres style invocation.
actions compile.resource.windres
{
"$(.RC)" "-U$(UNDEFS)" "-D$(DEFINES)" -I"$(>:D)" -I"$(<:D)" -I"$(INCLUDES)" -o "$(<)" -i "$(>)"
}
# No resource compiler configured: assemble an empty object so links with
# .rc sources still succeed.
actions quietly compile.resource.null
{
as /dev/null -o "$(<)"
}
# Since it's a common practice to write
# exe hello : hello.cpp hello.rc
# we change the name of object created from RC file, to
# avoid conflict with hello.cpp.
# The reason we generate OBJ and not RES, is that gcc does not
# seem to like RES files, but works OK with OBJ.
# See http://article.gmane.org/gmane.comp.lib.boost.build/5643/
#
# Using 'register-c-compiler' adds the build directory to INCLUDES
generators.register-c-compiler rc.compile.resource : RC : OBJ(%_res) ;
# Register scanner for resources
class res-scanner : scanner
{
import regex virtual-target path scanner ;
rule __init__ ( includes * )
{
scanner.__init__ ;
self.includes = $(includes) ;
}
# Matches both resource statements (BITMAP/CURSOR/FONT/ICON/...) and
# C-preprocessor #include lines (angle and quoted forms).
rule pattern ( )
{
return "(([^ ]+[ ]+(BITMAP|CURSOR|FONT|ICON|MESSAGETABLE|RT_MANIFEST)[ ]+([^ \"]+|\"[^\"]+\"))|(#include[ ]*(<[^<]+>|\"[^\"]+\")))" ;
}
# Splits raw matches into angle includes, quoted includes and resource
# file references, then wires up bjam dependencies and search paths.
rule process ( target : matches * : binding )
{
local angle = [ regex.transform $(matches) : "#include[ ]*<([^<]+)>" ] ;
local quoted = [ regex.transform $(matches) : "#include[ ]*\"([^\"]+)\"" ] ;
local res = [ regex.transform $(matches) : "[^ ]+[ ]+(BITMAP|CURSOR|FONT|ICON|MESSAGETABLE|RT_MANIFEST)[ ]+(([^ \"]+)|\"([^\"]+)\")" : 3 4 ] ;
# Icons and other includes may referenced as
#
# IDR_MAINFRAME ICON "res\\icon.ico"
#
# so we have to replace double backslashes to single ones.
res = [ regex.replace-list $(res) : "\\\\\\\\" : "/" ] ;
# CONSIDER: the new scoping rule seem to defeat "on target" variables.
local g = [ on $(target) return $(HDRGRIST) ] ;
local b = [ NORMALIZE_PATH $(binding:D) ] ;
# Attach binding of including file to included targets.
# When target is directly created from virtual target
# this extra information is unnecessary. But in other
# cases, it allows to distinguish between two headers of the
# same name included from different places.
# We don't need this extra information for angle includes,
# since they should not depend on including file (we can't
# get literal "." in include path).
local g2 = $(g)"#"$(b) ;
angle = $(angle:G=$(g)) ;
quoted = $(quoted:G=$(g2)) ;
res = $(res:G=$(g2)) ;
local all = $(angle) $(quoted) ;
INCLUDES $(target) : $(all) ;
DEPENDS $(target) : $(res) ;
NOCARE $(all) $(res) ;
SEARCH on $(angle) = $(self.includes:G=) ;
SEARCH on $(quoted) = $(b) $(self.includes:G=) ;
SEARCH on $(res) = $(b) $(self.includes:G=) ;
# Just propagate current scanner to includes, in a hope
# that includes do not change scanners.
scanner.propagate $(__name__) : $(angle) $(quoted) : $(target) ;
}
}
scanner.register res-scanner : include ;
type.set-scanner RC : res-scanner ;

View File

@ -0,0 +1,524 @@
# Copyright 2003 Dave Abrahams
# Copyright 2005, 2006 Rene Rivera
# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# This module defines the 'install' rule, used to copy a set of targets to a
# single location.
import targets ;
import "class" : new ;
import errors ;
import type ;
import generators ;
import feature ;
import project ;
import virtual-target ;
import path ;
import types/register ;
# Free features consumed by the install rule below: traverse dependencies,
# filter by target type, root for computing relative install paths, and
# shared-library versioning behavior.
feature.feature <install-dependencies> : off on : incidental ;
feature.feature <install-type> : : free incidental ;
feature.feature <install-source-root> : : free path ;
feature.feature <so-version> : : free incidental ;
# If 'on', version symlinks for shared libraries will not be created. Affects
# Unix builds only.
feature.feature <install-no-version-symlinks> : on : optional incidental ;
# Main-target class backing the 'install' rule: copies (or relinks) a set
# of built targets into a <location> directory, optionally traversing
# dependencies and filtering by target type.
class install-target-class : basic-target
{
import feature ;
import project ;
import type ;
import errors ;
import generators ;
import path ;
import stage ;
import "class" : new ;
import property ;
import property-set ;
rule __init__ ( name-and-dir : project : sources * : requirements * : default-build * )
{
basic-target.__init__ $(name-and-dir) : $(project) : $(sources) :
$(requirements) : $(default-build) ;
}
# If <location> is not set, sets it based on the project data.
#
rule update-location ( property-set )
{
local loc = [ $(property-set).get <location> ] ;
if ! $(loc)
{
# Default location: <target name> relative to the project directory.
loc = [ path.root $(self.name) [ $(self.project).get location ] ] ;
property-set = [ $(property-set).add-raw $(loc:G=<location>) ] ;
}
return $(property-set) ;
}
# Takes a target that is installed and a property set which is used when
# installing. Returns the property set to build/relink the staged copy with.
#
rule adjust-properties ( target : build-property-set )
{
local ps-raw ;
local a = [ $(target).action ] ;
if $(a)
{
local ps = [ $(a).properties ] ;
ps-raw = [ $(ps).raw ] ;
# Unless <hardcode-dll-paths>true is in properties, which can happen
# only if the user has explicitly requested it, nuke all <dll-path>
# properties.
if [ $(build-property-set).get <hardcode-dll-paths> ] != true
{
ps-raw = [ property.change $(ps-raw) : <dll-path> ] ;
}
# If any <dll-path> properties were specified for installing, add
# them.
local l = [ $(build-property-set).get <dll-path> ] ;
ps-raw += $(l:G=<dll-path>) ;
# Also copy <linkflags> feature from current build set, to be used
# for relinking.
local l = [ $(build-property-set).get <linkflags> ] ;
ps-raw += $(l:G=<linkflags>) ;
# Remove the <tag> feature on original targets.
ps-raw = [ property.change $(ps-raw) : <tag> ] ;
# And <location>. If stage target has another stage target in
# sources, then we shall get virtual targets with the <location>
# property set.
ps-raw = [ property.change $(ps-raw) : <location> ] ;
}
local d = [ $(build-property-set).get <dependency> ] ;
ps-raw += $(d:G=<dependency>) ;
local d = [ $(build-property-set).get <location> ] ;
ps-raw += $(d:G=<location>) ;
local ns = [ $(build-property-set).get <install-no-version-symlinks> ] ;
ps-raw += $(ns:G=<install-no-version-symlinks>) ;
local d = [ $(build-property-set).get <install-source-root> ] ;
# Make the path absolute: we shall use it to compute relative paths and
# making the path absolute will help.
if $(d)
{
d = [ path.root $(d) [ path.pwd ] ] ;
ps-raw += $(d:G=<install-source-root>) ;
}
if $(ps-raw)
{
return [ property-set.create $(ps-raw) ] ;
}
else
{
return [ property-set.empty ] ;
}
}
# Builds the staged copies of all sources. Returns an empty usage
# requirement set followed by the registered staged virtual targets.
rule construct ( name : source-targets * : property-set )
{
source-targets = [ targets-to-stage $(source-targets) :
$(property-set) ] ;
property-set = [ update-location $(property-set) ] ;
local ename = [ $(property-set).get <name> ] ;
if $(ename) && $(source-targets[2])
{
errors.error "When <name> property is used in 'install', only one"
"source is allowed" ;
}
local result ;
for local i in $(source-targets)
{
local staged-targets ;
local new-properties = [ adjust-properties $(i) :
$(property-set) ] ;
# See if something special should be done when staging this type. It
# is indicated by the presence of a special "INSTALLED_" type.
local t = [ $(i).type ] ;
if $(t) && [ type.registered INSTALLED_$(t) ]
{
if $(ename)
{
errors.error "In 'install': <name> property specified with target that requires relinking." ;
}
else
{
local targets = [ generators.construct $(self.project)
$(name) : INSTALLED_$(t) : $(new-properties) : $(i) ] ;
staged-targets += $(targets[2-]) ;
}
}
else
{
staged-targets = [ stage.copy-file $(self.project) $(ename) :
$(i) : $(new-properties) ] ;
}
if ! $(staged-targets)
{
# Fixed: previously referenced the unbound variable $(source),
# which dropped the failing target's name from the message.
errors.error "Unable to generate staged version of " [ $(i).str ] ;
}
for t in $(staged-targets)
{
result += [ virtual-target.register $(t) ] ;
}
}
return [ property-set.empty ] $(result) ;
}
# Given the list of source targets explicitly passed to 'stage', returns the
# list of targets which must be staged.
#
rule targets-to-stage ( source-targets * : property-set )
{
local result ;
# Traverse the dependencies, if needed.
if [ $(property-set).get <install-dependencies> ] = "on"
{
source-targets = [ collect-targets $(source-targets) ] ;
}
# Filter the target types, if needed.
local included-types = [ $(property-set).get <install-type> ] ;
for local r in $(source-targets)
{
local ty = [ $(r).type ] ;
if $(ty)
{
# Do not stage searched libs.
if $(ty) != SEARCHED_LIB
{
if $(included-types)
{
if [ include-type $(ty) : $(included-types) ]
{
result += $(r) ;
}
}
else
{
result += $(r) ;
}
}
}
else if ! $(included-types)
{
# Don't install typeless target if there is an explicit list of
# allowed types.
result += $(r) ;
}
}
return $(result) ;
}
# CONSIDER: figure out why we can not use virtual-target.traverse here.
#
rule collect-targets ( targets * )
{
# Find subvariants
local s ;
for local t in $(targets)
{
s += [ $(t).creating-subvariant ] ;
}
s = [ sequence.unique $(s) ] ;
local result = [ new set ] ;
$(result).add $(targets) ;
for local i in $(s)
{
$(i).all-referenced-targets $(result) ;
}
local result2 ;
for local r in [ $(result).list ]
{
if $(r:G) != <use>
{
result2 += $(r:G=) ;
}
}
DELETE_MODULE $(result) ;
# NOTE(review): no explicit 'return' here -- relies on the value of
# the final statement becoming the rule result; confirm against the
# jam language semantics before restructuring.
result = [ sequence.unique $(result2) ] ;
}
# Returns true iff 'type' is subtype of some element of 'types-to-include'.
#
local rule include-type ( type : types-to-include * )
{
local found ;
while $(types-to-include) && ! $(found)
{
if [ type.is-subtype $(type) $(types-to-include[1]) ]
{
found = true ;
}
types-to-include = $(types-to-include[2-]) ;
}
return $(found) ;
}
}
# Creates a copy of target 'source'. The 'properties' object should have a
# <location> property which specifies where the target must be placed.
# When <install-source-root> is set, the source's path relative to that root
# is preserved under the install location.
rule copy-file ( project name ? : source : properties )
{
name ?= [ $(source).name ] ;
local relative ;
local new-a = [ new non-scanning-action $(source) : common.copy :
$(properties) ] ;
local source-root = [ $(properties).get <install-source-root> ] ;
if $(source-root)
{
# Get the real path of the target. We probably need to strip relative
# path from the target name at construction.
local path = [ $(source).path ] ;
path = [ path.root $(name:D) $(path) ] ;
# Make the path absolute. Otherwise, it would be hard to compute the
# relative path. The 'source-root' is already absolute, see the
# 'adjust-properties' method above.
path = [ path.root $(path) [ path.pwd ] ] ;
relative = [ path.relative-to $(source-root) $(path) ] ;
}
# Note: Using $(name:D=$(relative)) might be faster here, but then we would
# need to explicitly check that relative is not ".", otherwise we might get
# paths like '<prefix>/boost/.', try to create it and mkdir would obviously
# fail.
name = [ path.join $(relative) $(name:D=) ] ;
return [ new file-target $(name) exact : [ $(source).type ] : $(project) :
$(new-a) ] ;
}
# Creates a file-target 'name' built by the symlink.ln action, pointing at
# 'source'. Same type as the source; used for shared-lib version links.
rule symlink ( name : project : source : properties )
{
local a = [ new action $(source) : symlink.ln : $(properties) ] ;
return [ new file-target $(name) exact : [ $(source).type ] : $(project) :
$(a) ] ;
}
# Re-runs the action that produced 'source' with the staging property set
# (e.g. new <dll-path>), yielding relinked targets instead of plain copies.
rule relink-file ( project : source : property-set )
{
local action = [ $(source).action ] ;
local cloned-action = [ virtual-target.clone-action $(action) : $(project) :
"" : $(property-set) ] ;
return [ $(cloned-action).targets ] ;
}
# Declare installed version of the EXE type. Generator for this type will cause
# relinking to the new location.
type.register INSTALLED_EXE : : EXE ;
# Generator converting EXE into INSTALLED_EXE. On Windows-like platforms the
# executable is simply copied; elsewhere it is relinked whenever the
# <dll-path> properties changed during install, so hardcoded rpaths in the
# installed binary stay valid.
class installed-exe-generator : generator
{
    import type ;
    import property-set ;
    import modules ;
    import stage ;

    rule __init__ ( )
    {
        generator.__init__ install-exe : EXE : INSTALLED_EXE ;
    }

    # Produces the installed target for 'source', either by copy or relink.
    rule run ( project name ? : property-set : source : multiple ? )
    {
        local need-relink ;
        if [ $(property-set).get <os> ] in NT CYGWIN ||
            [ $(property-set).get <target-os> ] in windows cygwin
        {
            # Windows has no rpath concept -- copying always suffices, so
            # need-relink stays unset.
        }
        else
        {
            # See if the dll-path properties are not changed during
            # install. If so, copy, don't relink.
            local a = [ $(source).action ] ;
            local p = [ $(a).properties ] ;
            local original = [ $(p).get <dll-path> ] ;
            local current = [ $(property-set).get <dll-path> ] ;
            if $(current) != $(original)
            {
                need-relink = true ;
            }
        }
        if $(need-relink)
        {
            return [ stage.relink-file $(project)
                : $(source) : $(property-set) ] ;
        }
        else
        {
            return [ stage.copy-file $(project)
                : $(source) : $(property-set) ] ;
        }
    }
}
generators.register [ new installed-exe-generator ] ;

# Installing a shared link on Unix might cause a creation of versioned symbolic
# links.
# INSTALLED_SHARED_LIB derives from SHARED_LIB and is handled by the
# generator declared below.
type.register INSTALLED_SHARED_LIB : : SHARED_LIB ;
# Generator converting SHARED_LIB into INSTALLED_SHARED_LIB. On Unix the
# library is relinked when rpaths changed, and versioned symlinks
# (libfoo.so, libfoo.so.N, libfoo.so.N.M) are created for fully-versioned
# library names.
class installed-shared-lib-generator : generator
{
    import type ;
    import property-set ;
    import modules ;
    import stage ;

    rule __init__ ( )
    {
        generator.__init__ install-shared-lib : SHARED_LIB
            : INSTALLED_SHARED_LIB ;
    }

    # Produces the installed library plus any versioned symlink targets.
    rule run ( project name ? : property-set : source : multiple ? )
    {
        if [ $(property-set).get <os> ] in NT CYGWIN ||
            [ $(property-set).get <target-os> ] in windows cygwin
        {
            # No symlinks or rpaths on Windows -- plain copy.
            local copied = [ stage.copy-file $(project) : $(source) :
                $(property-set) ] ;
            return [ virtual-target.register $(copied) ] ;
        }
        else
        {
            local a = [ $(source).action ] ;
            local copied ;
            if ! $(a)
            {
                # Non-derived file, just copy.
                copied = [ stage.copy-file $(project) : $(source) :
                    $(property-set) ] ;
            }
            else
            {
                local cp = [ $(a).properties ] ;
                local current-dll-path = [ $(cp).get <dll-path> ] ;
                local new-dll-path = [ $(property-set).get <dll-path> ] ;
                if $(current-dll-path) != $(new-dll-path)
                {
                    # Rpath changed, need to relink.
                    copied = [ stage.relink-file $(project) : $(source) :
                        $(property-set) ] ;
                }
                else
                {
                    copied = [ stage.copy-file $(project) : $(source) :
                        $(property-set) ] ;
                }
            }
            copied = [ virtual-target.register $(copied) ] ;
            local result = $(copied) ;
            # If the name is in the form NNN.XXX.YYY.ZZZ, where all 'X', 'Y' and
            # 'Z' are numbers, we need to create NNN.XXX and NNN.XXX.YYY
            # symbolic links.
            local m = [ MATCH (.*)\\.([0123456789]+)\\.([0123456789]+)\\.([0123456789]+)$
                : [ $(copied).name ] ] ;
            if $(m)
            {
                # Symlink without version at all is used to make
                # -lsome_library work.
                result += [ stage.symlink $(m[1]) : $(project) : $(copied) :
                    $(property-set) ] ;
                # Symlinks of some libfoo.N and libfoo.N.M are used so that the
                # library can be found at runtime, if libfoo.N.M.X has soname of
                # libfoo.N. That happens when the library makes some binary
                # compatibility guarantees. If not, it is possible to skip those
                # symlinks.
                local suppress =
                    [ $(property-set).get <install-no-version-symlinks> ] ;
                if $(suppress) != "on"
                {
                    result += [ stage.symlink $(m[1]).$(m[2]) : $(project)
                        : $(copied) : $(property-set) ] ;
                    result += [ stage.symlink $(m[1]).$(m[2]).$(m[3]) : $(project)
                        : $(copied) : $(property-set) ] ;
                }
            }
            return $(result) ;
        }
    }
}
# Make the shared-library install generator available to the build system.
generators.register [ new installed-shared-lib-generator ] ;
# Main target rule for 'install'.
#
# name          - name of the install target.
# sources       - targets to install.
# requirements  - extra properties; <hardcode-dll-paths>false is added unless
#                 hardcoding was explicitly requested.
# default-build - default build properties.
#
rule install ( name : sources * : requirements * : default-build * )
{
    local project = [ project.current ] ;
    # Unless the user has explicitly asked us to hardcode dll paths, add
    # <hardcode-dll-paths>false in requirements, to override default value.
    if ! <hardcode-dll-paths>true in $(requirements)
    {
        requirements += <hardcode-dll-paths>false ;
    }
    # <tag> would rename the installed files, which is not supported here.
    if <tag> in $(requirements:G)
    {
        errors.user-error
            "The <tag> property is not allowed for the 'install' rule" ;
    }
    targets.main-target-alternative
        [ new install-target-class $(name) : $(project)
            : [ targets.main-target-sources $(sources) : $(name) ]
            : [ targets.main-target-requirements $(requirements) : $(project) ]
            : [ targets.main-target-default-build $(default-build) : $(project) ]
        ] ;
}
# Export 'install' globally, under both its own name and the legacy name
# 'stage'.
IMPORT $(__name__) : install : : install ;
IMPORT $(__name__) : install : : stage ;

View File

@ -0,0 +1,303 @@
# Copyright Gennadiy Rozental
# Copyright 2006 Rene Rivera
# Copyright 2003, 2004, 2006 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# The STLPort is usable by means of 'stdlib' feature. When
# stdlib=stlport is specified, default version of STLPort will be used,
# while stdlib=stlport-4.5 will use specific version.
# The subfeature value 'hostios' means to use host compiler's iostreams.
#
# The specific version of stlport is selected by features:
# The <runtime-link> feature selects between static and shared library
# The <runtime-debugging>on selects STLPort with debug symbols
# and stl debugging.
# There's no way to use STLPort with debug symbols but without
# stl debugging.
# TODO: must implement selection of different STLPort installations based
# on used toolset.
# Also, finish various flags:
#
# This is copied from V1 toolset, "+" means "implemented"
#+flags $(CURR_TOOLSET) DEFINES <stlport-iostream>off : _STLP_NO_OWN_IOSTREAMS=1 _STLP_HAS_NO_NEW_IOSTREAMS=1 ;
#+flags $(CURR_TOOLSET) DEFINES <stlport-extensions>off : _STLP_NO_EXTENSIONS=1 ;
# flags $(CURR_TOOLSET) DEFINES <stlport-anachronisms>off : _STLP_NO_ANACHRONISMS=1 ;
# flags $(CURR_TOOLSET) DEFINES <stlport-cstd-namespace>global : _STLP_VENDOR_GLOBAL_CSTD=1 ;
# flags $(CURR_TOOLSET) DEFINES <exception-handling>off : _STLP_NO_EXCEPTIONS=1 ;
# flags $(CURR_TOOLSET) DEFINES <stlport-debug-alloc>on : _STLP_DEBUG_ALLOC=1 ;
#+flags $(CURR_TOOLSET) DEFINES <runtime-build>debug : _STLP_DEBUG=1 _STLP_DEBUG_UNINITIALIZED=1 ;
#+flags $(CURR_TOOLSET) DEFINES <runtime-link>dynamic : _STLP_USE_DYNAMIC_LIB=1 ;
import feature : feature subfeature ;
import project ;
import "class" : new ;
import targets ;
import property-set ;
import common ;
import type ;
# Make this module into a project.
# Registering as project 'stlport' lets /stlport//stlport references from
# feature composition resolve to the targets declared in this module.
project.initialize $(__name__) ;
project stlport ;
# The problem: how to request to use host compiler's iostreams?
#
# Solution 1: Global 'stlport-iostream' feature.
# That's ugly. Subfeature make more sense for stlport-specific thing.
# Solution 2: Use subfeature with two values, one of which ("use STLPort iostream")
# is default.
# The problem is that such subfeature will appear in target paths, and that's ugly
# Solution 3: Use optional subfeature with only one value.
# Extend the 'stdlib' feature with an 'stlport' value; selecting it pulls in
# the /stlport//stlport library through feature composition.
feature.extend stdlib : stlport ;
feature.compose <stdlib>stlport : <library>/stlport//stlport ;

# STLport iostreams or native iostreams
subfeature stdlib stlport : iostream : hostios : optional propagated ;
# STLport extensions
subfeature stdlib stlport : extensions : noext : optional propagated ;
# STLport anachronisms -- NOT YET SUPPORTED
# subfeature stdlib stlport : anachronisms : on off ;
# STLport debug allocation -- NOT YET SUPPORTED
#subfeature stdlib stlport : debug-alloc : off on ;
# Declare a special target class to handle the creation of search-lib-target
# instances for STLport. We need a special class, because otherwise we'll have
# - declare prebuilt targets for all possible toolsets. And by the time 'init'
#   is called we don't even know the list of toolsets that are registered
# - when host iostreams are used, we really should produce nothing. It would
#   be hard/impossible to achieve this using prebuilt targets.
class stlport-target-class : basic-target
{
    import feature project type errors generators ;
    import set : difference ;

    # headers   - location of the STLport header files.
    # libraries - location(s) of the STLport libraries.
    # version   - STLport version string, e.g. "5.1.0".
    rule __init__ ( project : headers ? : libraries * : version ? )
    {
        basic-target.__init__ stlport : $(project) ;
        self.headers = $(headers) ;
        self.libraries = $(libraries) ;
        self.version = $(version) ;
        # Non-empty iff this is a 5.x version; also caches the "5.N" prefix
        # used when composing versioned library names in 'construct'.
        self.version.5 = [ MATCH "^(5[.][0123456789]+).*" : $(version) ] ;
        local requirements ;
        requirements += <stdlib-stlport:version>$(self.version) ;
        self.requirements = [ property-set.create $(requirements) ] ;
    }

    rule generate ( property-set )
    {
        # Since this target is built with <stdlib>stlport, it will also
        # have <library>/stlport//stlport in requirements, which will
        # cause a loop in main target references. Remove that property
        # manually.
        property-set = [ property-set.create
            [ difference
                [ $(property-set).raw ] :
                <library>/stlport//stlport
                <stdlib>stlport
            ]
        ] ;
        return [ basic-target.generate $(property-set) ] ;
    }

    rule construct ( name : source-targets * : property-set )
    {
        # Deduce the name of stlport library, based on toolset and
        # debug setting.
        local raw = [ $(property-set).raw ] ;
        local hostios = [ feature.get-values <stdlib-stlport:iostream> : $(raw) ] ;
        local toolset = [ feature.get-values <toolset> : $(raw) ] ;
        if $(self.version.5)
        {
            # Version 5.x
            # STLport host IO streams no longer supported. So we always
            # need libraries.
            # name: stlport(stl)?[dg]?(_static)?.M.R
            local name = stlport ;
            if [ feature.get-values <runtime-debugging> : $(raw) ] = "on"
            {
                name += stl ;
                # gcc and darwin builds of STLport use 'g' for the debug
                # suffix; other toolsets use 'd'.
                switch $(toolset)
                {
                    case gcc* : name += g ;
                    case darwin* : name += g ;
                    case * : name += d ;
                }
            }
            if [ feature.get-values <runtime-link> : $(raw) ] = "static"
            {
                name += _static ;
            }
            # Starting with version 5.2.0, the STLport static libraries no longer
            # include a version number in their name.
            # Fixed: this previously read $(version), which is not in scope
            # inside 'construct' (the value is stored in self.version by
            # __init__), so the match was always empty and the pre-5.2
            # static-library naming never applied.
            local version.pre.5.2 = [ MATCH "^(5[.][01]+).*" : $(self.version) ] ;
            if $(version.pre.5.2) || [ feature.get-values <runtime-link> : $(raw) ] != "static"
            {
                name += .$(self.version.5) ;
            }
            # Join the accumulated name fragments into a single token.
            name = $(name:J=) ;
            if [ feature.get-values <install-dependencies> : $(raw) ] = "on"
            {
                #~ Allow explicitly asking to install the STLport lib by
                #~ refering to it directly: /stlport//stlport/<install-dependencies>on
                #~ This allows for install packaging of all libs one might need for
                #~ a standalone distribution.
                import path : make : path-make ;
                local runtime-link
                    = [ feature.get-values <runtime-link> : $(raw) ] ;
                local lib-file.props
                    = [ property-set.create $(raw) <link>$(runtime-link) ] ;
                local lib-file.prefix
                    = [ type.generated-target-prefix $(runtime-link:U)_LIB : $(lib-file.props) ] ;
                local lib-file.suffix
                    = [ type.generated-target-suffix $(runtime-link:U)_LIB : $(lib-file.props) ] ;
                lib-file.prefix
                    ?= "" "lib" ;
                lib-file.suffix
                    ?= "" ;
                # Locate the actual library file in the configured library
                # directories and on PATH.
                local lib-file
                    = [ GLOB $(self.libraries) [ modules.peek : PATH ] :
                        $(lib-file.prefix)$(name).$(lib-file.suffix) ] ;
                lib-file
                    = [ new file-reference [ path-make $(lib-file[1]) ] : $(self.project) ] ;
                lib-file
                    = [ $(lib-file).generate "" ] ;
                local lib-file.requirements
                    = [ targets.main-target-requirements
                        [ $(lib-file.props).raw ] <file>$(lib-file[-1])
                        : $(self.project) ] ;
                return [ generators.construct $(self.project) $(name) : LIB : $(lib-file.requirements) ] ;
            }
            else
            {
                #~ Otherwise, it's just a regular usage of the library.
                return [ generators.construct
                    $(self.project) $(name) : SEARCHED_LIB : $(property-set) ] ;
            }
        }
        else if ! $(hostios) && $(toolset) != msvc
        {
            # We don't need libraries if host istreams are used. For
            # msvc, automatic library selection will be used.
            # name: stlport_<toolset>(_stldebug)?
            local name = stlport ;
            name = $(name)_$(toolset) ;
            if [ feature.get-values <runtime-debugging> : $(raw) ] = "on"
            {
                name = $(name)_stldebug ;
            }
            return [ generators.construct
                $(self.project) $(name) : SEARCHED_LIB : $(property-set) ] ;
        }
        else
        {
            # Host iostreams (or msvc auto-linking): no library target needed.
            return [ property-set.empty ] ;
        }
    }

    # Computes usage requirements (include paths, library paths and the
    # _STLP_* defines) matching the build properties of 'subvariant'.
    rule compute-usage-requirements ( subvariant )
    {
        local usage-requirements =
            <include>$(self.headers)
            <dll-path>$(self.libraries)
            <library-path>$(self.libraries)
            ;

        local rproperties = [ $(subvariant).build-properties ] ;
        # CONSIDER: should this "if" sequence be replaced with
        # some use of 'property-map' class?
        if [ $(rproperties).get <runtime-debugging> ] = "on"
        {
            usage-requirements +=
                <define>_STLP_DEBUG=1
                <define>_STLP_DEBUG_UNINITIALIZED=1 ;
        }
        if [ $(rproperties).get <runtime-link> ] = "shared"
        {
            usage-requirements +=
                <define>_STLP_USE_DYNAMIC_LIB=1 ;
        }
        if [ $(rproperties).get <stdlib-stlport:extensions> ] = noext
        {
            usage-requirements +=
                <define>_STLP_NO_EXTENSIONS=1 ;
        }
        if [ $(rproperties).get <stdlib-stlport:iostream> ] = hostios
        {
            usage-requirements +=
                <define>_STLP_NO_OWN_IOSTREAMS=1
                <define>_STLP_HAS_NO_NEW_IOSTREAMS=1 ;
        }
        if $(self.version.5)
        {
            # Version 5.x
            if [ $(rproperties).get <threading> ] = "single"
            {
                # Since STLport5 doesn't normally support single-thread
                # we force STLport5 into the multi-thread mode. Hence
                # getting what other libs provide of single-thread code
                # linking against a multi-thread lib.
                usage-requirements +=
                    <define>_STLP_THREADS=1 ;
            }
        }
        return [ property-set.create $(usage-requirements) ] ;
    }
}
# Declares an alternative of the 'stlport' main target in the current
# project, described by the given headers/libraries/version.
rule stlport-target ( headers ? : libraries * : version ? )
{
    local current-project = [ project.current ] ;
    local alternative = [ new stlport-target-class $(current-project)
        : $(headers) : $(libraries) : $(version) ] ;
    targets.main-target-alternative $(alternative) ;
}
# Tracks whether the stdlib-stlport:version subfeature has been declared, so
# that repeated 'init' calls only extend it with new values.
local .version-subfeature-defined ;

# Initialize stlport support.
rule init (
    version ? :
    headers : # Location of header files
    libraries * # Location of libraries, lib and bin subdirs of STLport.
    )
{
    # FIXME: need to use common.check-init-parameters here.
    # At the moment, that rule always tries to define subfeature
    # of the 'toolset' feature, while we need to define subfeature
    # of <stdlib>stlport, so tweaks to check-init-parameters are needed.
    if $(version)
    {
        # Declare the version subfeature lazily, on first use.
        if ! $(.version-subfeature-defined)
        {
            feature.subfeature stdlib stlport : version : : propagated ;
            .version-subfeature-defined = true ;
        }
        feature.extend-subfeature stdlib stlport : version : $(version) ;
    }
    # Declare the main target for this STLPort version.
    stlport-target $(headers) : $(libraries) : $(version) ;
}

View File

@ -0,0 +1,142 @@
# Copyright (C) Christopher Currie 2003. Permission to copy, use,
# modify, sell and distribute this software is granted provided this
# copyright notice appears in all copies. This software is provided
# "as is" without express or implied warranty, and with no claim as
# to its suitability for any purpose.
import property ;
import generators ;
import os ;
import toolset : flags ;
import feature ;
import type ;
import common ;
# Declare the 'sun' (Sun Studio / Forte CC) toolset, deriving defaults from
# the generic 'unix' toolset.
feature.extend toolset : sun ;
toolset.inherit sun : unix ;
generators.override sun.prebuilt : builtin.lib-generator ;
generators.override sun.prebuilt : builtin.prebuilt ;
generators.override sun.searched-lib-generator : searched-lib-generator ;

# Sun's bundled STLport 4 can be selected with stdlib=sun-stlport.
feature.extend stdlib : sun-stlport ;
feature.compose <stdlib>sun-stlport
    : <cxxflags>-library=stlport4 <linkflags>-library=stlport4
    ;
# Initializes the sun toolset.
#
# version - optional version used to form the configuration condition.
# command - invocation command for the C++ compiler (CC), searched for in
#           /opt/SUNWspro/bin by default.
# options - extra options forwarded to common.handle-options.
#
rule init ( version ? : command * : options * )
{
    local condition = [
        common.check-init-parameters sun : version $(version) ] ;

    command = [ common.get-invocation-command sun : CC
        : $(command) : "/opt/SUNWspro/bin" ] ;

    # Even if the real compiler is not found, put CC to
    # command line so that user see command line that would have being executed.
    command ?= CC ;

    common.handle-options sun : $(condition) : $(command) : $(options) ;

    # Derive the C compiler command ('cc') from the C++ command ('CC') by
    # replacing the basename of the command's last element.
    # Fixed: declared 'command_c' as local so it no longer leaks into the
    # module's global scope.
    local command_c = $(command[1--2]) $(command[-1]:B=cc) ;

    toolset.flags sun CONFIG_C_COMMAND $(condition) : $(command_c) ;
}
# Declare generators
generators.register-c-compiler sun.compile.c : C : OBJ : <toolset>sun ;
generators.register-c-compiler sun.compile.c++ : CPP : OBJ : <toolset>sun ;

# Declare flags and actions for compilation
flags sun.compile OPTIONS <debug-symbols>on : -g ;
flags sun.compile OPTIONS <profiling>on : -xprofile=tcov ;
flags sun.compile OPTIONS <optimization>speed : -xO4 ;
flags sun.compile OPTIONS <optimization>space : -xO2 -xspace ;
flags sun.compile OPTIONS <threading>multi : -mt ;
flags sun.compile OPTIONS <warnings>off : -erroff ;
flags sun.compile OPTIONS <warnings>on : -erroff=%none ;
flags sun.compile OPTIONS <warnings>all : -erroff=%none ;
flags sun.compile OPTIONS <warnings-as-errors>on : -errwarn ;

# +d disables function inlining in Sun CC.
flags sun.compile.c++ OPTIONS <inlining>off : +d ;

# The -m32 and -m64 options are supported starting
# with Sun Studio 12. On earlier compilers, the
# 'address-model' feature is not supported and should not
# be used. Instead, use -xarch=generic64 command line
# option.
# See http://svn.boost.org/trac/boost/ticket/1186
# for details.
flags sun OPTIONS <address-model>32 : -m32 ;
flags sun OPTIONS <address-model>64 : -m64 ;

# On sparc, there's a difference between -Kpic
# and -KPIC. The first is slightly more efficient,
# but has the limits on the size of GOT table.
# For minimal fuss on user side, we use -KPIC here.
# See http://svn.boost.org/trac/boost/ticket/1186#comment:6
# for detailed explanation.
flags sun OPTIONS <link>shared : -KPIC ;

# Pass user-specified flags, defines and include paths through.
flags sun.compile OPTIONS <cflags> ;
flags sun.compile.c++ OPTIONS <cxxflags> ;
flags sun.compile DEFINES <define> ;
flags sun.compile INCLUDES <include> ;
# Compile C sources with the configured C compiler (derived 'cc' command).
actions compile.c
{
    "$(CONFIG_C_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
}

# Compile C++ sources with the configured C++ compiler (CC).
actions compile.c++
{
    "$(CONFIG_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
}
# Declare flags and actions for linking
flags sun.link OPTIONS <debug-symbols>on : -g ;
# Strip the binary when no debugging is needed
flags sun.link OPTIONS <debug-symbols>off : -s ;
flags sun.link OPTIONS <profiling>on : -xprofile=tcov ;
flags sun.link OPTIONS <threading>multi : -mt ;
flags sun.link OPTIONS <linkflags> ;
flags sun.link LINKPATH <library-path> ;
# FINDLIBS-ST/-SA collect libraries linked with -Bstatic / -Bdynamic.
flags sun.link FINDLIBS-ST <find-static-library> ;
flags sun.link FINDLIBS-SA <find-shared-library> ;
flags sun.link LIBRARIES <library-file> ;
flags sun.link LINK-RUNTIME <runtime-link>static : static ;
flags sun.link LINK-RUNTIME <runtime-link>shared : dynamic ;
flags sun.link RPATH <dll-path> ;
# On gcc, there are separate options for dll path at runtime and
# link time. On Solaris, there's only one: -R, so we have to use
# it, even though it's bad idea.
flags sun.link RPATH <xdll-path> ;
# The POSIX real-time library is always needed (nanosleep, clock_gettime etc.)
flags sun.link FINDLIBS-SA : rt ;
# Set the list-joining separator used when the action body expands lists.
rule link ( targets * : sources * : properties * )
{
    SPACE on $(targets) = " " ;
}

actions link bind LIBRARIES
{
    "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME)
}

# Slight mods for dlls
rule link.dll ( targets * : sources * : properties * )
{
    SPACE on $(targets) = " " ;
}

# -G produces a shared object; -h sets its soname to the bare filename.
actions link.dll bind LIBRARIES
{
    "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" -h$(<[1]:D=) -G "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME)
}

# Declare action for creating static libraries
# -xar archives via the compiler driver, which also instantiates templates.
actions piecemeal archive
{
    "$(CONFIG_COMMAND)" -xar -o "$(<)" "$(>)"
}

View File

@ -0,0 +1,140 @@
# Copyright 2003 Dave Abrahams
# Copyright 2002, 2003 Rene Rivera
# Copyright 2002, 2003, 2004, 2005 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Defines the "symlink" special target. 'symlink' targets make symbolic links
# to the sources.
import targets modules path class os feature project property-set ;
# Counter used to generate unique fake names for unnamed symlink targets.
.count = 0 ;

# Controls whether symlinks are placed relative to the project directory or
# in the build directory.
feature.feature symlink-location : project-relative build-relative : incidental ;
# The class representing "symlink" targets.
#
class symlink-targets : basic-target
{
    import numbers modules class property project path ;

    # project - owning project.
    # targets - symlink names; padded with source names or truncated to
    #           match the number of sources.
    # sources - targets the symlinks point at.
    rule __init__ (
        project
        : targets *
        : sources *
    )
    {
        # Generate a fake name for now. Need unnamed targets eventually.
        local c = [ modules.peek symlink : .count ] ;
        modules.poke symlink : .count : [ numbers.increment $(c) ] ;
        local fake-name = symlink#$(c) ;
        basic-target.__init__ $(fake-name) : $(project) : $(sources) ;

        # Remember the targets to map the sources onto. Pad or truncate
        # to fit the sources given.
        self.targets = ;
        for local source in $(sources)
        {
            if $(targets)
            {
                self.targets += $(targets[1]) ;
                targets = $(targets[2-]) ;
            }
            else
            {
                self.targets += $(source) ;
            }
        }
        # The virtual targets corresponding to the given targets.
        self.virtual-targets = ;
    }

    # Creates one file-target per source, linked via the symlink.ln action.
    rule construct ( name : source-targets * : property-set )
    {
        local i = 1 ;
        for local t in $(source-targets)
        {
            local s = $(self.targets[$(i)]) ;
            local a = [ class.new action $(t) : symlink.ln : $(property-set) ] ;
            local vt = [ class.new file-target $(s:D=)
                : [ $(t).type ] : $(self.project) : $(a) ] ;

            # Place the symlink in the directory relative to the project
            # location, instead of placing it in the build directory.
            if [ property.select <symlink-location> : [ $(property-set).raw ] ] = <symlink-location>project-relative
            {
                $(vt).set-path [ path.root $(s:D) [ $(self.project).get location ] ] ;
            }

            self.virtual-targets += $(vt) ;
            i = [ numbers.increment $(i) ] ;
        }
        return [ property-set.empty ] $(self.virtual-targets) ;
    }
}
# Creates a symbolic link from a set of targets to a set of sources.
# The targets and sources map one to one. The symlinks generated are
# limited to be the ones given as the sources. That is, the targets
# are either padded or trimmed to equate to the sources. The padding
# is done with the name of the corresponding source. For example::
#
#     symlink : one two ;
#
# Is equal to::
#
#     symlink one two : one two ;
#
# Names for symlink are relative to the project location. They cannot
# include ".." path components.
rule symlink (
    targets *
    : sources *
)
{
    local project = [ project.current ] ;

    return [ targets.main-target-alternative
        [ class.new symlink-targets $(project) : $(targets) :
            # Note: inline targets are not supported for symlink, intentionally,
            # since it is used for linking existing non-local targets.
            $(sources) ] ] ;
}
# Set-up rule for the symlink.ln action: computes the path from the link's
# location to the source and dispatches to the OS-specific action
# (ln-UNIX or ln-NT).
rule ln
{
    local os ;
    if [ modules.peek : UNIX ] { os = UNIX ; }
    else { os ?= [ os.name ] ; }
    # Remember the path to make the link relative to where the symlink is located.
    local path-to-source = [ path.relative-to
        [ path.make [ on $(<) return $(LOCATE) ] ]
        [ path.make [ on $(>) return $(LOCATE) ] ] ] ;
    if $(path-to-source) = .
    {
        # Same directory: link directly to the bare filename.
        PATH_TO_SOURCE on $(<) = "" ;
    }
    else
    {
        PATH_TO_SOURCE on $(<) = [ path.native $(path-to-source) ] ;
    }
    ln-$(os) $(<) : $(>) ;
}
# Create (or replace) a relative symbolic link on Unix.
actions ln-UNIX
{
    ln -f -s '$(>:D=:R=$(PATH_TO_SOURCE))' '$(<)'
}

# there is a way to do this; we fall back to a copy for now
actions ln-NT
{
    echo "NT symlinks not supported yet, making copy"
    del /f /q "$(<)" 2>nul >nul
    copy "$(>)" "$(<)" $(NULL_OUT)
}

IMPORT $(__name__) : symlink : : symlink ;
View File

@ -0,0 +1,210 @@
# This module is imported by testing.py. The definitions here are
# too tricky to do in Python
# Causes the 'target' to exist after bjam invocation if and only if all the
# dependencies were successfully built.
#
rule expect-success ( target : dependency + : requirements * )
{
    # Fixed: this previously passed $(sources), an unset variable, so the
    # **passed** action received no sources and was disconnected from the
    # actual dependencies. Pass the 'dependency' parameter instead, so the
    # marker is only created when all dependencies built successfully.
    **passed** $(target) : $(dependency) ;
}
IMPORT testing : expect-success : : testing.expect-success ;
# Causes the 'target' to exist after bjam invocation if and only if some of
# the dependencies were not successfully built.
#
rule expect-failure ( target : dependency + : properties * )
{
    # Derive a companion "*fail" marker target with the same grist and
    # location as the dependency.
    local grist = [ MATCH ^<(.*)> : $(dependency:G) ] ;
    local marker = $(dependency:G=$(grist)*fail) ;
    (failed-as-expected) $(marker) ;
    # Invert the meaning of the dependency's build result: failure counts
    # as success for this test.
    FAIL_EXPECTED $(dependency) ;
    LOCATE on $(marker) = [ on $(dependency) return $(LOCATE) ] ;
    RMOLD $(marker) ;
    DEPENDS $(marker) : $(dependency) ;
    DEPENDS $(target) : $(marker) ;
    **passed** $(target) : $(marker) ;
}
IMPORT testing : expect-failure : : testing.expect-failure ;
# The rule/action combination used to report successful passing of a test.
# The rule runs before the action and clears any stale marker.
#
rule **passed**
{
    # Force deletion of the target, in case any dependencies failed to build.
    RMOLD $(<) ;
}

# Used to create test files signifying passed tests.
#
actions **passed**
{
    echo passed > "$(<)"
}

# Used to create replacement object files that do not get created during tests
# that are expected to fail.
#
actions (failed-as-expected)
{
    echo failed as expected > "$(<)"
}
# Runs executable 'sources' and stores stdout in file 'target'. Unless
# --preserve-test-targets command line option has been specified, removes the
# executable. The 'target-to-remove' parameter controls what should be removed:
#   - if 'none', does not remove anything, ever
#   - if empty, removes 'source'
#   - if non-empty and not 'none', contains a list of sources to remove.
#
rule capture-output ( target : source : properties * : targets-to-remove * )
{
    # The run output is first captured into <target>.output, then copied over
    # the target itself when the run's exit status is acceptable.
    output-file on $(target) = $(target:S=.output) ;
    LOCATE on $(target:S=.output) = [ on $(target) return $(LOCATE) ] ;

    # The INCLUDES kill a warning about independent target...
    INCLUDES $(target) : $(target:S=.output) ;
    # but it also puts .output into dependency graph, so we must tell jam it is
    # OK if it cannot find the target or updating rule.
    NOCARE $(target:S=.output) ;

    # This has two-fold effect. First it adds input files to the dependency
    # graph, preventing a warning. Second, it causes input files to be bound
    # before target is created. Therefore, they are bound using SEARCH setting
    # on them and not LOCATE setting of $(target), as in other case (due to jam
    # bug).
    DEPENDS $(target) : [ on $(target) return $(INPUT_FILES) ] ;

    if $(targets-to-remove) = none
    {
        targets-to-remove = ;
    }
    else if ! $(targets-to-remove)
    {
        targets-to-remove = $(source) ;
    }

    if [ on $(target) return $(REMOVE_TEST_TARGETS) ]
    {
        TEMPORARY $(targets-to-remove) ;
        # Set a second action on target that will be executed after capture
        # output action. The 'RmTemps' rule has the 'ignore' modifier so it is
        # always considered succeeded. This is needed for 'run-fail' test. For
        # that test the target will be marked with FAIL_EXPECTED, and without
        # 'ignore' successful execution will be negated and be reported as
        # failure. With 'ignore' we do not detect a case where removing files
        # fails, but it is not likely to happen.
        RmTemps $(target) : $(targets-to-remove) ;
    }
}
# Shell-syntax fragments used by the 'capture-output' action, selected once
# per platform so a single action body works with both cmd.exe and POSIX
# shells.
if [ os.name ] = NT
{
    .STATUS = %status% ;
    .SET_STATUS = "set status=%ERRORLEVEL%" ;
    .RUN_OUTPUT_NL = "echo." ;
    .STATUS_0 = "%status% EQU 0 (" ;
    .STATUS_NOT_0 = "%status% NEQ 0 (" ;
    .VERBOSE = "%verbose% EQU 1 (" ;
    .ENDIF = ")" ;
    .SHELL_SET = "set " ;
    .CATENATE = type ;
    .CP = copy ;
}
else
{
    .STATUS = "$status" ;
    .SET_STATUS = "status=$?" ;
    .RUN_OUTPUT_NL = "echo" ;
    .STATUS_0 = "test $status -eq 0 ; then" ;
    .STATUS_NOT_0 = "test $status -ne 0 ; then" ;
    .VERBOSE = "test $verbose -eq 1 ; then" ;
    .ENDIF = "fi" ;
    .SHELL_SET = "" ;
    .CATENATE = cat ;
    .CP = cp ;
}

# When --verbose-test is passed, always echo each test's captured output.
.VERBOSE_TEST = 0 ;
if --verbose-test in [ modules.peek : ARGV ]
{
    .VERBOSE_TEST = 1 ;
}

# Platform-appropriate file removal command.
.RM = [ common.rm-command ] ;
# Runs the test executable, records its exit status into the .output file,
# promotes the output over the target on success, and echoes the output when
# verbose mode is on or the run failed.
actions capture-output bind INPUT_FILES output-file
{
    $(PATH_SETUP)
    $(LAUNCHER) "$(>)" $(ARGS) "$(INPUT_FILES)" > "$(output-file)" 2>&1
    $(.SET_STATUS)
    $(.RUN_OUTPUT_NL) >> "$(output-file)"
    echo EXIT STATUS: $(.STATUS) >> "$(output-file)"
    if $(.STATUS_0)
        $(.CP) "$(output-file)" "$(<)"
    $(.ENDIF)
    $(.SHELL_SET)verbose=$(.VERBOSE_TEST)
    if $(.STATUS_NOT_0)
        $(.SHELL_SET)verbose=1
    $(.ENDIF)
    if $(.VERBOSE)
        echo ====== BEGIN OUTPUT ======
        $(.CATENATE) "$(output-file)"
        echo ====== END OUTPUT ======
    $(.ENDIF)
    exit $(.STATUS)
}
IMPORT testing : capture-output : : testing.capture-output ;

# Removes temporary test targets; 'ignore' prevents a failed removal from
# being reported as a test failure (important for run-fail tests).
actions quietly updated ignore piecemeal together RmTemps
{
    $(.RM) "$(>)"
}
# Platform-appropriate command for creating a marker file.
.MAKE_FILE = [ common.file-creation-command ] ;

# Runs a unit test executable and creates the marker file only on success.
actions unit-test
{
    $(PATH_SETUP)
    $(LAUNCHER) $(>) $(ARGS) && $(.MAKE_FILE) $(<)
}
# Accumulates user/system build time for 'source' on 'target' (displayed
# later by the 'time' action). The 'start' and 'end' parameters are part of
# the __TIMING_RULE__ calling convention but are unused here.
rule record-time ( target : source : start end user system )
{
    # Prefix each time value with the source name, e.g. [foo.cpp] 0.12
    local src-string = [$(source:G=:J=",")"] " ;
    USER_TIME on $(target) += $(src-string)$(user) ;
    SYSTEM_TIME on $(target) += $(src-string)$(system) ;
}
# Calling this rule requests that Boost Build time how long it takes to build
# the 'source' target and display the results both on the standard output and
# in the 'target' file.
#
rule time ( target : source : properties * )
{
    # Set up rule for recording timing information.
    __TIMING_RULE__ on $(source) = testing.record-time $(target) ;

    # Make sure that the source is rebuilt any time we need to retrieve that
    # information.
    REBUILDS $(target) : $(source) ;
}

actions time
{
    echo user: $(USER_TIME)
    echo system: $(SYSTEM_TIME)

    echo user: $(USER_TIME)" seconds" > "$(<)"
    echo system: $(SYSTEM_TIME)" seconds" >> "$(<)"
}

View File

@ -0,0 +1,581 @@
# Copyright 2005 Dave Abrahams
# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# This module implements regression testing framework. It declares a number of
# main target rules which perform some action and, if the results are OK,
# creates an output file.
#
# The exact list of rules is:
# 'compile' -- creates .test file if compilation of sources was
# successful.
# 'compile-fail' -- creates .test file if compilation of sources failed.
# 'run' -- creates .test file is running of executable produced from
# sources was successful. Also leaves behind .output file
# with the output from program run.
# 'run-fail' -- same as above, but .test file is created if running fails.
#
# In all cases, presence of .test file is an indication that the test passed.
# For more convenient reporting, you might want to use C++ Boost regression
# testing utilities (see http://www.boost.org/more/regression.html).
#
# For historical reason, a 'unit-test' rule is available which has the same
# syntax as 'exe' and behaves just like 'run'.
# Things to do:
# - Teach compiler_status handle Jamfile.v2.
# Notes:
# - <no-warn> is not implemented, since it is Como-specific, and it is not
# clear how to implement it
# - std::locale-support is not implemented (it is used in one test).
import alias ;
import "class" ;
import common ;
import errors ;
import feature ;
import generators ;
import os ;
import path ;
import project ;
import property ;
import property-set ;
import regex ;
import sequence ;
import targets ;
import toolset ;
import type ;
import virtual-target ;
# Module initialization hook invoked by the build system. This module needs
# no per-project setup, so the rule is intentionally empty.
rule init ( )
{
}
# Feature controlling the command used to launch test programs (e.g. a
# debugger or emulator wrapper). Free-form, may be omitted.
feature.feature testing.launcher : : free optional ;
# Free-form description attached to a test; reported by --dump-tests.
feature.feature test-info : : free incidental ;
# Extra command-line arguments passed to the test executable.
feature.feature testing.arg : : free incidental ;
# Input files passed to the test executable; declared 'dependency' so the
# files become dependencies of the test.
feature.feature testing.input-file : : free dependency ;
# When 'off', test executables are removed once the test has run.
feature.feature preserve-test-targets : on off : incidental propagated ;
# Register target types. The optional second argument is the file suffix for
# the generated marker file; types derived from TEST (third argument) reuse
# the ".test" suffix.
type.register TEST : test ;
type.register COMPILE : : TEST ;
type.register COMPILE_FAIL : : TEST ;
type.register RUN_OUTPUT : run ;
type.register RUN : : TEST ;
type.register RUN_FAIL : : TEST ;
type.register LINK_FAIL : : TEST ;
type.register LINK : : TEST ;
type.register UNIT_TEST : passed : TEST ;
# Declare the rules which create main targets. While the 'type' module already
# creates rules with the same names for us, we need extra convenience: default
# name of main target, so write our own versions.
# Helper rule. Create a test target, using basename of first source if no target
# name is explicitly passed. Remembers the created target in a global variable.
#
# Helper rule. Creates a test target of the given type, using the basename of
# the first source if no target name is explicitly passed. Remembers the
# created target in the module-global .all-tests list (used by --dump-tests).
#
# target-type  - rule-name form of the test type, e.g. "compile" or "run".
# sources      - sources to build (and possibly run) for the test.
# requirements - extra build requirements.
# target-name  - optional explicit name; defaults to the first source's
#                basename.
rule make-test ( target-type : sources + : requirements * : target-name ? )
{
    target-name ?= $(sources[1]:D=:S=) ;
    # Having periods (".") in the target name is problematic because the typed
    # generator will strip the suffix and use the bare name for the file
    # targets. Even though the location-prefix averts problems most times it
    # does not prevent ambiguity issues when referring to the test targets.
    # For example, when using the XML log output. So we rename the target to
    # remove the periods and provide an alias for users.
    local real-name = [ regex.replace $(target-name) "[.]" "~" ] ;
    local project = [ project.current ] ;
    # The <location-prefix> forces the build system to generate paths in the
    # form '$build_dir/array1.test/gcc/debug'. This is necessary to allow
    # post-processing tools to work.
    local t = [ targets.create-typed-target [ type.type-from-rule-name
        $(target-type) ] : $(project) : $(real-name) : $(sources) :
        $(requirements) <location-prefix>$(real-name).test ] ;
    # The alias to the real target, per the period replacement above.
    if $(real-name) != $(target-name)
    {
        alias $(target-name) : $(t) ;
    }
    # Remember the test (for --dump-tests). A good way would be to collect all
    # given a project. This has some technical problems: e.g. we can not call
    # this dump from a Jamfile since projects referred to by 'build-project'
    # are not available until the whole Jamfile has been loaded.
    .all-tests += $(t) ;
    return $(t) ;
}
# Note: passing more than one cpp file here is known to fail. Passing a cpp
# file and a library target works.
#
# Declares a compile test: the .test file is created if and only if the
# sources compile successfully.
rule compile ( sources + : requirements * : target-name ? )
{
    local result = [ make-test compile : $(sources) : $(requirements)
        : $(target-name) ] ;
    return $(result) ;
}
# Declares a compile-fail test: the .test file is created if and only if
# compiling the sources fails.
rule compile-fail ( sources + : requirements * : target-name ? )
{
    local result = [ make-test compile-fail : $(sources) : $(requirements)
        : $(target-name) ] ;
    return $(result) ;
}
# Declares a link test: the .test file is created if and only if the sources
# build and link successfully.
rule link ( sources + : requirements * : target-name ? )
{
    local result = [ make-test link : $(sources) : $(requirements)
        : $(target-name) ] ;
    return $(result) ;
}
# Declares a link-fail test: the .test file is created if and only if linking
# the sources fails.
rule link-fail ( sources + : requirements * : target-name ? )
{
    local result = [ make-test link-fail : $(sources) : $(requirements)
        : $(target-name) ] ;
    return $(result) ;
}
# Validates a list of test input files and converts it into the corresponding
# <testing.input-file> properties.
#
# Multiple input files must be given pre-sorted: property-set creation sorts
# property values, so an unsorted list would silently change the order in
# which the files are passed to the test.
rule handle-input-files ( input-files * )
{
    if $(input-files[2])
    {
        # Check that the sorting made when creating a property-set instance
        # will not change the ordering.
        if [ sequence.insertion-sort $(input-files) ] != $(input-files)
        {
            errors.user-error "Names of input files must be sorted alphabetically"
                : "due to internal limitations" ;
        }
    }
    return <testing.input-file>$(input-files) ;
}
# Declares a run test: builds an executable from 'sources', runs it with
# 'args' and 'input-files', and creates the .test file if the run succeeds.
rule run ( sources + : args * : input-files * : requirements * : target-name ? :
    default-build * )
{
    # Fold the run arguments and input files into the requirements, in the
    # same order the original incremental appends produced.
    local all-reqs = $(requirements) <testing.arg>$(args:J=" ")
        [ handle-input-files $(input-files) ] ;
    return [ make-test run : $(sources) : $(all-reqs) : $(target-name) ] ;
}
# Declares a run-fail test: like 'run', but the .test file is created if and
# only if running the executable fails.
rule run-fail ( sources + : args * : input-files * : requirements * :
    target-name ? : default-build * )
{
    # Fold the run arguments and input files into the requirements, in the
    # same order the original incremental appends produced.
    local all-reqs = $(requirements) <testing.arg>$(args:J=" ")
        [ handle-input-files $(input-files) ] ;
    return [ make-test run-fail : $(sources) : $(all-reqs) : $(target-name) ] ;
}
# Use 'test-suite' as a synonym for 'alias', for backward compatibility.
IMPORT : alias : : test-suite ;
# For all main targets in 'project-module', which are typed targets with type
# derived from 'TEST', produce some interesting information.
#
rule dump-tests
{
    # Describe every test target registered so far (collected by make-test),
    # on behalf of the --dump-tests command-line option.
    for local test-target in $(.all-tests)
    {
        dump-test $(test-target) ;
    }
}
# Given a project location in normalized form (all slashes forward), computes
# the name of the Boost library it belongs to (e.g. "python" for a path like
# ".../libs/python/test"). Returns the empty string for the status directory.
# When the path does not match the Boost tree layout and --dump-tests was
# requested, returns the path itself, since a 'library name' makes no sense
# outside Boost.
local rule get-library-name ( path )
{
    # Path is in normalized form, so all slashes are forward.
    local match1 = [ MATCH /(tools|libs)/(.*)/(test|example) : $(path) ] ;
    local match2 = [ MATCH /(tools|libs)/(.*)$ : $(path) ] ;
    local match3 = [ MATCH (/status$) : $(path) ] ;
    if $(match1) { return $(match1[2]) ; }
    else if $(match2) { return $(match2[2]) ; }
    else if $(match3) { return "" ; }
    else if --dump-tests in [ modules.peek : ARGV ]
    {
        # The 'run' rule and others might be used outside boost. In that case,
        # just return the path, since the 'library name' makes no sense.
        return $(path) ;
    }
}
# Was an XML dump requested? If --out-xml=FILE was given on the command line,
# .out-xml holds FILE and dump-test accumulates into it instead of echoing.
.out-xml = [ MATCH --out-xml=(.*) : [ modules.peek : ARGV ] ] ;
# Takes a target (an instance of 'basic-target') and prints:
#   - its type
#   - its name
#   - comments specified via the <test-info> property
#   - relative location of all sources from the project root.
# With --out-xml, the same information is accumulated into the XML report
# file instead of being written to stdout.
#
rule dump-test ( target )
{
    local type = [ $(target).type ] ;
    local name = [ $(target).name ] ;
    local project = [ $(target).project ] ;
    local project-root = [ $(project).get project-root ] ;
    # Prefix the test name with the Boost library name, when recognizable.
    local library = [ get-library-name [ path.root [ $(project).get location ]
        [ path.pwd ] ] ] ;
    if $(library)
    {
        name = $(library)/$(name) ;
    }
    # Collect the paths of all file sources, relative to the project root.
    local sources = [ $(target).sources ] ;
    local source-files ;
    for local s in $(sources)
    {
        if [ class.is-a $(s) : file-reference ]
        {
            local location = [ path.root [ path.root [ $(s).name ]
                [ $(s).location ] ] [ path.pwd ] ] ;
            source-files += [ path.relative-to [ path.root $(project-root)
                [ path.pwd ] ] $(location) ] ;
        }
    }
    # Join "<location>//<name>.test" into a single string.
    local target-name = [ $(project).get location ] // [ $(target).name ] .test
        ;
    target-name = $(target-name:J=) ;
    local r = [ $(target).requirements ] ;
    # Extract values of the <test-info> feature.
    local test-info = [ $(r).get <test-info> ] ;
    # If the user requested XML output on the command-line, add the test info
    # to that XML file rather than dumping them to stdout.
    if $(.out-xml)
    {
        # String containing a single literal newline (the closing quote is
        # deliberately at column 0 on the next line).
        local nl = "
" ;
        .contents on $(.out-xml) +=
            "$(nl) <test type=\"$(type)\" name=\"$(name)\">"
            "$(nl) <target><![CDATA[$(target-name)]]></target>"
            "$(nl) <info><![CDATA[$(test-info)]]></info>"
            "$(nl) <source><![CDATA[$(source-files)]]></source>"
            "$(nl) </test>"
            ;
    }
    else
    {
        # Format them into a single string of quoted strings.
        test-info = \"$(test-info:J=\"\ \")\" ;
        ECHO boost-test($(type)) \"$(name)\" [$(test-info)] ":"
            \"$(source-files)\" ;
    }
}
# Register generators. Depending on the target type, either the
# 'expect-success' or the 'expect-failure' rule will be used.
generators.register-standard testing.expect-success : OBJ : COMPILE ;
generators.register-standard testing.expect-failure : OBJ : COMPILE_FAIL ;
generators.register-standard testing.expect-success : RUN_OUTPUT : RUN ;
generators.register-standard testing.expect-failure : RUN_OUTPUT : RUN_FAIL ;
generators.register-standard testing.expect-failure : EXE : LINK_FAIL ;
generators.register-standard testing.expect-success : EXE : LINK ;
# Generator which runs an EXE and captures its output.
generators.register-standard testing.capture-output : EXE : RUN_OUTPUT ;
# Generator which creates a target if sources run successfully. Differs from
# RUN in that run output is not captured. The reason why it exists is that the
# 'run' rule is much better for automated testing, but is not user-friendly
# (see http://article.gmane.org/gmane.comp.lib.boost.build/6353).
generators.register-standard testing.unit-test : EXE : UNIT_TEST ;
# The action rules called by generators.
# Causes the 'target' to exist after bjam invocation if and only if all of
# the dependencies were successfully built.
#
rule expect-success ( target : dependency + : requirements * )
{
    # Fix: the original referenced the undefined variable $(sources), which
    # always expanded to an empty list; the parameter is named 'dependency'.
    # (**passed** does not use its second argument, so this is name hygiene.)
    **passed** $(target) : $(dependency) ;
}
# Causes the 'target' to exist after bjam invocation if and only if some of
# the dependencies were not successfully built.
#
rule expect-failure ( target : dependency + : properties * )
{
    # Derive a marker name by appending "*fail" to the dependency's grist, so
    # the marker cannot clash with the real target file.
    local grist = [ MATCH ^<(.*)> : $(dependency:G) ] ;
    local marker = $(dependency:G=$(grist)*fail) ;
    # The marker is produced by the (failed-as-expected) action; FAIL_EXPECTED
    # makes jam treat a failing build of the dependency as success.
    (failed-as-expected) $(marker) ;
    FAIL_EXPECTED $(dependency) ;
    # Place the marker where the dependency would be built, and remove any
    # stale copy from a previous run.
    LOCATE on $(marker) = [ on $(dependency) return $(LOCATE) ] ;
    RMOLD $(marker) ;
    DEPENDS $(marker) : $(dependency) ;
    DEPENDS $(target) : $(marker) ;
    **passed** $(target) : $(marker) ;
}
# The rule/action combination used to report successful passing of a test.
#
rule **passed**
{
    # Dump all the tests, if needed. We do it here since the dump should
    # happen only after all Jamfiles have been read, and there is no such
    # place currently defined (but there should be).
    if ! $(.dumped-tests) && ( --dump-tests in [ modules.peek : ARGV ] )
    {
        .dumped-tests = true ;
        dump-tests ;
    }
    # Force deletion of the target, in case any dependencies failed to build.
    RMOLD $(<) ;
}
# Used to create test files signifying passed tests: writes the word "passed"
# into the marker file named by the target.
#
actions **passed**
{
    echo passed > "$(<)"
}
# Used to create replacement object files that do not get created during
# tests that are expected to fail.
#
actions (failed-as-expected)
{
    echo failed as expected > "$(<)"
}
# Sets the PATH_SETUP variable on 'target' to a shell command that extends
# the shared-library search path, so that the test can find all dynamic
# libraries it depends on when it is run.
rule run-path-setup ( target : source : properties * )
{
    # For testing, we need to make sure that all dynamic libraries needed by
    # the test are found. So, we collect all paths from dependency libraries
    # (via the xdll-path property) and add whatever explicit dll-path the user
    # has specified. The resulting paths are added to the environment on each
    # test invocation.
    local dll-paths = [ feature.get-values <dll-path> : $(properties) ] ;
    dll-paths += [ feature.get-values <xdll-path> : $(properties) ] ;
    dll-paths += [ on $(source) return $(RUN_PATH) ] ;
    dll-paths = [ sequence.unique $(dll-paths) ] ;
    if $(dll-paths)
    {
        dll-paths = [ sequence.transform path.native : $(dll-paths) ] ;
        PATH_SETUP on $(target) = [ common.prepend-path-variable-command
            [ os.shared-library-path-variable ] : $(dll-paths) ] ;
    }
}
# NOTE(review): 'argv' does not appear to be referenced in the code below —
# possibly a leftover; verify against the full module before removing.
local argv = [ modules.peek : ARGV ] ;
# Expose testing properties to the capture-output action as variables.
toolset.flags testing.capture-output ARGS <testing.arg> ;
toolset.flags testing.capture-output INPUT_FILES <testing.input-file> ;
toolset.flags testing.capture-output LAUNCHER <testing.launcher> ;
# Runs executable 'source' and stores its stdout in file 'target'. Unless the
# --preserve-test-targets command line option has been specified, removes the
# executable. The 'targets-to-remove' parameter controls what should be
# removed:
#   - if 'none', does not remove anything, ever
#   - if empty, removes 'source'
#   - if non-empty and not 'none', contains a list of sources to remove.
#
rule capture-output ( target : source : properties * : targets-to-remove * )
{
    output-file on $(target) = $(target:S=.output) ;
    LOCATE on $(target:S=.output) = [ on $(target) return $(LOCATE) ] ;
    # The INCLUDES kill a warning about an independent target...
    INCLUDES $(target) : $(target:S=.output) ;
    # ...but it also puts .output into the dependency graph, so we must tell
    # jam it is OK if it cannot find the target or an updating rule.
    NOCARE $(target:S=.output) ;
    # This has a two-fold effect. First it adds input files to the dependency
    # graph, preventing a warning. Second, it causes input files to be bound
    # before the target is created. Therefore, they are bound using the SEARCH
    # setting on them and not the LOCATE setting of $(target), as in the other
    # case (due to a jam bug).
    DEPENDS $(target) : [ on $(target) return $(INPUT_FILES) ] ;
    if $(targets-to-remove) = none
    {
        targets-to-remove = ;
    }
    else if ! $(targets-to-remove)
    {
        targets-to-remove = $(source) ;
    }
    run-path-setup $(target) : $(source) : $(properties) ;
    if [ feature.get-values preserve-test-targets : $(properties) ] = off
    {
        TEMPORARY $(targets-to-remove) ;
        # Set a second action on the target that will be executed after the
        # capture-output action. The 'RmTemps' rule has the 'ignore' modifier
        # so it is always considered to have succeeded. This is needed for the
        # 'run-fail' test: for that test the target will be marked with
        # FAIL_EXPECTED, and without 'ignore' a successful execution would be
        # negated and reported as a failure. With 'ignore' we do not detect a
        # case where removing the files fails, but that is not likely to
        # happen.
        RmTemps $(target) : $(targets-to-remove) ;
    }
}
# Platform-specific shell fragments used to assemble the capture-output
# action below: cmd.exe syntax on Windows (NT), POSIX sh syntax elsewhere.
if [ os.name ] = NT
{
    .STATUS = %status% ;
    .SET_STATUS = "set status=%ERRORLEVEL%" ;
    .RUN_OUTPUT_NL = "echo." ;
    .STATUS_0 = "%status% EQU 0 (" ;
    .STATUS_NOT_0 = "%status% NEQ 0 (" ;
    .VERBOSE = "%verbose% EQU 1 (" ;
    .ENDIF = ")" ;
    .SHELL_SET = "set " ;
    .CATENATE = type ;
    .CP = copy ;
}
else
{
    .STATUS = "$status" ;
    .SET_STATUS = "status=$?" ;
    .RUN_OUTPUT_NL = "echo" ;
    .STATUS_0 = "test $status -eq 0 ; then" ;
    .STATUS_NOT_0 = "test $status -ne 0 ; then" ;
    .VERBOSE = "test $verbose -eq 1 ; then" ;
    .ENDIF = "fi" ;
    .SHELL_SET = "" ;
    .CATENATE = cat ;
    .CP = cp ;
}
# Show the test program's output when --verbose-test is given on the command
# line (otherwise it is shown only on failure; see the action below).
.VERBOSE_TEST = 0 ;
if --verbose-test in [ modules.peek : ARGV ]
{
    .VERBOSE_TEST = 1 ;
}
# Platform-appropriate file removal command, used by RmTemps.
.RM = [ common.rm-command ] ;
# Runs the test executable, captures its stdout+stderr and exit status into
# the .output file, copies that to the target on success, echoes the output
# on failure (or always with --verbose-test), and propagates the original
# exit status back to jam. No comments inside the body: every line is passed
# to the platform shell (sh or cmd.exe).
actions capture-output bind INPUT_FILES output-file
{
    $(PATH_SETUP)
    $(LAUNCHER) "$(>)" $(ARGS) "$(INPUT_FILES)" > "$(output-file)" 2>&1
    $(.SET_STATUS)
    $(.RUN_OUTPUT_NL) >> "$(output-file)"
    echo EXIT STATUS: $(.STATUS) >> "$(output-file)"
    if $(.STATUS_0)
    $(.CP) "$(output-file)" "$(<)"
    $(.ENDIF)
    $(.SHELL_SET)verbose=$(.VERBOSE_TEST)
    if $(.STATUS_NOT_0)
    $(.SHELL_SET)verbose=1
    $(.ENDIF)
    if $(.VERBOSE)
    echo ====== BEGIN OUTPUT ======
    $(.CATENATE) "$(output-file)"
    echo ====== END OUTPUT ======
    $(.ENDIF)
    exit $(.STATUS)
}
# Second action on the test target: removes temporary test targets. Runs
# quietly after the target is updated; failures are deliberately ignored
# (see the comment in the capture-output rule).
actions quietly updated ignore piecemeal together RmTemps
{
    $(.RM) "$(>)"
}
# Command used to create the (empty) marker file for a passed unit test.
.MAKE_FILE = [ common.file-creation-command ] ;
# Expose launcher and arguments to the unit-test action.
toolset.flags testing.unit-test LAUNCHER <testing.launcher> ;
toolset.flags testing.unit-test ARGS <testing.arg> ;
# Prepares the unit-test run: makes sure shared libraries the test depends on
# can be found when it is executed (see run-path-setup).
rule unit-test ( target : source : properties * )
{
    run-path-setup $(target) : $(source) : $(properties) ;
}
# Runs the test program and, only if it succeeds, creates the marker file
# signifying a passed test.
actions unit-test
{
    $(PATH_SETUP)
    $(LAUNCHER) $(>) $(ARGS) && $(.MAKE_FILE) $(<)
}
# Export the main target rules to the global module so Jamfiles can use them
# without qualification.
IMPORT $(__name__) : compile compile-fail run run-fail link link-fail
    : : compile compile-fail run run-fail link link-fail ;
# Target type and generator used by the build-time measurement 'time' rule.
type.register TIME : time ;
generators.register-standard testing.time : : TIME ;
# Invoked by the engine via the __TIMING_RULE__ hook (set up by 'time' below)
# to accumulate the user and system time spent building 'source' in variables
# attached to 'target'.
rule record-time ( target : source : start end user system )
{
    # "[source] " prefix identifying which source the times belong to.
    local src-string = [$(source:G=:J=",")"] " ;
    USER_TIME on $(target) += $(src-string)$(user) ;
    SYSTEM_TIME on $(target) += $(src-string)$(system) ;
}
IMPORT testing : record-time : : testing.record-time ;
# Calling this rule requests that Boost Build time how long it takes to build
# the 'source' target, and display the results both on standard output and in
# the 'target' file.
#
rule time ( target : source : properties * )
{
    # Set up the rule for recording timing information.
    __TIMING_RULE__ on $(source) = testing.record-time $(target) ;
    # Make sure that the source is rebuilt any time we need to retrieve that
    # information.
    REBUILDS $(target) : $(source) ;
}
# Action body: report the accumulated build times (set up by record-time via
# the __TIMING_RULE__ hook) on stdout and write them into the target file.
actions time
{
    echo user: $(USER_TIME)
    echo system: $(SYSTEM_TIME)
    echo user: $(USER_TIME)" seconds" > "$(<)"
    echo system: $(SYSTEM_TIME)" seconds" >> "$(<)"
}

View File

@ -0,0 +1,4 @@
# Copyright Craig Rodrigues 2005. Distributed under the Boost
# Software License, Version 1.0. (See accompanying
# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)

# Register the assembler source type, recognized by the .s, .S and .asm
# suffixes.
type ASM : s S asm ;

View File

@ -0,0 +1,86 @@
# Copyright David Abrahams 2004.
# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
# Copyright 2010 Rene Rivera
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
import type ;
import scanner ;
# Scanner extracting #include dependencies from C/C++ sources and headers so
# that targets get rebuilt whenever an included header changes.
class c-scanner : scanner
{
    import path ;
    import regex ;
    import scanner ;
    import sequence ;
    import virtual-target ;

    # includes - directories to search for included headers; each element may
    # contain several paths separated by "&&".
    rule __init__ ( includes * )
    {
        scanner.__init__ ;
        for local i in $(includes)
        {
            self.includes += [ sequence.transform path.native
                : [ regex.split $(i:G=) "&&" ] ] ;
        }
    }

    # Regular expression matching both <...> and "..." include directives.
    rule pattern ( )
    {
        return "#[ \t]*include[ ]*(<(.*)>|\"(.*)\")" ;
    }

    # Called for each match of 'pattern' in 'target'; 'matches' holds the
    # captured header names and 'binding' the including file's path.
    rule process ( target : matches * : binding )
    {
        # Split matches into angle-bracket and quoted includes.
        local angle = [ regex.transform $(matches) : "<(.*)>" ] ;
        angle = [ sequence.transform path.native : $(angle) ] ;
        local quoted = [ regex.transform $(matches) : "\"(.*)\"" ] ;
        quoted = [ sequence.transform path.native : $(quoted) ] ;
        # CONSIDER: the new scoping rule seem to defeat "on target" variables.
        local g = [ on $(target) return $(HDRGRIST) ] ;
        local b = [ NORMALIZE_PATH $(binding:D) ] ;
        # Attach binding of including file to included targets. When a target
        # is directly created from a virtual target this extra information is
        # unnecessary. But in other cases, it allows us to distinguish between
        # two headers of the same name included from different places. We do
        # not need this extra information for angle includes, since they
        # should not depend on the including file (we can not get a literal
        # "." in the include path).
        local g2 = $(g)"#"$(b) ;
        angle = $(angle:G=$(g)) ;
        quoted = $(quoted:G=$(g2)) ;
        local all = $(angle) $(quoted) ;
        INCLUDES $(target) : $(all) ;
        NOCARE $(all) ;
        # Quoted includes are additionally searched relative to the including
        # file's directory.
        SEARCH on $(angle) = $(self.includes:G=) ;
        SEARCH on $(quoted) = $(b) $(self.includes:G=) ;
        # Just propagate the current scanner to includes in hope that includes
        # do not change scanners.
        scanner.propagate $(__name__) : $(angle) $(quoted) : $(target) ;
        ISFILE $(angle) $(quoted) ;
    }
}
# The scanner activates when an "include" feature is present.
scanner.register c-scanner : include ;

# Register C/C++ source and header types.
type.register CPP : cpp cxx cc ;
type.register H : h ;
type.register HPP : hpp : H ;
type.register C : c ;

# In most cases where a CPP file or an H file is a source of some action, we
# should rebuild the result if any of the files included by CPP/H change. One
# case when this is not needed is installation, which is handled specifically.
type.set-scanner CPP : c-scanner ;
type.set-scanner C : c-scanner ;
# One case where scanning of H/HPP files is necessary is PCH generation: if
# any header included by an HPP being precompiled changes, we need to
# recompile the header.
type.set-scanner H : c-scanner ;
type.set-scanner HPP : c-scanner ;

View File

@ -0,0 +1,9 @@
# Copyright David Abrahams 2004. Distributed under the Boost
# Software License, Version 1.0. (See accompanying
# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
import type ;
# Register the executable target type. Windows and Cygwin executables get the
# ".exe" suffix; other platforms get none.
type.register EXE ;
type.set-generated-target-suffix EXE : <target-os>windows : "exe" ;
type.set-generated-target-suffix EXE : <target-os>cygwin : "exe" ;

View File

@ -0,0 +1,4 @@
# Copyright David Abrahams 2004. Distributed under the Boost
# Software License, Version 1.0. (See accompanying
# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
# Register the HTML document type, recognized by the ".html" suffix.
type HTML : html ;

Some files were not shown because too many files have changed in this diff Show More