Imported Debian patch 0.4.3+dfsg-0.1

This commit is contained in:
Koichi Akabe 2012-11-24 17:33:35 +09:00 committed by Ryan Niebur
commit a9def3c602
74 changed files with 11948 additions and 11 deletions

11
debian/changelog vendored
View file

@@ -1,3 +1,14 @@
midori (0.4.3+dfsg-0.1) unstable; urgency=low
* Non-maintainer upload
* Repack to extract waf script (Closes: #645191)
* debian/waf-unpack
- add to describe how to extract waf script
* debian/rules
- add get-orig-source target
 -- Koichi Akabe <vbkaisetsu@gmail.com>  Sat, 24 Nov 2012 17:33:35 +0900
midori (0.4.3-1) unstable; urgency=low
* New upstream release.

9
debian/gbp.conf vendored
View file

@@ -1,9 +0,0 @@
[git-import-orig]
upstream-branch=upstream-unstable
debian-branch=master
upstream-tag=%(version)s
[git-import-dsc]
upstream-branch=upstream-unstable
debian-branch=master
upstream-tag=%(version)s

18
debian/rules vendored
View file

@@ -23,7 +23,22 @@ endif
# Catch-all rule: delegate every debhelper sequence target to dh.
%:
dh $@
# Upstream-shipped waf binary blob (removed from the +dfsg repack below).
WAF=./waf
# Upstream version from debian/changelog with any +dfsg suffix stripped,
# e.g. "0.4.3+dfsg-0.1" -> "0.4.3".
upstream_version ?= $(shell dpkg-parsechangelog | sed -rne 's/^Version: ([0-9.]+)(\+dfsg)?.*$$/\1/p')
dfsg_version = $(upstream_version)+dfsg
upstream_pkg = "midori"
# Source package name from debian/changelog.
pkg = $(shell dpkg-parsechangelog | sed -ne 's/^Source: //p')
# Build the repacked +dfsg orig tarball: fetch the upstream tarball with
# uscan, unpack it, extract the waf payload via debian/waf-unpack, delete
# the opaque waf blob, and re-compress the tree one directory up.
# NOTE(review): recipe tabs are not visible in this diff rendering —
# confirm every command line below is tab-indented in the real file.
get-orig-source:
uscan --rename --download-current-version --destdir=.
tar -xjf $(pkg)_$(upstream_version).orig.tar.bz2
rm -f $(pkg)_$(upstream_version).orig.tar.bz2
mv $(upstream_pkg)-$(upstream_version) $(pkg)_$(dfsg_version).orig
$(CURDIR)/debian/waf-unpack $(pkg)_$(dfsg_version).orig
rm $(pkg)_$(dfsg_version).orig/waf
tar -cjf $(CURDIR)/../$(pkg)_$(dfsg_version).orig.tar.bz2 $(pkg)_$(dfsg_version).orig
rm -r $(pkg)_$(dfsg_version).orig
# In the repacked tree, run the extracted waf entry point with its
# unpacked module directory instead of the original binary blob.
WAF=WAFDIR=waf-modules ./waf-unpacked
debian/presubj: debian/presubj.in
@echo "presubj parameters:"
@@ -42,6 +57,7 @@ override_dh_auto_clean:
# Remove waf's build directory and per-run lock/cache files.
rm -rf _build_
rm -rf .waf*
rm -rf .lock-wscript
# Also drop compiled Python files from the unpacked waf modules so the
# tree returns to its pristine repacked state.
rm -rf `find waf-modules -name "*.pyc"`
override_dh_auto_configure:
$(WAF) --nocache configure --debug-level=none --prefix /usr

View file

@@ -1 +0,0 @@
unapply-patches

47
debian/waf-unpack vendored Executable file
View file

@@ -0,0 +1,47 @@
#!/usr/bin/env python
# This program extracts waf-binary
#
# Please refer the following link for more details:
# http://wiki.debian.org/UnpackWaf
#
# The waf script is a Python program followed by an embedded payload
# delimited by "#==>" / "#<==" marker lines.  This script splits the two
# parts: the pure-Python portion is written out as an unpacking driver
# ("wafunp.py"), while the payload markers and payload are appended to a
# backup copy of this very file so nothing is lost.
#
# NOTE(review): leading indentation was lost in this diff rendering; the
# structure below (for/if bodies) must be re-indented in the real file.
from os import path, rename, remove, chmod
import sys
import shutil
# Directory containing debian/ (parent of this script's directory).
basedir = path.join(path.dirname(path.abspath(__file__)), "..")
# First argument: the unpacked upstream source tree holding "waf".
targetdir = sys.argv[1]
skip = False
waf = open(path.join(targetdir, "waf"), "r")
unp = open(path.join(basedir, "debian/wafunp.py"), "w")
# Keep a pristine backup of this script before appending payload lines.
shutil.copyfile(path.join(basedir, "debian/waf-unpack"),
path.join(basedir, "debian/waf-unpack.bup"))
thisfile = open(path.join(basedir, "debian/waf-unpack"), "a")
# Split waf: lines between "#==>" and "#<==" (the binary payload) go to
# this file's appended copy; everything else is the Python driver.
for l in waf:
if l == "#==>\n":
skip = True
thisfile.write(l)
elif l == "#<==\n":
skip = False
thisfile.write(l)
elif not skip:
unp.write(l)
else:
thisfile.write(l)
waf.close()
unp.close()
thisfile.close()
# Importing the extracted driver runs waf's self-unpack, which creates a
# hidden ".waf-VERSION-REVISION" module directory next to it.
import wafunp
rename(path.join(basedir, "debian/.%s-%s-%s" % (wafunp.WAF, wafunp.VERSION, wafunp.REVISION)), path.join(targetdir, "waf-modules"))
# Restore this script from the backup taken above.
rename(path.join(basedir, "debian/waf-unpack.bup"), path.join(basedir, "debian/waf-unpack"))
chmod(path.join(basedir, "debian/waf-unpack"), 0744)
# Clean up the leftover compressed payload and compiled driver module.
remove(path.join(targetdir, "waf-modules/t.bz2"))
remove(path.join(basedir, "debian/wafunp.pyc"))
# Install the pure-Python part as the new executable waf entry point.
rename(path.join(basedir, "debian/wafunp.py"), path.join(targetdir, "waf-unpacked"))
chmod(path.join(targetdir, "waf-unpacked"), 0744)

1
debian/watch vendored
View file

@@ -1,2 +1,3 @@
version=3
opts=dversionmangle=s/\+dfsg// \
http://archive.xfce.org/src/apps/midori/([\d\.]+)/midori-([\d.]+).tar.bz2

BIN
waf vendored

Binary file not shown.

225
waf-modules/wafadmin/3rdparty/boost.py vendored Normal file
View file

@@ -0,0 +1,225 @@
#! /usr/bin/env python
# encoding: utf-8
import os.path,glob,types,re,sys
import Configure,config_c,Options,Utils,Logs
from Logs import warn,debug
from Configure import conf
boost_code='''
#include <iostream>
#include <boost/version.hpp>
int main() { std::cout << BOOST_VERSION << std::endl; }
'''
boost_libpath=['/usr/lib','/usr/local/lib','/opt/local/lib','/sw/lib','/lib']
boost_cpppath=['/usr/include','/usr/local/include','/opt/local/include','/sw/include']
STATIC_NOSTATIC='nostatic'
STATIC_BOTH='both'
STATIC_ONLYSTATIC='onlystatic'
is_versiontag=re.compile('^\d+_\d+_?\d*$')
is_threadingtag=re.compile('^mt$')
is_abitag=re.compile('^[sgydpn]+$')
is_toolsettag=re.compile('^(acc|borland|como|cw|dmc|darwin|gcc|hp_cxx|intel|kylix|vc|mgw|qcc|sun|vacpp)\d*$')
is_pythontag=re.compile('^py[0-9]{2}$')
def set_options(opt):
    """Register the boost-specific command line flags on *opt*."""
    opt.add_option('--boost-includes', type='string', default='',
                   dest='boostincludes',
                   help='path to the boost directory where the includes are e.g. /usr/local/include/boost-1_35')
    opt.add_option('--boost-libs', type='string', default='',
                   dest='boostlibs',
                   help='path to the directory where the boost libs are e.g. /usr/local/lib')
def string_to_version(s):
    """Encode a dotted version string "x.y.z" as a comparable integer.

    Returns 0 when fewer than three dot-separated components are present;
    components beyond the third are ignored.
    """
    parts = s.split('.')
    if len(parts) < 3:
        return 0
    return int(parts[0]) * 100000 + int(parts[1]) * 100 + int(parts[2])
def version_string(version):
    """Inverse of string_to_version(): render an encoded version integer
    as a boost directory tag ("1_40" or "1_40_1").

    The patch level is omitted when zero, matching boost's include
    directory naming convention.

    Fix: the original used ``/`` for the major/minor extraction, which is
    floor division only under Python 2; under Python 3 it yields floats.
    divmod keeps the arithmetic in exact integers on both.
    """
    major, rest = divmod(version, 100000)
    minor, minor_minor = divmod(rest, 100)
    if minor_minor == 0:
        return "%d_%d" % (major, minor)
    return "%d_%d_%d" % (major, minor, minor_minor)
def libfiles(lib, pattern, lib_paths):
    """Glob for candidate boost library files.

    *pattern* is a platform library-name template (e.g. "lib%s.so"); the
    glob matches files named boost_<lib> followed by one non-underscore
    character, in each directory of *lib_paths*.
    """
    libname = pattern % ('boost_%s[!_]*' % lib)
    matches = []
    for base in lib_paths:
        matches.extend(glob.glob(os.path.join(base, libname)))
    return matches
# Compile and run a tiny C++ program that prints BOOST_VERSION with the
# include directory under test; the printed number becomes the result.
def get_boost_version_number(self,dir):
try:
return self.run_c_code(compiler='cxx',code=boost_code,includes=dir,execute=1,env=self.env.copy(),type='cprogram',compile_mode='cxx',compile_filename='test.cpp')
# Python 2 except syntax; -1 signals "headers unusable at this path".
except Configure.ConfigurationError,e:
return-1
def set_default(kw, var, val):
    """Install *val* under key *var* unless the caller already set one."""
    kw.setdefault(var, val)
# Score how well the name tags of a candidate boost library file match the
# tags requested in kw.  A matching tag adds kw['score_<name>'][0]; a
# mismatch adds the (typically negative) kw['score_<name>'][1]; absent
# tags contribute nothing.
def tags_score(tags,kw):
score=0
needed_tags={'threading':kw['tag_threading'],'abi':kw['tag_abi'],'toolset':kw['tag_toolset'],'version':kw['tag_version'],'python':kw['tag_python']}
# No explicit toolset requested: derive one from the configured compiler
# name plus the first two components of its version ("gcc" + "4" + "4").
if kw['tag_toolset']is None:
v=kw['env']
toolset=v['CXX_NAME']
if v['CXX_VERSION']:
version_no=v['CXX_VERSION'].split('.')
toolset+=version_no[0]
if len(version_no)>1:
toolset+=version_no[1]
needed_tags['toolset']=toolset
# Classify each tag in the file name using the module-level regexes
# (is_versiontag, is_threadingtag, ...); later matches overwrite earlier.
found_tags={}
for tag in tags:
if is_versiontag.match(tag):found_tags['version']=tag
if is_threadingtag.match(tag):found_tags['threading']=tag
if is_abitag.match(tag):found_tags['abi']=tag
if is_toolsettag.match(tag):found_tags['toolset']=tag
if is_pythontag.match(tag):found_tags['python']=tag
# Python 2 idiom (dict.iterkeys).  Each requested tag is itself treated
# as a regex and matched against the tag found in the file name.
for tagname in needed_tags.iterkeys():
if needed_tags[tagname]is not None and tagname in found_tags:
if re.compile(needed_tags[tagname]).match(found_tags[tagname]):
score+=kw['score_'+tagname][0]
else:
score+=kw['score_'+tagname][1]
return score
# Normalize the keyword arguments of check_boost(): fill in every default
# so the later lookup code can index kw unconditionally.
def validate_boost(self,kw):
ver=kw.get('version','')
# 'version' doubles as the default for both min and max bounds.
for x in'min_version max_version version'.split():
set_default(kw,x,ver)
set_default(kw,'lib','')
kw['lib']=Utils.to_list(kw['lib'])
set_default(kw,'env',self.env)
set_default(kw,'libpath',boost_libpath)
set_default(kw,'cpppath',boost_cpppath)
for x in'tag_threading tag_version tag_toolset'.split():
set_default(kw,x,None)
# Default abi tag: anything without 'd' (i.e. exclude debug builds).
set_default(kw,'tag_abi','^[^d]*$')
# Current interpreter version, e.g. "27" — Python 2 era encoding.
set_default(kw,'python',str(sys.version_info[0])+str(sys.version_info[1]))
set_default(kw,'tag_python','^py'+kw['python']+'$')
# (match, mismatch) score pairs consumed by tags_score().
set_default(kw,'score_threading',(10,-10))
set_default(kw,'score_abi',(10,-10))
set_default(kw,'score_python',(10,-10))
set_default(kw,'score_toolset',(1,-1))
set_default(kw,'score_version',(100,-100))
set_default(kw,'score_min',0)
set_default(kw,'static',STATIC_NOSTATIC)
set_default(kw,'found_includes',False)
set_default(kw,'min_score',0)
set_default(kw,'errmsg','not found')
set_default(kw,'okmsg','ok')
# Locate the newest boost header directory within the allowed version
# range.  Candidate directories are "boost" or "boost-*" under each search
# path; the version is read by compiling a BOOST_VERSION probe.
def find_boost_includes(self,kw):
# --boost-includes on the command line overrides the search path list.
boostPath=getattr(Options.options,'boostincludes','')
if boostPath:
boostPath=[os.path.normpath(os.path.expandvars(os.path.expanduser(boostPath)))]
else:
boostPath=Utils.to_list(kw['cpppath'])
min_version=string_to_version(kw.get('min_version',''))
# sys.maxint is Python 2 only; used as "no upper bound".
max_version=string_to_version(kw.get('max_version',''))or(sys.maxint-1)
version=0
for include_path in boostPath:
boost_paths=[p for p in glob.glob(os.path.join(include_path,'boost*'))if os.path.isdir(p)]
debug('BOOST Paths: %r'%boost_paths)
for path in boost_paths:
pathname=os.path.split(path)[-1]
ret=-1
# A plain "boost" dir means headers live directly under the
# search path; "boost-1_40"-style dirs are probed themselves.
if pathname=='boost':
path=include_path
ret=self.get_boost_version_number(path)
elif pathname.startswith('boost-'):
ret=self.get_boost_version_number(path)
ret=int(ret)
# Keep the highest version that fits inside [min, max].
if ret!=-1 and ret>=min_version and ret<=max_version and ret>version:
boost_path=path
version=ret
if not version:
self.fatal('boost headers not found! (required version min: %s max: %s)'%(kw['min_version'],kw['max_version']))
return False
found_version=version_string(version)
versiontag='^'+found_version+'$'
# The discovered header version constrains the library tag search.
if kw['tag_version']is None:
kw['tag_version']=versiontag
elif kw['tag_version']!=versiontag:
warn('boost header version %r and tag_version %r do not match!'%(versiontag,kw['tag_version']))
env=self.env
env['CPPPATH_BOOST']=boost_path
env['BOOST_VERSION']=found_version
self.found_includes=1
ret='Version %s (%s)'%(found_version,boost_path)
return ret
# Pick the best-matching boost library file for *lib* (e.g. "thread") and
# record its directory and link name in the environment.  Candidates are
# ranked by tags_score(); shared libraries are preferred over static ones
# depending on kw['static'].
def find_boost_library(self,lib,kw):
# Inner helper: choose the highest-scoring file above kw['min_score'].
def find_library_from_list(lib,files):
lib_pattern=re.compile('.*boost_(.*?)\..*')
result=(None,None)
resultscore=kw['min_score']-1
for file in files:
m=lib_pattern.search(file,1)
if m:
libname=m.group(1)
# Tags are the '-'-separated parts after the library name.
libtags=libname.split('-')[1:]
currentscore=tags_score(libtags,kw)
if currentscore>resultscore:
result=(libname,file)
resultscore=currentscore
return result
# --boost-libs on the command line overrides the search path list.
lib_paths=getattr(Options.options,'boostlibs','')
if lib_paths:
lib_paths=[os.path.normpath(os.path.expandvars(os.path.expanduser(lib_paths)))]
else:
lib_paths=Utils.to_list(kw['libpath'])
v=kw.get('env',self.env)
(libname,file)=(None,None)
# First pass: shared libraries (unless static-only was requested).
if kw['static']in[STATIC_NOSTATIC,STATIC_BOTH]:
st_env_prefix='LIB'
files=libfiles(lib,v['shlib_PATTERN'],lib_paths)
(libname,file)=find_library_from_list(lib,files)
# Second pass: static libraries, when allowed and nothing found yet.
if libname is None and kw['static']in[STATIC_ONLYSTATIC,STATIC_BOTH]:
st_env_prefix='STATICLIB'
staticLibPattern=v['staticlib_PATTERN']
if self.env['CC_NAME']=='msvc':
staticLibPattern='lib'+staticLibPattern
files=libfiles(lib,staticLibPattern,lib_paths)
(libname,file)=find_library_from_list(lib,files)
if libname is not None:
v['LIBPATH_BOOST_'+lib.upper()]=[os.path.split(file)[0]]
# MSVC links against the "libboost_*" import name for .lib files.
if self.env['CC_NAME']=='msvc'and os.path.splitext(file)[1]=='.lib':
v[st_env_prefix+'_BOOST_'+lib.upper()]=['libboost_'+libname]
else:
v[st_env_prefix+'_BOOST_'+lib.upper()]=['boost_'+libname]
return
self.fatal('lib boost_'+lib+' not found!')
# Top-level configuration check: find boost headers and any requested
# libraries, reporting progress through the check_message_* helpers.
# Failures are fatal only when kw contains 'mandatory'.
def check_boost(self,*k,**kw):
if not self.env['CXX']:
self.fatal('load a c++ compiler tool first, for example conf.check_tool("g++")')
# Fill every kw default (see validate_boost above).
self.validate_boost(kw)
ret=None
try:
if not kw.get('found_includes',None):
self.check_message_1(kw.get('msg_includes','boost headers'))
ret=self.find_boost_includes(kw)
# Python 2 except syntax throughout this function.
except Configure.ConfigurationError,e:
if'errmsg'in kw:
self.check_message_2(kw['errmsg'],'YELLOW')
if'mandatory'in kw:
if Logs.verbose>1:
raise
else:
self.fatal('the configuration failed (see %r)'%self.log.name)
else:
if'okmsg'in kw:
self.check_message_2(kw.get('okmsg_includes',ret))
# Each requested library gets its own check/report cycle.
for lib in kw['lib']:
self.check_message_1('library boost_'+lib)
try:
self.find_boost_library(lib,kw)
except Configure.ConfigurationError,e:
ret=False
if'errmsg'in kw:
self.check_message_2(kw['errmsg'],'YELLOW')
if'mandatory'in kw:
if Logs.verbose>1:
raise
else:
self.fatal('the configuration failed (see %r)'%self.log.name)
else:
if'okmsg'in kw:
self.check_message_2(kw['okmsg'])
return ret
# Register the functions above as methods on the configuration context.
conf(get_boost_version_number)
conf(validate_boost)
conf(find_boost_includes)
conf(find_boost_library)
conf(check_boost)

17
waf-modules/wafadmin/3rdparty/fluid.py vendored Normal file
View file

@@ -0,0 +1,17 @@
#! /usr/bin/env python
# encoding: utf-8
import Task
from TaskGen import extension
# Task type that runs FLTK's fluid to turn a .fl file into a .cpp/.hpp pair.
Task.simple_task_type('fluid','${FLUID} -c -o ${TGT[0].abspath(env)} -h ${TGT[1].abspath(env)} ${SRC}','BLUE',shell=False,ext_out='.cxx')
# Extension handler: schedule a fluid task for each .fl source node and, if
# the task generator also compiles C++, feed the generated .cpp back in.
def fluid(self,node):
cpp=node.change_ext('.cpp')
hpp=node.change_ext('.hpp')
self.create_task('fluid',node,[cpp,hpp])
if'cxx'in self.features:
self.allnodes.append(cpp)
# Configuration: require the fluid binary and FLTK build flags.
def detect(conf):
fluid=conf.find_program('fluid',var='FLUID',mandatory=True)
conf.check_cfg(path='fltk-config',package='',args='--cxxflags --ldflags',uselib_store='FLTK',mandatory=True)
# Bind the handler above to the .fl extension.
extension('.fl')(fluid)

View file

@@ -0,0 +1,676 @@
#! /usr/bin/env python
# encoding: utf-8
import sys
if sys.hexversion < 0x020400f0: from sets import Set as set
import os,sys,errno,re,glob,gc,datetime,shutil
try:import cPickle
except:import pickle as cPickle
import Runner,TaskGen,Node,Scripting,Utils,Environment,Task,Logs,Options
from Logs import debug,error,info
from Constants import*
SAVED_ATTRS='root srcnode bldnode node_sigs node_deps raw_deps task_sigs id_nodes'.split()
bld=None
# Raised when one or more tasks fail during a build; wraps the failed task
# list so the message can aggregate every per-task error text.
class BuildError(Utils.WafError):
def __init__(self,b=None,t=[]):
self.bld=b
self.tasks=t
# Process exit status for the failed build.
self.ret=1
Utils.WafError.__init__(self,self.format_error())
# Join the per-task error messages; multiple messages go on separate
# lines, a single one stays on the same line as the heading.
def format_error(self):
lst=['Build failed:']
for tsk in self.tasks:
txt=tsk.format_error()
if txt:lst.append(txt)
sep=' '
if len(lst)>2:
sep='\n'
return sep.join(lst)
# Decorator for install-like methods (install_files/install_as/symlink_as):
# the wrapped call is a no-op outside install/uninstall runs, and by
# default is postponed — queued as a post-function of the current task
# group — rather than executed immediately.
def group_method(fun):
def f(*k,**kw):
# k[0] is the BuildContext; skip entirely when not installing.
if not k[0].is_install:
return False
postpone=True
if'postpone'in kw:
postpone=kw['postpone']
del kw['postpone']
if postpone:
m=k[0].task_manager
if not m.groups:m.add_group()
# Remember the call (with its working dir) for group post-time.
m.groups[m.current_group].post_funs.append((fun,k,kw))
if not'cwd'in kw:
kw['cwd']=k[0].path
else:
fun(*k,**kw)
return f
class BuildContext(Utils.Context):
def __init__(self):
global bld
bld=self
self.task_manager=Task.TaskManager()
self.id_nodes=0
self.idx={}
self.all_envs={}
self.bdir=''
self.path=None
self.deps_man=Utils.DefaultDict(list)
self.cache_node_abspath={}
self.cache_scanned_folders={}
self.uninstall=[]
for v in'cache_node_abspath task_sigs node_deps raw_deps node_sigs'.split():
var={}
setattr(self,v,var)
self.cache_dir_contents={}
self.all_task_gen=[]
self.task_gen_cache_names={}
self.cache_sig_vars={}
self.log=None
self.root=None
self.srcnode=None
self.bldnode=None
class node_class(Node.Node):
pass
self.node_class=node_class
self.node_class.__module__="Node"
self.node_class.__name__="Nodu"
self.node_class.bld=self
self.is_install=None
def __copy__(self):
raise Utils.WafError('build contexts are not supposed to be cloned')
def load(self):
try:
env=Environment.Environment(os.path.join(self.cachedir,'build.config.py'))
except(IOError,OSError):
pass
else:
if env['version']<HEXVERSION:
raise Utils.WafError('Version mismatch! reconfigure the project')
for t in env['tools']:
self.setup(**t)
try:
gc.disable()
f=data=None
Node.Nodu=self.node_class
try:
f=open(os.path.join(self.bdir,DBFILE),'rb')
except(IOError,EOFError):
pass
try:
if f:data=cPickle.load(f)
except AttributeError:
if Logs.verbose>1:raise
if data:
for x in SAVED_ATTRS:setattr(self,x,data[x])
else:
debug('build: Build cache loading failed')
finally:
if f:f.close()
gc.enable()
# Persist the build state (SAVED_ATTRS) to the pickle DB file.  The write
# is atomic: dump to DBFILE.tmp, then rename over the old database.  GC is
# disabled around the dump — presumably for pickling speed; the class-level
# bld backref is detached so it is not pickled along.
def save(self):
gc.disable()
self.root.__class__.bld=None
Node.Nodu=self.node_class
db=os.path.join(self.bdir,DBFILE)
file=open(db+'.tmp','wb')
data={}
for x in SAVED_ATTRS:data[x]=getattr(self,x)
cPickle.dump(data,file,-1)
file.close()
# os.rename cannot replace an existing file on some platforms, so the
# old database is removed first (best effort).
try:os.unlink(db)
except OSError:pass
os.rename(db+'.tmp',db)
self.root.__class__.bld=self
gc.enable()
def clean(self):
debug('build: clean called')
precious=set([])
for env in self.all_envs.values():
for x in env[CFG_FILES]:
node=self.srcnode.find_resource(x)
if node:
precious.add(node.id)
def clean_rec(node):
for x in list(node.childs.keys()):
nd=node.childs[x]
tp=nd.id&3
if tp==Node.DIR:
clean_rec(nd)
elif tp==Node.BUILD:
if nd.id in precious:continue
for env in self.all_envs.values():
try:os.remove(nd.abspath(env))
except OSError:pass
node.childs.__delitem__(x)
clean_rec(self.srcnode)
for v in'node_sigs node_deps task_sigs raw_deps cache_node_abspath'.split():
setattr(self,v,{})
def compile(self):
debug('build: compile called')
self.flush()
self.generator=Runner.Parallel(self,Options.options.jobs)
def dw(on=True):
if Options.options.progress_bar:
if on:sys.stderr.write(Logs.colors.cursor_on)
else:sys.stderr.write(Logs.colors.cursor_off)
debug('build: executor starting')
back=os.getcwd()
os.chdir(self.bldnode.abspath())
try:
try:
dw(on=False)
self.generator.start()
except KeyboardInterrupt:
dw()
self.save()
raise
except Exception:
dw()
raise
else:
dw()
self.save()
if self.generator.error:
raise BuildError(self,self.task_manager.tasks_done)
finally:
os.chdir(back)
def install(self):
debug('build: install called')
self.flush()
if self.is_install<0:
lst=[]
for x in self.uninstall:
dir=os.path.dirname(x)
if not dir in lst:lst.append(dir)
lst.sort()
lst.reverse()
nlst=[]
for y in lst:
x=y
while len(x)>4:
if not x in nlst:nlst.append(x)
x=os.path.dirname(x)
nlst.sort()
nlst.reverse()
for x in nlst:
try:os.rmdir(x)
except OSError:pass
def new_task_gen(self,*k,**kw):
if self.task_gen_cache_names:
self.task_gen_cache_names={}
kw['bld']=self
if len(k)==0:
ret=TaskGen.task_gen(*k,**kw)
else:
cls_name=k[0]
try:cls=TaskGen.task_gen.classes[cls_name]
except KeyError:raise Utils.WscriptError('%s is not a valid task generator -> %s'%(cls_name,[x for x in TaskGen.task_gen.classes]))
ret=cls(*k,**kw)
return ret
def __call__(self,*k,**kw):
if self.task_gen_cache_names:
self.task_gen_cache_names={}
kw['bld']=self
return TaskGen.task_gen(*k,**kw)
def load_envs(self):
try:
lst=Utils.listdir(self.cachedir)
except OSError,e:
if e.errno==errno.ENOENT:
raise Utils.WafError('The project was not configured: run "waf configure" first!')
else:
raise
if not lst:
raise Utils.WafError('The cache directory is empty: reconfigure the project')
for file in lst:
if file.endswith(CACHE_SUFFIX):
env=Environment.Environment(os.path.join(self.cachedir,file))
name=file[:-len(CACHE_SUFFIX)]
self.all_envs[name]=env
self.init_variants()
for env in self.all_envs.values():
for f in env[CFG_FILES]:
newnode=self.path.find_or_declare(f)
try:
hash=Utils.h_file(newnode.abspath(env))
except(IOError,AttributeError):
error("cannot find "+f)
hash=SIG_NIL
self.node_sigs[env.variant()][newnode.id]=hash
self.bldnode=self.root.find_dir(self.bldnode.abspath())
self.path=self.srcnode=self.root.find_dir(self.srcnode.abspath())
self.cwd=self.bldnode.abspath()
def setup(self,tool,tooldir=None,funs=None):
if isinstance(tool,list):
for i in tool:self.setup(i,tooldir)
return
if not tooldir:tooldir=Options.tooldir
module=Utils.load_tool(tool,tooldir)
if hasattr(module,"setup"):module.setup(self)
def init_variants(self):
debug('build: init variants')
lstvariants=[]
for env in self.all_envs.values():
if not env.variant()in lstvariants:
lstvariants.append(env.variant())
self.lst_variants=lstvariants
debug('build: list of variants is %r',lstvariants)
for name in lstvariants+[0]:
for v in'node_sigs cache_node_abspath'.split():
var=getattr(self,v)
if not name in var:
var[name]={}
def load_dirs(self,srcdir,blddir,load_cache=1):
assert(os.path.isabs(srcdir))
assert(os.path.isabs(blddir))
self.cachedir=os.path.join(blddir,CACHE_DIR)
if srcdir==blddir:
raise Utils.WafError("build dir must be different from srcdir: %s <-> %s "%(srcdir,blddir))
self.bdir=blddir
self.load()
if not self.root:
Node.Nodu=self.node_class
self.root=Node.Nodu('',None,Node.DIR)
if not self.srcnode:
self.srcnode=self.root.ensure_dir_node_from_path(srcdir)
debug('build: srcnode is %s and srcdir %s',self.srcnode.name,srcdir)
self.path=self.srcnode
try:os.makedirs(blddir)
except OSError:pass
if not self.bldnode:
self.bldnode=self.root.ensure_dir_node_from_path(blddir)
self.init_variants()
def rescan(self,src_dir_node):
if self.cache_scanned_folders.get(src_dir_node.id,None):return
self.cache_scanned_folders[src_dir_node.id]=True
if hasattr(self,'repository'):self.repository(src_dir_node)
if not src_dir_node.name and sys.platform=='win32':
return
parent_path=src_dir_node.abspath()
try:
lst=set(Utils.listdir(parent_path))
except OSError:
lst=set([])
self.cache_dir_contents[src_dir_node.id]=lst
cache=self.node_sigs[0]
for x in src_dir_node.childs.values():
if x.id&3!=Node.FILE:continue
if x.name in lst:
try:
cache[x.id]=Utils.h_file(x.abspath())
except IOError:
raise Utils.WafError('The file %s is not readable or has become a dir'%x.abspath())
else:
try:del cache[x.id]
except KeyError:pass
del src_dir_node.childs[x.name]
h1=self.srcnode.height()
h2=src_dir_node.height()
lst=[]
child=src_dir_node
while h2>h1:
lst.append(child.name)
child=child.parent
h2-=1
lst.reverse()
try:
for variant in self.lst_variants:
sub_path=os.path.join(self.bldnode.abspath(),variant,*lst)
self.listdir_bld(src_dir_node,sub_path,variant)
except OSError:
for node in src_dir_node.childs.values():
if node.id&3!=Node.BUILD:
continue
for dct in self.node_sigs.values():
if node.id in dct:
dct.__delitem__(node.id)
src_dir_node.childs.__delitem__(node.name)
for variant in self.lst_variants:
sub_path=os.path.join(self.bldnode.abspath(),variant,*lst)
try:
os.makedirs(sub_path)
except OSError:
pass
def listdir_src(self,parent_node):
pass
def remove_node(self,node):
pass
def listdir_bld(self,parent_node,path,variant):
i_existing_nodes=[x for x in parent_node.childs.values()if x.id&3==Node.BUILD]
lst=set(Utils.listdir(path))
node_names=set([x.name for x in i_existing_nodes])
remove_names=node_names-lst
ids_to_remove=[x.id for x in i_existing_nodes if x.name in remove_names]
cache=self.node_sigs[variant]
for nid in ids_to_remove:
if nid in cache:
cache.__delitem__(nid)
def get_env(self):
return self.env_of_name('default')
def set_env(self,name,val):
self.all_envs[name]=val
env=property(get_env,set_env)
def add_manual_dependency(self,path,value):
if isinstance(path,Node.Node):
node=path
elif os.path.isabs(path):
node=self.root.find_resource(path)
else:
node=self.path.find_resource(path)
self.deps_man[node.id].append(value)
def launch_node(self):
try:
return self.p_ln
except AttributeError:
self.p_ln=self.root.find_dir(Options.launch_dir)
return self.p_ln
def glob(self,pattern,relative=True):
path=self.path.abspath()
files=[self.root.find_resource(x)for x in glob.glob(path+os.sep+pattern)]
if relative:
files=[x.path_to_parent(self.path)for x in files if x]
else:
files=[x.abspath()for x in files if x]
return files
def add_group(self,*k):
self.task_manager.add_group(*k)
def set_group(self,*k,**kw):
self.task_manager.set_group(*k,**kw)
# Hash the values of vars_lst in env, memoized per (env identity, var
# list).  NOTE(review): the cache key uses id(env), so a mutated env
# object will keep returning its first hash — presumably envs are treated
# as immutable after configuration; confirm before relying on this.
def hash_env_vars(self,env,vars_lst):
idx=str(id(env))+str(vars_lst)
try:return self.cache_sig_vars[idx]
except KeyError:pass
lst=[str(env[a])for a in vars_lst]
ret=Utils.h_list(lst)
debug('envhash: %r %r',ret,lst)
self.cache_sig_vars[idx]=ret
return ret
def name_to_obj(self,name,env):
cache=self.task_gen_cache_names
if not cache:
for x in self.all_task_gen:
vt=x.env.variant()+'_'
if x.name:
cache[vt+x.name]=x
else:
if isinstance(x.target,str):
target=x.target
else:
target=' '.join(x.target)
v=vt+target
if not cache.get(v,None):
cache[v]=x
return cache.get(env.variant()+'_'+name,None)
def flush(self,all=1):
self.ini=datetime.datetime.now()
self.task_gen_cache_names={}
self.name_to_obj('',self.env)
debug('build: delayed operation TaskGen.flush() called')
if Options.options.compile_targets:
debug('task_gen: posting objects %r listed in compile_targets',Options.options.compile_targets)
mana=self.task_manager
to_post=[]
min_grp=0
target_objects=Utils.DefaultDict(list)
for target_name in Options.options.compile_targets.split(','):
target_name=target_name.strip()
for env in self.all_envs.values():
tg=self.name_to_obj(target_name,env)
if tg:
target_objects[target_name].append(tg)
m=mana.group_idx(tg)
if m>min_grp:
min_grp=m
to_post=[tg]
elif m==min_grp:
to_post.append(tg)
if not target_name in target_objects and all:
raise Utils.WafError("target '%s' does not exist"%target_name)
debug('group: Forcing up to group %s for target %s',mana.group_name(min_grp),Options.options.compile_targets)
for i in xrange(len(mana.groups)):
mana.current_group=i
if i==min_grp:
break
g=mana.groups[i]
debug('group: Forcing group %s',mana.group_name(g))
for t in g.tasks_gen:
debug('group: Posting %s',t.name or t.target)
t.post()
for t in to_post:
t.post()
else:
debug('task_gen: posting objects (normal)')
ln=self.launch_node()
if ln.is_child_of(self.bldnode)or not ln.is_child_of(self.srcnode):
ln=self.srcnode
proj_node=self.root.find_dir(os.path.split(Utils.g_module.root_path)[0])
if proj_node.id!=self.srcnode.id:
ln=self.srcnode
for i in xrange(len(self.task_manager.groups)):
g=self.task_manager.groups[i]
self.task_manager.current_group=i
if Logs.verbose:
groups=[x for x in self.task_manager.groups_names if id(self.task_manager.groups_names[x])==id(g)]
name=groups and groups[0]or'unnamed'
Logs.debug('group: group',name)
for tg in g.tasks_gen:
if not tg.path.is_child_of(ln):
continue
if Logs.verbose:
Logs.debug('group: %s'%tg)
tg.post()
def env_of_name(self,name):
try:
return self.all_envs[name]
except KeyError:
error('no such environment: '+name)
return None
def progress_line(self,state,total,col1,col2):
n=len(str(total))
Utils.rot_idx+=1
ind=Utils.rot_chr[Utils.rot_idx%4]
ini=self.ini
pc=(100.*state)/total
eta=Utils.get_elapsed_time(ini)
fs="[%%%dd/%%%dd][%%s%%2d%%%%%%s][%s]["%(n,n,ind)
left=fs%(state,total,col1,pc,col2)
right='][%s%s%s]'%(col1,eta,col2)
cols=Utils.get_term_cols()-len(left)-len(right)+2*len(col1)+2*len(col2)
if cols<7:cols=7
ratio=int((cols*state)/total)-1
bar=('='*ratio+'>').ljust(cols)
msg=Utils.indicator%(left,bar,right)
return msg
# Copy src to tgt during "install" (is_install>0) or record/remove tgt
# during "uninstall" (is_install<0).  Returns True when an action was
# performed, False when the target was already up to date.
def do_install(self,src,tgt,chmod=O644):
if self.is_install>0:
# Unless --force was given, skip files whose timestamp and size
# suggest the installed copy is already current.
if not Options.options.force:
try:
st1=os.stat(tgt)
st2=os.stat(src)
except OSError:
pass
else:
if st1.st_mtime>=st2.st_mtime and st1.st_size==st2.st_size:
return False
# Log the source path relative to the source tree.
srclbl=src.replace(self.srcnode.abspath(None)+os.sep,'')
info("* installing %s as %s"%(srclbl,tgt))
# Remove any stale target first (best effort), then copy with
# metadata and apply the requested permission bits.
try:os.remove(tgt)
except OSError:pass
try:
shutil.copy2(src,tgt)
os.chmod(tgt,chmod)
except IOError:
# Distinguish "source missing" from "target unwritable".
try:
os.stat(src)
except(OSError,IOError):
error('File %r does not exist'%src)
raise Utils.WafError('Could not install the file %r'%tgt)
return True
elif self.is_install<0:
info("* uninstalling %s"%tgt)
# Record tgt so empty parent directories can be pruned later.
self.uninstall.append(tgt)
try:
os.remove(tgt)
# Python 2 except syntax; a missing file is not an error.
except OSError,e:
if e.errno!=errno.ENOENT:
if not getattr(self,'uninstall_error',None):
self.uninstall_error=True
Logs.warn('build: some files could not be uninstalled (retry with -vv to list them)')
if Logs.verbose>1:
Logs.warn('could not remove %s (error code %r)'%(e.filename,e.errno))
return True
red=re.compile(r"^([A-Za-z]:)?[/\\\\]*")
def get_install_path(self,path,env=None):
if not env:env=self.env
destdir=env.get_destdir()
path=path.replace('/',os.sep)
destpath=Utils.subst_vars(path,env)
if destdir:
destpath=os.path.join(destdir,self.red.sub('',destpath))
return destpath
def install_dir(self,path,env=None):
if env:
assert isinstance(env,Environment.Environment),"invalid parameter"
else:
env=self.env
if not path:
return[]
destpath=self.get_install_path(path,env)
if self.is_install>0:
info('* creating %s'%destpath)
Utils.check_dir(destpath)
elif self.is_install<0:
info('* removing %s'%destpath)
self.uninstall.append(destpath+'/xxx')
def install_files(self,path,files,env=None,chmod=O644,relative_trick=False,cwd=None):
if env:
assert isinstance(env,Environment.Environment),"invalid parameter"
else:
env=self.env
if not path:return[]
if not cwd:
cwd=self.path
if isinstance(files,str)and'*'in files:
gl=cwd.abspath()+os.sep+files
lst=glob.glob(gl)
else:
lst=Utils.to_list(files)
if not getattr(lst,'__iter__',False):
lst=[lst]
destpath=self.get_install_path(path,env)
Utils.check_dir(destpath)
installed_files=[]
for filename in lst:
if isinstance(filename,str)and os.path.isabs(filename):
alst=Utils.split_path(filename)
destfile=os.path.join(destpath,alst[-1])
else:
if isinstance(filename,Node.Node):
nd=filename
else:
nd=cwd.find_resource(filename)
if not nd:
raise Utils.WafError("Unable to install the file %r (not found in %s)"%(filename,cwd))
if relative_trick:
destfile=os.path.join(destpath,filename)
Utils.check_dir(os.path.dirname(destfile))
else:
destfile=os.path.join(destpath,nd.name)
filename=nd.abspath(env)
if self.do_install(filename,destfile,chmod):
installed_files.append(destfile)
return installed_files
def install_as(self,path,srcfile,env=None,chmod=O644,cwd=None):
if env:
assert isinstance(env,Environment.Environment),"invalid parameter"
else:
env=self.env
if not path:
raise Utils.WafError("where do you want to install %r? (%r?)"%(srcfile,path))
if not cwd:
cwd=self.path
destpath=self.get_install_path(path,env)
dir,name=os.path.split(destpath)
Utils.check_dir(dir)
if isinstance(srcfile,Node.Node):
src=srcfile.abspath(env)
else:
src=srcfile
if not os.path.isabs(srcfile):
node=cwd.find_resource(srcfile)
if not node:
raise Utils.WafError("Unable to install the file %r (not found in %s)"%(srcfile,cwd))
src=node.abspath(env)
return self.do_install(src,destpath,chmod)
def symlink_as(self,path,src,env=None,cwd=None):
if sys.platform=='win32':
return
if not path:
raise Utils.WafError("where do you want to install %r? (%r?)"%(src,path))
tgt=self.get_install_path(path,env)
dir,name=os.path.split(tgt)
Utils.check_dir(dir)
if self.is_install>0:
link=False
if not os.path.islink(tgt):
link=True
elif os.readlink(tgt)!=src:
link=True
if link:
try:os.remove(tgt)
except OSError:pass
info('* symlink %s (-> %s)'%(tgt,src))
os.symlink(src,tgt)
return 0
else:
try:
info('* removing %s'%(tgt))
os.remove(tgt)
return 0
except OSError:
return 1
def exec_command(self,cmd,**kw):
debug('runner: system command -> %s',cmd)
if self.log:
self.log.write('%s\n'%cmd)
kw['log']=self.log
try:
if not kw.get('cwd',None):
kw['cwd']=self.cwd
except AttributeError:
self.cwd=kw['cwd']=self.bldnode.abspath()
return Utils.exec_command(cmd,**kw)
def printout(self,s):
f=self.log or sys.stderr
f.write(s)
f.flush()
def add_subdirs(self,dirs):
self.recurse(dirs,'build')
def pre_recurse(self,name_or_mod,path,nexdir):
if not hasattr(self,'oldpath'):
self.oldpath=[]
self.oldpath.append(self.path)
self.path=self.root.find_dir(nexdir)
return{'bld':self,'ctx':self}
def post_recurse(self,name_or_mod,path,nexdir):
self.path=self.oldpath.pop()
def pre_build(self):
if hasattr(self,'pre_funs'):
for m in self.pre_funs:
m(self)
def post_build(self):
if hasattr(self,'post_funs'):
for m in self.post_funs:
m(self)
def add_pre_fun(self,meth):
try:self.pre_funs.append(meth)
except AttributeError:self.pre_funs=[meth]
def add_post_fun(self,meth):
try:self.post_funs.append(meth)
except AttributeError:self.post_funs=[meth]
def use_the_magic(self):
Task.algotype=Task.MAXPARALLEL
Task.file_deps=Task.extract_deps
self.magic=True
install_as=group_method(install_as)
install_files=group_method(install_files)
symlink_as=group_method(symlink_as)

View file

@@ -0,0 +1,316 @@
#! /usr/bin/env python
# encoding: utf-8
import os,shlex,sys,time
try:import cPickle
except ImportError:import pickle as cPickle
import Environment,Utils,Options,Logs
from Logs import warn
from Constants import*
try:
from urllib import request
except:
from urllib import urlopen
else:
urlopen=request.urlopen
conf_template='''# project %(app)s configured on %(now)s by
# waf %(wafver)s (abi %(abi)s, python %(pyver)x on %(systype)s)
# using %(args)s
#
'''
class ConfigurationError(Utils.WscriptError):
pass
autoconfig=False
def find_file(filename, path_list):
    """Return the first directory in *path_list* that contains *filename*.

    *path_list* may be a string or a list (normalized via Utils.to_list);
    an empty string is returned when the file is found nowhere.
    """
    for candidate in Utils.to_list(path_list):
        if os.path.exists(os.path.join(candidate, filename)):
            return candidate
    return ''
# Search for an executable *filename* and return its full path ('' when not
# found).  If *var* is given, a previously cached env[var] or an environment
# override wins, and any hit is cached back into env[var].
# NOTE(review): the mutable default path_list=[] is never mutated here
# (split() rebinds it), so it is safe — but fragile to future edits.
def find_program_impl(env,filename,path_list=[],var=None,environ=None):
if not environ:
environ=os.environ
# Accept a whitespace-separated string as well as a list of dirs.
try:path_list=path_list.split()
except AttributeError:pass
if var:
if env[var]:return env[var]
if var in environ:env[var]=environ[var]
# Fall back to the PATH environment variable when no dirs were given.
if not path_list:path_list=environ.get('PATH','').split(os.pathsep)
# On win32 also try the usual executable extensions.
ext=(Options.platform=='win32')and'.exe,.com,.bat,.cmd'or''
for y in[filename+x for x in ext.split(',')]:
for directory in path_list:
x=os.path.join(directory,y)
if os.path.isfile(x):
if var:env[var]=x
return x
return''
class ConfigurationContext(Utils.Context):
tests={}
error_handlers=[]
def __init__(self,env=None,blddir='',srcdir=''):
self.env=None
self.envname=''
self.environ=dict(os.environ)
self.line_just=40
self.blddir=blddir
self.srcdir=srcdir
self.all_envs={}
self.cwd=self.curdir=os.getcwd()
self.tools=[]
self.setenv(DEFAULT)
self.lastprog=''
self.hash=0
self.files=[]
self.tool_cache=[]
if self.blddir:
self.post_init()
def post_init(self):
self.cachedir=os.path.join(self.blddir,CACHE_DIR)
path=os.path.join(self.blddir,WAF_CONFIG_LOG)
try:os.unlink(path)
except(OSError,IOError):pass
try:
self.log=open(path,'w')
except(OSError,IOError):
self.fatal('could not open %r for writing'%path)
app=Utils.g_module.APPNAME
if app:
ver=getattr(Utils.g_module,'VERSION','')
if ver:
app="%s (%s)"%(app,ver)
now=time.ctime()
pyver=sys.hexversion
systype=sys.platform
args=" ".join(sys.argv)
wafver=WAFVERSION
abi=ABI
self.log.write(conf_template%vars())
def __del__(self):
if hasattr(self,'log')and self.log:
self.log.close()
def fatal(self,msg):
raise ConfigurationError(msg)
def check_tool(self,input,tooldir=None,funs=None):
tools=Utils.to_list(input)
if tooldir:tooldir=Utils.to_list(tooldir)
for tool in tools:
tool=tool.replace('++','xx')
if tool=='java':tool='javaw'
if tool.lower()=='unittest':tool='unittestw'
mag=(tool,id(self.env),funs)
if mag in self.tool_cache:
continue
self.tool_cache.append(mag)
module=None
try:
module=Utils.load_tool(tool,tooldir)
except Exception,e:
ex=e
if Options.options.download:
_3rdparty=os.path.normpath(Options.tooldir[0]+os.sep+'..'+os.sep+'3rdparty')
for x in Utils.to_list(Options.remote_repo):
for sub in['branches/waf-%s/wafadmin/3rdparty'%WAFVERSION,'trunk/wafadmin/3rdparty']:
url='/'.join((x,sub,tool+'.py'))
try:
web=urlopen(url)
if web.getcode()!=200:
continue
except Exception,e:
continue
else:
loc=None
try:
loc=open(_3rdparty+os.sep+tool+'.py','wb')
loc.write(web.read())
web.close()
finally:
if loc:
loc.close()
Logs.warn('downloaded %s from %s'%(tool,url))
try:
module=Utils.load_tool(tool,tooldir)
except:
Logs.warn('module %s from %s is unusable'%(tool,url))
try:
os.unlink(_3rdparty+os.sep+tool+'.py')
except:
pass
continue
else:
break
if not module:
Logs.error('Could not load the tool %r or download a suitable replacement from the repository (sys.path %r)\n%s'%(tool,sys.path,e))
raise ex
else:
Logs.error('Could not load the tool %r in %r (try the --download option?):\n%s'%(tool,sys.path,e))
raise ex
if funs is not None:
self.eval_rules(funs)
else:
func=getattr(module,'detect',None)
if func:
if type(func)is type(find_file):func(self)
else:self.eval_rules(func)
self.tools.append({'tool':tool,'tooldir':tooldir,'funs':funs})
def sub_config(self,k):
self.recurse(k,name='configure')
def pre_recurse(self,name_or_mod,path,nexdir):
return{'conf':self,'ctx':self}
def post_recurse(self,name_or_mod,path,nexdir):
if not autoconfig:
return
self.hash=hash((self.hash,getattr(name_or_mod,'waf_hash_val',name_or_mod)))
self.files.append(path)
def store(self,file=''):
if not os.path.isdir(self.cachedir):
os.makedirs(self.cachedir)
if not file:
file=open(os.path.join(self.cachedir,'build.config.py'),'w')
file.write('version = 0x%x\n'%HEXVERSION)
file.write('tools = %r\n'%self.tools)
file.close()
if not self.all_envs:
self.fatal('nothing to store in the configuration context!')
for key in self.all_envs:
tmpenv=self.all_envs[key]
tmpenv.store(os.path.join(self.cachedir,key+CACHE_SUFFIX))
def set_env_name(self,name,env):
self.all_envs[name]=env
return env
def retrieve(self,name,fromenv=None):
try:
env=self.all_envs[name]
except KeyError:
env=Environment.Environment()
env['PREFIX']=os.path.abspath(os.path.expanduser(Options.options.prefix))
self.all_envs[name]=env
else:
if fromenv:warn("The environment %s may have been configured already"%name)
return env
def setenv(self,name):
self.env=self.retrieve(name)
self.envname=name
def add_os_flags(self,var,dest=None):
try:self.env.append_value(dest or var,Utils.to_list(self.environ[var]))
except KeyError:pass
def check_message_1(self,sr):
self.line_just=max(self.line_just,len(sr))
for x in('\n',self.line_just*'-','\n',sr,'\n'):
self.log.write(x)
Utils.pprint('NORMAL',"%s :"%sr.ljust(self.line_just),sep='')
def check_message_2(self,sr,color='GREEN'):
self.log.write(sr)
self.log.write('\n')
Utils.pprint(color,sr)
def check_message(self,th,msg,state,option=''):
sr='Checking for %s %s'%(th,msg)
self.check_message_1(sr)
p=self.check_message_2
if state:p('ok '+str(option))
else:p('not found','YELLOW')
def check_message_custom(self,th,msg,custom,option='',color='PINK'):
sr='Checking for %s %s'%(th,msg)
self.check_message_1(sr)
self.check_message_2(custom,color)
def msg(self,msg,result,color=None):
self.start_msg('Checking for '+msg)
if not isinstance(color,str):
color=result and'GREEN'or'YELLOW'
self.end_msg(result,color)
def start_msg(self,msg):
try:
if self.in_msg:
return
except:
self.in_msg=0
self.in_msg+=1
self.line_just=max(self.line_just,len(msg))
for x in('\n',self.line_just*'-','\n',msg,'\n'):
self.log.write(x)
Utils.pprint('NORMAL',"%s :"%msg.ljust(self.line_just),sep='')
def end_msg(self,result,color):
self.in_msg-=1
if self.in_msg:
return
if not color:
color='GREEN'
if result==True:
msg='ok'
elif result==False:
msg='not found'
color='YELLOW'
else:
msg=str(result)
self.log.write(msg)
self.log.write('\n')
Utils.pprint(color,msg)
def find_program(self,filename,path_list=[],var=None,mandatory=False):
ret=None
if var:
if self.env[var]:
ret=self.env[var]
elif var in os.environ:
ret=os.environ[var]
if not isinstance(filename,list):filename=[filename]
if not ret:
for x in filename:
ret=find_program_impl(self.env,x,path_list,var,environ=self.environ)
if ret:break
self.check_message_1('Checking for program %s'%' or '.join(filename))
self.log.write(' find program=%r paths=%r var=%r\n -> %r\n'%(filename,path_list,var,ret))
if ret:
Utils.pprint('GREEN',str(ret))
else:
Utils.pprint('YELLOW','not found')
if mandatory:
self.fatal('The program %r is required'%filename)
if var:
self.env[var]=ret
return ret
def cmd_to_list(self,cmd):
if isinstance(cmd,str)and cmd.find(' '):
try:
os.stat(cmd)
except OSError:
return shlex.split(cmd)
else:
return[cmd]
return cmd
	def __getattr__(self,name):
		# Dynamic dispatch fallback: `conf.require_xyz(...)` resolves to
		# `check_xyz` or `find_xyz`, wrapped so that a falsy result aborts
		# the configuration with a fatal error.
		r=self.__class__.__dict__.get(name,None)
		if r:return r
		if name and name.startswith('require_'):
			for k in['check_','find_']:
				n=name.replace('require_',k)
				ret=self.__class__.__dict__.get(n,None)
				if ret:
					def run(*k,**kw):
						# ret is an unbound function from the class dict,
						# so self is passed explicitly.
						r=ret(self,*k,**kw)
						if not r:
							self.fatal('requirement failure')
						return r
					return run
		self.fatal('No such method %r'%name)
def eval_rules(self,rules):
self.rules=Utils.to_list(rules)
for x in self.rules:
f=getattr(self,x)
if not f:self.fatal("No such method '%s'."%x)
try:
f()
except Exception,e:
ret=self.err_handler(x,e)
if ret==BREAK:
break
elif ret==CONTINUE:
continue
else:
self.fatal(e)
def err_handler(self,fun,error):
pass
def conf(f):
setattr(ConfigurationContext,f.__name__,f)
return f
def conftest(f):
ConfigurationContext.tests[f.__name__]=f
return conf(f)

View file

@ -0,0 +1,47 @@
#! /usr/bin/env python
# encoding: utf-8
# Version identification for this waf release.
HEXVERSION=0x105019
WAFVERSION="1.5.19"
WAFREVISION="9709M"
ABI=7
# Unix permission bits written as decimal (0644 and 0755 in octal).
O644=420
O755=493
MAXJOBS=99999999
# Configuration-cache directory/file naming.
CACHE_DIR='c4che'
CACHE_SUFFIX='.cache.py'
DBFILE='.wafpickle-%d'%ABI
WSCRIPT_FILE='wscript'
WSCRIPT_BUILD_FILE='wscript_build'
WAF_CONFIG_LOG='config.log'
WAF_CONFIG_H='config.h'
# Placeholder signature used before a real file hash is computed.
SIG_NIL='iluvcuteoverload'
# Well-known keys used in environments and wscript modules.
VARIANT='_VARIANT_'
DEFAULT='default'
SRCDIR='srcdir'
BLDDIR='blddir'
APPNAME='APPNAME'
VERSION='VERSION'
DEFINES='defines'
UNDEFINED=()
# Tokens returned by configuration error handlers to control eval_rules.
BREAK="break"
CONTINUE="continue"
# Scheduler algorithm identifiers.
JOBCONTROL="JOBCONTROL"
MAXPARALLEL="MAXPARALLEL"
NORMAL="NORMAL"
# Task completion states (compare Runner: CRASHED/EXCEPTION/SUCCESS).
NOT_RUN=0
MISSING=1
CRASHED=2
EXCEPTION=3
SKIPPED=8
SUCCESS=9
# NOTE(review): presumably task runnable-status codes -- confirm in Task.
ASK_LATER=-1
SKIP_ME=-2
RUN_ME=-3
# Logging output formats.
LOG_FORMAT="%(asctime)s %(c1)s%(zone)s%(c2)s %(message)s"
HOUR_FORMAT="%H:%M:%S"
TEST_OK=True
CFG_FILES='cfg_files'
# Sentinels distinguishing install from uninstall runs.
INSTALL=1337
UNINSTALL=-1337

View file

@ -0,0 +1,158 @@
#! /usr/bin/env python
# encoding: utf-8
import sys
if sys.hexversion < 0x020400f0: from sets import Set as set
import os,copy,re
import Logs,Options,Utils
from Constants import*
re_imp=re.compile('^(#)*?([^#=]*?)\ =\ (.*?)$',re.M)
class Environment(object):
__slots__=("table","parent")
def __init__(self,filename=None):
self.table={}
if filename:
self.load(filename)
def __contains__(self,key):
if key in self.table:return True
try:return self.parent.__contains__(key)
except AttributeError:return False
def __str__(self):
keys=set()
cur=self
while cur:
keys.update(cur.table.keys())
cur=getattr(cur,'parent',None)
keys=list(keys)
keys.sort()
return"\n".join(["%r %r"%(x,self.__getitem__(x))for x in keys])
	def __getitem__(self,key):
		# Walk up the parent chain until some table defines `key`.
		# Rebinding `self` steps to the parent environment; when the chain
		# runs out, `self.parent` raises AttributeError and [] is returned
		# (missing keys are always an empty list, never a KeyError).
		try:
			while 1:
				x=self.table.get(key,None)
				if not x is None:
					return x
				self=self.parent
		except AttributeError:
			return[]
def __setitem__(self,key,value):
self.table[key]=value
def __delitem__(self,key):
del self.table[key]
def pop(self,key,*args):
if len(args):
return self.table.pop(key,*args)
return self.table.pop(key)
def set_variant(self,name):
self.table[VARIANT]=name
def variant(self):
try:
while 1:
x=self.table.get(VARIANT,None)
if not x is None:
return x
self=self.parent
except AttributeError:
return DEFAULT
def copy(self):
newenv=Environment()
newenv.parent=self
return newenv
def detach(self):
tbl=self.get_merged_dict()
try:
delattr(self,'parent')
except AttributeError:
pass
else:
keys=tbl.keys()
for x in keys:
tbl[x]=copy.deepcopy(tbl[x])
self.table=tbl
def get_flat(self,key):
s=self[key]
if isinstance(s,str):return s
return' '.join(s)
	def _get_list_value_for_modification(self,key):
		# Return a list stored under `key` in *this* table that is safe to
		# mutate in place: values inherited from a parent environment are
		# copied (or wrapped in a list) first, so the parent is never
		# modified through the returned reference.
		try:
			value=self.table[key]
		except KeyError:
			# Inherited (or missing) value: make a local, list-shaped copy.
			try:value=self.parent[key]
			except AttributeError:value=[]
			if isinstance(value,list):
				value=value[:]
			else:
				value=[value]
		else:
			# Already local: only ensure it is list-shaped.
			if not isinstance(value,list):
				value=[value]
		self.table[key]=value
		return value
def append_value(self,var,value):
current_value=self._get_list_value_for_modification(var)
if isinstance(value,list):
current_value.extend(value)
else:
current_value.append(value)
def prepend_value(self,var,value):
current_value=self._get_list_value_for_modification(var)
if isinstance(value,list):
current_value=value+current_value
self.table[var]=current_value
else:
current_value.insert(0,value)
def append_unique(self,var,value):
current_value=self._get_list_value_for_modification(var)
if isinstance(value,list):
for value_item in value:
if value_item not in current_value:
current_value.append(value_item)
else:
if value not in current_value:
current_value.append(value)
def get_merged_dict(self):
table_list=[]
env=self
while 1:
table_list.insert(0,env.table)
try:env=env.parent
except AttributeError:break
merged_table={}
for table in table_list:
merged_table.update(table)
return merged_table
def store(self,filename):
file=open(filename,'w')
merged_table=self.get_merged_dict()
keys=list(merged_table.keys())
keys.sort()
for k in keys:file.write('%s = %r\n'%(k,merged_table[k]))
file.close()
def load(self,filename):
tbl=self.table
code=Utils.readf(filename)
for m in re_imp.finditer(code):
g=m.group
tbl[g(2)]=eval(g(3))
Logs.debug('env: %s',self.table)
def get_destdir(self):
if self.__getitem__('NOINSTALL'):return''
return Options.options.destdir
def update(self,d):
for k,v in d.iteritems():
self[k]=v
def __getattr__(self,name):
if name in self.__slots__:
return object.__getattr__(self,name)
else:
return self[name]
def __setattr__(self,name,value):
if name in self.__slots__:
object.__setattr__(self,name,value)
else:
self[name]=value
def __delattr__(self,name):
if name in self.__slots__:
object.__delattr__(self,name)
else:
del self[name]

View file

@ -0,0 +1,97 @@
#! /usr/bin/env python
# encoding: utf-8
import ansiterm
import os,re,logging,traceback,sys
from Constants import*
zones=''
verbose=0
colors_lst={'USE':True,'BOLD':'\x1b[01;1m','RED':'\x1b[01;31m','GREEN':'\x1b[32m','YELLOW':'\x1b[33m','PINK':'\x1b[35m','BLUE':'\x1b[01;34m','CYAN':'\x1b[36m','NORMAL':'\x1b[0m','cursor_on':'\x1b[?25h','cursor_off':'\x1b[?25l',}
got_tty=False
term=os.environ.get('TERM','dumb')
if not term in['dumb','emacs']:
try:
got_tty=sys.stderr.isatty()or(sys.platform=='win32'and term in['xterm','msys'])
except AttributeError:
pass
import Utils
if not got_tty or'NOCOLOR'in os.environ:
colors_lst['USE']=False
def get_color(cl):
if not colors_lst['USE']:return''
return colors_lst.get(cl,'')
class foo(object):
	# Helper exposing terminal colors both as attributes (colors.RED) and
	# as calls (colors('RED')); get_color resolves to '' when colors are
	# disabled, so formatting degrades gracefully.
	def __getattr__(self,a):
		return get_color(a)
	def __call__(self,a):
		return get_color(a)
re_log=re.compile(r'(\w+): (.*)',re.M)
class log_filter(logging.Filter):
	# Attach color codes to each record and filter debug output by "zone"
	# (the module-global `zones`, typically set from --zones).
	def __init__(self,name=None):
		pass
	def filter(self,rec):
		rec.c1=colors.PINK
		rec.c2=colors.NORMAL
		rec.zone=rec.module
		if rec.levelno>=logging.INFO:
			# info/warning/error records always pass, colored by severity.
			if rec.levelno>=logging.ERROR:
				rec.c1=colors.RED
			elif rec.levelno>=logging.WARNING:
				rec.c1=colors.YELLOW
			else:
				rec.c1=colors.GREEN
			return True
		# Debug records: honour an optional "zone: message" prefix.
		zone=''
		m=re_log.match(rec.msg)
		if m:
			zone=rec.zone=m.group(1)
			rec.msg=m.group(2)
		if zones:
			return getattr(rec,'zone','')in zones or'*'in zones
		elif not verbose>2:
			# Without explicit zones, debug output needs -vvv.
			return False
		return True
class formatter(logging.Formatter):
def __init__(self):
logging.Formatter.__init__(self,LOG_FORMAT,HOUR_FORMAT)
def format(self,rec):
if rec.levelno>=logging.WARNING or rec.levelno==logging.INFO:
try:
return'%s%s%s'%(rec.c1,rec.msg.decode('utf-8'),rec.c2)
except:
return rec.c1+rec.msg+rec.c2
return logging.Formatter.format(self,rec)
def debug(*k,**kw):
if verbose:
k=list(k)
k[0]=k[0].replace('\n',' ')
logging.debug(*k,**kw)
def error(*k,**kw):
logging.error(*k,**kw)
if verbose>1:
if isinstance(k[0],Utils.WafError):
st=k[0].stack
else:
st=traceback.extract_stack()
if st:
st=st[:-1]
buf=[]
for filename,lineno,name,line in st:
buf.append(' File "%s", line %d, in %s'%(filename,lineno,name))
if line:
buf.append(' %s'%line.strip())
if buf:logging.error("\n".join(buf))
warn=logging.warn
info=logging.info
def init_log():
log=logging.getLogger()
log.handlers=[]
log.filters=[]
hdlr=logging.StreamHandler()
hdlr.setFormatter(formatter())
log.addHandler(hdlr)
log.addFilter(log_filter())
log.setLevel(logging.DEBUG)
init_log()

View file

@ -0,0 +1,496 @@
#! /usr/bin/env python
# encoding: utf-8
import sys
if sys.hexversion < 0x020400f0: from sets import Set as set
import os,sys,fnmatch,re,stat
import Utils,Constants
UNDEFINED=0
DIR=1
FILE=2
BUILD=3
type_to_string={UNDEFINED:"unk",DIR:"dir",FILE:"src",BUILD:"bld"}
prune_pats='.git .bzr .hg .svn _MTN _darcs CVS SCCS'.split()
exclude_pats=prune_pats+'*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split()
exclude_regs='''
**/*~
**/#*#
**/.#*
**/%*%
**/._*
**/CVS
**/CVS/**
**/.cvsignore
**/SCCS
**/SCCS/**
**/vssver.scc
**/.svn
**/.svn/**
**/.git
**/.git/**
**/.gitignore
**/.bzr
**/.bzr/**
**/.hg
**/.hg/**
**/_MTN
**/_MTN/**
**/_darcs
**/_darcs/**
**/.DS_Store'''
class Node(object):
__slots__=("name","parent","id","childs")
def __init__(self,name,parent,node_type=UNDEFINED):
self.name=name
self.parent=parent
self.__class__.bld.id_nodes+=4
self.id=self.__class__.bld.id_nodes+node_type
if node_type==DIR:self.childs={}
if parent and name in parent.childs:
raise Utils.WafError('node %s exists in the parent files %r already'%(name,parent))
if parent:parent.childs[name]=self
def __setstate__(self,data):
if len(data)==4:
(self.parent,self.name,self.id,self.childs)=data
else:
(self.parent,self.name,self.id)=data
def __getstate__(self):
if getattr(self,'childs',None)is None:
return(self.parent,self.name,self.id)
else:
return(self.parent,self.name,self.id,self.childs)
def __str__(self):
if not self.parent:return''
return"%s://%s"%(type_to_string[self.id&3],self.abspath())
def __repr__(self):
return self.__str__()
def __hash__(self):
raise Utils.WafError('nodes, you are doing it wrong')
def __copy__(self):
raise Utils.WafError('nodes are not supposed to be cloned')
def get_type(self):
return self.id&3
def set_type(self,t):
self.id=self.id+t-self.id&3
def dirs(self):
return[x for x in self.childs.values()if x.id&3==DIR]
def files(self):
return[x for x in self.childs.values()if x.id&3==FILE]
def get_dir(self,name,default=None):
node=self.childs.get(name,None)
if not node or node.id&3!=DIR:return default
return node
def get_file(self,name,default=None):
node=self.childs.get(name,None)
if not node or node.id&3!=FILE:return default
return node
def get_build(self,name,default=None):
node=self.childs.get(name,None)
if not node or node.id&3!=BUILD:return default
return node
def find_resource(self,lst):
if isinstance(lst,str):
lst=Utils.split_path(lst)
if len(lst)==1:
parent=self
else:
parent=self.find_dir(lst[:-1])
if not parent:return None
self.__class__.bld.rescan(parent)
name=lst[-1]
node=parent.childs.get(name,None)
if node:
tp=node.id&3
if tp==FILE or tp==BUILD:
return node
else:
return None
tree=self.__class__.bld
if not name in tree.cache_dir_contents[parent.id]:
return None
path=parent.abspath()+os.sep+name
try:
st=Utils.h_file(path)
except IOError:
return None
child=self.__class__(name,parent,FILE)
tree.node_sigs[0][child.id]=st
return child
def find_or_declare(self,lst):
if isinstance(lst,str):
lst=Utils.split_path(lst)
if len(lst)==1:
parent=self
else:
parent=self.find_dir(lst[:-1])
if not parent:return None
self.__class__.bld.rescan(parent)
name=lst[-1]
node=parent.childs.get(name,None)
if node:
tp=node.id&3
if tp!=BUILD:
raise Utils.WafError('find_or_declare found a source file where a build file was expected %r'%'/'.join(lst))
return node
node=self.__class__(name,parent,BUILD)
return node
def find_dir(self,lst):
if isinstance(lst,str):
lst=Utils.split_path(lst)
current=self
for name in lst:
self.__class__.bld.rescan(current)
prev=current
if not current.parent and name==current.name:
continue
elif not name:
continue
elif name=='.':
continue
elif name=='..':
current=current.parent or current
else:
current=prev.childs.get(name,None)
if current is None:
dir_cont=self.__class__.bld.cache_dir_contents
if prev.id in dir_cont and name in dir_cont[prev.id]:
if not prev.name:
if os.sep=='/':
dirname=os.sep+name
else:
dirname=name
else:
dirname=prev.abspath()+os.sep+name
if not os.path.isdir(dirname):
return None
current=self.__class__(name,prev,DIR)
elif(not prev.name and len(name)==2 and name[1]==':')or name.startswith('\\\\'):
current=self.__class__(name,prev,DIR)
else:
return None
else:
if current.id&3!=DIR:
return None
return current
def ensure_dir_node_from_path(self,lst):
if isinstance(lst,str):
lst=Utils.split_path(lst)
current=self
for name in lst:
if not name:
continue
elif name=='.':
continue
elif name=='..':
current=current.parent or current
else:
prev=current
current=prev.childs.get(name,None)
if current is None:
current=self.__class__(name,prev,DIR)
return current
def exclusive_build_node(self,path):
lst=Utils.split_path(path)
name=lst[-1]
if len(lst)>1:
parent=None
try:
parent=self.find_dir(lst[:-1])
except OSError:
pass
if not parent:
parent=self.ensure_dir_node_from_path(lst[:-1])
self.__class__.bld.rescan(parent)
else:
try:
self.__class__.bld.rescan(parent)
except OSError:
pass
else:
parent=self
node=parent.childs.get(name,None)
if not node:
node=self.__class__(name,parent,BUILD)
return node
def path_to_parent(self,parent):
lst=[]
p=self
h1=parent.height()
h2=p.height()
while h2>h1:
h2-=1
lst.append(p.name)
p=p.parent
if lst:
lst.reverse()
ret=os.path.join(*lst)
else:
ret=''
return ret
def find_ancestor(self,node):
dist=self.height()-node.height()
if dist<0:return node.find_ancestor(self)
cand=self
while dist>0:
cand=cand.parent
dist-=1
if cand==node:return cand
cursor=node
while cand.parent:
cand=cand.parent
cursor=cursor.parent
if cand==cursor:return cand
def relpath_gen(self,from_node):
if self==from_node:return'.'
if from_node.parent==self:return'..'
ancestor=self.find_ancestor(from_node)
lst=[]
cand=self
while not cand.id==ancestor.id:
lst.append(cand.name)
cand=cand.parent
cand=from_node
while not cand.id==ancestor.id:
lst.append('..')
cand=cand.parent
lst.reverse()
return os.sep.join(lst)
def nice_path(self,env=None):
tree=self.__class__.bld
ln=tree.launch_node()
if self.id&3==FILE:return self.relpath_gen(ln)
else:return os.path.join(tree.bldnode.relpath_gen(ln),env.variant(),self.relpath_gen(tree.srcnode))
def is_child_of(self,node):
p=self
diff=self.height()-node.height()
while diff>0:
diff-=1
p=p.parent
return p.id==node.id
def variant(self,env):
if not env:return 0
elif self.id&3==FILE:return 0
else:return env.variant()
def height(self):
d=self
val=-1
while d:
d=d.parent
val+=1
return val
def abspath(self,env=None):
variant=(env and(self.id&3!=FILE)and env.variant())or 0
ret=self.__class__.bld.cache_node_abspath[variant].get(self.id,None)
if ret:return ret
if not variant:
if not self.parent:
val=os.sep=='/'and os.sep or''
elif not self.parent.name:
val=(os.sep=='/'and os.sep or'')+self.name
else:
val=self.parent.abspath()+os.sep+self.name
else:
val=os.sep.join((self.__class__.bld.bldnode.abspath(),variant,self.path_to_parent(self.__class__.bld.srcnode)))
self.__class__.bld.cache_node_abspath[variant][self.id]=val
return val
def change_ext(self,ext):
name=self.name
k=name.rfind('.')
if k>=0:
name=name[:k]+ext
else:
name=name+ext
return self.parent.find_or_declare([name])
def src_dir(self,env):
return self.parent.srcpath(env)
def bld_dir(self,env):
return self.parent.bldpath(env)
def bld_base(self,env):
s=os.path.splitext(self.name)[0]
return os.path.join(self.bld_dir(env),s)
def bldpath(self,env=None):
if self.id&3==FILE:
return self.relpath_gen(self.__class__.bld.bldnode)
p=self.path_to_parent(self.__class__.bld.srcnode)
if p is not'':
return env.variant()+os.sep+p
return env.variant()
def srcpath(self,env=None):
if self.id&3==BUILD:
return self.bldpath(env)
return self.relpath_gen(self.__class__.bld.bldnode)
def read(self,env):
return Utils.readf(self.abspath(env))
def dir(self,env):
return self.parent.abspath(env)
def file(self):
return self.name
def file_base(self):
return os.path.splitext(self.name)[0]
def suffix(self):
k=max(0,self.name.rfind('.'))
return self.name[k:]
def find_iter_impl(self,src=True,bld=True,dir=True,accept_name=None,is_prune=None,maxdepth=25):
bld_ctx=self.__class__.bld
bld_ctx.rescan(self)
for name in bld_ctx.cache_dir_contents[self.id]:
if accept_name(self,name):
node=self.find_resource(name)
if node:
if src and node.id&3==FILE:
yield node
else:
node=self.find_dir(name)
if node and node.id!=bld_ctx.bldnode.id:
if dir:
yield node
if not is_prune(self,name):
if maxdepth:
for k in node.find_iter_impl(src,bld,dir,accept_name,is_prune,maxdepth=maxdepth-1):
yield k
else:
if not is_prune(self,name):
node=self.find_resource(name)
if not node:
node=self.find_dir(name)
if node and node.id!=bld_ctx.bldnode.id:
if maxdepth:
for k in node.find_iter_impl(src,bld,dir,accept_name,is_prune,maxdepth=maxdepth-1):
yield k
if bld:
for node in self.childs.values():
if node.id==bld_ctx.bldnode.id:
continue
if node.id&3==BUILD:
if accept_name(self,node.name):
yield node
raise StopIteration
def find_iter(self,in_pat=['*'],ex_pat=exclude_pats,prune_pat=prune_pats,src=True,bld=True,dir=False,maxdepth=25,flat=False):
if not(src or bld or dir):
raise StopIteration
if self.id&3!=DIR:
raise StopIteration
in_pat=Utils.to_list(in_pat)
ex_pat=Utils.to_list(ex_pat)
prune_pat=Utils.to_list(prune_pat)
def accept_name(node,name):
for pat in ex_pat:
if fnmatch.fnmatchcase(name,pat):
return False
for pat in in_pat:
if fnmatch.fnmatchcase(name,pat):
return True
return False
def is_prune(node,name):
for pat in prune_pat:
if fnmatch.fnmatchcase(name,pat):
return True
return False
ret=self.find_iter_impl(src,bld,dir,accept_name,is_prune,maxdepth=maxdepth)
if flat:
return" ".join([x.relpath_gen(self)for x in ret])
return ret
def ant_glob(self,*k,**kw):
src=kw.get('src',1)
bld=kw.get('bld',0)
dir=kw.get('dir',0)
excl=kw.get('excl',exclude_regs)
incl=k and k[0]or kw.get('incl','**')
def to_pat(s):
lst=Utils.to_list(s)
ret=[]
for x in lst:
x=x.replace('//','/')
if x.endswith('/'):
x+='**'
lst2=x.split('/')
accu=[]
for k in lst2:
if k=='**':
accu.append(k)
else:
k=k.replace('.','[.]').replace('*','.*').replace('?','.')
k='^%s$'%k
accu.append(re.compile(k))
ret.append(accu)
return ret
def filtre(name,nn):
ret=[]
for lst in nn:
if not lst:
pass
elif lst[0]=='**':
ret.append(lst)
if len(lst)>1:
if lst[1].match(name):
ret.append(lst[2:])
else:
ret.append([])
elif lst[0].match(name):
ret.append(lst[1:])
return ret
def accept(name,pats):
nacc=filtre(name,pats[0])
nrej=filtre(name,pats[1])
if[]in nrej:
nacc=[]
return[nacc,nrej]
def ant_iter(nodi,maxdepth=25,pats=[]):
nodi.__class__.bld.rescan(nodi)
tmp=list(nodi.__class__.bld.cache_dir_contents[nodi.id])
tmp.sort()
for name in tmp:
npats=accept(name,pats)
if npats and npats[0]:
accepted=[]in npats[0]
node=nodi.find_resource(name)
if node and accepted:
if src and node.id&3==FILE:
yield node
else:
node=nodi.find_dir(name)
if node and node.id!=nodi.__class__.bld.bldnode.id:
if accepted and dir:
yield node
if maxdepth:
for k in ant_iter(node,maxdepth=maxdepth-1,pats=npats):
yield k
if bld:
for node in nodi.childs.values():
if node.id==nodi.__class__.bld.bldnode.id:
continue
if node.id&3==BUILD:
npats=accept(node.name,pats)
if npats and npats[0]and[]in npats[0]:
yield node
raise StopIteration
ret=[x for x in ant_iter(self,pats=[to_pat(incl),to_pat(excl)])]
if kw.get('flat',True):
return" ".join([x.relpath_gen(self)for x in ret])
return ret
def update_build_dir(self,env=None):
if not env:
for env in bld.all_envs:
self.update_build_dir(env)
return
path=self.abspath(env)
lst=Utils.listdir(path)
try:
self.__class__.bld.cache_dir_contents[self.id].update(lst)
except KeyError:
self.__class__.bld.cache_dir_contents[self.id]=set(lst)
self.__class__.bld.cache_scanned_folders[self.id]=True
for k in lst:
npath=path+os.sep+k
st=os.stat(npath)
if stat.S_ISREG(st[stat.ST_MODE]):
ick=self.find_or_declare(k)
if not(ick.id in self.__class__.bld.node_sigs[env.variant()]):
self.__class__.bld.node_sigs[env.variant()][ick.id]=Constants.SIG_NIL
elif stat.S_ISDIR(st[stat.ST_MODE]):
child=self.find_dir(k)
if not child:
child=self.ensure_dir_node_from_path(k)
child.update_build_dir(env)
class Nodu(Node):
pass

View file

@ -0,0 +1,158 @@
#! /usr/bin/env python
# encoding: utf-8
import os,sys,imp,types,tempfile,optparse
import Logs,Utils
from Constants import*
cmds='distclean configure build install clean uninstall check dist distcheck'.split()
commands={}
is_install=False
options={}
arg_line=[]
launch_dir=''
tooldir=''
lockfile=os.environ.get('WAFLOCK','.lock-wscript')
try:cache_global=os.path.abspath(os.environ['WAFCACHE'])
except KeyError:cache_global=''
platform=Utils.unversioned_sys_platform()
conf_file='conf-runs-%s-%d.pickle'%(platform,ABI)
remote_repo=['http://waf.googlecode.com/svn/']
default_prefix=os.environ.get('PREFIX')
if not default_prefix:
if platform=='win32':
d=tempfile.gettempdir()
default_prefix=d[0].upper()+d[1:]
else:default_prefix='/usr/local/'
# Number of parallel jobs: honour $JOBS when it is a valid integer.
# Fixed: os.environ values are *strings*; without int() the comparison
# below never coerced it (on python 2, '8'<1 is False because numbers
# sort before strings), so a str leaked through as the optparse default.
try:
	default_jobs=int(os.environ.get('JOBS',-1))
except ValueError:
	default_jobs=-1
if default_jobs<1:
	try:
		if'SC_NPROCESSORS_ONLN'in os.sysconf_names:
			default_jobs=os.sysconf('SC_NPROCESSORS_ONLN')
		else:
			default_jobs=int(Utils.cmd_output(['sysctl','-n','hw.ncpu']))
	except:
		if os.name=='java':
			# Jython: ask the JVM for the processor count.
			from java.lang import Runtime
			default_jobs=Runtime.getRuntime().availableProcessors()
		else:
			default_jobs=int(os.environ.get('NUMBER_OF_PROCESSORS',1))
default_destdir=os.environ.get('DESTDIR','')
def get_usage(self):
cmds_str=[]
module=Utils.g_module
if module:
tbl=module.__dict__
keys=list(tbl.keys())
keys.sort()
if'build'in tbl:
if not module.build.__doc__:
module.build.__doc__='builds the project'
if'configure'in tbl:
if not module.configure.__doc__:
module.configure.__doc__='configures the project'
ban=['set_options','init','shutdown']
optlst=[x for x in keys if not x in ban and type(tbl[x])is type(parse_args_impl)and tbl[x].__doc__ and not x.startswith('_')]
just=max([len(x)for x in optlst])
for x in optlst:
cmds_str.append(' %s: %s'%(x.ljust(just),tbl[x].__doc__))
ret='\n'.join(cmds_str)
else:
ret=' '.join(cmds)
return'''waf [command] [options]
Main commands (example: ./waf build -j4)
%s
'''%ret
setattr(optparse.OptionParser,'get_usage',get_usage)
def create_parser(module=None):
Logs.debug('options: create_parser is called')
parser=optparse.OptionParser(conflict_handler="resolve",version='waf %s (%s)'%(WAFVERSION,WAFREVISION))
parser.formatter.width=Utils.get_term_cols()
p=parser.add_option
p('-j','--jobs',type='int',default=default_jobs,help='amount of parallel jobs (%r)'%default_jobs,dest='jobs')
p('-k','--keep',action='store_true',default=False,help='keep running happily on independent task groups',dest='keep')
p('-v','--verbose',action='count',default=0,help='verbosity level -v -vv or -vvv [default: 0]',dest='verbose')
p('--nocache',action='store_true',default=False,help='ignore the WAFCACHE (if set)',dest='nocache')
p('--zones',action='store',default='',help='debugging zones (task_gen, deps, tasks, etc)',dest='zones')
p('-p','--progress',action='count',default=0,help='-p: progress bar; -pp: ide output',dest='progress_bar')
p('--targets',action='store',default='',help='build given task generators, e.g. "target1,target2"',dest='compile_targets')
gr=optparse.OptionGroup(parser,'configuration options')
parser.add_option_group(gr)
gr.add_option('-b','--blddir',action='store',default='',help='out dir for the project (configuration)',dest='blddir')
gr.add_option('-s','--srcdir',action='store',default='',help='top dir for the project (configuration)',dest='srcdir')
gr.add_option('--prefix',help='installation prefix (configuration) [default: %r]'%default_prefix,default=default_prefix,dest='prefix')
gr.add_option('--download',action='store_true',default=False,help='try to download the tools if missing',dest='download')
gr=optparse.OptionGroup(parser,'installation options')
parser.add_option_group(gr)
gr.add_option('--destdir',help='installation root [default: %r]'%default_destdir,default=default_destdir,dest='destdir')
gr.add_option('-f','--force',action='store_true',default=False,help='force file installation',dest='force')
return parser
def parse_args_impl(parser,_args=None):
global options,commands,arg_line
(options,args)=parser.parse_args(args=_args)
arg_line=args
commands={}
for var in cmds:commands[var]=0
if not args:
commands['build']=1
args.append('build')
for arg in args:
commands[arg]=True
if'check'in args:
idx=args.index('check')
try:
bidx=args.index('build')
if bidx>idx:
raise ValueError('build before check')
except ValueError,e:
args.insert(idx,'build')
if args[0]!='init':
args.insert(0,'init')
if options.keep:options.jobs=1
if options.jobs<1:options.jobs=1
if'install'in sys.argv or'uninstall'in sys.argv:
options.destdir=options.destdir and os.path.abspath(os.path.expanduser(options.destdir))
Logs.verbose=options.verbose
Logs.init_log()
if options.zones:
Logs.zones=options.zones.split(',')
if not Logs.verbose:Logs.verbose=1
elif Logs.verbose>0:
Logs.zones=['runner']
if Logs.verbose>2:
Logs.zones=['*']
class Handler(Utils.Context):
parser=None
def __init__(self,module=None):
self.parser=create_parser(module)
self.cwd=os.getcwd()
Handler.parser=self
def add_option(self,*k,**kw):
self.parser.add_option(*k,**kw)
def add_option_group(self,*k,**kw):
return self.parser.add_option_group(*k,**kw)
def get_option_group(self,opt_str):
return self.parser.get_option_group(opt_str)
def sub_options(self,*k,**kw):
if not k:raise Utils.WscriptError('folder expected')
self.recurse(k[0],name='set_options')
def tool_options(self,*k,**kw):
if not k[0]:
raise Utils.WscriptError('invalid tool_options call %r %r'%(k,kw))
tools=Utils.to_list(k[0])
path=Utils.to_list(kw.get('tdir',kw.get('tooldir',tooldir)))
for tool in tools:
tool=tool.replace('++','xx')
if tool=='java':tool='javaw'
if tool.lower()=='unittest':tool='unittestw'
module=Utils.load_tool(tool,path)
try:
fun=module.set_options
except AttributeError:
pass
else:
fun(kw.get('option_group',self))
def parse_args(self,args=None):
parse_args_impl(self.parser,args)

View file

@ -0,0 +1,165 @@
#! /usr/bin/env python
# encoding: utf-8
import sys
if sys.hexversion < 0x020400f0: from sets import Set as set
import os,sys,random,time,threading,traceback
try:from Queue import Queue
except ImportError:from queue import Queue
import Build,Utils,Logs,Options
from Logs import debug,error
from Constants import*
GAP=15
run_old=threading.Thread.run
def run(*args,**kwargs):
try:
run_old(*args,**kwargs)
except(KeyboardInterrupt,SystemExit):
raise
except:
sys.excepthook(*sys.exc_info())
threading.Thread.run=run
def process_task(tsk):
	# Executed by consumer threads (or inline with -j1): run one task and
	# report the result back to the master scheduler through its queue.
	m=tsk.master
	if m.stop:
		m.out.put(tsk)
		return
	try:
		tsk.generator.bld.printout(tsk.display())
		# 'stat' is an optional class-level override replacing the normal run
		if tsk.__class__.stat:ret=tsk.__class__.stat(tsk)
		else:ret=tsk.call_run()
	except Exception,e:
		tsk.err_msg=Utils.ex_stack()
		tsk.hasrun=EXCEPTION
		m.error_handler(tsk)
		m.out.put(tsk)
		return
	if ret:
		# a non-zero return code means the command failed
		tsk.err_code=ret
		tsk.hasrun=CRASHED
	else:
		try:
			tsk.post_run()
		except Utils.WafError:
			pass
		except Exception:
			tsk.err_msg=Utils.ex_stack()
			tsk.hasrun=EXCEPTION
		else:
			tsk.hasrun=SUCCESS
	if tsk.hasrun!=SUCCESS:
		m.error_handler(tsk)
	m.out.put(tsk)
class TaskConsumer(threading.Thread):
	"""Daemon worker thread: endlessly pulls tasks from the shared queue
	and executes them.  Instances launch themselves on construction."""
	ready=Queue(0)
	consumers=[]
	def __init__(self):
		threading.Thread.__init__(self)
		self.setDaemon(1)
		self.start()
	def run(self):
		try:
			self.loop()
		except:
			pass
	def loop(self):
		# consume forever; the daemon flag lets the process exit anyway
		while True:
			nxt=TaskConsumer.ready.get()
			process_task(nxt)
class Parallel(object):
	"""Task scheduler: hands runnable tasks to the consumer threads and
	collects the results, honouring ordering constraints and job counts."""
	def __init__(self,bld,j=2):
		# maximum number of tasks executing concurrently
		self.numjobs=j
		self.manager=bld.task_manager
		self.manager.current_group=0
		self.total=self.manager.total()
		# tasks that may be executed right now
		self.outstanding=[]
		self.maxjobs=MAXJOBS
		# tasks postponed because their predecessors are not finished
		self.frozen=[]
		# finished tasks are posted back on this queue by the consumers
		self.out=Queue(0)
		# number of tasks currently handed to consumers
		self.count=0
		self.processed=1
		self.stop=False
		self.error=False
	def get_next(self):
		# Pop the next runnable task, or None when the list is empty.
		if not self.outstanding:
			return None
		return self.outstanding.pop(0)
	def postpone(self,tsk):
		# Re-queue a task whose prerequisites are not done; the random
		# front/back insertion avoids pathological orderings.
		if random.randint(0,1):
			self.frozen.insert(0,tsk)
		else:
			self.frozen.append(tsk)
	def refill_task_list(self):
		# Block until runnable tasks are available (or nothing remains).
		while self.count>self.numjobs+GAP or self.count>=self.maxjobs:
			self.get_out()
		while not self.outstanding:
			if self.count:
				self.get_out()
			if self.frozen:
				self.outstanding+=self.frozen
				self.frozen=[]
			elif not self.count:
				# fetch the next constraint set from the task manager
				(jobs,tmp)=self.manager.get_next_set()
				if jobs!=None:self.maxjobs=jobs
				if tmp:self.outstanding+=tmp
				break
	def get_out(self):
		# Wait for one finished task; a task may spawn additional tasks.
		ret=self.out.get()
		self.manager.add_finished(ret)
		if not self.stop and getattr(ret,'more_tasks',None):
			self.outstanding+=ret.more_tasks
			self.total+=len(ret.more_tasks)
		self.count-=1
	def error_handler(self,tsk):
		# Stop the build on the first error unless --keep was given.
		if not Options.options.keep:
			self.stop=True
		self.error=True
	def start(self):
		# Main scheduling loop: run until all tasks are done or an error
		# stopped the build.
		if TaskConsumer.consumers:
			# worker threads persist between runs; top up if needed
			while len(TaskConsumer.consumers)<self.numjobs:
				TaskConsumer.consumers.append(TaskConsumer())
		while not self.stop:
			self.refill_task_list()
			tsk=self.get_next()
			if not tsk:
				if self.count:
					# tasks are still executing: wait for them
					continue
				else:
					break
			if tsk.hasrun:
				self.processed+=1
				self.manager.add_finished(tsk)
				continue
			try:
				st=tsk.runnable_status()
			except Exception,e:
				self.processed+=1
				if self.stop and not Options.options.keep:
					tsk.hasrun=SKIPPED
					self.manager.add_finished(tsk)
					continue
				self.error_handler(tsk)
				self.manager.add_finished(tsk)
				tsk.hasrun=EXCEPTION
				tsk.err_msg=Utils.ex_stack()
				continue
			if st==ASK_LATER:
				self.postpone(tsk)
			elif st==SKIP_ME:
				self.processed+=1
				tsk.hasrun=SKIPPED
				self.manager.add_finished(tsk)
			else:
				# run it: inline for -j1, through the consumers otherwise
				tsk.position=(self.processed,self.total)
				self.count+=1
				tsk.master=self
				self.processed+=1
				if self.numjobs==1:
					process_task(tsk)
				else:
					TaskConsumer.ready.put(tsk)
					if not TaskConsumer.consumers:
						TaskConsumer.consumers=[TaskConsumer()for i in xrange(self.numjobs)]
		while self.error and self.count:
			# drain the queue so consumer threads do not block forever
			self.get_out()
		assert(self.count==0 or self.stop)

View file

@ -0,0 +1,414 @@
#! /usr/bin/env python
# encoding: utf-8
import os,sys,shutil,traceback,datetime,inspect,errno
import Utils,Configure,Build,Logs,Options,Environment,Task
from Logs import error,warn,info
from Constants import*
# Compression used by dist(): 'gz' or 'bz2' makes a tarball, anything
# else a zip file.
g_gz='bz2'
# Commands still pending execution; filled and consumed by main().
commands=[]
def prepare_impl(t,cwd,ver,wafdir):
	"""Locate the project wscript, load it, install default command
	implementations, parse the options and run the requested commands.
	't' is the tool directory, 'cwd' the launch directory, 'ver' the waf
	version string and 'wafdir' the waf library directory."""
	Options.tooldir=[t]
	Options.launch_dir=cwd
	# '--version' needs no wscript: parse the options and exit at once
	if'--version'in sys.argv:
		opt_obj=Options.Handler()
		opt_obj.curdir=cwd
		opt_obj.parse_args()
		sys.exit(0)
	msg1='Waf: Please run waf from a directory containing a file named "%s" or run distclean'%WSCRIPT_FILE
	build_dir_override=None
	candidate=None
	lst=os.listdir(cwd)
	search_for_candidate=True
	if WSCRIPT_FILE in lst:
		candidate=cwd
	elif'configure'in sys.argv and not WSCRIPT_BUILD_FILE in lst:
		# 'waf configure' run from outside the source tree: use the wscript
		# next to the waf binary and build in the current directory
		calldir=os.path.abspath(os.path.dirname(sys.argv[0]))
		if WSCRIPT_FILE in os.listdir(calldir):
			candidate=calldir
			search_for_candidate=False
		else:
			error('arg[0] directory does not contain a wscript file')
			sys.exit(1)
		build_dir_override=cwd
	while search_for_candidate:
		# climb towards the filesystem root looking for a wscript/lockfile
		if len(cwd)<=3:
			break
		dirlst=os.listdir(cwd)
		if WSCRIPT_FILE in dirlst:
			candidate=cwd
		if'configure'in sys.argv and candidate:
			break
		if Options.lockfile in dirlst:
			# a previous configuration recorded the project directory
			env=Environment.Environment()
			try:
				env.load(os.path.join(cwd,Options.lockfile))
			except:
				error('could not load %r'%Options.lockfile)
			try:
				os.stat(env['cwd'])
			except:
				candidate=cwd
			else:
				candidate=env['cwd']
			break
		cwd=os.path.dirname(cwd)
	if not candidate:
		# no wscript found: print the help if asked for, else bail out
		if'-h'in sys.argv or'--help'in sys.argv:
			warn('No wscript file found: the help message may be incomplete')
			opt_obj=Options.Handler()
			opt_obj.curdir=cwd
			opt_obj.parse_args()
		else:
			error(msg1)
		sys.exit(0)
	try:
		os.chdir(candidate)
	except OSError:
		raise Utils.WafError("the folder %r is unreadable"%candidate)
	Utils.set_main_module(os.path.join(candidate,WSCRIPT_FILE))
	if build_dir_override:
		d=getattr(Utils.g_module,BLDDIR,None)
		if d:
			msg=' Overriding build directory %s with %s'%(d,build_dir_override)
			warn(msg)
		Utils.g_module.blddir=build_dir_override
	def set_def(obj,name=''):
		# install a default implementation unless the wscript defines one
		n=name or obj.__name__
		if not n in Utils.g_module.__dict__:
			setattr(Utils.g_module,n,obj)
	for k in[dist,distclean,distcheck,clean,install,uninstall]:
		set_def(k)
	set_def(Configure.ConfigurationContext,'configure_context')
	for k in['build','clean','install','uninstall']:
		set_def(Build.BuildContext,k+'_context')
	opt_obj=Options.Handler(Utils.g_module)
	opt_obj.curdir=candidate
	try:
		f=Utils.g_module.set_options
	except AttributeError:
		pass
	else:
		opt_obj.sub_options([''])
	opt_obj.parse_args()
	if not'init'in Utils.g_module.__dict__:
		Utils.g_module.init=Utils.nada
	if not'shutdown'in Utils.g_module.__dict__:
		Utils.g_module.shutdown=Utils.nada
	main()
def prepare(t,cwd,ver,wafdir):
	# Entry point called by the waf wrapper script; verifies that the
	# wrapper and library versions match, then delegates to prepare_impl
	# with top-level error handling.
	if WAFVERSION!=ver:
		msg='Version mismatch: waf %s <> wafadmin %s (wafdir %s)'%(ver,WAFVERSION,wafdir)
		print('\033[91mError: %s\033[0m'%msg)
		sys.exit(1)
	try:
		prepare_impl(t,cwd,ver,wafdir)
	except Utils.WafError,e:
		error(str(e))
		sys.exit(1)
	except KeyboardInterrupt:
		# 68 is the conventional waf exit code for an interrupted run
		Utils.pprint('RED','Interrupted')
		sys.exit(68)
def main():
	"""Execute the commands given on the command line, one after another."""
	global commands
	commands=Options.arg_line[:]
	while commands:
		x=commands.pop(0)
		ini=datetime.datetime.now()
		if x=='configure':
			fun=configure
		elif x=='build':
			fun=build
		else:
			fun=getattr(Utils.g_module,x,None)
		if not fun:
			raise Utils.WscriptError('No such command %r'%x)
		ctx=getattr(Utils.g_module,x+'_context',Utils.Context)()
		if x in['init','shutdown','dist','distclean','distcheck']:
			# user-provided functions may or may not take a context argument
			try:
				fun(ctx)
			except TypeError:
				fun()
		else:
			fun(ctx)
		ela=''
		if not Options.options.progress_bar:
			ela=' (%s)'%Utils.get_elapsed_time(ini)
		if x!='init'and x!='shutdown':
			info('%r finished successfully%s'%(x,ela))
		if not commands and x!='shutdown':
			# always run 'shutdown' as the last command
			commands.append('shutdown')
def configure(conf):
	"""Run the project configuration and store the lockfile and the
	configuration state used by later build commands."""
	# source dir: command line option, then wscript 'srcdir' or 'top'
	src=getattr(Options.options,SRCDIR,None)
	if not src:src=getattr(Utils.g_module,SRCDIR,None)
	if not src:src=getattr(Utils.g_module,'top',None)
	if not src:
		src='.'
		incomplete_src=1
	src=os.path.abspath(src)
	# build dir: command line option, then wscript 'blddir' or 'out'
	bld=getattr(Options.options,BLDDIR,None)
	if not bld:bld=getattr(Utils.g_module,BLDDIR,None)
	if not bld:bld=getattr(Utils.g_module,'out',None)
	if not bld:
		bld='build'
		incomplete_bld=1
	if bld=='.':
		raise Utils.WafError('Setting blddir="." may cause distclean problems')
	bld=os.path.abspath(bld)
	try:os.makedirs(bld)
	except OSError:pass
	# the configuration must not be restricted by --targets
	targets=Options.options.compile_targets
	Options.options.compile_targets=None
	Options.is_install=False
	conf.srcdir=src
	conf.blddir=bld
	conf.post_init()
	if'incomplete_src'in vars():
		conf.check_message_1('Setting srcdir to')
		conf.check_message_2(src)
	if'incomplete_bld'in vars():
		conf.check_message_1('Setting blddir to')
		conf.check_message_2(bld)
	# execute the configure functions of the wscript files
	conf.sub_config([''])
	conf.store()
	# record everything needed to detect a stale configuration later
	env=Environment.Environment()
	env[BLDDIR]=bld
	env[SRCDIR]=src
	env['argv']=sys.argv
	env['commands']=Options.commands
	env['options']=Options.options.__dict__
	env['hash']=conf.hash
	env['files']=conf.files
	env['environ']=dict(conf.environ)
	env['cwd']=os.path.split(Utils.g_module.root_path)[0]
	if Utils.g_module.root_path!=src:
		# also drop a lockfile next to the sources when configuring remotely
		env.store(os.path.join(src,Options.lockfile))
	env.store(Options.lockfile)
	Options.options.compile_targets=targets
def clean(bld):
	'''removes the build files'''
	try:
		proj=Environment.Environment(Options.lockfile)
	except IOError:
		raise Utils.WafError('Nothing to clean (project not configured)')
	bld.load_dirs(proj[SRCDIR],proj[BLDDIR])
	bld.load_envs()
	bld.is_install=0
	bld.add_subdirs([os.path.split(Utils.g_module.root_path)[0]])
	try:
		bld.clean()
	finally:
		# keep the signature/dependency cache consistent even on failure
		bld.save()
def check_configured(bld):
	"""Return a build context, re-running configure first when autoconfig
	is enabled and the stored configuration is missing or out of date."""
	if not Configure.autoconfig:
		return bld
	conf_cls=getattr(Utils.g_module,'configure_context',Utils.Context)
	bld_cls=getattr(Utils.g_module,'build_context',Utils.Context)
	def reconf(proj):
		# re-run configure with the options recorded in the lockfile,
		# then restore the current option state
		back=(Options.commands,Options.options.__dict__,Logs.zones,Logs.verbose)
		Options.commands=proj['commands']
		Options.options.__dict__=proj['options']
		conf=conf_cls()
		conf.environ=proj['environ']
		configure(conf)
		(Options.commands,Options.options.__dict__,Logs.zones,Logs.verbose)=back
	try:
		proj=Environment.Environment(Options.lockfile)
	except IOError:
		# never configured before
		conf=conf_cls()
		configure(conf)
	else:
		try:
			bld=bld_cls()
			bld.load_dirs(proj[SRCDIR],proj[BLDDIR])
			bld.load_envs()
		except Utils.WafError:
			reconf(proj)
			return bld_cls()
	try:
		proj=Environment.Environment(Options.lockfile)
	except IOError:
		raise Utils.WafError('Auto-config: project does not configure (bug)')
	# hash the configuration inputs to detect changes since last configure
	h=0
	try:
		for file in proj['files']:
			if file.endswith('configure'):
				h=hash((h,Utils.readf(file)))
			else:
				mod=Utils.load_module(file)
				h=hash((h,mod.waf_hash_val))
	except(OSError,IOError):
		warn('Reconfiguring the project: a file is unavailable')
		reconf(proj)
	else:
		if(h!=proj['hash']):
			warn('Reconfiguring the project: the configuration has changed')
			reconf(proj)
	return bld_cls()
def install(bld):
	'''installs the build files'''
	bld=check_configured(bld)
	Options.commands['install']=True
	Options.commands['uninstall']=False
	Options.is_install=True
	bld.is_install=INSTALL
	# build everything first, then copy the results to the destination
	build_impl(bld)
	bld.install()
def uninstall(bld):
	'''removes the installed files'''
	Options.commands['install']=False
	Options.commands['uninstall']=True
	Options.is_install=True
	bld.is_install=UNINSTALL
	try:
		# temporarily pretend every task can be skipped so that only the
		# installation (here: removal) step of build_impl has any effect
		def runnable_status(self):
			return SKIP_ME
		setattr(Task.Task,'runnable_status_back',Task.Task.runnable_status)
		setattr(Task.Task,'runnable_status',runnable_status)
		build_impl(bld)
		bld.install()
	finally:
		setattr(Task.Task,'runnable_status',Task.Task.runnable_status_back)
def build(bld):
	# Default 'build' command: ensure the project is configured, then run
	# the build without installing anything.
	bld=check_configured(bld)
	Options.commands['install']=False
	Options.commands['uninstall']=False
	Options.is_install=False
	bld.is_install=0
	return build_impl(bld)
def build_impl(bld):
	# Shared implementation of build/install/uninstall: load the stored
	# configuration, run the wscript build functions and compile.
	try:
		proj=Environment.Environment(Options.lockfile)
	except IOError:
		raise Utils.WafError("Project not configured (run 'waf configure' first)")
	bld.load_dirs(proj[SRCDIR],proj[BLDDIR])
	bld.load_envs()
	info("Waf: Entering directory `%s'"%bld.bldnode.abspath())
	bld.add_subdirs([os.path.split(Utils.g_module.root_path)[0]])
	# execute functions registered to run before the build starts
	bld.pre_build()
	try:
		bld.compile()
	finally:
		if Options.options.progress_bar:print('')
		info("Waf: Leaving directory `%s'"%bld.bldnode.abspath())
	bld.post_build()
	bld.install()
# Names never copied into a source distribution (VCS metadata, generated
# build files, editor droppings).
excludes='.bzr .bzrignore .git .gitignore .svn CVS .cvsignore .arch-ids {arch} SCCS BitKeeper .hg _MTN _darcs Makefile Makefile.in config.log .gitattributes .hgignore .hgtags'.split()
# File suffixes excluded from a source distribution.  NOTE(review):
# 'tar.gz' has no leading dot, so it also matches names ending in
# '.tar.gz' - presumably intentional, verify before changing.
dist_exts='~ .rej .orig .pyc .pyo .bak .tar.bz2 tar.gz .zip .swp'.split()
def dont_dist(name,src,build_dir):
	"""Return True when the file 'name' (found in directory 'src') must be
	left out of the distribution tree."""
	global excludes,dist_exts
	if name.startswith(',,')or name.startswith('++')or name.startswith('.waf'):
		return True
	if src=='.'and name==Options.lockfile:
		return True
	if name in excludes or name==build_dir:
		return True
	for ext in dist_exts:
		if name.endswith(ext):
			return True
	return False
def copytree(src,dst,build_dir):
	"""Recursively copy 'src' into a freshly created directory 'dst',
	skipping everything rejected by dont_dist."""
	entries=os.listdir(src)
	os.makedirs(dst)
	for entry in entries:
		if dont_dist(entry,src,build_dir):
			continue
		source=os.path.join(src,entry)
		target=os.path.join(dst,entry)
		if os.path.isdir(source):
			copytree(source,target,build_dir)
		else:
			shutil.copy2(source,target)
def distclean(ctx=None):
	'''removes the build directory'''
	global commands
	lst=os.listdir('.')
	for f in lst:
		if f==Options.lockfile:
			# the lockfile records where the build directory is
			try:
				proj=Environment.Environment(f)
			except:
				Logs.warn('could not read %r'%f)
				continue
			try:
				shutil.rmtree(proj[BLDDIR])
			except IOError:
				pass
			except OSError,e:
				if e.errno!=errno.ENOENT:
					Logs.warn('project %r cannot be removed'%proj[BLDDIR])
			try:
				os.remove(f)
			except OSError,e:
				if e.errno!=errno.ENOENT:
					Logs.warn('file %r cannot be removed'%f)
		# also drop the waf caches when 'distclean' is run on its own
		if not commands and f.startswith('.waf'):
			shutil.rmtree(f,ignore_errors=True)
def dist(appname='',version=''):
	'''makes a tarball for redistributing the sources'''
	import tarfile
	if not appname:appname=Utils.g_module.APPNAME
	if not version:version=Utils.g_module.VERSION
	tmp_folder=appname+'-'+version
	if g_gz in['gz','bz2']:
		arch_name=tmp_folder+'.tar.'+g_gz
	else:
		arch_name=tmp_folder+'.'+'zip'
	# remove stale copies of the staging folder and the archive
	try:
		shutil.rmtree(tmp_folder)
	except(OSError,IOError):
		pass
	try:
		os.remove(arch_name)
	except(OSError,IOError):
		pass
	blddir=getattr(Utils.g_module,BLDDIR,None)
	if not blddir:
		blddir=getattr(Utils.g_module,'out',None)
	copytree('.',tmp_folder,blddir)
	# let the project post-process the staged tree (remove files, etc.)
	dist_hook=getattr(Utils.g_module,'dist_hook',None)
	if dist_hook:
		back=os.getcwd()
		os.chdir(tmp_folder)
		try:
			dist_hook()
		finally:
			os.chdir(back)
	if g_gz in['gz','bz2']:
		tar=tarfile.open(arch_name,'w:'+g_gz)
		tar.add(tmp_folder)
		tar.close()
	else:
		Utils.zip_folder(tmp_folder,arch_name,tmp_folder)
	# print a checksum so users can verify the archive
	try:from hashlib import sha1 as sha
	except ImportError:from sha import sha
	try:
		digest=" (sha=%r)"%sha(Utils.readf(arch_name)).hexdigest()
	except:
		digest=''
	info('New archive created: %s%s'%(arch_name,digest))
	if os.path.exists(tmp_folder):shutil.rmtree(tmp_folder)
	return arch_name
def distcheck(appname='',version='',subdir=''):
	'''checks if the sources compile (tarball from 'dist')'''
	import tempfile,tarfile
	if not appname:appname=Utils.g_module.APPNAME
	if not version:version=Utils.g_module.VERSION
	waf=os.path.abspath(sys.argv[0])
	tarball=dist(appname,version)
	path=appname+'-'+version
	if os.path.exists(path):
		shutil.rmtree(path)
	t=tarfile.open(tarball)
	for x in t:t.extract(x)
	t.close()
	if subdir:
		build_path=os.path.join(path,subdir)
	else:
		build_path=path
	# configure/build/install/uninstall into a scratch destdir; anything
	# left behind afterwards means the uninstall step is incomplete
	instdir=tempfile.mkdtemp('.inst','%s-%s'%(appname,version))
	ret=Utils.pproc.Popen([waf,'configure','build','install','uninstall','--destdir='+instdir],cwd=build_path).wait()
	if ret:
		raise Utils.WafError('distcheck failed with code %i'%ret)
	if os.path.exists(instdir):
		raise Utils.WafError('distcheck succeeded, but files were left in %s'%instdir)
	shutil.rmtree(path)
def add_subdir(dir,bld):
	# Compatibility helper: process the wscript build of a subdirectory.
	bld.recurse(dir,'build')

View file

@ -0,0 +1,780 @@
#! /usr/bin/env python
# encoding: utf-8
import sys
if sys.hexversion < 0x020400f0: from sets import Set as set
import os,shutil,sys,re,random,datetime,tempfile,shlex
from Utils import md5
import Build,Runner,Utils,Node,Logs,Options
from Logs import debug,warn,error
from Constants import*
algotype=NORMAL
COMPILE_TEMPLATE_SHELL='''
def f(task):
env = task.env
wd = getattr(task, 'cwd', None)
p = env.get_flat
cmd = \'\'\' %s \'\'\' % s
return task.exec_command(cmd, cwd=wd)
'''
COMPILE_TEMPLATE_NOSHELL='''
def f(task):
env = task.env
wd = getattr(task, 'cwd', None)
def to_list(xx):
if isinstance(xx, str): return [xx]
return xx
lst = []
%s
lst = [x for x in lst if x]
return task.exec_command(lst, cwd=wd)
'''
file_deps=Utils.nada
class TaskManager(object):
	"""Holds the ordered list of task groups; tasks within a group may run
	in parallel while the groups themselves run strictly in sequence."""
	def __init__(self):
		self.groups=[]
		self.tasks_done=[]
		self.current_group=0
		self.groups_names={}
	def group_name(self,g):
		# Return the name registered for group 'g' (index or instance).
		if not isinstance(g,TaskGroup):
			g=self.groups[g]
		for x in self.groups_names:
			if id(self.groups_names[x])==id(g):
				return x
		return''
	def group_idx(self,tg):
		# Return the index of the group containing task generator 'tg'.
		se=id(tg)
		for i in range(len(self.groups)):
			g=self.groups[i]
			for t in g.tasks_gen:
				if id(t)==se:
					return i
		return None
	def get_next_set(self):
		# Return (max job count, task list) for the next runnable set,
		# advancing to the following group once the current one is drained.
		ret=None
		while not ret and self.current_group<len(self.groups):
			ret=self.groups[self.current_group].get_next_set()
			if ret:return ret
			else:
				self.groups[self.current_group].process_install()
				self.current_group+=1
		return(None,None)
	def add_group(self,name=None,set=True):
		# Append a new (optionally named) group; 'set' makes it current.
		g=TaskGroup()
		if name and name in self.groups_names:
			error('add_group: name %s already present'%name)
		self.groups_names[name]=g
		self.groups.append(g)
		if set:
			self.current_group=len(self.groups)-1
	def set_group(self,idx):
		# Make the group given by name or index the current one.
		if isinstance(idx,str):
			g=self.groups_names[idx]
			for x in xrange(len(self.groups)):
				if id(g)==id(self.groups[x]):
					self.current_group=x
		else:
			self.current_group=idx
	def add_task_gen(self,tgen):
		if not self.groups:self.add_group()
		self.groups[self.current_group].tasks_gen.append(tgen)
	def add_task(self,task):
		if not self.groups:self.add_group()
		self.groups[self.current_group].tasks.append(task)
	def total(self):
		# Total number of tasks across all groups.
		total=0
		if not self.groups:return 0
		for group in self.groups:
			total+=len(group.tasks)
		return total
	def add_finished(self,tsk):
		# Record a completed task; during install/uninstall runs also
		# trigger its installation step.
		self.tasks_done.append(tsk)
		bld=tsk.generator.bld
		if bld.is_install:
			f=None
			if'install'in tsk.__dict__:
				# an instance-level 'install' overrides the method
				f=tsk.__dict__['install']
				if f:f(tsk)
			else:
				tsk.install()
class TaskGroup(object):
	"""A set of tasks executed between sequential barriers; computes the
	ordering constraints between task classes and hands out runnable
	batches of tasks."""
	def __init__(self):
		self.tasks=[]
		self.tasks_gen=[]
		# tasks grouped by identical constraint signature
		self.cstr_groups=Utils.DefaultDict(list)
		# partial order between the constraint groups
		self.cstr_order=Utils.DefaultDict(set)
		self.temp_tasks=[]
		self.ready=0
		self.post_funs=[]
	def reset(self):
		# Put all tasks back into the main list so that the constraint
		# extraction can run again.
		for x in self.cstr_groups:
			self.tasks+=self.cstr_groups[x]
		self.tasks=self.temp_tasks+self.tasks
		self.temp_tasks=[]
		self.cstr_groups=Utils.DefaultDict(list)
		self.cstr_order=Utils.DefaultDict(set)
		self.ready=0
	def process_install(self):
		# Run the functions registered to execute after this group.
		for(f,k,kw)in self.post_funs:
			f(*k,**kw)
	def prepare(self):
		self.ready=1
		file_deps(self.tasks)
		self.make_cstr_groups()
		self.extract_constraints()
	def get_next_set(self):
		# Return (max jobs, tasks) according to the selected algorithm.
		global algotype
		if algotype==NORMAL:
			tasks=self.tasks_in_parallel()
			maxj=MAXJOBS
		elif algotype==JOBCONTROL:
			(maxj,tasks)=self.tasks_by_max_jobs()
		elif algotype==MAXPARALLEL:
			tasks=self.tasks_with_inner_constraints()
			maxj=MAXJOBS
		else:
			raise Utils.WafError("unknown algorithm type %s"%(algotype))
		if not tasks:return()
		return(maxj,tasks)
	def make_cstr_groups(self):
		# Group the tasks by identical ordering constraints.
		self.cstr_groups=Utils.DefaultDict(list)
		for x in self.tasks:
			h=x.hash_constraints()
			self.cstr_groups[h].append(x)
	def set_order(self,a,b):
		# Record that group 'a' must complete before group 'b'.
		self.cstr_order[a].add(b)
	def compare_exts(self,t1,t2):
		# -1 when t1 must run first (t2 consumes what t1 emits), 1 for the
		# converse, 0 when the extensions are unrelated.
		x="ext_in"
		y="ext_out"
		in_=t1.attr(x,())
		out_=t2.attr(y,())
		for k in in_:
			if k in out_:
				return-1
		in_=t2.attr(x,())
		out_=t1.attr(y,())
		for k in in_:
			if k in out_:
				return 1
		return 0
	def compare_partial(self,t1,t2):
		# Same convention as compare_exts, based on the explicit
		# 'before'/'after' class attributes.
		m="after"
		n="before"
		name=t2.__class__.__name__
		if name in Utils.to_list(t1.attr(m,())):return-1
		elif name in Utils.to_list(t1.attr(n,())):return 1
		name=t1.__class__.__name__
		if name in Utils.to_list(t2.attr(m,())):return 1
		elif name in Utils.to_list(t2.attr(n,())):return-1
		return 0
	def extract_constraints(self):
		# Compare one representative task of each constraint group pairwise
		# and record the resulting partial order.
		keys=self.cstr_groups.keys()
		max=len(keys)
		for i in xrange(max):
			t1=self.cstr_groups[keys[i]][0]
			for j in xrange(i+1,max):
				t2=self.cstr_groups[keys[j]][0]
				val=(self.compare_exts(t1,t2)or self.compare_partial(t1,t2))
				if val>0:
					self.set_order(keys[i],keys[j])
				elif val<0:
					self.set_order(keys[j],keys[i])
	def tasks_in_parallel(self):
		# Return the tasks whose constraint groups have no pending
		# predecessor; those groups are then removed from the order.
		if not self.ready:self.prepare()
		keys=self.cstr_groups.keys()
		unconnected=[]
		remainder=[]
		for u in keys:
			for k in self.cstr_order.values():
				if u in k:
					remainder.append(u)
					break
			else:
				unconnected.append(u)
		toreturn=[]
		for y in unconnected:
			toreturn.extend(self.cstr_groups[y])
		for y in unconnected:
			try:self.cstr_order.__delitem__(y)
			except KeyError:pass
			self.cstr_groups.__delitem__(y)
		if not toreturn and remainder:
			raise Utils.WafError("circular order constraint detected %r"%remainder)
		return toreturn
	def tasks_by_max_jobs(self):
		# Return (maxjobs, tasks) keeping together tasks that share the
		# same 'maxjobs' attribute; the rest waits for the next call.
		if not self.ready:self.prepare()
		if not self.temp_tasks:self.temp_tasks=self.tasks_in_parallel()
		if not self.temp_tasks:return(None,None)
		maxjobs=MAXJOBS
		ret=[]
		remaining=[]
		for t in self.temp_tasks:
			m=getattr(t,"maxjobs",getattr(self.__class__,"maxjobs",MAXJOBS))
			if m>maxjobs:
				remaining.append(t)
			elif m<maxjobs:
				remaining+=ret
				ret=[t]
				maxjobs=m
			else:
				ret.append(t)
		self.temp_tasks=remaining
		return(maxjobs,ret)
	def tasks_with_inner_constraints(self):
		# Translate the group constraints into run_after relations on the
		# individual tasks, then hand everything out at once.
		if not self.ready:self.prepare()
		if getattr(self,"done",None):return None
		for p in self.cstr_order:
			for v in self.cstr_order[p]:
				for m in self.cstr_groups[p]:
					for n in self.cstr_groups[v]:
						n.set_run_after(m)
		self.cstr_order=Utils.DefaultDict(set)
		self.cstr_groups=Utils.DefaultDict(list)
		self.done=1
		return self.tasks[:]
class store_task_type(type):
	"""Metaclass registering every Task subclass in TaskBase.classes,
	stripping a '_task' suffix from the class name."""
	def __init__(cls,name,bases,dict):
		super(store_task_type,cls).__init__(name,bases,dict)
		name=cls.__name__
		if name.endswith('_task'):
			name=name.replace('_task','')
		if name!='TaskBase':
			TaskBase.classes[name]=cls
class TaskBase(object):
	"""Base class for all tasks: scheduling state, console display and
	install support.  Subclasses are auto-registered by store_task_type."""
	__metaclass__=store_task_type
	color="GREEN"
	maxjobs=MAXJOBS
	classes={}
	# optional class-level override replacing the normal execution
	stat=None
	def __init__(self,*k,**kw):
		self.hasrun=NOT_RUN
		try:
			self.generator=kw['generator']
		except KeyError:
			# standalone task: act as its own generator
			self.generator=self
			self.bld=Build.bld
		if kw.get('normal',1):
			self.generator.bld.task_manager.add_task(self)
	def __repr__(self):
		return'\n\t{task: %s %s}'%(self.__class__.__name__,str(getattr(self,"fun","")))
	def __str__(self):
		if hasattr(self,'fun'):
			return'executing: %s\n'%self.fun.__name__
		return self.__class__.__name__+'\n'
	def exec_command(self,*k,**kw):
		# run a command, using the task-specific 'env' mapping if present
		if self.env['env']:
			kw['env']=self.env['env']
		return self.generator.bld.exec_command(*k,**kw)
	def runnable_status(self):
		# base tasks always run; Task overrides this with signature checks
		return RUN_ME
	def can_retrieve_cache(self):
		return False
	def call_run(self):
		if self.can_retrieve_cache():
			return 0
		return self.run()
	def run(self):
		# execute the attached function, if any (0 means success)
		if hasattr(self,'fun'):
			return self.fun(self)
		return 0
	def post_run(self):
		pass
	def display(self):
		# Return the console line for this task; the format depends on the
		# progress bar mode (0: classic, 1: bar, 2: parseable).
		col1=Logs.colors(self.color)
		col2=Logs.colors.NORMAL
		if Options.options.progress_bar==1:
			return self.generator.bld.progress_line(self.position[0],self.position[1],col1,col2)
		if Options.options.progress_bar==2:
			ela=Utils.get_elapsed_time(self.generator.bld.ini)
			try:
				ins=','.join([n.name for n in self.inputs])
			except AttributeError:
				ins=''
			try:
				outs=','.join([n.name for n in self.outputs])
			except AttributeError:
				outs=''
			return'|Total %s|Current %s|Inputs %s|Outputs %s|Time %s|\n'%(self.position[1],self.position[0],ins,outs,ela)
		total=self.position[1]
		n=len(str(total))
		fs='[%%%dd/%%%dd] %%s%%s%%s'%(n,n)
		return fs%(self.position[0],self.position[1],col1,str(self),col2)
	def attr(self,att,default=None):
		# instance attribute with fallback on the class attribute
		ret=getattr(self,att,self)
		if ret is self:return getattr(self.__class__,att,default)
		return ret
	def hash_constraints(self):
		# identity of the ordering constraints, used for grouping tasks
		a=self.attr
		sum=hash((self.__class__.__name__,str(a('before','')),str(a('after','')),str(a('ext_in','')),str(a('ext_out','')),self.__class__.maxjobs))
		return sum
	def format_error(self):
		# human-readable description of how the task failed
		if getattr(self,"err_msg",None):
			return self.err_msg
		elif self.hasrun==CRASHED:
			try:
				return" -> task failed (err #%d): %r"%(self.err_code,self)
			except AttributeError:
				return" -> task failed: %r"%self
		elif self.hasrun==MISSING:
			return" -> missing files: %r"%self
		else:
			return''
	def install(self):
		# Install the task outputs (and optionally inputs) according to the
		# 'install_path'/'filename'/'chmod'/'src' attributes.
		bld=self.generator.bld
		d=self.attr('install')
		if self.attr('install_path'):
			lst=[a.relpath_gen(bld.srcnode)for a in self.outputs]
			perm=self.attr('chmod',O644)
			if self.attr('src'):
				# install the source files as well
				lst+=[a.relpath_gen(bld.srcnode)for a in self.inputs]
			if self.attr('filename'):
				dir=self.install_path.rstrip(os.sep)+os.sep+self.attr('filename')
				bld.install_as(dir,lst[0],self.env,perm)
			else:
				bld.install_files(self.install_path,lst,self.env,perm)
class Task(TaskBase):
	"""Task with input/output file nodes and signatures: it is re-executed
	only when its explicit, implicit or variable dependencies change."""
	vars=[]
	def __init__(self,env,**kw):
		TaskBase.__init__(self,**kw)
		self.env=env
		self.inputs=[]
		self.outputs=[]
		self.dep_nodes=[]
		self.run_after=[]
	def __str__(self):
		env=self.env
		src_str=' '.join([a.nice_path(env)for a in self.inputs])
		tgt_str=' '.join([a.nice_path(env)for a in self.outputs])
		if self.outputs:sep=' -> '
		else:sep=''
		return'%s: %s%s%s\n'%(self.__class__.__name__.replace('_task',''),src_str,sep,tgt_str)
	def __repr__(self):
		return"".join(['\n\t{task: ',self.__class__.__name__," ",",".join([x.name for x in self.inputs])," -> ",",".join([x.name for x in self.outputs]),'}'])
	def unique_id(self):
		# Stable identifier built from the class name, variant and the
		# input/output paths; cached on first use.
		try:
			return self.uid
		except AttributeError:
			m=md5()
			up=m.update
			up(self.__class__.__name__)
			up(self.env.variant())
			p=None
			for x in self.inputs+self.outputs:
				if p!=x.parent.id:
					p=x.parent.id
					up(x.parent.abspath())
				up(x.name)
			self.uid=m.digest()
			return self.uid
	def set_inputs(self,inp):
		if isinstance(inp,list):self.inputs+=inp
		else:self.inputs.append(inp)
	def set_outputs(self,out):
		if isinstance(out,list):self.outputs+=out
		else:self.outputs.append(out)
	def set_run_after(self,task):
		# ordering constraint: run only after 'task' has completed
		assert isinstance(task,TaskBase)
		self.run_after.append(task)
	def add_file_dependency(self,filename):
		node=self.generator.bld.path.find_resource(filename)
		self.dep_nodes.append(node)
	def signature(self):
		# Combined signature of explicit deps, implicit deps and variables;
		# cached in self.cache_sig.
		try:return self.cache_sig[0]
		except AttributeError:pass
		self.m=md5()
		exp_sig=self.sig_explicit_deps()
		var_sig=self.sig_vars()
		imp_sig=SIG_NIL
		if self.scan:
			try:
				imp_sig=self.sig_implicit_deps()
			except ValueError:
				# the implicit deps are stale: rescan and recompute
				return self.signature()
		ret=self.m.digest()
		self.cache_sig=(ret,exp_sig,imp_sig,var_sig)
		return ret
	def runnable_status(self):
		# Compare the current signature with the stored one; RUN_ME when
		# they differ, when outputs are missing, or on the first run.
		if self.inputs and(not self.outputs):
			if not getattr(self.__class__,'quiet',None):
				warn("invalid task (no inputs OR outputs): override in a Task subclass or set the attribute 'quiet' %r"%self)
		for t in self.run_after:
			if not t.hasrun:
				return ASK_LATER
		env=self.env
		bld=self.generator.bld
		new_sig=self.signature()
		key=self.unique_id()
		try:
			prev_sig=bld.task_sigs[key][0]
		except KeyError:
			debug("task: task %r must run as it was never run before or the task code changed",self)
			return RUN_ME
		for node in self.outputs:
			variant=node.variant(env)
			try:
				if bld.node_sigs[variant][node.id]!=new_sig:
					return RUN_ME
			except KeyError:
				debug("task: task %r must run as the output nodes do not exist",self)
				return RUN_ME
		if Logs.verbose:self.debug_why(bld.task_sigs[key])
		if new_sig!=prev_sig:
			return RUN_ME
		return SKIP_ME
	def post_run(self):
		# Store the output signatures and, when enabled, publish the
		# outputs into the global file cache.
		bld=self.generator.bld
		env=self.env
		sig=self.signature()
		ssig=sig.encode('hex')
		variant=env.variant()
		for node in self.outputs:
			try:
				os.stat(node.abspath(env))
			except OSError:
				self.hasrun=MISSING
				self.err_msg='-> missing file: %r'%node.abspath(env)
				raise Utils.WafError
			# the signature of an output node is the task signature
			bld.node_sigs[variant][node.id]=sig
		bld.task_sigs[self.unique_id()]=self.cache_sig
		if not Options.cache_global or Options.options.nocache or not self.outputs:
			return None
		if getattr(self,'cached',None):
			return None
		dname=os.path.join(Options.cache_global,ssig)
		tmpdir=tempfile.mkdtemp(prefix=Options.cache_global+os.sep+'waf')
		try:
			shutil.rmtree(dname)
		except:
			pass
		try:
			i=0
			for node in self.outputs:
				variant=node.variant(env)
				dest=os.path.join(tmpdir,str(i)+node.name)
				shutil.copy2(node.abspath(env),dest)
				i+=1
		except(OSError,IOError):
			try:
				shutil.rmtree(tmpdir)
			except:
				pass
		else:
			# publish the cache directory with an atomic rename
			try:
				os.rename(tmpdir,dname)
			except OSError:
				try:
					shutil.rmtree(tmpdir)
				except:
					pass
			else:
				try:
					os.chmod(dname,O755)
				except:
					pass
	def can_retrieve_cache(self):
		# Try to copy the outputs from the global cache instead of running
		# the task; returns 1 on success, None otherwise.
		if not Options.cache_global or Options.options.nocache or not self.outputs:
			return None
		env=self.env
		sig=self.signature()
		ssig=sig.encode('hex')
		dname=os.path.join(Options.cache_global,ssig)
		try:
			t1=os.stat(dname).st_mtime
		except OSError:
			return None
		i=0
		for node in self.outputs:
			variant=node.variant(env)
			orig=os.path.join(dname,str(i)+node.name)
			try:
				shutil.copy2(orig,node.abspath(env))
				# touch the cache entry to keep it alive
				os.utime(orig,None)
			except(OSError,IOError):
				debug('task: failed retrieving file')
				return None
			i+=1
		try:
			t2=os.stat(dname).st_mtime
		except OSError:
			return None
		if t1!=t2:
			# the entry was modified while copying: do not trust it
			return None
		# NOTE(review): 'variant' below is reused from the previous loop
		for node in self.outputs:
			self.generator.bld.node_sigs[variant][node.id]=sig
			if Options.options.progress_bar<1:
				self.generator.bld.printout('restoring from cache %r\n'%node.bldpath(env))
		self.cached=True
		return 1
	def debug_why(self,old_sigs):
		# Log which signature component changed (verbose mode only).
		new_sigs=self.cache_sig
		def v(x):
			return x.encode('hex')
		debug("Task %r",self)
		msgs=['Task must run','* Source file or manual dependency','* Implicit dependency','* Environment variable']
		tmp='task: -> %s: %s %s'
		for x in xrange(len(msgs)):
			if(new_sigs[x]!=old_sigs[x]):
				debug(tmp,msgs[x],v(old_sigs[x]),v(new_sigs[x]))
	def sig_explicit_deps(self):
		# Hash the inputs, dep_nodes and any manual dependencies declared
		# through bld.deps_man.
		bld=self.generator.bld
		up=self.m.update
		for x in self.inputs+getattr(self,'dep_nodes',[]):
			if not x.parent.id in bld.cache_scanned_folders:
				bld.rescan(x.parent)
			variant=x.variant(self.env)
			try:
				up(bld.node_sigs[variant][x.id])
			except KeyError:
				raise Utils.WafError('Missing node signature for %r (required by %r)'%(x,self))
		if bld.deps_man:
			additional_deps=bld.deps_man
			for x in self.inputs+self.outputs:
				try:
					d=additional_deps[x.id]
				except KeyError:
					continue
				for v in d:
					if isinstance(v,Node.Node):
						bld.rescan(v.parent)
						variant=v.variant(self.env)
						try:
							v=bld.node_sigs[variant][v.id]
						except KeyError:
							raise Utils.WafError('Missing node signature for %r (required by %r)'%(v,self))
					elif hasattr(v,'__call__'):
						# callables provide a value to hash
						v=v()
					up(v)
		for x in self.dep_nodes:
			v=bld.node_sigs[x.variant(self.env)][x.id]
			up(v)
		return self.m.digest()
	def sig_vars(self):
		# Hash the environment variables listed in 'vars' and 'dep_vars'.
		bld=self.generator.bld
		env=self.env
		act_sig=bld.hash_env_vars(env,self.__class__.vars)
		self.m.update(act_sig)
		dep_vars=getattr(self,'dep_vars',None)
		if dep_vars:
			self.m.update(bld.hash_env_vars(env,dep_vars))
		return self.m.digest()
	# scanner hook set by concrete task classes (e.g. C preprocessor scan)
	scan=None
	def sig_implicit_deps(self):
		# Hash the scanner-discovered dependencies; raises ValueError when
		# the stored dependencies are stale and a rescan is required.
		bld=self.generator.bld
		key=self.unique_id()
		prev_sigs=bld.task_sigs.get(key,())
		if prev_sigs:
			try:
				if prev_sigs[2]==self.compute_sig_implicit_deps():
					return prev_sigs[2]
			except(KeyError,OSError):
				pass
			del bld.task_sigs[key]
			raise ValueError('rescan')
		(nodes,names)=self.scan()
		if Logs.verbose:
			debug('deps: scanner for %s returned %s %s',str(self),str(nodes),str(names))
		bld.node_deps[key]=nodes
		bld.raw_deps[key]=names
		try:
			sig=self.compute_sig_implicit_deps()
		except KeyError:
			# build a readable error listing the nodes without signatures
			try:
				nodes=[]
				for k in bld.node_deps.get(self.unique_id(),[]):
					if k.id&3==2:
						if not k.id in bld.node_sigs[0]:
							nodes.append(k)
					else:
						if not k.id in bld.node_sigs[self.env.variant()]:
							nodes.append(k)
			except:
				nodes='?'
			raise Utils.WafError('Missing node signature for %r (for implicit dependencies %r)'%(nodes,self))
		return sig
	def compute_sig_implicit_deps(self):
		# Hash the signatures of the scanner-discovered dependency nodes.
		upd=self.m.update
		bld=self.generator.bld
		tstamp=bld.node_sigs
		env=self.env
		for k in bld.node_deps.get(self.unique_id(),[]):
			if not k.parent.id in bld.cache_scanned_folders:
				bld.rescan(k.parent)
			# id&3==2 appears to mark source-tree nodes (variant 0) - see
			# the same test in sig_implicit_deps
			if k.id&3==2:
				upd(tstamp[0][k.id])
			else:
				upd(tstamp[env.variant()][k.id])
		return self.m.digest()
def funex(c):
	"""Execute the generated source string 'c' and return the function
	named 'f' that it defines."""
	namespace={}
	exec(c,namespace)
	return namespace['f']
# Tokenizer for command templates: matches escaped backslashes, literal
# '$$' and ${VAR...} substitutions (the optional trailing 'code' captures
# accessors such as ${SRC[0].abspath(env)}).
reg_act=re.compile(r"(?P<backslash>\\)|(?P<dollar>\$\$)|(?P<subst>\$\{(?P<var>\w+)(?P<code>.*?)\})",re.M)
def compile_fun_shell(name,line):
	"""Compile a ${VAR}-style command template into a python function that
	builds one shell command string per task execution.  Returns the
	function and the list of environment variables it reads."""
	extr=[]
	def repl(match):
		# replace each substitution by '%s' and remember (var, accessor)
		g=match.group
		if g('dollar'):return"$"
		elif g('backslash'):return'\\\\'
		elif g('subst'):extr.append((g('var'),g('code')));return"%s"
		return None
	line=reg_act.sub(repl,line)or line
	parm=[]
	dvars=[]
	app=parm.append
	for(var,meth)in extr:
		if var=='SRC':
			# ${SRC} -> all inputs; ${SRC[...]} -> the given accessor
			if meth:app('task.inputs%s'%meth)
			else:app('" ".join([a.srcpath(env) for a in task.inputs])')
		elif var=='TGT':
			if meth:app('task.outputs%s'%meth)
			else:app('" ".join([a.bldpath(env) for a in task.outputs])')
		else:
			# plain variable: read it from the task environment
			if not var in dvars:dvars.append(var)
			app("p('%s')"%var)
	if parm:parm="%% (%s) "%(',\n\t\t'.join(parm))
	else:parm=''
	c=COMPILE_TEMPLATE_SHELL%(line,parm)
	debug('action: %s',c)
	return(funex(c),dvars)
def compile_fun_noshell(name,line):
	"""Compile a ${VAR}-style command template into a python function that
	builds the command as an argument list (executed without a shell).
	Returns the function and the environment variables it reads."""
	extr=[]
	def repl(match):
		# mark each substitution with a sentinel and remember it
		g=match.group
		if g('dollar'):return"$"
		elif g('subst'):extr.append((g('var'),g('code')));return"<<|@|>>"
		return None
	line2=reg_act.sub(repl,line)
	params=line2.split('<<|@|>>')
	buf=[]
	dvars=[]
	app=buf.append
	for x in xrange(len(extr)):
		# emit the literal words preceding the substitution, then the
		# substitution itself
		params[x]=params[x].strip()
		if params[x]:
			app("lst.extend(%r)"%params[x].split())
		(var,meth)=extr[x]
		if var=='SRC':
			if meth:app('lst.append(task.inputs%s)'%meth)
			else:app("lst.extend([a.srcpath(env) for a in task.inputs])")
		elif var=='TGT':
			if meth:app('lst.append(task.outputs%s)'%meth)
			else:app("lst.extend([a.bldpath(env) for a in task.outputs])")
		else:
			app('lst.extend(to_list(env[%r]))'%var)
			if not var in dvars:dvars.append(var)
	if params[-1]:
		# trailing literal words after the last substitution
		app("lst.extend(%r)"%shlex.split(params[-1]))
	fun=COMPILE_TEMPLATE_NOSHELL%"\n\t".join(buf)
	debug('action: %s',fun)
	return(funex(fun),dvars)
def compile_fun(name,line,shell=None):
	"""Compile the command template 'line', deciding automatically whether
	a shell is required when 'shell' is None."""
	# redirections or '&&' force shell execution (position 0 is ignored,
	# matching the historical '>0' test)
	if line.find('<')>0 or line.find('>')>0 or line.find('&&')>0:
		shell=True
	if shell is None:
		shell=(sys.platform!='win32')
	if shell:
		return compile_fun_shell(name,line)
	return compile_fun_noshell(name,line)
def simple_task_type(name,line,color='GREEN',vars=[],ext_in=[],ext_out=[],before=[],after=[],shell=None):
	"""Create a task class from a rule string; 'vars' overrides the
	dependency variables extracted from the rule."""
	(fun,dvars)=compile_fun(name,line,shell)
	# keep the original rule text for signatures/debugging
	fun.code=line
	return task_type_from_func(name,fun,vars or dvars,color,ext_in,ext_out,before,after)
def task_type_from_func(name,func,vars=[],color='GREEN',ext_in=[],ext_out=[],before=[],after=[]):
	"""Create and register a task class whose 'run' method is 'func'."""
	params={'run':func,'vars':vars,'color':color,'name':name,'ext_in':Utils.to_list(ext_in),'ext_out':Utils.to_list(ext_out),'before':Utils.to_list(before),'after':Utils.to_list(after),}
	# build the class dynamically and register it by name
	cls=type(Task)(name,(Task,),params)
	TaskBase.classes[name]=cls
	return cls
def always_run(cls):
	"""Modify the task class 'cls' so its instances are always executed:
	runnable_status is wrapped to turn SKIP_ME into RUN_ME.

	Returns cls so the helper can also be used as a class decorator
	(backward compatible: existing callers invoke it as a statement and
	ignore the return value)."""
	old=cls.runnable_status
	def always(self):
		# delegate first, then only override the 'skip' decision
		ret=old(self)
		if ret==SKIP_ME:
			return RUN_ME
		return ret
	cls.runnable_status=always
	return cls
def update_outputs(cls):
	"""Modify the task class 'cls' so that output file signatures are
	hashed after each run, and a task whose inputs are unchanged but whose
	outputs are missing from the signature cache is re-executed."""
	old_post_run=cls.post_run
	def post_run(self):
		old_post_run(self)
		bld=self.generator.bld
		# record a content hash for every produced file
		for output in self.outputs:
			bld.node_sigs[self.env.variant()][output.id]=Utils.h_file(output.abspath(self.env))
	cls.post_run=post_run
	old_runnable_status=cls.runnable_status
	def runnable_status(self):
		status=old_runnable_status(self)
		if status!=RUN_ME:
			return status
		try:
			bld=self.outputs[0].__class__.bld
			new_sig=self.signature()
			prev_sig=bld.task_sigs[self.unique_id()][0]
			if prev_sig==new_sig:
				# same inputs: skip only if every output is still cached
				for x in self.outputs:
					if not x.id in bld.node_sigs[self.env.variant()]:
						return RUN_ME
				return SKIP_ME
		except KeyError:
			pass
		except IndexError:
			pass
		return RUN_ME
	cls.runnable_status=runnable_status
def extract_outputs(tasks):
	"""Set run-order constraints between tasks sharing a file: a task
	reading a node runs after the task producing it (per env variant)."""
	# variant -> ({input node id: [tasks]}, {output node id: [tasks]})
	v={}
	for x in tasks:
		try:
			(ins,outs)=v[x.env.variant()]
		except KeyError:
			ins={}
			outs={}
			v[x.env.variant()]=(ins,outs)
		for a in getattr(x,'inputs',[]):
			try:ins[a.id].append(x)
			except KeyError:ins[a.id]=[x]
		for a in getattr(x,'outputs',[]):
			try:outs[a.id].append(x)
			except KeyError:outs[a.id]=[x]
	for(ins,outs)in v.values():
		# nodes that are both produced and consumed create ordering edges
		links=set(ins.iterkeys()).intersection(outs.iterkeys())
		for k in links:
			for a in ins[k]:
				for b in outs[k]:
					a.set_run_after(b)
def extract_deps(tasks):
	"""Like extract_outputs, but also order tasks through scanner-found
	dependencies (node_deps): a task depending on a generated file runs
	after its producer."""
	extract_outputs(tasks)
	out_to_task={}
	for x in tasks:
		v=x.env.variant()
		try:
			lst=x.outputs
		except AttributeError:
			pass
		else:
			for node in lst:
				out_to_task[(v,node.id)]=x
	dep_to_task={}
	for x in tasks:
		try:
			# force a signature computation so node_deps is populated
			x.signature()
		except:
			pass
		v=x.env.variant()
		key=x.unique_id()
		for k in x.generator.bld.node_deps.get(x.unique_id(),[]):
			try:dep_to_task[(v,k.id)].append(x)
			except KeyError:dep_to_task[(v,k.id)]=[x]
	deps=set(dep_to_task.keys()).intersection(set(out_to_task.keys()))
	for idx in deps:
		for k in dep_to_task[idx]:
			k.set_run_after(out_to_task[idx])
	# drop the cached signatures computed above; they may now be stale
	for x in tasks:
		try:
			delattr(x,'cache_sig')
		except AttributeError:
			pass

View file

@ -0,0 +1,346 @@
#! /usr/bin/env python
# encoding: utf-8
import sys
if sys.hexversion < 0x020400f0: from sets import Set as set
import os,traceback,copy
import Build,Task,Utils,Logs,Options
from Logs import debug,error,warn
from Constants import*
# Common misspellings of task generator attribute names -> canonical name;
# task_gen.__setattr__ warns about and redirects these.
typos={'sources':'source','targets':'target','include':'includes','define':'defines','importpath':'importpaths','install_var':'install_path','install_subdir':'install_path','inst_var':'install_path','inst_dir':'install_path','feature':'features',}
class register_obj(type):
	"""Metaclass registering every subclass named '*_taskgen' in
	task_gen.classes under the name without the suffix."""
	def __init__(cls,name,bases,dict):
		super(register_obj,cls).__init__(name,bases,dict)
		name=cls.__name__
		suffix='_taskgen'
		if name.endswith(suffix):
			task_gen.classes[name.replace(suffix,'')]=cls
class task_gen(object):
	"""Create tasks lazily from declarative attributes (source, target,
	features, ...). The methods to run are selected by the requested
	features and ordered by the precedence constraints registered through
	the feature/before/after decorators below."""
	__metaclass__=register_obj
	# file extension -> handler method (class-wide)
	mappings={}
	# handler function name -> function
	mapped={}
	# method name -> list of method names that must run before it
	prec=Utils.DefaultDict(list)
	# feature name -> set of method names bound to that feature
	traits=Utils.DefaultDict(set)
	# name -> task_gen subclass, filled by the register_obj metaclass
	classes={}
	def __init__(self,*kw,**kwargs):
		# positional arguments are feature names; keyword arguments
		# become instance attributes (with typo correction)
		self.prec=Utils.DefaultDict(list)
		self.source=''
		self.target=''
		# explicit methods to execute, completed by apply()
		self.meths=[]
		self.mappings={}
		self.features=list(kw)
		self.tasks=[]
		self.default_chmod=O644
		self.default_install_path=None
		# nodes produced by extension hooks, reprocessed by apply_core
		self.allnodes=[]
		self.bld=kwargs.get('bld',Build.bld)
		self.env=self.bld.env.copy()
		self.path=self.bld.path
		self.name=''
		# per-directory counter used to build unique object file names
		self.idx=self.bld.idx[self.path.id]=self.bld.idx.get(self.path.id,0)+1
		for key,val in kwargs.iteritems():
			setattr(self,key,val)
		self.bld.task_manager.add_task_gen(self)
		self.bld.all_task_gen.append(self)
	def __str__(self):
		return("<task_gen '%s' of type %s defined in %s>"%(self.name or self.target,self.__class__.__name__,str(self.path)))
	def __setattr__(self,name,attr):
		# redirect common misspellings (see 'typos') with a warning
		real=typos.get(name,name)
		if real!=name:
			warn('typo %s -> %s'%(name,real))
			if Logs.verbose>0:
				traceback.print_stack()
		object.__setattr__(self,real,attr)
	def to_list(self,value):
		# strings are split on whitespace; lists pass through unchanged
		if isinstance(value,str):return value.split()
		else:return value
	def apply(self):
		"""Select the methods bound to the features, order them with a
		topological sort over the precedence constraints, and run them."""
		keys=set(self.meths)
		self.features=Utils.to_list(self.features)
		for x in self.features+['*']:
			st=task_gen.traits[x]
			if not st:
				warn('feature %r does not exist - bind at least one method to it'%x)
			keys.update(st)
		# keep only the constraints involving the selected methods
		prec={}
		prec_tbl=self.prec or task_gen.prec
		for x in prec_tbl:
			if x in keys:
				prec[x]=prec_tbl[x]
		# seed the sort with methods that are not a predecessor of anything
		tmp=[]
		for a in keys:
			for x in prec.values():
				if a in x:break
			else:
				tmp.append(a)
		out=[]
		while tmp:
			e=tmp.pop()
			if e in keys:out.append(e)
			try:
				nlst=prec[e]
			except KeyError:
				pass
			else:
				del prec[e]
				for x in nlst:
					for y in prec:
						if x in prec[y]:
							break
					else:
						tmp.append(x)
		# leftover constraints mean a dependency cycle
		if prec:raise Utils.WafError("graph has a cycle %s"%str(prec))
		out.reverse()
		self.meths=out
		debug('task_gen: posting %s %d',self,id(self))
		for x in out:
			try:
				v=getattr(self,x)
			except AttributeError:
				raise Utils.WafError("tried to retrieve %s which is not a valid method"%x)
			debug('task_gen: -> %s (%d)',x,id(self))
			v()
	def post(self):
		"""Create the tasks; idempotent (apply() runs only once)."""
		if not self.name:
			if isinstance(self.target,list):
				self.name=' '.join(self.target)
			else:
				self.name=self.target
		if getattr(self,'posted',None):
			return
		self.apply()
		self.posted=True
		debug('task_gen: posted %s',self.name)
	def get_hook(self,ext):
		# instance-level mappings shadow the class-wide ones
		try:return self.mappings[ext]
		except KeyError:
			try:return task_gen.mappings[ext]
			except KeyError:return None
	def create_task(self,name,src=None,tgt=None,env=None):
		"""Instantiate the registered task class 'name' and attach it to
		this generator; src/tgt set the input/output nodes."""
		env=env or self.env
		task=Task.TaskBase.classes[name](env.copy(),generator=self)
		if src:
			task.set_inputs(src)
		if tgt:
			task.set_outputs(tgt)
		self.tasks.append(task)
		return task
	def name_to_obj(self,name):
		# look up another task generator by name in the build context
		return self.bld.name_to_obj(name,self.env)
	def find_sources_in_dirs(self,dirnames,excludes=[],exts=[]):
		"""Append to self.source the files found in 'dirnames' whose
		extension is in 'exts' (default: all known extension mappings),
		skipping hidden files and names listed in 'excludes'."""
		err_msg="'%s' attribute must be a list"
		if not isinstance(excludes,list):
			raise Utils.WscriptError(err_msg%'excludes')
		if not isinstance(exts,list):
			raise Utils.WscriptError(err_msg%'exts')
		lst=[]
		dirnames=self.to_list(dirnames)
		ext_lst=exts or list(self.mappings.keys())+list(task_gen.mappings.keys())
		for name in dirnames:
			anode=self.path.find_dir(name)
			if not anode or not anode.is_child_of(self.bld.srcnode):
				raise Utils.WscriptError("Unable to use '%s' - either because it's not a relative path"", or it's not child of '%s'."%(name,self.bld.srcnode))
			self.bld.rescan(anode)
			for name in self.bld.cache_dir_contents[anode.id]:
				# skip hidden files
				if name.startswith('.'):
					continue
				(base,ext)=os.path.splitext(name)
				if ext in ext_lst and not name in lst and not name in excludes:
					lst.append((anode.relpath_gen(self.path)or'.')+os.path.sep+name)
		lst.sort()
		self.source=self.to_list(self.source)
		if not self.source:self.source=lst
		else:self.source+=lst
	def clone(self,env):
		"""Duplicate this generator for another environment ('env' is an
		env name or object); 'path' and 'features' are shared, all other
		attributes are shallow-copied."""
		newobj=task_gen(bld=self.bld)
		for x in self.__dict__:
			if x in['env','bld']:
				continue
			elif x in["path","features"]:
				setattr(newobj,x,getattr(self,x))
			else:
				setattr(newobj,x,copy.copy(getattr(self,x)))
		newobj.__class__=self.__class__
		if isinstance(env,str):
			newobj.env=self.bld.all_envs[env].copy()
		else:
			newobj.env=env.copy()
		return newobj
	def get_inst_path(self):
		# explicit value wins over the feature-provided default
		return getattr(self,'_install_path',getattr(self,'default_install_path',''))
	def set_inst_path(self,val):
		self._install_path=val
	install_path=property(get_inst_path,set_inst_path)
	def get_chmod(self):
		return getattr(self,'_chmod',getattr(self,'default_chmod',O644))
	def set_chmod(self,val):
		self._chmod=val
	chmod=property(get_chmod,set_chmod)
def declare_extension(var,func):
	"""Map the file extension(s) in 'var' (string or list) to the handler
	function 'func' for all task generators."""
	try:
		for x in Utils.to_list(var):
			task_gen.mappings[x]=func
	except:
		# deliberately broad: any failure means 'var' was not list-like
		raise Utils.WscriptError('declare_extension takes either a list or a string %r'%var)
	task_gen.mapped[func.__name__]=func
def declare_order(*k):
	"""Declare that the given method names must execute in the order
	listed: each name runs before the one following it."""
	assert(len(k)>1)
	# walk consecutive pairs and record 'earlier before later'
	for earlier,later in zip(k[:-1],k[1:]):
		deps=task_gen.prec[later]
		if earlier not in deps:
			deps.append(earlier)
def declare_chain(name='',action='',ext_in='',ext_out='',reentrant=True,color='BLUE',install=0,before=[],after=[],decider=None,rule=None,scan=None):
	"""Declare a transformation chain: create a task class for 'action'
	(string rule or function) and register an extension handler for
	'ext_in' that produces 'ext_out' files.

	'reentrant' feeds the outputs back for further processing (True = all,
	or an integer count); 'decider' may compute the output extension(s)
	per node at runtime."""
	action=action or rule
	if isinstance(action,str):
		act=Task.simple_task_type(name,action,color=color)
	else:
		act=Task.task_type_from_func(name,action,color=color)
	act.ext_in=tuple(Utils.to_list(ext_in))
	act.ext_out=tuple(Utils.to_list(ext_out))
	act.before=Utils.to_list(before)
	act.after=Utils.to_list(after)
	act.scan=scan
	def x_file(self,node):
		# extension hook: create one task per matching source node
		if decider:
			ext=decider(self,node)
		else:
			ext=ext_out
		if isinstance(ext,str):
			out_source=node.change_ext(ext)
			if reentrant:
				self.allnodes.append(out_source)
		elif isinstance(ext,list):
			out_source=[node.change_ext(x)for x in ext]
			if reentrant:
				# reentrant may be True (all outputs) or a count
				for i in xrange((reentrant is True)and len(out_source)or reentrant):
					self.allnodes.append(out_source[i])
		else:
			raise Utils.WafError("do not know how to process %s"%str(ext))
		tsk=self.create_task(name,node,out_source)
		if node.__class__.bld.is_install:
			tsk.install=install
	declare_extension(act.ext_in,x_file)
	return x_file
def bind_feature(name,methods):
	"""Associate the given method name(s) with the feature 'name'."""
	task_gen.traits[name].update(Utils.to_list(methods))
def taskgen(func):
	"""Decorator: attach 'func' as a method of task_gen (same name)."""
	setattr(task_gen,func.__name__,func)
	return func
def feature(*k):
	"""Decorator: attach the function to task_gen and bind it to the
	listed feature names."""
	def deco(func):
		setattr(task_gen,func.__name__,func)
		for name in k:
			task_gen.traits[name].update([func.__name__])
		return func
	return deco
def before(*k):
	"""Decorator: attach the function to task_gen and declare that it
	runs before the listed method names."""
	def deco(func):
		setattr(task_gen,func.__name__,func)
		for fun_name in k:
			if not func.__name__ in task_gen.prec[fun_name]:
				task_gen.prec[fun_name].append(func.__name__)
		return func
	return deco
def after(*k):
	"""Decorator: attach the function to task_gen and declare that it
	runs after the listed method names."""
	def deco(func):
		setattr(task_gen,func.__name__,func)
		for fun_name in k:
			if not fun_name in task_gen.prec[func.__name__]:
				task_gen.prec[func.__name__].append(fun_name)
		return func
	return deco
def extension(var):
	"""Decorator: attach the function to task_gen and register it as the
	handler for the given file extension(s)."""
	def deco(func):
		setattr(task_gen,func.__name__,func)
		try:
			for x in Utils.to_list(var):
				task_gen.mappings[x]=func
		except:
			# deliberately broad: any failure means 'var' was not list-like
			raise Utils.WafError('extension takes either a list or a string %r'%var)
		task_gen.mapped[func.__name__]=func
		return func
	return deco
def apply_core(self):
	"""Default source processing: resolve each entry of self.source to a
	node (or run its extension hook directly), then dispatch every
	collected node to the handler matching its suffix."""
	find_resource=self.path.find_resource
	for filename in self.to_list(self.source):
		# a hook keyed on the raw filename takes precedence
		x=self.get_hook(filename)
		if x:
			x(self,filename)
		else:
			node=find_resource(filename)
			if not node:raise Utils.WafError("source not found: '%s' in '%s'"%(filename,str(self.path)))
			self.allnodes.append(node)
	for node in self.allnodes:
		x=self.get_hook(node.suffix())
		if not x:
			raise Utils.WafError("Cannot guess how to process %s (got mappings %r in %r) -> try conf.check_tool(..)?"%(str(node),self.__class__.mappings.keys(),self.__class__))
		x(self,node)
# run for every task generator, regardless of features
feature('*')(apply_core)
def exec_rule(self):
	"""Support the 'rule' attribute: build an ad-hoc task class from a
	rule string or function and create one task with the declared
	source/target files; replaces the default apply_core processing."""
	if not getattr(self,'rule',None):
		return
	try:
		self.meths.remove('apply_core')
	except ValueError:
		pass
	func=self.rule
	vars2=[]
	if isinstance(func,str):
		# compile the rule string into a callable (shell by default)
		(func,vars2)=Task.compile_fun('',self.rule,shell=getattr(self,'shell',True))
		func.code=self.rule
	name=getattr(self,'name',None)or self.target or self.rule
	if not isinstance(name,str):
		name=str(self.idx)
	cls=Task.task_type_from_func(name,func,getattr(self,'vars',vars2))
	cls.color=getattr(self,'color','BLUE')
	tsk=self.create_task(name)
	dep_vars=getattr(self,'dep_vars',['ruledeps'])
	if dep_vars:
		tsk.dep_vars=dep_vars
	if isinstance(self.rule,str):
		tsk.env.ruledeps=self.rule
	else:
		# hash the function so changing it triggers a rebuild
		tsk.env.ruledeps=Utils.h_fun(self.rule)
	if getattr(self,'target',None):
		cls.quiet=True
		tsk.outputs=[self.path.find_or_declare(x)for x in self.to_list(self.target)]
	if getattr(self,'source',None):
		cls.quiet=True
		tsk.inputs=[]
		for x in self.to_list(self.source):
			y=self.path.find_resource(x)
			if not y:
				raise Utils.WafError('input file %r could not be found (%r)'%(x,self.path.abspath()))
			tsk.inputs.append(y)
	if self.allnodes:
		tsk.inputs.extend(self.allnodes)
	if getattr(self,'scan',None):
		cls.scan=self.scan
	if getattr(self,'install_path',None):
		tsk.install_path=self.install_path
	if getattr(self,'cwd',None):
		tsk.cwd=self.cwd
	if getattr(self,'on_results',None):
		Task.update_outputs(cls)
	if getattr(self,'always',None):
		Task.always_run(cls)
	for x in['after','before','ext_in','ext_out']:
		setattr(cls,x,getattr(self,x,[]))
# runs for every generator, before the default source processing
feature('*')(exec_rule)
before('apply_core')(exec_rule)
def sequence_order(self):
	"""Feature 'seq': force the tasks of this generator to run after all
	tasks of the previously posted 'seq' generator."""
	if self.meths and self.meths[-1]!='sequence_order':
		# re-queue itself so it runs last, after all tasks exist
		self.meths.append('sequence_order')
		return
	if getattr(self,'seq_start',None):
		return
	if getattr(self.bld,'prev',None):
		self.bld.prev.post()
		for x in self.bld.prev.tasks:
			for y in self.tasks:
				y.set_run_after(x)
	self.bld.prev=self
feature('seq')(sequence_order)

View file

@ -0,0 +1,4 @@
#! /usr/bin/env python
# encoding: utf-8

View file

@ -0,0 +1,26 @@
#! /usr/bin/env python
# encoding: utf-8
import os,sys
import Task,Utils
from Configure import conftest
# Task class for creating static libraries with ar.
ar_str='${AR} ${ARFLAGS} ${AR_TGT_F}${TGT} ${AR_SRC_F}${SRC}'
cls=Task.simple_task_type('static_link',ar_str,color='YELLOW',ext_in='.o',ext_out='.bin',shell=False)
# ar archives are not safe to build in parallel
cls.maxjobs=1
# installation is handled elsewhere
cls.install=Utils.nada
old=cls.run
def wrap(self):
	# remove the previous archive first: 'ar' would update it in place
	try:os.remove(self.outputs[0].abspath(self.env))
	except OSError:pass
	return old(self)
setattr(cls,'run',wrap)
def detect(conf):
	"""Configure ar/ranlib and the default archive flags."""
	conf.find_program('ar',var='AR')
	conf.find_program('ranlib',var='RANLIB')
	# r=insert/replace, c=create, s=write index
	conf.env.ARFLAGS='rcs'
def find_ar(conf):
	"""conftest: load the ar tool and abort configuration if ar is missing."""
	v=conf.env
	conf.check_tool('ar')
	if not v['AR']:conf.fatal('ar is required for static libraries - not found')
conftest(find_ar)

View file

@ -0,0 +1,26 @@
#! /usr/bin/env python
# encoding: utf-8
import Task
from TaskGen import extension
# Task class running bison; cwd is set per-task so outputs land next to the target.
bison='${BISON} ${BISONFLAGS} ${SRC[0].abspath()} -o ${TGT[0].name}'
cls=Task.simple_task_type('bison',bison,'GREEN',ext_in='.yc .y .yy',ext_out='.c .cxx .h .l',shell=False)
def big_bison(self,node):
	"""Extension hook for .y/.yc/.yy files: create a bison task producing
	.tab.c/.tab.cc (plus the header when -d is in BISONFLAGS) and feed the
	generated source back for compilation."""
	has_h='-d'in self.env['BISONFLAGS']
	outs=[]
	if node.name.endswith('.yc'):
		outs.append(node.change_ext('.tab.cc'))
		if has_h:
			outs.append(node.change_ext('.tab.hh'))
	else:
		outs.append(node.change_ext('.tab.c'))
		if has_h:
			outs.append(node.change_ext('.tab.h'))
	tsk=self.create_task('bison',node,outs)
	# run in the build directory of the node so bison's -o path is right
	tsk.cwd=node.bld_dir(tsk.env)
	# only the generated source (not the header) is compiled further
	self.allnodes.append(outs[0])
def detect(conf):
	"""Configure bison: locate the program and default to emitting a
	header file (-d).

	mandatory=True makes find_program abort the configuration when bison
	is missing; the return value was previously bound to an unused local,
	now dropped."""
	conf.find_program('bison',var='BISON',mandatory=True)
	conf.env['BISONFLAGS']='-d'
extension(['.y','.yc','.yy'])(big_bison)

View file

@ -0,0 +1,70 @@
#! /usr/bin/env python
# encoding: utf-8
import sys
if sys.hexversion < 0x020400f0: from sets import Set as set
import os
import TaskGen,Build,Utils,Task
from Logs import debug
import ccroot
from TaskGen import feature,before,extension,after
# env variables whose per-uselib values are merged into the build flags
g_cc_flag_vars=['CCDEPS','FRAMEWORK','FRAMEWORKPATH','STATICLIB','LIB','LIBPATH','LINKFLAGS','RPATH','CCFLAGS','CPPPATH','CPPFLAGS','CCDEFINES']
# file extensions handled by the c compiler hook
EXT_CC=['.c']
# env variables contributing to the per-target-type flag merge
g_cc_type_vars=['CCFLAGS','LINKFLAGS']
class cc_taskgen(ccroot.ccroot_abstract):
	# task generator for C targets; behaviour comes from the feature methods
	pass
def init_cc(self):
	"""Feature 'cc': register the C-specific flag/type variables and make
	sure a C compiler was configured."""
	self.p_flag_vars=set(self.p_flag_vars).union(g_cc_flag_vars)
	self.p_type_vars=set(self.p_type_vars).union(g_cc_type_vars)
	if not self.env['CC_NAME']:
		raise Utils.WafError("At least one compiler (gcc, ..) must be selected")
def apply_obj_vars_cc(self):
	"""Turn the include paths (INC_PATHS nodes and CPPPATH strings) into
	compiler flags in _CCINCFLAGS using the CPPPATH_ST pattern."""
	env=self.env
	app=env.append_unique
	cpppath_st=env['CPPPATH_ST']
	for i in env['INC_PATHS']:
		# add both the build and the source directory of each node
		app('_CCINCFLAGS',cpppath_st%i.bldpath(env))
		app('_CCINCFLAGS',cpppath_st%i.srcpath(env))
	for i in env['CPPPATH']:
		app('_CCINCFLAGS',cpppath_st%i)
def apply_defines_cc(self):
	"""Collect the preprocessor defines (self.defines, CCDEFINES and the
	per-uselib CCDEFINES_*), deduplicate them, store a readable form in
	DEFLINES and the compiler flags in _CCDEFFLAGS."""
	self.defines=getattr(self,'defines',[])
	lst=self.to_list(self.defines)+self.to_list(self.env['CCDEFINES'])
	milst=[]
	for defi in lst:
		if not defi in milst:
			milst.append(defi)
	libs=self.to_list(self.uselib)
	for l in libs:
		val=self.env['CCDEFINES_'+l]
		if val:milst+=val
	# "NAME value" pairs with surrounding quotes stripped from the value
	self.env['DEFLINES']=["%s %s"%(x[0],Utils.trimquotes('='.join(x[1:])))for x in[y.split('=')for y in milst]]
	y=self.env['CCDEFINES_ST']
	self.env.append_unique('_CCDEFFLAGS',[y%x for x in milst])
def c_hook(self,node):
	"""Extension hook for .c files: create a 'cc' compile task producing
	an object file with a per-generator unique suffix."""
	if getattr(self,'obj_ext',None):
		obj_ext=self.obj_ext
	else:
		# idx makes object names unique when a file is built twice
		obj_ext='_%d.o'%self.idx
	task=self.create_task('cc',node,node.change_ext(obj_ext))
	try:
		self.compiled_tasks.append(task)
	except AttributeError:
		raise Utils.WafError('Have you forgotten to set the feature "cc" on %s?'%str(self))
	return task
# Task classes for compiling and linking C, plus the feature-method bindings.
cc_str='${CC} ${CCFLAGS} ${CPPFLAGS} ${_CCINCFLAGS} ${_CCDEFFLAGS} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT}'
cls=Task.simple_task_type('cc',cc_str,'GREEN',ext_out='.o',ext_in='.c',shell=False)
# header dependencies come from the preprocessor-based scanner
cls.scan=ccroot.scan
cls.vars.append('CCDEPS')
link_str='${LINK_CC} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT[0].abspath(env)} ${LINKFLAGS}'
cls=Task.simple_task_type('cc_link',link_str,color='YELLOW',ext_in='.o',ext_out='.bin',shell=False)
cls.maxjobs=1
cls.install=Utils.nada
feature('cc')(init_cc)
before('apply_type_vars')(init_cc)
after('default_cc')(init_cc)
feature('cc')(apply_obj_vars_cc)
after('apply_incpaths')(apply_obj_vars_cc)
feature('cc')(apply_defines_cc)
after('apply_lib_vars')(apply_defines_cc)
extension(EXT_CC)(c_hook)

View file

@ -0,0 +1,397 @@
#! /usr/bin/env python
# encoding: utf-8
import sys
if sys.hexversion < 0x020400f0: from sets import Set as set
import os,sys,re
import TaskGen,Task,Utils,preproc,Logs,Build,Options
from Logs import error,debug,warn
from Utils import md5
from TaskGen import taskgen,after,before,feature
from Constants import*
from Configure import conftest
try:
from cStringIO import StringIO
except ImportError:
from io import StringIO
import config_c
USE_TOP_LEVEL=False
def get_cc_version(conf,cc,gcc=False,icc=False):
	"""Run the compiler with -dM -E to dump its predefined macros, verify
	the compiler family (gcc/icc), and derive DEST_OS, DEST_BINFMT,
	DEST_CPU and CC_VERSION in conf.env. Returns the macro dict."""
	cmd=cc+['-dM','-E','-']
	try:
		p=Utils.pproc.Popen(cmd,stdin=Utils.pproc.PIPE,stdout=Utils.pproc.PIPE,stderr=Utils.pproc.PIPE)
		p.stdin.write('\n')
		out=p.communicate()[0]
	except:
		# deliberately broad: any failure to run the compiler is fatal here
		conf.fatal('could not determine the compiler version %r'%cmd)
	out=str(out)
	if gcc:
		if out.find('__INTEL_COMPILER')>=0:
			conf.fatal('The intel compiler pretends to be gcc')
		if out.find('__GNUC__')<0:
			conf.fatal('Could not determine the compiler type')
	if icc and out.find('__INTEL_COMPILER')<0:
		conf.fatal('Not icc/icpc')
	k={}
	if icc or gcc:
		out=out.split('\n')
		import shlex
		# parse '#define NAME VALUE' lines into k
		for line in out:
			lst=shlex.split(line)
			if len(lst)>2:
				key=lst[1]
				val=lst[2]
				k[key]=val
		def isD(var):
			# macro is defined
			return var in k
		def isT(var):
			# macro is defined and truthy
			return var in k and k[var]!='0'
		mp1={'__linux__':'linux','__GNU__':'gnu','__FreeBSD__':'freebsd','__NetBSD__':'netbsd','__OpenBSD__':'openbsd','__sun':'sunos','__hpux':'hpux','__sgi':'irix','_AIX':'aix','__CYGWIN__':'cygwin','__MSYS__':'msys','_UWIN':'uwin','_WIN64':'win32','_WIN32':'win32','__POWERPC__':'powerpc',}
		for i in mp1:
			if isD(i):
				conf.env.DEST_OS=mp1[i]
				break
		else:
			if isD('__APPLE__')and isD('__MACH__'):
				conf.env.DEST_OS='darwin'
			elif isD('__unix__'):
				conf.env.DEST_OS='generic'
		if isD('__ELF__'):
			conf.env.DEST_BINFMT='elf'
		elif isD('__WINNT__')or isD('__CYGWIN__'):
			conf.env.DEST_BINFMT='pe'
		elif isD('__APPLE__'):
			conf.env.DEST_BINFMT='mac-o'
		mp2={'__x86_64__':'x86_64','__i386__':'x86','__ia64__':'ia','__mips__':'mips','__sparc__':'sparc','__alpha__':'alpha','__arm__':'arm','__hppa__':'hppa','__powerpc__':'powerpc',}
		for i in mp2:
			if isD(i):
				conf.env.DEST_CPU=mp2[i]
				break
		debug('ccroot: dest platform: '+' '.join([conf.env[x]or'?'for x in('DEST_OS','DEST_BINFMT','DEST_CPU')]))
		conf.env['CC_VERSION']=(k['__GNUC__'],k['__GNUC_MINOR__'],k['__GNUC_PATCHLEVEL__'])
	return k
class DEBUG_LEVELS:
	"""Symbolic names for the supported --debug-level values."""
	ULTRADEBUG="ultradebug"
	DEBUG="debug"
	RELEASE="release"
	OPTIMIZED="optimized"
	CUSTOM="custom"
	# all accepted values, for option validation
	ALL=[ULTRADEBUG,DEBUG,RELEASE,OPTIMIZED,CUSTOM]
def scan(self):
	"""Task scanner: run the waf preprocessor on the input file(s) and
	return (dependency_nodes, unresolved_names)."""
	debug('ccroot: _scan_preprocessor(self, node, env, path_lst)')
	if len(self.inputs)==1:
		# common case: one source file per compile task
		node=self.inputs[0]
		(nodes,names)=preproc.get_deps(node,self.env,nodepaths=self.env['INC_PATHS'])
		if Logs.verbose:
			debug('deps: deps for %s: %r; unresolved %r',str(node),nodes,names)
		return(nodes,names)
	# several inputs: merge the per-file results without duplicates
	all_nodes=[]
	all_names=[]
	seen=set()
	for node in self.inputs:
		(nodes,names)=preproc.get_deps(node,self.env,nodepaths=self.env['INC_PATHS'])
		if Logs.verbose:
			debug('deps: deps for %s: %r; unresolved %r',str(node),nodes,names)
		for x in nodes:
			if id(x)in seen:continue
			seen.add(id(x))
			all_nodes.append(x)
		for x in names:
			if not x in all_names:
				all_names.append(x)
	return(all_nodes,all_names)
class ccroot_abstract(TaskGen.task_gen):
	"""Base class for C/C++ task generators."""
	def __init__(self,*k,**kw):
		# normalize legacy feature spellings: 'shlib' -> 'cshlib', etc.
		if len(k)>1:
			k=list(k)
			if k[1][0]!='c':
				k[1]='c'+k[1]
		TaskGen.task_gen.__init__(self,*k,**kw)
def get_target_name(self):
	"""Compute the platform file name of the link output by applying the
	program/shlib/staticlib pattern (e.g. 'lib%s.so') to self.target."""
	tp='program'
	for x in self.features:
		if x in['cshlib','cstaticlib']:
			tp=x.lstrip('c')
	pattern=self.env[tp+'_PATTERN']
	if not pattern:pattern='%s'
	dir,name=os.path.split(self.target)
	# versioned dlls on PE platforms carry the major version in the name
	if self.env.DEST_BINFMT=='pe'and getattr(self,'vnum',None)and'cshlib'in self.features:
		name=name+'-'+self.vnum.split('.')[0]
	return os.path.join(dir,pattern%name)
def default_cc(self):
	"""Initialize the default attributes and env values (DEST_BINFMT,
	BINDIR, LIBDIR) used by the other c/cxx feature methods."""
	Utils.def_attrs(self,includes='',defines='',rpaths='',uselib='',uselib_local='',add_objects='',p_flag_vars=[],p_type_vars=[],compiled_tasks=[],link_task=None)
	if not self.env.DEST_BINFMT:
		# fall back to guessing the binary format from the platform
		self.env.DEST_BINFMT=Utils.unversioned_sys_platform_to_binary_format(self.env.DEST_OS or Utils.unversioned_sys_platform())
	if not self.env.BINDIR:self.env.BINDIR=Utils.subst_vars('${PREFIX}/bin',self.env)
	if not self.env.LIBDIR:self.env.LIBDIR=Utils.subst_vars('${PREFIX}/lib${LIB_EXT}',self.env)
def apply_verif(self):
	"""Sanity checks: a program/library must have sources (or objects to
	link) and a target name."""
	if not(self.source or getattr(self,'add_objects',None)or getattr(self,'uselib_local',None)or getattr(self,'obj_files',None)):
		raise Utils.WafError('no source files specified for %s'%self)
	if not self.target:
		raise Utils.WafError('no target for %s'%self)
def vars_target_cprogram(self):
	"""Programs install to BINDIR with executable permissions."""
	self.default_install_path=self.env.BINDIR
	self.default_chmod=O755
def vars_target_cshlib(self):
	"""Shared libraries install to LIBDIR, except on PE platforms where
	dlls live next to the programs in BINDIR and must be executable."""
	if self.env.DEST_BINFMT=='pe':
		self.default_chmod=O755
		self.default_install_path=self.env.BINDIR
	else:
		self.default_install_path=self.env.LIBDIR
def default_link_install(self):
	"""Install the link output to install_path (skipped when the path is
	empty, or when apply_implib/apply_vnum took over)."""
	if self.install_path:
		self.bld.install_files(self.install_path,self.link_task.outputs[0],env=self.env,chmod=self.chmod)
def apply_incpaths(self):
	"""Gather the include paths (per-uselib CPPPATH_*, preprocessor
	standard paths, self.includes) and store the resolved nodes in
	INC_PATHS for the compile flags and the dependency scanner."""
	lst=[]
	for lib in self.to_list(self.uselib):
		for path in self.env['CPPPATH_'+lib]:
			if not path in lst:
				lst.append(path)
	if preproc.go_absolute:
		for path in preproc.standard_includes:
			if not path in lst:
				lst.append(path)
	for path in self.to_list(self.includes):
		if not path in lst:
			if preproc.go_absolute or not os.path.isabs(path):
				lst.append(path)
			else:
				# absolute includes go straight to CPPPATH (no node)
				self.env.prepend_value('CPPPATH',path)
	for path in lst:
		node=None
		if os.path.isabs(path):
			if preproc.go_absolute:
				node=self.bld.root.find_dir(path)
		elif path[0]=='#':
			# '#' anchors the path at the project root
			node=self.bld.srcnode
			if len(path)>1:
				node=node.find_dir(path[1:])
		else:
			node=self.path.find_dir(path)
		if node:
			self.env.append_value('INC_PATHS',node)
	if USE_TOP_LEVEL:
		self.env.append_value('INC_PATHS',self.bld.srcnode)
def apply_type_vars(self):
	"""Merge the per-target-type env values ('program'/'staticlib'/'shlib'
	prefixed _USELIB and p_type_vars entries) into the build variables."""
	for x in self.features:
		if not x in['cprogram','cstaticlib','cshlib']:
			continue
		# 'cprogram' -> 'program', matching the env variable prefixes
		x=x.lstrip('c')
		st=self.env[x+'_USELIB']
		if st:self.uselib=self.uselib+' '+st
		for var in self.p_type_vars:
			compvar='%s_%s'%(x,var)
			value=self.env[compvar]
			if value:self.env.append_value(var,value)
def apply_link(self):
	"""Create the link task consuming the object files of all compile
	tasks, producing the platform-named target."""
	link=getattr(self,'link',None)
	if not link:
		# pick the linker task type from the features
		if'cstaticlib'in self.features:link='static_link'
		elif'cxx'in self.features:link='cxx_link'
		else:link='cc_link'
	tsk=self.create_task(link)
	outputs=[t.outputs[0]for t in self.compiled_tasks]
	tsk.set_inputs(outputs)
	tsk.set_outputs(self.path.find_or_declare(get_target_name(self)))
	self.link_task=tsk
def apply_lib_vars(self):
	"""Process uselib_local (breadth-first over local library
	dependencies: post them, add LIB/STATICLIB/LIBPATH, ordering and
	export_incdirs) then merge all per-uselib env variables."""
	env=self.env
	self.uselib=self.to_list(self.uselib)
	names=self.to_list(self.uselib_local)
	seen=set([])
	tmp=Utils.deque(names)
	while tmp:
		lib_name=tmp.popleft()
		if lib_name in seen:
			continue
		y=self.name_to_obj(lib_name)
		if not y:
			raise Utils.WafError('object %r was not found in uselib_local (required by %r)'%(lib_name,self.name))
		y.post()
		seen.add(lib_name)
		if getattr(y,'uselib_local',None):
			lst=y.to_list(y.uselib_local)
			# static deps of an already-linked lib need not propagate
			if'cshlib'in y.features or'cprogram'in y.features:
				lst=[x for x in lst if not'cstaticlib'in self.name_to_obj(x).features]
			tmp.extend(lst)
		if getattr(y,'link_task',None):
			link_name=y.target[y.target.rfind(os.sep)+1:]
			if'cstaticlib'in y.features:
				env.append_value('STATICLIB',link_name)
			elif'cshlib'in y.features or'cprogram'in y.features:
				env.append_value('LIB',link_name)
			# link after the dependency and rebuild when it changes
			self.link_task.set_run_after(y.link_task)
			dep_nodes=getattr(self.link_task,'dep_nodes',[])
			self.link_task.dep_nodes=dep_nodes+y.link_task.outputs
			tmp_path=y.link_task.outputs[0].parent.bldpath(self.env)
			if not tmp_path in env['LIBPATH']:env.prepend_value('LIBPATH',tmp_path)
		# inherit the uselib flags of the local library
		for v in self.to_list(y.uselib):
			if not env['STATICLIB_'+v]:
				if not v in self.uselib:
					self.uselib.insert(0,v)
		if getattr(y,'export_incdirs',None):
			for x in self.to_list(y.export_incdirs):
				node=y.path.find_dir(x)
				if not node:
					raise Utils.WafError('object %r: invalid folder %r in export_incdirs'%(y.target,x))
				self.env.append_unique('INC_PATHS',node)
	# finally merge every per-uselib variable into the build flags
	for x in self.uselib:
		for v in self.p_flag_vars:
			val=self.env[v+'_'+x]
			if val:self.env.append_value(v,val)
def apply_objdeps(self):
	"""Process 'add_objects': post the referenced generators (depth-first,
	dependencies first) and add their object files to the link inputs."""
	if not getattr(self,'add_objects',None):return
	seen=[]
	names=self.to_list(self.add_objects)
	while names:
		x=names[0]
		if x in seen:
			names=names[1:]
			continue
		y=self.name_to_obj(x)
		if not y:
			raise Utils.WafError('object %r was not found in uselib_local (required by add_objects %r)'%(x,self.name))
		if getattr(y,'add_objects',None):
			# push unprocessed dependencies in front of x and retry
			added=0
			lst=y.to_list(y.add_objects)
			lst.reverse()
			for u in lst:
				if u in seen:continue
				added=1
				names=[u]+names
			if added:continue
		y.post()
		seen.append(x)
		for t in y.compiled_tasks:
			self.link_task.inputs.extend(t.outputs)
def apply_obj_vars(self):
	"""Translate RPATH/LIBPATH/STATICLIB/LIB env values into LINKFLAGS
	using the platform *_ST patterns and static/shared markers."""
	v=self.env
	lib_st=v['LIB_ST']
	staticlib_st=v['STATICLIB_ST']
	libpath_st=v['LIBPATH_ST']
	staticlibpath_st=v['STATICLIBPATH_ST']
	rpath_st=v['RPATH_ST']
	app=v.append_unique
	if v['FULLSTATIC']:
		v.append_value('LINKFLAGS',v['FULLSTATIC_MARKER'])
	for i in v['RPATH']:
		if i and rpath_st:
			app('LINKFLAGS',rpath_st%i)
	for i in v['LIBPATH']:
		app('LINKFLAGS',libpath_st%i)
		app('LINKFLAGS',staticlibpath_st%i)
	if v['STATICLIB']:
		v.append_value('LINKFLAGS',v['STATICLIB_MARKER'])
		k=[(staticlib_st%i)for i in v['STATICLIB']]
		app('LINKFLAGS',k)
	if not v['FULLSTATIC']:
		# switch the linker back to shared mode before the -l flags
		if v['STATICLIB']or v['LIB']:
			v.append_value('LINKFLAGS',v['SHLIB_MARKER'])
	app('LINKFLAGS',[lib_st%i for i in v['LIB']])
def process_obj_files(self):
	"""Add the files registered via add_obj_file to the link inputs."""
	if not hasattr(self,'obj_files'):return
	for x in self.obj_files:
		node=self.path.find_resource(x)
		self.link_task.inputs.append(node)
def add_obj_file(self,file):
	"""Queue an extra object file for linking (processed after apply_link
	by process_obj_files)."""
	if not hasattr(self,'obj_files'):self.obj_files=[]
	if not'process_obj_files'in self.meths:self.meths.append('process_obj_files')
	self.obj_files.append(file)
# shorthand attribute name (singular) -> env variable it appends to
c_attrs={'cxxflag':'CXXFLAGS','cflag':'CCFLAGS','ccflag':'CCFLAGS','linkflag':'LINKFLAGS','ldflag':'LINKFLAGS','lib':'LIB','libpath':'LIBPATH','staticlib':'STATICLIB','staticlibpath':'STATICLIBPATH','rpath':'RPATH','framework':'FRAMEWORK','frameworkpath':'FRAMEWORKPATH'}
def add_extra_flags(self):
	"""Map convenience attributes like 'cflags'/'lib'/'linkflags' on the
	generator to the corresponding env variables (see c_attrs)."""
	for x in self.__dict__.keys():
		y=x.lower()
		# accept both singular and plural attribute spellings
		if y[-1]=='s':
			y=y[:-1]
		if c_attrs.get(y,None):
			self.env.append_unique(c_attrs[y],getattr(self,x))
def apply_implib(self):
	"""PE platforms only: install the dll to the binary dir, declare the
	import library as an extra link output, install it to LIBDIR and pass
	the IMPLIB_ST flag to the linker. Replaces default_link_install."""
	if not self.env.DEST_BINFMT=='pe':
		return
	self.meths.remove('default_link_install')
	bindir=self.install_path
	if not bindir:return
	dll=self.link_task.outputs[0]
	self.bld.install_files(bindir,dll,self.env,self.chmod)
	implib=self.env['implib_PATTERN']%os.path.split(self.target)[1]
	implib=dll.parent.find_or_declare(implib)
	self.link_task.outputs.append(implib)
	self.bld.install_as('${LIBDIR}/%s'%implib.name,implib,self.env)
	self.env.append_value('LINKFLAGS',(self.env['IMPLIB_ST']%implib.bldpath(self.env)).split())
def apply_vnum(self):
	"""Versioned shared libraries on POSIX elf/mach-o targets: set the
	soname, install the fully-versioned file plus the soname and plain
	symlinks, and create a 'vnum' task making the build-dir symlinks.
	Replaces default_link_install."""
	if not getattr(self,'vnum','')or not'cshlib'in self.features or os.name!='posix'or self.env.DEST_BINFMT not in('elf','mac-o'):
		return
	self.meths.remove('default_link_install')
	link=self.link_task
	nums=self.vnum.split('.')
	node=link.outputs[0]
	libname=node.name
	if libname.endswith('.dylib'):
		name3=libname.replace('.dylib','.%s.dylib'%self.vnum)
		name2=libname.replace('.dylib','.%s.dylib'%nums[0])
	else:
		# name3 = full version, name2 = soname (major version)
		name3=libname+'.'+self.vnum
		name2=libname+'.'+nums[0]
	if self.env.SONAME_ST:
		v=self.env.SONAME_ST%name2
		self.env.append_value('LINKFLAGS',v.split())
	bld=self.bld
	nums=self.vnum.split('.')
	path=self.install_path
	if not path:return
	bld.install_as(path+os.sep+name3,node,env=self.env)
	bld.symlink_as(path+os.sep+name2,name3)
	bld.symlink_as(path+os.sep+libname,name3)
	self.create_task('vnum',node,[node.parent.find_or_declare(name2),node.parent.find_or_declare(name3)])
def exec_vnum_link(self):
	"""Run method of the 'vnum' task: (re)create each output as a symlink
	to the actual library file; returns 1 (failure) if a link cannot be
	made."""
	for x in self.outputs:
		path=x.abspath(self.env)
		# remove any stale file/link first
		try:
			os.remove(path)
		except OSError:
			pass
		try:
			os.symlink(self.inputs[0].name,path)
		except OSError:
			return 1
# task class creating the versioned-library symlinks in the build tree
cls=Task.task_type_from_func('vnum',func=exec_vnum_link,ext_in='.bin',color='CYAN')
cls.quiet=1
def add_as_needed(conf):
	"""conftest: enable --as-needed on ELF targets linked through gcc so
	only libraries actually providing used symbols are recorded."""
	if conf.env.DEST_BINFMT=='elf'and'gcc'in(conf.env.CXX_NAME,conf.env.CC_NAME):
		# The flag is a linker option; when gcc drives the link it must be
		# wrapped as '-Wl,--as-needed' (bare '--as-needed' is not a gcc
		# driver option and is rejected).
		conf.env.append_unique('LINKFLAGS','-Wl,--as-needed')
# Bind the methods above to features and declare their execution order.
feature('cc','cxx')(default_cc)
before('apply_core')(default_cc)
feature('cprogram','dprogram','cstaticlib','dstaticlib','cshlib','dshlib')(apply_verif)
feature('cprogram','dprogram')(vars_target_cprogram)
after('default_cc')(vars_target_cprogram)
before('apply_core')(vars_target_cprogram)
after('default_cc')(vars_target_cshlib)
feature('cshlib','dshlib')(vars_target_cshlib)
before('apply_core')(vars_target_cshlib)
feature('cprogram','dprogram','cstaticlib','dstaticlib','cshlib','dshlib')(default_link_install)
after('apply_link','vars_target_cprogram','vars_target_cshlib')(default_link_install)
feature('cc','cxx')(apply_incpaths)
after('apply_type_vars','apply_lib_vars','apply_core')(apply_incpaths)
feature('cc','cxx')(apply_type_vars)
after('init_cc','init_cxx')(apply_type_vars)
before('apply_lib_vars')(apply_type_vars)
feature('cprogram','cshlib','cstaticlib')(apply_link)
after('apply_core')(apply_link)
feature('cc','cxx')(apply_lib_vars)
after('apply_link','init_cc','init_cxx','apply_core')(apply_lib_vars)
feature('cprogram','cstaticlib','cshlib')(apply_objdeps)
after('init_cc','init_cxx','apply_link')(apply_objdeps)
feature('cprogram','cshlib','cstaticlib')(apply_obj_vars)
after('apply_lib_vars')(apply_obj_vars)
after('apply_link')(process_obj_files)
taskgen(add_obj_file)
feature('cc','cxx')(add_extra_flags)
before('init_cxx','init_cc')(add_extra_flags)
before('apply_lib_vars','apply_obj_vars','apply_incpaths','init_cc')(add_extra_flags)
feature('cshlib')(apply_implib)
after('apply_link','default_cc')(apply_implib)
before('apply_lib_vars','apply_objdeps','default_link_install')(apply_implib)
feature('cshlib')(apply_vnum)
after('apply_link')(apply_vnum)
before('apply_lib_vars','default_link_install')(apply_vnum)
conftest(add_as_needed)

View file

@ -0,0 +1,43 @@
#! /usr/bin/env python
# encoding: utf-8
import os,sys,imp,types,ccroot
import optparse
import Utils,Configure,Options
from Logs import debug
# platform name -> ordered list of C compilers to try during configuration
c_compiler={'win32':['msvc','gcc'],'cygwin':['gcc'],'darwin':['gcc'],'aix':['xlc','gcc'],'linux':['gcc','icc','suncc'],'sunos':['gcc','suncc'],'irix':['gcc'],'hpux':['gcc'],'gnu':['gcc'],'default':['gcc']}
def __list_possible_compiler(platform):
	# fall back to the 'default' entry for unknown platforms
	try:
		return c_compiler[platform]
	except KeyError:
		return c_compiler["default"]
def detect(conf):
	"""Try each candidate C compiler in turn (from --check-c-compiler) and
	keep the first one that configures; sets COMPILER_CC."""
	try:test_for_compiler=Options.options.check_c_compiler
	except AttributeError:conf.fatal("Add set_options(opt): opt.tool_options('compiler_cc')")
	orig=conf.env
	for compiler in test_for_compiler.split():
		# work on a copy so a failed attempt does not pollute the env
		conf.env=orig.copy()
		try:
			conf.check_tool(compiler)
		except Configure.ConfigurationError,e:
			debug('compiler_cc: %r'%e)
		else:
			if conf.env['CC']:
				# success: merge the results back into the original env
				orig.table=conf.env.get_merged_dict()
				conf.env=orig
				conf.check_message(compiler,'',True)
				conf.env['COMPILER_CC']=compiler
				break
			conf.check_message(compiler,'',False)
			break
	else:
		conf.fatal('could not configure a c compiler!')
def set_options(opt):
	"""Add the --check-c-compiler option (platform-dependent default) and
	the option groups of each candidate compiler tool."""
	build_platform=Utils.unversioned_sys_platform()
	possible_compiler_list=__list_possible_compiler(build_platform)
	test_for_compiler=' '.join(possible_compiler_list)
	cc_compiler_opts=opt.add_option_group("C Compiler Options")
	cc_compiler_opts.add_option('--check-c-compiler',default="%s"%test_for_compiler,help='On this platform (%s) the following C-Compiler will be checked by default: "%s"'%(build_platform,test_for_compiler),dest="check_c_compiler")
	for c_compiler in test_for_compiler.split():
		opt.tool_options('%s'%c_compiler,option_group=cc_compiler_opts)

View file

@ -0,0 +1,43 @@
#! /usr/bin/env python
# encoding: utf-8
import os,sys,imp,types,ccroot
import optparse
import Utils,Configure,Options
from Logs import debug
# Candidate C++ compilers to probe, keyed by (unversioned) platform name.
cxx_compiler={'win32':['msvc','g++'],'cygwin':['g++'],'darwin':['g++'],'aix':['xlc++','g++'],'linux':['g++','icpc','sunc++'],'sunos':['g++','sunc++'],'irix':['g++'],'hpux':['g++'],'gnu':['g++'],'default':['g++']}
def __list_possible_compiler(platform):
	# Return the candidate compiler list for `platform`, falling back to 'default'.
	try:
		return cxx_compiler[platform]
	except KeyError:
		return cxx_compiler["default"]
def detect(conf):
	# Probe each candidate C++ compiler in order; the first one that configures
	# successfully is recorded in COMPILER_CXX.  Aborts if none works.
	try:test_for_compiler=Options.options.check_cxx_compiler
	except AttributeError:raise Configure.ConfigurationError("Add set_options(opt): opt.tool_options('compiler_cxx')")
	orig=conf.env
	for compiler in test_for_compiler.split():
		try:
			# probe with a scratch copy so a failed tool does not pollute the env
			conf.env=orig.copy()
			conf.check_tool(compiler)
		except Configure.ConfigurationError,e:
			debug('compiler_cxx: %r'%e)
		else:
			if conf.env['CXX']:
				# merge the successful probe back into the real env
				orig.table=conf.env.get_merged_dict()
				conf.env=orig
				conf.check_message(compiler,'',True)
				conf.env['COMPILER_CXX']=compiler
				break
			# tool loaded but set no CXX: report failure and stop probing
			conf.check_message(compiler,'',False)
			break
	else:
		conf.fatal('could not configure a cxx compiler!')
def set_options(opt):
	# Add --check-cxx-compiler plus the option groups of every candidate tool.
	build_platform=Utils.unversioned_sys_platform()
	possible_compiler_list=__list_possible_compiler(build_platform)
	test_for_compiler=' '.join(possible_compiler_list)
	cxx_compiler_opts=opt.add_option_group('C++ Compiler Options')
	cxx_compiler_opts.add_option('--check-cxx-compiler',default="%s"%test_for_compiler,help='On this platform (%s) the following C++ Compiler will be checked by default: "%s"'%(build_platform,test_for_compiler),dest="check_cxx_compiler")
	for cxx_compiler in test_for_compiler.split():
		opt.tool_options('%s'%cxx_compiler,option_group=cxx_compiler_opts)

View file

@ -0,0 +1,25 @@
#! /usr/bin/env python
# encoding: utf-8
import os,sys,imp,types
import Utils,Configure,Options
def detect(conf):
	# Probe for a D compiler: gdc before dmd by default, the other way round
	# when --check-dmd-first was given.  First tool that loads wins.
	if getattr(Options.options,'check_dmd_first',None):
		test_for_compiler=['dmd','gdc']
	else:
		test_for_compiler=['gdc','dmd']
	for d_compiler in test_for_compiler:
		try:
			conf.check_tool(d_compiler)
		except:
			# any failure just means "try the next candidate"
			pass
		else:
			break
	else:
		conf.fatal('no suitable d compiler was found')
def set_options(opt):
	# Add --check-dmd-first plus the option groups of both candidate tools.
	d_compiler_opts=opt.add_option_group('D Compiler Options')
	d_compiler_opts.add_option('--check-dmd-first',action='store_true',help='checks for the gdc compiler before dmd (default is the other way round)',dest='check_dmd_first',default=False)
	for d_compiler in['gdc','dmd']:
		opt.tool_options('%s'%d_compiler,option_group=d_compiler_opts)

View file

@ -0,0 +1,532 @@
#! /usr/bin/env python
# encoding: utf-8
import sys
if sys.hexversion < 0x020400f0: from sets import Set as set
import os,imp,sys,shlex,shutil
from Utils import md5
import Build,Utils,Configure,Task,Options,Logs,TaskGen
from Constants import*
from Configure import conf,conftest
# Map from check_cfg() keyword names to pkg-config version-comparison flags.
cfg_ver={'atleast-version':'>=','exact-version':'==','max-version':'<=',}
# Minimal C fragments compiled by the configuration checks:
# SNIP1 tests that a function name links, SNIP2 that a type exists,
# SNIP3 is an empty program used for header/library/flag checks.
SNIP1='''
int main() {
void *p;
p=(void*)(%s);
return 0;
}
'''
SNIP2='''
int main() {
if ((%(type_name)s *) 0) return 0;
if (sizeof (%(type_name)s)) return 0;
}
'''
SNIP3='''
int main() {
return 0;
}
'''
def parse_flags(line,uselib,env):
	# Parse a compiler/linker flag line (typically pkg-config output) and
	# append the pieces to the per-uselib env variables (CPPPATH_X, LIB_X, ...).
	lst=shlex.split(line)
	while lst:
		x=lst.pop(0)
		st=x[:2]
		ot=x[2:]
		if st=='-I'or st=='/I':
			# include path; value may be the next token ("-I dir")
			if not ot:ot=lst.pop(0)
			env.append_unique('CPPPATH_'+uselib,ot)
		elif st=='-D':
			if not ot:ot=lst.pop(0)
			env.append_unique('CXXDEFINES_'+uselib,ot)
			env.append_unique('CCDEFINES_'+uselib,ot)
		elif st=='-l':
			if not ot:ot=lst.pop(0)
			env.append_unique('LIB_'+uselib,ot)
		elif st=='-L':
			if not ot:ot=lst.pop(0)
			env.append_unique('LIBPATH_'+uselib,ot)
		elif x=='-pthread'or x.startswith('+'):
			# flags that apply to compiling and linking alike
			env.append_unique('CCFLAGS_'+uselib,x)
			env.append_unique('CXXFLAGS_'+uselib,x)
			env.append_unique('LINKFLAGS_'+uselib,x)
		elif x=='-framework':
			# OSX framework: name is the following token
			env.append_unique('FRAMEWORK_'+uselib,lst.pop(0))
		elif x.startswith('-F'):
			env.append_unique('FRAMEWORKPATH_'+uselib,x[2:])
		elif x.startswith('-std'):
			env.append_unique('CCFLAGS_'+uselib,x)
			env.append_unique('CXXFLAGS_'+uselib,x)
			env.append_unique('LINKFLAGS_'+uselib,x)
		elif x.startswith('-Wl'):
			env.append_unique('LINKFLAGS_'+uselib,x)
		elif x.startswith('-m')or x.startswith('-f'):
			env.append_unique('CCFLAGS_'+uselib,x)
			env.append_unique('CXXFLAGS_'+uselib,x)
		# NOTE(review): unrecognized tokens are silently dropped
def ret_msg(self,f,kw):
	# Resolve a configuration message: a literal string passes through
	# unchanged, anything else is treated as a callable computing the
	# text from the keyword arguments.
	return f if isinstance(f,str) else f(kw)
def validate_cfg(self,kw):
	# Fill in the default keyword arguments for check_cfg(): the pkg-config
	# command line and the progress/success/failure messages.
	if not'path'in kw:
		kw['path']='pkg-config --errors-to-stdout --print-errors'
	if'atleast_pkgconfig_version'in kw:
		# checking the version of pkg-config itself
		if not'msg'in kw:
			kw['msg']='Checking for pkg-config version >= %s'%kw['atleast_pkgconfig_version']
		return
	if'modversion'in kw:
		return
	if'variables'in kw:
		if not'msg'in kw:
			kw['msg']='Checking for %s variables'%kw['package']
		return
	for x in cfg_ver.keys():
		# version-comparison checks ('atleast-version' etc., see cfg_ver)
		y=x.replace('-','_')
		if y in kw:
			if not'package'in kw:
				raise ValueError('%s requires a package'%x)
			if not'msg'in kw:
				kw['msg']='Checking for %s %s %s'%(kw['package'],cfg_ver[x],kw[y])
			return
	if not'msg'in kw:
		kw['msg']='Checking for %s'%(kw['package']or kw['path'])
	if not'okmsg'in kw:
		kw['okmsg']='yes'
	if not'errmsg'in kw:
		kw['errmsg']='not found'
def cmd_and_log(self,cmd,kw):
Logs.debug('runner: %s\n'%cmd)
if self.log:
self.log.write('%s\n'%cmd)
try:
p=Utils.pproc.Popen(cmd,stdout=Utils.pproc.PIPE,stderr=Utils.pproc.PIPE,shell=True)
(out,err)=p.communicate()
except OSError,e:
self.log.write('error %r'%e)
self.fatal(str(e))
out=str(out)
err=str(err)
if self.log:
self.log.write(out)
self.log.write(err)
if p.returncode:
if not kw.get('errmsg',''):
if kw.get('mandatory',False):
kw['errmsg']=out.strip()
else:
kw['errmsg']='no'
self.fatal('fail')
return out
def exec_cfg(self,kw):
	# Execute the pkg-config invocation prepared by validate_cfg() and record
	# the results (defines, version, variables, parsed flags) in the env.
	if'atleast_pkgconfig_version'in kw:
		# check the version of pkg-config itself
		cmd='%s --atleast-pkgconfig-version=%s'%(kw['path'],kw['atleast_pkgconfig_version'])
		self.cmd_and_log(cmd,kw)
		if not'okmsg'in kw:
			kw['okmsg']='yes'
		return
	for x in cfg_ver:
		# package version comparisons ('atleast-version' etc.)
		y=x.replace('-','_')
		if y in kw:
			self.cmd_and_log('%s --%s=%s %s'%(kw['path'],x,kw[y],kw['package']),kw)
			if not'okmsg'in kw:
				kw['okmsg']='yes'
			self.define(self.have_define(kw.get('uselib_store',kw['package'])),1,0)
			break
	if'modversion'in kw:
		# store the module version as a <NAME>_VERSION define and return it
		version=self.cmd_and_log('%s --modversion %s'%(kw['path'],kw['modversion']),kw).strip()
		self.define('%s_VERSION'%Utils.quote_define_name(kw.get('uselib_store',kw['modversion'])),version)
		return version
	if'variables'in kw:
		# read individual pkg-config variables into <USELIB>_<var> env keys
		env=kw.get('env',self.env)
		uselib=kw.get('uselib_store',kw['package'].upper())
		vars=Utils.to_list(kw['variables'])
		for v in vars:
			val=self.cmd_and_log('%s --variable=%s %s'%(kw['path'],v,kw['package']),kw).strip()
			var='%s_%s'%(uselib,v)
			env[var]=val
		if not'okmsg'in kw:
			kw['okmsg']='yes'
		return
	# default: query the package flags and parse them into the env
	lst=[kw['path']]
	defi=kw.get('define_variable',None)
	if not defi:
		defi=self.env.PKG_CONFIG_DEFINES or{}
	for key,val in defi.iteritems():
		lst.append('--define-variable=%s=%s'%(key,val))
	lst.append(kw.get('args',''))
	lst.append(kw['package'])
	cmd=' '.join(lst)
	ret=self.cmd_and_log(cmd,kw)
	if not'okmsg'in kw:
		kw['okmsg']='yes'
	self.define(self.have_define(kw.get('uselib_store',kw['package'])),1,0)
	parse_flags(ret,kw.get('uselib_store',kw['package'].upper()),kw.get('env',self.env))
	return ret
def check_cfg(self,*k,**kw):
	# Public entry point for pkg-config style checks: validate the keyword
	# arguments, run the check, and print/log the outcome.  Mandatory
	# failures abort the configuration.
	self.validate_cfg(kw)
	if'msg'in kw:
		self.check_message_1(kw['msg'])
	ret=None
	try:
		ret=self.exec_cfg(kw)
	except Configure.ConfigurationError,e:
		if'errmsg'in kw:
			self.check_message_2(kw['errmsg'],'YELLOW')
		if'mandatory'in kw and kw['mandatory']:
			if Logs.verbose>1:
				# with -vv let the original traceback propagate
				raise
			else:
				self.fatal('the configuration failed (see %r)'%self.log.name)
	else:
		kw['success']=ret
		if'okmsg'in kw:
			self.check_message_2(self.ret_msg(kw['okmsg'],kw))
	return ret
def validate_c(self,kw):
	# Normalize the keyword arguments of a C/C++ check() call: pick the
	# compiler, build the test code fragment (from function/type/header/
	# fragment parameters), and fill in default messages and define names.
	if not'env'in kw:
		kw['env']=self.env.copy()
	env=kw['env']
	if not'compiler'in kw:
		# default to cc, but prefer cxx when a C++ compiler is configured
		kw['compiler']='cc'
		if env['CXX_NAME']and Task.TaskBase.classes.get('cxx',None):
			kw['compiler']='cxx'
			if not self.env['CXX']:
				self.fatal('a c++ compiler is required')
		else:
			if not self.env['CC']:
				self.fatal('a c compiler is required')
	if not'type'in kw:
		kw['type']='cprogram'
	assert not(kw['type']!='cprogram'and kw.get('execute',0)),'can only execute programs'
	def to_header(dct):
		# render header_name entries as '#include <...>' lines
		if'header_name'in dct:
			dct=Utils.to_list(dct['header_name'])
			return''.join(['#include <%s>\n'%x for x in dct])
		return''
	if not'compile_mode'in kw:
		kw['compile_mode']=(kw['compiler']=='cxx')and'cxx'or'cc'
	if not'compile_filename'in kw:
		# test.c for C, test.cpp for C++
		kw['compile_filename']='test.c'+((kw['compile_mode']=='cxx')and'pp'or'')
	if'framework_name'in kw:
		# OSX framework check: synthesize the header name and messages
		try:TaskGen.task_gen.create_task_macapp
		except AttributeError:self.fatal('frameworks require the osx tool')
		fwkname=kw['framework_name']
		if not'uselib_store'in kw:
			kw['uselib_store']=fwkname.upper()
		if not kw.get('no_header',False):
			if not'header_name'in kw:
				kw['header_name']=[]
			fwk='%s/%s.h'%(fwkname,fwkname)
			if kw.get('remove_dot_h',None):
				fwk=fwk[:-2]
			kw['header_name']=Utils.to_list(kw['header_name'])+[fwk]
		kw['msg']='Checking for framework %s'%fwkname
		kw['framework']=fwkname
	if'function_name'in kw:
		# function check: headers + SNIP1 taking the function's address
		fu=kw['function_name']
		if not'msg'in kw:
			kw['msg']='Checking for function %s'%fu
		kw['code']=to_header(kw)+SNIP1%fu
		if not'uselib_store'in kw:
			kw['uselib_store']=fu.upper()
		if not'define_name'in kw:
			kw['define_name']=self.have_define(fu)
	elif'type_name'in kw:
		# type check: headers + SNIP2 using the type
		tu=kw['type_name']
		if not'msg'in kw:
			kw['msg']='Checking for type %s'%tu
		if not'header_name'in kw:
			kw['header_name']='stdint.h'
		kw['code']=to_header(kw)+SNIP2%{'type_name':tu}
		if not'define_name'in kw:
			kw['define_name']=self.have_define(tu.upper())
	elif'header_name'in kw:
		# header check: headers + empty program
		if not'msg'in kw:
			kw['msg']='Checking for header %s'%kw['header_name']
		l=Utils.to_list(kw['header_name'])
		assert len(l)>0,'list of headers in header_name is empty'
		kw['code']=to_header(kw)+SNIP3
		if not'uselib_store'in kw:
			kw['uselib_store']=l[0].upper()
		if not'define_name'in kw:
			kw['define_name']=self.have_define(l[0])
	if'lib'in kw:
		if not'msg'in kw:
			kw['msg']='Checking for library %s'%kw['lib']
		if not'uselib_store'in kw:
			kw['uselib_store']=kw['lib'].upper()
	if'staticlib'in kw:
		if not'msg'in kw:
			kw['msg']='Checking for static library %s'%kw['staticlib']
		if not'uselib_store'in kw:
			kw['uselib_store']=kw['staticlib'].upper()
	if'fragment'in kw:
		# verbatim user-supplied test code overrides the generated snippet
		kw['code']=kw['fragment']
		if not'msg'in kw:
			kw['msg']='Checking for custom code'
		if not'errmsg'in kw:
			kw['errmsg']='no'
	for(flagsname,flagstype)in[('cxxflags','compiler'),('cflags','compiler'),('linkflags','linker')]:
		if flagsname in kw:
			if not'msg'in kw:
				kw['msg']='Checking for %s flags %s'%(flagstype,kw[flagsname])
			if not'errmsg'in kw:
				kw['errmsg']='no'
	if not'execute'in kw:
		kw['execute']=False
	if not'errmsg'in kw:
		kw['errmsg']='not found'
	if not'okmsg'in kw:
		kw['okmsg']='yes'
	if not'code'in kw:
		kw['code']=SNIP3
	if not kw.get('success'):kw['success']=None
	assert'msg'in kw,'invalid parameters, read http://freehackers.org/~tnagy/wafbook/single.html#config_helpers_c'
def post_check(self,*k,**kw):
	# After a check() ran: derive success, write the configuration define,
	# and copy any check-supplied flags into the uselib env variables.
	is_success=False
	if kw['execute']:
		# execute mode: success means the program ran and produced a result
		if kw['success']is not None:
			is_success=True
	else:
		# compile-only mode: run_c_code returns 0 on success
		is_success=(kw['success']==0)
	if'define_name'in kw:
		if'header_name'in kw or'function_name'in kw or'type_name'in kw or'fragment'in kw:
			if kw['execute']:
				key=kw['success']
				if isinstance(key,str):
					if key:
						# non-empty program output: store it as the define value
						self.define(kw['define_name'],key,quote=kw.get('quote',1))
					else:
						self.define_cond(kw['define_name'],True)
				else:
					self.define_cond(kw['define_name'],False)
			else:
				self.define_cond(kw['define_name'],is_success)
	if is_success and'uselib_store'in kw:
		# propagate cflags/includes/defines/... from kw into <VAR>_<uselib>
		import cc,cxx
		for k in set(cc.g_cc_flag_vars).union(cxx.g_cxx_flag_vars):
			lk=k.lower()
			# kw uses friendlier names for a few variables
			if k=='CPPPATH':lk='includes'
			if k=='CXXDEFINES':lk='defines'
			if k=='CCDEFINES':lk='defines'
			if lk in kw:
				val=kw[lk]
				if isinstance(val,str):
					val=val.rstrip(os.path.sep)
				self.env.append_unique(k+'_'+kw['uselib_store'],val)
def check(self,*k,**kw):
	# Public entry point for compile/link/run checks: normalize arguments,
	# build and (optionally) run the test code, report, and record results.
	self.validate_c(kw)
	self.check_message_1(kw['msg'])
	ret=None
	try:
		ret=self.run_c_code(*k,**kw)
	except Configure.ConfigurationError,e:
		self.check_message_2(kw['errmsg'],'YELLOW')
		if'mandatory'in kw and kw['mandatory']:
			if Logs.verbose>1:
				# with -vv let the original traceback propagate
				raise
			else:
				self.fatal('the configuration failed (see %r)'%self.log.name)
	else:
		kw['success']=ret
		self.check_message_2(self.ret_msg(kw['okmsg'],kw))
	self.post_check(*k,**kw)
	if not kw.get('execute',False):
		# compile-only checks return a boolean (ret is 0 on success)
		return ret==0
	return ret
def run_c_code(self,*k,**kw):
	# Build the test fragment in a throwaway .conf_check_N folder using a
	# private BuildContext; returns 0 on successful compilation, the program
	# stdout in execute mode, and calls self.fatal() on failure.
	test_f_name=kw['compile_filename']
	k=0
	# find an unused .conf_check_N directory (removing leftovers on the way)
	while k<10000:
		dir=os.path.join(self.blddir,'.conf_check_%d'%k)
		try:
			shutil.rmtree(dir)
		except OSError:
			pass
		try:
			os.stat(dir)
		except OSError:
			break
		k+=1
	try:
		os.makedirs(dir)
	except:
		self.fatal('cannot create a configuration test folder %r'%dir)
	try:
		os.stat(dir)
	except:
		self.fatal('cannot use the configuration test folder %r'%dir)
	bdir=os.path.join(dir,'testbuild')
	if not os.path.exists(bdir):
		os.makedirs(bdir)
	env=kw['env']
	# write the test source file
	dest=open(os.path.join(dir,test_f_name),'w')
	dest.write(kw['code'])
	dest.close()
	back=os.path.abspath('.')
	# set up a throwaway build context over the scratch folder
	bld=Build.BuildContext()
	bld.log=self.log
	bld.all_envs.update(self.all_envs)
	bld.all_envs['default']=env
	bld.lst_variants=bld.all_envs.keys()
	bld.load_dirs(dir,bdir)
	os.chdir(dir)
	bld.rescan(bld.srcnode)
	if not'features'in kw:
		kw['features']=[kw['compile_mode'],kw['type']]
	o=bld(features=kw['features'],source=test_f_name,target='testprog')
	# forward all check parameters to the task generator
	for k,v in kw.iteritems():
		setattr(o,k,v)
	self.log.write("==>\n%s\n<==\n"%kw['code'])
	try:
		bld.compile()
	except Utils.WafError:
		ret=Utils.ex_stack()
	else:
		ret=0
	os.chdir(back)
	if ret:
		self.log.write('command returned %r'%ret)
		self.fatal(str(ret))
	if kw['execute']:
		# run the freshly built program and capture its output
		lastprog=o.link_task.outputs[0].abspath(env)
		args=Utils.to_list(kw.get('exec_args',[]))
		proc=Utils.pproc.Popen([lastprog]+args,stdout=Utils.pproc.PIPE,stderr=Utils.pproc.PIPE)
		(out,err)=proc.communicate()
		w=self.log.write
		w(str(out))
		w('\n')
		w(str(err))
		w('\n')
		w('returncode %r'%proc.returncode)
		w('\n')
		if proc.returncode:
			self.fatal(Utils.ex_stack())
		ret=out
	return ret
def check_cxx(self,*k,**kw):
	# Convenience wrapper: run a configuration check with the C++
	# compiler forced on, delegating everything else to check().
	params=dict(kw,compiler='cxx')
	return self.check(*k,**params)
def check_cc(self,*k,**kw):
	# Convenience wrapper: run a configuration check with the C
	# compiler forced on, delegating everything else to check().
	params=dict(kw,compiler='cc')
	return self.check(*k,**params)
def define(self,define,value,quote=1):
	# Record a config.h define.  String values are C-quoted unless quote=0;
	# ints are stored verbatim.  The value is mirrored into env[define] so
	# other checks can read it back.
	assert define and isinstance(define,str)
	tbl=self.env[DEFINES]or Utils.ordered_dict()
	if isinstance(value,str):
		if quote:
			# escape embedded double quotes and wrap the value in quotes
			tbl[define]='"%s"'%repr('"'+value)[2:-1].replace('"','\\"')
		else:
			tbl[define]=value
	elif isinstance(value,int):
		tbl[define]=value
	else:
		raise TypeError('define %r -> %r must be a string or an int'%(define,value))
	self.env[DEFINES]=tbl
	self.env[define]=value
def undefine(self,define):
	# Mark a define as explicitly undefined (rendered as '/* #undef X */').
	assert define and isinstance(define,str)
	tbl=self.env[DEFINES]or Utils.ordered_dict()
	value=UNDEFINED
	tbl[define]=value
	self.env[DEFINES]=tbl
	self.env[define]=value
def define_cond(self,name,value):
	# Boolean configuration define: a truthy value becomes `#define name 1`,
	# a falsy one records the name as undefined.
	if not value:
		self.undefine(name)
	else:
		self.define(name,1)
def is_defined(self,key):
	# True when `key` was define()d and not subsequently undefine()d.
	defines=self.env[DEFINES]
	if not defines:
		return False
	try:
		value=defines[key]
	except KeyError:
		return False
	else:
		return value!=UNDEFINED
def get_define(self,define):
	# Return the stored value of a define, or None when absent.
	try:return self.env[DEFINES][define]
	except KeyError:return None
def have_define(self,name):
	# Build the canonical HAVE_<NAME> macro name (pattern overridable
	# through a HAVE_PAT attribute on the context).
	return self.__dict__.get('HAVE_PAT','HAVE_%s')%Utils.quote_define_name(name)
def write_config_header(self,configfile='',env='',guard='',top=False):
	# Write the accumulated defines to a config header (default WAF_CONFIG_H)
	# in the build directory, wrapped in an include guard, and register the
	# file in CFG_FILES.
	if not configfile:configfile=WAF_CONFIG_H
	waf_guard=guard or'_%s_WAF'%Utils.quote_define_name(configfile)
	if not env:env=self.env
	if top:
		# top=True writes at the root of the build dir
		diff=''
	else:
		diff=Utils.diff_path(self.srcdir,self.curdir)
	full=os.sep.join([self.blddir,env.variant(),diff,configfile])
	full=os.path.normpath(full)
	(dir,base)=os.path.split(full)
	try:os.makedirs(dir)
	except:pass
	dest=open(full,'w')
	dest.write('/* Configuration header created by Waf - do not edit */\n')
	dest.write('#ifndef %s\n#define %s\n\n'%(waf_guard,waf_guard))
	dest.write(self.get_config_header())
	env.append_unique(CFG_FILES,os.path.join(diff,configfile))
	dest.write('\n#endif /* %s */\n'%waf_guard)
	dest.close()
def get_config_header(self):
	# Render the defines table as '#define' / '/* #undef */' lines.
	config_header=[]
	tbl=self.env[DEFINES]or Utils.ordered_dict()
	for key in tbl.allkeys:
		value=tbl[key]
		if value is None:
			config_header.append('#define %s'%key)
		elif value is UNDEFINED:
			config_header.append('/* #undef %s */'%key)
		else:
			config_header.append('#define %s %s'%(key,value))
	return"\n".join(config_header)
def find_cpp(conf):
	# Locate a C preprocessor: env CPP, then $CPP, then a `cpp` program,
	# falling back to the C or C++ compiler.
	v=conf.env
	cpp=None
	if v['CPP']:cpp=v['CPP']
	elif'CPP'in conf.environ:cpp=conf.environ['CPP']
	if not cpp:cpp=conf.find_program('cpp',var='CPP')
	if not cpp:cpp=v['CC']
	if not cpp:cpp=v['CXX']
	v['CPP']=cpp
def cc_add_flags(conf):
	# Import CFLAGS/CPPFLAGS from the OS environment.
	conf.add_os_flags('CFLAGS','CCFLAGS')
	conf.add_os_flags('CPPFLAGS')
def cxx_add_flags(conf):
	# Import CXXFLAGS/CPPFLAGS from the OS environment.
	conf.add_os_flags('CXXFLAGS')
	conf.add_os_flags('CPPFLAGS')
def link_add_flags(conf):
	# Import LINKFLAGS/LDFLAGS from the OS environment.
	conf.add_os_flags('LINKFLAGS')
	conf.add_os_flags('LDFLAGS','LINKFLAGS')
def cc_load_tools(conf):
	conf.check_tool('cc')
def cxx_load_tools(conf):
	conf.check_tool('cxx')
# Attach the helpers above as methods of the Configure context (conf) and
# register the conftest helpers -- expanded decorator applications, the style
# used throughout waf's generated modules.
conf(ret_msg)
conf(validate_cfg)
conf(cmd_and_log)
conf(exec_cfg)
conf(check_cfg)
conf(validate_c)
conf(post_check)
conf(check)
conf(run_c_code)
conf(check_cxx)
conf(check_cc)
conf(define)
conf(undefine)
conf(define_cond)
conf(is_defined)
conf(get_define)
conf(have_define)
conf(write_config_header)
conf(get_config_header)
conftest(find_cpp)
conftest(cc_add_flags)
conftest(cxx_add_flags)
conftest(link_add_flags)
conftest(cc_load_tools)
conftest(cxx_load_tools)

View file

@ -0,0 +1,44 @@
#! /usr/bin/env python
# encoding: utf-8
import TaskGen,Utils,Task,Options
from Logs import error
from TaskGen import before,after,taskgen,feature
# Per-uselib variables recognized by the C# support.
flag_vars=['FLAGS','ASSEMBLIES']
def init_cs(self):
	# Give 'cs' task generators their default attributes.
	Utils.def_attrs(self,flags='',assemblies='',resources='',uselib='')
def apply_uselib_cs(self):
	# Fold <VAR>_<uselib> values into the plain variables for each uselib.
	if not self.uselib:
		return
	global flag_vars
	for var in self.to_list(self.uselib):
		# NOTE(review): reads self.flag_vars (an attribute), not the module
		# level flag_vars declared global above -- confirm which is intended
		for v in self.flag_vars:
			val=self.env[v+'_'+var]
			if val:self.env.append_value(v,val)
def apply_cs(self):
	# Replace the default apply_core with a single 'mcs' compile task built
	# from the assemblies/resources/flags attributes.
	try:self.meths.remove('apply_core')
	except ValueError:pass
	for i in self.to_list(self.assemblies)+self.env['ASSEMBLIES']:
		self.env.append_unique('_ASSEMBLIES','/r:'+i)
	for i in self.to_list(self.resources):
		self.env.append_unique('_RESOURCES','/resource:'+i)
	# 'exe' by default; a 'type' attribute may select library etc.
	self.env['_TYPE']=getattr(self,'type','exe')
	self.env.append_unique('_FLAGS',self.to_list(self.flags))
	self.env.append_unique('_FLAGS',self.env.FLAGS)
	nodes=[self.path.find_resource(i)for i in self.to_list(self.source)]
	self.create_task('mcs',nodes,self.path.find_or_declare(self.target))
Task.simple_task_type('mcs','${MCS} ${SRC} /target:${_TYPE} /out:${TGT} ${_FLAGS} ${_ASSEMBLIES} ${_RESOURCES}',color='YELLOW')
def detect(conf):
	# Honour --with-csc-binary, otherwise search for gmcs/mcs.
	csc=getattr(Options.options,'cscbinary',None)
	if csc:
		conf.env.MCS=csc
	conf.find_program(['gmcs','mcs'],var='MCS')
def set_options(opt):
	opt.add_option('--with-csc-binary',type='string',dest='cscbinary')
# feature registrations (expanded decorator applications)
feature('cs')(init_cs)
feature('cs')(apply_uselib_cs)
after('init_cs')(apply_uselib_cs)
feature('cs')(apply_cs)
after('apply_uselib_cs')(apply_cs)
before('apply_core')(apply_cs)

View file

@ -0,0 +1,71 @@
#! /usr/bin/env python
# encoding: utf-8
import sys
if sys.hexversion < 0x020400f0: from sets import Set as set
import TaskGen,Task,Utils
from Logs import debug
import ccroot
from TaskGen import feature,before,extension,after
# Per-uselib variables and source extensions handled by the C++ support.
g_cxx_flag_vars=['CXXDEPS','FRAMEWORK','FRAMEWORKPATH','STATICLIB','LIB','LIBPATH','LINKFLAGS','RPATH','CXXFLAGS','CCFLAGS','CPPPATH','CPPFLAGS','CXXDEFINES']
EXT_CXX=['.cpp','.cc','.cxx','.C','.c++']
g_cxx_type_vars=['CXXFLAGS','LINKFLAGS']
class cxx_taskgen(ccroot.ccroot_abstract):
	# Task generator for C++ targets; all behavior comes from ccroot plus
	# the feature methods below.
	pass
def init_cxx(self):
	# Prepare a 'cxx' task generator: route .c files through the C++ hook
	# (unless 'cc' is also active) and extend the flag/type variable sets.
	if not'cc'in self.features:
		self.mappings['.c']=TaskGen.task_gen.mappings['.cxx']
	self.p_flag_vars=set(self.p_flag_vars).union(g_cxx_flag_vars)
	self.p_type_vars=set(self.p_type_vars).union(g_cxx_type_vars)
	if not self.env['CXX_NAME']:
		raise Utils.WafError("At least one compiler (g++, ..) must be selected")
def apply_obj_vars_cxx(self):
	# Turn the include paths into _CXXINCFLAGS using the CPPPATH_ST pattern.
	env=self.env
	app=env.append_unique
	cxxpath_st=env['CPPPATH_ST']
	for i in env['INC_PATHS']:
		# both the build and source variants of each node
		app('_CXXINCFLAGS',cxxpath_st%i.bldpath(env))
		app('_CXXINCFLAGS',cxxpath_st%i.srcpath(env))
	for i in env['CPPPATH']:
		app('_CXXINCFLAGS',cxxpath_st%i)
def apply_defines_cxx(self):
	# Collect preprocessor defines (target attribute, env, uselib) without
	# duplicates and render them into _CXXDEFFLAGS and DEFLINES.
	self.defines=getattr(self,'defines',[])
	lst=self.to_list(self.defines)+self.to_list(self.env['CXXDEFINES'])
	milst=[]
	for defi in lst:
		if not defi in milst:
			milst.append(defi)
	libs=self.to_list(self.uselib)
	for l in libs:
		val=self.env['CXXDEFINES_'+l]
		if val:milst+=self.to_list(val)
	self.env['DEFLINES']=["%s %s"%(x[0],Utils.trimquotes('='.join(x[1:])))for x in[y.split('=')for y in milst]]
	y=self.env['CXXDEFINES_ST']
	self.env.append_unique('_CXXDEFFLAGS',[y%x for x in milst])
def cxx_hook(self,node):
	# Create the compile task for one C++ source file (.cpp -> _N.o).
	if getattr(self,'obj_ext',None):
		obj_ext=self.obj_ext
	else:
		obj_ext='_%d.o'%self.idx
	task=self.create_task('cxx',node,node.change_ext(obj_ext))
	try:
		self.compiled_tasks.append(task)
	except AttributeError:
		raise Utils.WafError('Have you forgotten to set the feature "cxx" on %s?'%str(self))
	return task
# command-line templates and task classes for compiling and linking C++
cxx_str='${CXX} ${CXXFLAGS} ${CPPFLAGS} ${_CXXINCFLAGS} ${_CXXDEFFLAGS} ${CXX_SRC_F}${SRC} ${CXX_TGT_F}${TGT}'
cls=Task.simple_task_type('cxx',cxx_str,color='GREEN',ext_out='.o',ext_in='.cxx',shell=False)
cls.scan=ccroot.scan
cls.vars.append('CXXDEPS')
link_str='${LINK_CXX} ${CXXLNK_SRC_F}${SRC} ${CXXLNK_TGT_F}${TGT[0].abspath(env)} ${LINKFLAGS}'
cls=Task.simple_task_type('cxx_link',link_str,color='YELLOW',ext_in='.o',ext_out='.bin',shell=False)
cls.maxjobs=1
cls.install=Utils.nada
# feature registrations (expanded decorator applications)
feature('cxx')(init_cxx)
before('apply_type_vars')(init_cxx)
after('default_cc')(init_cxx)
feature('cxx')(apply_obj_vars_cxx)
after('apply_incpaths')(apply_obj_vars_cxx)
feature('cxx')(apply_defines_cxx)
after('apply_lib_vars')(apply_defines_cxx)
extension(EXT_CXX)(cxx_hook)

View file

@ -0,0 +1,368 @@
#! /usr/bin/env python
# encoding: utf-8
import sys
if sys.hexversion < 0x020400f0: from sets import Set as set
import os,sys,re,optparse
import ccroot
import TaskGen,Utils,Task,Configure,Logs,Build
from Logs import debug,error
from TaskGen import taskgen,feature,after,before,extension
from Configure import conftest
# D source extensions and the feature methods bound to the 'd' feature.
EXT_D=['.d','.di','.D']
D_METHS=['apply_core','apply_vnum','apply_objdeps']
# D program used at configure time to detect the runtime library in use;
# it prints "phobos2", "tango" or "phobos1" (see check_dlibrary below).
DLIB="""
version(D_Version2) {
import std.stdio;
int main() {
writefln("phobos2");
return 0;
}
} else {
version(Tango) {
import tango.stdc.stdio;
int main() {
printf("tango");
return 0;
}
} else {
import std.stdio;
int main() {
writefln("phobos1");
return 0;
}
}
}
"""
def filter_comments(filename):
	# Return the contents of a D source file as a list of fragments with
	# comments stripped (string literals are preserved verbatim).  Handles
	# the three D comment forms: nested /+ +/, block /* */ and line //.
	txt=Utils.readf(filename)
	i=0
	buf=[]
	max=len(txt)
	begin=0
	while i<max:
		c=txt[i]
		if c=='"'or c=="'":
			# string literal: copy it through, honouring backslash escapes
			buf.append(txt[begin:i])
			delim=c
			i+=1
			while i<max:
				c=txt[i]
				if c==delim:break
				elif c=='\\':
					# skip the escaped character
					i+=1
				i+=1
			i+=1
			begin=i
		elif c=='/':
			# potential comment start
			buf.append(txt[begin:i])
			i+=1
			if i==max:break
			c=txt[i]
			if c=='+':
				# /+ ... +/ comments nest in D
				i+=1
				nesting=1
				c=None
				while i<max:
					prev=c
					c=txt[i]
					if prev=='/'and c=='+':
						nesting+=1
						c=None
					elif prev=='+'and c=='/':
						nesting-=1
						if nesting==0:break
						c=None
					i+=1
			elif c=='*':
				# classic /* ... */ block comment
				i+=1
				c=None
				while i<max:
					prev=c
					c=txt[i]
					if prev=='*'and c=='/':break
					i+=1
			elif c=='/':
				# line comment: skip to end of line
				i+=1
				while i<max and txt[i]!='\n':
					i+=1
			else:
				# plain '/' operator, not a comment: back up and continue
				begin=i-1
				continue
			i+=1
			begin=i
			# a comment separates tokens: replace it with one space
			buf.append(' ')
		else:
			i+=1
	buf.append(txt[begin:])
	return buf
class d_parser(object):
	# Scans D sources for `import` statements to compute task dependencies.
	def __init__(self,env,incpaths):
		self.allnames=[]
		# regexes for module declarations, imports, bindings and aliases
		self.re_module=re.compile("module\s+([^;]+)")
		self.re_import=re.compile("import\s+([^;]+)")
		self.re_import_bindings=re.compile("([^:]+):(.*)")
		self.re_import_alias=re.compile("[^=]+=(.+)")
		self.env=env
		self.nodes=[]	# resolved dependency nodes
		self.names=[]	# unresolved module names
		self.incpaths=incpaths
	def tryfind(self,filename):
		# Resolve a module name (dots -> path separators) against the
		# include paths; unresolved names are recorded in self.names.
		found=0
		for n in self.incpaths:
			found=n.find_resource(filename.replace('.','/')+'.d')
			if found:
				self.nodes.append(found)
				self.waiting.append(found)
				break
		if not found:
			if not filename in self.names:
				self.names.append(filename)
	def get_strings(self,code):
		# Extract the list of imported module names from comment-free code,
		# unwrapping selective-import bindings ("mod : sym") and aliases
		# ("alias = mod").
		self.module=''
		lst=[]
		mod_name=self.re_module.search(code)
		if mod_name:
			self.module=re.sub('\s+','',mod_name.group(1))
		import_iterator=self.re_import.finditer(code)
		if import_iterator:
			for import_match in import_iterator:
				import_match_str=re.sub('\s+','',import_match.group(1))
				bindings_match=self.re_import_bindings.match(import_match_str)
				if bindings_match:
					# selective import: keep only the module part
					import_match_str=bindings_match.group(1)
				matches=import_match_str.split(',')
				for match in matches:
					alias_match=self.re_import_alias.match(match)
					if alias_match:
						match=alias_match.group(1)
					lst.append(match)
		return lst
	def start(self,node):
		# Breadth-first traversal of the import graph starting at `node`.
		self.waiting=[node]
		while self.waiting:
			nd=self.waiting.pop(0)
			self.iter(nd)
	def iter(self,node):
		# Process one file: strip comments, collect imports, resolve each
		# name not seen before.
		path=node.abspath(self.env)
		code="".join(filter_comments(path))
		names=self.get_strings(code)
		for x in names:
			if x in self.allnames:continue
			self.allnames.append(x)
			self.tryfind(x)
def scan(self):
	# Task scanner entry point (assigned to the 'd' task class below):
	# returns (dependency nodes, unresolved names) for the task input.
	env=self.env
	gruik=d_parser(env,env['INC_PATHS'])
	gruik.start(self.inputs[0])
	if Logs.verbose:
		debug('deps: nodes found for %s: %s %s'%(str(self.inputs[0]),str(gruik.nodes),str(gruik.names)))
	return(gruik.nodes,gruik.names)
def get_target_name(self):
	# Map the target name through the pattern for its type
	# (D_program_PATTERN, D_shlib_PATTERN or D_staticlib_PATTERN).
	v=self.env
	tp='program'
	for x in self.features:
		if x in['dshlib','dstaticlib']:
			tp=x.lstrip('d')
	return v['D_%s_PATTERN'%tp]%self.target
# Default attributes for D task generators.
d_params={'dflags':'','importpaths':'','libs':'','libpaths':'','generate_headers':False,}
def init_d(self):
	# NOTE(review): shadowed by the second init_d below; the later
	# definition is the one the feature registrations bind.
	for x in d_params:
		setattr(self,x,getattr(self,x,d_params[x]))
class d_taskgen(TaskGen.task_gen):
	# Task generator for D targets; a positional type argument ('program',
	# 'shlib', 'staticlib') is turned into the matching 'd*' feature.
	def __init__(self,*k,**kw):
		TaskGen.task_gen.__init__(self,*k,**kw)
		if len(k)>1:
			self.features.append('d'+k[1])
TaskGen.bind_feature('d',D_METHS)
def init_d(self):
	# Give 'd' task generators their default attributes.
	Utils.def_attrs(self,dflags='',importpaths='',libs='',libpaths='',uselib='',uselib_local='',generate_headers=False,compiled_tasks=[],add_objects=[],link_task=None)
def apply_d_libs(self):
	# Process uselib_local: walk the local-library dependency graph, add
	# link flags/paths for each library target, order the link tasks, and
	# propagate uselib names and exported include dirs.
	env=self.env
	self.uselib=self.to_list(self.uselib)
	names=self.to_list(self.uselib_local)
	seen=set([])
	tmp=Utils.deque(names)
	while tmp:
		lib_name=tmp.popleft()
		if lib_name in seen:
			continue
		y=self.name_to_obj(lib_name)
		if not y:
			raise Utils.WafError('object %r was not found in uselib_local (required by %r)'%(lib_name,self.name))
		y.post()
		seen.add(lib_name)
		if getattr(y,'uselib_local',None):
			# recurse into the library's own local dependencies
			lst=y.to_list(y.uselib_local)
			if'dshlib'in y.features or'dprogram'in y.features:
				# static libs of a shared lib/program are already linked in
				lst=[x for x in lst if not'dstaticlib'in self.name_to_obj(x).features]
			tmp.extend(lst)
		if getattr(y,'link_task',None):
			link_name=y.target[y.target.rfind(os.sep)+1:]
			if'dstaticlib'in y.features or'dshlib'in y.features:
				env.append_unique('DLINKFLAGS',env.DLIB_ST%link_name)
				env.append_unique('DLINKFLAGS',env.DLIBPATH_ST%y.link_task.outputs[0].parent.bldpath(env))
			# relink when the library is rebuilt
			self.link_task.set_run_after(y.link_task)
			dep_nodes=getattr(self.link_task,'dep_nodes',[])
			self.link_task.dep_nodes=dep_nodes+y.link_task.outputs
		for v in self.to_list(y.uselib):
			if not v in self.uselib:
				self.uselib.insert(0,v)
		if getattr(y,'export_incdirs',None):
			for x in self.to_list(y.export_incdirs):
				node=y.path.find_dir(x)
				if not node:
					raise Utils.WafError('object %r: invalid folder %r in export_incdirs'%(y.target,x))
				self.env.append_unique('INC_PATHS',node)
def apply_d_link(self):
	# Create the link task ('static_link' for static libs, 'd_link'
	# otherwise) over all compiled object files.
	link=getattr(self,'link',None)
	if not link:
		if'dstaticlib'in self.features:link='static_link'
		else:link='d_link'
	outputs=[t.outputs[0]for t in self.compiled_tasks]
	self.link_task=self.create_task(link,outputs,self.path.find_or_declare(get_target_name(self)))
def apply_d_vars(self):
	# Translate the uselib/importpaths/libpaths/libs/dflags settings into
	# DFLAGS, _DIMPORTFLAGS and DLINKFLAGS for the D compiler command lines.
	env=self.env
	dpath_st=env['DPATH_ST']
	lib_st=env['DLIB_ST']
	libpath_st=env['DLIBPATH_ST']
	importpaths=self.to_list(self.importpaths)
	libpaths=[]
	libs=[]
	uselib=self.to_list(self.uselib)
	for i in uselib:
		if env['DFLAGS_'+i]:
			env.append_unique('DFLAGS',env['DFLAGS_'+i])
	for x in self.features:
		if not x in['dprogram','dstaticlib','dshlib']:
			continue
		# bugfix: the result of lstrip() was previously discarded
		# ("x.lstrip('d')" on a line by itself -- str.lstrip returns a new
		# string), so the lookup below used keys like 'D_dshlib_DFLAGS'
		# instead of 'D_shlib_DFLAGS' and the per-target-type flags set by
		# the compiler tools (e.g. -fPIC / -relocation-model=pic for shared
		# libraries) were never applied.
		x=x.lstrip('d')
		d_shlib_dflags=env['D_'+x+'_DFLAGS']
		if d_shlib_dflags:
			env.append_unique('DFLAGS',d_shlib_dflags)
	for i in uselib:
		# merge DPATH_<uselib> entries into the import paths (deduplicated)
		if env['DPATH_'+i]:
			for entry in self.to_list(env['DPATH_'+i]):
				if not entry in importpaths:
					importpaths.append(entry)
	for path in importpaths:
		# absolute paths go straight to the command line; relative ones are
		# resolved against the source tree and emitted for src and bld dirs
		if os.path.isabs(path):
			env.append_unique('_DIMPORTFLAGS',dpath_st%path)
		else:
			node=self.path.find_dir(path)
			self.env.append_unique('INC_PATHS',node)
			env.append_unique('_DIMPORTFLAGS',dpath_st%node.srcpath(env))
			env.append_unique('_DIMPORTFLAGS',dpath_st%node.bldpath(env))
	for i in uselib:
		# merge LIBPATH_<uselib> entries (deduplicated)
		if env['LIBPATH_'+i]:
			for entry in self.to_list(env['LIBPATH_'+i]):
				if not entry in libpaths:
					libpaths.append(entry)
	libpaths=self.to_list(self.libpaths)+libpaths
	for path in libpaths:
		if not os.path.isabs(path):
			node=self.path.find_resource(path)
			if not node:
				raise Utils.WafError('could not find libpath %r from %r'%(path,self))
			path=node.abspath(self.env)
		env.append_unique('DLINKFLAGS',libpath_st%path)
	for i in uselib:
		# merge LIB_<uselib> entries (deduplicated)
		if env['LIB_'+i]:
			for entry in self.to_list(env['LIB_'+i]):
				if not entry in libs:
					libs.append(entry)
	libs.extend(self.to_list(self.libs))
	for flag in self.to_list(self.dflags):
		env.append_unique('DFLAGS',flag)
	for lib in libs:
		env.append_unique('DLINKFLAGS',lib_st%lib)
	for i in uselib:
		dlinkflags=env['DLINKFLAGS_'+i]
		if dlinkflags:
			for linkflag in dlinkflags:
				env.append_unique('DLINKFLAGS',linkflag)
def add_shlib_d_flags(self):
	# Shared libraries additionally link with D_shlib_LINKFLAGS.
	for linkflag in self.env['D_shlib_LINKFLAGS']:
		self.env.append_unique('DLINKFLAGS',linkflag)
def d_hook(self,node):
	# Create the compile task for one D source file; when generate_headers
	# is set, the .di interface file is produced in the same task.
	task=self.create_task(self.generate_headers and'd_with_header'or'd')
	try:obj_ext=self.obj_ext
	except AttributeError:obj_ext='_%d.o'%self.idx
	task.inputs=[node]
	task.outputs=[node.change_ext(obj_ext)]
	self.compiled_tasks.append(task)
	if self.generate_headers:
		header_node=node.change_ext(self.env['DHEADER_ext'])
		task.outputs+=[header_node]
# command-line templates for compiling (with/without header) and linking
d_str='${D_COMPILER} ${DFLAGS} ${_DIMPORTFLAGS} ${D_SRC_F}${SRC} ${D_TGT_F}${TGT}'
d_with_header_str='${D_COMPILER} ${DFLAGS} ${_DIMPORTFLAGS} \
${D_HDR_F}${TGT[1].bldpath(env)} \
${D_SRC_F}${SRC} \
${D_TGT_F}${TGT[0].bldpath(env)}'
link_str='${D_LINKER} ${DLNK_SRC_F}${SRC} ${DLNK_TGT_F}${TGT} ${DLINKFLAGS}'
def override_exec(cls):
	# Wrap exec_command so that dmd's '-of' flag is glued to the output
	# path (dmd expects '-of<file>' as a single argument, not two).
	old_exec=cls.exec_command
	def exec_command(self,*k,**kw):
		if isinstance(k[0],list):
			lst=k[0]
			for i in xrange(len(lst)):
				if lst[i]=='-of':
					del lst[i]
					lst[i]='-of'+lst[i]
					break
		return old_exec(self,*k,**kw)
	cls.exec_command=exec_command
# register the task classes; the 'd' task reuses the import scanner above
cls=Task.simple_task_type('d',d_str,'GREEN',before='static_link d_link',shell=False)
cls.scan=scan
override_exec(cls)
cls=Task.simple_task_type('d_with_header',d_with_header_str,'GREEN',before='static_link d_link',shell=False)
override_exec(cls)
cls=Task.simple_task_type('d_link',link_str,color='YELLOW',shell=False)
override_exec(cls)
def generate_header(self,filename,install_path):
	# Queue a D interface (.di) generation for `filename`; the work itself
	# happens in process_header during task generation.
	if not hasattr(self,'header_lst'):self.header_lst=[]
	self.meths.append('process_header')
	self.header_lst.append([filename,install_path])
def process_header(self):
	# Create one 'd_header' task per queued [filename, install_path] pair.
	env=self.env
	for i in getattr(self,'header_lst',[]):
		node=self.path.find_resource(i[0])
		if not node:
			raise Utils.WafError('file not found on d obj '+i[0])
		task=self.create_task('d_header')
		task.set_inputs(node)
		task.set_outputs(node.change_ext('.di'))
d_header_str='${D_COMPILER} ${D_HEADER} ${SRC}'
Task.simple_task_type('d_header',d_header_str,color='BLUE',shell=False)
def d_platform_flags(conf):
	# Set the output-file naming patterns for the target binary format
	# ('pe' -> .exe/.dll, anything else -> ELF-style names).
	v=conf.env
	binfmt=v.DEST_BINFMT or Utils.unversioned_sys_platform_to_binary_format(v.DEST_OS or Utils.unversioned_sys_platform())
	if binfmt=='pe':
		v['D_program_PATTERN']='%s.exe'
		v['D_shlib_PATTERN']='lib%s.dll'
		v['D_staticlib_PATTERN']='lib%s.a'
	else:
		v['D_program_PATTERN']='%s'
		v['D_shlib_PATTERN']='lib%s.so'
		v['D_staticlib_PATTERN']='lib%s.a'
def check_dlibrary(conf):
	# Compile and run the DLIB probe to record which D runtime library is
	# available (phobos1/phobos2/tango) in env.DLIBRARY.
	ret=conf.check_cc(features='d dprogram',fragment=DLIB,mandatory=True,compile_filename='test.d',execute=True)
	conf.env.DLIBRARY=ret.strip()
# feature/conftest registrations (expanded decorator applications)
feature('d')(init_d)
before('apply_type_vars')(init_d)
feature('d')(init_d)
before('apply_d_libs')(init_d)
feature('d')(apply_d_libs)
after('apply_d_link','init_d')(apply_d_libs)
before('apply_vnum','apply_d_vars')(apply_d_libs)
feature('dprogram','dshlib','dstaticlib')(apply_d_link)
after('apply_core')(apply_d_link)
feature('d')(apply_d_vars)
after('apply_core')(apply_d_vars)
feature('dshlib')(add_shlib_d_flags)
after('apply_d_vars')(add_shlib_d_flags)
extension(EXT_D)(d_hook)
taskgen(generate_header)
before('apply_core')(process_header)
conftest(d_platform_flags)
conftest(check_dlibrary)

View file

@ -0,0 +1,24 @@
#! /usr/bin/env python
# encoding: utf-8
import Task,Utils
from TaskGen import taskgen,before,after,feature
def add_dbus_file(self,filename,prefix,mode):
if not hasattr(self,'dbus_lst'):
self.dbus_lst=[]
self.meths.append('process_dbus')
self.dbus_lst.append([filename,prefix,mode])
def process_dbus(self):
	# Create one dbus_binding_tool task per queued XML description.
	for filename,prefix,mode in getattr(self,'dbus_lst',[]):
		node=self.path.find_resource(filename)
		if not node:
			raise Utils.WafError('file not found '+filename)
		tsk=self.create_task('dbus_binding_tool',node,node.change_ext('.h'))
		# Per-task parameters consumed by the rule's ${...} expansions.
		tsk.env.DBUS_BINDING_TOOL_PREFIX=prefix
		tsk.env.DBUS_BINDING_TOOL_MODE=mode
# Rule invoking dbus-binding-tool; scheduled before C compilation so the
# generated headers exist when they are #included.
Task.simple_task_type('dbus_binding_tool','${DBUS_BINDING_TOOL} --prefix=${DBUS_BINDING_TOOL_PREFIX} --mode=${DBUS_BINDING_TOOL_MODE} --output=${TGT} ${SRC}',color='BLUE',before='cc')
def detect(conf):
	# Locate dbus-binding-tool; find_program stores the path in the
	# environment via var=, so the return value need not be kept.
	# (Removed an unused local that previously captured it.)
	conf.find_program('dbus-binding-tool',var='DBUS_BINDING_TOOL')
# Register the taskgen method and schedule process_dbus before apply_core.
taskgen(add_dbus_file)
before('apply_core')(process_dbus)

View file

@ -0,0 +1,44 @@
#! /usr/bin/env python
# encoding: utf-8
import sys
import Utils,ar
from Configure import conftest
def find_dmd(conf):
	# Accept either dmd or ldc as the D compiler; abort if neither exists.
	conf.find_program(['dmd','ldc'],var='D_COMPILER',mandatory=True)
def common_flags_ldc(conf):
	# LDC-specific overrides applied on top of the dmd defaults set by
	# common_flags_dmd().
	overrides={'DFLAGS':['-d-version=Posix'],'DLINKFLAGS':[],'D_shlib_DFLAGS':['-relocation-model=pic']}
	for name,value in overrides.items():
		conf.env[name]=value
def common_flags_dmd(conf):
	# Baseline flags, option templates and file patterns for the Digital
	# Mars D compiler (also the starting point for the ldc overrides).
	env=conf.env
	settings={
		'DFLAGS':['-version=Posix'],
		'D_SRC_F':'',
		'D_TGT_F':['-c','-of'],
		'DPATH_ST':'-I%s',
		'DLNK_SRC_F':'',
		'DLNK_TGT_F':'-of',
		'DLIB_ST':'-L-l%s',
		'DLIBPATH_ST':'-L-L%s',
		'DFLAGS_OPTIMIZED':['-O'],
		'DFLAGS_DEBUG':['-g','-debug'],
		'DFLAGS_ULTRADEBUG':['-g','-debug'],
		'DLINKFLAGS':['-quiet'],
		'D_shlib_DFLAGS':['-fPIC'],
		'D_shlib_LINKFLAGS':['-L-shared'],
		'DHEADER_ext':'.di',
		'D_HDR_F':['-H','-Hf'],
	}
	for name,value in settings.items():
		env[name]=value
	# The compiler front-end doubles as the linker driver.
	env['D_LINKER']=env['D_COMPILER']
def detect(conf):
	# Configure the D toolchain around dmd/ldc: find the compiler, load
	# the ar and d support tools, then apply flag defaults.
	conf.find_dmd()
	conf.check_tool('ar')
	conf.check_tool('d')
	conf.common_flags_dmd()
	conf.d_platform_flags()
	# ldc speaks a different flag dialect; adjust if the located binary
	# looks like ldc.
	if conf.env.D_COMPILER.find('ldc')>-1:
		conf.common_flags_ldc()
# Expose the helpers as configuration methods.
conftest(find_dmd)
conftest(common_flags_ldc)
conftest(common_flags_dmd)

View file

@ -0,0 +1,12 @@
#! /usr/bin/env python
# encoding: utf-8
import TaskGen
def decide_ext(self,node):
	# Map .l sources to C++ lexer output when the task generator carries
	# the 'cxx' feature, plain C otherwise.
	if 'cxx' in self.features:
		return '.lex.cc'
	return '.lex.c'
# foo.l -> foo.lex.c / foo.lex.cc; the output extension is chosen
# per task generator by decide_ext above.
TaskGen.declare_chain(name='flex',rule='${FLEX} -o${TGT} ${FLEXFLAGS} ${SRC}',ext_in='.l',ext_out='.c .cxx',decider=decide_ext)
def detect(conf):
	# flex is required; FLEXFLAGS may be extended by project scripts.
	conf.find_program('flex',var='FLEX',mandatory=True)
	conf.env['FLEXFLAGS']=''

View file

@ -0,0 +1,27 @@
#! /usr/bin/env python
# encoding: utf-8
import os,sys
import Task
from TaskGen import extension,taskgen,after,before
# Recognised assembler source extensions and the assembling rule.
EXT_ASM=['.s','.S','.asm','.ASM','.spp','.SPP']
as_str='${AS} ${ASFLAGS} ${_ASINCFLAGS} ${SRC} -o ${TGT}'
Task.simple_task_type('asm',as_str,'PINK',ext_out='.o',shell=False)
def asm_hook(self,node):
	# Extension hook: create one 'asm' task per assembler source and
	# schedule asm_incflags so include paths are inherited.
	try:obj_ext=self.obj_ext
	except AttributeError:obj_ext='_%d.o'%self.idx
	task=self.create_task('asm',node,node.change_ext(obj_ext))
	self.compiled_tasks.append(task)
	self.meths.append('asm_incflags')
def asm_incflags(self):
	# Inherit the include flags of the sibling C or C++ compilation.
	self.env.append_value('_ASINCFLAGS',self.env.ASINCFLAGS)
	var=('cxx'in self.features)and'CXX'or'CC'
	self.env.append_value('_ASINCFLAGS',self.env['_%sINCFLAGS'%var])
def detect(conf):
	# Prefer GNU gas, fall back to 'as', then to the C compiler driver.
	conf.find_program(['gas','as'],var='AS')
	if not conf.env.AS:conf.env.AS=conf.env.CC
# Hook assembler sources; run asm_incflags after the C/C++ variable
# methods but before linking.
extension(EXT_ASM)(asm_hook)
after('apply_obj_vars_cc')(asm_incflags)
after('apply_obj_vars_cxx')(asm_incflags)
before('apply_link')(asm_incflags)

View file

@ -0,0 +1,93 @@
#! /usr/bin/env python
# encoding: utf-8
import os,sys
import Configure,Options,Utils
import ccroot,ar
from Configure import conftest
def find_gcc(conf):
	# Locate gcc (or 'cc'), record its version and normalise CC to a list.
	cc=conf.find_program(['gcc','cc'],var='CC',mandatory=True)
	cc=conf.cmd_to_list(cc)
	ccroot.get_cc_version(conf,cc,gcc=True)
	conf.env.CC_NAME='gcc'
	conf.env.CC=cc
def gcc_common_flags(conf):
	# Flags, option templates and file-name patterns shared by every
	# gcc-style C toolchain (also reused by the icc tool).
	v=conf.env
	settings={
		'CCFLAGS_DEBUG':['-g'],
		'CCFLAGS_RELEASE':['-O2'],
		'CC_SRC_F':'',
		'CC_TGT_F':['-c','-o',''],
		'CPPPATH_ST':'-I%s',
		'CCLNK_SRC_F':'',
		'CCLNK_TGT_F':['-o',''],
		'LIB_ST':'-l%s',
		'LIBPATH_ST':'-L%s',
		'STATICLIB_ST':'-l%s',
		'STATICLIBPATH_ST':'-L%s',
		'RPATH_ST':'-Wl,-rpath,%s',
		'CCDEFINES_ST':'-D%s',
		'SONAME_ST':'-Wl,-h,%s',
		'SHLIB_MARKER':'-Wl,-Bdynamic',
		'STATICLIB_MARKER':'-Wl,-Bstatic',
		'FULLSTATIC_MARKER':'-static',
		'program_PATTERN':'%s',
		'shlib_CCFLAGS':['-fPIC','-DPIC'],
		'shlib_LINKFLAGS':['-shared'],
		'shlib_PATTERN':'lib%s.so',
		'staticlib_LINKFLAGS':['-Wl,-Bstatic'],
		'staticlib_PATTERN':'lib%s.a',
		'LINKFLAGS_MACBUNDLE':['-bundle','-undefined','dynamic_lookup'],
		'CCFLAGS_MACBUNDLE':['-fPIC'],
		'macbundle_PATTERN':'%s.bundle',
	}
	for name,value in settings.items():
		v[name]=value
	# The compiler drives the link step unless a linker is preset.
	if not v['LINK_CC']:v['LINK_CC']=v['CC']
def gcc_modifier_win32(conf):
	# Adjust patterns and flags for native Windows (MinGW) targets.
	# (Removed the unused local 'dest_arch' that read DEST_CPU.)
	v=conf.env
	v['program_PATTERN']='%s.exe'
	v['shlib_PATTERN']='%s.dll'
	v['implib_PATTERN']='lib%s.dll.a'
	v['IMPLIB_ST']='-Wl,--out-implib,%s'
	# PE code is relocatable by construction: keep -DPIC for source
	# compatibility but drop -fPIC.
	v['shlib_CCFLAGS']=['-DPIC']
	v.append_value('shlib_CCFLAGS','-DDLL_EXPORT')
	# Needed when a DLL exports data symbols.
	v.append_value('LINKFLAGS','-Wl,--enable-auto-import')
def gcc_modifier_cygwin(conf):
	# Cygwin builds on the win32 settings, with cygwin DLL naming.
	gcc_modifier_win32(conf)
	v=conf.env
	v['shlib_PATTERN']='cyg%s.dll'
	v.append_value('shlib_LINKFLAGS','-Wl,--enable-auto-image-base')
def gcc_modifier_darwin(conf):
	# macOS: dylib naming/versioning, and no GNU-ld section markers.
	updates={
		'shlib_CCFLAGS':['-fPIC','-compatibility_version','1','-current_version','1'],
		'shlib_LINKFLAGS':['-dynamiclib'],
		'shlib_PATTERN':'lib%s.dylib',
		'staticlib_LINKFLAGS':[],
		'SHLIB_MARKER':'',
		'STATICLIB_MARKER':'',
		'SONAME_ST':'',
	}
	for name,value in updates.items():
		conf.env[name]=value
def gcc_modifier_aix(conf):
	# AIX: enable run-time linking for programs and shared objects.
	env=conf.env
	env['program_LINKFLAGS']=['-Wl,-brtl']
	env['shlib_LINKFLAGS']=['-shared','-Wl,-brtl,-bexpfull']
	env['SHLIB_MARKER']=''
def gcc_modifier_platform(conf):
	# Dispatch to a platform hook (gcc_modifier_<os>) when one exists.
	target_os=conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
	hook=globals().get('gcc_modifier_'+target_os)
	if hook:
		hook(conf)
def detect(conf):
	# Full gcc configuration sequence; order matters: the flag defaults
	# are set before the generic cc helpers consume them.
	conf.find_gcc()
	conf.find_cpp()
	conf.find_ar()
	conf.gcc_common_flags()
	conf.gcc_modifier_platform()
	conf.cc_load_tools()
	conf.cc_add_flags()
	conf.link_add_flags()
# Expose the helpers as configuration methods.
conftest(find_gcc)
conftest(gcc_common_flags)
conftest(gcc_modifier_win32)
conftest(gcc_modifier_cygwin)
conftest(gcc_modifier_darwin)
conftest(gcc_modifier_aix)
conftest(gcc_modifier_platform)

View file

@ -0,0 +1,36 @@
#! /usr/bin/env python
# encoding: utf-8
import sys
import Utils,ar
from Configure import conftest
def find_gdc(conf):
	# The GNU D compiler is mandatory for this tool.
	conf.find_program('gdc',var='D_COMPILER',mandatory=True)
def common_flags_gdc(conf):
	# Flags, option templates and defaults for the GNU D compiler.
	env=conf.env
	settings={
		'DFLAGS':[],
		'D_SRC_F':'',
		'D_TGT_F':['-c','-o',''],
		'DPATH_ST':'-I%s',
		'DLNK_SRC_F':'',
		'DLNK_TGT_F':['-o',''],
		'DLIB_ST':'-l%s',
		'DLIBPATH_ST':'-L%s',
		'DLINKFLAGS':[],
		'DFLAGS_OPTIMIZED':['-O3'],
		'DFLAGS_DEBUG':['-O0'],
		'DFLAGS_ULTRADEBUG':['-O0'],
		'D_shlib_DFLAGS':[],
		'D_shlib_LINKFLAGS':['-shared'],
		'DHEADER_ext':'.di',
		'D_HDR_F':'-fintfc -fintfc-file=',
	}
	for name,value in settings.items():
		env[name]=value
	# gdc also drives the link step.
	env['D_LINKER']=env['D_COMPILER']
def detect(conf):
	# Configure the D toolchain around gdc.
	conf.find_gdc()
	conf.check_tool('ar')
	conf.check_tool('d')
	conf.common_flags_gdc()
	conf.d_platform_flags()
# Expose the helpers as configuration methods.
conftest(find_gdc)
conftest(common_flags_gdc)

View file

@ -0,0 +1,83 @@
#! /usr/bin/env python
# encoding: utf-8
import Task,Utils
from TaskGen import taskgen,before,after,feature
def add_marshal_file(self,filename,prefix):
	# Queue a glib-genmarshal input; tasks are created by
	# process_marshal(), which is scheduled on the first call only.
	queued=getattr(self,'marshal_list',None)
	if queued is None:
		queued=self.marshal_list=[]
		self.meths.append('process_marshal')
	queued.append((filename,prefix))
def process_marshal(self):
	# Create one glib_genmarshal task (producing .h and .c) per queued file.
	for f,prefix in getattr(self,'marshal_list',[]):
		node=self.path.find_resource(f)
		if not node:
			raise Utils.WafError('file not found %r'%f)
		h_node=node.change_ext('.h')
		c_node=node.change_ext('.c')
		task=self.create_task('glib_genmarshal',node,[h_node,c_node])
		task.env.GLIB_GENMARSHAL_PREFIX=prefix
		# Compile the generated .c along with the other sources.
		self.allnodes.append(c_node)
def genmarshal_func(self):
	# Task body: run glib-genmarshal twice — once for the header, once
	# for the body — after writing an #include of the header into the .c.
	bld=self.inputs[0].__class__.bld
	get=self.env.get_flat
	cmd1="%s %s --prefix=%s --header > %s"%(get('GLIB_GENMARSHAL'),self.inputs[0].srcpath(self.env),get('GLIB_GENMARSHAL_PREFIX'),self.outputs[0].abspath(self.env))
	ret=bld.exec_command(cmd1)
	if ret:return ret
	f=open(self.outputs[1].abspath(self.env),'wb')
	c='''#include "%s"\n'''%self.outputs[0].name
	f.write(c)
	f.close()
	# NOTE(review): cmd1 goes through bld.exec_command but cmd2 through
	# Utils.exec_command — presumably equivalent here; confirm upstream.
	cmd2="%s %s --prefix=%s --body >> %s"%(get('GLIB_GENMARSHAL'),self.inputs[0].srcpath(self.env),get('GLIB_GENMARSHAL_PREFIX'),self.outputs[1].abspath(self.env))
	ret=Utils.exec_command(cmd2)
	if ret:return ret
def add_enums_from_template(self,source='',target='',template='',comments=''):
	# Queue a glib-mkenums run driven by a template file; the production
	# rule fields stay empty because the template supplies them.
	if not hasattr(self,'enums_list'):
		self.enums_list=[]
	self.meths.append('process_enums')
	entry={'source':source,'target':target,'template':template,'comments':comments}
	for key in('file-head','file-prod','file-tail','enum-prod','value-head','value-prod','value-tail'):
		entry[key]=''
	self.enums_list.append(entry)
def add_enums(self,source='',target='',file_head='',file_prod='',file_tail='',enum_prod='',value_head='',value_prod='',value_tail='',comments=''):
	# Queue a glib-mkenums run configured with explicit production rules
	# instead of a template file.
	if not hasattr(self,'enums_list'):
		self.enums_list=[]
	self.meths.append('process_enums')
	self.enums_list.append({
		'source':source,
		'template':'',
		'target':target,
		'file-head':file_head,
		'file-prod':file_prod,
		'file-tail':file_tail,
		'enum-prod':enum_prod,
		'value-head':value_head,
		'value-prod':value_prod,
		'value-tail':value_tail,
		'comments':comments,
	})
def process_enums(self):
	# Turn each queued enums description into a 'glib_mkenums' task.
	for enum in getattr(self,'enums_list',[]):
		task=self.create_task('glib_mkenums')
		env=task.env
		inputs=[]
		# Source headers scanned for enum definitions.
		source_list=self.to_list(enum['source'])
		if not source_list:
			raise Utils.WafError('missing source '+str(enum))
		source_list=[self.path.find_resource(k)for k in source_list]
		inputs+=source_list
		env['GLIB_MKENUMS_SOURCE']=[k.srcpath(env)for k in source_list]
		if not enum['target']:
			raise Utils.WafError('missing target '+str(enum))
		tgt_node=self.path.find_or_declare(enum['target'])
		if tgt_node.name.endswith('.c'):
			# Generated C files are fed back into the compilation.
			self.allnodes.append(tgt_node)
		env['GLIB_MKENUMS_TARGET']=tgt_node.abspath(env)
		options=[]
		if enum['template']:
			template_node=self.path.find_resource(enum['template'])
			options.append('--template %s'%(template_node.abspath(env)))
			inputs.append(template_node)
		params={'file-head':'--fhead','file-prod':'--fprod','file-tail':'--ftail','enum-prod':'--eprod','value-head':'--vhead','value-prod':'--vprod','value-tail':'--vtail','comments':'--comments'}
		# items() instead of the Python-2-only iteritems(): behaves the
		# same here and keeps the file importable on Python 3.
		for param,option in params.items():
			if enum[param]:
				options.append('%s %r'%(option,enum[param]))
		env['GLIB_MKENUMS_OPTIONS']=' '.join(options)
		task.set_inputs(inputs)
		task.set_outputs(tgt_node)
# glib_genmarshal runs the Python function above; glib_mkenums is a
# plain command line. Both are scheduled before C/C++ compilation.
Task.task_type_from_func('glib_genmarshal',func=genmarshal_func,vars=['GLIB_GENMARSHAL_PREFIX','GLIB_GENMARSHAL'],color='BLUE',before='cc cxx')
Task.simple_task_type('glib_mkenums','${GLIB_MKENUMS} ${GLIB_MKENUMS_OPTIONS} ${GLIB_MKENUMS_SOURCE} > ${GLIB_MKENUMS_TARGET}',color='PINK',before='cc cxx')
def detect(conf):
	# Locate the glib code generators; find_program records the paths in
	# the environment via var=, so the return values need not be kept.
	# (Removed two unused locals that previously captured them.)
	conf.find_program('glib-genmarshal',var='GLIB_GENMARSHAL')
	conf.find_program('glib-mkenums',var='GLIB_MKENUMS')
# Register the taskgen helper methods.
taskgen(add_marshal_file)
before('apply_core')(process_marshal)
taskgen(add_enums_from_template)
taskgen(add_enums)
before('apply_core')(process_enums)

View file

@ -0,0 +1,163 @@
#! /usr/bin/env python
# encoding: utf-8
import os,re
import TaskGen,Utils,Runner,Task,Build,Options,Logs
import cc
from Logs import error
from TaskGen import taskgen,before,after,feature
# Patterns extracting the man page name and section from a DocBook refentry.
n1_regexp=re.compile('<refentrytitle>(.*)</refentrytitle>',re.M)
n2_regexp=re.compile('<manvolnum>(.*)</manvolnum>',re.M)
def postinstall_schemas(prog_name):
	# Register a GConf schema right after 'waf install'; when installing
	# into a staging directory (--destdir) only print the command to run.
	if Build.bld.is_install:
		dir=Build.bld.get_install_path('${PREFIX}/etc/gconf/schemas/%s.schemas'%prog_name)
		if not Options.options.destdir:
			Utils.pprint('YELLOW','Installing GConf schema')
			command='gconftool-2 --install-schema-file=%s 1> /dev/null'%dir
			ret=Utils.exec_command(command)
		else:
			Utils.pprint('YELLOW','GConf schema not installed. After install, run this:')
			Utils.pprint('YELLOW','gconftool-2 --install-schema-file=%s'%dir)
def postinstall_icons():
	# Refresh the hicolor icon cache after 'waf install'; when installing
	# into a staging directory only print the command to run.
	dir=Build.bld.get_install_path('${DATADIR}/icons/hicolor')
	if Build.bld.is_install:
		if not Options.options.destdir:
			Utils.pprint('YELLOW',"Updating Gtk icon cache.")
			command='gtk-update-icon-cache -q -f -t %s'%dir
			ret=Utils.exec_command(command)
		else:
			Utils.pprint('YELLOW','Icon cache not updated. After install, run this:')
			Utils.pprint('YELLOW','gtk-update-icon-cache -q -f -t %s'%dir)
def postinstall_scrollkeeper(prog_name):
	# Update the scrollkeeper documentation catalog, but only when the
	# log file is writable (i.e. we are likely running as root).
	if Build.bld.is_install:
		if os.access('/var/log/scrollkeeper.log',os.W_OK):
			dir1=Build.bld.get_install_path('${PREFIX}/var/scrollkeeper')
			dir2=Build.bld.get_install_path('${DATADIR}/omf/%s'%prog_name)
			command='scrollkeeper-update -q -p %s -o %s'%(dir1,dir2)
			ret=Utils.exec_command(command)
def postinstall(prog_name='myapp',schemas=1,icons=1,scrollkeeper=1):
	# Convenience wrapper running the selected post-install hooks;
	# the flags are truthy toggles.
	if schemas:postinstall_schemas(prog_name)
	if icons:postinstall_icons()
	if scrollkeeper:postinstall_scrollkeeper(prog_name)
class gnome_doc_taskgen(TaskGen.task_gen):
	# Task generator backing the 'gnome_doc' feature (translated manuals).
	def __init__(self,*k,**kw):
		TaskGen.task_gen.__init__(self,*k,**kw)
def init_gnome_doc(self):
	# Default destination for installed documentation trees.
	self.default_install_path='${PREFIX}/share'
def apply_gnome_doc(self):
	# For each language (doc_linguas translations plus the 'C' master):
	# run xml2po to produce translated XML, xsltproc to build the .omf
	# metadata, and install files when 'waf install' is running.
	self.env['APPNAME']=self.doc_module
	lst=self.to_list(self.doc_linguas)
	bld=self.bld
	lst.append('C')
	for x in lst:
		if not x=='C':
			# Translated manual: merge the .po with the C master XML.
			tsk=self.create_task('xml2po')
			node=self.path.find_resource(x+'/'+x+'.po')
			src=self.path.find_resource('C/%s.xml'%self.doc_module)
			out=self.path.find_or_declare('%s/%s.xml'%(x,self.doc_module))
			tsk.set_inputs([node,src])
			tsk.set_outputs(out)
		else:
			out=self.path.find_resource('%s/%s.xml'%(x,self.doc_module))
		tsk2=self.create_task('xsltproc2po')
		out2=self.path.find_or_declare('%s/%s-%s.omf'%(x,self.doc_module,x))
		tsk2.set_outputs(out2)
		node=self.path.find_resource(self.doc_module+".omf.in")
		tsk2.inputs=[node,out]
		# NOTE(review): for x=='C' no xml2po task is created in this
		# iteration, so 'tsk' is the one from the previous language (and
		# is undefined when doc_linguas is empty) — confirm intent.
		tsk2.run_after.append(tsk)
		if bld.is_install:
			path=self.install_path+'/gnome/help/%s/%s'%(self.doc_module,x)
			bld.install_files(self.install_path+'/omf',out2,env=self.env)
			for y in self.to_list(self.doc_figures):
				try:
					# Install the translated figure when present...
					os.stat(self.path.abspath()+'/'+x+'/'+y)
					bld.install_as(path+'/'+y,self.path.abspath()+'/'+x+'/'+y)
				except:
					# ...otherwise fall back to the C original.
					bld.install_as(path+'/'+y,self.path.abspath()+'/C/'+y)
			bld.install_as(path+'/%s.xml'%self.doc_module,out.abspath(self.env))
			if x=='C':
				xmls=self.to_list(self.doc_includes)
				xmls.append(self.doc_entities)
				for z in xmls:
					out=self.path.find_resource('%s/%s'%(x,z))
					bld.install_as(path+'/%s'%z,out.abspath(self.env))
class xml_to_taskgen(TaskGen.task_gen):
	# Task generator backing the 'xml_to' feature (xmlto HTML output).
	def __init__(self,*k,**kw):
		TaskGen.task_gen.__init__(self,*k,**kw)
def init_xml_to(self):
	# Fill in defaults for the xml_to feature attributes.
	# NOTE(review): the default 'xlsltfile' looks like a typo for
	# 'xsltfile' — kept as-is because it is a runtime default value.
	Utils.def_attrs(self,source='xmlfile',xslt='xlsltfile',target='hey',default_install_path='${PREFIX}',task_created=None)
def apply_xml_to(self):
	# Build an 'xmlto' task from the source XML and its stylesheet.
	xmlfile=self.path.find_resource(self.source)
	xsltfile=self.path.find_resource(self.xslt)
	# NOTE(review): change_ext('html') lacks the usual leading dot
	# ('.html'); confirm the intended output file name.
	tsk=self.create_task('xmlto',[xmlfile,xsltfile],xmlfile.change_ext('html'))
	tsk.install_path=self.install_path
def sgml_scan(self):
	# Scanner for sgml2man tasks: read the refentry title and volume from
	# the SGML input to derive the man page file name (e.g. foo.1), and
	# declare it as the task output when none is set yet.
	node=self.inputs[0]
	env=self.env
	# NOTE(review): 'variant' is computed but never used.
	variant=node.variant(env)
	fi=open(node.abspath(env),'r')
	content=fi.read()
	fi.close()
	name=n1_regexp.findall(content)[0]
	num=n2_regexp.findall(content)[0]
	doc_name=name+'.'+num
	if not self.outputs:
		self.outputs=[self.generator.path.find_or_declare(doc_name)]
	# (nodes, names) pair per the task-scanner protocol.
	return([],[doc_name])
class gnome_sgml2man_taskgen(TaskGen.task_gen):
	# Task generator backing the 'gnome_sgml2man' feature.
	def __init__(self,*k,**kw):
		TaskGen.task_gen.__init__(self,*k,**kw)
def apply_gnome_sgml2man(self):
	# Create one sgml2man task per .sgml file in this directory; callers
	# must set the 'appname' attribute.
	assert(getattr(self,'appname',None))
	def install_result(task):
		# Install the generated page under man<section>/ (the section is
		# the last character of the output name, e.g. 'foo.1' -> man1).
		out=task.outputs[0]
		name=out.name
		ext=name[-1]
		env=task.env
		self.bld.install_files('${DATADIR}/man/man%s/'%ext,out,env)
	self.bld.rescan(self.path)
	for name in self.bld.cache_dir_contents[self.path.id]:
		base,ext=os.path.splitext(name)
		if ext!='.sgml':continue
		task=self.create_task('sgml2man')
		task.set_inputs(self.path.find_resource(name))
		task.task_generator=self
		if self.bld.is_install:task.install=install_result
		# Run the scanner immediately so the output name is known.
		task.scan()
# Task rules for the documentation pipeline; sgml2man output names are
# discovered at runtime by the sgml_scan scanner above.
cls=Task.simple_task_type('sgml2man','${SGML2MAN} -o ${TGT[0].bld_dir(env)} ${SRC} > /dev/null',color='BLUE')
cls.scan=sgml_scan
cls.quiet=1
Task.simple_task_type('xmlto','${XMLTO} html -m ${SRC[1].abspath(env)} ${SRC[0].abspath(env)}')
Task.simple_task_type('xml2po','${XML2PO} ${XML2POFLAGS} ${SRC} > ${TGT}',color='BLUE')
# xsltproc command producing the scrollkeeper .omf metadata; the
# language is recovered from the target file name ("<module>-<lang>.omf").
xslt_magic="""${XSLTPROC2PO} -o ${TGT[0].abspath(env)} \
--stringparam db2omf.basename ${APPNAME} \
--stringparam db2omf.format docbook \
--stringparam db2omf.lang ${TGT[0].abspath(env)[:-4].split('-')[-1]} \
--stringparam db2omf.dtd '-//OASIS//DTD DocBook XML V4.3//EN' \
--stringparam db2omf.omf_dir ${PREFIX}/share/omf \
--stringparam db2omf.help_dir ${PREFIX}/share/gnome/help \
--stringparam db2omf.omf_in ${SRC[0].abspath(env)} \
--stringparam db2omf.scrollkeeper_cl ${SCROLLKEEPER_DATADIR}/Templates/C/scrollkeeper_cl.xml \
${DB2OMF} ${SRC[1].abspath(env)}"""
Task.simple_task_type('xsltproc2po',xslt_magic,color='BLUE')
def detect(conf):
	# Configure the GNOME helpers: base tools, documentation programs,
	# and the paths needed by the xml2po/xsltproc2po rules.
	# (Removed three unused locals and the unused inner getstr() helper.)
	conf.check_tool('gnu_dirs glib2 dbus')
	conf.find_program('docbook2man',var='SGML2MAN')
	conf.define('GNOMELOCALEDIR',os.path.join(conf.env['DATADIR'],'locale'))
	conf.find_program('xml2po',var='XML2PO')
	conf.find_program('xsltproc',var='XSLTPROC2PO')
	conf.env['XML2POFLAGS']='-e -p'
	conf.env['SCROLLKEEPER_DATADIR']=Utils.cmd_output("scrollkeeper-config --pkgdatadir",silent=1).strip()
	# NOTE(review): pkg-config is invoked via an absolute path here,
	# unlike every other tool lookup — confirm this is intentional.
	conf.env['DB2OMF']=Utils.cmd_output("/usr/bin/pkg-config --variable db2omf gnome-doc-utils",silent=1).strip()
def set_options(opt):
	# Expose the rpath toggle shared by the GNOME-related tools.
	opt.add_option('--want-rpath',type='int',default=1,dest='want_rpath',help='set rpath to 1 or 0 [Default 1]')
# Feature/method registration for the GNOME helpers.
feature('gnome_doc')(init_gnome_doc)
feature('gnome_doc')(apply_gnome_doc)
after('init_gnome_doc')(apply_gnome_doc)
feature('xml_to')(init_xml_to)
feature('xml_to')(apply_xml_to)
after('init_xml_to')(apply_xml_to)
feature('gnome_sgml2man')(apply_gnome_sgml2man)

View file

@ -0,0 +1,63 @@
#! /usr/bin/env python
# encoding: utf-8
import Utils,Options
_options=[x.split(', ')for x in'''
bindir, user executables, ${EXEC_PREFIX}/bin
sbindir, system admin executables, ${EXEC_PREFIX}/sbin
libexecdir, program executables, ${EXEC_PREFIX}/libexec
sysconfdir, read-only single-machine data, ${PREFIX}/etc
sharedstatedir, modifiable architecture-independent data, ${PREFIX}/com
localstatedir, modifiable single-machine data, ${PREFIX}/var
libdir, object code libraries, ${EXEC_PREFIX}/lib
includedir, C header files, ${PREFIX}/include
oldincludedir, C header files for non-gcc, /usr/include
datarootdir, read-only arch.-independent data root, ${PREFIX}/share
datadir, read-only architecture-independent data, ${DATAROOTDIR}
infodir, info documentation, ${DATAROOTDIR}/info
localedir, locale-dependent data, ${DATAROOTDIR}/locale
mandir, man documentation, ${DATAROOTDIR}/man
docdir, documentation root, ${DATAROOTDIR}/doc/${PACKAGE}
htmldir, html documentation, ${DOCDIR}
dvidir, dvi documentation, ${DOCDIR}
pdfdir, pdf documentation, ${DOCDIR}
psdir, ps documentation, ${DOCDIR}
'''.split('\n')if x]
def detect(conf):
	# Resolve every GNU directory variable, expanding ${VAR} references
	# against already-known values; iterate until the set is stable.
	# (Renamed the loop counter: 'iter' shadowed the builtin.)
	def get_param(varname,default):
		# Command-line value wins over the table default.
		return getattr(Options.options,varname,'')or default
	env=conf.env
	env['EXEC_PREFIX']=get_param('EXEC_PREFIX',env['PREFIX'])
	env['PACKAGE']=Utils.g_module.APPNAME
	complete=False
	attempts=0
	# A variable may depend on one resolved in a previous pass;
	# len(_options)+1 passes suffice for any valid dependency chain.
	while not complete and attempts<len(_options)+1:
		attempts+=1
		complete=True
		for name,help,default in _options:
			name=name.upper()
			if not env[name]:
				try:
					env[name]=Utils.subst_vars(get_param(name,default),env)
				except TypeError:
					complete=False
	if not complete:
		lst=[name for name,_,_ in _options if not env[name.upper()]]
		raise Utils.WafError('Variable substitution failure %r'%lst)
def set_options(opt):
	# Relocate --prefix/--destdir into a dedicated option group, then
	# declare --exec-prefix and one option per directory in _options.
	inst_dir=opt.add_option_group('Installation directories','By default, "waf install" will put the files in\
"/usr/local/bin", "/usr/local/lib" etc. An installation prefix other\
than "/usr/local" can be given using "--prefix", for example "--prefix=$HOME"')
	for k in('--prefix','--destdir'):
		option=opt.parser.get_option(k)
		if option:
			opt.parser.remove_option(k)
			inst_dir.add_option(option)
	inst_dir.add_option('--exec-prefix',help='installation prefix [Default: ${PREFIX}]',default='',dest='EXEC_PREFIX')
	dirs_options=opt.add_option_group('Pre-defined installation directories','')
	for name,help,default in _options:
		option_name='--'+name
		str_default=default
		str_help='%s [Default: %s]'%(help,str_default)
		dirs_options.add_option(option_name,help=str_help,default='',dest=name.upper())

View file

@ -0,0 +1,10 @@
#! /usr/bin/env python
# encoding: utf-8
import TaskGen
# foo.gob -> foo.c generated in the build directory via gob2.
TaskGen.declare_chain(name='gob2',rule='${GOB2} -o ${TGT[0].bld_dir(env)} ${GOB2FLAGS} ${SRC}',ext_in='.gob',ext_out='.c')
def detect(conf):
	# Locate the gob2 code generator; configuration fails if it is absent.
	path=conf.find_program('gob2',var='GOB2',mandatory=True)
	conf.env['GOB2']=path
	conf.env['GOB2FLAGS']=''

View file

@ -0,0 +1,93 @@
#! /usr/bin/env python
# encoding: utf-8
import os,sys
import Configure,Options,Utils
import ccroot,ar
from Configure import conftest
def find_gxx(conf):
	# Locate g++ (or 'c++'), record its version and normalise CXX to a list.
	cxx=conf.find_program(['g++','c++'],var='CXX',mandatory=True)
	cxx=conf.cmd_to_list(cxx)
	ccroot.get_cc_version(conf,cxx,gcc=True)
	conf.env.CXX_NAME='gcc'
	conf.env.CXX=cxx
def gxx_common_flags(conf):
	# Flags, option templates and file-name patterns shared by every
	# gcc-style C++ toolchain (also reused by the icpc tool).
	v=conf.env
	settings={
		'CXXFLAGS_DEBUG':['-g'],
		'CXXFLAGS_RELEASE':['-O2'],
		'CXX_SRC_F':'',
		'CXX_TGT_F':['-c','-o',''],
		'CPPPATH_ST':'-I%s',
		'CXXLNK_SRC_F':'',
		'CXXLNK_TGT_F':['-o',''],
		'LIB_ST':'-l%s',
		'LIBPATH_ST':'-L%s',
		'STATICLIB_ST':'-l%s',
		'STATICLIBPATH_ST':'-L%s',
		'RPATH_ST':'-Wl,-rpath,%s',
		'CXXDEFINES_ST':'-D%s',
		'SONAME_ST':'-Wl,-h,%s',
		'SHLIB_MARKER':'-Wl,-Bdynamic',
		'STATICLIB_MARKER':'-Wl,-Bstatic',
		'FULLSTATIC_MARKER':'-static',
		'program_PATTERN':'%s',
		'shlib_CXXFLAGS':['-fPIC','-DPIC'],
		'shlib_LINKFLAGS':['-shared'],
		'shlib_PATTERN':'lib%s.so',
		'staticlib_LINKFLAGS':['-Wl,-Bstatic'],
		'staticlib_PATTERN':'lib%s.a',
		'LINKFLAGS_MACBUNDLE':['-bundle','-undefined','dynamic_lookup'],
		# NOTE(review): upstream sets CCFLAGS_MACBUNDLE (not a CXX
		# variant) here; preserved as-is.
		'CCFLAGS_MACBUNDLE':['-fPIC'],
		'macbundle_PATTERN':'%s.bundle',
	}
	for name,value in settings.items():
		v[name]=value
	# The compiler drives the link step unless a linker is preset.
	if not v['LINK_CXX']:v['LINK_CXX']=v['CXX']
def gxx_modifier_win32(conf):
	# Adjust patterns and flags for native Windows (MinGW) targets.
	# (Removed the unused local 'dest_arch' that read DEST_CPU.)
	v=conf.env
	v['program_PATTERN']='%s.exe'
	v['shlib_PATTERN']='%s.dll'
	v['implib_PATTERN']='lib%s.dll.a'
	v['IMPLIB_ST']='-Wl,--out-implib,%s'
	# PE code is relocatable by construction; no -fPIC needed.
	v['shlib_CXXFLAGS']=[]
	v.append_value('shlib_CXXFLAGS','-DDLL_EXPORT')
	# Needed when a DLL exports data symbols.
	v.append_value('LINKFLAGS','-Wl,--enable-auto-import')
def gxx_modifier_cygwin(conf):
	# Cygwin builds on the win32 settings, with cygwin DLL naming.
	gxx_modifier_win32(conf)
	v=conf.env
	v['shlib_PATTERN']='cyg%s.dll'
	v.append_value('shlib_LINKFLAGS','-Wl,--enable-auto-image-base')
def gxx_modifier_darwin(conf):
	# macOS: dylib naming/versioning, and no GNU-ld section markers.
	v=conf.env
	v['shlib_CXXFLAGS']=['-fPIC','-compatibility_version','1','-current_version','1']
	v['shlib_LINKFLAGS']=['-dynamiclib']
	v['shlib_PATTERN']='lib%s.dylib'
	v['staticlib_LINKFLAGS']=[]
	v['SHLIB_MARKER']=''
	v['STATICLIB_MARKER']=''
	v['SONAME_ST']=''
def gxx_modifier_aix(conf):
	# AIX: enable run-time linking for programs and shared objects.
	v=conf.env
	v['program_LINKFLAGS']=['-Wl,-brtl']
	v['shlib_LINKFLAGS']=['-shared','-Wl,-brtl,-bexpfull']
	v['SHLIB_MARKER']=''
def gxx_modifier_platform(conf):
	# Dispatch to a platform hook (gxx_modifier_<os>) when one exists.
	dest_os=conf.env['DEST_OS']or Utils.unversioned_sys_platform()
	gxx_modifier_func=globals().get('gxx_modifier_'+dest_os)
	if gxx_modifier_func:
		gxx_modifier_func(conf)
def detect(conf):
	# Full g++ configuration sequence; order matters: the flag defaults
	# are set before the generic cxx helpers consume them.
	conf.find_gxx()
	conf.find_cpp()
	conf.find_ar()
	conf.gxx_common_flags()
	conf.gxx_modifier_platform()
	conf.cxx_load_tools()
	conf.cxx_add_flags()
	conf.link_add_flags()
# Expose the helpers as configuration methods.
conftest(find_gxx)
conftest(gxx_common_flags)
conftest(gxx_modifier_win32)
conftest(gxx_modifier_cygwin)
conftest(gxx_modifier_darwin)
conftest(gxx_modifier_aix)
conftest(gxx_modifier_platform)

View file

@ -0,0 +1,32 @@
#! /usr/bin/env python
# encoding: utf-8
import os,sys
import Configure,Options,Utils
import ccroot,ar,gcc
from Configure import conftest
def find_icc(conf):
	# Find the Intel C compiler: a preset CC (env var or environment)
	# wins, then 'icc', then 'ICL'; record its version once found.
	if sys.platform=='cygwin':
		conf.fatal('The Intel compiler does not work on Cygwin')
	v=conf.env
	cc=None
	if v['CC']:cc=v['CC']
	elif'CC'in conf.environ:cc=conf.environ['CC']
	if not cc:cc=conf.find_program('icc',var='CC')
	if not cc:cc=conf.find_program('ICL',var='CC')
	if not cc:conf.fatal('Intel C Compiler (icc) was not found')
	cc=conf.cmd_to_list(cc)
	ccroot.get_cc_version(conf,cc,icc=True)
	v['CC']=cc
	v['CC_NAME']='icc'
# For this tool 'detect' is a whitespace-separated list of conftest
# method names that waf runs in order; icc reuses the gcc flag helpers.
detect='''
find_icc
find_ar
gcc_common_flags
gcc_modifier_platform
cc_load_tools
cc_add_flags
link_add_flags
'''
conftest(find_icc)

View file

@ -0,0 +1,31 @@
#! /usr/bin/env python
# encoding: utf-8
import os,sys
import Configure,Options,Utils
import ccroot,ar,gxx
from Configure import conftest
def find_icpc(conf):
	# Find the Intel C++ compiler: a preset CXX (env var or environment)
	# wins, then 'icpc'; record its version once found.
	if sys.platform=='cygwin':
		conf.fatal('The Intel compiler does not work on Cygwin')
	v=conf.env
	cxx=None
	if v['CXX']:cxx=v['CXX']
	elif'CXX'in conf.environ:cxx=conf.environ['CXX']
	if not cxx:cxx=conf.find_program('icpc',var='CXX')
	if not cxx:conf.fatal('Intel C++ Compiler (icpc) was not found')
	cxx=conf.cmd_to_list(cxx)
	ccroot.get_cc_version(conf,cxx,icc=True)
	v['CXX']=cxx
	v['CXX_NAME']='icc'
# For this tool 'detect' is a whitespace-separated list of conftest
# method names that waf runs in order; icpc reuses the gxx flag helpers.
detect='''
find_icpc
find_ar
gxx_common_flags
gxx_modifier_platform
cxx_load_tools
cxx_add_flags
link_add_flags
'''
conftest(find_icpc)

View file

@ -0,0 +1,95 @@
#! /usr/bin/env python
# encoding: utf-8
import os,re
import Configure,TaskGen,Task,Utils,Runner,Options,Build,config_c
from TaskGen import feature,before,taskgen
from Logs import error
class intltool_in_taskgen(TaskGen.task_gen):
def __init__(self,*k,**kw):
TaskGen.task_gen.__init__(self,*k,**kw)
def iapply_intltool_in_f(self):
try:self.meths.remove('apply_core')
except ValueError:pass
for i in self.to_list(self.source):
node=self.path.find_resource(i)
podir=getattr(self,'podir','po')
podirnode=self.path.find_dir(podir)
if not podirnode:
error("could not find the podir %r"%podir)
continue
cache=getattr(self,'intlcache','.intlcache')
self.env['INTLCACHE']=os.path.join(self.path.bldpath(self.env),podir,cache)
self.env['INTLPODIR']=podirnode.srcpath(self.env)
self.env['INTLFLAGS']=getattr(self,'flags',['-q','-u','-c'])
task=self.create_task('intltool',node,node.change_ext(''))
task.install_path=self.install_path
class intltool_po_taskgen(TaskGen.task_gen):
def __init__(self,*k,**kw):
TaskGen.task_gen.__init__(self,*k,**kw)
def apply_intltool_po(self):
try:self.meths.remove('apply_core')
except ValueError:pass
self.default_install_path='${LOCALEDIR}'
appname=getattr(self,'appname','set_your_app_name')
podir=getattr(self,'podir','')
def install_translation(task):
out=task.outputs[0]
filename=out.name
(langname,ext)=os.path.splitext(filename)
inst_file=langname+os.sep+'LC_MESSAGES'+os.sep+appname+'.mo'
self.bld.install_as(os.path.join(self.install_path,inst_file),out,self.env,self.chmod)
linguas=self.path.find_resource(os.path.join(podir,'LINGUAS'))
if linguas:
file=open(linguas.abspath())
langs=[]
for line in file.readlines():
if not line.startswith('#'):
langs+=line.split()
file.close()
re_linguas=re.compile('[-a-zA-Z_@.]+')
for lang in langs:
if re_linguas.match(lang):
node=self.path.find_resource(os.path.join(podir,re_linguas.match(lang).group()+'.po'))
task=self.create_task('po')
task.set_inputs(node)
task.set_outputs(node.change_ext('.mo'))
if self.bld.is_install:task.install=install_translation
else:
Utils.pprint('RED',"Error no LINGUAS file found in po directory")
Task.simple_task_type('po','${POCOM} -o ${TGT} ${SRC}',color='BLUE',shell=False)
Task.simple_task_type('intltool','${INTLTOOL} ${INTLFLAGS} ${INTLCACHE} ${INTLPODIR} ${SRC} ${TGT}',color='BLUE',after="cc_link cxx_link",shell=False)
def detect(conf):
pocom=conf.find_program('msgfmt')
if not pocom:
conf.fatal('The program msgfmt (gettext) is mandatory!')
conf.env['POCOM']=pocom
intltool=conf.find_program('intltool-merge',var='INTLTOOL')
if not intltool:
if Options.platform=='win32':
perl=conf.find_program('perl',var='PERL')
if not perl:
conf.fatal('The program perl (required by intltool) could not be found')
intltooldir=Configure.find_file('intltool-merge',os.environ['PATH'].split(os.pathsep))
if not intltooldir:
conf.fatal('The program intltool-merge (intltool, gettext-devel) is mandatory!')
conf.env['INTLTOOL']=Utils.to_list(conf.env['PERL'])+[intltooldir+os.sep+'intltool-merge']
conf.check_message('intltool','',True,' '.join(conf.env['INTLTOOL']))
else:
conf.fatal('The program intltool-merge (intltool, gettext-devel) is mandatory!')
def getstr(varname):
return getattr(Options.options,varname,'')
prefix=conf.env['PREFIX']
datadir=getstr('datadir')
if not datadir:datadir=os.path.join(prefix,'share')
conf.define('LOCALEDIR',os.path.join(datadir,'locale'))
conf.define('DATADIR',datadir)
if conf.env['CC']or conf.env['CXX']:
conf.check(header_name='locale.h')
def set_options(opt):
opt.add_option('--want-rpath',type='int',default=1,dest='want_rpath',help='set rpath to 1 or 0 [Default 1]')
opt.add_option('--datadir',type='string',default='',dest='datadir',help='read-only application data')
before('apply_core')(iapply_intltool_in_f)
feature('intltool_in')(iapply_intltool_in_f)
feature('intltool_po')(apply_intltool_po)

View file

@ -0,0 +1,162 @@
#! /usr/bin/env python
# encoding: utf-8
import sys
if sys.hexversion < 0x020400f0: from sets import Set as set
import os,re
from Configure import conf
import TaskGen,Task,Utils,Options,Build
from TaskGen import feature,before,taskgen
class_check_source='''
public class Test {
public static void main(String[] argv) {
Class lib;
if (argv.length < 1) {
System.err.println("Missing argument");
System.exit(77);
}
try {
lib = Class.forName(argv[0]);
} catch (ClassNotFoundException e) {
System.err.println("ClassNotFoundException");
System.exit(1);
}
lib = null;
System.exit(0);
}
}
'''
def jar_files(self):
	# Build a .jar from everything below 'basedir' (task generator
	# attributes: basedir, destfile, jaropts, jarcreate).
	basedir=getattr(self,'basedir','.')
	destfile=getattr(self,'destfile','test.jar')
	jaropts=getattr(self,'jaropts',[])
	jarcreate=getattr(self,'jarcreate','cf')
	dir=self.path.find_dir(basedir)
	if not dir:
		# Upstream had a bare 'raise' here, which fails with "no active
		# exception"; raise a real configuration error instead.
		raise Utils.WafError('jar: basedir %r not found'%basedir)
	jaropts.append('-C')
	jaropts.append(dir.abspath(self.env))
	jaropts.append('.')
	out=self.path.find_or_declare(destfile)
	tsk=self.create_task('jar_create')
	tsk.set_outputs(out)
	# Everything in the build tree below basedir except the jar itself.
	tsk.inputs=[x for x in dir.find_iter(src=0,bld=1)if x.id!=out.id]
	tsk.env['JAROPTS']=jaropts
	tsk.env['JARCREATE']=jarcreate
def apply_java(self):
	# Compile all Java sources under srcdir with a single javac task and,
	# when 'jarname' is set, jar the resulting classes afterwards.
	Utils.def_attrs(self,jarname='',jaropts='',classpath='',sourcepath='.',srcdir='.',source_re='**/*.java',jar_mf_attributes={},jar_mf_classpath=[])
	if getattr(self,'source_root',None):
		# Legacy attribute name.
		self.srcdir=self.source_root
	# NOTE(review): 'nodes_lst' is never used.
	nodes_lst=[]
	if not self.classpath:
		if not self.env['CLASSPATH']:
			self.env['CLASSPATH']='..'+os.pathsep+'.'
	else:
		self.env['CLASSPATH']=self.classpath
	srcdir_node=self.path.find_dir(self.srcdir)
	if not srcdir_node:
		raise Utils.WafError('could not find srcdir %r'%self.srcdir)
	src_nodes=[x for x in srcdir_node.ant_glob(self.source_re,flat=False)]
	bld_nodes=[x.change_ext('.class')for x in src_nodes]
	self.env['OUTDIR']=[srcdir_node.bldpath(self.env)]
	tsk=self.create_task('javac')
	tsk.set_inputs(src_nodes)
	tsk.set_outputs(bld_nodes)
	if getattr(self,'compat',None):
		# Target an older language level via -source.
		tsk.env.append_value('JAVACFLAGS',['-source',self.compat])
	if hasattr(self,'sourcepath'):
		fold=[self.path.find_dir(x)for x in self.to_list(self.sourcepath)]
		names=os.pathsep.join([x.srcpath()for x in fold])
	else:
		names=srcdir_node.srcpath()
	if names:
		tsk.env.append_value('JAVACFLAGS',['-sourcepath',names])
	if self.jarname:
		jtsk=self.create_task('jar_create',bld_nodes,self.path.find_or_declare(self.jarname))
		jtsk.set_run_after(tsk)
		if not self.env.JAROPTS:
			if self.jaropts:
				self.env.JAROPTS=self.jaropts
			else:
				dirs='.'
				self.env.JAROPTS=['-C',''.join(self.env['OUTDIR']),dirs]
# jar/javac command rules; javac gets a custom post_run (defined next).
Task.simple_task_type('jar_create','${JAR} ${JARCREATE} ${TGT} ${JAROPTS}',color='GREEN',shell=False)
cls=Task.simple_task_type('javac','${JAVAC} -classpath ${CLASSPATH} -d ${OUTDIR} ${JAVACFLAGS} ${SRC}',shell=False)
cls.color='BLUE'
def post_run_javac(self):
	# javac may emit extra Outer$Inner.class files that were never
	# declared as outputs; discover them afterwards and append them so
	# they are signed/cached and cleaned correctly.
	par={}
	for x in self.inputs:
		par[x.parent.id]=x.parent
	inner={}
	for k in par.values():
		path=k.abspath(self.env)
		lst=os.listdir(path)
		for u in lst:
			# '$' in a class file name marks a nested/inner class.
			if u.find('$')>=0:
				inner_class_node=k.find_or_declare(u)
				inner[inner_class_node.id]=inner_class_node
	to_add=set(inner.keys())-set([x.id for x in self.outputs])
	for x in to_add:
		self.outputs.append(inner[x])
	self.cached=True
	return Task.Task.post_run(self)
cls.post_run=post_run_javac
def detect(conf):
	# Locate javac/java/jar (honouring JAVA_HOME) and seed defaults.
	java_path=conf.environ['PATH'].split(os.pathsep)
	v=conf.env
	if'JAVA_HOME'in conf.environ:
		# Search JAVA_HOME/bin first.
		java_path=[os.path.join(conf.environ['JAVA_HOME'],'bin')]+java_path
		conf.env['JAVA_HOME']=[conf.environ['JAVA_HOME']]
	for x in'javac java jar'.split():
		conf.find_program(x,var=x.upper(),path_list=java_path)
		conf.env[x.upper()]=conf.cmd_to_list(conf.env[x.upper()])
	v['JAVA_EXT']=['.java']
	if'CLASSPATH'in conf.environ:
		v['CLASSPATH']=conf.environ['CLASSPATH']
	if not v['JAR']:conf.fatal('jar is required for making java packages')
	if not v['JAVAC']:conf.fatal('javac is required for compiling java classes')
	v['JARCREATE']='cf'
def check_java_class(self,classname,with_classpath=None):
    """Check whether a Java class can be loaded from the classpath.

    Compiles a throwaway Test.java (class_check_source is a module-level
    constant defined outside this view) and runs it with the class name.
    Returns the exit code of the java process (0 = class found).
    """
    import shutil
    javatestdir='.waf-javatest'
    classpath=javatestdir
    if self.env['CLASSPATH']:
        classpath+=os.pathsep+self.env['CLASSPATH']
    if isinstance(with_classpath,str):
        classpath+=os.pathsep+with_classpath
    # start from a clean scratch directory each time
    shutil.rmtree(javatestdir,True)
    os.mkdir(javatestdir)
    java_file=open(os.path.join(javatestdir,'Test.java'),'w')
    java_file.write(class_check_source)
    java_file.close()
    Utils.exec_command(self.env['JAVAC']+[os.path.join(javatestdir,'Test.java')],shell=False)
    cmd=self.env['JAVA']+['-cp',classpath,'Test',classname]
    self.log.write("%s\n"%str(cmd))
    found=Utils.exec_command(cmd,shell=False,log=self.log)
    self.check_message('Java class %s'%classname,"",not found)
    shutil.rmtree(javatestdir,True)
    return found
def check_jni_headers(conf):
    """Locate the JNI headers and the jvm library below JAVA_HOME and
    record them under the 'JAVA' uselib variables; fatal on failure."""
    if not conf.env.CC_NAME and not conf.env.CXX_NAME:
        conf.fatal('load a compiler first (gcc, g++, ..)')
    if not conf.env.JAVA_HOME:
        conf.fatal('set JAVA_HOME in the system environment')
    javaHome=conf.env['JAVA_HOME'][0]
    # a temporary build context is created purely for its glob helpers
    b=Build.BuildContext()
    b.load_dirs(conf.srcdir,conf.blddir)
    dir=b.root.find_dir(conf.env.JAVA_HOME[0]+'/include')
    f=dir.ant_glob('**/(jni|jni_md).h',flat=False)
    incDirs=[x.parent.abspath()for x in f]
    dir=b.root.find_dir(conf.env.JAVA_HOME[0])
    f=dir.ant_glob('**/*jvm.(so|dll)',flat=False)
    libDirs=[x.parent.abspath()for x in f]or[javaHome]
    for i,d in enumerate(libDirs):
        if conf.check(header_name='jni.h',define_name='HAVE_JNI_H',lib='jvm',libpath=d,includes=incDirs,uselib_store='JAVA',uselib='JAVA'):
            break
    else:
        conf.fatal('could not find lib jvm in %r (see config.log)'%libDirs)
# waf plumbing: bind the task-generator methods to their features and
# expose the two checks as configuration-context methods.
feature('jar')(jar_files)
before('apply_core')(jar_files)
feature('javac')(apply_java)
before('apply_core')(apply_java)
conf(check_java_class)
conf(check_jni_headers)

View file

@ -0,0 +1,57 @@
#! /usr/bin/env python
# encoding: utf-8
import os,sys,re
import Options,TaskGen,Task,Utils
from TaskGen import taskgen,feature,after
class msgfmt_taskgen(TaskGen.task_gen):
    """Task generator for the 'msgfmt' feature (.po -> .mo catalogs)."""
    def __init__(self,*k,**kw):
        TaskGen.task_gen.__init__(self,*k,**kw)
def init_msgfmt(self):
    """Default the install destination to the KDE4 locale tree."""
    self.default_install_path='${KDE4_LOCALE_INSTALL_DIR}'
def apply_msgfmt(self):
    """Create one msgfmt task per language in self.langs and install the
    result as <install_path>/<lang>/LC_MESSAGES/<appname>.mo."""
    for lang in self.to_list(self.langs):
        node=self.path.find_resource(lang+'.po')
        task=self.create_task('msgfmt',node,node.change_ext('.mo'))
        if not self.bld.is_install:continue
        # 'sub/dir/fr' -> 'fr'
        langname=lang.split('/')
        langname=langname[-1]
        task.install_path=self.install_path+os.sep+langname+os.sep+'LC_MESSAGES'
        task.filename=getattr(self,'appname','set_your_appname')+'.mo'
        task.chmod=self.chmod
def detect(conf):
    """Configure KDE4: query kde4-config, parse KDELibsDependencies.cmake
    for install variables, and prime the kdecore/kdeui/... uselibs."""
    kdeconfig=conf.find_program('kde4-config')
    if not kdeconfig:
        conf.fatal('we need kde4-config')
    prefix=Utils.cmd_output('%s --prefix'%kdeconfig,silent=True).strip()
    file='%s/share/apps/cmake/modules/KDELibsDependencies.cmake'%prefix
    try:os.stat(file)
    except OSError:
        # newer layouts moved the module below share/kde4/
        file='%s/share/kde4/apps/cmake/modules/KDELibsDependencies.cmake'%prefix
        try:os.stat(file)
        except OSError:conf.fatal('could not open %s'%file)
    try:
        txt=Utils.readf(file)
    except(OSError,IOError):
        conf.fatal('could not read %s'%file)
    # unfold continuation lines and strip cmake comments before matching
    txt=txt.replace('\\\n','\n')
    fu=re.compile('#(.*)\n')
    txt=fu.sub('',txt)
    # every 'set(KEY "value")' entry becomes a conf.env entry
    setregexp=re.compile('([sS][eE][tT]\s*\()\s*([^\s]+)\s+\"([^"]+)\"\)')
    found=setregexp.findall(txt)
    for(_,key,val)in found:
        conf.env[key]=val
    conf.env['LIB_KDECORE']='kdecore'
    conf.env['LIB_KDEUI']='kdeui'
    conf.env['LIB_KIO']='kio'
    conf.env['LIB_KHTML']='khtml'
    conf.env['LIB_KPARTS']='kparts'
    conf.env['LIBPATH_KDECORE']=conf.env['KDE4_LIB_INSTALL_DIR']
    conf.env['CPPPATH_KDECORE']=conf.env['KDE4_INCLUDE_INSTALL_DIR']
    conf.env.append_value('CPPPATH_KDECORE',conf.env['KDE4_INCLUDE_INSTALL_DIR']+"/KDE")
    conf.env['MSGFMT']=conf.find_program('msgfmt')
# Task type and feature wiring for the msgfmt chain.
Task.simple_task_type('msgfmt','${MSGFMT} ${SRC} -o ${TGT}',color='BLUE',shell=False)
feature('msgfmt')(init_msgfmt)
feature('msgfmt')(apply_msgfmt)
after('init_msgfmt')(apply_msgfmt)

View file

@ -0,0 +1,239 @@
#! /usr/bin/env python
# encoding: utf-8
import sys,re,os,optparse
import TaskGen,Task,Utils,preproc
from Logs import error,debug,warn
from TaskGen import taskgen,after,before,feature
# Version of the libtool-config emulation implemented below.
REVISION="0.1.3"
# Environment variables folded into the fakelibtool task signature.
fakelibtool_vardeps=['CXX','PREFIX']
def fakelibtool_build(task):
    """Write a minimal fake libtool .la file describing the library that
    was just linked. Always returns 0 (task success)."""
    env=task.env
    dest=open(task.outputs[0].abspath(env),'w')
    sname=task.inputs[0].name
    fu=dest.write
    fu("# Generated by ltmain.sh - GNU libtool 1.5.18 - (pwn3d by BKsys II code name WAF)\n")
    if env['vnum']:
        # versioned library: emit lib.so.X.Y.Z / lib.so.X / lib.so names
        nums=env['vnum'].split('.')
        libname=task.inputs[0].name
        name3=libname+'.'+env['vnum']
        name2=libname+'.'+nums[0]
        name1=libname
        fu("dlname='%s'\n"%name2)
        strn=" ".join([name3,name2,name1])
        fu("library_names='%s'\n"%(strn))
    else:
        fu("dlname='%s'\n"%sname)
        fu("library_names='%s %s %s'\n"%(sname,sname,sname))
    fu("old_library=''\n")
    vars=' '.join(env['libtoolvars']+env['LINKFLAGS'])
    fu("dependency_libs='%s'\n"%vars)
    fu("current=0\n")
    fu("age=0\nrevision=0\ninstalled=yes\nshouldnotlink=no\n")
    fu("dlopen=''\ndlpreopen=''\n")
    fu("libdir='%s/lib'\n"%env['PREFIX'])
    dest.close()
    return 0
def read_la_file(path):
    """Parse a libtool .la file into a {key: value} dict.

    Only lines of the exact form  key='value'  are kept; comments, blank
    lines and anything else are silently ignored.
    """
    pattern=re.compile(r'^([^=]+)=\'(.*)\'$')
    result={}
    handle=open(path,"r")
    for raw_line in handle.readlines():
        # a matching line splits into ['', key, value, '']
        parts=pattern.split(raw_line.strip())
        if len(parts)==4:
            result[parts[1]]=parts[2]
    handle.close()
    return result
def apply_link_libtool(self):
    """For library targets, schedule generation of the companion .la file
    and install it next to the binary under ${PREFIX}/lib."""
    if self.type!='program':
        linktask=self.link_task
        self.latask=self.create_task('fakelibtool',linktask.outputs,linktask.outputs[0].change_ext('.la'))
        if self.bld.is_install:
            self.bld.install_files('${PREFIX}/lib',linktask.outputs[0],self.env)
def apply_libtool(self):
    """Expand -l/-L link flags through libtool .la files, transitively
    pulling their dependency_libs into LINKFLAGS."""
    self.env['vnum']=self.vnum
    paths=[]
    libs=[]
    libtool_files=[]
    libtool_vars=[]
    for l in self.env['LINKFLAGS']:
        if l[:2]=='-L':
            paths.append(l[2:])
        elif l[:2]=='-l':
            libs.append(l[2:])
    for l in libs:
        for p in paths:
            # only the first search path is consulted per library ('break')
            dict=read_la_file(p+'/lib'+l+'.la')
            linkflags2=dict.get('dependency_libs','')
            for v in linkflags2.split():
                if v.endswith('.la'):
                    libtool_files.append(v)
                    libtool_vars.append(v)
                    continue
                self.env.append_unique('LINKFLAGS',v)
            break
    self.env['libtoolvars']=libtool_vars
    # process .la files referenced by other .la files until exhausted
    while libtool_files:
        file=libtool_files.pop()
        dict=read_la_file(file)
        for v in dict['dependency_libs'].split():
            if v[-3:]=='.la':
                libtool_files.append(v)
                continue
            self.env.append_unique('LINKFLAGS',v)
# Register the .la generator; it must run after the link task produced its outputs.
Task.task_type_from_func('fakelibtool',vars=fakelibtool_vardeps,func=fakelibtool_build,color='BLUE',after="cc_link cxx_link static_link")
class libtool_la_file:
    """Parsed representation of a single libtool .la file."""
    def __init__(self,la_filename):
        self.__la_filename=la_filename
        # 'libfoo.la' -> link name 'foo'
        self.linkname=str(os.path.split(la_filename)[-1])[:-3]
        if self.linkname.startswith("lib"):
            self.linkname=self.linkname[3:]
        # fields below are populated from the file by __parse()
        self.dlname=None
        self.library_names=None
        self.old_library=None
        self.dependency_libs=None
        self.current=None
        self.age=None
        self.revision=None
        self.installed=None
        self.shouldnotlink=None
        self.dlopen=None
        self.dlpreopen=None
        self.libdir='/usr/lib'
        if not self.__parse():
            raise ValueError("file %s not found!!"%(la_filename))
    def __parse(self):
        """Read key=value pairs into attributes.

        Returns 1 on success, 0 when the file does not exist. 'yes'/'no'
        become booleans, integers are converted, other values keep their
        string form with surrounding quotes stripped.
        """
        if not os.path.isfile(self.__la_filename):return 0
        la_file=open(self.__la_filename,'r')
        for line in la_file:
            ln=line.strip()
            if not ln:continue
            if ln[0]=='#':continue
            (key,value)=str(ln).split('=',1)
            key=key.strip()
            value=value.strip()
            if value=="no":value=False
            elif value=="yes":value=True
            else:
                try:value=int(value)
                except ValueError:value=value.strip("'")
            setattr(self,key,value)
        la_file.close()
        return 1
    def get_libs(self):
        """Return linker flags: -L<libdir> -l<name> plus dependency_libs."""
        libs=[]
        if self.dependency_libs:
            libs=str(self.dependency_libs).strip().split()
        if libs==None:
            libs=[]
        libs.insert(0,"-l%s"%self.linkname.strip())
        libs.insert(0,"-L%s"%self.libdir.strip())
        return libs
    def __str__(self):
        return'''\
dlname = "%(dlname)s"
library_names = "%(library_names)s"
old_library = "%(old_library)s"
dependency_libs = "%(dependency_libs)s"
version = %(current)s.%(age)s.%(revision)s
installed = "%(installed)s"
shouldnotlink = "%(shouldnotlink)s"
dlopen = "%(dlopen)s"
dlpreopen = "%(dlpreopen)s"
libdir = "%(libdir)s"'''%self.__dict__
class libtool_config:
    """Interpreter over a .la file: version comparison and recursive
    resolution of linker flags (pkg-config-like behaviour)."""
    def __init__(self,la_filename):
        self.__libtool_la_file=libtool_la_file(la_filename)
        tmp=self.__libtool_la_file
        self.__version=[int(tmp.current),int(tmp.age),int(tmp.revision)]
        self.__sub_la_files=[]
        self.__sub_la_files.append(la_filename)
        self.__libs=None
    def __cmp__(self,other):
        """Compare against a dotted version string (Python 2 cmp protocol)."""
        if not other:
            return 1
        othervers=[int(s)for s in str(other).split(".")]
        selfvers=self.__version
        return cmp(selfvers,othervers)
    def __str__(self):
        return"\n".join([str(self.__libtool_la_file),' '.join(self.__libtool_la_file.get_libs()),'* New getlibs:',' '.join(self.get_libs())])
    def __get_la_libs(self,la_filename):
        return libtool_la_file(la_filename).get_libs()
    def get_libs(self):
        """Flatten the flags of this .la and every .la it references."""
        libs_list=list(self.__libtool_la_file.get_libs())
        libs_map={}
        while len(libs_list)>0:
            entry=libs_list.pop(0)
            if entry:
                if str(entry).endswith(".la"):
                    # recurse into each referenced .la exactly once
                    if entry not in self.__sub_la_files:
                        self.__sub_la_files.append(entry)
                        libs_list.extend(self.__get_la_libs(entry))
                else:
                    libs_map[entry]=1
        self.__libs=libs_map.keys()
        return self.__libs
    def get_libs_only_L(self):
        if not self.__libs:self.get_libs()
        libs=self.__libs
        libs=[s for s in libs if str(s).startswith('-L')]
        return libs
    def get_libs_only_l(self):
        if not self.__libs:self.get_libs()
        libs=self.__libs
        libs=[s for s in libs if str(s).startswith('-l')]
        return libs
    def get_libs_only_other(self):
        if not self.__libs:self.get_libs()
        libs=self.__libs
        libs=[s for s in libs if not(str(s).startswith('-L')or str(s).startswith('-l'))]
        return libs
def useCmdLine():
    """Stand-alone CLI emulating 'libtool-config'; exits 1 on a failed
    version test, otherwise prints the requested flags and returns 0."""
    usage='''Usage: %prog [options] PathToFile.la
example: %prog --atleast-version=2.0.0 /usr/lib/libIlmImf.la
nor: %prog --libs /usr/lib/libamarok.la'''
    parser=optparse.OptionParser(usage)
    a=parser.add_option
    a("--version",dest="versionNumber",action="store_true",default=False,help="output version of libtool-config")
    a("--debug",dest="debug",action="store_true",default=False,help="enable debug")
    a("--libs",dest="libs",action="store_true",default=False,help="output all linker flags")
    a("--libs-only-l",dest="libs_only_l",action="store_true",default=False,help="output -l flags")
    a("--libs-only-L",dest="libs_only_L",action="store_true",default=False,help="output -L flags")
    a("--libs-only-other",dest="libs_only_other",action="store_true",default=False,help="output other libs (e.g. -pthread)")
    a("--atleast-version",dest="atleast_version",default=None,help="return 0 if the module is at least version ATLEAST_VERSION")
    a("--exact-version",dest="exact_version",default=None,help="return 0 if the module is exactly version EXACT_VERSION")
    a("--max-version",dest="max_version",default=None,help="return 0 if the module is at no newer than version MAX_VERSION")
    (options,args)=parser.parse_args()
    if len(args)!=1 and not options.versionNumber:
        parser.error("incorrect number of arguments")
    if options.versionNumber:
        print("libtool-config version %s"%REVISION)
        return 0
    ltf=libtool_config(args[0])
    if options.debug:
        print(ltf)
    # version tests go through libtool_config.__cmp__ against the raw string
    if options.atleast_version:
        if ltf>=options.atleast_version:return 0
        sys.exit(1)
    if options.exact_version:
        if ltf==options.exact_version:return 0
        sys.exit(1)
    if options.max_version:
        if ltf<=options.max_version:return 0
        sys.exit(1)
    def p(x):
        print(" ".join(x))
    if options.libs:p(ltf.get_libs())
    elif options.libs_only_l:p(ltf.get_libs_only_l())
    elif options.libs_only_L:p(ltf.get_libs_only_L())
    elif options.libs_only_other:p(ltf.get_libs_only_other())
    return 0
# Allow running this module directly as a libtool-config replacement.
if __name__=='__main__':
    useCmdLine()
# waf plumbing: bind the libtool methods to the 'libtool' feature.
feature("libtool")(apply_link_libtool)
after('apply_link')(apply_link_libtool)
feature("libtool")(apply_libtool)
before('apply_core')(apply_libtool)

View file

@ -0,0 +1,13 @@
#! /usr/bin/env python
# encoding: utf-8
import TaskGen
from TaskGen import taskgen,feature
from Constants import*
# Chain rule: byte-compile (and strip, -s) every .lua source into .luac.
TaskGen.declare_chain(name='luac',rule='${LUAC} -s -o ${TGT} ${SRC}',ext_in='.lua',ext_out='.luac',reentrant=False,install='LUADIR',)
def init_lua(self):
    """Install compiled lua files with mode 0755 (O755 from Constants)."""
    self.default_chmod=O755
def detect(conf):
    """Find the luac byte-compiler; configuration fails without it."""
    conf.find_program('luac',var='LUAC',mandatory=True)
# Bind init_lua to the 'lua' feature.
feature('lua')(init_lua)

View file

@ -0,0 +1,302 @@
#! /usr/bin/env python
# encoding: utf-8
import shutil,re,os
import TaskGen,Node,Task,Utils,Build,Constants
from TaskGen import feature,taskgen,after,before
from Logs import debug
def copy_func(tsk):
    """Copy the task's single input file to its output location.

    Returns 0 on success and 1 when the copy failed (waf task-return
    convention). Applies tsk.chmod to the destination when it is set.
    """
    environment=tsk.env
    source_path=tsk.inputs[0].abspath(environment)
    dest_path=tsk.outputs[0].abspath(environment)
    try:
        shutil.copy2(source_path,dest_path)
    except(OSError,IOError):
        return 1
    # copy succeeded; optionally fix up permissions
    if tsk.chmod:
        os.chmod(dest_path,tsk.chmod)
    return 0
def action_process_file_func(tsk):
    """Dispatch to the per-task processing function (tsk.fun, set by
    apply_copy/apply_subst) and return its result."""
    if not tsk.fun:raise Utils.WafError('task must have a function attached to it for copy_func to work!')
    return tsk.fun(tsk)
class cmd_taskgen(TaskGen.task_gen):
    """Task generator for the 'cmd' feature (run an arbitrary function)."""
    def __init__(self,*k,**kw):
        TaskGen.task_gen.__init__(self,*k,**kw)
def apply_cmd(self):
    """Wrap the user-provided function (self.fun) in a bare TaskBase."""
    if not self.fun:raise Utils.WafError('cmdobj needs a function!')
    tsk=Task.TaskBase()
    tsk.fun=self.fun
    tsk.env=self.env
    self.tasks.append(tsk)
    tsk.install_path=self.install_path
class copy_taskgen(TaskGen.task_gen):
    """Task generator for the 'copy' feature (plain file copies)."""
    def __init__(self,*k,**kw):
        TaskGen.task_gen.__init__(self,*k,**kw)
def apply_copy(self):
    """Create one 'copy' task per source file; replaces apply_core."""
    Utils.def_attrs(self,fun=copy_func)
    self.default_install_path=0
    lst=self.to_list(self.source)
    self.meths.remove('apply_core')
    for filename in lst:
        node=self.path.find_resource(filename)
        if not node:raise Utils.WafError('cannot find input file %s for processing'%filename)
        target=self.target
        # with several sources an explicit target name cannot apply to all
        if not target or len(lst)>1:target=node.name
        newnode=self.path.find_or_declare(target)
        tsk=self.create_task('copy',node,newnode)
        tsk.fun=self.fun
        tsk.chmod=self.chmod
        tsk.install_path=self.install_path
        if not tsk.env:
            tsk.debug()
            raise Utils.WafError('task without an environment')
def subst_func(tsk):
    """Substitute @VAR@ markers in the input file and write the result.

    Values come from tsk.dict; when that is empty, each marker is filled
    from the task environment (trying VAR, then its uppercase form).
    """
    m4_re=re.compile('@(\w+)@',re.M)
    env=tsk.env
    infile=tsk.inputs[0].abspath(env)
    outfile=tsk.outputs[0].abspath(env)
    code=Utils.readf(infile)
    # protect literal '%' then turn @VAR@ into %(VAR)s placeholders
    code=code.replace('%','%%')
    s=m4_re.sub(r'%(\1)s',code)
    di=tsk.dict or{}
    if not di:
        names=m4_re.findall(code)
        for i in names:
            di[i]=env.get_flat(i)or env.get_flat(i.upper())
    file=open(outfile,'w')
    file.write(s%di)
    file.close()
    if tsk.chmod:os.chmod(outfile,tsk.chmod)
class subst_taskgen(TaskGen.task_gen):
    """Task generator for the 'subst' feature (@VAR@ substitution)."""
    def __init__(self,*k,**kw):
        TaskGen.task_gen.__init__(self,*k,**kw)
def apply_subst(self):
    """Create a 'copy' task per source that runs subst_func; the target
    defaults to the source file name with its extension removed."""
    Utils.def_attrs(self,fun=subst_func)
    self.default_install_path=0
    lst=self.to_list(self.source)
    self.meths.remove('apply_core')
    self.dict=getattr(self,'dict',{})
    for filename in lst:
        node=self.path.find_resource(filename)
        if not node:raise Utils.WafError('cannot find input file %s for processing'%filename)
        if self.target:
            newnode=self.path.find_or_declare(self.target)
        else:
            newnode=node.change_ext('')
        try:
            self.dict=self.dict.get_merged_dict()
        except AttributeError:
            pass
        # hash the substitution dict so a changed value triggers a rebuild
        if self.dict and not self.env['DICT_HASH']:
            self.env=self.env.copy()
            keys=list(self.dict.keys())
            keys.sort()
            lst=[self.dict[x]for x in keys]
            self.env['DICT_HASH']=str(Utils.h_list(lst))
        tsk=self.create_task('copy',node,newnode)
        tsk.fun=self.fun
        tsk.dict=self.dict
        tsk.dep_vars=['DICT_HASH']
        tsk.install_path=self.install_path
        tsk.chmod=self.chmod
        if not tsk.env:
            tsk.debug()
            raise Utils.WafError('task without an environment')
class cmd_arg(object):
    """Command-line argument that resolves to a node in the project tree.

    Subclasses implement find_node(); 'template' is a %-format applied to
    the resolved path (e.g. '--input=%s').
    """
    def __init__(self, name, template='%s'):
        self.node = None          # filled in later by find_node()
        self.template = template
        self.name = name
class input_file(cmd_arg):
    """Argument naming an existing file, looked up in the source tree."""
    def find_node(self,base_path):
        assert isinstance(base_path,Node.Node)
        self.node=base_path.find_resource(self.name)
        if self.node is None:
            # Fix: the original format string had one %s but two arguments,
            # which raised TypeError instead of the intended WafError.
            raise Utils.WafError("Input file %s not found in %s"%(self.name,base_path))
    def get_path(self,env,absolute):
        """Return the formatted path; absolute when the task runs outside
        the build directory."""
        if absolute:
            return self.template%self.node.abspath(env)
        else:
            return self.template%self.node.srcpath(env)
class output_file(cmd_arg):
    """Argument naming a file produced by the command (build tree)."""
    def find_node(self,base_path):
        assert isinstance(base_path,Node.Node)
        self.node=base_path.find_or_declare(self.name)
        if self.node is None:
            # Fix: the original format string had one %s but two arguments,
            # which raised TypeError instead of the intended WafError.
            raise Utils.WafError("Output file %s not found in %s"%(self.name,base_path))
    def get_path(self,env,absolute):
        """Return the formatted path; absolute when the task runs outside
        the build directory."""
        if absolute:
            return self.template%self.node.abspath(env)
        else:
            return self.template%self.node.bldpath(env)
class cmd_dir_arg(cmd_arg):
    """Argument naming a directory; base for input_dir/output_dir."""
    def find_node(self,base_path):
        assert isinstance(base_path,Node.Node)
        self.node=base_path.find_dir(self.name)
        if self.node is None:
            # Fix: the original format string had one %s but two arguments,
            # which raised TypeError instead of the intended WafError.
            raise Utils.WafError("Directory %s not found in %s"%(self.name,base_path))
class input_dir(cmd_dir_arg):
    """Directory argument formatted as an absolute source-tree path."""
    def get_path(self,dummy_env,dummy_absolute):
        return self.template%self.node.abspath()
class output_dir(cmd_dir_arg):
    """Directory argument formatted as a build-variant path."""
    def get_path(self,env,dummy_absolute):
        return self.template%self.node.abspath(env)
class command_output(Task.Task):
    """Task that runs an external command with resolved arguments and
    optional stdin/stdout/stderr redirection to tree nodes."""
    color="BLUE"
    def __init__(self,env,command,command_node,command_args,stdin,stdout,cwd,os_env,stderr):
        Task.Task.__init__(self,env,normal=1)
        assert isinstance(command,(str,Node.Node))
        self.command=command
        self.command_args=command_args
        self.stdin=stdin
        self.stdout=stdout
        self.cwd=cwd
        self.os_env=os_env
        self.stderr=stderr
        # depending on the command node re-runs the task when it changes
        if command_node is not None:self.dep_nodes=[command_node]
        self.dep_vars=[]
    def run(self):
        """Build argv, open the redirections and execute the command;
        returns the child's exit status."""
        task=self
        # paths are build-relative normally, absolute when a cwd is given
        def input_path(node,template):
            if task.cwd is None:
                return template%node.bldpath(task.env)
            else:
                return template%node.abspath()
        def output_path(node,template):
            fun=node.abspath
            if task.cwd is None:fun=node.bldpath
            return template%fun(task.env)
        if isinstance(task.command,Node.Node):
            argv=[input_path(task.command,'%s')]
        else:
            argv=[task.command]
        for arg in task.command_args:
            if isinstance(arg,str):
                argv.append(arg)
            else:
                assert isinstance(arg,cmd_arg)
                argv.append(arg.get_path(task.env,(task.cwd is not None)))
        if task.stdin:
            stdin=open(input_path(task.stdin,'%s'))
        else:
            stdin=None
        if task.stdout:
            stdout=open(output_path(task.stdout,'%s'),"w")
        else:
            stdout=None
        if task.stderr:
            stderr=open(output_path(task.stderr,'%s'),"w")
        else:
            stderr=None
        if task.cwd is None:
            cwd=('None (actually %r)'%os.getcwd())
        else:
            cwd=repr(task.cwd)
        debug("command-output: cwd=%s, stdin=%r, stdout=%r, argv=%r"%(cwd,stdin,stdout,argv))
        if task.os_env is None:
            os_env=os.environ
        else:
            os_env=task.os_env
        command=Utils.pproc.Popen(argv,stdin=stdin,stdout=stdout,stderr=stderr,cwd=task.cwd,env=os_env)
        return command.wait()
class cmd_output_taskgen(TaskGen.task_gen):
    """Task generator for the 'command-output' feature."""
    def __init__(self,*k,**kw):
        TaskGen.task_gen.__init__(self,*k,**kw)
def init_cmd_output(self):
    """Seed the default attributes used by apply_cmd_output.
    NOTE(review): the [] defaults are shared objects if Utils.def_attrs
    stores them without copying — confirm they are never mutated in place."""
    Utils.def_attrs(self,stdin=None,stdout=None,stderr=None,command=None,command_is_external=False,argv=[],dependencies=[],dep_vars=[],hidden_inputs=[],hidden_outputs=[],cwd=None,os_env=None)
def apply_cmd_output(self):
    """Build a command_output task from the generator's attributes.

    Resolves the command and every argument/redirection to nodes, checks
    that at least one input and one output exist (unless no_inputs /
    no_outputs is set), and wires dependencies on other task generators.
    """
    if self.command is None:
        raise Utils.WafError("command-output missing command")
    if self.command_is_external:
        cmd=self.command
        cmd_node=None
    else:
        cmd_node=self.path.find_resource(self.command)
        assert cmd_node is not None,('''Could not find command '%s' in source tree.
Hint: if this is an external command,
use command_is_external=True''')%(self.command,)
        cmd=cmd_node
    if self.cwd is None:
        cwd=None
    else:
        # Fix: the original read 'assert isinstance(cwd,CmdDirArg)', which
        # raised NameError ('cwd' unbound, 'CmdDirArg' undefined) whenever a
        # working directory was supplied, and never assigned 'cwd' at all.
        assert isinstance(self.cwd,cmd_dir_arg)
        self.cwd.find_node(self.path)
        # NOTE(review): command_output passes task.cwd straight to Popen, so
        # hand it the resolved directory path — confirm against callers.
        cwd=self.cwd.node.abspath()
    args=[]
    inputs=[]
    outputs=[]
    # resolve every structured argument; plain strings pass through untouched
    for arg in self.argv:
        if isinstance(arg,cmd_arg):
            arg.find_node(self.path)
            if isinstance(arg,input_file):
                inputs.append(arg.node)
            if isinstance(arg,output_file):
                outputs.append(arg.node)
    if self.stdout is None:
        stdout=None
    else:
        assert isinstance(self.stdout,str)
        stdout=self.path.find_or_declare(self.stdout)
        if stdout is None:
            raise Utils.WafError("File %s not found"%(self.stdout,))
        outputs.append(stdout)
    if self.stderr is None:
        stderr=None
    else:
        assert isinstance(self.stderr,str)
        stderr=self.path.find_or_declare(self.stderr)
        if stderr is None:
            raise Utils.WafError("File %s not found"%(self.stderr,))
        outputs.append(stderr)
    if self.stdin is None:
        stdin=None
    else:
        assert isinstance(self.stdin,str)
        stdin=self.path.find_resource(self.stdin)
        if stdin is None:
            raise Utils.WafError("File %s not found"%(self.stdin,))
        inputs.append(stdin)
    # hidden inputs/outputs: files the command reads/writes without them
    # appearing on its command line
    for hidden_input in self.to_list(self.hidden_inputs):
        node=self.path.find_resource(hidden_input)
        if node is None:
            raise Utils.WafError("File %s not found in dir %s"%(hidden_input,self.path))
        inputs.append(node)
    for hidden_output in self.to_list(self.hidden_outputs):
        node=self.path.find_or_declare(hidden_output)
        if node is None:
            raise Utils.WafError("File %s not found in dir %s"%(hidden_output,self.path))
        outputs.append(node)
    if not(inputs or getattr(self,'no_inputs',None)):
        raise Utils.WafError('command-output objects must have at least one input file or give self.no_inputs')
    if not(outputs or getattr(self,'no_outputs',None)):
        raise Utils.WafError('command-output objects must have at least one output file or give self.no_outputs')
    task=command_output(self.env,cmd,cmd_node,self.argv,stdin,stdout,cwd,self.os_env,stderr)
    Utils.copy_attrs(self,task,'before after ext_in ext_out',only_if_set=True)
    self.tasks.append(task)
    task.inputs=inputs
    task.outputs=outputs
    task.dep_vars=self.to_list(self.dep_vars)
    for dep in self.dependencies:
        assert dep is not self
        dep.post()
        for dep_task in dep.tasks:
            task.set_run_after(dep_task)
    if not task.inputs:
        # Tasks without inputs cannot compute a signature: force them to run
        # and hash their outputs afterwards (Python 2 bound-method creation).
        task.runnable_status=type(Task.TaskBase.run)(runnable_status,task,task.__class__)
        task.post_run=type(Task.TaskBase.run)(post_run,task,task.__class__)
def post_run(self):
    """Record output-file signatures manually (used for input-less tasks)."""
    for x in self.outputs:
        h=Utils.h_file(x.abspath(self.env))
        self.generator.bld.node_sigs[self.env.variant()][x.id]=h
def runnable_status(self):
    """Input-less command-output tasks are always scheduled to run."""
    return Constants.RUN_ME
# Task types and feature bindings for this module.
Task.task_type_from_func('copy',vars=[],func=action_process_file_func)
TaskGen.task_gen.classes['command-output']=cmd_output_taskgen
feature('cmd')(apply_cmd)
feature('copy')(apply_copy)
before('apply_core')(apply_copy)
feature('subst')(apply_subst)
before('apply_core')(apply_subst)
feature('command-output')(init_cmd_output)
feature('command-output')(apply_cmd_output)
after('init_cmd_output')(apply_cmd_output)

View file

@ -0,0 +1,586 @@
#! /usr/bin/env python
# encoding: utf-8
import os,sys,re,string,optparse
import Utils,TaskGen,Runner,Configure,Task,Options
from Logs import debug,info,warn,error
from TaskGen import after,before,feature
from Configure import conftest,conf
import ccroot,cc,cxx,ar,winres
from libtool import read_la_file
# _winreg is the Python 2 name of the registry module, winreg the Python 3 one.
try:
    import _winreg
except:
    import winreg as _winreg
# shortcut to waf's subprocess wrapper
pproc=Utils.pproc
# Libraries shipped with Windows / the platform SDKs: libname_msvc accepts
# these verbatim without searching the file system.
g_msvc_systemlibs="""
aclui activeds ad1 adptif adsiid advapi32 asycfilt authz bhsupp bits bufferoverflowu cabinet
cap certadm certidl ciuuid clusapi comctl32 comdlg32 comsupp comsuppd comsuppw comsuppwd comsvcs
credui crypt32 cryptnet cryptui d3d8thk daouuid dbgeng dbghelp dciman32 ddao35 ddao35d
ddao35u ddao35ud delayimp dhcpcsvc dhcpsapi dlcapi dnsapi dsprop dsuiext dtchelp
faultrep fcachdll fci fdi framedyd framedyn gdi32 gdiplus glauxglu32 gpedit gpmuuid
gtrts32w gtrtst32hlink htmlhelp httpapi icm32 icmui imagehlp imm32 iphlpapi iprop
kernel32 ksguid ksproxy ksuser libcmt libcmtd libcpmt libcpmtd loadperf lz32 mapi
mapi32 mgmtapi minidump mmc mobsync mpr mprapi mqoa mqrt msacm32 mscms mscoree
msdasc msimg32 msrating mstask msvcmrt msvcurt msvcurtd mswsock msxml2 mtx mtxdm
netapi32 nmapinmsupp npptools ntdsapi ntdsbcli ntmsapi ntquery odbc32 odbcbcp
odbccp32 oldnames ole32 oleacc oleaut32 oledb oledlgolepro32 opends60 opengl32
osptk parser pdh penter pgobootrun pgort powrprof psapi ptrustm ptrustmd ptrustu
ptrustud qosname rasapi32 rasdlg rassapi resutils riched20 rpcndr rpcns4 rpcrt4 rtm
rtutils runtmchk scarddlg scrnsave scrnsavw secur32 sensapi setupapi sfc shell32
shfolder shlwapi sisbkup snmpapi sporder srclient sti strsafe svcguid tapi32 thunk32
traffic unicows url urlmon user32 userenv usp10 uuid uxtheme vcomp vcompd vdmdbg
version vfw32 wbemuuid webpost wiaguid wininet winmm winscard winspool winstrm
wintrust wldap32 wmiutils wow32 ws2_32 wsnmp32 wsock32 wst wtsapi32 xaswitch xolehlp
""".split()
# (waf target name, real architecture) pairs for each toolchain family.
all_msvc_platforms=[('x64','amd64'),('x86','x86'),('ia64','ia64'),('x86_amd64','amd64'),('x86_ia64','ia64')]
all_wince_platforms=[('armv4','arm'),('armv4i','arm'),('mipsii','mips'),('mipsii_fp','mips'),('mipsiv','mips'),('mipsiv_fp','mips'),('sh4','sh'),('x86','cex86')]
all_icl_platforms=[('intel64','amd64'),('em64t','amd64'),('ia32','x86'),('Itanium','ia64')]
def setup_msvc(conf,versions):
    """Pick the first (compiler,revision,paths...) combination matching the
    user's MSVC_TARGETS / MSVC_VERSIONS preferences; fatal when none fits."""
    platforms=Utils.to_list(conf.env['MSVC_TARGETS'])or[i for i,j in all_msvc_platforms+all_icl_platforms+all_wince_platforms]
    # with no explicit preference, newest detected versions are tried first
    desired_versions=conf.env['MSVC_VERSIONS']or[v for v,_ in versions][::-1]
    versiondict=dict(versions)
    for version in desired_versions:
        try:
            targets=dict(versiondict[version])
            for target in platforms:
                try:
                    arch,(p1,p2,p3)=targets[target]
                    compiler,revision=version.split()
                    return compiler,revision,p1,p2,p3
                except KeyError:continue
        except KeyError:continue
    conf.fatal('msvc: Impossible to find a valid architecture for building (in setup_msvc)')
def get_msvc_version(conf,compiler,version,target,vcvars):
    """Run the vcvars batch file and capture the PATH/INCLUDE/LIB it sets.

    Returns (bin dirs, include dirs, lib dirs); fatal when the batch file
    does not announce a usable environment or the compiler cannot run.
    """
    debug('msvc: get_msvc_version: %r %r %r',compiler,version,target)
    batfile=os.path.join(conf.blddir,'waf-print-msvc.bat')
    f=open(batfile,'w')
    f.write("""@echo off
set INCLUDE=
set LIB=
call "%s" %s
echo PATH=%%PATH%%
echo INCLUDE=%%INCLUDE%%
echo LIB=%%LIB%%
"""%(vcvars,target))
    f.close()
    sout=Utils.cmd_output(['cmd','/E:on','/V:on','/C',batfile])
    lines=sout.splitlines()
    # the first line must be one of the known banners, else the env is unusable
    for x in('Setting environment','Setting SDK environment','Intel(R) C++ Compiler'):
        if lines[0].find(x)!=-1:
            break
    else:
        debug('msvc: get_msvc_version: %r %r %r -> not found',compiler,version,target)
        conf.fatal('msvc: Impossible to find a valid architecture for building (in get_msvc_version)')
    for line in lines[1:]:
        if line.startswith('PATH='):
            path=line[5:]
            MSVC_PATH=path.split(';')
        elif line.startswith('INCLUDE='):
            MSVC_INCDIR=[i for i in line[8:].split(';')if i]
        elif line.startswith('LIB='):
            MSVC_LIBDIR=[i for i in line[4:].split(';')if i]
    env={}
    env.update(os.environ)
    env.update(PATH=path)
    compiler_name,linker_name,lib_name=_get_prog_names(conf,compiler)
    cxx=conf.find_program(compiler_name,path_list=MSVC_PATH)
    # a CL env var would inject extra options into every compile: drop it
    if env.has_key('CL'):
        del(env['CL'])
    try:
        p=pproc.Popen([cxx,'/help'],env=env,stdout=pproc.PIPE,stderr=pproc.PIPE)
        out,err=p.communicate()
        if p.returncode!=0:
            raise Exception('return code: %r: %r'%(p.returncode,err))
    except Exception,e:
        debug('msvc: get_msvc_version: %r %r %r -> failure',compiler,version,target)
        debug(str(e))
        conf.fatal('msvc: cannot run the compiler (in get_msvc_version)')
    else:
        debug('msvc: get_msvc_version: %r %r %r -> OK',compiler,version,target)
    return(MSVC_PATH,MSVC_INCDIR,MSVC_LIBDIR)
def gather_wsdk_versions(conf,versions):
    """Enumerate Windows SDK installations from the registry and append
    ('wsdk <version>', targets) entries to the versions list."""
    version_pattern=re.compile('^v..?.?\...?.?')
    try:
        all_versions=_winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Microsoft\\Microsoft SDKs\\Windows')
    except WindowsError:
        try:
            all_versions=_winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Microsoft\\Microsoft SDKs\\Windows')
        except WindowsError:
            return
    index=0
    while 1:
        try:
            version=_winreg.EnumKey(all_versions,index)
        except WindowsError:
            break
        index=index+1
        if not version_pattern.match(version):
            continue
        try:
            msvc_version=_winreg.OpenKey(all_versions,version)
            path,type=_winreg.QueryValueEx(msvc_version,'InstallationFolder')
        except WindowsError:
            continue
        if os.path.isfile(os.path.join(path,'bin','SetEnv.cmd')):
            targets=[]
            for target,arch in all_msvc_platforms:
                try:
                    targets.append((target,(arch,conf.get_msvc_version('wsdk',version,'/'+target,os.path.join(path,'bin','SetEnv.cmd')))))
                except Configure.ConfigurationError:
                    pass
            versions.append(('wsdk '+version[1:],targets))
def gather_msvc_versions(conf,versions):
    """Enumerate Visual Studio / VC Express installs (plus any Windows CE
    SDKs) from the registry and append their targets to versions."""
    try:
        ce_sdk=_winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Microsoft\\Windows CE Tools\\SDKs')
    except WindowsError:
        try:
            ce_sdk=_winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Microsoft\\Windows CE Tools\\SDKs')
        except WindowsError:
            ce_sdk=''
    if ce_sdk:
        # collect (device, platform info) pairs for every installed CE SDK
        supported_wince_platforms=[]
        ce_index=0
        while 1:
            try:
                sdk_device=_winreg.EnumKey(ce_sdk,ce_index)
            except WindowsError:
                break
            ce_index=ce_index+1
            sdk=_winreg.OpenKey(ce_sdk,sdk_device)
            path,type=_winreg.QueryValueEx(sdk,'SDKRootDir')
            path=str(path)
            path,device=os.path.split(path)
            if not device:
                path,device=os.path.split(path)
            for arch,compiler in all_wince_platforms:
                platforms=[]
                if os.path.isdir(os.path.join(path,device,'Lib',arch)):
                    platforms.append((arch,compiler,os.path.join(path,device,'Include',arch),os.path.join(path,device,'Lib',arch)))
                if platforms:
                    supported_wince_platforms.append((device,platforms))
    version_pattern=re.compile('^..?\...?')
    for vcver,vcvar in[('VCExpress','exp'),('VisualStudio','')]:
        try:
            all_versions=_winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Microsoft\\'+vcver)
        except WindowsError:
            try:
                all_versions=_winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Microsoft\\'+vcver)
            except WindowsError:
                continue
        index=0
        while 1:
            try:
                version=_winreg.EnumKey(all_versions,index)
            except WindowsError:
                break
            index=index+1
            if not version_pattern.match(version):
                continue
            try:
                msvc_version=_winreg.OpenKey(all_versions,version+"\\Setup\\VS")
                path,type=_winreg.QueryValueEx(msvc_version,'ProductDir')
                path=str(path)
                targets=[]
                if ce_sdk:
                    for device,platforms in supported_wince_platforms:
                        cetargets=[]
                        for platform,compiler,include,lib in platforms:
                            winCEpath=os.path.join(path,'VC','ce')
                            if os.path.isdir(winCEpath):
                                # x86 host tools are needed alongside the CE cross tools
                                common_bindirs,_1,_2=conf.get_msvc_version('msvc',version,'x86',os.path.join(path,'Common7','Tools','vsvars32.bat'))
                                if os.path.isdir(os.path.join(winCEpath,'lib',platform)):
                                    bindirs=[os.path.join(winCEpath,'bin',compiler),os.path.join(winCEpath,'bin','x86_'+compiler)]+common_bindirs
                                    incdirs=[include,os.path.join(winCEpath,'include'),os.path.join(winCEpath,'atlmfc','include')]
                                    libdirs=[lib,os.path.join(winCEpath,'lib',platform),os.path.join(winCEpath,'atlmfc','lib',platform)]
                                    cetargets.append((platform,(platform,(bindirs,incdirs,libdirs))))
                        versions.append((device+' '+version,cetargets))
                if os.path.isfile(os.path.join(path,'VC','vcvarsall.bat')):
                    for target,realtarget in all_msvc_platforms[::-1]:
                        try:
                            targets.append((target,(realtarget,conf.get_msvc_version('msvc',version,target,os.path.join(path,'VC','vcvarsall.bat')))))
                        except:
                            pass
                elif os.path.isfile(os.path.join(path,'Common7','Tools','vsvars32.bat')):
                    try:
                        targets.append(('x86',('x86',conf.get_msvc_version('msvc',version,'x86',os.path.join(path,'Common7','Tools','vsvars32.bat')))))
                    except Configure.ConfigurationError:
                        pass
                versions.append(('msvc '+version,targets))
            except WindowsError:
                continue
def gather_icl_versions(conf,versions):
    """Enumerate Intel C++ compiler installations from the registry and
    append ('intel <major>', targets) entries to the versions list."""
    version_pattern=re.compile('^...?.?\....?.?')
    try:
        all_versions=_winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Intel\\Compilers\\C++')
    except WindowsError:
        try:
            all_versions=_winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Intel\\Compilers\\C++')
        except WindowsError:
            return
    index=0
    while 1:
        try:
            version=_winreg.EnumKey(all_versions,index)
        except WindowsError:
            break
        index=index+1
        if not version_pattern.match(version):
            continue
        targets=[]
        for target,arch in all_icl_platforms:
            try:
                icl_version=_winreg.OpenKey(all_versions,version+'\\'+target)
                path,type=_winreg.QueryValueEx(icl_version,'ProductDir')
                if os.path.isfile(os.path.join(path,'bin','iclvars.bat')):
                    try:
                        targets.append((target,(arch,conf.get_msvc_version('intel',version,target,os.path.join(path,'bin','iclvars.bat')))))
                    except Configure.ConfigurationError:
                        pass
            except WindowsError:
                continue
        major=version[0:2]
        versions.append(('intel '+major,targets))
def get_msvc_versions(conf):
    """Return the detected toolchains, caching them in the environment."""
    if not conf.env.MSVC_INSTALLED_VERSIONS:
        lst=[]
        conf.gather_msvc_versions(lst)
        conf.gather_wsdk_versions(lst)
        conf.gather_icl_versions(lst)
        conf.env.MSVC_INSTALLED_VERSIONS=lst
    return conf.env.MSVC_INSTALLED_VERSIONS
def print_all_msvc_detected(conf):
    """Log every detected compiler version and its targets."""
    for version,targets in conf.env['MSVC_INSTALLED_VERSIONS']:
        info(version)
        for target,l in targets:
            info("\t"+target)
def detect_msvc(conf):
    """Detect installed toolchains and select one (see setup_msvc)."""
    versions=get_msvc_versions(conf)
    return setup_msvc(conf,versions)
def find_lt_names_msvc(self,libname,is_static=False):
    """Search LIBPATH for a libtool .la file describing libname.

    Returns (directory, library name, is_static) or (None, None, None)
    when no .la file is found.
    """
    lt_names=['lib%s.la'%libname,'%s.la'%libname,]
    for path in self.env['LIBPATH']:
        for la in lt_names:
            laf=os.path.join(path,la)
            dll=None
            if os.path.exists(laf):
                ltdict=read_la_file(laf)
                lt_libdir=None
                if ltdict.get('libdir',''):
                    lt_libdir=ltdict['libdir']
                if not is_static and ltdict.get('library_names',''):
                    dllnames=ltdict['library_names'].split()
                    dll=dllnames[0].lower()
                    dll=re.sub('\.dll$','',dll)
                    return(lt_libdir,dll,False)
                elif ltdict.get('old_library',''):
                    olib=ltdict['old_library']
                    if os.path.exists(os.path.join(path,olib)):
                        return(path,olib,True)
                    # NOTE(review): lt_libdir may still be None here, which would
                    # make os.path.join fail — confirm .la files always set libdir.
                    elif lt_libdir!=''and os.path.exists(os.path.join(lt_libdir,olib)):
                        return(lt_libdir,olib,True)
                    else:
                        return(None,olib,True)
                else:
                    raise Utils.WafError('invalid libtool object file: %s'%laf)
    return(None,None,None)
def libname_msvc(self,libname,is_static=False,mandatory=False):
	"""Map a Unix-style library name onto the name MSVC should link against.

	Resolution order: MSVC system libraries, libtool .la metadata found on
	LIBPATH, then conventional static/import-library file names probed on
	disk.  The returned name has any .lib suffix stripped; returns None
	for the math library 'm' (part of the MSVC runtime).
	"""
	lib=libname.lower()
	lib=re.sub('\.lib$','',lib)
	# system libraries (kernel32, user32, ...) are linked by bare name
	if lib in g_msvc_systemlibs:
		return lib
	lib=re.sub('^lib','',lib)
	if lib=='m':
		return None
	# a libtool .la file, when present, carries authoritative paths
	(lt_path,lt_libname,lt_static)=self.find_lt_names_msvc(lib,is_static)
	if lt_path!=None and lt_libname!=None:
		if lt_static==True:
			# static libtool archive: return the full path to the archive file
			return os.path.join(lt_path,lt_libname)
	if lt_path!=None:
		# search the libtool-declared directory before the regular LIBPATH
		_libpaths=[lt_path]+self.env['LIBPATH']
	else:
		_libpaths=self.env['LIBPATH']
	# candidate file names; for dynamic linking, import libraries are tried first
	static_libs=['lib%ss.lib'%lib,'lib%s.lib'%lib,'%ss.lib'%lib,'%s.lib'%lib,]
	dynamic_libs=['lib%s.dll.lib'%lib,'lib%s.dll.a'%lib,'%s.dll.lib'%lib,'%s.dll.a'%lib,'lib%s_d.lib'%lib,'%s_d.lib'%lib,'%s.lib'%lib,]
	libnames=static_libs
	if not is_static:
		libnames=dynamic_libs+static_libs
	for path in _libpaths:
		for libn in libnames:
			if os.path.exists(os.path.join(path,libn)):
				debug('msvc: lib found: %s',os.path.join(path,libn))
				return re.sub('\.lib$','',libn)
	if mandatory:
		self.fatal("The library %r could not be found"%libname)
	# fall back to the bare name and let the linker resolve it
	return re.sub('\.lib$','',libname)
def check_lib_msvc(self,libname,is_static=False,uselib_store=None,mandatory=False):
	"""Resolve *libname* via libname_msvc and record it in the env as LIB_<store>."""
	resolved=self.libname_msvc(libname,is_static,mandatory)
	store=uselib_store or libname.upper()
	if False and is_static:
		# dead branch kept from upstream: static libs were once stored separately
		self.env['STATICLIB_'+store]=[resolved]
	else:
		self.env['LIB_'+store]=[resolved]
def check_libs_msvc(self,libnames,is_static=False,mandatory=False):
	"""Run check_lib_msvc for each entry of *libnames* (a string or a list)."""
	for one_lib in Utils.to_list(libnames):
		self.check_lib_msvc(one_lib,is_static,mandatory=mandatory)
def no_autodetect(conf):
conf.eval_rules(detect.replace('autodetect',''))
detect='''
autodetect
find_msvc
msvc_common_flags
cc_load_tools
cxx_load_tools
cc_add_flags
cxx_add_flags
link_add_flags
'''
def autodetect(conf):
v=conf.env
compiler,version,path,includes,libdirs=detect_msvc(conf)
v['PATH']=path
v['CPPPATH']=includes
v['LIBPATH']=libdirs
v['MSVC_COMPILER']=compiler
def _get_prog_names(conf,compiler):
if compiler=='intel':
compiler_name='ICL'
linker_name='XILINK'
lib_name='XILIB'
else:
compiler_name='CL'
linker_name='LINK'
lib_name='LIB'
return compiler_name,linker_name,lib_name
def find_msvc(conf):
if sys.platform!='win32':
conf.fatal('MSVC module only works under native Win32 Python! cygwin is not supported yet')
v=conf.env
compiler,version,path,includes,libdirs=detect_msvc(conf)
compiler_name,linker_name,lib_name=_get_prog_names(conf,compiler)
has_msvc_manifest=(compiler=='msvc'and float(version)>=8)or(compiler=='wsdk'and float(version)>=6)or(compiler=='intel'and float(version)>=11)
cxx=None
if v.CXX:cxx=v.CXX
elif'CXX'in conf.environ:cxx=conf.environ['CXX']
if not cxx:cxx=conf.find_program(compiler_name,var='CXX',path_list=path,mandatory=True)
cxx=conf.cmd_to_list(cxx)
env=dict(conf.environ)
env.update(PATH=';'.join(path))
if not Utils.cmd_output([cxx,'/nologo','/?'],silent=True,env=env):
conf.fatal('the msvc compiler could not be identified')
link=v.LINK_CXX
if not link:
link=conf.find_program(linker_name,path_list=path,mandatory=True)
ar=v.AR
if not ar:
ar=conf.find_program(lib_name,path_list=path,mandatory=True)
mt=v.MT
if has_msvc_manifest:
mt=conf.find_program('MT',path_list=path,mandatory=True)
v.MSVC_MANIFEST=has_msvc_manifest
v.PATH=path
v.CPPPATH=includes
v.LIBPATH=libdirs
v.CC=v.CXX=cxx
v.CC_NAME=v.CXX_NAME='msvc'
v.LINK=v.LINK_CXX=link
if not v.LINK_CC:
v.LINK_CC=v.LINK_CXX
v.AR=ar
v.MT=mt
v.MTFLAGS=v.ARFLAGS=['/NOLOGO']
conf.check_tool('winres')
if not conf.env.WINRC:
warn('Resource compiler not found. Compiling resource file is disabled')
try:v.prepend_value('CPPPATH',conf.environ['INCLUDE'])
except KeyError:pass
try:v.prepend_value('LIBPATH',conf.environ['LIB'])
except KeyError:pass
def msvc_common_flags(conf):
v=conf.env
v['CPPFLAGS']=['/W3','/nologo']
v['CCDEFINES_ST']='/D%s'
v['CXXDEFINES_ST']='/D%s'
v['CCDEFINES']=['WIN32']
v['CXXDEFINES']=['WIN32']
v['_CCINCFLAGS']=[]
v['_CCDEFFLAGS']=[]
v['_CXXINCFLAGS']=[]
v['_CXXDEFFLAGS']=[]
v['CC_SRC_F']=''
v['CC_TGT_F']=['/c','/Fo']
v['CXX_SRC_F']=''
v['CXX_TGT_F']=['/c','/Fo']
v['CPPPATH_ST']='/I%s'
v['AR_TGT_F']=v['CCLNK_TGT_F']=v['CXXLNK_TGT_F']='/OUT:'
v['CPPFLAGS_CONSOLE']=['/SUBSYSTEM:CONSOLE']
v['CPPFLAGS_NATIVE']=['/SUBSYSTEM:NATIVE']
v['CPPFLAGS_POSIX']=['/SUBSYSTEM:POSIX']
v['CPPFLAGS_WINDOWS']=['/SUBSYSTEM:WINDOWS']
v['CPPFLAGS_WINDOWSCE']=['/SUBSYSTEM:WINDOWSCE']
v['CPPFLAGS_CRT_MULTITHREADED']=['/MT']
v['CPPFLAGS_CRT_MULTITHREADED_DLL']=['/MD']
v['CPPDEFINES_CRT_MULTITHREADED']=['_MT']
v['CPPDEFINES_CRT_MULTITHREADED_DLL']=['_MT','_DLL']
v['CPPFLAGS_CRT_MULTITHREADED_DBG']=['/MTd']
v['CPPFLAGS_CRT_MULTITHREADED_DLL_DBG']=['/MDd']
v['CPPDEFINES_CRT_MULTITHREADED_DBG']=['_DEBUG','_MT']
v['CPPDEFINES_CRT_MULTITHREADED_DLL_DBG']=['_DEBUG','_MT','_DLL']
v['CCFLAGS']=['/TC']
v['CCFLAGS_OPTIMIZED']=['/O2','/DNDEBUG']
v['CCFLAGS_RELEASE']=['/O2','/DNDEBUG']
v['CCFLAGS_DEBUG']=['/Od','/RTC1','/ZI']
v['CCFLAGS_ULTRADEBUG']=['/Od','/RTC1','/ZI']
v['CXXFLAGS']=['/TP','/EHsc']
v['CXXFLAGS_OPTIMIZED']=['/O2','/DNDEBUG']
v['CXXFLAGS_RELEASE']=['/O2','/DNDEBUG']
v['CXXFLAGS_DEBUG']=['/Od','/RTC1','/ZI']
v['CXXFLAGS_ULTRADEBUG']=['/Od','/RTC1','/ZI']
v['LIB']=[]
v['LIB_ST']='%s.lib'
v['LIBPATH_ST']='/LIBPATH:%s'
v['STATICLIB_ST']='lib%s.lib'
v['STATICLIBPATH_ST']='/LIBPATH:%s'
v['LINKFLAGS']=['/NOLOGO']
if v['MSVC_MANIFEST']:
v.append_value('LINKFLAGS','/MANIFEST')
v['LINKFLAGS_DEBUG']=['/DEBUG']
v['LINKFLAGS_ULTRADEBUG']=['/DEBUG']
v['shlib_CCFLAGS']=['']
v['shlib_CXXFLAGS']=['']
v['shlib_LINKFLAGS']=['/DLL']
v['shlib_PATTERN']='%s.dll'
v['implib_PATTERN']='%s.lib'
v['IMPLIB_ST']='/IMPLIB:%s'
v['staticlib_LINKFLAGS']=['']
v['staticlib_PATTERN']='lib%s.lib'
v['program_PATTERN']='%s.exe'
def apply_flags_msvc(self):
if self.env.CC_NAME!='msvc'or not self.link_task:
return
subsystem=getattr(self,'subsystem','')
if subsystem:
subsystem='/subsystem:%s'%subsystem
flags='cstaticlib'in self.features and'ARFLAGS'or'LINKFLAGS'
self.env.append_value(flags,subsystem)
if getattr(self,'link_task',None)and not'cstaticlib'in self.features:
for f in self.env.LINKFLAGS:
d=f.lower()
if d[1:]=='debug':
pdbnode=self.link_task.outputs[0].change_ext('.pdb')
pdbfile=pdbnode.bldpath(self.env)
self.link_task.outputs.append(pdbnode)
self.bld.install_files(self.install_path,[pdbnode],env=self.env)
break
def apply_obj_vars_msvc(self):
if self.env['CC_NAME']!='msvc':
return
try:
self.meths.remove('apply_obj_vars')
except ValueError:
pass
libpaths=getattr(self,'libpaths',[])
if not libpaths:self.libpaths=libpaths
env=self.env
app=env.append_unique
cpppath_st=env['CPPPATH_ST']
lib_st=env['LIB_ST']
staticlib_st=env['STATICLIB_ST']
libpath_st=env['LIBPATH_ST']
staticlibpath_st=env['STATICLIBPATH_ST']
for i in env['LIBPATH']:
app('LINKFLAGS',libpath_st%i)
if not libpaths.count(i):
libpaths.append(i)
for i in env['LIBPATH']:
app('LINKFLAGS',staticlibpath_st%i)
if not libpaths.count(i):
libpaths.append(i)
if not env['FULLSTATIC']:
if env['STATICLIB']or env['LIB']:
app('LINKFLAGS',env['SHLIB_MARKER'])
for i in env['STATICLIB']:
app('LINKFLAGS',staticlib_st%i)
for i in env['LIB']:
app('LINKFLAGS',lib_st%i)
def apply_manifest(self):
if self.env.CC_NAME=='msvc'and self.env.MSVC_MANIFEST:
out_node=self.link_task.outputs[0]
man_node=out_node.parent.find_or_declare(out_node.name+'.manifest')
self.link_task.outputs.append(man_node)
self.link_task.do_manifest=True
def exec_mf(self):
env=self.env
mtool=env['MT']
if not mtool:
return 0
self.do_manifest=False
outfile=self.outputs[0].bldpath(env)
manifest=None
for out_node in self.outputs:
if out_node.name.endswith('.manifest'):
manifest=out_node.bldpath(env)
break
if manifest is None:
return 0
mode=''
if'cprogram'in self.generator.features:
mode='1'
elif'cshlib'in self.generator.features:
mode='2'
debug('msvc: embedding manifest')
lst=[]
lst.extend([env['MT']])
lst.extend(Utils.to_list(env['MTFLAGS']))
lst.extend(Utils.to_list("-manifest"))
lst.extend(Utils.to_list(manifest))
lst.extend(Utils.to_list("-outputresource:%s;%s"%(outfile,mode)))
lst=[lst]
return self.exec_command(*lst)
def exec_command_msvc(self,*k,**kw):
if self.env['CC_NAME']=='msvc':
if isinstance(k[0],list):
lst=[]
carry=''
for a in k[0]:
if len(a)==3 and a.startswith('/F')or a=='/doc'or a[-1]==':':
carry=a
else:
lst.append(carry+a)
carry=''
k=[lst]
env=dict(os.environ)
env.update(PATH=';'.join(self.env['PATH']))
kw['env']=env
ret=self.generator.bld.exec_command(*k,**kw)
if ret:return ret
if getattr(self,'do_manifest',None):
ret=exec_mf(self)
return ret
for k in'cc cxx winrc cc_link cxx_link static_link qxx'.split():
cls=Task.TaskBase.classes.get(k,None)
if cls:
cls.exec_command=exec_command_msvc
conf(get_msvc_version)
conf(gather_wsdk_versions)
conf(gather_msvc_versions)
conf(gather_icl_versions)
conf(get_msvc_versions)
conf(print_all_msvc_detected)
conf(find_lt_names_msvc)
conf(libname_msvc)
conf(check_lib_msvc)
conf(check_libs_msvc)
conftest(no_autodetect)
conftest(autodetect)
conftest(find_msvc)
conftest(msvc_common_flags)
after('apply_link')(apply_flags_msvc)
feature('cc','cxx')(apply_flags_msvc)
feature('cprogram','cshlib','cstaticlib')(apply_obj_vars_msvc)
after('apply_lib_vars')(apply_obj_vars_msvc)
before('apply_obj_vars')(apply_obj_vars_msvc)
feature('cprogram','cshlib')(apply_manifest)
after('apply_link')(apply_manifest)

View file

@ -0,0 +1,31 @@
#! /usr/bin/env python
# encoding: utf-8
import os
import TaskGen,Task,Utils
from TaskGen import taskgen,before,extension
nasm_str='${NASM} ${NASM_FLAGS} ${NASM_INCLUDES} ${SRC} -o ${TGT}'
EXT_NASM=['.s','.S','.asm','.ASM','.spp','.SPP']
def apply_nasm_vars(self):
	# Pass per-task-generator nasm flags down into the environment.
	if hasattr(self,'nasm_flags'):
		for flag in self.to_list(self.nasm_flags):
			self.env.append_value('NASM_FLAGS',flag)
	# Translate include directories into -I options, adding both the
	# source-tree and build-tree variants of each directory.
	if hasattr(self,'includes'):
		for inc in self.to_list(self.includes):
			node=self.path.find_dir(inc)
			if not node:
				raise Utils.WafError('cannot find the dir'+inc)
			self.env.append_value('NASM_INCLUDES','-I%s'%node.srcpath(self.env))
			self.env.append_value('NASM_INCLUDES','-I%s'%node.bldpath(self.env))
def nasm_file(self,node):
try:obj_ext=self.obj_ext
except AttributeError:obj_ext='_%d.o'%self.idx
task=self.create_task('nasm',node,node.change_ext(obj_ext))
self.compiled_tasks.append(task)
self.meths.append('apply_nasm_vars')
Task.simple_task_type('nasm',nasm_str,color='BLUE',ext_out='.o',shell=False)
def detect(conf):
nasm=conf.find_program(['nasm','yasm'],var='NASM',mandatory=True)
before('apply_link')(apply_nasm_vars)
extension(EXT_NASM)(nasm_file)

View file

@ -0,0 +1,225 @@
#! /usr/bin/env python
# encoding: utf-8
import os,re
import TaskGen,Utils,Task,Build
from Logs import error
from TaskGen import taskgen,feature,before,after,extension
EXT_MLL=['.mll']
EXT_MLY=['.mly']
EXT_MLI=['.mli']
EXT_MLC=['.c']
EXT_ML=['.ml']
open_re=re.compile('^\s*open\s+([a-zA-Z]+)(;;){0,1}$',re.M)
foo=re.compile(r"""(\(\*)|(\*\))|("(\\.|[^"\\])*"|'(\\.|[^'\\])*'|.[^()*"'\\]*)""",re.M)
def filter_comments(txt):
	"""Remove OCaml (* ... *) comments from *txt*, honouring nesting."""
	depth=[0]
	def drop(m):
		# track comment nesting; only text at depth 0 survives
		if m.group(1):
			depth[0]+=1
		elif m.group(2):
			depth[0]-=1
		elif not depth[0]:
			return m.group(0)
		return''
	return foo.sub(drop,txt)
def scan(self):
node=self.inputs[0]
code=filter_comments(node.read(self.env))
global open_re
names=[]
import_iterator=open_re.finditer(code)
if import_iterator:
for import_match in import_iterator:
names.append(import_match.group(1))
found_lst=[]
raw_lst=[]
for name in names:
nd=None
for x in self.incpaths:
nd=x.find_resource(name.lower()+'.ml')
if not nd:nd=x.find_resource(name+'.ml')
if nd:
found_lst.append(nd)
break
else:
raw_lst.append(name)
return(found_lst,raw_lst)
native_lst=['native','all','c_object']
bytecode_lst=['bytecode','all']
class ocaml_taskgen(TaskGen.task_gen):
def __init__(self,*k,**kw):
TaskGen.task_gen.__init__(self,*k,**kw)
def init_ml(self):
Utils.def_attrs(self,type='all',incpaths_lst=[],bld_incpaths_lst=[],mlltasks=[],mlytasks=[],mlitasks=[],native_tasks=[],bytecode_tasks=[],linktasks=[],bytecode_env=None,native_env=None,compiled_tasks=[],includes='',uselib='',are_deps_set=0)
def init_envs_ml(self):
self.islibrary=getattr(self,'islibrary',False)
global native_lst,bytecode_lst
self.native_env=None
if self.type in native_lst:
self.native_env=self.env.copy()
if self.islibrary:self.native_env['OCALINKFLAGS']='-a'
self.bytecode_env=None
if self.type in bytecode_lst:
self.bytecode_env=self.env.copy()
if self.islibrary:self.bytecode_env['OCALINKFLAGS']='-a'
if self.type=='c_object':
self.native_env.append_unique('OCALINKFLAGS_OPT','-output-obj')
def apply_incpaths_ml(self):
inc_lst=self.includes.split()
lst=self.incpaths_lst
for dir in inc_lst:
node=self.path.find_dir(dir)
if not node:
error("node not found: "+str(dir))
continue
self.bld.rescan(node)
if not node in lst:lst.append(node)
self.bld_incpaths_lst.append(node)
def apply_vars_ml(self):
for i in self.incpaths_lst:
if self.bytecode_env:
app=self.bytecode_env.append_value
app('OCAMLPATH','-I')
app('OCAMLPATH',i.srcpath(self.env))
app('OCAMLPATH','-I')
app('OCAMLPATH',i.bldpath(self.env))
if self.native_env:
app=self.native_env.append_value
app('OCAMLPATH','-I')
app('OCAMLPATH',i.bldpath(self.env))
app('OCAMLPATH','-I')
app('OCAMLPATH',i.srcpath(self.env))
varnames=['INCLUDES','OCAMLFLAGS','OCALINKFLAGS','OCALINKFLAGS_OPT']
for name in self.uselib.split():
for vname in varnames:
cnt=self.env[vname+'_'+name]
if cnt:
if self.bytecode_env:self.bytecode_env.append_value(vname,cnt)
if self.native_env:self.native_env.append_value(vname,cnt)
def apply_link_ml(self):
if self.bytecode_env:
ext=self.islibrary and'.cma'or'.run'
linktask=self.create_task('ocalink')
linktask.bytecode=1
linktask.set_outputs(self.path.find_or_declare(self.target+ext))
linktask.obj=self
linktask.env=self.bytecode_env
self.linktasks.append(linktask)
if self.native_env:
if self.type=='c_object':ext='.o'
elif self.islibrary:ext='.cmxa'
else:ext=''
linktask=self.create_task('ocalinkx')
linktask.set_outputs(self.path.find_or_declare(self.target+ext))
linktask.obj=self
linktask.env=self.native_env
self.linktasks.append(linktask)
self.compiled_tasks.append(linktask)
def mll_hook(self,node):
mll_task=self.create_task('ocamllex',node,node.change_ext('.ml'),env=self.native_env)
self.mlltasks.append(mll_task)
self.allnodes.append(mll_task.outputs[0])
def mly_hook(self,node):
mly_task=self.create_task('ocamlyacc',node,[node.change_ext('.ml'),node.change_ext('.mli')],env=self.native_env)
self.mlytasks.append(mly_task)
self.allnodes.append(mly_task.outputs[0])
task=self.create_task('ocamlcmi',mly_task.outputs[1],mly_task.outputs[1].change_ext('.cmi'),env=self.native_env)
def mli_hook(self,node):
task=self.create_task('ocamlcmi',node,node.change_ext('.cmi'),env=self.native_env)
self.mlitasks.append(task)
def mlc_hook(self,node):
task=self.create_task('ocamlcc',node,node.change_ext('.o'),env=self.native_env)
self.compiled_tasks.append(task)
def ml_hook(self,node):
if self.native_env:
task=self.create_task('ocamlx',node,node.change_ext('.cmx'),env=self.native_env)
task.obj=self
task.incpaths=self.bld_incpaths_lst
self.native_tasks.append(task)
if self.bytecode_env:
task=self.create_task('ocaml',node,node.change_ext('.cmo'),env=self.bytecode_env)
task.obj=self
task.bytecode=1
task.incpaths=self.bld_incpaths_lst
self.bytecode_tasks.append(task)
def compile_may_start(self):
if not getattr(self,'flag_deps',''):
self.flag_deps=1
if getattr(self,'bytecode',''):alltasks=self.obj.bytecode_tasks
else:alltasks=self.obj.native_tasks
self.signature()
tree=self.generator.bld
env=self.env
for node in self.inputs:
lst=tree.node_deps[self.unique_id()]
for depnode in lst:
for t in alltasks:
if t==self:continue
if depnode in t.inputs:
self.set_run_after(t)
delattr(self,'cache_sig')
self.signature()
return Task.Task.runnable_status(self)
b=Task.simple_task_type
cls=b('ocamlx','${OCAMLOPT} ${OCAMLPATH} ${OCAMLFLAGS} ${INCLUDES} -c -o ${TGT} ${SRC}',color='GREEN',shell=False)
cls.runnable_status=compile_may_start
cls.scan=scan
b=Task.simple_task_type
cls=b('ocaml','${OCAMLC} ${OCAMLPATH} ${OCAMLFLAGS} ${INCLUDES} -c -o ${TGT} ${SRC}',color='GREEN',shell=False)
cls.runnable_status=compile_may_start
cls.scan=scan
b('ocamlcmi','${OCAMLC} ${OCAMLPATH} ${INCLUDES} -o ${TGT} -c ${SRC}',color='BLUE',before="ocaml ocamlcc ocamlx")
b('ocamlcc','cd ${TGT[0].bld_dir(env)} && ${OCAMLOPT} ${OCAMLFLAGS} ${OCAMLPATH} ${INCLUDES} -c ${SRC[0].abspath(env)}',color='GREEN')
b('ocamllex','${OCAMLLEX} ${SRC} -o ${TGT}',color='BLUE',before="ocamlcmi ocaml ocamlcc")
b('ocamlyacc','${OCAMLYACC} -b ${TGT[0].bld_base(env)} ${SRC}',color='BLUE',before="ocamlcmi ocaml ocamlcc")
def link_may_start(self):
if not getattr(self,'order',''):
if getattr(self,'bytecode',0):alltasks=self.obj.bytecode_tasks
else:alltasks=self.obj.native_tasks
seen=[]
pendant=[]+alltasks
while pendant:
task=pendant.pop(0)
if task in seen:continue
for x in task.run_after:
if not x in seen:
pendant.append(task)
break
else:
seen.append(task)
self.inputs=[x.outputs[0]for x in seen]
self.order=1
return Task.Task.runnable_status(self)
act=b('ocalink','${OCAMLC} -o ${TGT} ${INCLUDES} ${OCALINKFLAGS} ${SRC}',color='YELLOW',after="ocaml ocamlcc")
act.runnable_status=link_may_start
act=b('ocalinkx','${OCAMLOPT} -o ${TGT} ${INCLUDES} ${OCALINKFLAGS_OPT} ${SRC}',color='YELLOW',after="ocamlx ocamlcc")
act.runnable_status=link_may_start
def detect(conf):
opt=conf.find_program('ocamlopt',var='OCAMLOPT')
occ=conf.find_program('ocamlc',var='OCAMLC')
if(not opt)or(not occ):
conf.fatal('The objective caml compiler was not found:\ninstall it or make it available in your PATH')
v=conf.env
v['OCAMLC']=occ
v['OCAMLOPT']=opt
v['OCAMLLEX']=conf.find_program('ocamllex',var='OCAMLLEX')
v['OCAMLYACC']=conf.find_program('ocamlyacc',var='OCAMLYACC')
v['OCAMLFLAGS']=''
v['OCAMLLIB']=Utils.cmd_output(conf.env['OCAMLC']+' -where').strip()+os.sep
v['LIBPATH_OCAML']=Utils.cmd_output(conf.env['OCAMLC']+' -where').strip()+os.sep
v['CPPPATH_OCAML']=Utils.cmd_output(conf.env['OCAMLC']+' -where').strip()+os.sep
v['LIB_OCAML']='camlrun'
feature('ocaml')(init_ml)
feature('ocaml')(init_envs_ml)
after('init_ml')(init_envs_ml)
feature('ocaml')(apply_incpaths_ml)
before('apply_vars_ml')(apply_incpaths_ml)
after('init_envs_ml')(apply_incpaths_ml)
feature('ocaml')(apply_vars_ml)
before('apply_core')(apply_vars_ml)
feature('ocaml')(apply_link_ml)
after('apply_core')(apply_link_ml)
extension(EXT_MLL)(mll_hook)
extension(EXT_MLY)(mly_hook)
extension(EXT_MLI)(mli_hook)
extension(EXT_MLC)(mlc_hook)
extension(EXT_ML)(ml_hook)

View file

@ -0,0 +1,138 @@
#! /usr/bin/env python
# encoding: utf-8
import os,shutil,sys,platform
import TaskGen,Task,Build,Options,Utils
from TaskGen import taskgen,feature,after,before
from Logs import error,debug
app_info='''
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist SYSTEM "file://localhost/System/Library/DTDs/PropertyList.dtd">
<plist version="0.9">
<dict>
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleGetInfoString</key>
<string>Created by Waf</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>NOTE</key>
<string>THIS IS A GENERATED FILE, DO NOT MODIFY</string>
<key>CFBundleExecutable</key>
<string>%s</string>
</dict>
</plist>
'''
def set_macosx_deployment_target(self):
if self.env['MACOSX_DEPLOYMENT_TARGET']:
os.environ['MACOSX_DEPLOYMENT_TARGET']=self.env['MACOSX_DEPLOYMENT_TARGET']
elif'MACOSX_DEPLOYMENT_TARGET'not in os.environ:
if sys.platform=='darwin':
os.environ['MACOSX_DEPLOYMENT_TARGET']='.'.join(platform.mac_ver()[0].split('.')[:2])
def apply_framework(self):
for x in self.to_list(self.env['FRAMEWORKPATH']):
frameworkpath_st='-F%s'
self.env.append_unique('CXXFLAGS',frameworkpath_st%x)
self.env.append_unique('CCFLAGS',frameworkpath_st%x)
self.env.append_unique('LINKFLAGS',frameworkpath_st%x)
for x in self.to_list(self.env['FRAMEWORK']):
self.env.append_value('LINKFLAGS',['-framework',x])
def create_bundle_dirs(self,name,out):
bld=self.bld
dir=out.parent.get_dir(name)
if not dir:
dir=out.__class__(name,out.parent,1)
bld.rescan(dir)
contents=out.__class__('Contents',dir,1)
bld.rescan(contents)
macos=out.__class__('MacOS',contents,1)
bld.rescan(macos)
return dir
def bundle_name_for_output(out):
	"""Derive the OSX .app bundle name from an output node, replacing any file extension."""
	stem,dot,_ext=out.name.rpartition('.')
	if dot:
		return stem+'.app'
	return out.name+'.app'
def create_task_macapp(self):
if self.env['MACAPP']or getattr(self,'mac_app',False):
apptask=self.create_task('macapp')
apptask.set_inputs(self.link_task.outputs)
out=self.link_task.outputs[0]
name=bundle_name_for_output(out)
dir=self.create_bundle_dirs(name,out)
n1=dir.find_or_declare(['Contents','MacOS',out.name])
apptask.set_outputs([n1])
apptask.chmod=0755
apptask.install_path=os.path.join(self.install_path,name,'Contents','MacOS')
self.apptask=apptask
def create_task_macplist(self):
if self.env['MACAPP']or getattr(self,'mac_app',False):
if not getattr(self,'mac_plist',False):
self.mac_plist=app_info
plisttask=self.create_task('macplist')
plisttask.set_inputs(self.link_task.outputs)
out=self.link_task.outputs[0]
self.mac_plist=self.mac_plist%(out.name)
name=bundle_name_for_output(out)
dir=self.create_bundle_dirs(name,out)
n1=dir.find_or_declare(['Contents','Info.plist'])
plisttask.set_outputs([n1])
plisttask.mac_plist=self.mac_plist
plisttask.install_path=os.path.join(self.install_path,name,'Contents')
self.plisttask=plisttask
def apply_link_osx(self):
name=self.link_task.outputs[0].name
if not self.install_path:
return
if getattr(self,'vnum',None):
name=name.replace('.dylib','.%s.dylib'%self.vnum)
path=os.path.join(Utils.subst_vars(self.install_path,self.env),name)
if'-dynamiclib'in self.env['LINKFLAGS']:
self.env.append_value('LINKFLAGS','-install_name')
self.env.append_value('LINKFLAGS',path)
def apply_bundle(self):
if not('cshlib'in self.features or'shlib'in self.features):return
if self.env['MACBUNDLE']or getattr(self,'mac_bundle',False):
self.env['shlib_PATTERN']=self.env['macbundle_PATTERN']
uselib=self.uselib=self.to_list(self.uselib)
if not'MACBUNDLE'in uselib:uselib.append('MACBUNDLE')
def apply_bundle_remove_dynamiclib(self):
if self.env['MACBUNDLE']or getattr(self,'mac_bundle',False):
if not getattr(self,'vnum',None):
try:
self.env['LINKFLAGS'].remove('-dynamiclib')
except ValueError:
pass
app_dirs=['Contents','Contents/MacOS','Contents/Resources']
def app_build(task):
env=task.env
shutil.copy2(task.inputs[0].srcpath(env),task.outputs[0].abspath(env))
return 0
def plist_build(task):
env=task.env
f=open(task.outputs[0].abspath(env),"w")
f.write(task.mac_plist)
f.close()
return 0
Task.task_type_from_func('macapp',vars=[],func=app_build,after="cxx_link cc_link static_link")
Task.task_type_from_func('macplist',vars=[],func=plist_build,after="cxx_link cc_link static_link")
feature('cc','cxx')(set_macosx_deployment_target)
before('apply_lib_vars')(set_macosx_deployment_target)
feature('cc','cxx')(apply_framework)
after('apply_lib_vars')(apply_framework)
taskgen(create_bundle_dirs)
taskgen(create_task_macapp)
after('apply_link')(create_task_macapp)
feature('cprogram')(create_task_macapp)
after('apply_link')(create_task_macplist)
feature('cprogram')(create_task_macplist)
after('apply_link')(apply_link_osx)
feature('cshlib')(apply_link_osx)
before('apply_link','apply_lib_vars')(apply_bundle)
feature('cc','cxx')(apply_bundle)
after('apply_link')(apply_bundle_remove_dynamiclib)
feature('cshlib')(apply_bundle_remove_dynamiclib)

View file

@ -0,0 +1,69 @@
#! /usr/bin/env python
# encoding: utf-8
import os
import Task,Options,Utils
from Configure import conf
from TaskGen import extension,taskgen,feature,before
xsubpp_str='${PERL} ${XSUBPP} -noprototypes -typemap ${EXTUTILS_TYPEMAP} ${SRC} > ${TGT}'
EXT_XS=['.xs']
def init_perlext(self):
self.uselib=self.to_list(getattr(self,'uselib',''))
if not'PERL'in self.uselib:self.uselib.append('PERL')
if not'PERLEXT'in self.uselib:self.uselib.append('PERLEXT')
self.env['shlib_PATTERN']=self.env['perlext_PATTERN']
def xsubpp_file(self,node):
outnode=node.change_ext('.c')
self.create_task('xsubpp',node,outnode)
self.allnodes.append(outnode)
Task.simple_task_type('xsubpp',xsubpp_str,color='BLUE',before='cc cxx',shell=False)
def check_perl_version(conf,minver=None):
if getattr(Options.options,'perlbinary',None):
conf.env.PERL=Options.options.perlbinary
else:
conf.find_program('perl',var='PERL',mandatory=True)
try:
version=Utils.cmd_output([conf.env.PERL,'-e','printf "%vd",$^V'])
except:
conf.fatal('could not determine the perl version')
conf.env.PERL_VERSION=version
cver=''
if minver:
try:
ver=tuple(map(int,version.split('.')))
except:
conf.fatal('unsupported perl version %r'%version)
if ver<minver:
conf.fatal('perl is too old')
cver='.'.join(map(str,minver))
conf.check_message('perl',cver,True,version)
def check_perl_module(conf,module):
cmd=[conf.env['PERL'],'-e','use %s'%module]
r=Utils.pproc.call(cmd,stdout=Utils.pproc.PIPE,stderr=Utils.pproc.PIPE)==0
conf.check_message("perl module %s"%module,"",r)
return r
def check_perl_ext_devel(conf):
if not conf.env.PERL:
conf.fatal('perl detection is required first')
def read_out(cmd):
return Utils.to_list(Utils.cmd_output([conf.env.PERL,'-MConfig','-e',cmd]))
conf.env.LINKFLAGS_PERLEXT=read_out('print $Config{lddlflags}')
conf.env.CPPPATH_PERLEXT=read_out('print "$Config{archlib}/CORE"')
conf.env.CCFLAGS_PERLEXT=read_out('print "$Config{ccflags} $Config{cccdlflags}"')
conf.env.XSUBPP=read_out('print "$Config{privlib}/ExtUtils/xsubpp$Config{exe_ext}"')
conf.env.EXTUTILS_TYPEMAP=read_out('print "$Config{privlib}/ExtUtils/typemap"')
conf.env.perlext_PATTERN='%s.'+read_out('print $Config{dlext}')[0]
if getattr(Options.options,'perlarchdir',None):
conf.env.ARCHDIR_PERL=Options.options.perlarchdir
else:
conf.env.ARCHDIR_PERL=read_out('print $Config{sitearch}')[0]
def set_options(opt):
opt.add_option("--with-perl-binary",type="string",dest="perlbinary",help='Specify alternate perl binary',default=None)
opt.add_option("--with-perl-archdir",type="string",dest="perlarchdir",help='Specify directory where to install arch specific files',default=None)
before('apply_incpaths','apply_type_vars','apply_lib_vars')(init_perlext)
feature('perlext')(init_perlext)
extension(EXT_XS)(xsubpp_file)
conf(check_perl_version)
conf(check_perl_module)
conf(check_perl_ext_devel)

View file

@ -0,0 +1,616 @@
#! /usr/bin/env python
# encoding: utf-8
import sys
if sys.hexversion < 0x020400f0: from sets import Set as set
import re,sys,os,string
import Logs,Build,Utils
from Logs import debug,error
import traceback
class PreprocError(Utils.WafError):
pass
POPFILE='-'
recursion_limit=5000
go_absolute=0
standard_includes=['/usr/include']
if sys.platform=="win32":
standard_includes=[]
use_trigraphs=0
'apply the trigraph rules first'
strict_quotes=0
g_optrans={'not':'!','and':'&&','bitand':'&','and_eq':'&=','or':'||','bitor':'|','or_eq':'|=','xor':'^','xor_eq':'^=','compl':'~',}
re_lines=re.compile('^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*)\r*$',re.IGNORECASE|re.MULTILINE)
re_mac=re.compile("^[a-zA-Z_]\w*")
re_fun=re.compile('^[a-zA-Z_][a-zA-Z0-9_]*[(]')
re_pragma_once=re.compile('^\s*once\s*',re.IGNORECASE)
re_nl=re.compile('\\\\\r*\n',re.MULTILINE)
re_cpp=re.compile(r"""(/\*[^*]*\*+(?:[^/*][^*]*\*+)*/)|//[^\n]*|("(?:\\.|[^"\\])*"|'(?:\\.|[^'\\])*'|.[^/"'\\]*)""",re.MULTILINE)
trig_def=[('??'+a,b)for a,b in zip("=-/!'()<>",r'#~\|^[]{}')]
chr_esc={'0':0,'a':7,'b':8,'t':9,'n':10,'f':11,'v':12,'r':13,'\\':92,"'":39}
NUM='i'
OP='O'
IDENT='T'
STR='s'
CHAR='c'
tok_types=[NUM,STR,IDENT,OP]
exp_types=[r"""0[xX](?P<hex>[a-fA-F0-9]+)(?P<qual1>[uUlL]*)|L*?'(?P<char>(\\.|[^\\'])+)'|(?P<n1>\d+)[Ee](?P<exp0>[+-]*?\d+)(?P<float0>[fFlL]*)|(?P<n2>\d*\.\d+)([Ee](?P<exp1>[+-]*?\d+))?(?P<float1>[fFlL]*)|(?P<n4>\d+\.\d*)([Ee](?P<exp2>[+-]*?\d+))?(?P<float2>[fFlL]*)|(?P<oct>0*)(?P<n0>\d+)(?P<qual2>[uUlL]*)""",r'L?"([^"\\]|\\.)*"',r'[a-zA-Z_]\w*',r'%:%:|<<=|>>=|\.\.\.|<<|<%|<:|<=|>>|>=|\+\+|\+=|--|->|-=|\*=|/=|%:|%=|%>|==|&&|&=|\|\||\|=|\^=|:>|!=|##|[\(\)\{\}\[\]<>\?\|\^\*\+&=:!#;,%/\-\?\~\.]',]
re_clexer=re.compile('|'.join(["(?P<%s>%s)"%(name,part)for name,part in zip(tok_types,exp_types)]),re.M)
accepted='a'
ignored='i'
undefined='u'
skipped='s'
def repl(m):
	"""re.sub callback: a /*...*/ comment becomes one space, code/strings pass through."""
	if m.group(1):
		return' '
	kept=m.group(2)
	# group 2 is None for // comments, which vanish entirely
	return kept if kept is not None else''
def filter_comments(filename):
	"""Read *filename* and return its preprocessor directives.

	Comments and backslash line-continuations are removed first; the
	result is a list of (directive-name, rest-of-line) tuples.
	"""
	code=Utils.readf(filename)
	if use_trigraphs:
		for trig,ch in trig_def:
			code=code.split(trig).join(ch)
	code=re_cpp.sub(repl,re_nl.sub('',code))
	return[(match.group(2),match.group(3))for match in re.finditer(re_lines,code)]
prec={}
ops=['* / %','+ -','<< >>','< <= >= >','== !=','& | ^','&& ||',',']
for x in range(len(ops)):
syms=ops[x]
for u in syms.split():
prec[u]=x
def reduce_nums(val_1,val_2,val_op):
	"""Apply the binary C-preprocessor operator *val_op* to two operands.

	Operands may be ints or numeric strings; strings are coerced via int().
	Unknown operators evaluate to 0, matching cpp's lenient arithmetic.
	"""
	try:a=0+val_1
	except TypeError:a=int(val_1)
	try:b=0+val_2
	except TypeError:b=int(val_2)
	d=val_op
	if d=='%':c=a%b
	elif d=='+':c=a+b
	elif d=='-':c=a-b
	elif d=='*':c=a*b
	elif d=='/':c=a/b
	elif d=='^':c=a^b
	elif d=='|':c=a|b
	elif d=='||':c=int(a or b)
	elif d=='&':c=a&b
	elif d=='&&':c=int(a and b)
	elif d=='==':c=int(a==b)
	elif d=='!=':c=int(a!=b)
	elif d=='<=':c=int(a<=b)
	elif d=='<':c=int(a<b)
	elif d=='>':c=int(a>b)
	elif d=='>=':c=int(a>=b)
	# NOTE: upstream had a second, unreachable `elif d=='^'` branch here
	# (shadowed by the one above); it has been removed as dead code.
	elif d=='<<':c=a<<b
	elif d=='>>':c=a>>b
	else:c=0
	return c
def get_num(lst):
	"""Parse one operand from the token list *lst*.

	Returns (value, remaining_tokens).  Handles parenthesised
	sub-expressions, the unary operators + - ! ~, numbers, and
	identifiers (an undefined identifier evaluates to 0, as in cpp).
	Raises PreprocError on malformed input.
	"""
	if not lst:raise PreprocError("empty list for get_num")
	(p,v)=lst[0]
	if p==OP:
		if v=='(':
			count_par=1
			i=1
			while i<len(lst):
				(p,v)=lst[i]
				if p==OP:
					if v==')':
						count_par-=1
						if count_par==0:
							break
					elif v=='(':
						count_par+=1
				i+=1
			else:
				raise PreprocError("rparen expected %r"%lst)
			(num,_)=get_term(lst[1:i])
			return(num,lst[i+1:])
		elif v=='+':
			return get_num(lst[1:])
		elif v=='-':
			num,lst=get_num(lst[1:])
			return(reduce_nums('-1',num,'*'),lst)
		elif v=='!':
			num,lst=get_num(lst[1:])
			return(int(not int(num)),lst)
		elif v=='~':
			# bug fix: the operand must be parsed first; upstream read
			# 'num' before any assignment here, raising NameError for
			# any expression using unary '~'
			num,lst=get_num(lst[1:])
			return(~int(num),lst)
		else:
			raise PreprocError("invalid op token %r for get_num"%lst)
	elif p==NUM:
		return v,lst[1:]
	elif p==IDENT:
		return 0,lst[1:]
	else:
		raise PreprocError("invalid token %r for get_num"%lst)
def get_term(lst):
if not lst:raise PreprocError("empty list for get_term")
num,lst=get_num(lst)
if not lst:
return(num,[])
(p,v)=lst[0]
if p==OP:
if v=='&&'and not num:
return(num,[])
elif v=='||'and num:
return(num,[])
elif v==',':
return get_term(lst[1:])
elif v=='?':
count_par=0
i=1
while i<len(lst):
(p,v)=lst[i]
if p==OP:
if v==')':
count_par-=1
elif v=='(':
count_par+=1
elif v==':':
if count_par==0:
break
i+=1
else:
raise PreprocError("rparen expected %r"%lst)
if int(num):
return get_term(lst[1:i])
else:
return get_term(lst[i+1:])
else:
num2,lst=get_num(lst[1:])
if not lst:
num2=reduce_nums(num,num2,v)
return get_term([(NUM,num2)]+lst)
p2,v2=lst[0]
if p2!=OP:
raise PreprocError("op expected %r"%lst)
if prec[v2]>=prec[v]:
num2=reduce_nums(num,num2,v)
return get_term([(NUM,num2)]+lst)
else:
num3,lst=get_num(lst[1:])
num3=reduce_nums(num2,num3,v2)
return get_term([(NUM,num),(p,v),(NUM,num3)]+lst)
raise PreprocError("cannot reduce %r"%lst)
def reduce_eval(lst):
num,lst=get_term(lst)
return(NUM,num)
def stringize(lst):
	"""Concatenate the value of every (type, value) token into a single string."""
	return"".join(str(value)for(_kind,value)in lst)
def paste_tokens(t1,t2):
	"""Implement the ## paste operator: fuse two tokens into one.

	Only op##op, ident##(ident|num) and num##num form a valid token;
	any other combination raises PreprocError.
	"""
	kind=None
	if t1[0]==OP and t2[0]==OP:
		kind=OP
	elif t1[0]==IDENT and(t2[0]==IDENT or t2[0]==NUM):
		kind=IDENT
	elif t1[0]==NUM and t2[0]==NUM:
		kind=NUM
	if not kind:
		raise PreprocError('tokens do not make a valid paste %r and %r'%(t1,t2))
	return(kind,t1[1]+t2[1])
def reduce_tokens(lst,defs,ban=[]):
	# Expand macros in-place in the token list 'lst' using the macro table
	# 'defs'; 'ban' lists macro names currently being expanded (recursion guard
	# passed down to nested calls).
	i=0
	while i<len(lst):
		(p,v)=lst[i]
		if p==IDENT and v=="defined":
			# handle defined X and defined(X): replace with NUM 1/0
			del lst[i]
			if i<len(lst):
				(p2,v2)=lst[i]
				if p2==IDENT:
					if v2 in defs:
						lst[i]=(NUM,1)
					else:
						lst[i]=(NUM,0)
				elif p2==OP and v2=='(':
					del lst[i]
					(p2,v2)=lst[i]
					del lst[i]
					if v2 in defs:
						lst[i]=(NUM,1)
					else:
						lst[i]=(NUM,0)
				else:
					raise PreprocError("invalid define expression %r"%lst)
		elif p==IDENT and v in defs:
			# lazily parse string definitions into (params, tokens) form
			if isinstance(defs[v],str):
				a,b=extract_macro(defs[v])
				defs[v]=b
			macro_def=defs[v]
			to_add=macro_def[1]
			if isinstance(macro_def[0],list):
				# object-like macro: splice the replacement tokens in place
				del lst[i]
				for x in xrange(len(to_add)):
					lst.insert(i,to_add[x])
					i+=1
			else:
				# function-like macro: collect the call arguments first
				args=[]
				del lst[i]
				if i>=len(lst):
					raise PreprocError("expected '(' after %r (got nothing)"%v)
				(p2,v2)=lst[i]
				if p2!=OP or v2!='(':
					raise PreprocError("expected '(' after %r"%v)
				del lst[i]
				one_param=[]
				count_paren=0
				while i<len(lst):
					p2,v2=lst[i]
					del lst[i]
					if p2==OP and count_paren==0:
						if v2=='(':
							one_param.append((p2,v2))
							count_paren+=1
						elif v2==')':
							if one_param:args.append(one_param)
							break
						elif v2==',':
							if not one_param:raise PreprocError("empty param in funcall %s"%p)
							args.append(one_param)
							one_param=[]
						else:
							one_param.append((p2,v2))
					else:
						# inside nested parentheses: keep everything verbatim
						one_param.append((p2,v2))
						if v2=='(':count_paren+=1
						elif v2==')':count_paren-=1
				else:
					raise PreprocError('malformed macro')
				# substitute parameters into the replacement list, handling
				# '#' (stringize), '##' (paste) and __VA_ARGS__
				accu=[]
				arg_table=macro_def[0]
				j=0
				while j<len(to_add):
					(p2,v2)=to_add[j]
					if p2==OP and v2=='#':
						if j+1<len(to_add)and to_add[j+1][0]==IDENT and to_add[j+1][1]in arg_table:
							toks=args[arg_table[to_add[j+1][1]]]
							accu.append((STR,stringize(toks)))
							j+=1
						else:
							accu.append((p2,v2))
					elif p2==OP and v2=='##':
						if accu and j+1<len(to_add):
							t1=accu[-1]
							if to_add[j+1][0]==IDENT and to_add[j+1][1]in arg_table:
								toks=args[arg_table[to_add[j+1][1]]]
								if toks:
									accu[-1]=paste_tokens(t1,toks[0])
									accu.extend(toks[1:])
								else:
									# empty argument pasted: keep the '##' marker
									accu.append((p2,v2))
									accu.extend(toks)
							elif to_add[j+1][0]==IDENT and to_add[j+1][1]=='__VA_ARGS__':
								# variadic expansion: join remaining args with commas
								va_toks=[]
								st=len(macro_def[0])
								pt=len(args)
								for x in args[pt-st+1:]:
									va_toks.extend(x)
									va_toks.append((OP,','))
								if va_toks:va_toks.pop()
								if len(accu)>1:
									(p3,v3)=accu[-1]
									(p4,v4)=accu[-2]
									if v3=='##':
										# drop the paste marker, and the comma
										# before it when no variadic args given
										accu.pop()
										if v4==','and pt<st:
											accu.pop()
								accu+=va_toks
							else:
								accu[-1]=paste_tokens(t1,to_add[j+1])
							j+=1
						else:
							accu.append((p2,v2))
					elif p2==IDENT and v2 in arg_table:
						# plain parameter: expand its tokens recursively first
						toks=args[arg_table[v2]]
						reduce_tokens(toks,defs,ban+[v])
						accu.extend(toks)
					else:
						accu.append((p2,v2))
					j+=1
				# rescan the substituted tokens, then splice them back
				reduce_tokens(accu,defs,ban+[v])
				for x in xrange(len(accu)-1,-1,-1):
					lst.insert(i,accu[x])
		i+=1
def eval_macro(lst,adefs):
	"""Evaluate a preprocessor condition; returns True iff the result is non-zero."""
	reduce_tokens(lst,adefs,[])
	if not lst:raise PreprocError("missing tokens to evaluate")
	_,value=reduce_eval(lst)
	return int(value)!=0
def extract_macro(txt):
	# Parse a '#define' body into (name, [param_table, replacement_tokens]).
	# For a function-like macro, param_table maps parameter name -> position;
	# for an object-like macro it is an empty list.
	t=tokenize(txt)
	if re_fun.search(txt):
		# function-like macro: walk the parameter list with a tiny state
		# machine, 'prev' being the previously seen token kind/value
		p,name=t[0]
		p,v=t[1]
		if p!=OP:raise PreprocError("expected open parenthesis")
		i=1
		pindex=0
		params={}
		prev='('
		while 1:
			i+=1
			p,v=t[i]
			if prev=='(':
				if p==IDENT:
					params[v]=pindex
					pindex+=1
					prev=p
				elif p==OP and v==')':
					break
				else:
					raise PreprocError("unexpected token (3)")
			elif prev==IDENT:
				if p==OP and v==',':
					prev=v
				elif p==OP and v==')':
					break
				else:
					raise PreprocError("comma or ... expected")
			elif prev==',':
				if p==IDENT:
					params[v]=pindex
					pindex+=1
					prev=p
				elif p==OP and v=='...':
					# '...' after a named parameter is not supported here
					raise PreprocError("not implemented (1)")
				else:
					raise PreprocError("comma or ... expected (2)")
			elif prev=='...':
				raise PreprocError("not implemented (2)")
			else:
				raise PreprocError("unexpected else")
		# tokens after the closing ')' form the replacement list
		return(name,[params,t[i+1:]])
	else:
		# object-like macro: everything after the name is the replacement
		(p,v)=t[0]
		return(v,[[],t[1:]])
# Fast path for '#include <x>' / '#include "x"'; a raw string is used so the
# '\s' escape reaches the regex engine untouched (the original non-raw string
# relied on Python keeping unknown escapes verbatim, which is deprecated).
re_include=re.compile(r'^\s*(<(?P<a>.*)>|"(?P<b>.*)")')
def extract_include(txt,defs):
	# Parse the payload of an include directive and return (kind, name),
	# where kind is '<' for system includes and '"' for local ones.
	m=re_include.search(txt)
	if m:
		if m.group('a'):return'<',m.group('a')
		if m.group('b'):return'"',m.group('b')
	# not a literal include: expand macros and retry on the tokens
	toks=tokenize(txt)
	reduce_tokens(toks,defs,['waf_include'])
	if not toks:
		raise PreprocError("could not parse include %s"%txt)
	if len(toks)==1:
		if toks[0][0]==STR:
			return'"',toks[0][1]
	else:
		if toks[0][1]=='<'and toks[-1][1]=='>':
			# bug fix: every caller unpacks (kind, name); the original
			# returned a bare string here, which made callers unpack the
			# string into its first two characters
			return'<',stringize(toks).lstrip('<').rstrip('>')
	raise PreprocError("could not parse include %s."%txt)
def parse_char(txt):
	# Evaluate the contents of a C character literal (without the quotes).
	if not txt:raise PreprocError("attempted to parse a null char")
	if txt[0]!='\\':
		# plain character
		return ord(txt)
	c=txt[1]
	if c=='x':
		# hex escape; NOTE(review): the guarded branch and the fallthrough
		# return the exact same expression, so the length check is dead code
		if len(txt)==4 and txt[3]in string.hexdigits:return int(txt[2:],16)
		return int(txt[2:],16)
	elif c.isdigit():
		if c=='0'and len(txt)==2:return 0
		# octal escape; NOTE(review): this branch returns a (length, value)
		# tuple while every other branch returns a plain int — looks
		# inconsistent, confirm against callers before relying on it
		for i in 3,2,1:
			if len(txt)>i and txt[1:1+i].isdigit():
				return(1+i,int(txt[1:1+i],8))
	else:
		# named escape (\n, \t, ...) via the chr_esc table
		try:return chr_esc[c]
		except KeyError:raise PreprocError("could not parse char literal '%s'"%txt)
def tokenize(s):
	# Split a preprocessor line into (token_type, value) pairs using the
	# module-level regex re_clexer; tok_types lists the named groups tried
	# for each match.
	ret=[]
	for match in re_clexer.finditer(s):
		m=match.group
		for name in tok_types:
			v=m(name)
			if v:
				if name==IDENT:
					# translate 'and'/'or'-style aliases to operators,
					# and true/false keywords to numbers
					try:v=g_optrans[v];name=OP
					except KeyError:
						if v.lower()=="true":
							v=1
							name=NUM
						elif v.lower()=="false":
							v=0
							name=NUM
				elif name==NUM:
					# convert octal/hex/char forms to integers
					if m('oct'):v=int(v,8)
					elif m('hex'):v=int(m('hex'),16)
					elif m('n0'):v=m('n0')
					else:
						v=m('char')
						if v:v=parse_char(v)
						else:v=m('n2')or m('n4')
				elif name==OP:
					# normalize digraphs
					if v=='%:':v='#'
					elif v=='%:%:':v='##'
				elif name==STR:
					# strip the surrounding quotes
					v=v[1:-1]
				ret.append((name,v))
				break
	return ret
def define_name(line):
	"""Return the macro name at the start of a '#define' body."""
	match=re_mac.match(line)
	return match.group(0)
class c_parser(object):
	# Recursive preprocessor-level scanner: follows #include directives and
	# records the nodes (found files) and names (missing files) encountered.
	def __init__(self,nodepaths=None,defines=None):
		self.lines=[]
		if defines is None:
			self.defs={}
		else:
			# copy so the caller's dict is not mutated during parsing
			self.defs=dict(defines)
		self.state=[]
		self.env=None
		self.count_files=0
		self.currentnode_stack=[]
		self.nodepaths=nodepaths or[]
		self.nodes=[]
		self.names=[]
		self.curfile=''
		self.ban_includes=set([])
	def cached_find_resource(self,node,filename):
		# find_resource with a per-build cache keyed on (node.id, filename)
		try:
			nd=node.bld.cache_nd
		except:
			nd=node.bld.cache_nd={}
		tup=(node.id,filename)
		try:
			return nd[tup]
		except KeyError:
			ret=node.find_resource(filename)
			nd[tup]=ret
			return ret
	def tryfind(self,filename):
		# look up an included file relative to the current node, then along
		# the configured include paths; record the result
		self.curfile=filename
		found=self.cached_find_resource(self.currentnode_stack[-1],filename)
		for n in self.nodepaths:
			if found:
				break
			found=self.cached_find_resource(n,filename)
		if found:
			self.nodes.append(found)
			# .moc files are generated, do not scan their contents
			if filename[-4:]!='.moc':
				self.addlines(found)
		else:
			if not filename in self.names:
				self.names.append(filename)
		return found
	def addlines(self,node):
		# push the preprocessed lines of 'node' onto the stack, using the
		# per-build parse cache when possible
		self.currentnode_stack.append(node.parent)
		filepath=node.abspath(self.env)
		self.count_files+=1
		if self.count_files>recursion_limit:raise PreprocError("recursion limit exceeded")
		pc=self.parse_cache
		debug('preproc: reading file %r',filepath)
		try:
			lns=pc[filepath]
		except KeyError:
			pass
		else:
			self.lines.extend(lns)
			return
		try:
			lines=filter_comments(filepath)
			# POPFILE marks the end of this file so the node stack unwinds
			lines.append((POPFILE,''))
			lines.reverse()
			pc[filepath]=lines
			self.lines.extend(lines)
		except IOError:
			raise PreprocError("could not read the file %s"%filepath)
		except Exception:
			if Logs.verbose>0:
				error("parsing %s failed"%filepath)
				traceback.print_exc()
	def start(self,node,env):
		# entry point: scan 'node' and process directives until exhausted
		debug('preproc: scanning %s (in %s)',node.name,node.parent.name)
		self.env=env
		variant=node.variant(env)
		bld=node.__class__.bld
		try:
			self.parse_cache=bld.parse_cache
		except AttributeError:
			bld.parse_cache={}
			self.parse_cache=bld.parse_cache
		self.addlines(node)
		# command-line defines are processed first (list is popped from the end)
		if env['DEFLINES']:
			lst=[('define',x)for x in env['DEFLINES']]
			lst.reverse()
			self.lines.extend(lst)
		while self.lines:
			(kind,line)=self.lines.pop()
			if kind==POPFILE:
				self.currentnode_stack.pop()
				continue
			try:
				self.process_line(kind,line)
			except Exception,e:
				# a bad line should not abort the whole scan
				if Logs.verbose:
					debug('preproc: line parsing failed (%s): %s %s',e,line,Utils.ex_stack())
	def process_line(self,token,line):
		# handle one directive; self.state tracks nested #if blocks with the
		# module-level markers undefined/accepted/ignored/skipped
		ve=Logs.verbose
		if ve:debug('preproc: line is %s - %s state is %s',token,line,self.state)
		state=self.state
		if token in['ifdef','ifndef','if']:
			state.append(undefined)
		elif token=='endif':
			state.pop()
		# inside a skipped/ignored block, only conditional keywords matter
		if not token in['else','elif','endif']:
			if skipped in self.state or ignored in self.state:
				return
		if token=='if':
			ret=eval_macro(tokenize(line),self.defs)
			if ret:state[-1]=accepted
			else:state[-1]=ignored
		elif token=='ifdef':
			m=re_mac.match(line)
			if m and m.group(0)in self.defs:state[-1]=accepted
			else:state[-1]=ignored
		elif token=='ifndef':
			m=re_mac.match(line)
			if m and m.group(0)in self.defs:state[-1]=ignored
			else:state[-1]=accepted
		elif token=='include'or token=='import':
			(kind,inc)=extract_include(line,self.defs)
			if inc in self.ban_includes:return
			# #import implies include-once
			if token=='import':self.ban_includes.add(inc)
			if ve:debug('preproc: include found %s (%s) ',inc,kind)
			if kind=='"'or not strict_quotes:
				self.tryfind(inc)
		elif token=='elif':
			if state[-1]==accepted:
				state[-1]=skipped
			elif state[-1]==ignored:
				if eval_macro(tokenize(line),self.defs):
					state[-1]=accepted
		elif token=='else':
			if state[-1]==accepted:state[-1]=skipped
			elif state[-1]==ignored:state[-1]=accepted
		elif token=='define':
			try:
				self.defs[define_name(line)]=line
			except:
				raise PreprocError("invalid define line %s"%line)
		elif token=='undef':
			m=re_mac.match(line)
			if m and m.group(0)in self.defs:
				self.defs.__delitem__(m.group(0))
		elif token=='pragma':
			if re_pragma_once.match(line.lower()):
				self.ban_includes.add(self.curfile)
def get_deps(node,env,nodepaths=[]):
	"""Scan 'node' with the full preprocessor and return (nodes, names)."""
	parser=c_parser(nodepaths)
	parser.start(node,env)
	return(parser.nodes,parser.names)
# Matches whole '#include' lines ('%:' is the digraph form of '#'), capturing
# the keyword and its argument; used by lines_includes below.
re_inc=re.compile('^[ \t]*(#|%:)[ \t]*(include)[ \t]*(.*)\r*$',re.IGNORECASE|re.MULTILINE)
def lines_includes(filename):
	"""Return (keyword, argument) pairs for the include lines of a file."""
	text=Utils.readf(filename)
	if use_trigraphs:
		# replace trigraph sequences before matching
		for(trig,ch)in trig_def:text=text.split(trig).join(ch)
	text=re_nl.sub('',text)
	text=re_cpp.sub(repl,text)
	return[(match.group(2),match.group(3))for match in re.finditer(re_inc,text)]
def get_deps_simple(node,env,nodepaths=[],defines={}):
	# Collect dependencies by scanning #include lines only (no conditional
	# evaluation); returns (nodes found, names not found).
	nodes=[]
	names=[]
	def find_deps(node):
		lst=lines_includes(node.abspath(env))
		for(_,line)in lst:
			(t,filename)=extract_include(line,defines)
			if filename in names:
				continue
			if filename.endswith('.moc'):
				names.append(filename)
			found=None
			for n in nodepaths:
				if found:
					break
				found=n.find_resource(filename)
			if not found:
				if not filename in names:
					names.append(filename)
			elif not found in nodes:
				nodes.append(found)
				# NOTE(review): this recurses on 'node' (the same file) rather
				# than on 'found' (the newly discovered header) — looks like a
				# bug; confirm against upstream waf before changing
				find_deps(node)
	find_deps(node)
	return(nodes,names)
# Register tokenize/define_name with Utils.run_once — presumably to memoize
# repeated calls on identical lines; confirm semantics in Utils.
Utils.run_once(tokenize)
Utils.run_once(define_name)

View file

@ -0,0 +1,278 @@
#! /usr/bin/env python
# encoding: utf-8
import os,sys
import TaskGen,Utils,Utils,Runner,Options,Build
from Logs import debug,warn,info
from TaskGen import extension,taskgen,before,after,feature
from Configure import conf
# File extensions handled by the process_py hook below.
EXT_PY=['.py']
# Minimal C program compiled by check_python_headers to verify that a program
# can embed the interpreter (link against libpython, call Py_Initialize).
FRAG_2='''
#include "Python.h"
#ifdef __cplusplus
extern "C" {
#endif
void Py_Initialize(void);
void Py_Finalize(void);
#ifdef __cplusplus
}
#endif
int main()
{
Py_Initialize();
Py_Finalize();
return 0;
}
'''
def init_pyext(self):
	"""Task generator method: set up defaults for building a python extension."""
	self.default_install_path='${PYTHONDIR}'
	uselib=self.to_list(getattr(self,'uselib',''))
	if'PYEXT'not in uselib:
		uselib.append('PYEXT')
	self.uselib=uselib
	self.env['MACBUNDLE']=True
def pyext_shlib_ext(self):
	"""Use the python-extension filename pattern for shared libs of this generator."""
	env=self.env
	env['shlib_PATTERN']=env['pyext_PATTERN']
def init_pyembed(self):
	"""Task generator method: add the PYEMBED uselib for embedding python."""
	uselib=self.to_list(getattr(self,'uselib',''))
	if'PYEMBED'not in uselib:
		uselib.append('PYEMBED')
	self.uselib=uselib
def process_py(self,node):
	"""Extension hook for .py files: defer installation to a post-build function."""
	bld=self.bld
	if not(bld.is_install and self.install_path):
		return
	def _install_later(ctx):
		# executed after the build: install (and byte-compile) the file
		install_pyfile(self,node)
	bld.add_post_fun(_install_later)
def install_pyfile(self,node):
	# Install one .py file; on uninstall remove the .pyc/.pyo siblings, on
	# install byte-compile via a separate interpreter process.
	path=self.bld.get_install_path(self.install_path+os.sep+node.name,self.env)
	self.bld.install_files(self.install_path,[node],self.env,self.chmod,postpone=False)
	if self.bld.is_install<0:
		info("* removing byte compiled python files")
		# path+'c' / path+'o' are the compiled siblings of the .py file
		for x in'co':
			try:
				os.remove(path+x)
			except OSError:
				pass
	if self.bld.is_install>0:
		if self.env['PYC']or self.env['PYO']:
			info("* byte compiling %r"%path)
		if self.env['PYC']:
			# run py_compile in the target interpreter, not the build one
			program=("""
import sys, py_compile
for pyfile in sys.argv[1:]:
	py_compile.compile(pyfile, pyfile + 'c')
""")
			argv=[self.env['PYTHON'],'-c',program,path]
			ret=Utils.pproc.Popen(argv).wait()
			if ret:
				raise Utils.WafError('bytecode compilation failed %r'%path)
		if self.env['PYO']:
			# same, with the optimize flag producing .pyo
			program=("""
import sys, py_compile
for pyfile in sys.argv[1:]:
	py_compile.compile(pyfile, pyfile + 'o')
""")
			argv=[self.env['PYTHON'],self.env['PYFLAGS_OPT'],'-c',program,path]
			ret=Utils.pproc.Popen(argv).wait()
			if ret:
				raise Utils.WafError('bytecode compilation failed %r'%path)
class py_taskgen(TaskGen.task_gen):
	# Task generator class for plain python sources (the 'py' feature).
	def __init__(self,*k,**kw):
		TaskGen.task_gen.__init__(self,*k,**kw)
def init_py(self):
	# default installation directory for python sources
	self.default_install_path='${PYTHONDIR}'
def _get_python_variables(python_exe,variables,imports=['import sys']):
	# Run 'python_exe' on a generated script that prints repr() of each
	# expression in 'variables'; parse the output back into python values
	# (None, quoted strings, integers). Raises RuntimeError on failure.
	program=list(imports)
	program.append('')
	for v in variables:
		program.append("print(repr(%s))"%v)
	os_env=dict(os.environ)
	try:
		# this variable would influence distutils results on OS X
		del os_env['MACOSX_DEPLOYMENT_TARGET']
	except KeyError:
		pass
	proc=Utils.pproc.Popen([python_exe,"-c",'\n'.join(program)],stdout=Utils.pproc.PIPE,env=os_env)
	output=proc.communicate()[0].split("\n")
	if proc.returncode:
		if Options.options.verbose:
			warn("Python program to extract python configuration variables failed:\n%s"%'\n'.join(["line %03i: %s"%(lineno+1,line)for lineno,line in enumerate(program)]))
		raise RuntimeError
	return_values=[]
	for s in output:
		s=s.strip()
		if not s:
			continue
		if s=='None':
			return_values.append(None)
		elif s[0]=="'"and s[-1]=="'":
			return_values.append(s[1:-1])
		elif s[0].isdigit():
			return_values.append(int(s))
		else:break
	return return_values
def check_python_headers(conf,mandatory=True):
	# Configure the PYEMBED/PYEXT uselib variables (libs, lib paths, include
	# paths, link flags) needed to embed python or build python extensions,
	# then verify by compiling FRAG_2.
	if not conf.env['CC_NAME']and not conf.env['CXX_NAME']:
		conf.fatal('load a compiler first (gcc, g++, ..)')
	if not conf.env['PYTHON_VERSION']:
		conf.check_python_version()
	env=conf.env
	python=env['PYTHON']
	if not python:
		conf.fatal('could not find the python executable')
	if Options.platform=='darwin':
		conf.check_tool('osx')
	try:
		# query distutils in the target interpreter for the config values
		v='prefix SO SYSLIBS LDFLAGS SHLIBS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED MACOSX_DEPLOYMENT_TARGET'.split()
		(python_prefix,python_SO,python_SYSLIBS,python_LDFLAGS,python_SHLIBS,python_LIBDIR,python_LIBPL,INCLUDEPY,Py_ENABLE_SHARED,python_MACOSX_DEPLOYMENT_TARGET)=_get_python_variables(python,["get_config_var('%s')"%x for x in v],['from distutils.sysconfig import get_config_var'])
	except RuntimeError:
		conf.fatal("Python development headers not found (-v for details).")
	conf.log.write("""Configuration returned from %r:
python_prefix = %r
python_SO = %r
python_SYSLIBS = %r
python_LDFLAGS = %r
python_SHLIBS = %r
python_LIBDIR = %r
python_LIBPL = %r
INCLUDEPY = %r
Py_ENABLE_SHARED = %r
MACOSX_DEPLOYMENT_TARGET = %r
"""%(python,python_prefix,python_SO,python_SYSLIBS,python_LDFLAGS,python_SHLIBS,python_LIBDIR,python_LIBPL,INCLUDEPY,Py_ENABLE_SHARED,python_MACOSX_DEPLOYMENT_TARGET))
	if python_MACOSX_DEPLOYMENT_TARGET:
		conf.env['MACOSX_DEPLOYMENT_TARGET']=python_MACOSX_DEPLOYMENT_TARGET
		conf.environ['MACOSX_DEPLOYMENT_TARGET']=python_MACOSX_DEPLOYMENT_TARGET
	env['pyext_PATTERN']='%s'+python_SO
	if python_SYSLIBS is not None:
		for lib in python_SYSLIBS.split():
			if lib.startswith('-l'):
				lib=lib[2:]
			env.append_value('LIB_PYEMBED',lib)
	if python_SHLIBS is not None:
		for lib in python_SHLIBS.split():
			if lib.startswith('-l'):
				env.append_value('LIB_PYEMBED',lib[2:])
			else:
				# not a -l flag: pass it through to the linker
				env.append_value('LINKFLAGS_PYEMBED',lib)
	if Options.platform!='darwin'and python_LDFLAGS:
		env.append_value('LINKFLAGS_PYEMBED',python_LDFLAGS.split())
	# locate libpythonX.Y, trying LIBDIR, then LIBPL, then the win32 layout
	result=False
	name='python'+env['PYTHON_VERSION']
	if python_LIBDIR is not None:
		path=[python_LIBDIR]
		conf.log.write("\n\n# Trying LIBDIR: %r\n"%path)
		result=conf.check(lib=name,uselib='PYEMBED',libpath=path)
	if not result and python_LIBPL is not None:
		conf.log.write("\n\n# try again with -L$python_LIBPL (some systems don't install the python library in $prefix/lib)\n")
		path=[python_LIBPL]
		result=conf.check(lib=name,uselib='PYEMBED',libpath=path)
	if not result:
		conf.log.write("\n\n# try again with -L$prefix/libs, and pythonXY name rather than pythonX.Y (win32)\n")
		path=[os.path.join(python_prefix,"libs")]
		name='python'+env['PYTHON_VERSION'].replace('.','')
		result=conf.check(lib=name,uselib='PYEMBED',libpath=path)
	if result:
		env['LIBPATH_PYEMBED']=path
		env.append_value('LIB_PYEMBED',name)
	else:
		conf.log.write("\n\n### LIB NOT FOUND\n")
	# extensions link against libpython only where required/available
	if(sys.platform=='win32'or sys.platform.startswith('os2')or sys.platform=='darwin'or Py_ENABLE_SHARED):
		env['LIBPATH_PYEXT']=env['LIBPATH_PYEMBED']
		env['LIB_PYEXT']=env['LIB_PYEMBED']
	# prefer python-config for include paths, fall back to distutils INCLUDEPY
	python_config=conf.find_program('python%s-config'%('.'.join(env['PYTHON_VERSION'].split('.')[:2])),var='PYTHON_CONFIG')
	if not python_config:
		python_config=conf.find_program('python-config-%s'%('.'.join(env['PYTHON_VERSION'].split('.')[:2])),var='PYTHON_CONFIG')
	includes=[]
	if python_config:
		for incstr in Utils.cmd_output("%s %s --includes"%(python,python_config)).strip().split():
			if(incstr.startswith('-I')or incstr.startswith('/I')):
				incstr=incstr[2:]
			if incstr not in includes:
				includes.append(incstr)
		conf.log.write("Include path for Python extensions ""(found via python-config --includes): %r\n"%(includes,))
		env['CPPPATH_PYEXT']=includes
		env['CPPPATH_PYEMBED']=includes
	else:
		conf.log.write("Include path for Python extensions ""(found via distutils module): %r\n"%(INCLUDEPY,))
		env['CPPPATH_PYEXT']=[INCLUDEPY]
		env['CPPPATH_PYEMBED']=[INCLUDEPY]
	if env['CC_NAME']=='gcc':
		env.append_value('CCFLAGS_PYEMBED','-fno-strict-aliasing')
		env.append_value('CCFLAGS_PYEXT','-fno-strict-aliasing')
	if env['CXX_NAME']=='gcc':
		env.append_value('CXXFLAGS_PYEMBED','-fno-strict-aliasing')
		env.append_value('CXXFLAGS_PYEXT','-fno-strict-aliasing')
	conf.check(define_name='HAVE_PYTHON_H',uselib='PYEMBED',fragment=FRAG_2,errmsg='Could not find the python development headers',mandatory=mandatory)
def check_python_version(conf,minver=None):
	# Check the version of the configured python executable against the
	# optional (major, minor, ...) tuple 'minver' and compute PYTHONDIR.
	assert minver is None or isinstance(minver,tuple)
	python=conf.env['PYTHON']
	if not python:
		conf.fatal('could not find the python executable')
	# print the 5 components of sys.version_info one per line
	cmd=[python,"-c","import sys\nfor x in sys.version_info: print(str(x))"]
	debug('python: Running python command %r'%cmd)
	proc=Utils.pproc.Popen(cmd,stdout=Utils.pproc.PIPE)
	lines=proc.communicate()[0].split()
	assert len(lines)==5,"found %i lines, expected 5: %r"%(len(lines),lines)
	pyver_tuple=(int(lines[0]),int(lines[1]),int(lines[2]),lines[3],int(lines[4]))
	result=(minver is None)or(pyver_tuple>=minver)
	if result:
		pyver='.'.join([str(x)for x in pyver_tuple[:2]])
		conf.env['PYTHON_VERSION']=pyver
		# PYTHONDIR: environment override, else ask distutils
		if'PYTHONDIR'in conf.environ:
			pydir=conf.environ['PYTHONDIR']
		else:
			if sys.platform=='win32':
				(python_LIBDEST,pydir)=_get_python_variables(python,["get_config_var('LIBDEST')","get_python_lib(standard_lib=0, prefix=%r)"%conf.env['PREFIX']],['from distutils.sysconfig import get_config_var, get_python_lib'])
			else:
				python_LIBDEST=None
				(pydir,)=_get_python_variables(python,["get_python_lib(standard_lib=0, prefix=%r)"%conf.env['PREFIX']],['from distutils.sysconfig import get_config_var, get_python_lib'])
			if python_LIBDEST is None:
				if conf.env['LIBDIR']:
					python_LIBDEST=os.path.join(conf.env['LIBDIR'],"python"+pyver)
				else:
					python_LIBDEST=os.path.join(conf.env['PREFIX'],"lib","python"+pyver)
		if hasattr(conf,'define'):
			conf.define('PYTHONDIR',pydir)
		conf.env['PYTHONDIR']=pydir
	pyver_full='.'.join(map(str,pyver_tuple[:3]))
	if minver is None:
		conf.check_message_custom('Python version','',pyver_full)
	else:
		minver_str='.'.join(map(str,minver))
		conf.check_message('Python version',">= %s"%minver_str,result,option=pyver_full)
	if not result:
		conf.fatal('The python version is too old (%r)'%pyver_full)
def check_python_module(conf,module_name):
	"""Check that 'module_name' can be imported by the configured python."""
	cmd=[conf.env['PYTHON'],"-c","import %s"%module_name]
	proc=Utils.pproc.Popen(cmd,stderr=Utils.pproc.PIPE,stdout=Utils.pproc.PIPE)
	result=not proc.wait()
	conf.check_message('Python module',module_name,result)
	if not result:
		conf.fatal('Could not find the python module %r'%module_name)
def detect(conf):
	"""Configure the python tool: locate the interpreter, set default flags."""
	if not conf.env.PYTHON:
		conf.env.PYTHON=sys.executable
	python=conf.find_program('python',var='PYTHON')
	if not python:
		conf.fatal('Could not find the path of the python executable')
	env=conf.env
	env['PYCMD']='"import sys, py_compile;py_compile.compile(sys.argv[1], sys.argv[2])"'
	env['PYFLAGS']=''
	env['PYFLAGS_OPT']='-O'
	# byte-compilation toggles set by --nopyc/--nopyo
	env['PYC']=getattr(Options.options,'pyc',1)
	env['PYO']=getattr(Options.options,'pyo',1)
def set_options(opt):
	# command-line switches controlling installation of compiled bytecode
	opt.add_option('--nopyc',action='store_false',default=1,help='Do not install bytecode compiled .pyc files (configuration) [Default:install]',dest='pyc')
	opt.add_option('--nopyo',action='store_false',default=1,help='Do not install optimised compiled .pyo files (configuration) [Default:install]',dest='pyo')
# Register the functions above as task generator methods / extension hooks
# (decorator functions applied in call form, as produced by the waf minifier).
before('apply_incpaths','apply_lib_vars','apply_type_vars')(init_pyext)
feature('pyext')(init_pyext)
before('apply_bundle')(init_pyext)
before('apply_link','apply_lib_vars','apply_type_vars')(pyext_shlib_ext)
after('apply_bundle')(pyext_shlib_ext)
feature('pyext')(pyext_shlib_ext)
before('apply_incpaths','apply_lib_vars','apply_type_vars')(init_pyembed)
feature('pyembed')(init_pyembed)
extension(EXT_PY)(process_py)
before('apply_core')(init_py)
after('vars_target_cprogram','vars_target_cshlib')(init_py)
feature('py')(init_py)
# expose the checks as configuration methods
conf(check_python_headers)
conf(check_python_version)
conf(check_python_module)

View file

@ -0,0 +1,376 @@
#! /usr/bin/env python
# encoding: utf-8
try:
from xml.sax import make_parser
from xml.sax.handler import ContentHandler
except ImportError:
has_xml=False
ContentHandler=object
else:
has_xml=True
import os,sys
import ccroot,cxx
import TaskGen,Task,Utils,Runner,Options,Node,Configure
from TaskGen import taskgen,feature,after,extension
from Logs import error
from Constants import*
# Header extensions tried when resolving a .moc dependency back to its header.
MOC_H=['.h','.hpp','.hxx','.hh']
# Source extensions routed to the qt4 hooks registered at the bottom of this file.
EXT_RCC=['.qrc']
EXT_UI=['.ui']
EXT_QT4=['.cpp','.cc','.cxx','.C']
class qxx_task(Task.Task):
	# C++ compilation task that also schedules the moc tasks for the .moc
	# files included by its source, before the compilation itself runs.
	before=['cxx_link','static_link']
	def __init__(self,*k,**kw):
		Task.Task.__init__(self,*k,**kw)
		# flag: moc tasks have been created for this source
		self.moc_done=0
	def scan(self):
		# delegate to the c/c++ scanner, but report .moc files as raw names
		# (they are generated, so they cannot be found as nodes yet)
		(nodes,names)=ccroot.scan(self)
		for x in nodes:
			if x.name.endswith('.moc'):
				nodes.remove(x)
				names.append(x.relpath_gen(self.inputs[0].parent))
		return(nodes,names)
	def runnable_status(self):
		if self.moc_done:
			# wait until all moc tasks have run
			for t in self.run_after:
				if not t.hasrun:
					return ASK_LATER
			self.signature()
			return Task.Task.runnable_status(self)
		else:
			for t in self.run_after:
				if not t.hasrun:
					return ASK_LATER
			# first pass: create the moc tasks, then come back later
			self.add_moc_tasks()
			return ASK_LATER
	def add_moc_tasks(self):
		# create a moc task for each .moc dependency of the input file
		node=self.inputs[0]
		tree=node.__class__.bld
		try:
			# compute the signature to populate the dependency info
			self.signature()
		except KeyError:
			pass
		else:
			# discard the cached signature so it is recomputed after moc runs
			delattr(self,'cache_sig')
		moctasks=[]
		mocfiles=[]
		variant=node.variant(self.env)
		try:
			tmp_lst=tree.raw_deps[self.unique_id()]
			tree.raw_deps[self.unique_id()]=[]
		except KeyError:
			tmp_lst=[]
		for d in tmp_lst:
			if not d.endswith('.moc'):continue
			if d in mocfiles:
				error("paranoia owns")
				continue
			mocfiles.append(d)
			# find the header matching the .moc file along the include paths
			base2=d[:-4]
			for path in[node.parent]+self.generator.env['INC_PATHS']:
				tree.rescan(path)
				vals=getattr(Options.options,'qt_header_ext','')or MOC_H
				for ex in vals:
					h_node=path.find_resource(base2+ex)
					if h_node:
						break
				else:
					continue
				break
			else:
				raise Utils.WafError("no header found for %s which is a moc file"%str(d))
			m_node=h_node.change_ext('.moc')
			tree.node_deps[(self.inputs[0].parent.id,self.env.variant(),m_node.name)]=h_node
			# inject the moc task directly into the scheduler
			task=Task.TaskBase.classes['moc'](self.env,normal=0)
			task.set_inputs(h_node)
			task.set_outputs(m_node)
			generator=tree.generator
			generator.outstanding.insert(0,task)
			generator.total+=1
			moctasks.append(task)
		# second pass over the stored node deps (cached builds)
		tmp_lst=tree.raw_deps[self.unique_id()]=mocfiles
		lst=tree.node_deps.get(self.unique_id(),())
		for d in lst:
			name=d.name
			if name.endswith('.moc'):
				task=Task.TaskBase.classes['moc'](self.env,normal=0)
				task.set_inputs(tree.node_deps[(self.inputs[0].parent.id,self.env.variant(),name)])
				task.set_outputs(d)
				generator=tree.generator
				generator.outstanding.insert(0,task)
				generator.total+=1
				moctasks.append(task)
		self.run_after=moctasks
		self.moc_done=1
	# compile exactly like a plain c++ task
	run=Task.TaskBase.classes['cxx'].__dict__['run']
def translation_update(task):
	"""Run lupdate on each input source, updating all output .ts files."""
	out_paths=[a.abspath(task.env)for a in task.outputs]
	joined=" ".join(out_paths)
	lupdate=task.env['QT_LUPDATE']
	for src in task.inputs:
		src_path=src.abspath(task.env)
		cmd="%s %s -ts %s"%(lupdate,src_path,joined)
		Utils.pprint('BLUE',cmd)
		task.generator.bld.exec_command(cmd)
class XMLHandler(ContentHandler):
	"""SAX handler collecting the text of the <file> elements of a .qrc file."""
	def __init__(self):
		self.buf=[]
		self.files=[]
	def startElement(self,name,attrs):
		if name!='file':
			return
		# reset the character buffer for the new entry
		self.buf=[]
	def endElement(self,name):
		if name!='file':
			return
		self.files.append(''.join(self.buf))
	def characters(self,cars):
		self.buf.append(cars)
def scan(self):
	"""Parse the .qrc input with SAX; return (nodes found, names not found)."""
	qrc_node=self.inputs[0]
	handler=XMLHandler()
	parser=make_parser()
	parser.setContentHandler(handler)
	stream=open(qrc_node.abspath(self.env))
	parser.parse(stream)
	stream.close()
	found=[]
	missing=[]
	root=qrc_node.parent
	for entry in handler.files:
		nd=root.find_resource(entry)
		if nd:
			found.append(nd)
		else:
			missing.append(entry)
	return(found,missing)
def create_rcc_task(self,node):
	"""Create rcc + compile tasks for a .qrc file; returns the cxx task."""
	rc_cpp=node.change_ext('_rc.cpp')
	self.create_task('rcc',node,rc_cpp)
	obj_task=self.create_task('cxx',rc_cpp,rc_cpp.change_ext('.o'))
	self.compiled_tasks.append(obj_task)
	return obj_task
def create_uic_task(self,node):
	"""Create a uic task producing the ui_*.h header for a .ui file."""
	task=self.create_task('ui4',node)
	header_name=self.env['ui_PATTERN']%node.name[:-3]
	task.outputs=[self.path.find_or_declare(header_name)]
	return task
class qt4_taskgen(cxx.cxx_taskgen):
	# C++ task generator with the 'qt4' feature enabled by default.
	def __init__(self,*k,**kw):
		cxx.cxx_taskgen.__init__(self,*k,**kw)
		self.features.append('qt4')
def add_lang(self,node):
	# extension hook for .ts translation sources: accumulate them on self.lang
	self.lang=self.to_list(getattr(self,'lang',[]))+[node]
def apply_qt4(self):
	# Feature method: create ts2qm tasks for translations, optionally an
	# lupdate task and a qm2rcc bundle, then propagate moc flags.
	if getattr(self,'lang',None):
		update=getattr(self,'update',None)
		lst=[]
		trans=[]
		for l in self.to_list(self.lang):
			if not isinstance(l,Node.Node):
				l=self.path.find_resource(l+'.ts')
			t=self.create_task('ts2qm',l,l.change_ext('.qm'))
			lst.append(t.outputs[0])
			if update:
				trans.append(t.inputs[0])
		trans_qt4=getattr(Options.options,'trans_qt4',False)
		if update and trans_qt4:
			# refresh the .ts files from the compiled sources
			u=Task.TaskCmd(translation_update,self.env,2)
			u.inputs=[a.inputs[0]for a in self.compiled_tasks]
			u.outputs=trans
		if getattr(self,'langname',None):
			# bundle the .qm files into a qrc resource and link it in
			t=Task.TaskBase.classes['qm2rcc'](self.env)
			t.set_inputs(lst)
			t.set_outputs(self.path.find_or_declare(self.langname+'.qrc'))
			t.path=self.path
			k=create_rcc_task(self,t.outputs[0])
			self.link_task.inputs.append(k.outputs[0])
	self.env.append_value('MOC_FLAGS',self.env._CXXDEFFLAGS)
	self.env.append_value('MOC_FLAGS',self.env._CXXINCFLAGS)
def cxx_hook(self,node):
	"""Compile a C++ source through the moc-aware 'qxx' task."""
	try:
		obj_ext=self.obj_ext
	except AttributeError:
		obj_ext='_%d.o'%self.idx
	task=self.create_task('qxx',node,node.change_ext(obj_ext))
	self.compiled_tasks.append(task)
	return task
def process_qm2rcc(task):
	"""Write a .qrc resource file listing every input .qm file."""
	outfile=task.outputs[0].abspath(task.env)
	out=open(outfile,'w')
	out.write('<!DOCTYPE RCC><RCC version="1.0">\n<qresource>\n')
	for qm_node in task.inputs:
		out.write(' <file>')
		out.write(qm_node.path_to_parent(task.path))
		out.write('</file>\n')
	out.write('</qresource>\n</RCC>')
	out.close()
# Declare the qt tool tasks and their ordering constraints.
b=Task.simple_task_type
b('moc','${QT_MOC} ${MOC_FLAGS} ${SRC} ${MOC_ST} ${TGT}',color='BLUE',vars=['QT_MOC','MOC_FLAGS'],shell=False)
cls=b('rcc','${QT_RCC} -name ${SRC[0].name} ${SRC[0].abspath(env)} ${RCC_ST} -o ${TGT}',color='BLUE',before='cxx moc qxx_task',after="qm2rcc",shell=False)
# rcc tasks use the SAX-based scan defined above to find the listed files
cls.scan=scan
b('ui4','${QT_UIC} ${SRC} -o ${TGT}',color='BLUE',before='cxx moc qxx_task',shell=False)
b('ts2qm','${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}',color='BLUE',before='qm2rcc',shell=False)
Task.task_type_from_func('qm2rcc',vars=[],func=process_qm2rcc,color='BLUE',before='rcc',after='ts2qm')
def detect_qt4(conf):
	# Locate qmake and the qt4 tools (uic/moc/rcc/lrelease/lupdate), then
	# configure the uselib variables for the Qt modules, either via
	# pkg-config or by probing the library directories directly.
	env=conf.env
	opt=Options.options
	qtdir=getattr(opt,'qtdir','')
	qtbin=getattr(opt,'qtbin','')
	qtlibs=getattr(opt,'qtlibs','')
	useframework=getattr(opt,'use_qt4_osxframework',True)
	# build the list of directories searched for the qt binaries
	paths=[]
	if qtbin:
		paths=[qtbin]
	if not qtdir:
		qtdir=conf.environ.get('QT4_ROOT','')
		qtbin=os.path.join(qtdir,'bin')
		paths=[qtbin]
	if not qtdir:
		# no hint given: use PATH plus the usual locations, preferring the
		# most recent /usr/local/Trolltech install if present
		paths=os.environ.get('PATH','').split(os.pathsep)
		paths.append('/usr/share/qt4/bin/')
		try:
			lst=os.listdir('/usr/local/Trolltech/')
		except OSError:
			pass
		else:
			if lst:
				lst.sort()
				lst.reverse()
				qtdir='/usr/local/Trolltech/%s/'%lst[0]
				qtbin=os.path.join(qtdir,'bin')
				paths.append(qtbin)
	# pick the qmake reporting the highest QT_VERSION
	cand=None
	prev_ver=['4','0','0']
	for qmk in['qmake-qt4','qmake4','qmake']:
		qmake=conf.find_program(qmk,path_list=paths)
		if qmake:
			try:
				version=Utils.cmd_output([qmake,'-query','QT_VERSION']).strip()
			except ValueError:
				pass
			else:
				if version:
					new_ver=version.split('.')
					if new_ver>prev_ver:
						cand=qmake
						prev_ver=new_ver
	if cand:
		qmake=cand
	else:
		conf.fatal('could not find qmake for qt4')
	conf.env.QMAKE=qmake
	# query the installation layout from qmake
	qtincludes=Utils.cmd_output([qmake,'-query','QT_INSTALL_HEADERS']).strip()
	qtdir=Utils.cmd_output([qmake,'-query','QT_INSTALL_PREFIX']).strip()+os.sep
	qtbin=Utils.cmd_output([qmake,'-query','QT_INSTALL_BINS']).strip()+os.sep
	if not qtlibs:
		try:
			qtlibs=Utils.cmd_output([qmake,'-query','QT_INSTALL_LIBS']).strip()+os.sep
		except ValueError:
			qtlibs=os.path.join(qtdir,'lib')
	def find_bin(lst,var):
		# store the first program of 'lst' found on 'paths' into env[var]
		for f in lst:
			ret=conf.find_program(f,path_list=paths)
			if ret:
				env[var]=ret
				break
	vars="QtCore QtGui QtUiTools QtNetwork QtOpenGL QtSql QtSvg QtTest QtXml QtWebKit Qt3Support".split()
	find_bin(['uic-qt3','uic3'],'QT_UIC3')
	find_bin(['uic-qt4','uic'],'QT_UIC')
	if not env['QT_UIC']:
		conf.fatal('cannot find the uic compiler for qt4')
	# make sure the uic found is the qt4 one
	try:
		version=Utils.cmd_output(env['QT_UIC']+" -version 2>&1").strip()
	except ValueError:
		conf.fatal('your uic compiler is for qt3, add uic for qt4 to your path')
	version=version.replace('Qt User Interface Compiler ','')
	version=version.replace('User Interface Compiler for Qt','')
	if version.find(" 3.")!=-1:
		conf.check_message('uic version','(too old)',0,option='(%s)'%version)
		sys.exit(1)
	conf.check_message('uic version','',1,option='(%s)'%version)
	find_bin(['moc-qt4','moc'],'QT_MOC')
	find_bin(['rcc'],'QT_RCC')
	find_bin(['lrelease-qt4','lrelease'],'QT_LRELEASE')
	find_bin(['lupdate-qt4','lupdate'],'QT_LUPDATE')
	env['UIC3_ST']='%s -o %s'
	env['UIC_ST']='%s -o %s'
	env['MOC_ST']='-o'
	env['ui_PATTERN']='ui_%s.h'
	env['QT_LRELEASE_FLAGS']=['-silent']
	vars_debug=[a+'_debug'for a in vars]
	try:
		conf.find_program('pkg-config',var='pkgconfig',path_list=paths,mandatory=True)
	except Configure.ConfigurationError:
		# no pkg-config: probe for the libraries on disk directly
		for lib in vars_debug+vars:
			uselib=lib.upper()
			d=(lib.find('_debug')>0)and'd'or''
			for(pat,kind)in((conf.env.staticlib_PATTERN,'STATIC'),(conf.env.shlib_PATTERN,'')):
				conf.check_message_1('Checking for %s %s'%(lib,kind))
				for ext in['','4']:
					path=os.path.join(qtlibs,pat%(lib+d+ext))
					if os.path.exists(path):
						env.append_unique(kind+'LIB_'+uselib,lib+d+ext)
						conf.check_message_2('ok '+path,'GREEN')
						break
					path=os.path.join(qtbin,pat%(lib+d+ext))
					if os.path.exists(path):
						env.append_unique(kind+'LIB_'+uselib,lib+d+ext)
						conf.check_message_2('ok '+path,'GREEN')
						break
				else:
					conf.check_message_2('not found','YELLOW')
					continue
				break
			env.append_unique('LIBPATH_'+uselib,qtlibs)
			env.append_unique('CPPPATH_'+uselib,qtincludes)
			env.append_unique('CPPPATH_'+uselib,qtincludes+os.sep+lib)
	else:
		# pkg-config available: let it fill in the flags per module
		for i in vars_debug+vars:
			try:
				conf.check_cfg(package=i,args='--cflags --libs --silence-errors',path=conf.env.pkgconfig)
			except ValueError:
				pass
	def process_lib(vars_,coreval):
		# drop lib paths already provided by QtCore from the other modules
		for d in vars_:
			var=d.upper()
			if var=='QTCORE':continue
			value=env['LIBPATH_'+var]
			if value:
				core=env[coreval]
				accu=[]
				for lib in value:
					if lib in core:continue
					accu.append(lib)
				env['LIBPATH_'+var]=accu
	process_lib(vars,'LIBPATH_QTCORE')
	process_lib(vars_debug,'LIBPATH_QTCORE_DEBUG')
	want_rpath=getattr(Options.options,'want_rpath',1)
	if want_rpath:
		def process_rpath(vars_,coreval):
			# derive rpath flags from the remaining lib paths
			for d in vars_:
				var=d.upper()
				value=env['LIBPATH_'+var]
				if value:
					core=env[coreval]
					accu=[]
					for lib in value:
						if var!='QTCORE':
							if lib in core:
								continue
						accu.append('-Wl,--rpath='+lib)
					env['RPATH_'+var]=accu
		process_rpath(vars,'LIBPATH_QTCORE')
		process_rpath(vars_debug,'LIBPATH_QTCORE_DEBUG')
	env['QTLOCALE']=str(env['PREFIX'])+'/share/locale'
def detect(conf):
	# tool entry point: all the work is done in detect_qt4
	detect_qt4(conf)
def set_options(opt):
	# command-line options for locating and configuring qt4
	opt.add_option('--want-rpath',type='int',default=1,dest='want_rpath',help='set rpath to 1 or 0 [Default 1]')
	opt.add_option('--header-ext',type='string',default='',help='header extension for moc files',dest='qt_header_ext')
	for i in'qtdir qtbin qtlibs'.split():
		opt.add_option('--'+i,type='string',default='',dest=i)
	if sys.platform=="darwin":
		opt.add_option('--no-qt4-framework',action="store_false",help='do not use the framework version of Qt4 in OS X',dest='use_qt4_osxframework',default=True)
	opt.add_option('--translate',action="store_true",help="collect translation strings",dest="trans_qt4",default=False)
# Register the extension hooks and the qt4 feature method.
extension(EXT_RCC)(create_rcc_task)
extension(EXT_UI)(create_uic_task)
extension('.ts')(add_lang)
feature('qt4')(apply_qt4)
after('apply_link')(apply_qt4)
extension(EXT_QT4)(cxx_hook)

View file

@ -0,0 +1,88 @@
#! /usr/bin/env python
# encoding: utf-8
import os
import Task,Options,Utils
from TaskGen import before,feature,after
from Configure import conf
def init_rubyext(self):
	"""Prepare a task generator for building a ruby native extension:
	install into the ruby arch dir and guarantee the RUBY/RUBYEXT
	uselib entries are present."""
	self.default_install_path='${ARCHDIR_RUBY}'
	libs=self.to_list(getattr(self,'uselib',''))
	for required in('RUBY','RUBYEXT'):
		if required not in libs:
			libs.append(required)
	self.uselib=libs
def apply_ruby_so_name(self):
	"""Name the produced shared object the way the ruby interpreter
	expects extensions to be named (rubyext_PATTERN)."""
	env=self.env
	env['shlib_PATTERN']=env['rubyext_PATTERN']
def check_ruby_version(conf,minver=()):
	"""Find the ruby interpreter and validate its version against minver.

	The binary comes from --with-ruby-binary when given, otherwise it is
	looked up on PATH.  Stores RUBY and RUBY_VERSION in conf.env; fatal
	on any failure.
	"""
	if Options.options.rubybinary:
		conf.env.RUBY=Options.options.rubybinary
	else:
		conf.find_program("ruby",var="RUBY",mandatory=True)
	ruby=conf.env.RUBY
	try:
		# old rubies expose VERSION, newer ones RUBY_VERSION
		version=Utils.cmd_output([ruby,'-e','puts defined?(VERSION) ? VERSION : RUBY_VERSION']).strip()
	except:
		conf.fatal('could not determine ruby version')
	conf.env.RUBY_VERSION=version
	try:
		ver=tuple(map(int,version.split(".")))
	except:
		conf.fatal('unsupported ruby version %r'%version)
	cver=''
	if minver:
		if ver<minver:
			conf.fatal('ruby is too old')
		cver=".".join([str(x)for x in minver])
	conf.check_message('ruby',cver,True,version)
def check_ruby_ext_devel(conf):
	"""Check that the headers and link flags needed to build ruby native
	extensions are available, filling the RUBYEXT uselib variables and
	the install directories (ARCHDIR_RUBY, LIBDIR_RUBY)."""
	if not conf.env.RUBY:
		conf.fatal('ruby detection is required first')
	if not conf.env.CC_NAME and not conf.env.CXX_NAME:
		conf.fatal('load a c/c++ compiler first')
	version=tuple(map(int,conf.env.RUBY_VERSION.split(".")))
	def read_out(cmd):
		# run a one-liner in the detected ruby and split its output
		return Utils.to_list(Utils.cmd_output([conf.env.RUBY,'-rrbconfig','-e',cmd]))
	def read_config(key):
		return read_out('puts Config::CONFIG[%r]'%key)
	ruby=conf.env['RUBY']
	archdir=read_config('archdir')
	cpppath=archdir
	if version>=(1,9,0):
		# ruby >= 1.9 keeps its headers in rubyhdrdir, not archdir
		ruby_hdrdir=read_config('rubyhdrdir')
		cpppath+=ruby_hdrdir
		cpppath+=[os.path.join(ruby_hdrdir[0],read_config('arch')[0])]
	conf.check(header_name='ruby.h',includes=cpppath,mandatory=True,errmsg='could not find ruby header file')
	conf.env.LIBPATH_RUBYEXT=read_config('libdir')
	conf.env.LIBPATH_RUBYEXT+=archdir
	conf.env.CPPPATH_RUBYEXT=cpppath
	conf.env.CCFLAGS_RUBYEXT=read_config("CCDLFLAGS")
	conf.env.rubyext_PATTERN='%s.'+read_config('DLEXT')[0]
	flags=read_config('LDSHARED')
	while flags and flags[0][0]!='-':
		# drop the leading compiler name(s) from LDSHARED, keep only flags
		flags=flags[1:]
	if len(flags)>1 and flags[1]=="ppc":
		# NOTE(review): presumably strips an OS X '-arch ppc' leftover — confirm
		flags=flags[2:]
	conf.env.LINKFLAGS_RUBYEXT=flags
	conf.env.LINKFLAGS_RUBYEXT+=read_config("LIBS")
	conf.env.LINKFLAGS_RUBYEXT+=read_config("LIBRUBYARG_SHARED")
	if Options.options.rubyarchdir:
		conf.env.ARCHDIR_RUBY=Options.options.rubyarchdir
	else:
		conf.env.ARCHDIR_RUBY=read_config('sitearchdir')[0]
	if Options.options.rubylibdir:
		conf.env.LIBDIR_RUBY=Options.options.rubylibdir
	else:
		conf.env.LIBDIR_RUBY=read_config('sitelibdir')[0]
def set_options(opt):
	"""Register the ruby-related command line options."""
	for flag,dest,text in(
		('--with-ruby-archdir','rubyarchdir','Specify directory where to install arch specific files'),
		('--with-ruby-libdir','rubylibdir','Specify alternate ruby library path'),
		('--with-ruby-binary','rubybinary','Specify alternate ruby binary'),
	):
		opt.add_option(flag,type='string',dest=dest,help=text)
# Tool registration: wire the handlers above into the task-generator
# machinery and expose the checks as configuration methods.
feature('rubyext')(init_rubyext)
before('apply_incpaths','apply_type_vars','apply_lib_vars','apply_bundle')(init_rubyext)
after('default_cc','vars_target_cshlib')(init_rubyext)
feature('rubyext')(apply_ruby_so_name)
before('apply_link')(apply_ruby_so_name)
conf(check_ruby_version)
conf(check_ruby_ext_devel)

View file

@ -0,0 +1,56 @@
#! /usr/bin/env python
# encoding: utf-8
import os,optparse
import Utils,Options,Configure
import ccroot,ar
from Configure import conftest
def find_scc(conf):
	"""Locate the Sun C compiler ('cc'), verifying it really is suncc by
	probing the Sun-specific -flags option; fatal when not found."""
	v=conf.env
	cc=None
	if v['CC']:cc=v['CC']
	elif'CC'in conf.environ:cc=conf.environ['CC']
	if not cc:cc=conf.find_program('cc',var='CC')
	if not cc:conf.fatal('suncc was not found')
	cc=conf.cmd_to_list(cc)
	try:
		# only suncc accepts -flags; empty output means some other 'cc'
		if not Utils.cmd_output(cc+['-flags']):
			conf.fatal('suncc %r was not found'%cc)
	except ValueError:
		conf.fatal('suncc -flags could not be executed')
	v['CC']=cc
	v['CC_NAME']='sun'
def scc_common_flags(conf):
	"""Fill conf.env with the flag templates and file patterns used by the
	Sun C compiler (same variable layout as the gcc tool)."""
	v=conf.env
	if not v['LINK_CC']:
		v['LINK_CC']=v['CC']
	for key,value in(
		('CC_SRC_F',''),
		('CC_TGT_F',['-c','-o','']),
		('CPPPATH_ST','-I%s'),
		('CCLNK_SRC_F',''),
		('CCLNK_TGT_F',['-o','']),
		('LIB_ST','-l%s'),
		('LIBPATH_ST','-L%s'),
		('STATICLIB_ST','-l%s'),
		('STATICLIBPATH_ST','-L%s'),
		('CCDEFINES_ST','-D%s'),
		('SONAME_ST','-Wl,-h -Wl,%s'),
		('SHLIB_MARKER','-Bdynamic'),
		('STATICLIB_MARKER','-Bstatic'),
		('program_PATTERN','%s'),
		('shlib_CCFLAGS',['-Kpic','-DPIC']),
		('shlib_LINKFLAGS',['-G']),
		('shlib_PATTERN','lib%s.so'),
		('staticlib_LINKFLAGS',['-Bstatic']),
		('staticlib_PATTERN','lib%s.a'),
	):
		v[key]=value
# Ordered list of configuration steps executed by conf.check_tool('suncc').
detect='''
find_scc
find_cpp
find_ar
scc_common_flags
cc_load_tools
cc_add_flags
link_add_flags
'''
# Expose the local steps as configuration methods (conf.find_scc(), ...).
conftest(find_scc)
conftest(scc_common_flags)

View file

@ -0,0 +1,56 @@
#! /usr/bin/env python
# encoding: utf-8
import os,optparse
import Utils,Options,Configure
import ccroot,ar
from Configure import conftest
def find_sxx(conf):
	"""Locate the Sun C++ compiler ('c++'), verifying it by probing the
	Sun-specific -flags option; fatal when not found."""
	v=conf.env
	cc=None
	if v['CXX']:cc=v['CXX']
	elif'CXX'in conf.environ:cc=conf.environ['CXX']
	if not cc:cc=conf.find_program('c++',var='CXX')
	if not cc:conf.fatal('sunc++ was not found')
	cc=conf.cmd_to_list(cc)
	try:
		# only the sun compiler accepts -flags
		if not Utils.cmd_output(cc+['-flags']):
			conf.fatal('sunc++ %r was not found'%cc)
	except ValueError:
		conf.fatal('sunc++ -flags could not be executed')
	v['CXX']=cc
	v['CXX_NAME']='sun'
def sxx_common_flags(conf):
	"""Fill conf.env with the flag templates and file patterns used by the
	Sun C++ compiler (same variable layout as the g++ tool)."""
	v=conf.env
	if not v['LINK_CXX']:
		v['LINK_CXX']=v['CXX']
	for key,value in(
		('CXX_SRC_F',''),
		('CXX_TGT_F',['-c','-o','']),
		('CPPPATH_ST','-I%s'),
		('CXXLNK_SRC_F',''),
		('CXXLNK_TGT_F',['-o','']),
		('LIB_ST','-l%s'),
		('LIBPATH_ST','-L%s'),
		('STATICLIB_ST','-l%s'),
		('STATICLIBPATH_ST','-L%s'),
		('CXXDEFINES_ST','-D%s'),
		('SONAME_ST','-Wl,-h -Wl,%s'),
		('SHLIB_MARKER','-Bdynamic'),
		('STATICLIB_MARKER','-Bstatic'),
		('program_PATTERN','%s'),
		('shlib_CXXFLAGS',['-Kpic','-DPIC']),
		('shlib_LINKFLAGS',['-G']),
		('shlib_PATTERN','lib%s.so'),
		('staticlib_LINKFLAGS',['-Bstatic']),
		('staticlib_PATTERN','lib%s.a'),
	):
		v[key]=value
# Ordered list of configuration steps executed by conf.check_tool('suncxx').
detect='''
find_sxx
find_cpp
find_ar
sxx_common_flags
cxx_load_tools
cxx_add_flags
link_add_flags
'''
# Expose the local steps as configuration methods (conf.find_sxx(), ...).
conftest(find_sxx)
conftest(sxx_common_flags)

View file

@ -0,0 +1,183 @@
#! /usr/bin/env python
# encoding: utf-8
import os,re
import Utils,TaskGen,Task,Runner,Build
from TaskGen import feature,before
from Logs import error,warn,debug
# Matches \include{...}, \input{...}, \import{...}, \bringin{...} and
# \lstinputlisting{...} so the scanner can follow TeX file dependencies.
re_tex=re.compile(r'\\(?P<type>include|input|import|bringin|lstinputlisting){(?P<file>[^{}]*)}',re.M)
def scan(self):
	"""Dependency scanner for (pdf)latex tasks: parse the source for
	\\include/\\input-style references and resolve them relative to the
	source directory.  Returns (nodes_found, unresolved_names)."""
	node=self.inputs[0]
	env=self.env
	nodes=[]
	names=[]
	if not node:return(nodes,names)
	code=Utils.readf(node.abspath(env))
	curdirnode=self.curdirnode
	abs=curdirnode.abspath()
	for match in re_tex.finditer(code):
		path=match.group('file')
		if path:
			# try the name as written, then with the usual TeX suffixes
			for k in['','.tex','.ltx']:
				debug('tex: trying %s%s'%(path,k))
				try:
					os.stat(abs+os.sep+path+k)
				except OSError:
					continue
				found=path+k
				node=curdirnode.find_resource(found)
				if node:
					nodes.append(node)
			# for-else: runs when the loop above was not broken out of
			else:
				debug('tex: could not find %s'%path)
				names.append(path)
	debug("tex: found the following : %s and names %s"%(nodes,names))
	return(nodes,names)
# Command wrappers built once at module load; ${SRCFILE} is filled in per
# task run by tex_build().
latex_fun,_=Task.compile_fun('latex','${LATEX} ${LATEXFLAGS} ${SRCFILE}',shell=False)
pdflatex_fun,_=Task.compile_fun('pdflatex','${PDFLATEX} ${PDFLATEXFLAGS} ${SRCFILE}',shell=False)
bibtex_fun,_=Task.compile_fun('bibtex','${BIBTEX} ${BIBTEXFLAGS} ${SRCFILE}',shell=False)
# fixed copy-paste slip: the makeindex wrapper was built under the label
# 'bibtex'; use the accurate name so diagnostics refer to the right tool
makeindex_fun,_=Task.compile_fun('makeindex','${MAKEINDEX} ${MAKEINDEXFLAGS} ${SRCFILE}',shell=False)
# detects \bibdata in the .aux file, i.e. whether bibtex must be run
g_bibtex_re=re.compile('bibdata',re.M)
def tex_build(task,command='LATEX'):
	"""Run (pdf)latex on the task input, invoke bibtex/makeindex when the
	intermediate files call for them, then re-run the compiler until the
	.aux checksum stabilises (at most 10 passes).

	Returns a non-zero exit status on failure, None on success.
	"""
	env=task.env
	bld=task.generator.bld
	if not env['PROMPT_LATEX']:
		# non-interactive builds must not stop at the TeX error prompt
		env.append_value('LATEXFLAGS','-interaction=batchmode')
		env.append_value('PDFLATEXFLAGS','-interaction=batchmode')
	fun=latex_fun
	if command=='PDFLATEX':
		fun=pdflatex_fun
	node=task.inputs[0]
	reldir=node.bld_dir(env)
	srcfile=node.abspath(env)
	# search path covering both the source and the build directory
	sr2=node.parent.abspath()+os.pathsep+node.parent.abspath(env)+os.pathsep
	aux_node=node.change_ext('.aux')
	idx_node=node.change_ext('.idx')
	nm=aux_node.name
	docuname=nm[:len(nm)-4]
	task.cwd=task.inputs[0].parent.abspath(task.env)
	warn('first pass on %s'%command)
	task.env.env={'TEXINPUTS':sr2}
	task.env.SRCFILE=srcfile
	ret=fun(task)
	if ret:
		return ret
	try:
		ct=Utils.readf(aux_node.abspath(env))
	except(OSError,IOError):
		error('error bibtex scan')
	else:
		# run bibtex only when the aux file references a bibliography
		fo=g_bibtex_re.findall(ct)
		if fo:
			warn('calling bibtex')
			task.env.env={'BIBINPUTS':sr2,'BSTINPUTS':sr2}
			task.env.SRCFILE=docuname
			ret=bibtex_fun(task)
			if ret:
				error('error when calling bibtex %s'%docuname)
				return ret
	try:
		idx_path=idx_node.abspath(env)
		os.stat(idx_path)
	except OSError:
		# no .idx file -> no index; not an actual failure
		error('error file.idx scan')
	else:
		warn('calling makeindex')
		task.env.SRCFILE=idx_node.name
		task.env.env={}
		ret=makeindex_fun(task)
		if ret:
			error('error when calling makeindex %s'%idx_path)
			return ret
	hash=''
	i=0
	while i<10:
		# re-run until the .aux checksum no longer changes (fixed point)
		i+=1
		prev_hash=hash
		try:
			# h_file opens the file, so a missing/unreadable aux raises
			# IOError/OSError (the former KeyError-only clause never fired)
			hash=Utils.h_file(aux_node.abspath(env))
		except(KeyError,OSError,IOError):
			error('could not read aux.h -> %s'%aux_node.abspath(env))
			pass
		if hash and hash==prev_hash:
			break
		warn('calling %s'%command)
		task.env.env={'TEXINPUTS':sr2+os.pathsep}
		task.env.SRCFILE=srcfile
		ret=fun(task)
		if ret:
			# fixed NameError: the message referenced an undefined
			# 'latex_compile_cmd'; report the source file instead
			error('error when calling %s %s'%(command,srcfile))
			return ret
	return None
# Environment variables that invalidate the task signature when changed.
latex_vardeps=['LATEX','LATEXFLAGS']
def latex_build(task):
	# Entry point for the 'latex' task type (DVI output).
	return tex_build(task,'LATEX')
pdflatex_vardeps=['PDFLATEX','PDFLATEXFLAGS']
def pdflatex_build(task):
	# Entry point for the 'pdflatex' task type (PDF output).
	return tex_build(task,'PDFLATEX')
class tex_taskgen(TaskGen.task_gen):
	# Task generator for the 'tex' feature; the behaviour lives in apply_tex.
	def __init__(self,*k,**kw):
		TaskGen.task_gen.__init__(self,*k,**kw)
def apply_tex(self):
	"""Create the latex/pdflatex tasks for every source file of the task
	generator, record extra declared dependencies, and add post-processing
	tasks (dvips/dvipdf/pdf2ps) according to the 'outs' attribute."""
	if not getattr(self,'type',None)in['latex','pdflatex']:
		self.type='pdflatex'
	tree=self.bld
	outs=Utils.to_list(getattr(self,'outs',[]))
	self.env['PROMPT_LATEX']=getattr(self,'prompt',1)
	deps_lst=[]
	if getattr(self,'deps',None):
		# user-declared extra dependencies (e.g. images, style files)
		deps=self.to_list(self.deps)
		for filename in deps:
			n=self.path.find_resource(filename)
			if not n in deps_lst:deps_lst.append(n)
	self.source=self.to_list(self.source)
	for filename in self.source:
		base,ext=os.path.splitext(filename)
		node=self.path.find_resource(filename)
		if not node:raise Utils.WafError('cannot find %s'%filename)
		if self.type=='latex':
			task=self.create_task('latex',node,node.change_ext('.dvi'))
		elif self.type=='pdflatex':
			task=self.create_task('pdflatex',node,node.change_ext('.pdf'))
		task.env=self.env
		task.curdirnode=self.path
		if deps_lst:
			# merge the manual dependencies into the scanner results
			variant=node.variant(self.env)
			try:
				lst=tree.node_deps[task.unique_id()]
				for n in deps_lst:
					if not n in lst:
						lst.append(n)
			except KeyError:
				tree.node_deps[task.unique_id()]=deps_lst
		if self.type=='latex':
			if'ps'in outs:
				tsk=self.create_task('dvips',task.outputs,node.change_ext('.ps'))
				tsk.env.env={'TEXINPUTS':node.parent.abspath()+os.pathsep+self.path.abspath()+os.pathsep+self.path.abspath(self.env)}
			if'pdf'in outs:
				tsk=self.create_task('dvipdf',task.outputs,node.change_ext('.pdf'))
				tsk.env.env={'TEXINPUTS':node.parent.abspath()+os.pathsep+self.path.abspath()+os.pathsep+self.path.abspath(self.env)}
		elif self.type=='pdflatex':
			if'ps'in outs:
				self.create_task('pdf2ps',task.outputs,node.change_ext('.ps'))
	# prevent the default compilation logic from processing these sources
	self.source=[]
def detect(conf):
	"""Locate the TeX toolchain programs and initialise their FLAGS vars."""
	env=conf.env
	programs=('tex','latex','pdflatex','bibtex','dvips','dvipdf','ps2pdf','makeindex','pdf2ps')
	for prog in programs:
		upper=prog.upper()
		conf.find_program(prog,var=upper)
		env[upper+'FLAGS']=''
	env['DVIPSFLAGS']='-Ppdf'
# Register the TeX task types; the 'after' constraints make the
# post-processing steps run once the compilers have produced their output.
b=Task.simple_task_type
b('tex','${TEX} ${TEXFLAGS} ${SRC}',color='BLUE',shell=False)
b('bibtex','${BIBTEX} ${BIBTEXFLAGS} ${SRC}',color='BLUE',shell=False)
b('dvips','${DVIPS} ${DVIPSFLAGS} ${SRC} -o ${TGT}',color='BLUE',after="latex pdflatex tex bibtex",shell=False)
b('dvipdf','${DVIPDF} ${DVIPDFFLAGS} ${SRC} ${TGT}',color='BLUE',after="latex pdflatex tex bibtex",shell=False)
b('pdf2ps','${PDF2PS} ${PDF2PSFLAGS} ${SRC} ${TGT}',color='BLUE',after="dvipdf pdflatex",shell=False)
# The main compiler tasks use the python functions above plus the scanner.
b=Task.task_type_from_func
cls=b('latex',latex_build,vars=latex_vardeps)
cls.scan=scan
cls=b('pdflatex',pdflatex_build,vars=pdflatex_vardeps)
cls.scan=scan
feature('tex')(apply_tex)
before('apply_core')(apply_tex)

View file

@ -0,0 +1,211 @@
#! /usr/bin/env python
# encoding: utf-8
import os,sys
import Build,TaskGen,Utils,Options,Logs,Task
from TaskGen import before,after,feature
from Constants import*
class unit_test(object):
	"""Collects, runs and reports the unit tests declared by task
	generators carrying a 'unit_test' attribute (older API kept next to
	the task-based 'utest' implementation below)."""
	def __init__(self):
		# exit status considered a success
		self.returncode_ok=0
		self.num_tests_ok=0
		self.num_tests_failed=0
		self.num_tests_err=0
		self.total_num_tests=0
		self.max_label_length=0
		# label -> (filename, srcdir), in declaration order
		self.unit_tests=Utils.ordered_dict()
		self.unit_test_results={}
		self.unit_test_erroneous={}
		self.change_to_testfile_dir=False
		self.want_to_see_test_output=False
		self.want_to_see_test_error=False
		# only run when this waf command was requested
		self.run_if_waf_does='check'
	def run(self):
		"""Discover the test binaries from the build and execute them,
		filling the result dictionaries."""
		self.num_tests_ok=0
		self.num_tests_failed=0
		self.num_tests_err=0
		self.total_num_tests=0
		self.max_label_length=0
		self.unit_tests=Utils.ordered_dict()
		self.unit_test_results={}
		self.unit_test_erroneous={}
		ld_library_path=[]
		if not Options.commands[self.run_if_waf_does]:return
		for obj in Build.bld.all_task_gen:
			try:
				link_task=obj.link_task
			except AttributeError:
				pass
			else:
				# remember every output dir so tests can load built libs
				lib_path=link_task.outputs[0].parent.abspath(obj.env)
				if lib_path not in ld_library_path:
					ld_library_path.append(lib_path)
			unit_test=getattr(obj,'unit_test','')
			if unit_test and'cprogram'in obj.features:
				try:
					output=obj.path
					filename=os.path.join(output.abspath(obj.env),obj.target)
					srcdir=output.abspath()
					label=os.path.join(output.bldpath(obj.env),obj.target)
					self.max_label_length=max(self.max_label_length,len(label))
					self.unit_tests[label]=(filename,srcdir)
				except KeyError:
					pass
		self.total_num_tests=len(self.unit_tests)
		Utils.pprint('GREEN','Running the unit tests')
		count=0
		result=1
		for label in self.unit_tests.allkeys:
			file_and_src=self.unit_tests[label]
			filename=file_and_src[0]
			srcdir=file_and_src[1]
			count+=1
			line=Build.bld.progress_line(count,self.total_num_tests,Logs.colors.GREEN,Logs.colors.NORMAL)
			if Options.options.progress_bar and line:
				sys.stderr.write(line)
				sys.stderr.flush()
			try:
				kwargs={}
				kwargs['env']=os.environ.copy()
				if self.change_to_testfile_dir:
					kwargs['cwd']=srcdir
				if not self.want_to_see_test_output:
					kwargs['stdout']=Utils.pproc.PIPE
				if not self.want_to_see_test_error:
					kwargs['stderr']=Utils.pproc.PIPE
				if ld_library_path:
					v=kwargs['env']
					def add_path(dct,path,var):
						dct[var]=os.pathsep.join(Utils.to_list(path)+[os.environ.get(var,'')])
					# platform-specific shared library search variable
					if sys.platform=='win32':
						add_path(v,ld_library_path,'PATH')
					elif sys.platform=='darwin':
						add_path(v,ld_library_path,'DYLD_LIBRARY_PATH')
						add_path(v,ld_library_path,'LD_LIBRARY_PATH')
					else:
						add_path(v,ld_library_path,'LD_LIBRARY_PATH')
				pp=Utils.pproc.Popen(filename,**kwargs)
				(out,err)=pp.communicate()
				result=int(pp.returncode==self.returncode_ok)
				if result:
					self.num_tests_ok+=1
				else:
					self.num_tests_failed+=1
				self.unit_test_results[label]=result
				self.unit_test_erroneous[label]=0
			except OSError:
				# the binary could not be launched at all
				self.unit_test_erroneous[label]=1
				self.num_tests_err+=1
			except KeyboardInterrupt:
				pass
		if Options.options.progress_bar:sys.stdout.write(Logs.colors.cursor_on)
	def print_results(self):
		"""Pretty-print a per-test OK/FAILED/ERROR table and the totals."""
		if not Options.commands[self.run_if_waf_does]:return
		p=Utils.pprint
		if self.total_num_tests==0:
			p('YELLOW','No unit tests present')
			return
		for label in self.unit_tests.allkeys:
			filename=self.unit_tests[label]
			err=0
			result=0
			try:err=self.unit_test_erroneous[label]
			except KeyError:pass
			try:result=self.unit_test_results[label]
			except KeyError:pass
			# pad with dots so the status column lines up
			n=self.max_label_length-len(label)
			if err:n+=4
			elif result:n+=7
			else:n+=3
			line='%s %s'%(label,'.'*n)
			if err:p('RED','%sERROR'%line)
			elif result:p('GREEN','%sOK'%line)
			else:p('YELLOW','%sFAILED'%line)
		percentage_ok=float(self.num_tests_ok)/float(self.total_num_tests)*100.0
		percentage_failed=float(self.num_tests_failed)/float(self.total_num_tests)*100.0
		percentage_erroneous=float(self.num_tests_err)/float(self.total_num_tests)*100.0
		p('NORMAL','''
Successful tests: %i (%.1f%%)
Failed tests: %i (%.1f%%)
Erroneous tests: %i (%.1f%%)
Total number of tests: %i
'''%(self.num_tests_ok,percentage_ok,self.num_tests_failed,percentage_failed,self.num_tests_err,percentage_erroneous,self.total_num_tests))
		p('GREEN','Unit tests finished')
# Lock serialising writes to bld.utest_results: utest tasks may execute
# concurrently in the parallel build threads.
import threading
testlock=threading.Lock()
def set_options(opt):
	# NOTE(review): default=True makes --alltests a no-op as shipped —
	# looks like it was meant to default to False; confirm before changing.
	opt.add_option('--alltests',action='store_true',default=True,help='Exec all unit tests',dest='all_tests')
def make_test(self):
	"""Attach a 'utest' task to a cprogram task generator so the binary
	gets executed after linking; anything else is rejected."""
	if'cprogram'in self.features:
		# unit test binaries are not installed
		self.default_install_path=None
		self.create_task('utest',self.link_task.outputs)
	else:
		Logs.error('test cannot be executed %s'%self)
def exec_test(self):
	"""Task body for 'utest': run the test binary with the library output
	directories added to the dynamic linker path, and append
	(filename, returncode, stdout, stderr) to bld.utest_results."""
	status=0
	variant=self.env.variant()
	filename=self.inputs[0].abspath(self.env)
	self.ut_exec=getattr(self,'ut_exec',[filename])
	if getattr(self.generator,'ut_fun',None):
		# user hook that may adjust ut_exec before launching
		self.generator.ut_fun(self)
	try:
		fu=getattr(self.generator.bld,'all_test_paths')
	except AttributeError:
		# computed once per build and cached on the build context
		fu=os.environ.copy()
		self.generator.bld.all_test_paths=fu
		lst=[]
		for obj in self.generator.bld.all_task_gen:
			link_task=getattr(obj,'link_task',None)
			if link_task and link_task.env.variant()==variant:
				lst.append(link_task.outputs[0].parent.abspath(obj.env))
		def add_path(dct,path,var):
			dct[var]=os.pathsep.join(Utils.to_list(path)+[os.environ.get(var,'')])
		if sys.platform=='win32':
			add_path(fu,lst,'PATH')
		elif sys.platform=='darwin':
			add_path(fu,lst,'DYLD_LIBRARY_PATH')
			add_path(fu,lst,'LD_LIBRARY_PATH')
		else:
			add_path(fu,lst,'LD_LIBRARY_PATH')
	cwd=getattr(self.generator,'ut_cwd','')or self.inputs[0].parent.abspath(self.env)
	proc=Utils.pproc.Popen(self.ut_exec,cwd=cwd,env=fu,stderr=Utils.pproc.PIPE,stdout=Utils.pproc.PIPE)
	(stdout,stderr)=proc.communicate()
	tup=(filename,proc.returncode,stdout,stderr)
	self.generator.utest_result=tup
	# bld.utest_results is shared between threads -> guarded by testlock
	testlock.acquire()
	try:
		bld=self.generator.bld
		Logs.debug("ut: %r",tup)
		try:
			bld.utest_results.append(tup)
		except AttributeError:
			bld.utest_results=[tup]
	finally:
		testlock.release()
# Register the 'utest' task type and widen its runnable_status so that
# up-to-date tests are still executed when --alltests is set.
cls=Task.task_type_from_func('utest',func=exec_test,color='PINK',ext_in='.bin')
old=cls.runnable_status
def test_status(self):
	ret=old(self)
	if ret==SKIP_ME and getattr(Options.options,'all_tests',False):
		return RUN_ME
	return ret
cls.runnable_status=test_status
cls.quiet=1
def summary(bld):
	"""Print a pass/fail summary of all collected unit-test results
	(meant to be used as a build post-function)."""
	results=getattr(bld,'utest_results',[])
	if not results:
		return
	Utils.pprint('CYAN','execution summary')
	total=len(results)
	failed=len([entry for entry in results if entry[1]])
	Utils.pprint('CYAN',' tests that pass %d/%d'%(total-failed,total))
	for(path,code,out,err)in results:
		if not code:
			Utils.pprint('CYAN',' %s'%path)
	Utils.pprint('CYAN',' tests that fail %d/%d'%(failed,total))
	for(path,code,out,err)in results:
		if code:
			Utils.pprint('CYAN',' %s'%path)
# Wire make_test into the task-generator machinery, after linking.
feature('test')(make_test)
after('apply_link','vars_target_cprogram')(make_test)

View file

@ -0,0 +1,235 @@
#! /usr/bin/env python
# encoding: utf-8
import os.path,shutil
import Task,Runner,Utils,Logs,Build,Node,Options
from TaskGen import extension,after,before
# Source extensions handled by the vala tool (.gs is the Genie dialect).
EXT_VALA=['.vala','.gs']
class valac_task(Task.Task):
	"""Compile .vala/.gs sources to C with valac; must run before cc/cxx
	since the generated C files feed the compiler tasks."""
	vars=("VALAC","VALAC_VERSION","VALAFLAGS")
	before=("cc","cxx")
	def run(self):
		# Build the valac command line from the attributes prepared in
		# vala_file(), run it, then relocate stray outputs.
		env=self.env
		inputs=[a.srcpath(env)for a in self.inputs]
		valac=env['VALAC']
		vala_flags=env.get_flat('VALAFLAGS')
		top_src=self.generator.bld.srcnode.abspath()
		top_bld=self.generator.bld.srcnode.abspath(env)
		if env['VALAC_VERSION']>(0,1,6):
			cmd=[valac,'-C','--quiet',vala_flags]
		else:
			cmd=[valac,'-C',vala_flags]
		if self.threading:
			cmd.append('--thread')
		if self.profile:
			cmd.append('--profile=%s'%self.profile)
		if self.target_glib:
			cmd.append('--target-glib=%s'%self.target_glib)
		features=self.generator.features
		if'cshlib'in features or'cstaticlib'in features:
			# libraries additionally produce a .vapi (and possibly header/gir)
			output_dir=self.outputs[0].bld_dir(env)
			cmd.append('--library '+self.target)
			if env['VALAC_VERSION']>=(0,7,0):
				for x in self.outputs:
					if x.name.endswith('.h'):
						cmd.append('--header '+x.bldpath(self.env))
			cmd.append('--basedir '+top_src)
			cmd.append('-d '+top_bld)
			if env['VALAC_VERSION']>(0,7,2)and hasattr(self,'gir'):
				cmd.append('--gir=%s.gir'%self.gir)
		else:
			output_dir=self.outputs[0].bld_dir(env)
			cmd.append('-d %s'%output_dir)
		for vapi_dir in self.vapi_dirs:
			cmd.append('--vapidir=%s'%vapi_dir)
		for package in self.packages:
			cmd.append('--pkg %s'%package)
		for package in self.packages_private:
			cmd.append('--pkg %s'%package)
		cmd.append(" ".join(inputs))
		result=self.generator.bld.exec_command(" ".join(cmd))
		if not'cprogram'in features:
			# write the .deps file listing the packages this library needs
			if self.packages:
				filename=os.path.join(self.generator.path.abspath(env),"%s.deps"%self.target)
				deps=open(filename,'w')
				for package in self.packages:
					deps.write(package+'\n')
				deps.close()
			self._fix_output("../%s.vapi"%self.target)
			self._fix_output("%s.vapi"%self.target)
			self._fix_output("%s.gidl"%self.target)
			self._fix_output("%s.gir"%self.target)
			if hasattr(self,'gir'):
				self._fix_output("%s.gir"%self.gir)
		first=None
		for node in self.outputs:
			if not first:
				first=node
			else:
				if first.parent.id!=node.parent.id:
					# old valac wrote siblings next to the first output
					if env['VALAC_VERSION']<(0,7,0):
						shutil.move(first.parent.abspath(self.env)+os.sep+node.name,node.abspath(self.env))
		return result
	def install(self):
		# Install headers, .vapi/.deps and .gir files for library targets.
		bld=self.generator.bld
		features=self.generator.features
		if self.attr("install_path")and("cshlib"in features or"cstaticlib"in features):
			headers_list=[o for o in self.outputs if o.suffix()==".h"]
			vapi_list=[o for o in self.outputs if(o.suffix()in(".vapi",".deps"))]
			gir_list=[o for o in self.outputs if o.suffix()==".gir"]
			for header in headers_list:
				top_src=self.generator.bld.srcnode
				package=self.env['PACKAGE']
				try:
					api_version=Utils.g_module.API_VERSION
				except AttributeError:
					# derive an API version from the project VERSION
					version=Utils.g_module.VERSION.split(".")
					if version[0]=="0":
						api_version="0."+version[1]
					else:
						api_version=version[0]+".0"
				install_path='${INCLUDEDIR}/%s-%s/%s'%(package,api_version,header.relpath_gen(top_src))
				bld.install_as(install_path,header,self.env)
			bld.install_files('${DATAROOTDIR}/vala/vapi',vapi_list,self.env)
			bld.install_files('${DATAROOTDIR}/gir-1.0',gir_list,self.env)
	def _fix_output(self,output):
		# Best-effort move of an output valac may have written outside the
		# expected directory; silently ignores files that were not produced.
		top_bld=self.generator.bld.srcnode.abspath(self.env)
		try:
			src=os.path.join(top_bld,output)
			dst=self.generator.path.abspath(self.env)
			shutil.move(src,dst)
		except:
			pass
def vala_file(self,node):
	"""Extension handler for .vala/.gs files: create (or reuse) the single
	valac task of this task generator and declare the .c/.h/.vapi/.gir/...
	outputs expected for the given source node."""
	valatask=getattr(self,"valatask",None)
	# all vala sources of a task generator share one valac invocation
	if not valatask:
		valatask=self.create_task('valac')
		self.valatask=valatask
		self.includes=Utils.to_list(getattr(self,'includes',[]))
		self.uselib=self.to_list(self.uselib)
		valatask.packages=[]
		valatask.packages_private=Utils.to_list(getattr(self,'packages_private',[]))
		valatask.vapi_dirs=[]
		valatask.target=self.target
		valatask.threading=False
		valatask.install_path=self.install_path
		valatask.profile=getattr(self,'profile','gobject')
		valatask.target_glib=None
		packages=Utils.to_list(getattr(self,'packages',[]))
		vapi_dirs=Utils.to_list(getattr(self,'vapi_dirs',[]))
		includes=[]
		if hasattr(self,'uselib_local'):
			# walk the local dependency graph to pick up generated .vapi
			# files, their directories, and task ordering constraints
			local_packages=Utils.to_list(self.uselib_local)
			seen=[]
			while len(local_packages)>0:
				package=local_packages.pop()
				if package in seen:
					continue
				seen.append(package)
				package_obj=self.name_to_obj(package)
				if not package_obj:
					raise Utils.WafError("object '%s' was not found in uselib_local (required by '%s')"%(package,self.name))
				package_name=package_obj.target
				package_node=package_obj.path
				package_dir=package_node.relpath_gen(self.path)
				for task in package_obj.tasks:
					for output in task.outputs:
						if output.name==package_name+".vapi":
							valatask.set_run_after(task)
							if package_name not in packages:
								packages.append(package_name)
							if package_dir not in vapi_dirs:
								vapi_dirs.append(package_dir)
							if package_dir not in includes:
								includes.append(package_dir)
				if hasattr(package_obj,'uselib_local'):
					lst=self.to_list(package_obj.uselib_local)
					lst.reverse()
					local_packages=[pkg for pkg in lst if pkg not in seen]+local_packages
		valatask.packages=packages
		for vapi_dir in vapi_dirs:
			try:
				valatask.vapi_dirs.append(self.path.find_dir(vapi_dir).abspath())
				valatask.vapi_dirs.append(self.path.find_dir(vapi_dir).abspath(self.env))
			except AttributeError:
				Logs.warn("Unable to locate Vala API directory: '%s'"%vapi_dir)
		self.includes.append(node.bld.srcnode.abspath())
		self.includes.append(node.bld.srcnode.abspath(self.env))
		for include in includes:
			try:
				self.includes.append(self.path.find_dir(include).abspath())
				self.includes.append(self.path.find_dir(include).abspath(self.env))
			except AttributeError:
				Logs.warn("Unable to locate include directory: '%s'"%include)
		if valatask.profile=='gobject':
			if hasattr(self,'target_glib'):
				Logs.warn('target_glib on vala tasks is deprecated --vala-target-glib=MAJOR.MINOR from the vala tool options')
			if getattr(Options.options,'vala_target_glib',None):
				valatask.target_glib=Options.options.vala_target_glib
			if not'GOBJECT'in self.uselib:
				self.uselib.append('GOBJECT')
		if hasattr(self,'threading'):
			if valatask.profile=='gobject':
				valatask.threading=self.threading
				if not'GTHREAD'in self.uselib:
					self.uselib.append('GTHREAD')
			else:
				Logs.warn("Profile %s does not have threading support"%valatask.profile)
		if hasattr(self,'gir'):
			valatask.gir=self.gir
	env=valatask.env
	output_nodes=[]
	c_node=node.change_ext('.c')
	output_nodes.append(c_node)
	# feed the generated C file back into the c/cxx compilation
	self.allnodes.append(c_node)
	if env['VALAC_VERSION']<(0,7,0):
		output_nodes.append(node.change_ext('.h'))
	else:
		if not'cprogram'in self.features:
			output_nodes.append(self.path.find_or_declare('%s.h'%self.target))
	if not'cprogram'in self.features:
		# libraries also produce introspection metadata; which file
		# depends on the valac version
		output_nodes.append(self.path.find_or_declare('%s.vapi'%self.target))
		if env['VALAC_VERSION']>(0,7,2):
			if hasattr(self,'gir'):
				output_nodes.append(self.path.find_or_declare('%s.gir'%self.gir))
		elif env['VALAC_VERSION']>(0,3,5):
			output_nodes.append(self.path.find_or_declare('%s.gir'%self.target))
		elif env['VALAC_VERSION']>(0,1,7):
			output_nodes.append(self.path.find_or_declare('%s.gidl'%self.target))
		if valatask.packages:
			output_nodes.append(self.path.find_or_declare('%s.deps'%self.target))
	valatask.inputs.append(node)
	valatask.outputs.extend(output_nodes)
def detect(conf):
	"""Find valac, check the glib dependencies (gobject/gthread) and the
	compiler version, storing VALAC_VERSION and VALAFLAGS in conf.env."""
	min_version=(0,1,6)
	min_version_str="%d.%d.%d"%min_version
	valac=conf.find_program('valac',var='VALAC',mandatory=True)
	if not conf.env["HAVE_GOBJECT"]:
		pkg_args={'package':'gobject-2.0','uselib_store':'GOBJECT','args':'--cflags --libs'}
		if getattr(Options.options,'vala_target_glib',None):
			pkg_args['atleast_version']=Options.options.vala_target_glib
		conf.check_cfg(**pkg_args)
	if not conf.env["HAVE_GTHREAD"]:
		pkg_args={'package':'gthread-2.0','uselib_store':'GTHREAD','args':'--cflags --libs'}
		if getattr(Options.options,'vala_target_glib',None):
			pkg_args['atleast_version']=Options.options.vala_target_glib
		conf.check_cfg(**pkg_args)
	try:
		# 'valac --version' prints e.g. "Vala 0.7.10" -> (0, 7, 10)
		output=Utils.cmd_output(valac+" --version",silent=True)
		version=output.split(' ',1)[-1].strip().split(".")[0:3]
		version=[int(x)for x in version]
		valac_version=tuple(version)
	except Exception:
		valac_version=(0,0,0)
	conf.check_message('program version','valac >= '+min_version_str,valac_version>=min_version,"%d.%d.%d"%valac_version)
	conf.check_tool('gnu_dirs')
	if valac_version<min_version:
		conf.fatal("valac version too old to be used with this tool")
		return
	conf.env['VALAC_VERSION']=valac_version
	conf.env['VALAFLAGS']=''
def set_options(opt):
	"""Add the vala compiler option group to the command line parser."""
	group=opt.add_option_group('Vala Compiler Options')
	group.add_option('--vala-target-glib',default=None,dest='vala_target_glib',metavar='MAJOR.MINOR',help='Target version of glib for Vala GObject code generation')
# Route .vala/.gs source files to the handler above.
extension(EXT_VALA)(vala_file)

View file

@ -0,0 +1,32 @@
#! /usr/bin/env python
# encoding: utf-8
import os,sys,re
import TaskGen,Task
from Utils import quote_whitespace
from TaskGen import extension
# Windows resource files and the command template used to compile them.
EXT_WINRC=['.rc']
winrc_str='${WINRC} ${_CPPDEFFLAGS} ${_CCDEFFLAGS} ${WINRCFLAGS} ${_CPPINCFLAGS} ${_CCINCFLAGS} ${WINRC_TGT_F} ${TGT} ${WINRC_SRC_F} ${SRC}'
def rc_file(self,node):
	"""Create a 'winrc' task for a .rc resource file; the output extension
	depends on the compiler (.res for msvc RC, .rc.o for windres)."""
	if self.env['WINRC_TGT_F']=='/fo':
		out_ext='.res'
	else:
		out_ext='.rc.o'
	task=self.create_task('winrc',node,node.change_ext(out_ext))
	self.compiled_tasks.append(task)
# The resource-compiler task must run before compilation so the generated
# object can be linked together with the ordinary objects.
Task.simple_task_type('winrc',winrc_str,color='BLUE',before='cc cxx',shell=False)
def detect(conf):
	"""Find a resource compiler: windres for gcc-style toolchains, RC for
	msvc (which uses /fo for the target and no source flag)."""
	v=conf.env
	winrc=v['WINRC']
	v['WINRC_TGT_F']='-o'
	v['WINRC_SRC_F']='-i'
	if not winrc:
		if v['CC_NAME']in['gcc','cc','g++','c++']:
			winrc=conf.find_program('windres',var='WINRC',path_list=v['PATH'])
		elif v['CC_NAME']=='msvc':
			winrc=conf.find_program('RC',var='WINRC',path_list=v['PATH'])
			v['WINRC_TGT_F']='/fo'
			v['WINRC_SRC_F']=''
	if not winrc:
		conf.fatal('winrc was not found!')
	v['WINRCFLAGS']=''
extension(EXT_WINRC)(rc_file)

View file

@ -0,0 +1,58 @@
#! /usr/bin/env python
# encoding: utf-8
import os,sys
import Configure,Options,Utils
import ccroot,ar
from Configure import conftest
def find_xlc(conf):
	"""Locate the IBM xlc compiler, preferring the thread-safe xlc_r."""
	cc=conf.find_program(['xlc_r','xlc'],var='CC',mandatory=True)
	cc=conf.cmd_to_list(cc)
	conf.env.CC_NAME='xlc'
	conf.env.CC=cc
def find_cpp(conf):
	"""Pick a C preprocessor: explicit env CPP first, then the process
	environment, falling back to the C compiler itself."""
	env=conf.env
	chosen=env['CPP'] or conf.environ.get('CPP')
	if not chosen:
		chosen=env['CC']
	env['CPP']=chosen
def xlc_common_flags(conf):
	"""Fill conf.env with the flag templates and file patterns used by the
	IBM xlc compiler (same variable layout as the gcc tool)."""
	v=conf.env
	if not v['LINK_CC']:
		v['LINK_CC']=v['CC']
	for key,value in(
		('CCFLAGS_DEBUG',['-g']),
		('CCFLAGS_RELEASE',['-O2']),
		('CC_SRC_F',''),
		('CC_TGT_F',['-c','-o','']),
		('CPPPATH_ST','-I%s'),
		('CCLNK_SRC_F',''),
		('CCLNK_TGT_F',['-o','']),
		('LIB_ST','-l%s'),
		('LIBPATH_ST','-L%s'),
		('STATICLIB_ST','-l%s'),
		('STATICLIBPATH_ST','-L%s'),
		('RPATH_ST','-Wl,-rpath,%s'),
		('CCDEFINES_ST','-D%s'),
		('SONAME_ST',''),
		('SHLIB_MARKER',''),
		('STATICLIB_MARKER',''),
		('FULLSTATIC_MARKER','-static'),
		('program_LINKFLAGS',['-Wl,-brtl']),
		('program_PATTERN','%s'),
		('shlib_CCFLAGS',['-fPIC','-DPIC']),
		('shlib_LINKFLAGS',['-G','-Wl,-brtl,-bexpfull']),
		('shlib_PATTERN','lib%s.so'),
		('staticlib_LINKFLAGS',''),
		('staticlib_PATTERN','lib%s.a'),
	):
		v[key]=value
def detect(conf):
	# Standard waf configuration sequence for the xlc compiler.
	conf.find_xlc()
	conf.find_cpp()
	conf.find_ar()
	conf.xlc_common_flags()
	conf.cc_load_tools()
	conf.cc_add_flags()
	conf.link_add_flags()
# Expose the local steps as configuration methods (conf.find_xlc(), ...).
conftest(find_xlc)
conftest(find_cpp)
conftest(xlc_common_flags)

View file

@ -0,0 +1,58 @@
#! /usr/bin/env python
# encoding: utf-8
import os,sys
import Configure,Options,Utils
import ccroot,ar
from Configure import conftest
def find_xlcxx(conf):
	"""Locate the IBM xlc++ compiler, preferring the thread-safe xlc++_r."""
	cxx=conf.find_program(['xlc++_r','xlc++'],var='CXX',mandatory=True)
	cxx=conf.cmd_to_list(cxx)
	conf.env.CXX_NAME='xlc++'
	conf.env.CXX=cxx
def find_cpp(conf):
	"""Pick a C preprocessor: explicit env CPP first, then the process
	environment, falling back to the C++ compiler itself."""
	env=conf.env
	chosen=env['CPP'] or conf.environ.get('CPP')
	if not chosen:
		chosen=env['CXX']
	env['CPP']=chosen
def xlcxx_common_flags(conf):
	"""Fill conf.env with the flag templates and file patterns used by the
	IBM xlc++ compiler (same variable layout as the g++ tool)."""
	v=conf.env
	if not v['LINK_CXX']:
		v['LINK_CXX']=v['CXX']
	for key,value in(
		('CXXFLAGS_DEBUG',['-g']),
		('CXXFLAGS_RELEASE',['-O2']),
		('CXX_SRC_F',''),
		('CXX_TGT_F',['-c','-o','']),
		('CPPPATH_ST','-I%s'),
		('CXXLNK_SRC_F',''),
		('CXXLNK_TGT_F',['-o','']),
		('LIB_ST','-l%s'),
		('LIBPATH_ST','-L%s'),
		('STATICLIB_ST','-l%s'),
		('STATICLIBPATH_ST','-L%s'),
		('RPATH_ST','-Wl,-rpath,%s'),
		('CXXDEFINES_ST','-D%s'),
		('SONAME_ST',''),
		('SHLIB_MARKER',''),
		('STATICLIB_MARKER',''),
		('FULLSTATIC_MARKER','-static'),
		('program_LINKFLAGS',['-Wl,-brtl']),
		('program_PATTERN','%s'),
		('shlib_CXXFLAGS',['-fPIC','-DPIC']),
		('shlib_LINKFLAGS',['-G','-Wl,-brtl,-bexpfull']),
		('shlib_PATTERN','lib%s.so'),
		('staticlib_LINKFLAGS',''),
		('staticlib_PATTERN','lib%s.a'),
	):
		v[key]=value
def detect(conf):
	# Standard waf configuration sequence for the xlc++ compiler.
	conf.find_xlcxx()
	conf.find_cpp()
	conf.find_ar()
	conf.xlcxx_common_flags()
	conf.cxx_load_tools()
	conf.cxx_add_flags()
	conf.link_add_flags()
# Expose the local steps as configuration methods (conf.find_xlcxx(), ...).
conftest(find_xlcxx)
conftest(find_cpp)
conftest(xlcxx_common_flags)

View file

@ -0,0 +1,532 @@
#! /usr/bin/env python
# encoding: utf-8
import os,sys,imp,string,errno,traceback,inspect,re,shutil,datetime,gc
# Compatibility imports: UserDict moved in python 3, and subprocess is only
# used directly on python >= 2.6 (waf ships a 'pproc' backport otherwise).
try:from UserDict import UserDict
except ImportError:from collections import UserDict
if sys.hexversion>=0x2060000 or os.name=='java':
	import subprocess as pproc
else:
	import pproc
import Logs
from Constants import*
try:
	from collections import deque
except ImportError:
	# pre-2.4 fallback: a list that also supports popleft()
	class deque(list):
		def popleft(self):
			return self.pop(0)
# True on native win32 (not cygwin)
is_win32=sys.platform=='win32'
try:
	from collections import defaultdict as DefaultDict
except ImportError:
	# pre-2.5 fallback implementing the defaultdict behaviour that is used
	# here: missing keys are created via default_factory on first access.
	class DefaultDict(dict):
		def __init__(self,default_factory):
			super(DefaultDict,self).__init__()
			self.default_factory=default_factory
		def __getitem__(self,key):
			try:
				return super(DefaultDict,self).__getitem__(key)
			except KeyError:
				value=self.default_factory()
				self[key]=value
				return value
class WafError(Exception):
	"""Base error type for waf: records the call stack at construction
	time so wscript errors can be located afterwards."""
	def __init__(self,*args):
		self.args=args
		try:
			self.stack=traceback.extract_stack()
		except:
			# stack capture is best-effort only
			pass
		Exception.__init__(self,*args)
	def __str__(self):
		# single truthy argument -> print it bare, otherwise the tuple
		value=len(self.args)==1 and self.args[0]
		if not value:
			value=self.args
		return str(value)
class WscriptError(WafError):
	"""Error raised from a user wscript: prefixes the message with the
	offending file and line, locating them from the traceback when they
	are not given explicitly."""
	def __init__(self,message,wscript_file=None):
		if wscript_file:
			self.wscript_file=wscript_file
			self.wscript_line=None
		else:
			try:
				(self.wscript_file,self.wscript_line)=self.locate_error()
			except:
				(self.wscript_file,self.wscript_line)=(None,None)
		msg_file_line=''
		if self.wscript_file:
			msg_file_line="%s:"%self.wscript_file
			if self.wscript_line:
				msg_file_line+="%s:"%self.wscript_line
		err_message="%s error: %s"%(msg_file_line,message)
		WafError.__init__(self,err_message)
	def locate_error(self):
		# walk the stack innermost-first until a wscript file is found
		stack=traceback.extract_stack()
		stack.reverse()
		for frame in stack:
			file_name=os.path.basename(frame[0])
			is_wscript=(file_name==WSCRIPT_FILE or file_name==WSCRIPT_BUILD_FILE)
			if is_wscript:
				return(frame[0],frame[1])
		return(None,None)
indicator=is_win32 and'\x1b[A\x1b[K%s%s%s\r'or'\x1b[K%s%s%s\r'
try:
from fnv import new as md5
import Constants
Constants.SIG_NIL='signofnv'
def h_file(filename):
m=md5()
try:
m.hfile(filename)
x=m.digest()
if x is None:raise OSError("not a file")
return x
except SystemError:
raise OSError("not a file"+filename)
except ImportError:
try:
try:
from hashlib import md5
except ImportError:
from md5 import md5
def h_file(filename):
f=open(filename,'rb')
m=md5()
while(filename):
filename=f.read(100000)
m.update(filename)
f.close()
return m.digest()
except ImportError:
md5=None
class ordered_dict(UserDict):
	"""Dictionary that remembers key insertion order in self.allkeys."""
	def __init__(self,dict=None):
		self.allkeys=[]
		UserDict.__init__(self,dict)
	def __setitem__(self,key,item):
		# record first insertion only; re-assignment keeps the old position
		if key not in self.allkeys:
			self.allkeys.append(key)
		UserDict.__setitem__(self,key,item)
	def __delitem__(self,key):
		self.allkeys.remove(key)
		UserDict.__delitem__(self,key)
def exec_command(s,**kw):
	"""Run the command s through Popen and return its exit status.

	A 'log' keyword redirects both stdout and stderr to that object.
	Strings run through the shell; argument lists do not.
	Returns -1 when the command cannot be started (OSError).
	"""
	if 'log' in kw:
		log=kw.pop('log')
		kw['stdout']=kw['stderr']=log
	kw['shell']=isinstance(s,str)
	try:
		return pproc.Popen(s,**kw).wait()
	except OSError:
		return -1
if is_win32:
def exec_command(s,**kw):
if'log'in kw:
kw['stdout']=kw['stderr']=kw['log']
del(kw['log'])
kw['shell']=isinstance(s,str)
if len(s)>2000:
startupinfo=pproc.STARTUPINFO()
startupinfo.dwFlags|=pproc.STARTF_USESHOWWINDOW
kw['startupinfo']=startupinfo
try:
if'stdout'not in kw:
kw['stdout']=pproc.PIPE
kw['stderr']=pproc.PIPE
kw['universal_newlines']=True
proc=pproc.Popen(s,**kw)
(stdout,stderr)=proc.communicate()
Logs.info(stdout)
if stderr:
Logs.error(stderr)
return proc.returncode
else:
proc=pproc.Popen(s,**kw)
return proc.wait()
except OSError:
return-1
listdir=os.listdir
if is_win32:
def listdir_win32(s):
if re.match('^[A-Za-z]:$',s):
s+=os.sep
if not os.path.isdir(s):
e=OSError()
e.errno=errno.ENOENT
raise e
return os.listdir(s)
listdir=listdir_win32
def waf_version(mini=0x010000,maxi=0x100000):
	# Abort (sys.exit(1)) unless the running waf version HEXVERSION lies in
	# [mini, maxi].  Bounds may be given as ints (0x010203) or as dotted
	# strings ('1.2.3'), converted by replacing dots with zeros and parsing hex.
	ver=HEXVERSION
	try:min_val=mini+0
	except TypeError:min_val=int(mini.replace('.','0'),16)
	if min_val>ver:
		Logs.error("waf version should be at least %s (%s found)"%(mini,ver))
		sys.exit(1)
	try:max_val=maxi+0
	except TypeError:max_val=int(maxi.replace('.','0'),16)
	if max_val<ver:
		Logs.error("waf version should be at most %s (%s found)"%(maxi,ver))
		sys.exit(1)
def python_24_guard():
	"""Raise ImportError unless running on Python 2.4/2.5/2.6.

	The waf raw sources are Python 2 only; the 3.x line is rejected here
	(a separate fixer script converts the sources for Python 3).
	"""
	ver=sys.hexversion
	if ver<0x20400f0 or ver>=0x3000000:
		raise ImportError("Waf requires Python >= 2.3 but the raw source requires Python 2.4, 2.5 or 2.6")
def ex_stack():
	# Return the current exception as a string: the full formatted traceback
	# when Logs.verbose > 1, otherwise just the exception value.
	exc_type,exc_value,tb=sys.exc_info()
	if Logs.verbose>1:
		exc_lines=traceback.format_exception(exc_type,exc_value,tb)
		return''.join(exc_lines)
	return str(exc_value)
def to_list(sth):
	"""Return sth split on whitespace when it is a string, else sth unchanged."""
	return sth.split() if isinstance(sth,str) else sth
g_loaded_modules={}
g_module=None
def load_module(file_path,name=WSCRIPT_FILE):
	# Load a wscript file as a python module, caching it in g_loaded_modules.
	# The raw source is kept on module.waf_hash_val so build signatures can
	# incorporate the script contents.
	try:
		return g_loaded_modules[file_path]
	except KeyError:
		pass
	module=imp.new_module(name)
	try:
		code=readf(file_path,m='rU')
	except(IOError,OSError):
		raise WscriptError('Could not read the file %r'%file_path)
	module.waf_hash_val=code
	dt=os.path.dirname(file_path)
	# make the script's own directory importable while it executes
	# NOTE(review): sys.path.remove is skipped when the exec raises — the
	# entry leaks in that (fatal) error path
	sys.path.insert(0,dt)
	try:
		exec(compile(code,file_path,'exec'),module.__dict__)
	except Exception:
		exc_type,exc_value,tb=sys.exc_info()
		raise WscriptError("".join(traceback.format_exception(exc_type,exc_value,tb)),file_path)
	sys.path.remove(dt)
	g_loaded_modules[file_path]=module
	return module
def set_main_module(file_path):
	# Load the top-level wscript as module 'wscript_main' into the global
	# g_module, recording its path and providing defaults for APPNAME and
	# VERSION when the script does not define them.
	global g_module
	g_module=load_module(file_path,'wscript_main')
	g_module.root_path=file_path
	try:
		g_module.APPNAME
	except:
		g_module.APPNAME='noname'
	try:
		g_module.VERSION
	except:
		g_module.VERSION='1.0'
def to_hashtable(s):
	"""Parse "key=value" lines from s into a dict.

	Empty lines are skipped.  Each line is split on the FIRST '=' only, so
	values may themselves contain '=' characters — the previous code split
	on every '=' and silently truncated such values to their first segment.
	"""
	tbl={}
	for line in s.split('\n'):
		if not line:
			continue
		key,val=line.split('=',1)
		tbl[key]=val
	return tbl
def get_term_cols():
return 80
try:
import struct,fcntl,termios
except ImportError:
pass
else:
if Logs.got_tty:
def myfun():
dummy_lines,cols=struct.unpack("HHHH",fcntl.ioctl(sys.stderr.fileno(),termios.TIOCGWINSZ,struct.pack("HHHH",0,0,0,0)))[:2]
return cols
try:
myfun()
except:
pass
else:
get_term_cols=myfun
rot_idx=0
rot_chr=['\\','|','/','-']
def split_path(path):
	"""Split a path on '/' (generic POSIX flavour)."""
	return path.split('/')
def split_path_cygwin(path):
	"""Split a path on '/', folding a leading '//' (UNC-style) prefix into the first component."""
	if path.startswith('//'):
		parts=path.split('/')[2:]
		parts[0]='/'+parts[0]
		return parts
	return path.split('/')
# separator pattern for win32: forward or backward slash
re_sp=re.compile('[/\\\\]')
def split_path_win32(path):
	"""Split a path on '/' or '\\', folding a leading '\\\\' UNC prefix into the first component."""
	if path.startswith('\\\\'):
		parts=re_sp.split(path)[2:]
		parts[0]='\\'+parts[0]
		return parts
	return re_sp.split(path)
# pick the path-splitting flavour that matches the host platform
if sys.platform=='cygwin':
	split_path=split_path_cygwin
elif is_win32:
	split_path=split_path_win32
def copy_attrs(orig,dest,names,only_if_set=False):
	"""Copy the attributes listed in names (string or list) from orig to dest.

	Attributes missing on orig default to the empty tuple; when only_if_set
	is true, falsy values are not copied at all.
	"""
	for name in to_list(names):
		value=getattr(orig,name,())
		if value or not only_if_set:
			setattr(dest,name,value)
def def_attrs(cls,**kw):
	'''
	set attributes for class.
	@param cls [any class]: the class to update the given attributes in.
	@param kw [dictionary]: dictionary of attributes names and values.
	if the given class hasn't one (or more) of these attributes, add the attribute with its value to the class.
	'''
	# use items() instead of the Python2-only iteritems(): behaviour is the
	# same on Python 2, and the bundled python3 fixer (r6) performs exactly
	# this rewrite anyway
	for k,v in kw.items():
		if not hasattr(cls,k):
			setattr(cls,k,v)
def quote_define_name(path):
	"""Turn a path into an upper-case C define name: every non-alphanumeric character becomes '_'."""
	return re.sub("[^a-zA-Z0-9]","_",path).upper()
def quote_whitespace(path):
	"""Wrap path in double quotes when it contains an inner space; collapse doubled quotes."""
	if path.strip().find(' ')>0:
		quoted='"%s"'%path
	else:
		quoted=path
	return quoted.replace('""','"')
def trimquotes(s):
	"""Strip trailing whitespace and remove one pair of surrounding single quotes."""
	if not s:
		return ''
	s=s.rstrip()
	first,last=s[0],s[-1]
	if first=="'"==last:
		return s[1:-1]
	return s
def h_list(lst):
	# Hash an arbitrary (possibly nested) list by digesting its string
	# representation with the md5-compatible hasher selected at import time.
	m=md5()
	m.update(str(lst))
	return m.digest()
def h_fun(fun):
	"""Return a hashable representation of a function.

	The source text is fetched via inspect and cached on fun.code;
	'nocode' is used when the source cannot be read.
	"""
	try:
		return fun.code
	except AttributeError:
		pass
	try:
		h=inspect.getsource(fun)
	except IOError:
		h="nocode"
	try:
		fun.code=h
	except AttributeError:
		# some callables do not accept new attributes; skip caching
		pass
	return h
def pprint(col,str,label='',sep='\n'):
	# Write `str` to stderr in color `col` (via Logs.colors), followed by an
	# optional uncolored label and separator.
	sys.stderr.write("%s%s%s %s%s"%(Logs.colors(col),str,Logs.colors.NORMAL,label,sep))
def check_dir(dir):
	# Ensure the directory exists, creating intermediate directories as
	# needed; failures are wrapped in WafError.
	try:
		os.stat(dir)
	except OSError:
		try:
			os.makedirs(dir)
		except OSError,e:
			raise WafError("Cannot create folder '%s' (original error: %s)"%(dir,e))
def cmd_output(cmd,**kw):
	# Run cmd and return its standard output as a string.
	# kw['silent']: capture stderr and return '' on non-zero exit instead of raising.
	# kw['e']: alias for the Popen 'env' keyword.
	# Raises ValueError when the command cannot be started, or (unless
	# silent) when it exits with a non-zero status.
	silent=False
	if'silent'in kw:
		silent=kw['silent']
		del(kw['silent'])
	if'e'in kw:
		tmp=kw['e']
		del(kw['e'])
		kw['env']=tmp
	# strings go through the shell, argument lists do not
	kw['shell']=isinstance(cmd,str)
	kw['stdout']=pproc.PIPE
	if silent:
		kw['stderr']=pproc.PIPE
	try:
		p=pproc.Popen(cmd,**kw)
		output=p.communicate()[0]
	except OSError,e:
		raise ValueError(str(e))
	if p.returncode:
		if not silent:
			msg="command execution failed: %s -> %r"%(cmd,str(output))
			raise ValueError(msg)
		output=''
	return output
# matches escaped backslashes, escaped dollars, and ${VAR} references
reg_subst=re.compile(r"(\\\\)|(\$\$)|\$\{([^}]+)\}")
def subst_vars(expr,params):
	"""Expand ${VAR} references in expr from params.

	'\\\\' collapses to a single backslash and '$$' to a literal '$'.
	params may be a mapping or an object exposing get_flat(name).
	"""
	def substitute(match):
		if match.group(1):
			return '\\'
		if match.group(2):
			return '$'
		name=match.group(3)
		try:
			return params.get_flat(name)
		except AttributeError:
			return params[name]
	return reg_subst.sub(substitute,expr)
def unversioned_sys_platform_to_binary_format(unversioned_sys_platform):
	"""Map an unversioned platform name to its executable format: 'elf', 'mac-o' or 'pe'.

	Unknown platforms default to 'elf'.
	"""
	non_elf={'darwin':'mac-o','win32':'pe','cygwin':'pe','uwin':'pe','msys':'pe'}
	return non_elf.get(unversioned_sys_platform,'elf')
def unversioned_sys_platform():
	"""Return sys.platform with trailing version digits removed (e.g. 'linux2' -> 'linux').

	Under Jython ('java'), the OS name is queried from java.lang.System and
	normalized to the usual CPython platform names.
	"""
	plat=sys.platform
	if plat=='java':
		from java.lang import System
		plat=System.getProperty('os.name')
		if plat=='Mac OS X':
			return 'darwin'
		elif plat.startswith('Windows '):
			return 'win32'
		elif plat=='OS/2':
			return 'os2'
		elif plat=='HP-UX':
			return 'hpux'
		elif plat in('SunOS','Solaris'):
			return 'sunos'
		else:
			plat=plat.lower()
	# win32 and *os2 (but not sunos2) are returned as-is
	if plat=='win32'or plat.endswith('os2')and plat!='sunos2':
		return plat
	return re.split('\d+$',plat)[0]
def detect_platform():
	"""Guess the build platform: a known unix name found inside sys.platform,
	else os.name when it is posix/java/os2, else sys.platform itself."""
	plat=sys.platform
	for known in 'cygwin linux irix sunos hpux aix darwin gnu'.split():
		if known in plat:
			return known
	if os.name in 'posix java os2'.split():
		return os.name
	return plat
def load_tool(tool,tooldir=None):
	'''
	load_tool: import a Python module, optionally using several directories.
	@param tool [string]: name of tool to import.
	@param tooldir [list]: directories to look for the tool.
	@return: the loaded module.
	Warning: this function is not thread-safe: plays with sys.path,
	so must run in sequence.
	'''
	if not tooldir:
		tooldir=[]
	else:
		assert isinstance(tooldir,list)
		sys.path=tooldir+sys.path
	try:
		return __import__(tool)
	finally:
		# always restore sys.path, even when the import fails
		for entry in tooldir:
			sys.path.remove(entry)
def readf(fname,m='r'):
	"""Read and return the entire content of file fname, opened with mode m."""
	f=open(fname,m)
	try:
		return f.read()
	finally:
		f.close()
def nada(*k,**kw):
	"""Do nothing and return None (placeholder callback accepting any arguments)."""
	return None
def diff_path(top,subdir):
	"""Return subdir expressed relative to top ('' when both normalize equal).

	Paths are normalized and compared as '/'-separated component lists.
	"""
	top_parts=os.path.normpath(top).replace('\\','/').split('/')
	sub_parts=os.path.normpath(subdir).replace('\\','/').split('/')
	if len(top_parts)==len(sub_parts):
		return ''
	tail=sub_parts[len(top_parts)-len(sub_parts):]
	return os.path.join(*tail)
class Context(object):
	# Base class for command contexts: tracks a current directory and knows
	# how to "recurse" into wscript files in other directories.
	def set_curdir(self,dir):
		self.curdir_=dir
	def get_curdir(self):
		# lazily default to the process working directory
		try:
			return self.curdir_
		except AttributeError:
			self.curdir_=os.getcwd()
			return self.get_curdir()
	curdir=property(get_curdir,set_curdir)
	def recurse(self,dirs,name=''):
		# Execute function `name` (defaulting to the caller's own function
		# name) from the wscript of each directory in dirs.  Two layouts are
		# supported: a split file "wscript_<name>" executed as raw code, or
		# a function <name> defined inside "wscript" loaded as a module.
		if not name:
			# use the caller's function name (e.g. 'configure', 'build')
			name=inspect.stack()[1][3]
		if isinstance(dirs,str):
			dirs=to_list(dirs)
		for x in dirs:
			if os.path.isabs(x):
				nexdir=x
			else:
				nexdir=os.path.join(self.curdir,x)
			base=os.path.join(nexdir,WSCRIPT_FILE)
			file_path=base+'_'+name
			try:
				txt=readf(file_path,m='rU')
			except(OSError,IOError):
				# no split file: load the full wscript module and call its function
				try:
					module=load_module(base)
				except OSError:
					raise WscriptError('No such script %s'%base)
				try:
					f=module.__dict__[name]
				except KeyError:
					raise WscriptError('No function %s defined in %s'%(name,base))
				if getattr(self.__class__,'pre_recurse',None):
					self.pre_recurse(f,base,nexdir)
				old=self.curdir
				self.curdir=nexdir
				try:
					f(self)
				finally:
					self.curdir=old
				if getattr(self.__class__,'post_recurse',None):
					self.post_recurse(module,base,nexdir)
			else:
				# split file found: exec its source with `ctx` bound to self
				dc={'ctx':self}
				if getattr(self.__class__,'pre_recurse',None):
					dc=self.pre_recurse(txt,file_path,nexdir)
				old=self.curdir
				self.curdir=nexdir
				try:
					try:
						exec(compile(txt,file_path,'exec'),dc)
					except Exception:
						exc_type,exc_value,tb=sys.exc_info()
						raise WscriptError("".join(traceback.format_exception(exc_type,exc_value,tb)),base)
				finally:
					self.curdir=old
				if getattr(self.__class__,'post_recurse',None):
					self.post_recurse(txt,file_path,nexdir)
if is_win32:
	# Replace shutil.copy2 on win32 so that the stat information is copied
	# explicitly after the file data.
	old=shutil.copy2
	def copy2(src,dst):
		# Bug fix: the previous code called shutil.copystat(src,src) — a
		# no-op on the source file — so dst never received src's
		# timestamps/permission bits.  Copy stats onto dst instead.
		old(src,dst)
		shutil.copystat(src,dst)
	setattr(shutil,'copy2',copy2)
def zip_folder(dir,zip_file_name,prefix):
	"""Deflate-compress everything under dir into zip_file_name.

	Archive member names are built as prefix + path-relative-to-dir.
	"""
	import zipfile
	if prefix and not prefix.endswith(os.sep):
		prefix+=os.sep
	base=os.path.abspath(dir)
	strip=len(base)
	archive=zipfile.ZipFile(zip_file_name,'w',compression=zipfile.ZIP_DEFLATED)
	for root,dirs,files in os.walk(base):
		for name in files:
			arcname=prefix+root[strip:]+os.sep+name
			archive.write(root+os.sep+name,arcname,zipfile.ZIP_DEFLATED)
	archive.close()
def get_elapsed_time(start):
	"""Format the wall-clock time elapsed since start (a datetime) as e.g. '1d2h3m4.005s'.

	Leading zero units are omitted (a 5-minute delta prints as '5m...').
	"""
	delta=datetime.datetime.now()-start
	days=int(delta.days)
	hours=int(delta.seconds/3600)
	minutes=int((delta.seconds-hours*3600)/60)
	seconds=delta.seconds-hours*3600-minutes*60+float(delta.microseconds)/1000/1000
	parts=[]
	if days:
		parts.append('%dd'%days)
	if days or hours:
		parts.append('%dh'%hours)
	if days or hours or minutes:
		parts.append('%dm'%minutes)
	return'%s%.3fs'%(''.join(parts),seconds)
if os.name=='java':
	# Jython's gc module may not implement disable(); when it raises
	# NotImplementedError, alias disable to the (working) enable so later
	# gc.disable()/gc.enable() pairs become harmless no-ops.
	try:
		gc.disable()
		gc.enable()
	except NotImplementedError:
		gc.disable=gc.enable
def run_once(fun):
	"""Memoize a single-argument function; results are cached by argument in wrap.__cache__."""
	cache={}
	def wrap(k):
		if k not in cache:
			cache[k]=fun(k)
		return cache[k]
	wrap.__cache__=cache
	return wrap

View file

@ -0,0 +1,4 @@
#! /usr/bin/env python
# encoding: utf-8

View file

@ -0,0 +1,158 @@
#! /usr/bin/env python
# encoding: utf-8
import sys,os
try:
if(not sys.stderr.isatty())or(not sys.stdout.isatty()):
raise ValueError('not a tty')
from ctypes import*
class COORD(Structure):
_fields_=[("X",c_short),("Y",c_short)]
class SMALL_RECT(Structure):
_fields_=[("Left",c_short),("Top",c_short),("Right",c_short),("Bottom",c_short)]
class CONSOLE_SCREEN_BUFFER_INFO(Structure):
_fields_=[("Size",COORD),("CursorPosition",COORD),("Attributes",c_short),("Window",SMALL_RECT),("MaximumWindowSize",COORD)]
class CONSOLE_CURSOR_INFO(Structure):
_fields_=[('dwSize',c_ulong),('bVisible',c_int)]
sbinfo=CONSOLE_SCREEN_BUFFER_INFO()
csinfo=CONSOLE_CURSOR_INFO()
hconsole=windll.kernel32.GetStdHandle(-11)
windll.kernel32.GetConsoleScreenBufferInfo(hconsole,byref(sbinfo))
if sbinfo.Size.X<10 or sbinfo.Size.Y<10:raise Exception('small console')
windll.kernel32.GetConsoleCursorInfo(hconsole,byref(csinfo))
except Exception:
pass
else:
import re,threading
to_int=lambda number,default:number and int(number)or default
wlock=threading.Lock()
STD_OUTPUT_HANDLE=-11
STD_ERROR_HANDLE=-12
class AnsiTerm(object):
def __init__(self):
self.hconsole=windll.kernel32.GetStdHandle(STD_OUTPUT_HANDLE)
self.cursor_history=[]
self.orig_sbinfo=CONSOLE_SCREEN_BUFFER_INFO()
self.orig_csinfo=CONSOLE_CURSOR_INFO()
windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole,byref(self.orig_sbinfo))
windll.kernel32.GetConsoleCursorInfo(hconsole,byref(self.orig_csinfo))
def screen_buffer_info(self):
sbinfo=CONSOLE_SCREEN_BUFFER_INFO()
windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole,byref(sbinfo))
return sbinfo
def clear_line(self,param):
mode=param and int(param)or 0
sbinfo=self.screen_buffer_info()
if mode==1:
line_start=COORD(0,sbinfo.CursorPosition.Y)
line_length=sbinfo.Size.X
elif mode==2:
line_start=COORD(sbinfo.CursorPosition.X,sbinfo.CursorPosition.Y)
line_length=sbinfo.Size.X-sbinfo.CursorPosition.X
else:
line_start=sbinfo.CursorPosition
line_length=sbinfo.Size.X-sbinfo.CursorPosition.X
chars_written=c_int()
windll.kernel32.FillConsoleOutputCharacterA(self.hconsole,c_char(' '),line_length,line_start,byref(chars_written))
windll.kernel32.FillConsoleOutputAttribute(self.hconsole,sbinfo.Attributes,line_length,line_start,byref(chars_written))
def clear_screen(self,param):
mode=to_int(param,0)
sbinfo=self.screen_buffer_info()
if mode==1:
clear_start=COORD(0,0)
clear_length=sbinfo.CursorPosition.X*sbinfo.CursorPosition.Y
elif mode==2:
clear_start=COORD(0,0)
clear_length=sbinfo.Size.X*sbinfo.Size.Y
windll.kernel32.SetConsoleCursorPosition(self.hconsole,clear_start)
else:
clear_start=sbinfo.CursorPosition
clear_length=((sbinfo.Size.X-sbinfo.CursorPosition.X)+sbinfo.Size.X*(sbinfo.Size.Y-sbinfo.CursorPosition.Y))
chars_written=c_int()
windll.kernel32.FillConsoleOutputCharacterA(self.hconsole,c_char(' '),clear_length,clear_start,byref(chars_written))
windll.kernel32.FillConsoleOutputAttribute(self.hconsole,sbinfo.Attributes,clear_length,clear_start,byref(chars_written))
def push_cursor(self,param):
sbinfo=self.screen_buffer_info()
self.cursor_history.push(sbinfo.CursorPosition)
def pop_cursor(self,param):
if self.cursor_history:
old_pos=self.cursor_history.pop()
windll.kernel32.SetConsoleCursorPosition(self.hconsole,old_pos)
def set_cursor(self,param):
x,sep,y=param.partition(';')
x=to_int(x,1)-1
y=to_int(y,1)-1
sbinfo=self.screen_buffer_info()
new_pos=COORD(min(max(0,x),sbinfo.Size.X),min(max(0,y),sbinfo.Size.Y))
windll.kernel32.SetConsoleCursorPosition(self.hconsole,new_pos)
def set_column(self,param):
x=to_int(param,1)-1
sbinfo=self.screen_buffer_info()
new_pos=COORD(min(max(0,x),sbinfo.Size.X),sbinfo.CursorPosition.Y)
windll.kernel32.SetConsoleCursorPosition(self.hconsole,new_pos)
def move_cursor(self,x_offset=0,y_offset=0):
sbinfo=self.screen_buffer_info()
new_pos=COORD(min(max(0,sbinfo.CursorPosition.X+x_offset),sbinfo.Size.X),min(max(0,sbinfo.CursorPosition.Y+y_offset),sbinfo.Size.Y))
windll.kernel32.SetConsoleCursorPosition(self.hconsole,new_pos)
def move_up(self,param):
self.move_cursor(y_offset=-to_int(param,1))
def move_down(self,param):
self.move_cursor(y_offset=to_int(param,1))
def move_left(self,param):
self.move_cursor(x_offset=-to_int(param,1))
def move_right(self,param):
self.move_cursor(x_offset=to_int(param,1))
def next_line(self,param):
sbinfo=self.screen_buffer_info()
self.move_cursor(x_offset=-sbinfo.CursorPosition.X,y_offset=to_int(param,1))
def prev_line(self,param):
sbinfo=self.screen_buffer_info()
self.move_cursor(x_offset=-sbinfo.CursorPosition.X,y_offset=-to_int(param,1))
escape_to_color={(0,30):0x0,(0,31):0x4,(0,32):0x2,(0,33):0x4+0x2,(0,34):0x1,(0,35):0x1+0x4,(0,36):0x2+0x4,(0,37):0x1+0x2+0x4,(1,30):0x1+0x2+0x4,(1,31):0x4+0x8,(1,32):0x2+0x8,(1,33):0x4+0x2+0x8,(1,34):0x1+0x8,(1,35):0x1+0x4+0x8,(1,36):0x1+0x2+0x8,(1,37):0x1+0x2+0x4+0x8,}
def set_color(self,param):
cols=param.split(';')
attr=self.orig_sbinfo.Attributes
for c in cols:
c=to_int(c,0)
if c in range(30,38):
attr=(attr&0xf0)|(self.escape_to_color.get((0,c),0x7))
elif c in range(40,48):
attr=(attr&0x0f)|(self.escape_to_color.get((0,c),0x7)<<8)
elif c in range(90,98):
attr=(attr&0xf0)|(self.escape_to_color.get((1,c-60),0x7))
elif c in range(100,108):
attr=(attr&0x0f)|(self.escape_to_color.get((1,c-60),0x7)<<8)
elif c==1:
attr|=0x08
windll.kernel32.SetConsoleTextAttribute(self.hconsole,attr)
def show_cursor(self,param):
csinfo.bVisible=1
windll.kernel32.SetConsoleCursorInfo(self.hconsole,byref(csinfo))
def hide_cursor(self,param):
csinfo.bVisible=0
windll.kernel32.SetConsoleCursorInfo(self.hconsole,byref(csinfo))
ansi_command_table={'A':move_up,'B':move_down,'C':move_right,'D':move_left,'E':next_line,'F':prev_line,'G':set_column,'H':set_cursor,'f':set_cursor,'J':clear_screen,'K':clear_line,'h':show_cursor,'l':hide_cursor,'m':set_color,'s':push_cursor,'u':pop_cursor,}
ansi_tokans=re.compile('(?:\x1b\[([0-9?;]*)([a-zA-Z])|([^\x1b]+))')
def write(self,text):
try:
wlock.acquire()
for param,cmd,txt in self.ansi_tokans.findall(text):
if cmd:
cmd_func=self.ansi_command_table.get(cmd)
if cmd_func:
cmd_func(self,param)
else:
chars_written=c_int()
if isinstance(txt,unicode):
windll.kernel32.WriteConsoleW(self.hconsole,txt,len(txt),byref(chars_written),None)
else:
windll.kernel32.WriteConsoleA(self.hconsole,txt,len(txt),byref(chars_written),None)
finally:
wlock.release()
def flush(self):
pass
def isatty(self):
return True
sys.stderr=sys.stdout=AnsiTerm()
os.environ['TERM']='vt100'

View file

@ -0,0 +1,496 @@
#! /usr/bin/env python
# encoding: utf-8
import sys
mswindows=(sys.platform=="win32")
import os
import types
import traceback
import gc
class CalledProcessError(Exception):
	"""Raised by check_call when a command exits with a non-zero status."""
	def __init__(self,returncode,cmd):
		self.returncode=returncode
		self.cmd=cmd
	def __str__(self):
		return "Command '%s' returned non-zero exit status %d"%(self.cmd,self.returncode)
if mswindows:
import threading
import msvcrt
if 0:
import pywintypes
from win32api import GetStdHandle,STD_INPUT_HANDLE,STD_OUTPUT_HANDLE,STD_ERROR_HANDLE
from win32api import GetCurrentProcess,DuplicateHandle,GetModuleFileName,GetVersion
from win32con import DUPLICATE_SAME_ACCESS,SW_HIDE
from win32pipe import CreatePipe
from win32process import CreateProcess,STARTUPINFO,GetExitCodeProcess,STARTF_USESTDHANDLES,STARTF_USESHOWWINDOW,CREATE_NEW_CONSOLE
from win32event import WaitForSingleObject,INFINITE,WAIT_OBJECT_0
else:
from _subprocess import*
class STARTUPINFO:
dwFlags=0
hStdInput=None
hStdOutput=None
hStdError=None
wShowWindow=0
class pywintypes:
error=IOError
else:
import select
import errno
import fcntl
import pickle
__all__=["Popen","PIPE","STDOUT","call","check_call","CalledProcessError"]
try:
MAXFD=os.sysconf("SC_OPEN_MAX")
except:
MAXFD=256
try:
False
except NameError:
False=0
True=1
_active=[]
def _cleanup():
for inst in _active[:]:
if inst.poll(_deadstate=sys.maxint)>=0:
try:
_active.remove(inst)
except ValueError:
pass
PIPE=-1
STDOUT=-2
def call(*popenargs,**kwargs):
	"""Run a command via Popen and return its exit status."""
	proc=Popen(*popenargs,**kwargs)
	return proc.wait()
def check_call(*popenargs,**kwargs):
	"""Run a command via call() and raise CalledProcessError on a non-zero exit status."""
	retcode=call(*popenargs,**kwargs)
	cmd=kwargs.get("args")
	if cmd is None:
		# no explicit 'args' keyword: use the first positional argument
		cmd=popenargs[0]
	if retcode:
		raise CalledProcessError(retcode,cmd)
	return retcode
def list2cmdline(seq):
	"""Translate an argument sequence into one command line string using the
	MS C runtime quoting rules (quote args with whitespace, escape embedded
	quotes, double backslashes that precede a quote)."""
	result=[]
	for arg in seq:
		pending_backslashes=[]
		if result:
			result.append(' ')
		needquote=(" " in arg) or ("\t" in arg) or arg==""
		if needquote:
			result.append('"')
		for ch in arg:
			if ch=='\\':
				# backslashes are deferred: their escaping depends on
				# whether a double quote follows
				pending_backslashes.append(ch)
			elif ch=='"':
				result.append('\\'*len(pending_backslashes)*2)
				pending_backslashes=[]
				result.append('\\"')
			else:
				if pending_backslashes:
					result.extend(pending_backslashes)
					pending_backslashes=[]
				result.append(ch)
		if pending_backslashes:
			result.extend(pending_backslashes)
		if needquote:
			# trailing backslashes before the closing quote are doubled
			result.extend(pending_backslashes)
			result.append('"')
	return ''.join(result)
class Popen(object):
def __init__(self,args,bufsize=0,executable=None,stdin=None,stdout=None,stderr=None,preexec_fn=None,close_fds=False,shell=False,cwd=None,env=None,universal_newlines=False,startupinfo=None,creationflags=0):
_cleanup()
self._child_created=False
if not isinstance(bufsize,(int,long)):
raise TypeError("bufsize must be an integer")
if mswindows:
if preexec_fn is not None:
raise ValueError("preexec_fn is not supported on Windows platforms")
if close_fds:
raise ValueError("close_fds is not supported on Windows platforms")
else:
if startupinfo is not None:
raise ValueError("startupinfo is only supported on Windows platforms")
if creationflags!=0:
raise ValueError("creationflags is only supported on Windows platforms")
self.stdin=None
self.stdout=None
self.stderr=None
self.pid=None
self.returncode=None
self.universal_newlines=universal_newlines
(p2cread,p2cwrite,c2pread,c2pwrite,errread,errwrite)=self._get_handles(stdin,stdout,stderr)
self._execute_child(args,executable,preexec_fn,close_fds,cwd,env,universal_newlines,startupinfo,creationflags,shell,p2cread,p2cwrite,c2pread,c2pwrite,errread,errwrite)
if mswindows:
if stdin is None and p2cwrite is not None:
os.close(p2cwrite)
p2cwrite=None
if stdout is None and c2pread is not None:
os.close(c2pread)
c2pread=None
if stderr is None and errread is not None:
os.close(errread)
errread=None
if p2cwrite:
self.stdin=os.fdopen(p2cwrite,'wb',bufsize)
if c2pread:
if universal_newlines:
self.stdout=os.fdopen(c2pread,'rU',bufsize)
else:
self.stdout=os.fdopen(c2pread,'rb',bufsize)
if errread:
if universal_newlines:
self.stderr=os.fdopen(errread,'rU',bufsize)
else:
self.stderr=os.fdopen(errread,'rb',bufsize)
def _translate_newlines(self,data):
data=data.replace("\r\n","\n")
data=data.replace("\r","\n")
return data
def __del__(self,sys=sys):
if not self._child_created:
return
self.poll(_deadstate=sys.maxint)
if self.returncode is None and _active is not None:
_active.append(self)
def communicate(self,input=None):
if[self.stdin,self.stdout,self.stderr].count(None)>=2:
stdout=None
stderr=None
if self.stdin:
if input:
self.stdin.write(input)
self.stdin.close()
elif self.stdout:
stdout=self.stdout.read()
elif self.stderr:
stderr=self.stderr.read()
self.wait()
return(stdout,stderr)
return self._communicate(input)
if mswindows:
def _get_handles(self,stdin,stdout,stderr):
if stdin is None and stdout is None and stderr is None:
return(None,None,None,None,None,None)
p2cread,p2cwrite=None,None
c2pread,c2pwrite=None,None
errread,errwrite=None,None
if stdin is None:
p2cread=GetStdHandle(STD_INPUT_HANDLE)
if p2cread is not None:
pass
elif stdin is None or stdin==PIPE:
p2cread,p2cwrite=CreatePipe(None,0)
p2cwrite=p2cwrite.Detach()
p2cwrite=msvcrt.open_osfhandle(p2cwrite,0)
elif isinstance(stdin,int):
p2cread=msvcrt.get_osfhandle(stdin)
else:
p2cread=msvcrt.get_osfhandle(stdin.fileno())
p2cread=self._make_inheritable(p2cread)
if stdout is None:
c2pwrite=GetStdHandle(STD_OUTPUT_HANDLE)
if c2pwrite is not None:
pass
elif stdout is None or stdout==PIPE:
c2pread,c2pwrite=CreatePipe(None,0)
c2pread=c2pread.Detach()
c2pread=msvcrt.open_osfhandle(c2pread,0)
elif isinstance(stdout,int):
c2pwrite=msvcrt.get_osfhandle(stdout)
else:
c2pwrite=msvcrt.get_osfhandle(stdout.fileno())
c2pwrite=self._make_inheritable(c2pwrite)
if stderr is None:
errwrite=GetStdHandle(STD_ERROR_HANDLE)
if errwrite is not None:
pass
elif stderr is None or stderr==PIPE:
errread,errwrite=CreatePipe(None,0)
errread=errread.Detach()
errread=msvcrt.open_osfhandle(errread,0)
elif stderr==STDOUT:
errwrite=c2pwrite
elif isinstance(stderr,int):
errwrite=msvcrt.get_osfhandle(stderr)
else:
errwrite=msvcrt.get_osfhandle(stderr.fileno())
errwrite=self._make_inheritable(errwrite)
return(p2cread,p2cwrite,c2pread,c2pwrite,errread,errwrite)
def _make_inheritable(self,handle):
return DuplicateHandle(GetCurrentProcess(),handle,GetCurrentProcess(),0,1,DUPLICATE_SAME_ACCESS)
def _find_w9xpopen(self):
w9xpopen=os.path.join(os.path.dirname(GetModuleFileName(0)),"w9xpopen.exe")
if not os.path.exists(w9xpopen):
w9xpopen=os.path.join(os.path.dirname(sys.exec_prefix),"w9xpopen.exe")
if not os.path.exists(w9xpopen):
raise RuntimeError("Cannot locate w9xpopen.exe, which is needed for Popen to work with your shell or platform.")
return w9xpopen
def _execute_child(self,args,executable,preexec_fn,close_fds,cwd,env,universal_newlines,startupinfo,creationflags,shell,p2cread,p2cwrite,c2pread,c2pwrite,errread,errwrite):
if not isinstance(args,types.StringTypes):
args=list2cmdline(args)
if startupinfo is None:
startupinfo=STARTUPINFO()
if None not in(p2cread,c2pwrite,errwrite):
startupinfo.dwFlags|=STARTF_USESTDHANDLES
startupinfo.hStdInput=p2cread
startupinfo.hStdOutput=c2pwrite
startupinfo.hStdError=errwrite
if shell:
startupinfo.dwFlags|=STARTF_USESHOWWINDOW
startupinfo.wShowWindow=SW_HIDE
comspec=os.environ.get("COMSPEC","cmd.exe")
args=comspec+" /c "+args
if(GetVersion()>=0x80000000L or os.path.basename(comspec).lower()=="command.com"):
w9xpopen=self._find_w9xpopen()
args='"%s" %s'%(w9xpopen,args)
creationflags|=CREATE_NEW_CONSOLE
try:
hp,ht,pid,tid=CreateProcess(executable,args,None,None,1,creationflags,env,cwd,startupinfo)
except pywintypes.error,e:
raise WindowsError(*e.args)
self._child_created=True
self._handle=hp
self.pid=pid
ht.Close()
if p2cread is not None:
p2cread.Close()
if c2pwrite is not None:
c2pwrite.Close()
if errwrite is not None:
errwrite.Close()
def poll(self,_deadstate=None):
if self.returncode is None:
if WaitForSingleObject(self._handle,0)==WAIT_OBJECT_0:
self.returncode=GetExitCodeProcess(self._handle)
return self.returncode
def wait(self):
if self.returncode is None:
obj=WaitForSingleObject(self._handle,INFINITE)
self.returncode=GetExitCodeProcess(self._handle)
return self.returncode
def _readerthread(self,fh,buffer):
buffer.append(fh.read())
def _communicate(self,input):
stdout=None
stderr=None
if self.stdout:
stdout=[]
stdout_thread=threading.Thread(target=self._readerthread,args=(self.stdout,stdout))
stdout_thread.setDaemon(True)
stdout_thread.start()
if self.stderr:
stderr=[]
stderr_thread=threading.Thread(target=self._readerthread,args=(self.stderr,stderr))
stderr_thread.setDaemon(True)
stderr_thread.start()
if self.stdin:
if input is not None:
self.stdin.write(input)
self.stdin.close()
if self.stdout:
stdout_thread.join()
if self.stderr:
stderr_thread.join()
if stdout is not None:
stdout=stdout[0]
if stderr is not None:
stderr=stderr[0]
if self.universal_newlines and hasattr(file,'newlines'):
if stdout:
stdout=self._translate_newlines(stdout)
if stderr:
stderr=self._translate_newlines(stderr)
self.wait()
return(stdout,stderr)
else:
def _get_handles(self,stdin,stdout,stderr):
p2cread,p2cwrite=None,None
c2pread,c2pwrite=None,None
errread,errwrite=None,None
if stdin is None:
pass
elif stdin==PIPE:
p2cread,p2cwrite=os.pipe()
elif isinstance(stdin,int):
p2cread=stdin
else:
p2cread=stdin.fileno()
if stdout is None:
pass
elif stdout==PIPE:
c2pread,c2pwrite=os.pipe()
elif isinstance(stdout,int):
c2pwrite=stdout
else:
c2pwrite=stdout.fileno()
if stderr is None:
pass
elif stderr==PIPE:
errread,errwrite=os.pipe()
elif stderr==STDOUT:
errwrite=c2pwrite
elif isinstance(stderr,int):
errwrite=stderr
else:
errwrite=stderr.fileno()
return(p2cread,p2cwrite,c2pread,c2pwrite,errread,errwrite)
def _set_cloexec_flag(self,fd):
try:
cloexec_flag=fcntl.FD_CLOEXEC
except AttributeError:
cloexec_flag=1
old=fcntl.fcntl(fd,fcntl.F_GETFD)
fcntl.fcntl(fd,fcntl.F_SETFD,old|cloexec_flag)
def _close_fds(self,but):
for i in xrange(3,MAXFD):
if i==but:
continue
try:
os.close(i)
except:
pass
def _execute_child(self,args,executable,preexec_fn,close_fds,cwd,env,universal_newlines,startupinfo,creationflags,shell,p2cread,p2cwrite,c2pread,c2pwrite,errread,errwrite):
if isinstance(args,types.StringTypes):
args=[args]
else:
args=list(args)
if shell:
args=["/bin/sh","-c"]+args
if executable is None:
executable=args[0]
errpipe_read,errpipe_write=os.pipe()
self._set_cloexec_flag(errpipe_write)
gc_was_enabled=gc.isenabled()
gc.disable()
try:
self.pid=os.fork()
except:
if gc_was_enabled:
gc.enable()
raise
self._child_created=True
if self.pid==0:
try:
if p2cwrite:
os.close(p2cwrite)
if c2pread:
os.close(c2pread)
if errread:
os.close(errread)
os.close(errpipe_read)
if p2cread:
os.dup2(p2cread,0)
if c2pwrite:
os.dup2(c2pwrite,1)
if errwrite:
os.dup2(errwrite,2)
if p2cread and p2cread not in(0,):
os.close(p2cread)
if c2pwrite and c2pwrite not in(p2cread,1):
os.close(c2pwrite)
if errwrite and errwrite not in(p2cread,c2pwrite,2):
os.close(errwrite)
if close_fds:
self._close_fds(but=errpipe_write)
if cwd is not None:
os.chdir(cwd)
if preexec_fn:
apply(preexec_fn)
if env is None:
os.execvp(executable,args)
else:
os.execvpe(executable,args,env)
except:
exc_type,exc_value,tb=sys.exc_info()
exc_lines=traceback.format_exception(exc_type,exc_value,tb)
exc_value.child_traceback=''.join(exc_lines)
os.write(errpipe_write,pickle.dumps(exc_value))
os._exit(255)
if gc_was_enabled:
gc.enable()
os.close(errpipe_write)
if p2cread and p2cwrite:
os.close(p2cread)
if c2pwrite and c2pread:
os.close(c2pwrite)
if errwrite and errread:
os.close(errwrite)
data=os.read(errpipe_read,1048576)
os.close(errpipe_read)
if data!="":
os.waitpid(self.pid,0)
child_exception=pickle.loads(data)
raise child_exception
def _handle_exitstatus(self,sts):
if os.WIFSIGNALED(sts):
self.returncode=-os.WTERMSIG(sts)
elif os.WIFEXITED(sts):
self.returncode=os.WEXITSTATUS(sts)
else:
raise RuntimeError("Unknown child exit status!")
def poll(self,_deadstate=None):
if self.returncode is None:
try:
pid,sts=os.waitpid(self.pid,os.WNOHANG)
if pid==self.pid:
self._handle_exitstatus(sts)
except os.error:
if _deadstate is not None:
self.returncode=_deadstate
return self.returncode
def wait(self):
if self.returncode is None:
pid,sts=os.waitpid(self.pid,0)
self._handle_exitstatus(sts)
return self.returncode
def _communicate(self,input):
read_set=[]
write_set=[]
stdout=None
stderr=None
if self.stdin:
self.stdin.flush()
if input:
write_set.append(self.stdin)
else:
self.stdin.close()
if self.stdout:
read_set.append(self.stdout)
stdout=[]
if self.stderr:
read_set.append(self.stderr)
stderr=[]
input_offset=0
while read_set or write_set:
rlist,wlist,xlist=select.select(read_set,write_set,[])
if self.stdin in wlist:
bytes_written=os.write(self.stdin.fileno(),buffer(input,input_offset,512))
input_offset+=bytes_written
if input_offset>=len(input):
self.stdin.close()
write_set.remove(self.stdin)
if self.stdout in rlist:
data=os.read(self.stdout.fileno(),1024)
if data=="":
self.stdout.close()
read_set.remove(self.stdout)
stdout.append(data)
if self.stderr in rlist:
data=os.read(self.stderr.fileno(),1024)
if data=="":
self.stderr.close()
read_set.remove(self.stderr)
stderr.append(data)
if stdout is not None:
stdout=''.join(stdout)
if stderr is not None:
stderr=''.join(stderr)
if self.universal_newlines and hasattr(file,'newlines'):
if stdout:
stdout=self._translate_newlines(stdout)
if stderr:
stderr=self._translate_newlines(stderr)
self.wait()
return(stdout,stderr)

View file

@ -0,0 +1,104 @@
#! /usr/bin/env python
# encoding: utf-8
import os
all_modifs={}
def modif(dir,name,fun):
	"""Apply the text transform fun to the file dir/name, rewriting it in place.

	With name='*', every '*.py' file under the '.', 'Tools' and '3rdparty'
	subdirectories of dir is processed.
	"""
	if name=='*':
		for sub in '. Tools 3rdparty'.split():
			for entry in os.listdir(os.path.join(dir,sub)):
				if entry.endswith('.py'):
					modif(dir,sub+os.sep+entry,fun)
		return
	filename=os.path.join(dir,name)
	f=open(filename,'r')
	try:
		txt=f.read()
	finally:
		f.close()
	txt=fun(txt)
	f=open(filename,'w')
	try:
		f.write(txt)
	finally:
		f.close()
def subst(filename):
	"""Decorator factory: register the decorated function as a source
	transform for `filename` in the global all_modifs registry.

	Bug fix: registering a SECOND function for the same filename used
	'all_modifs[filename]+=fun', which raises TypeError because a function
	is not iterable; append to the list instead.
	"""
	def do_subst(fun):
		global all_modifs
		try:
			all_modifs[filename].append(fun)
		except KeyError:
			all_modifs[filename]=[fun]
		return fun
	return do_subst
def r1(code):
	"""Py3 source fix: byte-string hash seed and ABI bump."""
	code=code.replace("'iluvcuteoverload'","b'iluvcuteoverload'")
	return code.replace("ABI=7","ABI=37")
def r2(code):
	"""Py3 source fix: byte-literal writes and decoding of subprocess output."""
	for old,new in (
		("p.stdin.write('\\n')","p.stdin.write(b'\\n')"),
		('p.communicate()[0]','p.communicate()[0].decode("utf-8")'),
	):
		code=code.replace(old,new)
	return code
def r3(code):
	"""Py3 source fix: encode strings before hashing, decode subprocess output."""
	for old,new in (
		("m.update(str(lst))","m.update(str(lst).encode())"),
		('p.communicate()[0]','p.communicate()[0].decode("utf-8")'),
	):
		code=code.replace(old,new)
	return code
def r33(code):
	"""ansiterm.py: Python 3 has no `unicode` type; `str` replaces it."""
	return code.replace('unicode', 'str')
def r4(code):
	"""Task.py: encode md5 inputs, port the metaclass syntax, hexlify signatures."""
	pairs = (
		("up(self.__class__.__name__)", "up(self.__class__.__name__.encode())"),
		("up(self.env.variant())", "up(self.env.variant().encode())"),
		("up(x.parent.abspath())", "up(x.parent.abspath().encode())"),
		("up(x.name)", "up(x.name.encode())"),
		# py3 metaclass keyword syntax; binascii is needed by the hexlify fix below.
		('class TaskBase(object):\n\t__metaclass__=store_task_type', 'import binascii\n\nclass TaskBase(object, metaclass=store_task_type):'),
		('keys=self.cstr_groups.keys()', 'keys=list(self.cstr_groups.keys())'),
		("sig.encode('hex')", 'binascii.hexlify(sig)'),
		("os.path.join(Options.cache_global,ssig)", "os.path.join(Options.cache_global,ssig.decode())"),
	)
	for old, new in pairs:
		code = code.replace(old, new)
	return code
def r5(code):
	"""Build.py: drop the pickle protocol arg and snapshot dict values before iterating."""
	for old, new in (
		("cPickle.dump(data,file,-1)", "cPickle.dump(data,file)"),
		('for node in src_dir_node.childs.values():', 'for node in list(src_dir_node.childs.values()):'),
	):
		code = code.replace(old, new)
	return code
def r6(code):
	"""All files: blanket py2->py3 renames (xrange, dict iterators, except syntax)."""
	for old, new in (
		('xrange', 'range'),
		('iteritems', 'items'),
		('maxint', 'maxsize'),
		('iterkeys', 'keys'),
		('Error,e:', 'Error as e:'),
		('Exception,e:', 'Exception as e:'),
	):
		code = code.replace(old, new)
	return code
def r7(code):
	"""TaskGen.py: rewrite the __metaclass__ attribute as a py3 metaclass keyword."""
	return code.replace(
		'class task_gen(object):\n\t__metaclass__=register_obj',
		'class task_gen(object, metaclass=register_obj):')
def r8(code):
	"""Tools/python.py: decode subprocess output to text."""
	return code.replace('proc.communicate()[0]', 'proc.communicate()[0].decode("utf-8")')
def r9(code):
	"""Tools/glib2.py: encode text before writing to a binary file handle."""
	return code.replace('f.write(c)', 'f.write(c.encode("utf-8"))')
def r10(code):
	"""Tools/config_c.py: decode test-runner output; tolerate bytes cache keys."""
	for old, new in (
		# Insert a best-effort decode of the cached success key after its read.
		("key=kw['success']", "key=kw['success']\n\t\t\t\ttry:\n\t\t\t\t\tkey=key.decode('utf-8')\n\t\t\t\texcept:\n\t\t\t\t\tpass"),
		('out=str(out)', 'out=out.decode("utf-8")'),
		('err=str(err)', 'err=err.decode("utf-8")'),
	):
		code = code.replace(old, new)
	return code
def r11(code):
	"""Tools/d.py: decode the stripped subprocess result to text."""
	return code.replace('ret.strip()', 'ret.strip().decode("utf-8")')
def fixdir(dir):
	"""Run every registered fixup against the wafadmin tree under `dir`."""
	global all_modifs
	base = os.path.join(dir, 'wafadmin')
	for fname, fixups in all_modifs.items():
		for fixup in fixups:
			modif(base, fname, fixup)
# Wire each fixer to the wafadmin source file it rewrites for Python 3
# ('*' applies r6 to every .py file).
for _fname, _fix in (
	('Constants.py', r1),
	('Tools/ccroot.py', r2),
	('Utils.py', r3),
	('ansiterm.py', r33),
	('Task.py', r4),
	('Build.py', r5),
	('*', r6),
	('TaskGen.py', r7),
	('Tools/python.py', r8),
	('Tools/glib2.py', r9),
	('Tools/config_c.py', r10),
	('Tools/d.py', r11),
):
	subst(_fname)(_fix)

159
waf-unpacked Executable file
View file

@ -0,0 +1,159 @@
#!/usr/bin/env python
# encoding: ISO-8859-1
# Thomas Nagy, 2005-2010
"""
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. The name of the author may not be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
"""
# --- bootstrap: interpreter check, optional psyco, embedded-payload constants ---
import os, sys
# waf only needs Python >= 2.3; refuse anything older up front.
if sys.hexversion<0x203000f: raise ImportError("Waf requires Python >= 2.3")
# Opt-in psyco JIT acceleration (Python 2 only); silently ignored if missing.
if 'PSYCOWAF' in os.environ:
	try:import psyco;psyco.full()
	except:pass
VERSION="1.5.19"
REVISION="d046dea57dbefcabd071866ac51eb518"
INSTALL=''
# C1/C2 are the escape markers used when the bzip2 payload was embedded in this
# script: they stand in for '\n' and '\r' so the payload fits on one line.
C1='#*'
C2='#%'
cwd = os.getcwd()
join = os.path.join
WAF='waf'
def b(x):
	# Python 2: marker strings are already byte strings, identity suffices.
	return x
if sys.hexversion>0x300000f:
	# Python 3: unpack_wafdir compares markers against bytes read from the
	# binary stream, so b() must encode; the cache dir also gets its own name.
	WAF='waf3'
	def b(x):
		return x.encode()
def err(m):
	# Print the message in red via ANSI escape codes and abort the script.
	print(('\033[91mError: %s\033[0m' % m))
	sys.exit(1)
def unpack_wafdir(dir):
	"""Extract the wafadmin library embedded in this script into `dir`.

	The library travels as a bzip2 tarball appended to this very file between
	'#==>' and '#<==' marker lines, with newlines/carriage returns escaped as
	C1/C2 so the payload stays on a single line.  On Python 3 the freshly
	extracted sources are additionally run through py3kfixes.  Exits via
	err() on any corruption or unwritable destination.
	"""
	# Scan our own file for the single payload line between the markers.
	f = open(sys.argv[0],'rb')
	c = "corrupted waf (%d)"
	while 1:
		line = f.readline()
		if not line: err("run waf-light from a folder containing wafadmin")
		if line == b('#==>\n'):
			txt = f.readline()
			if not txt: err(c % 1)
			if f.readline()!=b('#<==\n'): err(c % 2)
			break
	# NOTE(review): f is never closed; harmless for a short-lived script but untidy.
	if not txt: err(c % 3)
	# Strip the leading '#' and trailing newline, then undo the C1/C2 escaping.
	txt = txt[1:-1].replace(b(C1), b('\n')).replace(b(C2), b('\r'))
	import shutil, tarfile
	# Start from a clean destination; makedirs also creates dir/wafadmin itself.
	try: shutil.rmtree(dir)
	except OSError: pass
	try:
		for x in ['Tools', '3rdparty']:
			os.makedirs(join(dir, 'wafadmin', x))
	except OSError:
		err("Cannot unpack waf lib into %s\nMove waf into a writeable directory" % dir)
	os.chdir(dir)
	tmp = 't.bz2'
	t = open(tmp,'wb')
	t.write(txt)
	t.close()
	t = None
	try:
		t = tarfile.open(tmp)
	except:
		# tarfile may lack bz2 support; fall back to an external bunzip2.
		try:
			os.system('bunzip2 t.bz2')
			t = tarfile.open('t')
		except:
			# Clean up the half-made cache dir before bailing out.
			os.chdir(cwd)
			try: shutil.rmtree(dir)
			except OSError: pass
			err("Waf cannot be unpacked, check that bzip2 support is present")
	for x in t: t.extract(x)
	t.close()
	# NOTE(review): the temporary 't.bz2'/'t' file is left behind in `dir`.
	for x in ['Tools', '3rdparty']:
		os.chmod(join('wafadmin',x), 493)  # 493 == 0o755
	if sys.hexversion>0x300000f:
		# Python 3: patch the just-extracted py2 sources in place.
		sys.path = [join(dir, 'wafadmin')] + sys.path
		import py3kfixes
		py3kfixes.fixdir(dir)
	os.chdir(cwd)
def test(dir):
try: os.stat(join(dir, 'wafadmin')); return os.path.abspath(dir)
except OSError: pass
def find_lib():
	"""Locate the directory that contains 'wafadmin' and return its path.

	Search order: $WAFDIR override, next to a 'waf-light' checkout, the
	system install prefixes, then a per-script hidden cache directory —
	which is created by unpacking the payload embedded in this file when
	nothing else matches.
	"""
	name = sys.argv[0]
	base = os.path.dirname(os.path.abspath(name))
	#devs use $WAFDIR
	w=test(os.environ.get('WAFDIR', ''))
	if w: return w
	#waf-light: the library must sit beside the script, never unpacked
	if name.endswith('waf-light'):
		w = test(base)
		if w: return w
		err("waf-light requires wafadmin -> export WAFDIR=/folder")
	# system-wide installs, version/revision-qualified
	dir = "/lib/%s-%s-%s/" % (WAF, VERSION, REVISION)
	for i in [INSTALL,'/usr','/usr/local','/opt']:
		w = test(i+dir)
		if w: return w
	#waf-local: hidden cache dir next to the script (no leading dot on win32)
	s = '.%s-%s-%s'
	if sys.platform == 'win32': s = s[1:]
	dir = join(base, s % (WAF, VERSION, REVISION))
	w = test(dir)
	if w: return w
	#unpack the embedded payload into the cache dir as a last resort
	unpack_wafdir(dir)
	return dir
# Resolve (unpacking on first run) the wafadmin library and put its
# directories at the front of sys.path, then hand control to waf proper.
wafdir = find_lib()
w = join(wafdir, 'wafadmin')
t = join(w, 'Tools')
f = join(w, '3rdparty')
sys.path = [w, t, f] + sys.path
if __name__ == '__main__':
	# Scripting lives in the wafadmin dir added to sys.path above.
	import Scripting
	Scripting.prepare(t, cwd, VERSION, wafdir)