Compare commits

...

16 Commits

Author SHA1 Message Date
560ff5298e Bump version, add history. 2018-06-23 21:37:05 -05:00
75b04d80e7 Avoid local project-config in git. 2018-06-23 17:28:51 -05:00
bb82002af6 Merge branch 'develop' 2018-06-23 16:34:17 -05:00
5d8818ddcb architecture: add support for __ARM_ARCH macro (#74)
gcc-6 defines __ARM_ARCH
2018-06-12 07:40:29 -05:00
11569d270c add detection for PTX architecture (#73) 2018-06-12 07:39:43 -05:00
c8c0134926 Don't test Appveyor PR branch. (#79)
Another PR test.
2018-05-16 07:19:08 -05:00
c37ef8b959 Remove clang-6, as PR test. (#78) 2018-05-15 09:36:27 -05:00
aba0bb4470 Need full recursive rmdir as it might not be empty. 2018-05-14 23:21:10 -05:00
02d09e78b6 Fix typo on ci script call. 2018-05-14 23:12:18 -05:00
7c0c1f2cb0 Don't do git command on lib dir, and add latest clangs. 2018-05-14 23:07:07 -05:00
d7bad38b5d Make the lib test generic to main or PRs. 2018-05-14 22:59:34 -05:00
9e73c55298 Switch to local CI scripts to make progress on CI errors. 2018-05-13 22:59:56 -05:00
7a4ca9dc90 add nvcc compiler detection (#72) 2018-05-13 10:22:00 -05:00
e8681a9b37 Add support for detecting CUDA (#71)
* add support for detecting CUDA
* add test for new make macro
2018-05-13 10:21:13 -05:00
9e31c6455c Remove undefined BOOST_ARCH_AMD64 (#75)
Remove reference to BOOST_ARCH_AMD64 as it has not been defined since commit b10e3490e.
2018-05-13 10:19:21 -05:00
0bccaa9ead issue-76: fix syntax error in jamfile (#77) 2018-05-13 10:17:57 -05:00
21 changed files with 1719 additions and 39 deletions

1
.gitignore vendored

@ -1,2 +1,3 @@
/bin
/boost-build.jam
project-config.jam

View File

@ -5,7 +5,13 @@
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>org.python.pydev.PyDevBuilder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>org.python.pydev.pythonNature</nature>
</natures>
</projectDescription>

5
.pydevproject Normal file

@ -0,0 +1,5 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<?eclipse-pydev version="1.0"?><pydev_project>
<pydev_property name="org.python.pydev.PYTHON_PROJECT_INTERPRETER">Default</pydev_property>
<pydev_property name="org.python.pydev.PYTHON_PROJECT_VERSION">python interpreter</pydev_property>
</pydev_project>

View File

@ -61,10 +61,8 @@ matrix:
- env: TOOLSET=clang-3.7
- env: TOOLSET=clang-3.8
- env: TOOLSET=clang-3.9
- env: TOOLSET=clang-4.0 CXXFLAGS=-std=c++03
- env: TOOLSET=clang-4.0 CXXFLAGS=-std=c++11
- env: TOOLSET=clang-4.0 CXXFLAGS=-std=c++14
- env: TOOLSET=clang-4.0 CXXFLAGS=-std=c++1z
- env: TOOLSET=clang-4.0
- env: TOOLSET=clang-5.0
- env: TOOLSET=gcc-4.7
- env: TOOLSET=gcc-4.8
- env: TOOLSET=gcc-4.9
@ -99,21 +97,13 @@ matrix:
os: osx
osx_image: xcode8.3
# These are the standard steps to bootstrap the Boost CI scripts
# and to forward the actions to the scripts.
before_install:
# Fetch the scripts to do the actual building/testing.
- |
wget "https://raw.githubusercontent.com/boostorg/regression/develop/ci/src/ci_boost_common.py" -P ..
wget "https://raw.githubusercontent.com/boostorg/regression/develop/ci/src/ci_boost_library_test.py" -P ..
install: python "${TRAVIS_BUILD_DIR}/../ci_boost_library_test.py" install
before_script: python "${TRAVIS_BUILD_DIR}/../ci_boost_library_test.py" before_script
script: python "${TRAVIS_BUILD_DIR}/../ci_boost_library_test.py" script
before_cache: python "${TRAVIS_BUILD_DIR}/../ci_boost_library_test.py" before_cache
after_success: python "${TRAVIS_BUILD_DIR}/../ci_boost_library_test.py" after_success
after_failure: python "${TRAVIS_BUILD_DIR}/../ci_boost_library_test.py" after_failure
after_script: python "${TRAVIS_BUILD_DIR}/../ci_boost_library_test.py" after_script
install: python "${TRAVIS_BUILD_DIR}/tools/ci/library_test.py" install
before_script: python "${TRAVIS_BUILD_DIR}/tools/ci/library_test.py" before_script
script: python "${TRAVIS_BUILD_DIR}/tools/ci/library_test.py" script
before_cache: python "${TRAVIS_BUILD_DIR}/tools/ci/library_test.py" before_cache
after_success: python "${TRAVIS_BUILD_DIR}/tools/ci/library_test.py" after_success
after_failure: python "${TRAVIS_BUILD_DIR}/tools/ci/library_test.py" after_failure
after_script: python "${TRAVIS_BUILD_DIR}/tools/ci/library_test.py" after_script
cache:
directories:
- $HOME/boostorg/boost

View File

@ -132,21 +132,18 @@ configuration:
- debug
- release
# These are the standard steps to bootstrap the Boost CI scripts
# and to forward the actions to the scripts.
# No need to test PR branch as the PR itself is already tested.
skip_branch_with_pr: true
init:
# Fetch the scripts to do the actual building/testing.
- cd %APPVEYOR_BUILD_FOLDER%/..
- appveyor DownloadFile "https://raw.githubusercontent.com/boostorg/regression/develop/ci/src/ci_boost_common.py"
- appveyor DownloadFile "https://raw.githubusercontent.com/boostorg/regression/develop/ci/src/ci_boost_library_test.py"
- cd %APPVEYOR_BUILD_FOLDER%
install: python ../ci_boost_library_test.py install
before_build: python ../ci_boost_library_test.py before_build
build_script: python ../ci_boost_library_test.py build_script
after_build: python ../ci_boost_library_test.py after_build
before_test: python ../ci_boost_library_test.py before_test
test_script: python ../ci_boost_library_test.py test_script
after_test: python ../ci_boost_library_test.py after_test
on_success: python ../ci_boost_library_test.py on_success
on_failure: python ../ci_boost_library_test.py on_failure
on_finish: python ../ci_boost_library_test.py on_finish
install: python tools/ci/library_test.py install
before_build: python tools/ci/library_test.py before_build
build_script: python tools/ci/library_test.py build_script
after_build: python tools/ci/library_test.py after_build
before_test: python tools/ci/library_test.py before_test
test_script: python tools/ci/library_test.py test_script
after_test: python tools/ci/library_test.py after_test
on_success: python tools/ci/library_test.py on_success
on_failure: python tools/ci/library_test.py on_failure
on_finish: python tools/ci/library_test.py on_finish

View File

@ -7,6 +7,14 @@ http://www.boost.org/LICENSE_1_0.txt)
[section History]
[heading 1.8]
* Add support for __ARM_ARCH macro. (from Tim Blechmann)
* Add detection for PTX architecture. (from Benjamin Worpitz)
* Add nvcc compiler detection. (from Benjamin Worpitz)
* Add support for detecting CUDA. (from Benjamin Worpitz)
* Remove reference to obsolete BOOST_ARCH_AMD64. (from Peter Kolbus)
[heading 1.7]
* Fix BOOST_ARCH_PARISK/BOOST_ARCH_PARISC typo.

View File

@ -19,6 +19,7 @@ http://www.boost.org/LICENSE_1_0.txt)
#include <boost/predef/architecture/mips.h>
#include <boost/predef/architecture/parisc.h>
#include <boost/predef/architecture/ppc.h>
#include <boost/predef/architecture/ptx.h>
#include <boost/predef/architecture/pyramid.h>
#include <boost/predef/architecture/rs6k.h>
#include <boost/predef/architecture/sparc.h>

View File

@ -32,6 +32,7 @@ http://www.boost.org/LICENSE_1_0.txt)
[[`__arm64`] [8.0.0]]
[[`__TARGET_ARCH_ARM`] [V.0.0]]
[[`__TARGET_ARCH_THUMB`] [V.0.0]]
[[`__ARM_ARCH`] [V.0.0]]
[[`_M_ARM`] [V.0.0]]
[[`_M_ARM64`] [8.0.0]]
]
@ -41,6 +42,7 @@ http://www.boost.org/LICENSE_1_0.txt)
#if defined(__arm__) || defined(__arm64) || defined(__thumb__) || \
defined(__TARGET_ARCH_ARM) || defined(__TARGET_ARCH_THUMB) || \
defined(__ARM_ARCH) || \
defined(_M_ARM) || defined(_M_ARM64)
# undef BOOST_ARCH_ARM
# if !defined(BOOST_ARCH_ARM) && defined(__arm64)
@ -52,6 +54,9 @@ http://www.boost.org/LICENSE_1_0.txt)
# if !defined(BOOST_ARCH_ARM) && defined(__TARGET_ARCH_THUMB)
# define BOOST_ARCH_ARM BOOST_VERSION_NUMBER(__TARGET_ARCH_THUMB,0,0)
# endif
# if !defined(BOOST_ARCH_ARM) && defined(__ARM_ARCH)
# define BOOST_ARCH_ARM BOOST_VERSION_NUMBER(__ARM_ARCH,0,0)
# endif
# if !defined(BOOST_ARCH_ARM) && defined(_M_ARM64)
# define BOOST_ARCH_ARM BOOST_VERSION_NUMBER(8,0,0)
# endif
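
Usage sketch (illustrative, not part of this changeset): with __ARM_ARCH feeding the detection, a compiler such as gcc-6 that defines only that macro still yields a usable BOOST_ARCH_ARM value.

/* Illustrative sketch: querying the ARM architecture version that the
   updated header can now derive from __ARM_ARCH. */
#include <boost/predef/architecture/arm.h>
#include <boost/predef/version_number.h>
#include <iostream>

int main()
{
#if BOOST_ARCH_ARM
    std::cout << "ARM detected, encoded version: " << BOOST_ARCH_ARM << "\n";
#else
    std::cout << "not an ARM target\n";
#endif
#if BOOST_ARCH_ARM >= BOOST_VERSION_NUMBER(8,0,0)
    std::cout << "ARMv8 or newer\n";
#endif
    return 0;
}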

View File

@ -0,0 +1,44 @@
/*
Copyright Benjamin Worpitz 2018
Distributed under the Boost Software License, Version 1.0.
(See accompanying file LICENSE_1_0.txt or copy at
http://www.boost.org/LICENSE_1_0.txt)
*/
#ifndef BOOST_PREDEF_ARCHITECTURE_PTX_H
#define BOOST_PREDEF_ARCHITECTURE_PTX_H
#include <boost/predef/version_number.h>
#include <boost/predef/make.h>
/*`
[heading `BOOST_ARCH_PTX`]
[@https://en.wikipedia.org/wiki/Parallel_Thread_Execution PTX] architecture.
[table
[[__predef_symbol__] [__predef_version__]]
[[`__CUDA_ARCH__`] [__predef_detection__]]
[[`__CUDA_ARCH__`] [V.R.0]]
]
*/
#define BOOST_ARCH_PTX BOOST_VERSION_NUMBER_NOT_AVAILABLE
#if defined(__CUDA_ARCH__)
# undef BOOST_ARCH_PTX
# define BOOST_ARCH_PTX BOOST_PREDEF_MAKE_10_VR0(__CUDA_ARCH__)
#endif
#if BOOST_ARCH_PTX
# define BOOST_ARCH_PTX_AVAILABLE
#endif
#define BOOST_ARCH_PTX_NAME "PTX"
#endif
#include <boost/predef/detail/test.h>
BOOST_PREDEF_DECLARE_TEST(BOOST_ARCH_PTX,BOOST_ARCH_PTX_NAME)
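
Usage sketch (illustrative, not part of this changeset): BOOST_ARCH_PTX stays zero in ordinary host compilation and only carries a version during nvcc's device pass, where __CUDA_ARCH__ (for example 350) is decomposed to 3.5.0 by BOOST_PREDEF_MAKE_10_VR0.

/* Illustrative sketch: compiles as plain C++, where BOOST_ARCH_PTX is 0;
   under nvcc's device pass __CUDA_ARCH__ is defined and BOOST_ARCH_PTX
   carries the PTX target version instead. */
#include <boost/predef/architecture/ptx.h>
#include <boost/predef/version_number.h>
#include <iostream>

int main()
{
#if BOOST_ARCH_PTX
    std::cout << BOOST_ARCH_PTX_NAME << " target, version " << BOOST_ARCH_PTX << "\n";
#else
    std::cout << "host pass or non-CUDA compiler; PTX not targeted\n";
#endif
#if BOOST_ARCH_PTX >= BOOST_VERSION_NUMBER(3,5,0)
    // Code that needs an sm_35 (__CUDA_ARCH__ 350) or newer target could go here.
#endif
    return 0;
}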

View File

@ -32,6 +32,7 @@ http://www.boost.org/LICENSE_1_0.txt)
#include <boost/predef/compiler/metrowerks.h>
#include <boost/predef/compiler/microtec.h>
#include <boost/predef/compiler/mpw.h>
#include <boost/predef/compiler/nvcc.h>
#include <boost/predef/compiler/palm.h>
#include <boost/predef/compiler/pgi.h>
#include <boost/predef/compiler/sgi_mipspro.h>

View File

@ -0,0 +1,60 @@
/*
Copyright Benjamin Worpitz 2018
Distributed under the Boost Software License, Version 1.0.
(See accompanying file LICENSE_1_0.txt or copy at
http://www.boost.org/LICENSE_1_0.txt)
*/
#ifndef BOOST_PREDEF_COMPILER_NVCC_H
#define BOOST_PREDEF_COMPILER_NVCC_H
#include <boost/predef/version_number.h>
#include <boost/predef/make.h>
/*`
[heading `BOOST_COMP_NVCC`]
[@https://en.wikipedia.org/wiki/NVIDIA_CUDA_Compiler NVCC] compiler.
Version number available as major, minor, and patch beginning with version 7.5.
[table
[[__predef_symbol__] [__predef_version__]]
[[`__NVCC__`] [__predef_detection__]]
[[`__CUDACC_VER_MAJOR__`, `__CUDACC_VER_MINOR__`, `__CUDACC_VER_BUILD__`] [V.R.P]]
]
*/
#define BOOST_COMP_NVCC BOOST_VERSION_NUMBER_NOT_AVAILABLE
#if defined(__NVCC__)
# if !defined(__CUDACC_VER_MAJOR__) || !defined(__CUDACC_VER_MINOR__) || !defined(__CUDACC_VER_BUILD__)
# define BOOST_COMP_NVCC_DETECTION BOOST_VERSION_NUMBER_AVAILABLE
# else
# define BOOST_COMP_NVCC_DETECTION BOOST_VERSION_NUMBER(__CUDACC_VER_MAJOR__, __CUDACC_VER_MINOR__, __CUDACC_VER_BUILD__)
# endif
#endif
#ifdef BOOST_COMP_NVCC_DETECTION
# if defined(BOOST_PREDEF_DETAIL_COMP_DETECTED)
# define BOOST_COMP_NVCC_EMULATED BOOST_COMP_NVCC_DETECTION
# else
# undef BOOST_COMP_NVCC
# define BOOST_COMP_NVCC BOOST_COMP_NVCC_DETECTION
# endif
# define BOOST_COMP_NVCC_AVAILABLE
# include <boost/predef/detail/comp_detected.h>
#endif
#define BOOST_COMP_NVCC_NAME "NVCC"
#endif
#include <boost/predef/detail/test.h>
BOOST_PREDEF_DECLARE_TEST(BOOST_COMP_NVCC,BOOST_COMP_NVCC_NAME)
#ifdef BOOST_COMP_NVCC_EMULATED
#include <boost/predef/detail/test.h>
BOOST_PREDEF_DECLARE_TEST(BOOST_COMP_NVCC_EMULATED,BOOST_COMP_NVCC_NAME)
#endif
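
Usage sketch (illustrative, not part of this changeset): because nvcc drives a host compiler, another compiler may already have been detected when the aggregate compiler.h header is used, in which case the header reports BOOST_COMP_NVCC_EMULATED rather than BOOST_COMP_NVCC.

/* Illustrative sketch: reporting nvcc either as the detected compiler or as
   an "emulated" one alongside the host compiler. */
#include <boost/predef/compiler/nvcc.h>
#include <iostream>

int main()
{
#if BOOST_COMP_NVCC
    std::cout << BOOST_COMP_NVCC_NAME << " detected, version " << BOOST_COMP_NVCC << "\n";
#elif defined(BOOST_COMP_NVCC_EMULATED)
    std::cout << BOOST_COMP_NVCC_NAME << " emulated, version " << BOOST_COMP_NVCC_EMULATED << "\n";
#else
    std::cout << "not compiled with " << BOOST_COMP_NVCC_NAME << "\n";
#endif
    return 0;
}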

View File

@ -13,5 +13,6 @@ http://www.boost.org/LICENSE_1_0.txt)
#include <boost/predef/language/stdc.h>
#include <boost/predef/language/stdcpp.h>
#include <boost/predef/language/objc.h>
#include <boost/predef/language/cuda.h>
#endif

View File

@ -0,0 +1,52 @@
/*
Copyright Benjamin Worpitz 2018
Distributed under the Boost Software License, Version 1.0.
(See accompanying file LICENSE_1_0.txt or copy at
http://www.boost.org/LICENSE_1_0.txt)
*/
#ifndef BOOST_PREDEF_LANGUAGE_CUDA_H
#define BOOST_PREDEF_LANGUAGE_CUDA_H
#include <boost/predef/version_number.h>
#include <boost/predef/make.h>
/*`
[heading `BOOST_LANG_CUDA`]
[@https://en.wikipedia.org/wiki/CUDA CUDA C/C++] language.
If available, the version is detected as VV.RR.P.
[table
[[__predef_symbol__] [__predef_version__]]
[[`__CUDACC__`] [__predef_detection__]]
[[`__CUDA__`] [__predef_detection__]]
[[`CUDA_VERSION`] [VV.RR.P]]
]
*/
#define BOOST_LANG_CUDA BOOST_VERSION_NUMBER_NOT_AVAILABLE
#if defined(__CUDACC__) || defined(__CUDA__)
# undef BOOST_LANG_CUDA
# include <cuda.h>
# if defined(CUDA_VERSION)
# define BOOST_LANG_CUDA BOOST_PREDEF_MAKE_10_VVRRP(CUDA_VERSION)
# else
# define BOOST_LANG_CUDA BOOST_VERSION_NUMBER_AVAILABLE
# endif
#endif
#if BOOST_LANG_CUDA
# define BOOST_LANG_CUDA_AVAILABLE
#endif
#define BOOST_LANG_CUDA_NAME "CUDA C/C++"
#endif
#include <boost/predef/detail/test.h>
BOOST_PREDEF_DECLARE_TEST(BOOST_LANG_CUDA,BOOST_LANG_CUDA_NAME)
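
Usage sketch (illustrative, not part of this changeset): a translation unit can branch on whether it is being compiled as CUDA C/C++, and on the toolkit version when <cuda.h> provides CUDA_VERSION (for example 9000 becomes 9.0.0 via BOOST_PREDEF_MAKE_10_VVRRP).

/* Illustrative sketch: detecting CUDA C/C++ compilation and the toolkit
   version it reports. */
#include <boost/predef/language/cuda.h>
#include <boost/predef/version_number.h>
#include <iostream>

int main()
{
#if BOOST_LANG_CUDA
    std::cout << "compiled as " << BOOST_LANG_CUDA_NAME << ", version " << BOOST_LANG_CUDA << "\n";
#else
    std::cout << "plain C++ translation unit\n";
#endif
#if BOOST_LANG_CUDA >= BOOST_VERSION_NUMBER(9,0,0)
    // Code that relies on a CUDA 9.0 or newer toolkit could be enabled here.
#endif
    return 0;
}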

View File

@ -46,6 +46,8 @@ Macros are:
#define BOOST_PREDEF_MAKE_0X_VVRRPP(V) BOOST_VERSION_NUMBER((V&0xFF0000)>>16,(V&0xFF00)>>8,(V&0xFF))
/*` `BOOST_PREDEF_MAKE_10_VPPP(V)` */
#define BOOST_PREDEF_MAKE_10_VPPP(V) BOOST_VERSION_NUMBER(((V)/1000)%10,0,(V)%1000)
/*` `BOOST_PREDEF_MAKE_10_VR0(V)` */
#define BOOST_PREDEF_MAKE_10_VR0(V) BOOST_VERSION_NUMBER(((V)/100)%10,((V)/10)%10,0)
/*` `BOOST_PREDEF_MAKE_10_VRP(V)` */
#define BOOST_PREDEF_MAKE_10_VRP(V) BOOST_VERSION_NUMBER(((V)/100)%10,((V)/10)%10,(V)%10)
/*` `BOOST_PREDEF_MAKE_10_VRP000(V)` */
@ -64,6 +66,8 @@ Macros are:
#define BOOST_PREDEF_MAKE_10_VV00(V) BOOST_VERSION_NUMBER(((V)/100)%100,0,0)
/*` `BOOST_PREDEF_MAKE_10_VVRR(V)` */
#define BOOST_PREDEF_MAKE_10_VVRR(V) BOOST_VERSION_NUMBER(((V)/100)%100,(V)%100,0)
/*` `BOOST_PREDEF_MAKE_10_VVRRP(V)` */
#define BOOST_PREDEF_MAKE_10_VVRRP(V) BOOST_VERSION_NUMBER(((V)/1000)%100,((V)/10)%100,(V)%10)
/*` `BOOST_PREDEF_MAKE_10_VVRRPP(V)` */
#define BOOST_PREDEF_MAKE_10_VVRRPP(V) BOOST_VERSION_NUMBER(((V)/10000)%100,((V)/100)%100,(V)%100)
/*` `BOOST_PREDEF_MAKE_10_VVRRPPP(V)` */
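
Worked example (illustrative, not part of this changeset; requires C++11 for static_assert) of how the two new decomposition macros split a raw decimal value; it mirrors the checks added to the test further down.

/* Illustrative sketch: how the new macros split a raw decimal value into a
   Boost.Predef version number. */
#include <boost/predef/make.h>
#include <boost/predef/version_number.h>

// __CUDA_ARCH__-style value: 350 -> 3.5.0 (tens digit becomes the minor part).
static_assert(BOOST_PREDEF_MAKE_10_VR0(350) == BOOST_VERSION_NUMBER(3,5,0),
    "VR0: hundreds -> major, tens -> minor, patch forced to 0");

// CUDA_VERSION-style value: 9010 -> 9.1.0.
static_assert(BOOST_PREDEF_MAKE_10_VVRRP(9010) == BOOST_VERSION_NUMBER(9,1,0),
    "VVRRP: thousands -> major, tens -> minor, units -> patch");

int main() { return 0; }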

View File

@ -148,8 +148,7 @@ information and acquired knowledge:
# undef BOOST_ENDIAN_BIG_BYTE
# define BOOST_ENDIAN_BIG_BYTE BOOST_VERSION_NUMBER_AVAILABLE
# endif
# if BOOST_ARCH_AMD64 || \
BOOST_ARCH_IA64 || \
# if BOOST_ARCH_IA64 || \
BOOST_ARCH_X86 || \
BOOST_ARCH_BLACKFIN
# undef BOOST_ENDIAN_LITTLE_BYTE
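
For reference (illustrative, not part of this changeset): 64-bit x86 targets keep matching this branch through BOOST_ARCH_X86, so dropping the never-defined BOOST_ARCH_AMD64 does not change the deduced endianness.

/* Illustrative sketch: on a 64-bit x86 target BOOST_ARCH_X86 is still set
   (via BOOST_ARCH_X86_64), so the little-endian deduction keeps working
   without BOOST_ARCH_AMD64. */
#include <boost/predef/architecture/x86.h>
#include <boost/predef/other/endian.h>
#include <iostream>

int main()
{
#if BOOST_ARCH_X86_64
    std::cout << "x86-64 target, BOOST_ARCH_X86 is set\n";
#endif
#if BOOST_ENDIAN_LITTLE_BYTE
    std::cout << "little-endian byte order deduced\n";
#endif
    return 0;
}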

View File

@ -10,6 +10,6 @@ http://www.boost.org/LICENSE_1_0.txt)
#include <boost/predef/version_number.h>
#define BOOST_PREDEF_VERSION BOOST_VERSION_NUMBER(1,7,0)
#define BOOST_PREDEF_VERSION BOOST_VERSION_NUMBER(1,8,0)
#endif

View File

@ -38,10 +38,12 @@ void test_BOOST_VERSION_NUMBER()
PREDEF_CHECK(BOOST_PREDEF_MAKE_0X_VRRPP000(0xFFFFF000) == BOOST_VERSION_NUMBER(0xF,0xFF,0xFF));
PREDEF_CHECK(BOOST_PREDEF_MAKE_0X_VVRRPP(0xFFFFFF) == BOOST_VERSION_NUMBER(0xFF,0xFF,0xFF));
PREDEF_CHECK(BOOST_PREDEF_MAKE_10_VR0(980) == BOOST_VERSION_NUMBER(9,8,0));
PREDEF_CHECK(BOOST_PREDEF_MAKE_10_VRP(999) == BOOST_VERSION_NUMBER(9,9,9));
PREDEF_CHECK(BOOST_PREDEF_MAKE_10_VPPP(9999) == BOOST_VERSION_NUMBER(9,0,999));
PREDEF_CHECK(BOOST_PREDEF_MAKE_10_VVRRPP(999999) == BOOST_VERSION_NUMBER(99,99,99));
PREDEF_CHECK(BOOST_PREDEF_MAKE_10_VVRR(9999) == BOOST_VERSION_NUMBER(99,99,0));
PREDEF_CHECK(BOOST_PREDEF_MAKE_10_VVRRP(98765) == BOOST_VERSION_NUMBER(98,76,5));
PREDEF_CHECK(BOOST_PREDEF_MAKE_10_VRPP(9999) == BOOST_VERSION_NUMBER(9,9,99));
PREDEF_CHECK(BOOST_PREDEF_MAKE_10_VRRPP(99999) == BOOST_VERSION_NUMBER(9,99,99));
PREDEF_CHECK(BOOST_PREDEF_MAKE_10_VRPPPP(460002) == BOOST_VERSION_NUMBER(4,6,2));

View File

@ -43,7 +43,7 @@ rule check ( expressions + : language ? : true-properties * : false-properties *
else
{
# Create the check run if we don't have one yet.
local key = [ MD5 $(language)::$(expression) ] ;
local key = [ MD5 "$(language)::$(expression)" ] ;
if ! ( $(key) in $(_checks_) )
{
_checks_ += $(key) ;

428
tools/ci/build_log.py Normal file

@ -0,0 +1,428 @@
#!/usr/bin/env python
# Copyright 2008 Rene Rivera
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
import re
import optparse
import time
import xml.dom.minidom
import xml.dom.pulldom
from xml.sax.saxutils import unescape, escape
import os.path
from pprint import pprint
from __builtin__ import exit
class BuildOutputXMLParsing(object):
'''
XML parsing utilities for dealing with the Boost Build output
XML format.
'''
def get_child_data( self, root, tag = None, id = None, name = None, strip = False, default = None ):
return self.get_data(self.get_child(root,tag=tag,id=id,name=name),strip=strip,default=default)
def get_data( self, node, strip = False, default = None ):
data = None
if node:
data_node = None
if not data_node:
data_node = self.get_child(node,tag='#text')
if not data_node:
data_node = self.get_child(node,tag='#cdata-section')
data = ""
while data_node:
data += data_node.data
data_node = data_node.nextSibling
if data_node:
if data_node.nodeName != '#text' \
and data_node.nodeName != '#cdata-section':
data_node = None
if not data:
data = default
else:
if strip:
data = data.strip()
return data
def get_child( self, root, tag = None, id = None, name = None, type = None ):
return self.get_sibling(root.firstChild,tag=tag,id=id,name=name,type=type)
def get_sibling( self, sibling, tag = None, id = None, name = None, type = None ):
n = sibling
while n:
found = True
if type and found:
found = found and type == n.nodeType
if tag and found:
found = found and tag == n.nodeName
if (id or name) and found:
found = found and n.nodeType == xml.dom.Node.ELEMENT_NODE
if id and found:
if n.hasAttribute('id'):
found = found and n.getAttribute('id') == id
else:
found = found and n.hasAttribute('id') and n.getAttribute('id') == id
if name and found:
found = found and n.hasAttribute('name') and n.getAttribute('name') == name
if found:
return n
n = n.nextSibling
return None
class BuildOutputProcessor(BuildOutputXMLParsing):
def __init__(self, inputs):
self.test = {}
self.target_to_test = {}
self.target = {}
self.parent = {}
self.timestamps = []
for input in inputs:
self.add_input(input)
def add_input(self, input):
'''
Add a single build XML output file to our data.
'''
events = xml.dom.pulldom.parse(input)
context = []
for (event,node) in events:
if event == xml.dom.pulldom.START_ELEMENT:
context.append(node)
if node.nodeType == xml.dom.Node.ELEMENT_NODE:
x_f = self.x_name_(*context)
if x_f:
events.expandNode(node)
# expanding eats the end element, hence walking us out one level
context.pop()
# call handler
(x_f[1])(node)
elif event == xml.dom.pulldom.END_ELEMENT:
context.pop()
def x_name_(self, *context, **kwargs):
node = None
names = [ ]
for c in context:
if c:
if not isinstance(c,xml.dom.Node):
suffix = '_'+c.replace('-','_').replace('#','_')
else:
suffix = '_'+c.nodeName.replace('-','_').replace('#','_')
node = c
names.append('x')
names = map(lambda x: x+suffix,names)
if node:
for name in names:
if hasattr(self,name):
return (name,getattr(self,name))
return None
def x_build_test(self, node):
'''
Records the initial test information that will eventually
get expanded as we process the rest of the results.
'''
test_node = node
test_name = test_node.getAttribute('name')
test_target = self.get_child_data(test_node,tag='target',strip=True)
## print ">>> %s %s" %(test_name,test_target)
self.test[test_name] = {
'library' : "/".join(test_name.split('/')[0:-1]),
'test-name' : test_name.split('/')[-1],
'test-type' : test_node.getAttribute('type').lower(),
'test-program' : self.get_child_data(test_node,tag='source',strip=True),
'target' : test_target,
'info' : self.get_child_data(test_node,tag='info',strip=True),
'dependencies' : [],
'actions' : [],
}
# Add a lookup for the test given the test target.
self.target_to_test[self.test[test_name]['target']] = test_name
return None
def x_build_targets_target( self, node ):
'''
Process the target dependency DAG into an ancestry tree so we can look up
which top-level library and test targets specific build actions correspond to.
'''
target_node = node
name = self.get_child_data(target_node,tag='name',strip=True)
path = self.get_child_data(target_node,tag='path',strip=True)
jam_target = self.get_child_data(target_node,tag='jam-target',strip=True)
#~ Map for jam targets to virtual targets.
self.target[jam_target] = {
'name' : name,
'path' : path
}
#~ Create the ancestry.
dep_node = self.get_child(self.get_child(target_node,tag='dependencies'),tag='dependency')
while dep_node:
child = self.get_data(dep_node,strip=True)
child_jam_target = '<p%s>%s' % (path,child.split('//',1)[1])
self.parent[child_jam_target] = jam_target
dep_node = self.get_sibling(dep_node.nextSibling,tag='dependency')
return None
def x_build_action( self, node ):
'''
Given a build action log, process into the corresponding test log and
specific test log sub-part.
'''
action_node = node
name = self.get_child(action_node,tag='name')
if name:
name = self.get_data(name)
#~ Based on the action, we decide what sub-section the log
#~ should go into.
action_type = None
if re.match('[^%]+%[^.]+[.](compile)',name):
action_type = 'compile'
elif re.match('[^%]+%[^.]+[.](link|archive)',name):
action_type = 'link'
elif re.match('[^%]+%testing[.](capture-output)',name):
action_type = 'run'
elif re.match('[^%]+%testing[.](expect-failure|expect-success)',name):
action_type = 'result'
else:
# TODO: Enable to see what other actions can be included in the test results.
# action_type = None
action_type = 'other'
#~ print "+ [%s] %s %s :: %s" %(action_type,name,'','')
if action_type:
#~ Get the corresponding test.
(target,test) = self.get_test(action_node,type=action_type)
#~ Skip action that have no corresponding test as they are
#~ regular build actions and don't need to show up in the
#~ regression results.
if not test:
##print "??? [%s] %s %s :: %s" %(action_type,name,target,test)
return None
##print "+++ [%s] %s %s :: %s" %(action_type,name,target,test)
#~ Collect some basic info about the action.
action = {
'command' : self.get_action_command(action_node,action_type),
'output' : self.get_action_output(action_node,action_type),
'info' : self.get_action_info(action_node,action_type)
}
#~ For the test result status we find the appropriate node
#~ based on the type of test. Then adjust the result status
#~ accordingly. This makes the result status reflect the
#~ expectation as the result pages post processing does not
#~ account for this inversion.
action['type'] = action_type
if action_type == 'result':
if re.match(r'^compile',test['test-type']):
action['type'] = 'compile'
elif re.match(r'^link',test['test-type']):
action['type'] = 'link'
elif re.match(r'^run',test['test-type']):
action['type'] = 'run'
#~ The result sub-part we will add this result to.
if action_node.getAttribute('status') == '0':
action['result'] = 'succeed'
else:
action['result'] = 'fail'
# Add the action to the test.
test['actions'].append(action)
# Set the test result if this is the result action for the test.
if action_type == 'result':
test['result'] = action['result']
return None
def x_build_timestamp( self, node ):
'''
The time-stamp goes to the corresponding attribute in the result.
'''
self.timestamps.append(self.get_data(node).strip())
return None
def get_test( self, node, type = None ):
'''
Find the test corresponding to an action. For testing targets these
are the ones pre-declared in the --dump-test option. For libraries
we create a dummy test as needed.
'''
jam_target = self.get_child_data(node,tag='jam-target')
base = self.target[jam_target]['name']
target = jam_target
while target in self.parent:
target = self.parent[target]
#~ print "--- TEST: %s ==> %s" %(jam_target,target)
#~ main-target-type is a precise indicator of what the build target is
#~ originally meant to be.
#main_type = self.get_child_data(self.get_child(node,tag='properties'),
# name='main-target-type',strip=True)
main_type = None
if main_type == 'LIB' and type:
lib = self.target[target]['name']
if not lib in self.test:
self.test[lib] = {
'library' : re.search(r'libs/([^/]+)',lib).group(1),
'test-name' : os.path.basename(lib),
'test-type' : 'lib',
'test-program' : os.path.basename(lib),
'target' : lib
}
test = self.test[lib]
else:
target_name_ = self.target[target]['name']
if self.target_to_test.has_key(target_name_):
test = self.test[self.target_to_test[target_name_]]
else:
test = None
return (base,test)
#~ The command executed for the action. For run actions we omit the command
#~ as it's just noise.
def get_action_command( self, action_node, action_type ):
if action_type != 'run':
return self.get_child_data(action_node,tag='command')
else:
return ''
#~ The command output.
def get_action_output( self, action_node, action_type ):
return self.get_child_data(action_node,tag='output',default='')
#~ Some basic info about the action.
def get_action_info( self, action_node, action_type ):
info = {}
#~ The jam action and target.
info['name'] = self.get_child_data(action_node,tag='name')
info['path'] = self.get_child_data(action_node,tag='path')
#~ The timing of the action.
info['time-start'] = action_node.getAttribute('start')
info['time-end'] = action_node.getAttribute('end')
info['time-user'] = action_node.getAttribute('user')
info['time-system'] = action_node.getAttribute('system')
#~ Testing properties.
test_info_prop = self.get_child_data(self.get_child(action_node,tag='properties'),name='test-info')
info['always_show_run_output'] = test_info_prop == 'always_show_run_output'
#~ And for compiles some context that may be hidden if using response files.
if action_type == 'compile':
info['define'] = []
define = self.get_child(self.get_child(action_node,tag='properties'),name='define')
while define:
info['define'].append(self.get_data(define,strip=True))
define = self.get_sibling(define.nextSibling,name='define')
return info
class BuildConsoleSummaryReport(object):
HEADER = '\033[35m\033[1m'
INFO = '\033[34m'
OK = '\033[32m'
WARNING = '\033[33m'
FAIL = '\033[31m'
ENDC = '\033[0m'
def __init__(self, bop, opt):
self.bop = bop
def generate(self):
self.summary_info = {
'total' : 0,
'success' : 0,
'failed' : [],
}
self.header_print("======================================================================")
self.print_test_log()
self.print_summary()
self.header_print("======================================================================")
@property
def failed(self):
return len(self.summary_info['failed']) > 0
def print_test_log(self):
self.header_print("Tests run..")
self.header_print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
for k in sorted(self.bop.test.keys()):
test = self.bop.test[k]
if len(test['actions']) > 0:
self.summary_info['total'] += 1
##print ">>>> {0}".format(test['test-name'])
if 'result' in test:
succeed = test['result'] == 'succeed'
else:
succeed = test['actions'][-1]['result'] == 'succeed'
if succeed:
self.summary_info['success'] += 1
else:
self.summary_info['failed'].append(test)
if succeed:
self.ok_print("[PASS] {0}",k)
else:
self.fail_print("[FAIL] {0}",k)
for action in test['actions']:
self.print_action(succeed, action)
def print_action(self, test_succeed, action):
'''
Print the detailed info of failed or always print tests.
'''
#self.info_print(">>> {0}",action.keys())
if not test_succeed or action['info']['always_show_run_output']:
output = action['output'].strip()
if output != "":
p = self.fail_print if action['result'] == 'fail' else self.p_print
self.info_print("")
self.info_print("({0}) {1}",action['info']['name'],action['info']['path'])
p("")
p("{0}",action['command'].strip())
p("")
for line in output.splitlines():
p("{0}",line.encode('utf-8'))
def print_summary(self):
self.header_print("")
self.header_print("Testing summary..")
self.header_print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
self.p_print("Total: {0}",self.summary_info['total'])
self.p_print("Success: {0}",self.summary_info['success'])
if self.failed:
self.fail_print("Failed: {0}",len(self.summary_info['failed']))
for test in self.summary_info['failed']:
self.fail_print(" {0}/{1}",test['library'],test['test-name'])
def p_print(self, format, *args, **kargs):
print format.format(*args,**kargs)
def info_print(self, format, *args, **kargs):
print self.INFO+format.format(*args,**kargs)+self.ENDC
def header_print(self, format, *args, **kargs):
print self.HEADER+format.format(*args,**kargs)+self.ENDC
def ok_print(self, format, *args, **kargs):
print self.OK+format.format(*args,**kargs)+self.ENDC
def warn_print(self, format, *args, **kargs):
print self.WARNING+format.format(*args,**kargs)+self.ENDC
def fail_print(self, format, *args, **kargs):
print self.FAIL+format.format(*args,**kargs)+self.ENDC
class Main(object):
def __init__(self,args=None):
op = optparse.OptionParser(
usage="%prog [options] input+")
op.add_option( '--output',
help="type of output to generate" )
( opt, inputs ) = op.parse_args(args)
bop = BuildOutputProcessor(inputs)
output = None
if opt.output == 'console':
output = BuildConsoleSummaryReport(bop, opt)
if output:
output.generate()
self.failed = output.failed
if __name__ == '__main__':
m = Main()
if m.failed:
exit(-1)

878
tools/ci/common.py Normal file

@ -0,0 +1,878 @@
#!/usr/bin/env python
# Copyright Rene Rivera 2016
#
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
import sys
import inspect
import optparse
import os.path
import string
import time
import subprocess
import codecs
import shutil
import threading
toolset_info = {
'clang-3.4' : {
'ppa' : ["ppa:h-rayflood/llvm"],
'package' : 'clang-3.4',
'command' : 'clang++-3.4',
'toolset' : 'clang',
'version' : ''
},
'clang-3.5' : {
'ppa' : ["ppa:h-rayflood/llvm"],
'package' : 'clang-3.5',
'command' : 'clang++-3.5',
'toolset' : 'clang',
'version' : ''
},
'clang-3.6' : {
'ppa' : ["ppa:h-rayflood/llvm"],
'package' : 'clang-3.6',
'command' : 'clang++-3.6',
'toolset' : 'clang',
'version' : ''
},
'clang-3.7' : {
'deb' : ["http://apt.llvm.org/trusty/","llvm-toolchain-trusty-3.7","main"],
'apt-key' : ['http://apt.llvm.org/llvm-snapshot.gpg.key'],
'package' : 'clang-3.7',
'command' : 'clang++-3.7',
'toolset' : 'clang',
'version' : ''
},
'clang-3.8' : {
'deb' : ["http://apt.llvm.org/trusty/","llvm-toolchain-trusty-3.8","main"],
'apt-key' : ['http://apt.llvm.org/llvm-snapshot.gpg.key'],
'package' : 'clang-3.8',
'command' : 'clang++-3.8',
'toolset' : 'clang',
'version' : ''
},
'clang-3.9' : {
'deb' : ["http://apt.llvm.org/trusty/","llvm-toolchain-trusty-3.9","main"],
'apt-key' : ['http://apt.llvm.org/llvm-snapshot.gpg.key'],
'package' : 'clang-3.9',
'command' : 'clang++-3.9',
'toolset' : 'clang',
'version' : ''
},
'clang-4.0' : {
'deb' : ["http://apt.llvm.org/trusty/","llvm-toolchain-trusty-4.0","main"],
'apt-key' : ['http://apt.llvm.org/llvm-snapshot.gpg.key'],
'package' : 'clang-4.0',
'command' : 'clang++-4.0',
'toolset' : 'clang',
'version' : ''
},
'clang-5.0' : {
'deb' : ["http://apt.llvm.org/trusty/","llvm-toolchain-trusty-5.0","main"],
'apt-key' : ['http://apt.llvm.org/llvm-snapshot.gpg.key'],
'package' : 'clang-5.0',
'command' : 'clang++-5.0',
'toolset' : 'clang',
'version' : ''
},
'clang-6.0' : {
'deb' : ["http://apt.llvm.org/trusty/","llvm-toolchain-trusty-6.0","main"],
'apt-key' : ['http://apt.llvm.org/llvm-snapshot.gpg.key'],
'package' : 'clang-6.0',
'command' : 'clang++-6.0',
'toolset' : 'clang',
'version' : ''
},
'gcc-4.7' : {
'ppa' : ["ppa:ubuntu-toolchain-r/test"],
'package' : 'g++-4.7',
'command' : 'g++-4.7',
'toolset' : 'gcc',
'version' : ''
},
'gcc-4.8' : {
'bin' : 'gcc-4.8',
'ppa' : ["ppa:ubuntu-toolchain-r/test"],
'package' : 'g++-4.8',
'command' : 'g++-4.8',
'toolset' : 'gcc',
'version' : ''
},
'gcc-4.9' : {
'ppa' : ["ppa:ubuntu-toolchain-r/test"],
'package' : 'g++-4.9',
'command' : 'g++-4.9',
'toolset' : 'gcc',
'version' : ''
},
'gcc-5.1' : {
'ppa' : ["ppa:ubuntu-toolchain-r/test"],
'package' : 'g++-5',
'command' : 'g++-5',
'toolset' : 'gcc',
'version' : ''
},
'gcc-5' : {
'ppa' : ["ppa:ubuntu-toolchain-r/test"],
'package' : 'g++-5',
'command' : 'g++-5',
'toolset' : 'gcc',
'version' : ''
},
'gcc-6' : {
'ppa' : ["ppa:ubuntu-toolchain-r/test"],
'package' : 'g++-6',
'command' : 'g++-6',
'toolset' : 'gcc',
'version' : ''
},
'gcc-7' : {
'ppa' : ["ppa:ubuntu-toolchain-r/test"],
'package' : 'g++-7',
'command' : 'g++-7',
'toolset' : 'gcc',
'version' : ''
},
'gcc-8' : {
'ppa' : ["ppa:ubuntu-toolchain-r/test"],
'package' : 'g++-8',
'command' : 'g++-8',
'toolset' : 'gcc',
'version' : ''
},
'mingw-5' : {
'toolset' : 'gcc',
'command' : 'C:\\\\MinGW\\\\bin\\\\g++.exe',
'version' : ''
},
'mingw64-6' : {
'toolset' : 'gcc',
'command' : 'C:\\\\mingw-w64\\\\x86_64-6.3.0-posix-seh-rt_v5-rev1\\\\mingw64\\\\bin\\\\g++.exe',
'version' : ''
},
'vs-2008' : {
'toolset' : 'msvc',
'command' : '',
'version' : '9.0'
},
'vs-2010' : {
'toolset' : 'msvc',
'command' : '',
'version' : '10.0'
},
'vs-2012' : {
'toolset' : 'msvc',
'command' : '',
'version' : '11.0'
},
'vs-2013' : {
'toolset' : 'msvc',
'command' : '',
'version' : '12.0'
},
'vs-2015' : {
'toolset' : 'msvc',
'command' : '',
'version' : '14.0'
},
'vs-2017' : {
'toolset' : 'msvc',
'command' : '',
'version' : '14.1'
},
'xcode-6.1' : {
'command' : 'clang++',
'toolset' : 'clang',
'version' : ''
},
'xcode-6.2' : {
'command' : 'clang++',
'toolset' : 'clang',
'version' : ''
},
'xcode-6.3' : {
'command' : 'clang++',
'toolset' : 'clang',
'version' : ''
},
'xcode-6.4' : {
'command' : 'clang++',
'toolset' : 'clang',
'version' : ''
},
'xcode-7.0' : {
'command' : 'clang++',
'toolset' : 'clang',
'version' : ''
},
'xcode-7.1' : {
'command' : 'clang++',
'toolset' : 'clang',
'version' : ''
},
'xcode-7.2' : {
'command' : 'clang++',
'toolset' : 'clang',
'version' : ''
},
'xcode-7.3' : {
'command' : 'clang++',
'toolset' : 'clang',
'version' : ''
},
'xcode-8.0' : {
'command' : 'clang++',
'toolset' : 'clang',
'version' : ''
},
'xcode-8.1' : {
'command' : 'clang++',
'toolset' : 'clang',
'version' : ''
},
'xcode-8.2' : {
'command' : 'clang++',
'toolset' : 'clang',
'version' : ''
},
'xcode-8.3' : {
'command' : 'clang++',
'toolset' : 'clang',
'version' : ''
},
'xcode-9.0' : {
'command' : 'clang++',
'toolset' : 'clang',
'version' : ''
},
'xcode-9.1' : {
'command' : 'clang++',
'toolset' : 'clang',
'version' : ''
},
}
class SystemCallError(Exception):
def __init__(self, command, result):
self.command = command
self.result = result
def __str__(self, *args, **kwargs):
return "'%s' ==> %s"%("' '".join(self.command), self.result)
class utils:
call_stats = []
@staticmethod
def call(*command, **kargs):
utils.log( "%s> '%s'"%(os.getcwd(), "' '".join(command)) )
t = time.time()
result = subprocess.call(command, **kargs)
t = time.time()-t
if result != 0:
print "Failed: '%s' ERROR = %s"%("' '".join(command), result)
utils.call_stats.append((t,os.getcwd(),command,result))
utils.log( "%s> '%s' execution time %s seconds"%(os.getcwd(), "' '".join(command), t) )
return result
@staticmethod
def print_call_stats():
utils.log("================================================================================")
for j in sorted(utils.call_stats, reverse=True):
utils.log("{:>12.4f}\t{}> {} ==> {}".format(*j))
utils.log("================================================================================")
@staticmethod
def check_call(*command, **kargs):
cwd = os.getcwd()
result = utils.call(*command, **kargs)
if result != 0:
raise SystemCallError([cwd] + list(command), result)
@staticmethod
def makedirs( path ):
if not os.path.exists( path ):
os.makedirs( path )
@staticmethod
def log_level():
frames = inspect.stack()
level = 0
for i in frames[ 3: ]:
if i[0].f_locals.has_key( '__log__' ):
level = level + i[0].f_locals[ '__log__' ]
return level
@staticmethod
def log( message ):
sys.stdout.flush()
sys.stderr.flush()
sys.stderr.write( '# ' + ' ' * utils.log_level() + message + '\n' )
sys.stderr.flush()
@staticmethod
def rmtree(path):
if os.path.exists( path ):
#~ shutil.rmtree( unicode( path ) )
if sys.platform == 'win32':
os.system( 'del /f /s /q "%s" >nul 2>&1' % path )
shutil.rmtree( unicode( path ) )
else:
os.system( 'rm -f -r "%s"' % path )
@staticmethod
def retry( f, max_attempts=5, sleep_secs=10 ):
for attempts in range( max_attempts, -1, -1 ):
try:
return f()
except Exception, msg:
utils.log( '%s failed with message "%s"' % ( f.__name__, msg ) )
if attempts == 0:
utils.log( 'Giving up.' )
raise
utils.log( 'Retrying (%d more attempts).' % attempts )
time.sleep( sleep_secs )
@staticmethod
def web_get( source_url, destination_file, proxy = None ):
import urllib
proxies = None
if proxy is not None:
proxies = {
'https' : proxy,
'http' : proxy
}
src = urllib.urlopen( source_url, proxies = proxies )
f = open( destination_file, 'wb' )
while True:
data = src.read( 16*1024 )
if len( data ) == 0: break
f.write( data )
f.close()
src.close()
@staticmethod
def unpack_archive( archive_path ):
utils.log( 'Unpacking archive ("%s")...' % archive_path )
archive_name = os.path.basename( archive_path )
extension = archive_name[ archive_name.find( '.' ) : ]
if extension in ( ".tar.gz", ".tar.bz2" ):
import tarfile
import stat
mode = os.path.splitext( extension )[1][1:]
tar = tarfile.open( archive_path, 'r:%s' % mode )
for tarinfo in tar:
tar.extract( tarinfo )
if sys.platform == 'win32' and not tarinfo.isdir():
# workaround what appears to be a Win32-specific bug in 'tarfile'
# (modification times for extracted files are not set properly)
f = os.path.join( os.curdir, tarinfo.name )
os.chmod( f, stat.S_IWRITE )
os.utime( f, ( tarinfo.mtime, tarinfo.mtime ) )
tar.close()
elif extension in ( ".zip" ):
import zipfile
z = zipfile.ZipFile( archive_path, 'r', zipfile.ZIP_DEFLATED )
for f in z.infolist():
destination_file_path = os.path.join( os.curdir, f.filename )
if destination_file_path[-1] == "/": # directory
if not os.path.exists( destination_file_path ):
os.makedirs( destination_file_path )
else: # file
result = open( destination_file_path, 'wb' )
result.write( z.read( f.filename ) )
result.close()
z.close()
else:
raise Exception( 'Do not know how to unpack archives with extension "%s"' % extension )
@staticmethod
def make_file(filename, *text):
text = string.join( text, '\n' )
with codecs.open( filename, 'w', 'utf-8' ) as f:
f.write( text )
@staticmethod
def append_file(filename, *text):
with codecs.open( filename, 'a', 'utf-8' ) as f:
f.write( string.join( text, '\n' ) )
@staticmethod
def mem_info():
if sys.platform == "darwin":
utils.call("top","-l","1","-s","0","-n","0")
elif sys.platform.startswith("linux"):
utils.call("free","-m","-l")
@staticmethod
def query_boost_version(boost_root):
'''
Read in the Boost version from a given boost_root.
'''
boost_version = None
if os.path.exists(os.path.join(boost_root,'Jamroot')):
with codecs.open(os.path.join(boost_root,'Jamroot'), 'r', 'utf-8') as f:
for line in f.readlines():
parts = line.split()
if len(parts) >= 5 and parts[1] == 'BOOST_VERSION':
boost_version = parts[3]
break
if not boost_version:
boost_version = 'default'
return boost_version
@staticmethod
def git_clone(sub_repo, branch, commit = None, cwd = None, no_submodules = False):
'''
This clone mimics the way Travis-CI clones a project's repo. So far
Travis-CI is the most limiting in the sense of only fetching partial
history of the repo.
'''
if not cwd:
cwd = os.getcwd()
root_dir = os.path.join(cwd,'boostorg',sub_repo)
if not os.path.exists(os.path.join(root_dir,'.git')):
utils.check_call("git","clone",
"--depth=1",
"--branch=%s"%(branch),
"https://github.com/boostorg/%s.git"%(sub_repo),
root_dir)
os.chdir(root_dir)
else:
os.chdir(root_dir)
utils.check_call("git","pull",
# "--depth=1", # Can't do depth as we get merge errors.
"--quiet","--no-recurse-submodules")
if commit:
utils.check_call("git","checkout","-qf",commit)
if os.path.exists(os.path.join('.git','modules')):
if sys.platform == 'win32':
utils.check_call('dir',os.path.join('.git','modules'))
else:
utils.check_call('ls','-la',os.path.join('.git','modules'))
if not no_submodules:
utils.check_call("git","submodule","--quiet","update",
"--quiet","--init","--recursive",
)
utils.check_call("git","submodule","--quiet","foreach","git","fetch")
return root_dir
class parallel_call(threading.Thread):
'''
Runs a synchronous command in a thread waiting for it to complete.
'''
def __init__(self, *command, **kargs):
super(parallel_call,self).__init__()
self.command = command
self.command_kargs = kargs
self.start()
def run(self):
self.result = utils.call(*self.command, **self.command_kargs)
def join(self):
super(parallel_call,self).join()
if self.result != 0:
raise(SystemCallError(self.command, self.result))
def set_arg(args, k, v = None):
if not args.get(k):
args[k] = v
return args[k]
class script_common(object):
'''
Main script to run Boost C++ Libraries continuous integration.
'''
def __init__(self, ci_klass, **kargs):
self.ci = ci_klass(self)
opt = optparse.OptionParser(
usage="%prog [options] [commands]")
#~ Debug Options:
opt.add_option( '--debug-level',
help="debugging level; controls the amount of debugging output printed",
type='int' )
opt.add_option( '-j',
help="maximum number of parallel jobs to use for building with b2",
type='int', dest='jobs')
opt.add_option('--branch')
opt.add_option('--commit')
kargs = self.init(opt,kargs)
kargs = self.ci.init(opt, kargs)
set_arg(kargs,'debug_level',0)
set_arg(kargs,'jobs',2)
set_arg(kargs,'branch',None)
set_arg(kargs,'commit',None)
set_arg(kargs,'repo',None)
set_arg(kargs,'root_dir',None)
set_arg(kargs,'actions',None)
set_arg(kargs,'pull_request', None)
#~ Defaults
for (k,v) in kargs.iteritems():
setattr(self,k,v)
( _opt_, self.actions ) = opt.parse_args(None,self)
if not self.actions or self.actions == []:
self.actions = kargs.get('actions',None)
if not self.actions or self.actions == []:
self.actions = [ 'info' ]
if not self.root_dir:
self.root_dir = os.getcwd()
self.build_dir = os.path.join(os.path.dirname(self.root_dir), "build")
# API keys.
self.bintray_key = os.getenv('BINTRAY_KEY')
try:
self.start()
self.command_info()
self.main()
utils.print_call_stats()
except:
utils.print_call_stats()
raise
def init(self, opt, kargs):
return kargs
def start(self):
pass
def main(self):
for action in self.actions:
action_m = "command_"+action.replace('-','_')
ci_command = getattr(self.ci, action_m, None)
ci_script = getattr(self, action_m, None)
if ci_command or ci_script:
utils.log( "### %s.."%(action) )
if os.path.exists(self.root_dir):
os.chdir(self.root_dir)
if ci_command:
ci_command()
elif ci_script:
ci_script()
def b2( self, *args, **kargs ):
cmd = ['b2','--debug-configuration', '-j%s'%(self.jobs)]
cmd.extend(args)
if 'toolset' in kargs:
cmd.append('toolset=' + kargs['toolset'])
if 'parallel' in kargs:
return parallel_call(*cmd)
else:
return utils.check_call(*cmd)
# Common test commands in the order they should be executed..
def command_info(self):
pass
def command_install(self):
utils.makedirs(self.build_dir)
os.chdir(self.build_dir)
def command_install_toolset(self, toolset):
if self.ci and hasattr(self.ci,'install_toolset'):
self.ci.install_toolset(toolset)
def command_before_build(self):
pass
def command_build(self):
pass
def command_before_cache(self):
pass
def command_after_success(self):
pass
class ci_cli(object):
'''
This version of the script provides a way to do manual building. It sets up
additional environment and adds fetching of the git repos that would
normally be done by the CI system.
The common way to use this variant is to invoke something like:
mkdir boost-ci
cd boost-ci
python path-to/ci_boost_<script>.py --branch=develop [--repo=mylib] ...
Status: In working order.
'''
def __init__(self,script):
if sys.platform == 'darwin':
# Requirements for running on OSX:
# https://www.stack.nl/~dimitri/doxygen/download.html#srcbin
# https://tug.org/mactex/morepackages.html
doxygen_path = "/Applications/Doxygen.app/Contents/Resources"
if os.path.isdir(doxygen_path):
os.environ["PATH"] = doxygen_path+':'+os.environ['PATH']
self.script = script
self.work_dir = os.getcwd()
self.exit_result = 0
def init(self, opt, kargs):
kargs['actions'] = [
'clone',
'install',
'before_build',
'build',
'before_cache',
'finish'
]
opt.add_option( '--repo',
help="Boost repo short name we are testing with, and hence the repo we clone.")
set_arg(kargs,'repo','boost')
return kargs
def finish(self, result):
self.exit_result = result
def command_clone(self):
self.script.root_dir = os.path.join(self.work_dir,'boostorg',self.script.repo)
self.script.build_dir = os.path.join(os.path.dirname(self.script.root_dir), "build")
utils.git_clone(self.script.repo, self.script.branch, self.script.commit, self.work_dir)
def command_finish(self):
exit(self.exit_result)
class ci_travis(object):
'''
This variant builds releases in the context of the Travis-CI service.
'''
def __init__(self,script):
self.script = script
self.work_dir = os.getenv("HOME")
def init(self, opt, kargs):
set_arg(kargs,'root_dir', os.getenv("TRAVIS_BUILD_DIR"))
set_arg(kargs,'branch', os.getenv("TRAVIS_BRANCH"))
set_arg(kargs,'commit', os.getenv("TRAVIS_COMMIT"))
set_arg(kargs,'repo', os.getenv("TRAVIS_REPO_SLUG").split("/")[1])
set_arg(kargs,'pull_request',
os.getenv('TRAVIS_PULL_REQUEST') \
if os.getenv('TRAVIS_PULL_REQUEST') != 'false' else None)
return kargs
def finish(self, result):
exit(result)
def install_toolset(self, toolset):
'''
Installs specific toolset on CI system.
'''
info = toolset_info[toolset]
if sys.platform.startswith('linux'):
os.chdir(self.work_dir)
if 'ppa' in info:
for ppa in info['ppa']:
utils.check_call(
'sudo','add-apt-repository','--yes',ppa)
if 'deb' in info:
utils.make_file('sources.list',
"deb %s"%(' '.join(info['deb'])),
"deb-src %s"%(' '.join(info['deb'])))
utils.check_call('sudo','bash','-c','cat sources.list >> /etc/apt/sources.list')
if 'apt-key' in info:
for key in info['apt-key']:
utils.check_call('wget',key,'-O','apt.key')
utils.check_call('sudo','apt-key','add','apt.key')
utils.check_call(
'sudo','apt-get','update','-qq')
utils.check_call(
'sudo','apt-get','install','-qq',info['package'])
if 'debugpackage' in info and info['debugpackage']:
utils.check_call(
'sudo','apt-get','install','-qq',info['debugpackage'])
# Travis-CI commands in the order they are executed. We need
# these to forward to our common commands, if they are different.
def command_before_install(self):
pass
def command_install(self):
self.script.command_install()
def command_before_script(self):
self.script.command_before_build()
def command_script(self):
self.script.command_build()
def command_before_cache(self):
self.script.command_before_cache()
def command_after_success(self):
self.script.command_after_success()
def command_after_failure(self):
pass
def command_before_deploy(self):
pass
def command_after_deploy(self):
pass
def command_after_script(self):
pass
class ci_circleci(object):
'''
This variant builds releases in the context of the CircleCI service.
'''
def __init__(self,script):
self.script = script
self.work_dir = os.getenv("HOME")
def init(self, opt, kargs):
set_arg(kargs,'root_dir', os.path.join(os.getenv("HOME"),os.getenv("CIRCLE_PROJECT_REPONAME")))
set_arg(kargs,'branch', os.getenv("CIRCLE_BRANCH"))
set_arg(kargs,'commit', os.getenv("CIRCLE_SHA1"))
set_arg(kargs,'repo', os.getenv("CIRCLE_PROJECT_REPONAME").split("/")[1])
set_arg(kargs,'pull_request', os.getenv('CIRCLE_PR_NUMBER'))
return kargs
def finish(self, result):
exit(result)
def command_machine_post(self):
# Apt update for the package installs we'll do later.
utils.check_call('sudo','apt-get','-qq','update')
# Need PyYAML to read Travis yaml in a later step.
utils.check_call("pip","install","--user","PyYAML")
def command_checkout_post(self):
os.chdir(self.script.root_dir)
utils.check_call("git","submodule","update","--quiet","--init","--recursive")
def command_dependencies_pre(self):
# Read in .travis.yml for list of packages to install
# as CircleCI doesn't have a convenient apt install method.
import yaml
utils.check_call('sudo','-E','apt-get','-yqq','update')
utils.check_call('sudo','apt-get','-yqq','purge','texlive*')
with open(os.path.join(self.script.root_dir,'.travis.yml')) as yml:
travis_yml = yaml.load(yml)
utils.check_call('sudo','apt-get','-yqq',
'--no-install-suggests','--no-install-recommends','--force-yes','install',
*travis_yml['addons']['apt']['packages'])
def command_dependencies_override(self):
self.script.command_install()
def command_dependencies_post(self):
pass
def command_database_pre(self):
pass
def command_database_override(self):
pass
def command_database_post(self):
pass
def command_test_pre(self):
self.script.command_install()
self.script.command_before_build()
def command_test_override(self):
# CircleCI runs all the test subsets. So in order to avoid
# running the after_success we do it here as the build step
# will halt accordingly.
self.script.command_build()
self.script.command_before_cache()
self.script.command_after_success()
def command_test_post(self):
pass
class ci_appveyor(object):
def __init__(self,script):
self.script = script
self.work_dir = os.path.dirname(os.getenv("APPVEYOR_BUILD_FOLDER"))
def init(self, opt, kargs):
set_arg(kargs,'root_dir',os.getenv("APPVEYOR_BUILD_FOLDER"))
set_arg(kargs,'branch',os.getenv("APPVEYOR_REPO_BRANCH"))
set_arg(kargs,'commit',os.getenv("APPVEYOR_REPO_COMMIT"))
set_arg(kargs,'repo',os.getenv("APPVEYOR_REPO_NAME").split("/")[1])
set_arg(kargs,'address_model',os.getenv("PLATFORM",None))
set_arg(kargs,'variant',os.getenv("CONFIGURATION","debug"))
set_arg(kargs,'pull_request', os.getenv('APPVEYOR_PULL_REQUEST_NUMBER'))
return kargs
def finish(self, result):
exit(result)
# Appveyor commands in the order they are executed. We need
# these to forward to our common commands, if they are different.
def command_install(self):
self.script.command_install()
def command_before_build(self):
os.chdir(self.script.root_dir)
utils.check_call("git","submodule","update","--quiet","--init","--recursive")
self.script.command_before_build()
def command_build_script(self):
self.script.command_build()
def command_after_build(self):
self.script.command_before_cache()
def command_before_test(self):
pass
def command_test_script(self):
pass
def command_after_test(self):
pass
def command_on_success(self):
self.script.command_after_success()
def command_on_failure(self):
pass
def command_on_finish(self):
pass
def main(script_klass):
if os.getenv('TRAVIS', False):
script_klass(ci_travis)
elif os.getenv('CIRCLECI', False):
script_klass(ci_circleci)
elif os.getenv('APPVEYOR', False):
script_klass(ci_appveyor)
else:
script_klass(ci_cli)

198
tools/ci/library_test.py Normal file

@ -0,0 +1,198 @@
#!/usr/bin/env python
# Copyright Rene Rivera 2016
#
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
import os.path
import shutil
import sys
from common import toolset_info, main, utils, script_common, ci_cli, set_arg
__dirname__ = os.path.dirname(os.path.realpath(__file__))
class script(script_common):
'''
Main script to test a Boost C++ Library.
'''
def __init__(self, ci_klass, **kargs):
script_common.__init__(self, ci_klass, **kargs)
def init(self, opt, kargs):
opt.add_option( '--toolset',
help="single toolset to test with" )
opt.add_option( '--target',
help="test target to build for testing, defaults to TARGET or 'minimal'")
opt.add_option( '--address-model',
help="address model to test, ie 64 or 32" )
opt.add_option( '--variant',
help="variant to test, ie debug, release" )
set_arg(kargs, 'toolset', os.getenv("TOOLSET"))
set_arg(kargs, 'target', os.getenv('TARGET', 'minimal'))
set_arg(kargs, 'address_model', os.getenv("ADDRESS_MODEL",None))
set_arg(kargs, 'variant', os.getenv("VARIANT","debug"))
set_arg(kargs, 'cxxflags', os.getenv("CXXFLAGS",None))
return kargs
def start(self):
script_common.start(self)
# Some setup we need to redo for each invocation.
self.boost_root = os.path.join(self.ci.work_dir,'boostorg','boost')
@property
def repo_path(self):
if not hasattr(self,'_repo_path'):
# Find the path for the submodule of the repo we are testing.
if self.repo != 'boost':
self._repo_path = None
with open(os.path.join(self.boost_root,'.gitmodules'),"rU") as f:
path = None
url = None
for line in f:
line = line.strip()
if line.startswith("[submodule"):
path = None
url = None
else:
name = line.split("=")[0].strip()
value = line.split("=")[1].strip()
if name == "path":
path = value
elif name == "url":
url = value
if name and url and url.endswith("/%s.git"%(self.repo)):
self._repo_path = path
if not self._repo_path:
self._repo_path = "libs/%s"%(self.repo)
return self._repo_path
def command_install(self):
script_common.command_install(self)
# Fetch & install toolset..
utils.log( "Install toolset: %s"%(self.toolset) )
if self.toolset:
self.command_install_toolset(self.toolset)
def command_before_build(self):
script_common.command_before_build(self)
# Clone boost super-project.
if self.repo != 'boost':
utils.git_clone('boost',self.branch,cwd=self.ci.work_dir,no_submodules=True)
utils.check_call("git","submodule","update","--quiet","--init","tools/build")
utils.check_call("git","submodule","update","--quiet","--init","tools/boostdep")
# The global jamfiles require config as they trigger build config checks.
utils.check_call("git","submodule","update","--quiet","--init","libs/config")
# Find the path for the submodule of the repo we are testing.
if self.repo != 'boost':
self.repo_dir = os.path.join(self.boost_root,self.repo_path)
if self.repo != 'boost':
# Copy in the existing library tree checkout.
shutil.rmtree(self.repo_path)
shutil.copytree(self.root_dir, self.repo_path)
# Fetch the dependencies for the library we are testing.
if self.repo != 'boost':
os.chdir(self.boost_root)
utils.check_call(
sys.executable,
'tools/boostdep/depinst/depinst.py',
self.repo)
# Create config file for toolset.
if not isinstance(self.ci, ci_cli):
cxxflags = None
if self.cxxflags:
cxxflags = self.cxxflags.split()
cxxflags = " <cxxflags>".join(cxxflags)
utils.make_file(os.path.join(self.boost_root, 'project-config.jam'),
"""
using %(toolset)s : %(version)s : %(command)s : %(cxxflags)s ;
using python : %(pyversion)s : "%(python)s" ;
"""%{
'toolset':toolset_info[self.toolset]['toolset'],
'version':toolset_info[self.toolset]['version'],
'command':toolset_info[self.toolset]['command'],
'cxxflags':"<cxxflags>"+cxxflags if cxxflags else "",
'pyversion':"%s.%s"%(sys.version_info[0],sys.version_info[1]),
'python':sys.executable.replace("\\","\\\\")
})
def command_build(self):
script_common.command_build(self)
# Set up tools.
utils.makedirs(os.path.join(self.build_dir,'dist','bin'))
if not isinstance(self.ci, ci_cli) and toolset_info[self.toolset]['command']:
os.environ['PATH'] = os.pathsep.join([
os.path.dirname(toolset_info[self.toolset]['command']),
os.path.join(self.build_dir,'dist','bin'),
os.environ['PATH']])
else:
os.environ['PATH'] = os.pathsep.join([
os.path.join(self.build_dir,'dist','bin'),
os.environ['PATH']])
os.environ['BOOST_BUILD_PATH'] = self.build_dir
# Bootstrap Boost Build engine.
os.chdir(os.path.join(self.boost_root,"tools","build"))
if sys.platform == 'win32':
utils.check_call(".\\bootstrap.bat")
shutil.copy2("b2.exe", os.path.join(self.build_dir,"dist","bin","b2.exe"))
else:
utils.check_call("./bootstrap.sh")
shutil.copy2("b2", os.path.join(self.build_dir,"dist","bin","b2"))
utils.check_call("git","clean","-dfqx")
# Run the limited tests.
if self.repo != 'boost':
print("--- Testing %s ---"%(self.repo_path))
os.chdir(os.path.join(self.boost_root,'status'))
to_test = self.repo_path.split("/")
del to_test[0]
toolset_to_test = ""
if self.toolset:
if not isinstance(self.ci, ci_cli):
toolset_to_test = toolset_info[self.toolset]['toolset']
else:
toolset_to_test = self.toolset
self.b2(
'-d1',
'-p0',
'--include-tests=%s'%("/".join(to_test)),
'preserve-test-targets=off',
'--dump-tests',
'--build-dir=%s'%(self.build_dir),
'--out-xml=%s'%(os.path.join(self.build_dir,'regression.xml')),
'' if not toolset_to_test else 'toolset=%s'%(toolset_to_test),
'' if not self.address_model else 'address-model=%s'%(self.address_model),
'variant=%s'%(self.variant),
'--test-type=%s'%(self.target),
'--verbose-test'
)
# Generate a readable test report.
import build_log
log_main = build_log.Main([
'--output=console',
os.path.join(self.build_dir,'regression.xml')])
# And exit with an error if the report contains failures.
# This lets the CI notice the error and report a failed build.
# And hence trigger the failure machinery, like sending emails.
if log_main.failed:
self.ci.finish(-1)
def command_before_cache(self):
script_common.command_before_cache(self)
os.chdir(self.boost_root)
utils.check_call("git","clean","-dfqx")
utils.check_call("git","submodule","--quiet","foreach","git","clean","-dfqx")
utils.check_call("git","status","-bs")
utils.check_call("git","submodule","foreach","git","status","-bs")
main(script)