From 094ac2ffdd7ae0c98ec69bf18567d4fe24c0af50 Mon Sep 17 00:00:00 2001 From: Rene Rivera Date: Sun, 21 Apr 2019 22:15:17 -0500 Subject: [PATCH] Move to AZP for testing. --- .gitignore | 1 + .travis.yml | 129 ----- appveyor.yml | 149 ------ azure-pipelines.yml | 203 +++++++ tools/ci/build_log.py | 352 +++++++------ tools/ci/common.py | 1076 +++++++++++++++++++++----------------- tools/ci/library_test.py | 117 +++-- 7 files changed, 1048 insertions(+), 979 deletions(-) delete mode 100644 .travis.yml delete mode 100644 appveyor.yml create mode 100644 azure-pipelines.yml diff --git a/.gitignore b/.gitignore index 7d4da21..067fbeb 100644 --- a/.gitignore +++ b/.gitignore @@ -3,3 +3,4 @@ boost-build.jam project-config.jam *.pyc .vscode/settings.json +.vscode/ipch diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index f904bbb..0000000 --- a/.travis.yml +++ /dev/null @@ -1,129 +0,0 @@ -# Use, modification, and distribution are -# subject to the Boost Software License, Version 1.0. (See accompanying -# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) -# -# Copyright Rene Rivera 2015-2018. - -# Setting up notifications like this is optional as the default behavior -# of Travis is to notify the commiter of problems. But setting a specific -# recipient this way ensures you get all the communications about the -# builds. -notifications: - email: - recipients: - - grafikrobot@gmail.com - -# We specify a generic language instead of C++ as Travis sets up -# additional environment vars that point to its default toolset -# instead of the one we install. The extra env doesn't interfere, -# but at the same time it's misleading. So to avoid confusion and -# possible unseen side effects we stick with no C++ default setup. -language: generic - -# Speficy the default as Linux here, to avoid duplication in the matrix -# below. We use Trusty as that's the latest we can use. And it's better -# supported by the whole range of C++ toolsets we like to test on. -dist: trusty -os: linux - -# Because we install our own toolsets and other software we need -# to run the sudo support. -sudo: required - -# Travis has a long standing bug with their rather complicated -# build matrix evaluation that causes empty jobs to be created. -# This global matrix entry in combination with the exclusion -# below works around that bug. This is the suggested fix from -# the Travis support people. -env: - matrix: - - TRAVIS_EMPTY_JOB_WORKAROUND=true - -# This lists all the toolsets we will test with the Boost CI -# scripts. Predef needs to check all of them as its job is to -# distiguish between all of them. For other libraries you would -# want to limit the list to the toolsets that are important -# for that. -matrix: - exclude: - - env: TRAVIS_EMPTY_JOB_WORKAROUND=true - include: - # Check CMake project use support. - - env: TEST_CMAKE - # Skip all the unneeded steps from the normal unit test jobs - install: true - before_script: true - before_cache: true - before_cache: true - after_success: true - after_failure: true - after_script: true - # Build CMake simple test project that uses Predef. - script: - - mkdir __build__ && cd __build__ - - cmake ../test/test_cmake - - cmake --build . 
- - - - env: TOOLSET=clang-3.4 - - env: TOOLSET=clang-3.5 - - env: TOOLSET=clang-3.6 - - env: TOOLSET=clang-3.7 - - env: TOOLSET=clang-3.8 - - env: TOOLSET=clang-3.9 - - env: TOOLSET=clang-4.0 - - env: TOOLSET=clang-5.0 - - env: TOOLSET=gcc-4.7 - - env: TOOLSET=gcc-4.8 - - env: TOOLSET=gcc-4.9 - - env: TOOLSET=gcc-5 - - env: TOOLSET=gcc-6 - - env: TOOLSET=gcc-7 - - env: TOOLSET=gcc-8 - - env: TOOLSET=gcc-8 CXXFLAGS=-std=c++03 - - env: TOOLSET=gcc-8 CXXFLAGS=-std=c++11 - - env: TOOLSET=gcc-8 CXXFLAGS=-std=c++14 - - env: TOOLSET=gcc-8 CXXFLAGS=-std=c++17 - - env: TOOLSET=gcc-8 CXXFLAGS=-std=c++2a - - env: TOOLSET=gcc-8 CXXFLAGS=-std=gnu++03 - - env: TOOLSET=gcc-8 CXXFLAGS=-std=gnu++11 - - env: TOOLSET=gcc-8 CXXFLAGS=-std=gnu++14 - - env: TOOLSET=gcc-8 CXXFLAGS=-std=gnu++17 - - env: TOOLSET=gcc-8 CXXFLAGS=-std=gnu++2a - - env: TOOLSET=xcode-6.1 - os: osx - - env: TOOLSET=xcode-6.4 - os: osx - osx_image: xcode6.4 - - env: TOOLSET=xcode-7.3 - os: osx - osx_image: xcode7.3 - - env: TOOLSET=xcode-8.3 - os: osx - osx_image: xcode8.3 - - env: TOOLSET=xcode-9.4 CXXFLAGS=-std=c++03 - os: osx - osx_image: xcode9.4 - - env: TOOLSET=xcode-9.4 CXXFLAGS=-std=c++11 - os: osx - osx_image: xcode9.4 - - env: TOOLSET=xcode-9.4 CXXFLAGS=-std=c++14 - os: osx - osx_image: xcode9.4 - - env: TOOLSET=xcode-9.4 CXXFLAGS=-std=c++17 - os: osx - osx_image: xcode9.4 - - env: TOOLSET=xcode-9.4 CXXFLAGS=-std=c++2a - os: osx - osx_image: xcode9.4 - - env: TOOLSET=xcode-10.0 - os: osx - osx_image: xcode10.0 - -install: python "${TRAVIS_BUILD_DIR}/tools/ci/library_test.py" install -before_script: python "${TRAVIS_BUILD_DIR}/tools/ci/library_test.py" before_script -script: python "${TRAVIS_BUILD_DIR}/tools/ci/library_test.py" script -before_cache: python "${TRAVIS_BUILD_DIR}/tools/ci/library_test.py" before_cache -after_success: python "${TRAVIS_BUILD_DIR}/tools/ci/library_test.py" after_success -after_failure: python "${TRAVIS_BUILD_DIR}/tools/ci/library_test.py" after_failure -after_script: python "${TRAVIS_BUILD_DIR}/tools/ci/library_test.py" after_script diff --git a/appveyor.yml b/appveyor.yml deleted file mode 100644 index 644347e..0000000 --- a/appveyor.yml +++ /dev/null @@ -1,149 +0,0 @@ -# Use, modification, and distribution are -# subject to the Boost Software License, Version 1.0. (See accompanying -# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) -# -# Copyright Rene Rivera 2015-2016. - -# Set up notifications so that the maintainers of the library get -# build status messages. -notifications: - - provider: Email - to: - - grafikrobot@gmail.com - on_build_status_changed: true - -# This lists all the toolsets we will test with the Boost CI -# scripts. Predef needs to check all of them as its job is to -# distiguish between all of them. For other libraries you would -# want to limit the list to the toolsets that are important -# for that. -# -# This also includes setting up how to create the cache. We -# opt for slightly better compression and solid archives. -# As we have a lot of files in the boost tree which is what -# we are putting in the cache. 
-environment: - APPVEYOR_CACHE_ENTRY_ZIP_ARGS: -t7z -m0=lzma2 -mx=3 - matrix: - - TOOLSET: vs-2008 - platform: 32 - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015 - - TOOLSET: vs-2010 - platform: 32 - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015 - - TOOLSET: vs-2012 - platform: 32 - - TOOLSET: vs-2012 - COMMENT: UWP DESKTOP - CXXFLAGS: /D_WIN32_WINNT=0x0602 /DWINAPI_FAMILY=WINAPI_FAMILY_DESKTOP_APP - platform: 32 - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015 - - TOOLSET: vs-2012 - COMMENT: UWP STORE - CXXFLAGS: /D_WIN32_WINNT=0x0602 /DWINAPI_FAMILY=WINAPI_FAMILY_APP - platform: 32 - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015 - - TOOLSET: vs-2013 - platform: 32 - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015 - - TOOLSET: vs-2013 - platform: 64 - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015 - - TOOLSET: vs-2013 - COMMENT: UWP DESKTOP - CXXFLAGS: /D_WIN32_WINNT=0x0603 /DWINAPI_FAMILY=WINAPI_FAMILY_DESKTOP_APP - platform: 64 - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015 - - TOOLSET: vs-2013 - COMMENT: UWP PHONE - CXXFLAGS: /D_WIN32_WINNT=0x0603 /DWINAPI_FAMILY=WINAPI_FAMILY_PHONE_APP - platform: 64 - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015 - - TOOLSET: vs-2013 - COMMENT: UWP STORE - CXXFLAGS: /D_WIN32_WINNT=0x0603 /DWINAPI_FAMILY=WINAPI_FAMILY_PC_APP - platform: 64 - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015 - - TOOLSET: vs-2015 - platform: 32 - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015 - - TOOLSET: vs-2015 - platform: 64 - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015 - - TOOLSET: vs-2017 - platform: 32 - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017 - - TOOLSET: vs-2017 - platform: 64 - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017 - - TOOLSET: vs-2017 - CXXFLAGS: /std:c++14 - platform: 64 - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017 - - TOOLSET: vs-2017 - CXXFLAGS: /std:c++latest - platform: 64 - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017 - - TOOLSET: vs-2017 - COMMENT: UWP DESKTOP - CXXFLAGS: /D_WIN32_WINNT=0x0A00 /DWINAPI_FAMILY=WINAPI_FAMILY_DESKTOP_APP - platform: 64 - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017 - - TOOLSET: vs-2017 - COMMENT: UWP PHONE - CXXFLAGS: /D_WIN32_WINNT=0x0A00 /DWINAPI_FAMILY=WINAPI_FAMILY_PHONE_APP - platform: 64 - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017 - - TOOLSET: vs-2017 - COMMENT: UWP STORE - CXXFLAGS: /D_WIN32_WINNT=0x0A00 /DWINAPI_FAMILY=WINAPI_FAMILY_PC_APP - platform: 64 - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017 - - TOOLSET: vs-2017 - COMMENT: UWP SERVER - CXXFLAGS: /D_WIN32_WINNT=0x0A00 /DWINAPI_FAMILY=WINAPI_FAMILY_SERVER - platform: 64 - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017 - - TOOLSET: vs-2017 - COMMENT: UWP SYSTEM - CXXFLAGS: /D_WIN32_WINNT=0x0A00 /DWINAPI_FAMILY=WINAPI_FAMILY_SYSTEM - platform: 64 - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017 - - TOOLSET: mingw-5 - platform: 32 - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015 - - TOOLSET: mingw64-6 - platform: 64 - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015 - - TOOLSET: mingw64-6 - COMMENT: UWP DESKTOP - CXXFLAGS: -D_WIN32_WINNT=0x0602 -DWINAPI_FAMILY=WINAPI_FAMILY_DESKTOP_APP - platform: 64 - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015 - - TOOLSET: mingw64-6 - COMMENT: UWP STORE - CXXFLAGS: -D_WIN32_WINNT=0x0602 -DWINAPI_FAMILY=WINAPI_FAMILY_APP - platform: 64 - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015 - -# We can also set up configurations for testing which map to -# the b2 build variants. 
-configuration: - - debug - - release - -# No need to test PR branch as the PR itself is already tested. -skip_branch_with_pr: true - -init: - - cd %APPVEYOR_BUILD_FOLDER% -install: python tools/ci/library_test.py install -before_build: python tools/ci/library_test.py before_build -build_script: python tools/ci/library_test.py build_script -after_build: python tools/ci/library_test.py after_build -before_test: python tools/ci/library_test.py before_test -test_script: python tools/ci/library_test.py test_script -after_test: python tools/ci/library_test.py after_test -on_success: python tools/ci/library_test.py on_success -on_failure: python tools/ci/library_test.py on_failure -on_finish: python tools/ci/library_test.py on_finish diff --git a/azure-pipelines.yml b/azure-pipelines.yml new file mode 100644 index 0000000..fda52b3 --- /dev/null +++ b/azure-pipelines.yml @@ -0,0 +1,203 @@ +# Use, modification, and distribution are +# subject to the Boost Software License, Version 1.0. (See accompanying +# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) +# +# Copyright Rene Rivera 2019. + +trigger: + branches: + include: + - develop + - master + - feature/* +pr: + branches: + include: + - develop + +variables: + AZP: 1 + AZP_REPO_DIR: $(Build.Repository.LocalPath) + AZP_BRANCH: $(Build.SourceBranch) + AZP_COMMIT: $(Build.SourceVersion) + AZP_REPO: $(Build.Repository.Name) + AZP_PULL_REQUEST: $(System.PullRequest.PullRequestNumber) + +jobs: + +- job: 'Linux' + pool: + vmImage: 'ubuntu-16.04' + strategy: + matrix: + GCC 8 (GNU): + TOOLSET: gcc-8 + CXXSTD: 03,11,14,17,2a + CXXDIALECT: gnu + GCC 8 (ISO): + TOOLSET: gcc-8 + CXXSTD: 03,11,14,17,2a + GCC 7: + TOOLSET: gcc-7 + GCC 6: + TOOLSET: gcc-6 + GCC 5: + TOOLSET: gcc-5 + GCC 4.9: + TOOLSET: gcc-4.9 + GCC 4.8: + TOOLSET: gcc-4.8 + GCC 4.7: + TOOLSET: gcc-4.7 + Clang 8: + TOOLSET: clang-8 + Clang 7: + TOOLSET: clang-7 + Clang 6: + TOOLSET: clang-6.0 + Clang 5: + TOOLSET: clang-5.0 + Clang 4: + TOOLSET: clang-4.0 + Clang 3.9: + TOOLSET: clang-3.9 + Clang 3.8: + TOOLSET: clang-3.8 + Clang 3.7: + TOOLSET: clang-3.7 + Clang 3.6: + TOOLSET: clang-3.6 + Clang 3.5: + TOOLSET: clang-3.5 + steps: + - task: UsePythonVersion@0 + - script: python tools/ci/library_test.py install + failOnStderr: false + displayName: Install + - script: python tools/ci/library_test.py script + failOnStderr: false + displayName: Test + +- job: 'macOS' + strategy: + matrix: + Xcode 10.1: + TOOLSET: xcode-10.1 + XCODE_APP: /Applications/Xcode_10.1.app + VM_IMAGE: 'macOS-10.14' + Xcode 10.0: + TOOLSET: xcode-10.0 + XCODE_APP: /Applications/Xcode_10.app + VM_IMAGE: 'macOS-10.14' + Xcode 9.4.1: + TOOLSET: xcode-9.4.1 + XCODE_APP: /Applications/Xcode_9.4.1.app + VM_IMAGE: 'macOS-10.13' + Xcode 9.4: + TOOLSET: xcode-9.4 + XCODE_APP: /Applications/Xcode_9.4.app + VM_IMAGE: 'macOS-10.13' + Xcode 9.3.1: + TOOLSET: xcode-9.3.1 + XCODE_APP: /Applications/Xcode_9.3.1.app + VM_IMAGE: 'macOS-10.13' + Xcode 9.3: + TOOLSET: xcode-9.3 + XCODE_APP: /Applications/Xcode_9.3.app + VM_IMAGE: 'macOS-10.13' + Xcode 9.2: + TOOLSET: xcode-9.2 + XCODE_APP: /Applications/Xcode_9.2.app + VM_IMAGE: 'macOS-10.13' + Xcode 9.1: + TOOLSET: xcode-9.1 + XCODE_APP: /Applications/Xcode_9.1.app + VM_IMAGE: 'macOS-10.13' + Xcode 9.0.1: + TOOLSET: xcode-9.0.1 + XCODE_APP: /Applications/Xcode_9.0.1.app + VM_IMAGE: 'macOS-10.13' + Xcode 9.0: + TOOLSET: xcode-9.0 + XCODE_APP: /Applications/Xcode_9.app + VM_IMAGE: 'macOS-10.13' + Xcode 8.3.3: + TOOLSET: xcode-8.3 + XCODE_APP: /Applications/Xcode_8.3.3.app 
+ VM_IMAGE: 'macOS-10.13' + pool: + vmImage: $(VM_IMAGE) + steps: + - task: UsePythonVersion@0 + - script: sudo xcode-select -switch ${XCODE_APP} + failOnStderr: false + displayName: Xcode Select + - script: python tools/ci/library_test.py install + failOnStderr: false + displayName: Install + - script: python tools/ci/library_test.py script + failOnStderr: false + displayName: Test + +- job: 'Windows' + strategy: + matrix: + VS 2019: + TOOLSET: vs-2019 + ADDRESS_MODEL: 32,64 + CXXSTD: 14,latest + VM_IMAGE: 'windows-2019' + VS 2019 (UWP DESKTOP): + TOOLSET: vs-2019 + ADDRESS_MODEL: 64 + CXXDEFS: _WIN32_WINNT=0x0A00,WINAPI_FAMILY=WINAPI_FAMILY_DESKTOP_APP + VM_IMAGE: 'windows-2019' + VS 2019 (UWP PHONE): + TOOLSET: vs-2019 + ADDRESS_MODEL: 64 + CXXDEFS: _WIN32_WINNT=0x0A00,WINAPI_FAMILY=WINAPI_FAMILY_PHONE_APP + VM_IMAGE: 'windows-2019' + VS 2019 (UWP STORE): + TOOLSET: vs-2019 + ADDRESS_MODEL: 64 + CXXDEFS: _WIN32_WINNT=0x0A00,WINAPI_FAMILY=WINAPI_FAMILY_PC_APP + VM_IMAGE: 'windows-2019' + VS 2019 (UWP SERVER): + TOOLSET: vs-2019 + ADDRESS_MODEL: 64 + CXXDEFS: _WIN32_WINNT=0x0A00,WINAPI_FAMILY=WINAPI_FAMILY_SERVER + VM_IMAGE: 'windows-2019' + VS 2019 (UWP SYSTEM): + TOOLSET: vs-2019 + ADDRESS_MODEL: 64 + CXXDEFS: _WIN32_WINNT=0x0A00,WINAPI_FAMILY=WINAPI_FAMILY_SYSTEM + VM_IMAGE: 'windows-2019' + VS 2017: + TOOLSET: vs-2017 + VM_IMAGE: 'vs2017-win2016' + VS 2015: + TOOLSET: vs-2015 + VM_IMAGE: 'vs2015-win2012r2' + VS 2013: + TOOLSET: vs-2013 + VM_IMAGE: 'vs2015-win2012r2' + MinGW 8.1.0: + TOOLSET: mingw-8 + VM_IMAGE: 'vs2017-win2016' + MinGW 8.1.0 (UWP DESKTOP): + TOOLSET: mingw-8 + CXXDEFS: _WIN32_WINNT=0x0A00,WINAPI_FAMILY=WINAPI_FAMILY_DESKTOP_APP + VM_IMAGE: 'vs2017-win2016' + MinGW 8.1.0 (UWP STORE): + TOOLSET: mingw-8 + CXXDEFS: _WIN32_WINNT=0x0A00,WINAPI_FAMILY=WINAPI_FAMILY_PC_APP + VM_IMAGE: 'vs2017-win2016' + pool: + vmImage: $(VM_IMAGE) + steps: + - script: python tools/ci/library_test.py install + failOnStderr: false + displayName: Install + - script: python tools/ci/library_test.py script + failOnStderr: false + displayName: Test diff --git a/tools/ci/build_log.py b/tools/ci/build_log.py index 6ae156b..f862cc5 100644 --- a/tools/ci/build_log.py +++ b/tools/ci/build_log.py @@ -1,9 +1,10 @@ -#!/usr/bin/env python -# Copyright 2008 Rene Rivera +# Copyright 2008-2019 Rene Rivera # Distributed under the Boost Software License, Version 1.0. # (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) +from __future__ import print_function + import re import optparse import time @@ -12,32 +13,33 @@ import xml.dom.pulldom from xml.sax.saxutils import unescape, escape import os.path from pprint import pprint -from __builtin__ import exit +from sys import exit + class BuildOutputXMLParsing(object): ''' XML parsing utilities for dealing with the Boost Build output XML format. 
''' - - def get_child_data( self, root, tag = None, id = None, name = None, strip = False, default = None ): - return self.get_data(self.get_child(root,tag=tag,id=id,name=name),strip=strip,default=default) - - def get_data( self, node, strip = False, default = None ): + + def get_child_data(self, root, tag=None, id=None, name=None, strip=False, default=None): + return self.get_data(self.get_child(root, tag=tag, id=id, name=name), strip=strip, default=default) + + def get_data(self, node, strip=False, default=None): data = None if node: data_node = None if not data_node: - data_node = self.get_child(node,tag='#text') + data_node = self.get_child(node, tag='#text') if not data_node: - data_node = self.get_child(node,tag='#cdata-section') + data_node = self.get_child(node, tag='#cdata-section') data = "" while data_node: data += data_node.data data_node = data_node.nextSibling if data_node: if data_node.nodeName != '#text' \ - and data_node.nodeName != '#cdata-section': + and data_node.nodeName != '#cdata-section': data_node = None if not data: data = default @@ -45,11 +47,11 @@ class BuildOutputXMLParsing(object): if strip: data = data.strip() return data - - def get_child( self, root, tag = None, id = None, name = None, type = None ): - return self.get_sibling(root.firstChild,tag=tag,id=id,name=name,type=type) - - def get_sibling( self, sibling, tag = None, id = None, name = None, type = None ): + + def get_child(self, root, tag=None, id=None, name=None, type=None): + return self.get_sibling(root.firstChild, tag=tag, id=id, name=name, type=type) + + def get_sibling(self, sibling, tag=None, id=None, name=None, type=None): n = sibling while n: found = True @@ -63,16 +65,19 @@ class BuildOutputXMLParsing(object): if n.hasAttribute('id'): found = found and n.getAttribute('id') == id else: - found = found and n.hasAttribute('id') and n.getAttribute('id') == id + found = found and n.hasAttribute( + 'id') and n.getAttribute('id') == id if name and found: - found = found and n.hasAttribute('name') and n.getAttribute('name') == name + found = found and n.hasAttribute( + 'name') and n.getAttribute('name') == name if found: return n n = n.nextSibling return None + class BuildOutputProcessor(BuildOutputXMLParsing): - + def __init__(self, inputs): self.test = {} self.target_to_test = {} @@ -81,14 +86,14 @@ class BuildOutputProcessor(BuildOutputXMLParsing): self.timestamps = [] for input in inputs: self.add_input(input) - + def add_input(self, input): ''' Add a single build XML output file to our data. 
''' events = xml.dom.pulldom.parse(input) context = [] - for (event,node) in events: + for (event, node) in events: if event == xml.dom.pulldom.START_ELEMENT: context.append(node) if node.nodeType == xml.dom.Node.ELEMENT_NODE: @@ -101,25 +106,25 @@ class BuildOutputProcessor(BuildOutputXMLParsing): (x_f[1])(node) elif event == xml.dom.pulldom.END_ELEMENT: context.pop() - + def x_name_(self, *context, **kwargs): node = None - names = [ ] + names = [] for c in context: if c: - if not isinstance(c,xml.dom.Node): - suffix = '_'+c.replace('-','_').replace('#','_') + if not isinstance(c, xml.dom.Node): + suffix = '_'+c.replace('-', '_').replace('#', '_') else: - suffix = '_'+c.nodeName.replace('-','_').replace('#','_') + suffix = '_'+c.nodeName.replace('-', '_').replace('#', '_') node = c names.append('x') - names = map(lambda x: x+suffix,names) + names = [x+suffix for x in names] if node: for name in names: - if hasattr(self,name): - return (name,getattr(self,name)) + if hasattr(self, name): + return (name, getattr(self, name)) return None - + def x_build_test(self, node): ''' Records the initial test information that will eventually @@ -127,100 +132,102 @@ class BuildOutputProcessor(BuildOutputXMLParsing): ''' test_node = node test_name = test_node.getAttribute('name') - test_target = self.get_child_data(test_node,tag='target',strip=True) - ## print ">>> %s %s" %(test_name,test_target) + test_target = self.get_child_data(test_node, tag='target', strip=True) + # print ">>> %s %s" %(test_name,test_target) self.test[test_name] = { - 'library' : "/".join(test_name.split('/')[0:-1]), - 'test-name' : test_name.split('/')[-1], - 'test-type' : test_node.getAttribute('type').lower(), - 'test-program' : self.get_child_data(test_node,tag='source',strip=True), - 'target' : test_target, - 'info' : self.get_child_data(test_node,tag='info',strip=True), - 'dependencies' : [], - 'actions' : [], - } + 'library': "/".join(test_name.split('/')[0:-1]), + 'test-name': test_name.split('/')[-1], + 'test-type': test_node.getAttribute('type').lower(), + 'test-program': self.get_child_data(test_node, tag='source', strip=True), + 'target': test_target, + 'info': self.get_child_data(test_node, tag='info', strip=True), + 'dependencies': [], + 'actions': [], + } # Add a lookup for the test given the test target. self.target_to_test[self.test[test_name]['target']] = test_name return None - - def x_build_targets_target( self, node ): + + def x_build_targets_target(self, node): ''' Process the target dependency DAG into an ancestry tree so we can look up which top-level library and test targets specific build actions correspond to. ''' target_node = node - name = self.get_child_data(target_node,tag='name',strip=True) - path = self.get_child_data(target_node,tag='path',strip=True) - jam_target = self.get_child_data(target_node,tag='jam-target',strip=True) - #~ Map for jam targets to virtual targets. + name = self.get_child_data(target_node, tag='name', strip=True) + path = self.get_child_data(target_node, tag='path', strip=True) + jam_target = self.get_child_data( + target_node, tag='jam-target', strip=True) + # ~ Map for jam targets to virtual targets. self.target[jam_target] = { - 'name' : name, - 'path' : path - } - #~ Create the ancestry. - dep_node = self.get_child(self.get_child(target_node,tag='dependencies'),tag='dependency') + 'name': name, + 'path': path + } + # ~ Create the ancestry. 
+ dep_node = self.get_child(self.get_child( + target_node, tag='dependencies'), tag='dependency') while dep_node: - child = self.get_data(dep_node,strip=True) - child_jam_target = '%s' % (path,child.split('//',1)[1]) + child = self.get_data(dep_node, strip=True) + child_jam_target = '%s' % (path, child.split('//', 1)[1]) self.parent[child_jam_target] = jam_target - dep_node = self.get_sibling(dep_node.nextSibling,tag='dependency') + dep_node = self.get_sibling(dep_node.nextSibling, tag='dependency') return None - - def x_build_action( self, node ): + + def x_build_action(self, node): ''' Given a build action log, process into the corresponding test log and specific test log sub-part. ''' action_node = node - name = self.get_child(action_node,tag='name') + name = self.get_child(action_node, tag='name') if name: name = self.get_data(name) - #~ Based on the action, we decide what sub-section the log - #~ should go into. + # ~ Based on the action, we decide what sub-section the log + # ~ should go into. action_type = None - if re.match('[^%]+%[^.]+[.](compile)',name): + if re.match('[^%]+%[^.]+[.](compile)', name): action_type = 'compile' - elif re.match('[^%]+%[^.]+[.](link|archive)',name): + elif re.match('[^%]+%[^.]+[.](link|archive)', name): action_type = 'link' - elif re.match('[^%]+%testing[.](capture-output)',name): + elif re.match('[^%]+%testing[.](capture-output)', name): action_type = 'run' - elif re.match('[^%]+%testing[.](expect-failure|expect-success)',name): + elif re.match('[^%]+%testing[.](expect-failure|expect-success)', name): action_type = 'result' else: # TODO: Enable to see what other actions can be included in the test results. # action_type = None action_type = 'other' - #~ print "+ [%s] %s %s :: %s" %(action_type,name,'','') + # ~ print "+ [%s] %s %s :: %s" %(action_type,name,'','') if action_type: - #~ Get the corresponding test. - (target,test) = self.get_test(action_node,type=action_type) - #~ Skip action that have no corresponding test as they are - #~ regular build actions and don't need to show up in the - #~ regression results. + # ~ Get the corresponding test. + (target, test) = self.get_test(action_node, type=action_type) + # ~ Skip action that have no corresponding test as they are + # ~ regular build actions and don't need to show up in the + # ~ regression results. if not test: - ##print "??? [%s] %s %s :: %s" %(action_type,name,target,test) + # print "??? [%s] %s %s :: %s" %(action_type,name,target,test) return None - ##print "+++ [%s] %s %s :: %s" %(action_type,name,target,test) - #~ Collect some basic info about the action. + # print "+++ [%s] %s %s :: %s" %(action_type,name,target,test) + # ~ Collect some basic info about the action. action = { - 'command' : self.get_action_command(action_node,action_type), - 'output' : self.get_action_output(action_node,action_type), - 'info' : self.get_action_info(action_node,action_type) - } - #~ For the test result status we find the appropriate node - #~ based on the type of test. Then adjust the result status - #~ accordingly. This makes the result status reflect the - #~ expectation as the result pages post processing does not - #~ account for this inversion. + 'command': self.get_action_command(action_node, action_type), + 'output': self.get_action_output(action_node, action_type), + 'info': self.get_action_info(action_node, action_type) + } + # ~ For the test result status we find the appropriate node + # ~ based on the type of test. Then adjust the result status + # ~ accordingly. 
This makes the result status reflect the + # ~ expectation as the result pages post processing does not + # ~ account for this inversion. action['type'] = action_type if action_type == 'result': - if re.match(r'^compile',test['test-type']): + if re.match(r'^compile', test['test-type']): action['type'] = 'compile' - elif re.match(r'^link',test['test-type']): + elif re.match(r'^link', test['test-type']): action['type'] = 'link' - elif re.match(r'^run',test['test-type']): + elif re.match(r'^run', test['test-type']): action['type'] = 'run' - #~ The result sub-part we will add this result to. + # ~ The result sub-part we will add this result to. if action_node.getAttribute('status') == '0': action['result'] = 'succeed' else: @@ -231,120 +238,126 @@ class BuildOutputProcessor(BuildOutputXMLParsing): if action_type == 'result': test['result'] = action['result'] return None - - def x_build_timestamp( self, node ): + + def x_build_timestamp(self, node): ''' The time-stamp goes to the corresponding attribute in the result. ''' self.timestamps.append(self.get_data(node).strip()) return None - - def get_test( self, node, type = None ): + + def get_test(self, node, type=None): ''' Find the test corresponding to an action. For testing targets these are the ones pre-declared in the --dump-test option. For libraries we create a dummy test as needed. ''' - jam_target = self.get_child_data(node,tag='jam-target') + jam_target = self.get_child_data(node, tag='jam-target') base = self.target[jam_target]['name'] target = jam_target while target in self.parent: target = self.parent[target] - #~ print "--- TEST: %s ==> %s" %(jam_target,target) - #~ main-target-type is a precise indicator of what the build target is - #~ originally meant to be. - #main_type = self.get_child_data(self.get_child(node,tag='properties'), + # ~ print "--- TEST: %s ==> %s" %(jam_target,target) + # ~ main-target-type is a precise indicator of what the build target is + # ~ originally meant to be. + # main_type = self.get_child_data(self.get_child(node,tag='properties'), # name='main-target-type',strip=True) main_type = None if main_type == 'LIB' and type: lib = self.target[target]['name'] if not lib in self.test: self.test[lib] = { - 'library' : re.search(r'libs/([^/]+)',lib).group(1), - 'test-name' : os.path.basename(lib), - 'test-type' : 'lib', - 'test-program' : os.path.basename(lib), - 'target' : lib - } + 'library': re.search(r'libs/([^/]+)', lib).group(1), + 'test-name': os.path.basename(lib), + 'test-type': 'lib', + 'test-program': os.path.basename(lib), + 'target': lib + } test = self.test[lib] else: target_name_ = self.target[target]['name'] - if self.target_to_test.has_key(target_name_): + if target_name_ in self.target_to_test: test = self.test[self.target_to_test[target_name_]] else: test = None - return (base,test) - - #~ The command executed for the action. For run actions we omit the command - #~ as it's just noise. - def get_action_command( self, action_node, action_type ): + return (base, test) + + # ~ The command executed for the action. For run actions we omit the command + # ~ as it's just noise. + def get_action_command(self, action_node, action_type): if action_type != 'run': - return self.get_child_data(action_node,tag='command') + return self.get_child_data(action_node, tag='command') else: return '' - - #~ The command output. - def get_action_output( self, action_node, action_type ): - return self.get_child_data(action_node,tag='output',default='') - - #~ Some basic info about the action. 
- def get_action_info( self, action_node, action_type ): + + # ~ The command output. + def get_action_output(self, action_node, action_type): + return self.get_child_data(action_node, tag='output', default='') + + # ~ Some basic info about the action. + def get_action_info(self, action_node, action_type): info = {} - #~ The jam action and target. - info['name'] = self.get_child_data(action_node,tag='name') - info['path'] = self.get_child_data(action_node,tag='path') - #~ The timing of the action. + # ~ The jam action and target. + info['name'] = self.get_child_data(action_node, tag='name') + info['path'] = self.get_child_data(action_node, tag='path') + # ~ The timing of the action. info['time-start'] = action_node.getAttribute('start') info['time-end'] = action_node.getAttribute('end') info['time-user'] = action_node.getAttribute('user') info['time-system'] = action_node.getAttribute('system') - #~ Testing properties. - test_info_prop = self.get_child_data(self.get_child(action_node,tag='properties'),name='test-info') + # ~ Testing properties. + test_info_prop = self.get_child_data(self.get_child( + action_node, tag='properties'), name='test-info') info['always_show_run_output'] = test_info_prop == 'always_show_run_output' - #~ And for compiles some context that may be hidden if using response files. + # ~ And for compiles some context that may be hidden if using response files. if action_type == 'compile': info['define'] = [] - define = self.get_child(self.get_child(action_node,tag='properties'),name='define') + define = self.get_child(self.get_child( + action_node, tag='properties'), name='define') while define: - info['define'].append(self.get_data(define,strip=True)) - define = self.get_sibling(define.nextSibling,name='define') + info['define'].append(self.get_data(define, strip=True)) + define = self.get_sibling(define.nextSibling, name='define') return info + class BuildConsoleSummaryReport(object): - + HEADER = '\033[35m\033[1m' INFO = '\033[34m' OK = '\033[32m' WARNING = '\033[33m' FAIL = '\033[31m' ENDC = '\033[0m' - + def __init__(self, bop, opt): self.bop = bop - + def generate(self): self.summary_info = { - 'total' : 0, - 'success' : 0, - 'failed' : [], - } - self.header_print("======================================================================") + 'total': 0, + 'success': 0, + 'failed': [], + } + self.header_print( + "======================================================================") self.print_test_log() self.print_summary() - self.header_print("======================================================================") - + self.header_print( + "======================================================================") + @property def failed(self): return len(self.summary_info['failed']) > 0 - + def print_test_log(self): self.header_print("Tests run..") - self.header_print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~") + self.header_print( + "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~") for k in sorted(self.bop.test.keys()): test = self.bop.test[k] if len(test['actions']) > 0: self.summary_info['total'] += 1 - ##print ">>>> {0}".format(test['test-name']) + # print ">>>> {0}".format(test['test-name']) if 'result' in test: succeed = test['result'] == 'succeed' else: @@ -354,12 +367,12 @@ class BuildConsoleSummaryReport(object): else: self.summary_info['failed'].append(test) if succeed: - self.ok_print("[PASS] {0}",k) + self.ok_print("[PASS] {0}", k) else: - self.fail_print("[FAIL] {0}",k) + self.fail_print("[FAIL] {0}", 
k) for action in test['actions']: self.print_action(succeed, action) - + def print_action(self, test_succeed, action): ''' Print the detailed info of failed or always print tests. @@ -370,50 +383,54 @@ class BuildConsoleSummaryReport(object): if output != "": p = self.fail_print if action['result'] == 'fail' else self.p_print self.info_print("") - self.info_print("({0}) {1}",action['info']['name'],action['info']['path']) + self.info_print( + "({0}) {1}", action['info']['name'], action['info']['path']) p("") - p("{0}",action['command'].strip()) + p("{0}", action['command'].strip()) p("") for line in output.splitlines(): - p("{0}",line.encode('utf-8')) - + p("{0}", line.encode('utf-8')) + def print_summary(self): self.header_print("") self.header_print("Testing summary..") - self.header_print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~") - self.p_print("Total: {0}",self.summary_info['total']) - self.p_print("Success: {0}",self.summary_info['success']) + self.header_print( + "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~") + self.p_print("Total: {0}", self.summary_info['total']) + self.p_print("Success: {0}", self.summary_info['success']) if self.failed: - self.fail_print("Failed: {0}",len(self.summary_info['failed'])) + self.fail_print("Failed: {0}", len(self.summary_info['failed'])) for test in self.summary_info['failed']: - self.fail_print(" {0}/{1}",test['library'],test['test-name']) - + self.fail_print( + " {0}/{1}", test['library'], test['test-name']) + def p_print(self, format, *args, **kargs): - print format.format(*args,**kargs) - + print(format.format(*args, **kargs)) + def info_print(self, format, *args, **kargs): - print self.INFO+format.format(*args,**kargs)+self.ENDC - + print(self.INFO+format.format(*args, **kargs)+self.ENDC) + def header_print(self, format, *args, **kargs): - print self.HEADER+format.format(*args,**kargs)+self.ENDC - + print(self.HEADER+format.format(*args, **kargs)+self.ENDC) + def ok_print(self, format, *args, **kargs): - print self.OK+format.format(*args,**kargs)+self.ENDC - + print(self.OK+format.format(*args, **kargs)+self.ENDC) + def warn_print(self, format, *args, **kargs): - print self.WARNING+format.format(*args,**kargs)+self.ENDC - + print(self.WARNING+format.format(*args, **kargs)+self.ENDC) + def fail_print(self, format, *args, **kargs): - print self.FAIL+format.format(*args,**kargs)+self.ENDC + print(self.FAIL+format.format(*args, **kargs)+self.ENDC) + class Main(object): - - def __init__(self,args=None): + + def __init__(self, args=None): op = optparse.OptionParser( usage="%prog [options] input+") - op.add_option( '--output', - help="type of output to generate" ) - ( opt, inputs ) = op.parse_args(args) + op.add_option('--output', + help="type of output to generate") + (opt, inputs) = op.parse_args(args) bop = BuildOutputProcessor(inputs) output = None if opt.output == 'console': @@ -422,6 +439,7 @@ class Main(object): output.generate() self.failed = output.failed + if __name__ == '__main__': m = Main() if m.failed: diff --git a/tools/ci/common.py b/tools/ci/common.py index 7536276..81b4afb 100644 --- a/tools/ci/common.py +++ b/tools/ci/common.py @@ -1,6 +1,4 @@ -#!/usr/bin/env python - -# Copyright Rene Rivera 2016 +# Copyright Rene Rivera 2016-2019 # # Distributed under the Boost Software License, Version 1.0. 
# (See accompanying file LICENSE_1_0.txt or copy at @@ -18,433 +16,472 @@ import shutil import threading toolset_info = { - 'clang-3.4' : { - 'ppa' : ["ppa:h-rayflood/llvm"], - 'package' : 'clang-3.4', - 'command' : 'clang++-3.4', - 'toolset' : 'clang', - 'version' : '' - }, - 'clang-3.5' : { - 'ppa' : ["ppa:h-rayflood/llvm"], - 'package' : 'clang-3.5', - 'command' : 'clang++-3.5', - 'toolset' : 'clang', - 'version' : '' - }, - 'clang-3.6' : { - 'ppa' : ["ppa:h-rayflood/llvm"], - 'package' : 'clang-3.6', - 'command' : 'clang++-3.6', - 'toolset' : 'clang', - 'version' : '' - }, - 'clang-3.7' : { - 'deb' : ["http://apt.llvm.org/trusty/","llvm-toolchain-trusty-3.7","main"], - 'apt-key' : ['http://apt.llvm.org/llvm-snapshot.gpg.key'], - 'package' : 'clang-3.7', - 'command' : 'clang++-3.7', - 'toolset' : 'clang', - 'version' : '' - }, - 'clang-3.8' : { - 'deb' : ["http://apt.llvm.org/trusty/","llvm-toolchain-trusty-3.8","main"], - 'apt-key' : ['http://apt.llvm.org/llvm-snapshot.gpg.key'], - 'package' : 'clang-3.8', - 'command' : 'clang++-3.8', - 'toolset' : 'clang', - 'version' : '' - }, - 'clang-3.9' : { - 'deb' : ["http://apt.llvm.org/trusty/","llvm-toolchain-trusty-3.9","main"], - 'apt-key' : ['http://apt.llvm.org/llvm-snapshot.gpg.key'], - 'package' : 'clang-3.9', - 'command' : 'clang++-3.9', - 'toolset' : 'clang', - 'version' : '' - }, - 'clang-4.0' : { - 'deb' : ["http://apt.llvm.org/trusty/","llvm-toolchain-trusty-4.0","main"], - 'apt-key' : ['http://apt.llvm.org/llvm-snapshot.gpg.key'], - 'package' : 'clang-4.0', - 'command' : 'clang++-4.0', - 'toolset' : 'clang', - 'version' : '' - }, - 'clang-5.0' : { - 'deb' : ["http://apt.llvm.org/trusty/","llvm-toolchain-trusty-5.0","main"], - 'apt-key' : ['http://apt.llvm.org/llvm-snapshot.gpg.key'], - 'package' : 'clang-5.0', - 'command' : 'clang++-5.0', - 'toolset' : 'clang', - 'version' : '' - }, - 'clang-6.0' : { - 'deb' : ["http://apt.llvm.org/trusty/","llvm-toolchain-trusty-6.0","main"], - 'apt-key' : ['http://apt.llvm.org/llvm-snapshot.gpg.key'], - 'package' : 'clang-6.0', - 'command' : 'clang++-6.0', - 'toolset' : 'clang', - 'version' : '' - }, - 'gcc-4.7' : { - 'ppa' : ["ppa:ubuntu-toolchain-r/test"], - 'package' : 'g++-4.7', - 'command' : 'g++-4.7', - 'toolset' : 'gcc', - 'version' : '' - }, - 'gcc-4.8' : { - 'bin' : 'gcc-4.8', - 'ppa' : ["ppa:ubuntu-toolchain-r/test"], - 'package' : 'g++-4.8', - 'command' : 'g++-4.8', - 'toolset' : 'gcc', - 'version' : '' - }, - 'gcc-4.9' : { - 'ppa' : ["ppa:ubuntu-toolchain-r/test"], - 'package' : 'g++-4.9', - 'command' : 'g++-4.9', - 'toolset' : 'gcc', - 'version' : '' - }, - 'gcc-5.1' : { - 'ppa' : ["ppa:ubuntu-toolchain-r/test"], - 'package' : 'g++-5', - 'command' : 'g++-5', - 'toolset' : 'gcc', - 'version' : '' - }, - 'gcc-5' : { - 'ppa' : ["ppa:ubuntu-toolchain-r/test"], - 'package' : 'g++-5', - 'command' : 'g++-5', - 'toolset' : 'gcc', - 'version' : '' - }, - 'gcc-6' : { - 'ppa' : ["ppa:ubuntu-toolchain-r/test"], - 'package' : 'g++-6', - 'command' : 'g++-6', - 'toolset' : 'gcc', - 'version' : '' - }, - 'gcc-7' : { - 'ppa' : ["ppa:ubuntu-toolchain-r/test"], - 'package' : 'g++-7', - 'command' : 'g++-7', - 'toolset' : 'gcc', - 'version' : '' - }, - 'gcc-8' : { - 'ppa' : ["ppa:ubuntu-toolchain-r/test"], - 'package' : 'g++-8', - 'command' : 'g++-8', - 'toolset' : 'gcc', - 'version' : '' - }, - 'mingw-5' : { - 'toolset' : 'gcc', - 'command' : 'C:\\\\MinGW\\\\bin\\\\g++.exe', - 'version' : '' - }, - 'mingw64-6' : { - 'toolset' : 'gcc', - 'command' : 
'C:\\\\mingw-w64\\\\x86_64-6.3.0-posix-seh-rt_v5-rev1\\\\mingw64\\\\bin\\\\g++.exe', - 'version' : '' - }, - 'vs-2008' : { - 'toolset' : 'msvc', - 'command' : '', - 'version' : '9.0' - }, - 'vs-2010' : { - 'toolset' : 'msvc', - 'command' : '', - 'version' : '10.0' - }, - 'vs-2012' : { - 'toolset' : 'msvc', - 'command' : '', - 'version' : '11.0' - }, - 'vs-2013' : { - 'toolset' : 'msvc', - 'command' : '', - 'version' : '12.0' - }, - 'vs-2015' : { - 'toolset' : 'msvc', - 'command' : '', - 'version' : '14.0' - }, - 'vs-2017' : { - 'toolset' : 'msvc', - 'command' : '', - 'version' : '14.1' - }, - 'xcode-6.1' : { - 'command' : 'clang++', - 'toolset' : 'clang', - 'version' : '' - }, - 'xcode-6.2' : { - 'command' : 'clang++', - 'toolset' : 'clang', - 'version' : '' - }, - 'xcode-6.3' : { - 'command' : 'clang++', - 'toolset' : 'clang', - 'version' : '' - }, - 'xcode-6.4' : { - 'command' : 'clang++', - 'toolset' : 'clang', - 'version' : '' - }, - 'xcode-7.0' : { - 'command' : 'clang++', - 'toolset' : 'clang', - 'version' : '' - }, - 'xcode-7.1' : { - 'command' : 'clang++', - 'toolset' : 'clang', - 'version' : '' - }, - 'xcode-7.2' : { - 'command' : 'clang++', - 'toolset' : 'clang', - 'version' : '' - }, - 'xcode-7.3' : { - 'command' : 'clang++', - 'toolset' : 'clang', - 'version' : '' - }, - 'xcode-8.0' : { - 'command' : 'clang++', - 'toolset' : 'clang', - 'version' : '' - }, - 'xcode-8.1' : { - 'command' : 'clang++', - 'toolset' : 'clang', - 'version' : '' - }, - 'xcode-8.2' : { - 'command' : 'clang++', - 'toolset' : 'clang', - 'version' : '' - }, - 'xcode-8.3' : { - 'command' : 'clang++', - 'toolset' : 'clang', - 'version' : '' - }, - 'xcode-9.0' : { - 'command' : 'clang++', - 'toolset' : 'clang', - 'version' : '' - }, - 'xcode-9.1' : { - 'command' : 'clang++', - 'toolset' : 'clang', - 'version' : '' - }, - 'xcode-9.2' : { - 'command' : 'clang++', - 'toolset' : 'clang', - 'version' : '' - }, - 'xcode-9.3' : { - 'command' : 'clang++', - 'toolset' : 'clang', - 'version' : '' - }, - 'xcode-9.4' : { - 'command' : 'clang++', - 'toolset' : 'clang', - 'version' : '' - }, - 'xcode-10.0' : { - 'command' : 'clang++', - 'toolset' : 'clang', - 'version' : '' - }, - } + 'clang-3.4': { + 'ppa': ["ppa:ubuntu-toolchain-r/test"], + 'package': 'clang-3.4', + 'command': 'clang++-3.4', + 'toolset': 'clang', + 'version': '' + }, + 'clang-3.5': { + 'ppa': ["ppa:ubuntu-toolchain-r/test"], + 'package': 'clang-3.5', + 'command': 'clang++-3.5', + 'toolset': 'clang', + 'version': '' + }, + 'clang-3.6': { + 'ppa': ["ppa:ubuntu-toolchain-r/test"], + 'package': 'clang-3.6', + 'command': 'clang++-3.6', + 'toolset': 'clang', + 'version': '' + }, + 'clang-3.7': { + 'ppa': ["ppa:ubuntu-toolchain-r/test"], + 'package': 'clang-3.7', + 'command': 'clang++-3.7', + 'toolset': 'clang', + 'version': '' + }, + 'clang-3.8': { + 'ppa': ["ppa:ubuntu-toolchain-r/test"], + 'package': 'clang-3.8', + 'command': 'clang++-3.8', + 'toolset': 'clang', + 'version': '' + }, + 'clang-3.9': { + 'ppa': ["ppa:ubuntu-toolchain-r/test"], + 'package': 'clang-3.9', + 'command': 'clang++-3.9', + 'toolset': 'clang', + 'version': '' + }, + 'clang-4.0': { + 'deb': ["http://apt.llvm.org/xenial/", "llvm-toolchain-xenial-4.0", "main"], + 'apt-key': ['http://apt.llvm.org/llvm-snapshot.gpg.key'], + 'package': 'clang-4.0', + 'command': 'clang++-4.0', + 'toolset': 'clang', + 'version': '' + }, + 'clang-5.0': { + 'deb': ["http://apt.llvm.org/xenial/", "llvm-toolchain-xenial-5.0", "main"], + 'apt-key': ['http://apt.llvm.org/llvm-snapshot.gpg.key'], + 'package': 
'clang-5.0', + 'command': 'clang++-5.0', + 'toolset': 'clang', + 'version': '' + }, + 'clang-6.0': { + 'deb': ["http://apt.llvm.org/xenial/", "llvm-toolchain-xenial-6.0", "main"], + 'apt-key': ['http://apt.llvm.org/llvm-snapshot.gpg.key'], + 'package': 'clang-6.0', + 'command': 'clang++-6.0', + 'toolset': 'clang', + 'version': '' + }, + 'clang-7': { + 'deb': ["http://apt.llvm.org/xenial/", "llvm-toolchain-xenial-7", "main"], + 'apt-key': ['http://apt.llvm.org/llvm-snapshot.gpg.key'], + 'package': 'clang-6.0', + 'command': 'clang++-6.0', + 'toolset': 'clang', + 'version': '' + }, + 'clang-8': { + 'deb': ["http://apt.llvm.org/xenial/", "llvm-toolchain-xenial-8", "main"], + 'apt-key': ['http://apt.llvm.org/llvm-snapshot.gpg.key'], + 'package': 'clang-6.0', + 'command': 'clang++-6.0', + 'toolset': 'clang', + 'version': '' + }, + 'gcc-4.7': { + 'ppa': ["ppa:ubuntu-toolchain-r/test"], + 'package': 'g++-4.7', + 'command': 'g++-4.7', + 'toolset': 'gcc', + 'version': '' + }, + 'gcc-4.8': { + 'bin': 'gcc-4.8', + 'ppa': ["ppa:ubuntu-toolchain-r/test"], + 'package': 'g++-4.8', + 'command': 'g++-4.8', + 'toolset': 'gcc', + 'version': '' + }, + 'gcc-4.9': { + 'ppa': ["ppa:ubuntu-toolchain-r/test"], + 'package': 'g++-4.9', + 'command': 'g++-4.9', + 'toolset': 'gcc', + 'version': '' + }, + 'gcc-5.1': { + 'ppa': ["ppa:ubuntu-toolchain-r/test"], + 'package': 'g++-5', + 'command': 'g++-5', + 'toolset': 'gcc', + 'version': '' + }, + 'gcc-5': { + 'ppa': ["ppa:ubuntu-toolchain-r/test"], + 'package': 'g++-5', + 'command': 'g++-5', + 'toolset': 'gcc', + 'version': '' + }, + 'gcc-6': { + 'ppa': ["ppa:ubuntu-toolchain-r/test"], + 'package': 'g++-6', + 'command': 'g++-6', + 'toolset': 'gcc', + 'version': '' + }, + 'gcc-7': { + 'ppa': ["ppa:ubuntu-toolchain-r/test"], + 'package': 'g++-7', + 'command': 'g++-7', + 'toolset': 'gcc', + 'version': '' + }, + 'gcc-8': { + 'ppa': ["ppa:ubuntu-toolchain-r/test"], + 'package': 'g++-8', + 'command': 'g++-8', + 'toolset': 'gcc', + 'version': '' + }, + 'mingw-8': { + 'toolset': 'gcc', + 'command': 'g++.exe', + 'version': '' + }, + 'vs-2008': { + 'toolset': 'msvc', + 'command': '', + 'version': '9.0' + }, + 'vs-2010': { + 'toolset': 'msvc', + 'command': '', + 'version': '10.0' + }, + 'vs-2012': { + 'toolset': 'msvc', + 'command': '', + 'version': '11.0' + }, + 'vs-2013': { + 'toolset': 'msvc', + 'command': '', + 'version': '12.0' + }, + 'vs-2015': { + 'toolset': 'msvc', + 'command': '', + 'version': '14.0' + }, + 'vs-2017': { + 'toolset': 'msvc', + 'command': '', + 'version': '14.1' + }, + 'vs-2019': { + 'toolset': 'msvc', + 'command': '', + 'version': '14.2' + }, + 'xcode-6.1': { + 'command': 'clang++', + 'toolset': 'clang', + 'version': '' + }, + 'xcode-6.2': { + 'command': 'clang++', + 'toolset': 'clang', + 'version': '' + }, + 'xcode-6.3': { + 'command': 'clang++', + 'toolset': 'clang', + 'version': '' + }, + 'xcode-6.4': { + 'command': 'clang++', + 'toolset': 'clang', + 'version': '' + }, + 'xcode-7.0': { + 'command': 'clang++', + 'toolset': 'clang', + 'version': '' + }, + 'xcode-7.1': { + 'command': 'clang++', + 'toolset': 'clang', + 'version': '' + }, + 'xcode-7.2': { + 'command': 'clang++', + 'toolset': 'clang', + 'version': '' + }, + 'xcode-7.3': { + 'command': 'clang++', + 'toolset': 'clang', + 'version': '' + }, + 'xcode-8.0': { + 'command': 'clang++', + 'toolset': 'clang', + 'version': '' + }, + 'xcode-8.1': { + 'command': 'clang++', + 'toolset': 'clang', + 'version': '' + }, + 'xcode-8.2': { + 'command': 'clang++', + 'toolset': 'clang', + 'version': '' + }, + 
'xcode-8.3': { + 'command': 'clang++', + 'toolset': 'clang', + 'version': '' + }, + 'xcode-9.0': { + 'command': 'clang++', + 'toolset': 'clang', + 'version': '' + }, + 'xcode-9.0.1': { + 'command': 'clang++', + 'toolset': 'clang', + 'version': '' + }, + 'xcode-9.1': { + 'command': 'clang++', + 'toolset': 'clang', + 'version': '' + }, + 'xcode-9.2': { + 'command': 'clang++', + 'toolset': 'clang', + 'version': '' + }, + 'xcode-9.3': { + 'command': 'clang++', + 'toolset': 'clang', + 'version': '' + }, + 'xcode-9.3.1': { + 'command': 'clang++', + 'toolset': 'clang', + 'version': '' + }, + 'xcode-9.4': { + 'command': 'clang++', + 'toolset': 'clang', + 'version': '' + }, + 'xcode-9.4.1': { + 'command': 'clang++', + 'toolset': 'clang', + 'version': '' + }, + 'xcode-10.0': { + 'command': 'clang++', + 'toolset': 'clang', + 'version': '' + }, + 'xcode-10.1': { + 'command': 'clang++', + 'toolset': 'clang', + 'version': '' + }, +} + class SystemCallError(Exception): def __init__(self, command, result): self.command = command self.result = result + def __str__(self, *args, **kwargs): - return "'%s' ==> %s"%("' '".join(self.command), self.result) + return "'%s' ==> %s" % ("' '".join(self.command), self.result) + class utils: - + call_stats = [] - + @staticmethod def call(*command, **kargs): - utils.log( "%s> '%s'"%(os.getcwd(), "' '".join(command)) ) + utils.log("%s> '%s'" % (os.getcwd(), "' '".join(command))) t = time.time() result = subprocess.call(command, **kargs) t = time.time()-t if result != 0: - print "Failed: '%s' ERROR = %s"%("' '".join(command), result) - utils.call_stats.append((t,os.getcwd(),command,result)) - utils.log( "%s> '%s' execution time %s seconds"%(os.getcwd(), "' '".join(command), t) ) + print("Failed: '%s' ERROR = %s" % ("' '".join(command), result)) + utils.call_stats.append((t, os.getcwd(), command, result)) + utils.log("%s> '%s' execution time %s seconds" % + (os.getcwd(), "' '".join(command), t)) return result - + @staticmethod def print_call_stats(): - utils.log("================================================================================") + utils.log( + "================================================================================") for j in sorted(utils.call_stats, reverse=True): utils.log("{:>12.4f}\t{}> {} ==> {}".format(*j)) - utils.log("================================================================================") - + utils.log( + "================================================================================") + @staticmethod def check_call(*command, **kargs): cwd = os.getcwd() result = utils.call(*command, **kargs) if result != 0: raise(SystemCallError([cwd].extend(command), result)) - + @staticmethod - def makedirs( path ): - if not os.path.exists( path ): - os.makedirs( path ) - + def makedirs(path): + if not os.path.exists(path): + os.makedirs(path) + @staticmethod def log_level(): frames = inspect.stack() level = 0 - for i in frames[ 3: ]: - if i[0].f_locals.has_key( '__log__' ): - level = level + i[0].f_locals[ '__log__' ] + for i in frames[3:]: + if '__log__' in i[0].f_locals: + level = level + i[0].f_locals['__log__'] return level - + @staticmethod - def log( message ): + def log(message): + sys.stdout.flush() + sys.stdout.write('# ' + ' ' * utils.log_level() + message + '\n') sys.stdout.flush() - sys.stderr.flush() - sys.stderr.write( '# ' + ' ' * utils.log_level() + message + '\n' ) - sys.stderr.flush() @staticmethod def rmtree(path): - if os.path.exists( path ): + if os.path.exists(path): #~ shutil.rmtree( unicode( path ) ) if sys.platform == 
'win32': - os.system( 'del /f /s /q "%s" >nul 2>&1' % path ) - shutil.rmtree( unicode( path ) ) + os.system('del /f /s /q "%s" >nul 2>&1' % path) + shutil.rmtree(unicode(path)) else: - os.system( 'rm -f -r "%s"' % path ) + os.system('rm -f -r "%s"' % path) @staticmethod - def retry( f, max_attempts=5, sleep_secs=10 ): - for attempts in range( max_attempts, -1, -1 ): + def retry(f, max_attempts=5, sleep_secs=10): + for attempts in range(max_attempts, -1, -1): try: return f() - except Exception, msg: - utils.log( '%s failed with message "%s"' % ( f.__name__, msg ) ) + except Exception as msg: + utils.log('%s failed with message "%s"' % (f.__name__, msg)) if attempts == 0: - utils.log( 'Giving up.' ) + utils.log('Giving up.') raise - utils.log( 'Retrying (%d more attempts).' % attempts ) - time.sleep( sleep_secs ) + utils.log('Retrying (%d more attempts).' % attempts) + time.sleep(sleep_secs) @staticmethod - def web_get( source_url, destination_file, proxy = None ): + def web_get(source_url, destination_file, proxy=None): import urllib proxies = None if proxy is not None: proxies = { - 'https' : proxy, - 'http' : proxy - } + 'https': proxy, + 'http': proxy + } - src = urllib.urlopen( source_url, proxies = proxies ) + src = urllib.urlopen(source_url, proxies=proxies) - f = open( destination_file, 'wb' ) + f = open(destination_file, 'wb') while True: - data = src.read( 16*1024 ) - if len( data ) == 0: break - f.write( data ) + data = src.read(16*1024) + if len(data) == 0: + break + f.write(data) f.close() src.close() @staticmethod - def unpack_archive( archive_path ): - utils.log( 'Unpacking archive ("%s")...' % archive_path ) + def unpack_archive(archive_path): + utils.log('Unpacking archive ("%s")...' % archive_path) - archive_name = os.path.basename( archive_path ) - extension = archive_name[ archive_name.find( '.' 
) : ] + archive_name = os.path.basename(archive_path) + extension = archive_name[archive_name.find('.'):] - if extension in ( ".tar.gz", ".tar.bz2" ): + if extension in (".tar.gz", ".tar.bz2"): import tarfile import stat - mode = os.path.splitext( extension )[1][1:] - tar = tarfile.open( archive_path, 'r:%s' % mode ) + mode = os.path.splitext(extension)[1][1:] + tar = tarfile.open(archive_path, 'r:%s' % mode) for tarinfo in tar: - tar.extract( tarinfo ) + tar.extract(tarinfo) if sys.platform == 'win32' and not tarinfo.isdir(): # workaround what appears to be a Win32-specific bug in 'tarfile' # (modification times for extracted files are not set properly) - f = os.path.join( os.curdir, tarinfo.name ) - os.chmod( f, stat.S_IWRITE ) - os.utime( f, ( tarinfo.mtime, tarinfo.mtime ) ) + f = os.path.join(os.curdir, tarinfo.name) + os.chmod(f, stat.S_IWRITE) + os.utime(f, (tarinfo.mtime, tarinfo.mtime)) tar.close() - elif extension in ( ".zip" ): + elif extension in (".zip"): import zipfile - z = zipfile.ZipFile( archive_path, 'r', zipfile.ZIP_DEFLATED ) + z = zipfile.ZipFile(archive_path, 'r', zipfile.ZIP_DEFLATED) for f in z.infolist(): - destination_file_path = os.path.join( os.curdir, f.filename ) - if destination_file_path[-1] == "/": # directory - if not os.path.exists( destination_file_path ): - os.makedirs( destination_file_path ) - else: # file - result = open( destination_file_path, 'wb' ) - result.write( z.read( f.filename ) ) + destination_file_path = os.path.join(os.curdir, f.filename) + if destination_file_path[-1] == "/": # directory + if not os.path.exists(destination_file_path): + os.makedirs(destination_file_path) + else: # file + result = open(destination_file_path, 'wb') + result.write(z.read(f.filename)) result.close() z.close() else: raise 'Do not know how to unpack archives with extension \"%s\"' % extension - + @staticmethod def make_file(filename, *text): - text = string.join( text, '\n' ) - with codecs.open( filename, 'w', 'utf-8' ) as f: - f.write( text ) - + text = '\n'.join(text) + with codecs.open(filename, 'w', 'utf-8') as f: + f.write(text) + @staticmethod def append_file(filename, *text): - with codecs.open( filename, 'a', 'utf-8' ) as f: - f.write( string.join( text, '\n' ) ) - + with codecs.open(filename, 'a', 'utf-8') as f: + f.write('\n'.join(text)) + @staticmethod def mem_info(): if sys.platform == "darwin": - utils.call("top","-l","1","-s","0","-n","0") + utils.call("top", "-l", "1", "-s", "0", "-n", "0") elif sys.platform.startswith("linux"): - utils.call("free","-m","-l") - + utils.call("free", "-m", "-l") + @staticmethod def query_boost_version(boost_root): ''' Read in the Boost version from a given boost_root. 
''' boost_version = None - if os.path.exists(os.path.join(boost_root,'Jamroot')): - with codecs.open(os.path.join(boost_root,'Jamroot'), 'r', 'utf-8') as f: + if os.path.exists(os.path.join(boost_root, 'Jamroot')): + with codecs.open(os.path.join(boost_root, 'Jamroot'), 'r', 'utf-8') as f: for line in f.readlines(): parts = line.split() if len(parts) >= 5 and parts[1] == 'BOOST_VERSION': @@ -453,9 +490,9 @@ class utils: if not boost_version: boost_version = 'default' return boost_version - + @staticmethod - def git_clone(owner, repo, branch, commit = None, repo_dir = None, submodules = False, url_format = "https://github.com/%(owner)s/%(repo)s.git"): + def git_clone(owner, repo, branch, commit=None, repo_dir=None, submodules=False, url_format="https://github.com/%(owner)s/%(repo)s.git"): ''' This clone mimicks the way Travis-CI clones a project's repo. So far Travis-CI is the most limiting in the sense of only fetching partial @@ -464,56 +501,60 @@ class utils: if not repo_dir: repo_dir = os.path.join(os.getcwd(), owner+','+repo) utils.makedirs(os.path.dirname(repo_dir)) - if not os.path.exists(os.path.join(repo_dir,'.git')): - utils.check_call("git","clone", - "--depth=1", - "--branch=%s"%(branch), - url_format%{'owner':owner,'repo':repo}, - repo_dir) + if not os.path.exists(os.path.join(repo_dir, '.git')): + utils.check_call("git", "clone", + "--depth=1", + "--branch=%s" % (branch), + url_format % {'owner': owner, 'repo': repo}, + repo_dir) os.chdir(repo_dir) else: os.chdir(repo_dir) - utils.check_call("git","pull", - # "--depth=1", # Can't do depth as we get merge errors. - "--quiet","--no-recurse-submodules") + utils.check_call("git", "pull", + # "--depth=1", # Can't do depth as we get merge errors. + "--quiet", "--no-recurse-submodules") if commit: - utils.check_call("git","checkout","-qf",commit) - if os.path.exists(os.path.join('.git','modules')): + utils.check_call("git", "checkout", "-qf", commit) + if os.path.exists(os.path.join('.git', 'modules')): if sys.platform == 'win32': - utils.check_call('dir',os.path.join('.git','modules')) + utils.check_call('dir', os.path.join('.git', 'modules')) else: - utils.check_call('ls','-la',os.path.join('.git','modules')) + utils.check_call('ls', '-la', os.path.join('.git', 'modules')) if submodules: - utils.check_call("git","submodule","--quiet","update", - "--quiet","--init","--recursive", - ) - utils.check_call("git","submodule","--quiet","foreach","git","fetch") + utils.check_call("git", "submodule", "--quiet", "update", + "--quiet", "--init", "--recursive", + ) + utils.check_call("git", "submodule", "--quiet", + "foreach", "git", "fetch") return repo_dir + class parallel_call(threading.Thread): ''' Runs a synchronous command in a thread waiting for it to complete. ''' - + def __init__(self, *command, **kargs): - super(parallel_call,self).__init__() + super(parallel_call, self).__init__() self.command = command self.command_kargs = kargs self.start() - + def run(self): self.result = utils.call(*self.command, **self.command_kargs) - + def join(self): - super(parallel_call,self).join() + super(parallel_call, self).join() if self.result != 0: raise(SystemCallError(self.command, self.result)) -def set_arg(args, k, v = None): + +def set_arg(args, k, v=None): if not args.get(k): args[k] = v return args[k] + class script_common(object): ''' Main script to run continuous integration. 
@@ -525,38 +566,38 @@ class script_common(object): opt = optparse.OptionParser( usage="%prog [options] [commands]") - #~ Debug Options: - opt.add_option( '--debug-level', - help="debugging level; controls the amount of debugging output printed", - type='int' ) - opt.add_option( '-j', - help="maximum number of parallel jobs to use for building with b2", - type='int', dest='jobs') + # ~ Debug Options: + opt.add_option('--debug-level', + help="debugging level; controls the amount of debugging output printed", + type='int') + opt.add_option('-j', + help="maximum number of parallel jobs to use for building with b2", + type='int', dest='jobs') opt.add_option('--branch') opt.add_option('--commit') - kargs = self.init(opt,kargs) + kargs = self.init(opt, kargs) kargs = self.ci.init(opt, kargs) - set_arg(kargs,'debug_level',0) - set_arg(kargs,'jobs',2) - set_arg(kargs,'branch',None) - set_arg(kargs,'commit',None) - set_arg(kargs,'repo',None) - set_arg(kargs,'repo_dir',None) - set_arg(kargs,'actions',None) - set_arg(kargs,'pull_request', None) + set_arg(kargs, 'debug_level', 0) + set_arg(kargs, 'jobs', 2) + set_arg(kargs, 'branch', None) + set_arg(kargs, 'commit', None) + set_arg(kargs, 'repo', None) + set_arg(kargs, 'repo_dir', None) + set_arg(kargs, 'actions', None) + set_arg(kargs, 'pull_request', None) #~ Defaults - for (k,v) in kargs.iteritems(): - setattr(self,k,v) - ( _opt_, self.actions ) = opt.parse_args(None,self) + for (k, v) in kargs.items(): + setattr(self, k, v) + (_opt_, self.actions) = opt.parse_args(None, self) if not self.actions or self.actions == []: - self.actions = kargs.get('actions',None) + self.actions = kargs.get('actions', None) if not self.actions or self.actions == []: - self.actions = [ 'info' ] + self.actions = ['info'] if not self.repo_dir: self.repo_dir = os.getcwd() self.build_dir = os.path.join(os.path.dirname(self.repo_dir), "build") - + # API keys. self.bintray_key = os.getenv('BINTRAY_KEY') @@ -565,32 +606,34 @@ class script_common(object): self.command_info() self.main() utils.print_call_stats() - except: + except Exception as e: utils.print_call_stats() - raise - + # raise + utils.log(str(e)) + exit(1) + def init(self, opt, kargs): return kargs - + def start(self): pass def main(self): for action in self.actions: - action_m = "command_"+action.replace('-','_') + action_m = "command_"+action.replace('-', '_') ci_command = getattr(self.ci, action_m, None) ci_script = getattr(self, action_m, None) if ci_command or ci_script: - utils.log( "### %s.."%(action) ) + utils.log("### %s.." % (action)) if os.path.exists(self.repo_dir): os.chdir(self.repo_dir) if ci_command: ci_command() elif ci_script: ci_script() - - def b2( self, *args, **kargs ): - cmd = ['b2','--debug-configuration', '-j%s'%(self.jobs)] + + def b2(self, *args, **kargs): + cmd = ['b2', '--debug-configuration', '-j%s' % (self.jobs)] cmd.extend(args) if 'toolset' in kargs: @@ -602,18 +645,18 @@ class script_common(object): return utils.check_call(*cmd) # Common test commands in the order they should be executed.. 
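# Illustrative sketch, not part of the patch: script_common.main above turns
# each requested action name into a "command_<action>" method lookup, first
# on the CI adapter and then on the script itself. A condensed, runnable
# sketch of that dispatch pattern (class and method names are illustrative):
class DispatchSketch(object):
    def __init__(self, actions):
        self.actions = actions or ['info']

    def command_info(self):
        print('info')

    def command_before_build(self):
        print('before build')

    def run(self):
        for action in self.actions:
            # A '-' in the action name maps to '_' in the method name,
            # e.g. "before-build" -> command_before_build.
            handler = getattr(
                self, 'command_' + action.replace('-', '_'), None)
            if handler:
                handler()


if __name__ == '__main__':
    DispatchSketch(['info', 'before-build']).run()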
- + def command_info(self): pass - + def command_install(self): utils.makedirs(self.build_dir) os.chdir(self.build_dir) - + def command_install_toolset(self, toolset): - if self.ci and hasattr(self.ci,'install_toolset'): + if self.ci and hasattr(self.ci, 'install_toolset'): self.ci.install_toolset(toolset) - + def command_before_build(self): pass @@ -626,22 +669,23 @@ class script_common(object): def command_after_success(self): pass + class ci_cli(object): ''' This version of the script provides a way to do manual building. It sets up additional environment and adds fetching of the git repos that would normally be done by the CI system. - + The common way to use this variant is to invoke something like: - + mkdir ci cd ci python path-to/library_test.py --branch=develop [--repo=mylib] ... - + Status: In working order. ''' - - def __init__(self,script): + + def __init__(self, script): if sys.platform == 'darwin': # Requirements for running on OSX: # https://www.stack.nl/~dimitri/doxygen/download.html#srcbin @@ -652,7 +696,7 @@ class ci_cli(object): self.script = script self.repo_dir = os.getcwd() self.exit_result = 0 - + def init(self, opt, kargs): kargs['actions'] = [ # 'clone', @@ -661,36 +705,37 @@ class ci_cli(object): 'build', 'before_cache', 'finish' - ] + ] return kargs - + def finish(self, result): self.exit_result = result - + def command_finish(self): exit(self.exit_result) + class ci_travis(object): ''' This variant build releases in the context of the Travis-CI service. ''' - - def __init__(self,script): + + def __init__(self, script): self.script = script - + def init(self, opt, kargs): - set_arg(kargs,'repo_dir', os.getenv("TRAVIS_BUILD_DIR")) - set_arg(kargs,'branch', os.getenv("TRAVIS_BRANCH")) - set_arg(kargs,'commit', os.getenv("TRAVIS_COMMIT")) - set_arg(kargs,'repo', os.getenv("TRAVIS_REPO_SLUG").split("/")[1]) - set_arg(kargs,'pull_request', - os.getenv('TRAVIS_PULL_REQUEST') \ + set_arg(kargs, 'repo_dir', os.getenv("TRAVIS_BUILD_DIR")) + set_arg(kargs, 'branch', os.getenv("TRAVIS_BRANCH")) + set_arg(kargs, 'commit', os.getenv("TRAVIS_COMMIT")) + set_arg(kargs, 'repo', os.getenv("TRAVIS_REPO_SLUG").split("/")[1]) + set_arg(kargs, 'pull_request', + os.getenv('TRAVIS_PULL_REQUEST') if os.getenv('TRAVIS_PULL_REQUEST') != 'false' else None) return kargs - + def finish(self, result): exit(result) - + def install_toolset(self, toolset): ''' Installs specific toolset on CI system. 
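# Illustrative sketch, not part of the patch: ci_travis.init above fills the
# script arguments from Travis environment variables and treats
# TRAVIS_PULL_REQUEST == "false" as "not a pull request". A small sketch of
# that normalization (the dictionary keys and the fallback slug are
# illustrative only):
import os


def travis_env_sketch():
    pr = os.getenv('TRAVIS_PULL_REQUEST')
    slug = os.getenv('TRAVIS_REPO_SLUG', 'owner/repo')
    return {
        'repo_dir': os.getenv('TRAVIS_BUILD_DIR'),
        'branch': os.getenv('TRAVIS_BRANCH'),
        'commit': os.getenv('TRAVIS_COMMIT'),
        # TRAVIS_REPO_SLUG is "owner/repo"; only the repo part is kept.
        'repo': slug.split('/')[1],
        # Travis sets the variable to the literal string "false" on
        # non-PR builds, so map that to None.
        'pull_request': pr if pr and pr != 'false' else None,
    }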
@@ -701,30 +746,31 @@ class ci_travis(object): if 'ppa' in info: for ppa in info['ppa']: utils.check_call( - 'sudo','add-apt-repository','--yes',ppa) + 'sudo', 'add-apt-repository', '--yes', ppa) if 'deb' in info: utils.make_file('sources.list', - "deb %s"%(' '.join(info['deb'])), - "deb-src %s"%(' '.join(info['deb']))) - utils.check_call('sudo','bash','-c','cat sources.list >> /etc/apt/sources.list') + "deb %s" % (' '.join(info['deb'])), + "deb-src %s" % (' '.join(info['deb']))) + utils.check_call('sudo', 'bash', '-c', + 'cat sources.list >> /etc/apt/sources.list') if 'apt-key' in info: for key in info['apt-key']: - utils.check_call('wget',key,'-O','apt.key') - utils.check_call('sudo','apt-key','add','apt.key') + utils.check_call('wget', key, '-O', 'apt.key') + utils.check_call('sudo', 'apt-key', 'add', 'apt.key') utils.check_call( - 'sudo','apt-get','update','-qq') + 'sudo', 'apt-get', 'update', '-qq') utils.check_call( - 'sudo','apt-get','install','-qq',info['package']) + 'sudo', 'apt-get', 'install', '-qq', info['package']) if 'debugpackage' in info and info['debugpackage']: utils.check_call( - 'sudo','apt-get','install','-qq',info['debugpackage']) + 'sudo', 'apt-get', 'install', '-qq', info['debugpackage']) # Travis-CI commands in the order they are executed. We need # these to forward to our common commands, if they are different. - + def command_before_install(self): pass - + def command_install(self): self.script.command_install() @@ -752,66 +798,70 @@ class ci_travis(object): def command_after_script(self): pass + class ci_circleci(object): ''' This variant build releases in the context of the CircleCI service. ''' - - def __init__(self,script): + + def __init__(self, script): self.script = script - + def init(self, opt, kargs): - set_arg(kargs,'repo_dir', os.path.join(os.getenv("HOME"),os.getenv("CIRCLE_PROJECT_REPONAME"))) - set_arg(kargs,'branch', os.getenv("CIRCLE_BRANCH")) - set_arg(kargs,'commit', os.getenv("CIRCLE_SHA1")) - set_arg(kargs,'repo', os.getenv("CIRCLE_PROJECT_REPONAME").split("/")[1]) - set_arg(kargs,'pull_request', os.getenv('CIRCLE_PR_NUMBER')) + set_arg(kargs, 'repo_dir', os.path.join( + os.getenv("HOME"), os.getenv("CIRCLE_PROJECT_REPONAME"))) + set_arg(kargs, 'branch', os.getenv("CIRCLE_BRANCH")) + set_arg(kargs, 'commit', os.getenv("CIRCLE_SHA1")) + set_arg(kargs, 'repo', os.getenv( + "CIRCLE_PROJECT_REPONAME").split("/")[1]) + set_arg(kargs, 'pull_request', os.getenv('CIRCLE_PR_NUMBER')) return kargs - + def finish(self, result): exit(result) - + def command_machine_post(self): # Apt update for the pckages installs we'll do later. - utils.check_call('sudo','apt-get','-qq','update') + utils.check_call('sudo', 'apt-get', '-qq', 'update') # Need PyYAML to read Travis yaml in a later step. - utils.check_call("pip","install","--user","PyYAML") - + utils.check_call("pip", "install", "--user", "PyYAML") + def command_checkout_post(self): os.chdir(self.script.repo_dir) - utils.check_call("git","submodule","update","--quiet","--init","--recursive") - + utils.check_call("git", "submodule", "update", + "--quiet", "--init", "--recursive") + def command_dependencies_pre(self): # Read in .travis.yml for list of packages to install # as CircleCI doesn't have a convenient apt install method. 
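# Illustrative sketch, not part of the patch: ci_travis.install_toolset above
# drives apt from a toolset_info entry that may carry 'ppa', 'deb', 'apt-key',
# 'package' and 'debugpackage' fields. A trimmed sketch of that flow,
# assuming such a dictionary, a Debian/Ubuntu host with sudo, and wget on
# PATH (the dictionary layout is inferred from the calls in the hunk above;
# the 'deb' sources.list handling is omitted here):
import subprocess


def install_toolset_sketch(info):
    if 'ppa' in info:
        for ppa in info['ppa']:
            subprocess.check_call(
                ['sudo', 'add-apt-repository', '--yes', ppa])
    if 'apt-key' in info:
        for key in info['apt-key']:
            subprocess.check_call(['wget', key, '-O', 'apt.key'])
            subprocess.check_call(['sudo', 'apt-key', 'add', 'apt.key'])
    subprocess.check_call(['sudo', 'apt-get', 'update', '-qq'])
    subprocess.check_call(
        ['sudo', 'apt-get', 'install', '-qq', info['package']])
    if info.get('debugpackage'):
        subprocess.check_call(
            ['sudo', 'apt-get', 'install', '-qq', info['debugpackage']])


# Example shape of one entry (values are illustrative only):
example_info = {
    'ppa': ['ppa:ubuntu-toolchain-r/test'],
    'package': 'g++-8',
    'debugpackage': None,
}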
import yaml - utils.check_call('sudo','-E','apt-get','-yqq','update') - utils.check_call('sudo','apt-get','-yqq','purge','texlive*') - with open(os.path.join(self.script.repo_dir,'.travis.yml')) as yml: + utils.check_call('sudo', '-E', 'apt-get', '-yqq', 'update') + utils.check_call('sudo', 'apt-get', '-yqq', 'purge', 'texlive*') + with open(os.path.join(self.script.repo_dir, '.travis.yml')) as yml: travis_yml = yaml.load(yml) - utils.check_call('sudo','apt-get','-yqq', - '--no-install-suggests','--no-install-recommends','--force-yes','install', - *travis_yml['addons']['apt']['packages']) - + utils.check_call('sudo', 'apt-get', '-yqq', + '--no-install-suggests', '--no-install-recommends', '--force-yes', 'install', + *travis_yml['addons']['apt']['packages']) + def command_dependencies_override(self): self.script.command_install() - + def command_dependencies_post(self): pass - + def command_database_pre(self): pass - + def command_database_override(self): pass - + def command_database_post(self): pass - + def command_test_pre(self): self.script.command_install() self.script.command_before_build() - + def command_test_override(self): # CircleCI runs all the test subsets. So in order to avoid # running the after_success we do it here as the build step @@ -819,63 +869,125 @@ class ci_circleci(object): self.script.command_build() self.script.command_before_cache() self.script.command_after_success() - + def command_test_post(self): pass + class ci_appveyor(object): - - def __init__(self,script): + + def __init__(self, script): self.script = script - + def init(self, opt, kargs): - set_arg(kargs,'repo_dir',os.getenv("APPVEYOR_BUILD_FOLDER")) - set_arg(kargs,'branch',os.getenv("APPVEYOR_REPO_BRANCH")) - set_arg(kargs,'commit',os.getenv("APPVEYOR_REPO_COMMIT")) - set_arg(kargs,'repo',os.getenv("APPVEYOR_REPO_NAME").split("/")[1]) - set_arg(kargs,'address_model',os.getenv("PLATFORM",None)) - set_arg(kargs,'variant',os.getenv("CONFIGURATION","debug")) - set_arg(kargs,'pull_request', os.getenv('APPVEYOR_PULL_REQUEST_NUMBER')) + set_arg(kargs, 'repo_dir', os.getenv("APPVEYOR_BUILD_FOLDER")) + set_arg(kargs, 'branch', os.getenv("APPVEYOR_REPO_BRANCH")) + set_arg(kargs, 'commit', os.getenv("APPVEYOR_REPO_COMMIT")) + set_arg(kargs, 'repo', os.getenv("APPVEYOR_REPO_NAME").split("/")[1]) + set_arg(kargs, 'address_model', os.getenv("PLATFORM", None)) + set_arg(kargs, 'variant', os.getenv("CONFIGURATION", "debug")) + set_arg(kargs, 'pull_request', os.getenv( + 'APPVEYOR_PULL_REQUEST_NUMBER')) return kargs - + def finish(self, result): exit(result) - + # Appveyor commands in the order they are executed. We need # these to forward to our common commands, if they are different. - + def command_install(self): self.script.command_install() - + def command_before_build(self): os.chdir(self.script.repo_dir) - utils.check_call("git","submodule","update","--quiet","--init","--recursive") + utils.check_call("git", "submodule", "update", + "--quiet", "--init", "--recursive") self.script.command_before_build() - + def command_build_script(self): self.script.command_build() - + def command_after_build(self): self.script.command_before_cache() - + def command_before_test(self): pass - + def command_test_script(self): pass - + def command_after_test(self): pass - + def command_on_success(self): self.script.command_after_success() - + def command_on_failure(self): pass - + def command_on_finish(self): pass + +class ci_azp(object): + ''' + This variant build releases in the context of the Azure Pipelines service. 
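# Illustrative sketch, not part of the patch: ci_circleci.command_dependencies_pre
# above reuses the package list kept in .travis.yml, since CircleCI has no
# equivalent of the Travis apt addon. A small sketch of that lookup; it uses
# yaml.safe_load, whereas the hunk above calls yaml.load, which newer PyYAML
# releases require a Loader argument for:
import yaml  # PyYAML, installed earlier in the CircleCI flow


def travis_apt_packages_sketch(travis_yml_path='.travis.yml'):
    with open(travis_yml_path) as yml:
        travis_yml = yaml.safe_load(yml)
    # Path inside the YAML document: addons -> apt -> packages.
    return travis_yml['addons']['apt']['packages']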
+ ''' + + def __init__(self, script): + self.script = script + + def init(self, opt, kargs): + set_arg(kargs, 'repo_dir', os.getenv("AZP_REPO_DIR")) + set_arg(kargs, 'branch', os.getenv("AZP_BRANCH")) + set_arg(kargs, 'commit', os.getenv("AZP_COMMIT")) + set_arg(kargs, 'repo', os.getenv("AZP_REPO")) + set_arg(kargs, 'pull_request', os.getenv('AZP_PULL_REQUEST')) + return kargs + + def finish(self, result): + exit(result) + + def install_toolset(self, toolset): + ''' + Installs specific toolset on CI system. + ''' + info = toolset_info[toolset] + if sys.platform.startswith('linux'): + os.chdir(self.script.build_dir) + if 'ppa' in info: + for ppa in info['ppa']: + utils.check_call( + 'sudo', 'add-apt-repository', '--yes', ppa) + if 'deb' in info: + utils.check_call( + 'sudo', '-E', 'apt-add-repository', + 'deb %s' % (' '.join(info['deb']))) + if 'apt-key' in info: + for key in info['apt-key']: + utils.check_call('wget', key, '-O', 'apt.key') + utils.check_call('sudo', '-E', 'apt-key', 'add', 'apt.key') + utils.check_call( + 'sudo', '-E', 'apt-get', 'update') + utils.check_call( + 'sudo', '-E', 'apt-get', '-yq', '--no-install-suggests', + '--no-install-recommends', 'install', info['package']) + if 'debugpackage' in info and info['debugpackage']: + utils.check_call( + 'sudo', 'apt-get', '-yq', '--no-install-suggests', + '--no-install-recommends', 'install', info['debugpackage']) + + def command_install(self): + self.script.command_install() + self.script.command_before_build() + + def command_script(self): + self.script.command_build() + self.script.command_before_cache() + self.script.command_after_success() + + def main(script_klass): if os.getenv('TRAVIS', False): script_klass(ci_travis) @@ -883,5 +995,7 @@ def main(script_klass): script_klass(ci_circleci) elif os.getenv('APPVEYOR', False): script_klass(ci_appveyor) + elif os.getenv('AZP', False): + script_klass(ci_azp) else: script_klass(ci_cli) diff --git a/tools/ci/library_test.py b/tools/ci/library_test.py index 3cde056..16bc734 100644 --- a/tools/ci/library_test.py +++ b/tools/ci/library_test.py @@ -1,11 +1,12 @@ -#!/usr/bin/env python -# Copyright Rene Rivera 2016 +# Copyright Rene Rivera 2016-2019 # # Distributed under the Boost Software License, Version 1.0. # (See accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt) +from __future__ import print_function + import os.path import shutil import sys @@ -13,6 +14,7 @@ from common import toolset_info, main, utils, script_common, ci_cli, set_arg __dirname__ = os.path.dirname(os.path.realpath(__file__)) + class script(script_common): ''' Main script to test a Boost C++ Library. 
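# Illustrative sketch, not part of the patch: main() above picks the CI
# adapter by probing well-known environment variables, with the new AZP
# branch added for Azure Pipelines and the command-line variant as the
# fallback. A compact sketch of that selection (the variable names mirror
# the hunk above; the returned strings are illustrative):
import os


def detect_ci_sketch(environ=os.environ):
    if environ.get('TRAVIS'):
        return 'travis'
    if environ.get('CIRCLECI'):
        return 'circleci'
    if environ.get('APPVEYOR'):
        return 'appveyor'
    if environ.get('AZP'):
        return 'azure-pipelines'
    return 'cli'


if __name__ == '__main__':
    print(detect_ci_sketch())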
@@ -20,21 +22,27 @@ class script(script_common): def __init__(self, ci_klass, **kargs): script_common.__init__(self, ci_klass, **kargs) - + def init(self, opt, kargs): - opt.add_option( '--toolset', - help="single toolset to test with" ) - opt.add_option( '--target', + opt.add_option( + '--toolset', + help="single toolset to test with") + opt.add_option( + '--target', help="test target to build for testing, defaults to TARGET or 'minimal'") - opt.add_option( '--address-model', - help="address model to test, ie 64 or 32" ) - opt.add_option( '--variant', - help="variant to test, ie debug, release" ) + opt.add_option( + '--address-model', + help="address model to test, ie 64 or 32") + opt.add_option( + '--variant', + help="variant to test, ie debug, release") set_arg(kargs, 'toolset', os.getenv("TOOLSET")) set_arg(kargs, 'target', os.getenv('TARGET', 'minimal')) - set_arg(kargs, 'address_model', os.getenv("ADDRESS_MODEL",None)) - set_arg(kargs, 'variant', os.getenv("VARIANT","debug")) - set_arg(kargs, 'cxxflags', os.getenv("CXXFLAGS",None)) + set_arg(kargs, 'address_model', os.getenv("ADDRESS_MODEL", None)) + set_arg(kargs, 'variant', os.getenv("VARIANT", "debug")) + set_arg(kargs, 'cxxstd', os.getenv("CXXSTD", None)) + set_arg(kargs, 'cxxdialect', os.getenv("CXXDIALECT", None)) + set_arg(kargs, 'cxxdefs', os.getenv("CXXDEFS", None)) return kargs def start(self): @@ -45,48 +53,41 @@ class script(script_common): def command_install(self): script_common.command_install(self) # Fetch & install toolset.. - utils.log( "Install toolset: %s"%(self.toolset) ) + utils.log("Install toolset: %s" % (self.toolset)) if self.toolset: self.command_install_toolset(self.toolset) - + def command_before_build(self): script_common.command_before_build(self) - + # Fetch dependencies. - utils.git_clone('boostorg','build','develop',repo_dir=self.b2_dir) - + utils.git_clone('boostorg', 'build', 'develop', repo_dir=self.b2_dir) + # Create config file for b2 toolset. if not isinstance(self.ci, ci_cli): - cxxflags = None - if self.cxxflags: - cxxflags = self.cxxflags.split() - cxxflags = " ".join(cxxflags) utils.make_file(os.path.join(self.repo_dir, 'project-config.jam'), - """ -using %(toolset)s : %(version)s : %(command)s : %(cxxflags)s ; -using python : %(pyversion)s : "%(python)s" ; -"""%{ - 'toolset':toolset_info[self.toolset]['toolset'], - 'version':toolset_info[self.toolset]['version'], - 'command':toolset_info[self.toolset]['command'], - 'cxxflags':""+cxxflags if cxxflags else "", - 'pyversion':"%s.%s"%(sys.version_info[0],sys.version_info[1]), - 'python':sys.executable.replace("\\","\\\\") - }) - - # "Convert" boostorg-predef into standalone b2 project. - if os.path.exists(os.path.join(self.repo_dir,'build.jam')) and not os.path.exists(os.path.join(self.repo_dir,'project-root.jam')): - os.rename(os.path.join(self.repo_dir,'build.jam'), os.path.join(self.repo_dir,'project-root.jam')) + """ +using %(toolset)s : %(version)s : %(command)s ; +""" % { + 'toolset': toolset_info[self.toolset]['toolset'], + 'version': toolset_info[self.toolset]['version'], + 'command': toolset_info[self.toolset]['command'], + }) + + # # "Convert" boostorg-predef into standalone b2 project. + # if os.path.exists(os.path.join(self.repo_dir, 'build.jam')) and not os.path.exists(os.path.join(self.repo_dir, 'project-root.jam')): + # os.rename(os.path.join(self.repo_dir, 'build.jam'), + # os.path.join(self.repo_dir, 'project-root.jam')) def command_build(self): script_common.command_build(self) - + # Set up tools. 
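# Illustrative sketch, not part of the patch: command_before_build above now
# writes a minimal project-config.jam of the form
# "using <toolset> : <version> : <command> ;", dropping the cxxflags and
# python lines the old version generated. A sketch of that generation,
# assuming a toolset_info-style entry (function name and example values are
# illustrative):
import codecs


def write_project_config_sketch(path, toolset, version, command):
    text = '\nusing %(toolset)s : %(version)s : %(command)s ;\n' % {
        'toolset': toolset,
        'version': version,
        'command': command,
    }
    with codecs.open(path, 'w', 'utf-8') as f:
        f.write(text)


# Example:
# write_project_config_sketch('project-config.jam', 'gcc', '8', 'g++-8')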
if not isinstance(self.ci, ci_cli) and toolset_info[self.toolset]['command']: os.environ['PATH'] = os.pathsep.join([ os.path.dirname(toolset_info[self.toolset]['command']), os.environ['PATH']]) - + # Bootstrap Boost Build engine. os.chdir(self.b2_dir) if sys.platform == 'win32': @@ -95,35 +96,44 @@ using python : %(pyversion)s : "%(python)s" ; utils.check_call("./bootstrap.sh") os.environ['PATH'] = os.pathsep.join([self.b2_dir, os.environ['PATH']]) os.environ['BOOST_BUILD_PATH'] = self.b2_dir - + # Run the limited tests. - print("--- Testing %s ---"%(self.repo_dir)) - os.chdir(os.path.join(self.repo_dir,'test')) + print("--- Testing %s ---" % (self.repo_dir)) + os.chdir(os.path.join(self.repo_dir, 'test')) toolset_to_test = "" if self.toolset: if not isinstance(self.ci, ci_cli): toolset_to_test = toolset_info[self.toolset]['toolset'] else: toolset_to_test = self.toolset + cxxdefs = [] + if self.cxxdefs: + cxxdefs = ['define=%s' % (d) for d in self.cxxdefs.split(',')] self.b2( '-d1', '-p0', 'preserve-test-targets=off', '--dump-tests', '--verbose-test', - '--build-dir=%s'%(self.build_dir), - '--out-xml=%s'%(os.path.join(self.build_dir,'regression.xml')), - '' if not toolset_to_test else 'toolset=%s'%(toolset_to_test), - '' if not self.address_model else 'address-model=%s'%(self.address_model), - 'variant=%s'%(self.variant), - self.target - ) - + '--build-dir=%s' % (self.build_dir), + '--out-xml=%s' % (os.path.join(self.build_dir, 'regression.xml')), + '' if not toolset_to_test else 'toolset=%s' % (toolset_to_test), + '' if not self.address_model else 'address-model=%s' % ( + self.address_model), + 'variant=%s' % (self.variant), + '' if not self.cxxstd else 'cxxstd=%s' % ( + self.cxxstd), + '' if not self.cxxdialect else 'cxxstd-dialect=%s' % ( + self.cxxdialect), + self.target, + *cxxdefs + ) + # Generate a readable test report. import build_log log_main = build_log.Main([ '--output=console', - os.path.join(self.build_dir,'regression.xml')]) + os.path.join(self.build_dir, 'regression.xml')]) # And exit with an error if the report contains failures. # This lets the CI notice the error and report a failed build. # And hence trigger the failure machinery, like sending emails. @@ -133,9 +143,10 @@ using python : %(pyversion)s : "%(python)s" ; def command_before_cache(self): script_common.command_before_cache(self) os.chdir(self.b2_dir) - utils.check_call("git","clean","-dfqx") - utils.check_call("git","status","-bs") + utils.check_call("git", "clean", "-dfqx") + utils.check_call("git", "status", "-bs") # utils.check_call("git","submodule","--quiet","foreach","git","clean","-dfqx") # utils.check_call("git","submodule","foreach","git","status","-bs") + main(script)
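# Illustrative sketch, not part of the patch: command_build above passes empty
# strings for options that are not set (toolset, address-model, cxxstd,
# cxxstd-dialect) and expands CXXDEFS into repeated define=... arguments. A
# sketch of assembling that b2 argument list while filtering out the empty
# placeholders (names and defaults are illustrative):
import os


def b2_test_args_sketch(build_dir, target, jobs=2, toolset=None,
                        address_model=None, variant='debug', cxxstd=None,
                        cxxdialect=None, cxxdefs=None):
    args = ['b2', '--debug-configuration', '-j%s' % jobs,
            '-d1', '-p0', 'preserve-test-targets=off',
            '--dump-tests', '--verbose-test',
            '--build-dir=%s' % build_dir,
            '--out-xml=%s' % os.path.join(build_dir, 'regression.xml')]
    if toolset:
        args.append('toolset=%s' % toolset)
    if address_model:
        args.append('address-model=%s' % address_model)
    args.append('variant=%s' % variant)
    if cxxstd:
        args.append('cxxstd=%s' % cxxstd)
    if cxxdialect:
        args.append('cxxstd-dialect=%s' % cxxdialect)
    if cxxdefs:
        # CXXDEFS is a comma-separated list, e.g. "FOO,BAR=1".
        args.extend('define=%s' % d for d in cxxdefs.split(','))
    args.append(target)
    return args


# Example:
# print(b2_test_args_sketch('build', 'minimal', toolset='gcc', cxxstd='17'))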