regression.py

A good C++ library... very popular these days.

# Copyright (c) MetaCommunications, Inc. 2003-2005
#
# Distributed under the Boost Software License, Version 1.0. 
# (See accompanying file LICENSE_1_0.txt or copy at 
# http://www.boost.org/LICENSE_1_0.txt)

import urllib
import tarfile
import socket
import time
import getopt
import glob
import shutil
import stat
import os.path
import os
import platform
import traceback
import string
import sys

regression_root    = os.path.abspath( os.path.dirname( sys.argv[0] ) )
regression_results = os.path.join( regression_root, 'results' )
regression_log     = os.path.join( regression_results, 'bjam.log' )
install_log        = os.path.join( regression_results, 'bjam_install.log' )

boost_root      = os.path.join( regression_root, 'boost' )
xsl_reports_dir = os.path.join( boost_root, 'tools', 'regression', 'xsl_reports' )
timestamp_path  = os.path.join( regression_root, 'timestamp' )

cvs_command_line         = 'cvs -z9 %(command)s'
cvs_ext_command_line     = 'cvs -d:ext:%(user)s@cvs.sourceforge.net:/cvsroot/boost -z9 %(command)s'
cvs_pserver_command_line = 'cvs -d:pserver:%(user)s@cvs.sourceforge.net:/cvsroot/boost -z9 %(command)s'
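
# Illustration only: these templates are filled in by cvs_command() below.
# For example, an anonymous checkout expands as
#
#     cvs_pserver_command_line % { 'user': 'anonymous', 'command': 'checkout boost' }
#
# which yields
#
#     'cvs -d:pserver:anonymous@cvs.sourceforge.net:/cvsroot/boost -z9 checkout boost'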

bjam = {}
process_jam_log = {}

if sys.platform == 'win32':
    bjam[ 'name' ] = 'bjam.exe'
    bjam[ 'build_cmd' ] = lambda toolset: 'build.bat %s' % toolset
    bjam[ 'is_supported_toolset' ] = lambda x: x in [ 'borland', 'como', 'gcc', 'gcc-nocygwin' \
                                                    , 'intel-win32', 'metrowerks', 'mingw' \
                                                    , 'msvc', 'vc7' \
                                                    ]
    process_jam_log[ 'name' ] = 'process_jam_log.exe'
    process_jam_log[ 'default_toolset' ] = 'vc-7_1'
    patch_boost_name = 'patch_boost.bat'
else:
    bjam[ 'name' ] = 'bjam'
    bjam[ 'build_cmd' ] = lambda toolset:'./build.sh %s' % toolset
    bjam[ 'is_supported_toolset' ] = lambda x: x in [ 'acc', 'como', 'darwin', 'gcc' \
                                                    , 'intel-linux', 'kcc', 'kylix' \
                                                    , 'mipspro', 'sunpro', 'tru64cxx' \
                                                    , 'vacpp'\
                                                    ]
    process_jam_log[ 'name' ] = 'process_jam_log'
    process_jam_log[ 'default_toolset' ] = 'gcc'
    patch_boost_name = './patch_boost'

bjam[ 'default_toolset' ] = ''
bjam[ 'path' ] = os.path.join( regression_root, bjam[ 'name' ] )
bjam[ 'source_dir' ] = os.path.join( boost_root, 'tools', 'build', 'jam_src' )
bjam[ 'build_path_root' ] = bjam[ 'source_dir' ]

process_jam_log[ 'path' ] = os.path.join( regression_root, process_jam_log[ 'name' ] )
process_jam_log[ 'source_dir' ] = os.path.join( boost_root, 'tools', 'regression', 'build' )
process_jam_log[ 'build_path_root' ] = os.path.join( 
      boost_root, 'bin', 'boost', 'tools', 'regression', 'build'
    , process_jam_log[ 'name' ]
    )

process_jam_log[ 'build_cmd' ] = lambda toolset: bjam_command( toolset )
process_jam_log[ 'is_supported_toolset' ] = lambda x : True
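
# Both descriptors above now carry the same set of keys -- 'name', 'path',
# 'source_dir', 'build_path_root', 'build_cmd', 'default_toolset' and
# 'is_supported_toolset' -- which is the interface that tool_path() and
# build_if_needed() below rely on.  (bjam_command(), used for the
# process_jam_log build command, is not defined in this part of the file;
# it presumably appears further down.)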

build_monitor_url = 'http://www.meta-comm.com/engineering/resources/build_monitor.zip'
pskill_url = 'http://www.sysinternals.com/files/pskill.zip'

utils = None


def log( message ):
    sys.stderr.write( '# %s\n' % message )
    sys.stderr.flush()


def platform_name():
    # See http://article.gmane.org/gmane.comp.lib.boost.testing/933
    if sys.platform == 'win32':
        return 'Windows'

    return platform.system()


def rmtree( path ):
    if os.path.exists( path ):
        if sys.platform == 'win32':
            os.system( 'del /f /s /q "%s" >nul 2>&1' % path )
            shutil.rmtree( path )
        else:
            os.system( 'rm -f -r "%s"' % path )


def retry( f, args, max_attempts=5, sleep_secs=10 ):
    for attempts in range( max_attempts, -1, -1 ):
        try:
            return f( *args )
        except Exception, msg:
            log( '%s failed with message "%s"' % ( f.__name__, msg ) )
            if attempts == 0: 
                log( 'Giving up.' )
                raise

            log( 'Retrying (%d more attempts).' % attempts )
            time.sleep( sleep_secs )

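
# Minimal usage sketch (illustrative only; tarball_url and tarball_path are
# just placeholder names here): wrap a flaky callable so that it is
# re-attempted a few times before the final exception propagates, e.g.
#
#     retry( http_get, ( tarball_url, tarball_path, None ), max_attempts = 3 )
#
# get_source() and update_source() below use it in the same way around the
# CVS and tarball steps.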

def cleanup( args, **unused ):
    if args == []: args = [ 'source', 'bin' ]

    if 'source' in args:
        log( 'Cleaning up "%s" directory ...' % boost_root )
        rmtree( boost_root )
    
    if 'bin' in args:
        boost_bin_dir = os.path.join( boost_root, 'bin' )
        log( 'Cleaning up "%s" directory ...' % boost_bin_dir )
        rmtree( boost_bin_dir )

        log( 'Cleaning up "%s" directory ...' % regression_results )
        rmtree( regression_results )


def http_get( source_url, destination, proxy ):
    if proxy is None: proxies = None
    else:             proxies = { 'http' : proxy }

    src = urllib.urlopen( source_url, proxies = proxies )

    f = open( destination, 'wb' )
    while True:
        data = src.read( 16*1024 )
        if len( data ) == 0: break
        f.write( data )

    f.close()
    src.close()


def tarball_name_for_tag( tag, timestamp = False ):
    if not timestamp: return 'boost-%s.tar.bz2' % tag
    else:             return 'boost-%s.timestamp' % tag
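
# For example: tarball_name_for_tag( 'CVS-HEAD' ) is 'boost-CVS-HEAD.tar.bz2',
# and tarball_name_for_tag( 'CVS-HEAD', timestamp = True ) is
# 'boost-CVS-HEAD.timestamp'.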


def download_boost_tarball( destination, tag, proxy, timestamp_only = False ):
    tarball_name = tarball_name_for_tag( tag, timestamp_only )
    tarball_path = os.path.join( destination, tarball_name )
    tarball_url = 'http://www.meta-comm.com/engineering/boost/snapshot/%s' % tarball_name

    log( 'Downloading "%s" to "%s"...'  % ( tarball_url, os.path.dirname( tarball_path ) ) )
    if os.path.exists( tarball_path ):
        os.unlink( tarball_path )

    http_get(
          tarball_url
        , tarball_path
        , proxy
        )

    return tarball_path


def find_boost_dirs( path ):
    return [ x for x in glob.glob( os.path.join( path, 'boost[-_]*' ) )
                        if os.path.isdir( x ) ]


def unpack_tarball( tarball_path, destination  ):
    log( 'Looking for old unpacked archives...' )
    old_boost_dirs = find_boost_dirs( destination )
    
    for old_boost_dir in old_boost_dirs:
        if old_boost_dir != tarball_path:
            log( 'Deleting old directory %s.' % old_boost_dir ) 
            rmtree( old_boost_dir )

    log( 'Unpacking boost tarball ("%s")...' % tarball_path )

    tarball_name = os.path.basename( tarball_path )
    extension = tarball_name[ tarball_name.find( '.' ) : ]
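    # (i.e. everything from the first '.' onward, so that a multi-part suffix
    # such as '.tar.bz2' stays intact: 'boost-CVS-HEAD.tar.bz2' -> '.tar.bz2')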

    if extension in ( ".tar.gz", ".tar.bz2" ):
        mode = os.path.splitext( extension )[1][1:]
        tar = tarfile.open( tarball_path, 'r:%s' % mode )
        for tarinfo in tar:
            tar.extract( tarinfo, destination )        
            if sys.platform == 'win32' and not tarinfo.isdir():
                # workaround what appears to be a Win32-specific bug in 'tarfile'
                # (modification times for extracted files are not set properly)
                f = os.path.join( destination, tarinfo.name )
                os.chmod( f, stat.S_IWRITE )
                os.utime( f, ( tarinfo.mtime, tarinfo.mtime ) )
        tar.close()
    elif extension in ( ".zip" ):
        import zipfile
        
        z = zipfile.ZipFile( tarball_path, 'r', zipfile.ZIP_DEFLATED ) 
        for f in z.infolist():
            destination_file_path = os.path.join( destination, f.filename )
            if destination_file_path[-1] == "/": # directory 
                if not os.path.exists( destination_file_path  ):
                    os.makedirs( destination_file_path  )
            else: # file
                result = open( destination_file_path, 'wb' )
                result.write( z.read( f.filename ) )
                result.close()
        z.close()
    else:
        raise Exception( 'Do not know how to unpack archives with extension "%s"' % extension )

    boost_dir = find_boost_dirs( destination )[0]
    log( '    Unpacked into directory "%s"' % boost_dir )
    
    if os.path.exists( boost_root ):
        log( 'Deleting "%s" directory...' % boost_root )
        rmtree( boost_root )

    log( 'Renaming "%s" into "%s"' % ( boost_dir, boost_root ) )
    os.rename( boost_dir, boost_root )


def cvs_command( user, command ):
    if user is None:
        cmd = cvs_command_line % { 'command': command }
    elif user == 'anonymous':
        cmd = cvs_pserver_command_line % { 'user': user, 'command': command }
    else:
        cmd = cvs_ext_command_line % { 'user': user, 'command': command }
    
    log( 'Executing CVS command "%s"' % cmd )
    rc = os.system( cmd )
    if rc != 0:
        raise Exception( 'CVS command "%s" failed with code %d' % ( cmd, rc ) )


def cvs_checkout( user, tag, args ):
    if tag != 'CVS-HEAD':
        command = 'checkout -r %s boost' % tag
    else:
        command = 'checkout boost'
    
    os.chdir( regression_root )
    cvs_command( user, command )


def cvs_update( user, tag, args ):
    if tag != 'CVS-HEAD':
        command = 'update -dPA -r %s' % tag
    else:
        command = 'update -dPA'
    
    os.chdir( os.path.join( regression_root, 'boost' ) )
    cvs_command( user, command )


def format_time( t ):
    return time.strftime( 
          '%a, %d %b %Y %H:%M:%S +0000'
        , t
        )


def refresh_timestamp():
    if os.path.exists( timestamp_path ):
        os.unlink( timestamp_path )

    open( timestamp_path, 'w' ).close()


def timestamp():
    return time.strftime(
          '%Y-%m-%dT%H:%M:%SZ'
        , time.gmtime( os.stat( timestamp_path ).st_mtime )
        )
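
# timestamp() reports the modification time of the marker file written by
# refresh_timestamp() as an ISO-8601 UTC string, e.g. '2005-03-14T09:26:53Z'.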


def get_tarball( tag, proxy, args, **unused ):
    if args == []: args = [ 'download', 'unpack' ]

    tarball_path = None

    if 'download' in args:
        tarball_path = download_boost_tarball( regression_root, tag, proxy )

    if 'unpack' in args:
        if not tarball_path:
            tarball_path = os.path.join( regression_root, tarball_name_for_tag( tag ) )
        unpack_tarball( tarball_path, regression_root )


def get_source( user, tag, proxy, args, **unused ):
    refresh_timestamp()
    log( 'Getting sources (%s)...' % timestamp() )

    if user is not None:
        retry( 
              cvs_checkout
            , ( user, tag, args )
            )
    else:
        retry( 
              get_tarball
            , ( tag, proxy, args )
            )


def update_source( user, tag, proxy, args, **unused ):
    if user is not None or os.path.exists( os.path.join( boost_root, 'CVS' ) ):
        open( timestamp_path, 'w' ).close()
        log( 'Updating sources from CVS (%s)...' % timestamp() )
        retry( 
              cvs_update
            , ( user, tag, args )
            )
    else:
        get_source( user, tag, proxy, args )


def tool_path( name_or_spec ):
    if isinstance( name_or_spec, basestring ):
        return os.path.join( regression_root, name_or_spec )

    if os.path.exists( name_or_spec[ 'path' ] ):
        return name_or_spec[ 'path' ]

    if name_or_spec.has_key( 'build_path' ):
        return name_or_spec[ 'build_path' ]

    build_path_root = name_or_spec[ 'build_path_root' ]
    log( 'Searching for "%s" in "%s"...' % ( name_or_spec[ 'name' ], build_path_root ) )
    for root, dirs, files in os.walk( build_path_root ):
        if name_or_spec[ 'name' ] in files:
            return os.path.join( root, name_or_spec[ 'name' ] )
    
    raise Exception( 'Cannot find "%s" in any of the following locations:\n%s' % (
          name_or_spec[ 'name' ]
        , '\n'.join( [ name_or_spec[ 'path' ], build_path_root ] )
        ) )
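
# Resolution order above, in words: a plain string is resolved relative to
# regression_root; a tool descriptor resolves to its preinstalled 'path' if
# that file exists, then to a previously recorded 'build_path', and finally
# by walking 'build_path_root' for a file named after the tool.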


def build_if_needed( tool, toolset, toolsets ):
    if os.path.exists( tool[ 'path' ] ):
        log( 'Found preinstalled "%s"; will use it.' % tool[ 'path' ] )
        return

    log( 'Preinstalled "%s" is not found; building one...' % tool[ 'path' ] )

    if toolset is None:
        if toolsets is not None:
            toolset = string.split( toolsets, ',' )[0]
            if not tool[ 'is_supported_toolset' ]( toolset ):
                log( 'Warning: Specified toolset (%s) cannot be used to bootstrap "%s".'\
                     % ( toolset, tool[ 'name' ] ) )

                toolset = tool[ 'default_toolset' ]
                log( '         Using default toolset for the platform (%s).' % toolset )
        else:
            toolset = tool[ 'default_toolset' ]
            log( 'Warning: No bootstrap toolset for "%s" was specified.' % tool[ 'name' ] )
            log( '         Using default toolset for the platform (%s).' % toolset )

    if os.path.exists( tool[ 'source_dir' ] ):
        log( 'Found "%s" source directory "%s"' % ( tool[ 'name' ], tool[ 'source_dir' ] ) )
        build_cmd = tool[ 'build_cmd' ]( toolset )
        log( 'Building "%s" (%s)...' % ( tool[ 'name'], build_cmd ) )
        utils.system( [ 
              'cd "%s"' % tool[ 'source_dir' ]
            , build_cmd
            ] )
    else:
        raise Exception( 'Could not find "%s" source directory "%s"' % ( tool[ 'name' ], tool[ 'source_dir' ] ) )

    if not tool.has_key( 'build_path' ):
        tool[ 'build_path' ] = tool_path( tool )

    if not os.path.exists( tool[ 'build_path' ] ):
        raise Exception( 'Failed to find "%s" after build.' % tool[ 'build_path' ] )

    log( '%s successfully built in "%s"' % ( tool[ 'name' ], tool[ 'build_path' ] ) )


def import_utils():
    global utils
    if utils is None:
        sys.path.append( xsl_reports_dir )
        import utils as utils_module
        utils = utils_module


def download_if_needed( tool_name, tool_url, proxy ):
    path = tool_path( tool_name )
    if not os.path.exists( path ):
        log( 'Preinstalled "%s" is not found.' % path )
        log( '  Downloading from %s...' % tool_url )
        
        zip_path = '%s.zip' % os.path.splitext( path )[0]
        http_get( tool_url, zip_path, proxy )

        log( '  Unzipping %s...' % path )
        utils.unzip( zip_path, os.path.dirname( path ) )

        log( '  Removing %s...' % zip_path )
        os.unlink( zip_path )
        log( 'Done.' )


def setup(
          comment
        , toolsets
        , bjam_toolset
        , pjl_toolset
        , monitored
        , proxy
        , args
        , **unused
        ):
    import_utils()
    
    if os.path.exists( patch_boost_name ):
        log( 'Found patch file "%s". Executing it.' % patch_boost_name )
        utils.system( [ patch_boost_name ] )

    build_if_needed( bjam, bjam_toolset, toolsets )
    build_if_needed( process_jam_log, pjl_toolset, toolsets )
    
    if monitored:
        if sys.platform == 'win32':
            download_if_needed( 'build_monitor.exe', build_monitor_url, proxy )
            download_if_needed( 'pskill.exe', pskill_url, proxy )
        else:
            log( 'Warning: Test monitoring is not supported on this platform (yet).' )
            log( '         Please consider contributing this piece!' )
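
# Minimal usage sketch (illustrative only; the command-line front end that
# parses these options -- note the getopt import above -- presumably follows
# in the remainder of the file, which this page does not show):
#
#     setup( comment      = None
#          , toolsets     = 'gcc'
#          , bjam_toolset = None
#          , pjl_toolset  = None
#          , monitored    = False
#          , proxy        = None
#          , args         = []
#          )
#
# With toolsets = 'gcc', build_if_needed() bootstraps bjam and
# process_jam_log with gcc unless prebuilt binaries are already present
# under regression_root.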
