contribulyze.py

From: subversion-1.4.5.tar.gz — source code for setting up SVN
Language: Python
Page 1 of 2
    out.write('</tr>\n')
    out.write('</table>\n\n')
    out.write('</div>\n\n')

    sorted_logs = unique_logs.keys()
    sorted_logs.sort()
    for log in sorted_logs:
      out.write('<hr />\n')
      out.write('<div class="h3" id="%s" title="%s">\n' % (log.revision,
                                                           log.revision))
      out.write('<pre>\n')
      if revision_url_pattern:
        revision_url = revision_url_pattern % log.revision[1:]
        revision = '<a href="%s">%s</a>' \
            % (escape_html(revision_url), log.revision)
      else:
        revision = log.revision
      out.write('<b>%s | %s | %s</b>\n\n' % (revision,
                                             escape_html(log.committer),
                                             escape_html(log.date)))
      out.write(spam_guard_in_html_block(escape_html(log.message)))
      out.write('</pre>\n')
      out.write('</div>\n\n')
    out.write('<hr />\n')
    out.write(html_footer())
    out.close()


class Field:
  """One field in one log message."""

  def __init__(self, name, alias = None):
    # The name of this field (e.g., "Patch", "Review", etc).
    self.name = name
    # An alias for the name of this field (e.g., "Reviewed").
    self.alias = alias
    # A list of contributor objects, in the order in which they were
    # encountered in the field.
    self.contributors = [ ]
    # Any parenthesized asides immediately following the field.  The
    # parentheses and trailing newline are left on.  In theory, this
    # supports concatenation of consecutive asides.  In practice, the
    # parser only detects the first one anyway, because additional
    # ones are very uncommon and furthermore by that point one should
    # probably be looking at the full log message.
    self.addendum = ''

  def add_contributor(self, contributor):
    self.contributors.append(contributor)

  def add_endum(self, addendum):
    self.addendum += addendum

  def __str__(self):
    s = 'FIELD: %s (%d contributors)\n' % (self.name, len(self.contributors))
    for contributor in self.contributors:
      s += str(contributor) + '\n'
    s += self.addendum
    return s


class LogMessage:
  # Maps revision strings (e.g., "r12345") onto LogMessage instances,
  # holding all the LogMessage instances ever created.
  all_logs = { }

  def __init__(self, revision, committer, date):
    """Instantiate a log message.  All arguments are strings,
    including REVISION, which should retain its leading 'r'."""
    self.revision = revision
    self.committer = committer
    self.date = date
    self.message = ''
    # Map field names (e.g., "Patch", "Review", "Suggested") onto
    # Field objects.
    self.fields = { }
    if LogMessage.all_logs.has_key(revision):
      complain("Revision '%s' seen more than once" % revision, True)
    LogMessage.all_logs[revision] = self

  def add_field(self, field):
    self.fields[field.name] = field

  def accum(self, line):
    """Accumulate one more line of raw message."""
    self.message += line

  def __cmp__(self, other):
    """Compare two log messages by revision number, for sort().
    Return -1, 0 or 1 depending on whether a > b, a == b, or a < b.
    Note that this is reversed from normal sorting behavior, but it's
    what we want for reverse chronological ordering of revisions."""
    a = int(self.revision[1:])
    b = int(other.revision[1:])
    if a > b: return -1
    if a < b: return 1
    else:     return 0

  def __hash__(self):
    """I don't really understand why defining __cmp__() but not
    __hash__() renders an object type unfit to be a dictionary key,
    especially in light of the recommendation that if a class defines
    mutable objects and implements __cmp__() or __eq__(), then it
    should not implement __hash__().  See these for details:

    http://mail.python.org/pipermail/python-dev/2004-February/042580.html
    http://mail.python.org/pipermail/python-bugs-list/2003-December/021314.html

    In the meantime, I think it's safe to use the revision as a hash value."""
    return int(self.revision[1:])

  def __str__(self):
    s = '=' * 15
    header = ' LOG: %s | %s ' % (self.revision, self.committer)
    s += header
    s += '=' * 15
    s += '\n'
    for field_name in self.fields.keys():
      s += str(self.fields[field_name]) + '\n'
    s += '-' * 15
    s += '-' * len(header)
    s += '-' * 15
    s += '\n'
    return s


### Code to parse the logs. ##

log_separator = '-' * 72 + '\n'

log_header_re = re.compile\
                ('^(r[0-9]+) \| ([^|]+) \| ([^|]+) \| ([0-9]+)[^0-9]')

field_re = re.compile('^(Patch|Review(ed)?|Suggested|Found) by:\s*(.*)')

field_aliases = { 'Reviewed' : 'Review' }

parenthetical_aside_re = re.compile('^\(.*\)\s*$')

def graze(input):
  just_saw_separator = False

  while True:
    line = input.readline()
    if line == '': break
    if line == log_separator:
      if just_saw_separator:
        sys.stderr.write('Two separators in a row.\n')
        sys.exit(1)
      else:
        just_saw_separator = True
        num_lines = None
        continue
    else:
      if just_saw_separator:
        m = log_header_re.match(line)
        if not m:
          sys.stderr.write('Could not match log message header.\n')
          sys.stderr.write('Line was:\n')
          sys.stderr.write("'%s'\n" % line)
          sys.exit(1)
        else:
          log = LogMessage(m.group(1), m.group(2), m.group(3))
          num_lines = int(m.group(4))
          just_saw_separator = False
          line = input.readline()
          # Handle 'svn log -v' by waiting for the blank line.
          while line != '\n':
            line = input.readline()
          # Parse the log message.
          field = None
          while num_lines > 0:
            line = input.readline()
            log.accum(line)
            m = field_re.match(line)
            if m:
              # We're on the first line of a field.  Parse the field.
              while m:
                if not field:
                  ident = m.group(1)
                  if field_aliases.has_key(ident):
                    field = Field(field_aliases[ident], ident)
                  else:
                    field = Field(ident)
                # Each line begins either with "WORD by:", or with whitespace.
                in_field_re = re.compile('^('
                                         + (field.alias or field.name)
                                         + ' by:\s+|\s+)(\S.*)+')
                m = in_field_re.match(line)
                user, real, email = Contributor.parse(m.group(2))
                if user == 'me':
                  user = log.committer
                c = Contributor.get(user, real, email)
                c.add_activity(field.name, log)
                field.add_contributor(c)
                line = input.readline()
                log.accum(line)
                num_lines -= 1
                m = in_field_re.match(line)
                if not m:
                  m = field_re.match(line)
                  if not m:
                    aside_match = parenthetical_aside_re.match(line)
                    if aside_match:
                      field.add_endum(line)
                  log.add_field(field)
                  field = None
            num_lines -= 1
        continue


index_introduction = '''
<p>The following list of contributors and their contributions is meant
to help us keep track of whom to consider for commit access.  The list
was generated from "svn&nbsp;log" output by <a
href="http://svn.collab.net/repos/svn/trunk/tools/dev/contribulyze.py"
>contribulyze.py</a>, which looks for log messages that use the <a
href="http://subversion.tigris.org/hacking.html#crediting">special
contribution format</a>.</p>

<p><i>Please do not use this list as a generic guide to who has
contributed what to Subversion!</i> It omits existing full committers,
for example, because they are irrelevant to our search for new
committers.  Also, it merely counts changes, it does not evaluate
them.  To truly understand what someone has contributed, you have to
read their changes in detail.  This page can only assist human
judgement, not substitute for it.</p>
'''

def drop(revision_url_pattern):
  # Output the data.
  #
  # The data structures are all linked up nicely to one another.  You
  # can get all the LogMessages, and each LogMessage contains all the
  # Contributors involved with that commit; likewise, each Contributor
  # points back to all the LogMessages it contributed to.
  #
  # However, the HTML output is pretty simple right now.  It's not taking
  # full advantage of all that cross-linking.  For each contributor, we
  # just create a file listing all the revisions contributed to; and we
  # build a master index of all contributors, each name being a link to
  # that contributor's individual file.  Much more is possible... but
  # let's just get this up and running first.

  for key in LogMessage.all_logs.keys():
    # You could print out all log messages this way, if you wanted to.
    pass
    # print LogMessage.all_logs[key]

  detail_subdir = "detail"
  if not os.path.exists(detail_subdir):
    os.mkdir(detail_subdir)

  index = open('index.html', 'w')
  index.write(html_header('Contributors'))
  index.write(index_introduction)
  index.write('<ol>\n')
  # The same contributor appears under multiple keys, so uniquify.
  seen_contributors = { }
  # Sorting alphabetically is acceptable, but even better would be to
  # sort by number of contributions, so the most active people appear at
  # the top -- that way we know whom to look at first for commit access
  # proposals.
  sorted_contributors = Contributor.all_contributors.values()
  sorted_contributors.sort()
  for c in sorted_contributors:
    if not seen_contributors.has_key(c):
      if c.score() > 0:
        if c.is_full_committer:
          # Don't even bother to print out full committers.  They are
          # a distraction from the purposes for which we're here.
          continue
        else:
          committerness = ''
          if c.is_committer:
            committerness = '&nbsp;(partial&nbsp;committer)'
          urlpath = "%s/%s.html" % (detail_subdir, c.canonical_name())
          fname = os.path.join(detail_subdir, "%s.html" % c.canonical_name())
          index.write('<li><p><a href="%s">%s</a>&nbsp;[%s]%s</p></li>\n'
                      % (url_encode(urlpath),
                         c.big_name(html=True),
                         c.score_str(), committerness))
          c.html_out(revision_url_pattern, fname)
    seen_contributors[c] = True
  index.write('</ol>\n')
  index.write(html_footer())
  index.close()


def process_committers(committers):
  """Read from open file handle COMMITTERS, which should be in
  the same format as the Subversion 'COMMITTERS' file.  Create
  Contributor objects based on the contents."""
  line = committers.readline()
  while line != 'Blanket commit access:\n':
    line = committers.readline()
  in_full_committers = True
  matcher = re.compile('(\S+)\s+([^\(\)]+)\s+(\([^()]+\)){0,1}')
  line = committers.readline()
  while line:
    # Every @-sign we see after this point indicates a committer line.
    if line == 'Commit access for specific areas:\n':
      in_full_committers = False
    elif line.find('@') >= 0:
      line = line.strip()
      m = matcher.match(line)
      user = m.group(1)
      real_and_email = m.group(2).strip()
      ignored, real, email = Contributor.parse(real_and_email)
      c = Contributor.get(user, real, email)
      c.is_committer = True
      c.is_full_committer = in_full_committers
    line = committers.readline()


def usage():
  print 'USAGE: %s [-C COMMITTERS_FILE] < SVN_LOG_OR_LOG-V_OUTPUT' \
        % os.path.basename(sys.argv[0])
  print ''
  print 'Create HTML files in the current directory, rooted at index.html,'
  print 'in which you can browse to see who contributed what.'
  print ''
  print 'The log input should use the contribution-tracking format defined'
  print 'in http://subversion.tigris.org/hacking.html#crediting.'
  print ''
  print 'Options:'
  print ''
  print '  -h, -H, -?, --help   Print this usage message and exit'
  print '  -C FILE              Use FILE as the COMMITTERS file'
  print '  -U URL               Use URL as a Python interpolation pattern to'
  print '                       generate URLs to link revisions to some kind'
  print '                       of web-based viewer (e.g. ViewCVS).  The'
  print '                       interpolation pattern should contain exactly'
  print '                       one format specifier, \'%s\', which will be'
  print '                       replaced with the revision number.'
  print ''


def main():
  try:
    opts, args = getopt.getopt(sys.argv[1:], 'C:U:hH?', [ 'help' ])
  except getopt.GetoptError, e:
    complain(str(e) + '\n\n')
    usage()
    sys.exit(1)

  # Parse options.
  revision_url_pattern = None
  for opt, value in opts:
    if opt in ('--help', '-h', '-H', '-?'):
      usage()
      sys.exit(0)
    elif opt == '-C':
      process_committers(open(value))
    elif opt == '-U':
      revision_url_pattern = value

  # Gather the data.
  graze(sys.stdin)

  # Output the data.
  drop(revision_url_pattern)


if __name__ == '__main__':
  main()
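To make the parsing section above concrete, here is a minimal standalone sketch (not part of contribulyze.py, written in the same Python 2 idiom as the script) that runs the script's log_header_re and field_re patterns against a hypothetical 'svn log' header line and a crediting field; the revision number, username, and e-mail address are invented for illustration.

import re

# Same patterns as in contribulyze.py above.
log_header_re = re.compile('^(r[0-9]+) \| ([^|]+) \| ([^|]+) \| ([0-9]+)[^0-9]')
field_re = re.compile('^(Patch|Review(ed)?|Suggested|Found) by:\s*(.*)')

# A hypothetical 'svn log' header line and a crediting field from a log message body.
header = 'r12345 | jrandom | 2006-05-01 12:00:00 +0000 (Mon, 01 May 2006) | 4 lines\n'
field = 'Patch by: J. Random <jrandom@example.com>\n'

m = log_header_re.match(header)
print m.group(1), m.group(2), m.group(4)   # r12345 jrandom 4

m = field_re.match(field)
print m.group(1), '->', m.group(3)         # Patch -> J. Random <jrandom@example.com>

graze() applies these same two patterns line by line, then switches to the dynamically built in_field_re once a field has been opened, so that continuation lines of the same field are credited to additional contributors.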
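As usage() and main() indicate, the script reads 'svn log' or 'svn log -v' output on stdin and writes index.html plus a detail/ subdirectory into the current directory; a typical invocation (repository and viewer URLs hypothetical) would be something like: svn log -v http://svn.example.com/repos/project | ./contribulyze.py -C COMMITTERS -U 'http://viewvc.example.com/?view=rev&revision=%s'. The -U pattern is interpolated in the HTML-output code at the top of this listing with the leading 'r' stripped from the revision string, roughly as in this sketch:

# Sketch of the -U interpolation (URL and revision are hypothetical).
revision_url_pattern = 'http://viewvc.example.com/?view=rev&revision=%s'
revision = 'r12345'
print revision_url_pattern % revision[1:]   # http://viewvc.example.com/?view=rev&revision=12345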
