亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频

? 歡迎來到蟲蟲下載站! | ?? 資源下載 ?? 資源專輯 ?? 關(guān)于我們
? 蟲蟲下載站

?? records-lib.pl

?? BIND 9 dynamic DNS webmin module. This module supports both static and dynamic zones, and IPv4 and I
?? PL
?? 第 1 頁 / 共 4 頁
字號:
#! /usr/bin/perl
#
#    B9DDNS - BIND 9 dynamic DNS webmin module.
#    Copyright (C) 2003 John Horne. <john.horne@plymouth.ac.uk>
#    Copyright (C) 2004 John Horne. <john.horne@plymouth.ac.uk>
#
#    This program is free software; you can redistribute it and/or modify
#    it under the terms of the GNU General Public License as published by
#    the Free Software Foundation; either version 2 of the License, or
#    (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU General Public License for more details.
#
#    You should have received a copy of the GNU General Public License
#    along with this program; if not, write to the Free Software
#    Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
#
# Common DNS record functions for the dynamic DNS module
#
use strict;
# NOTE(review): package globals such as %config, %access and %sd_zones come
# from the surrounding Webmin environment, hence strict 'vars' is disabled.
no strict 'vars';

#
# Reads a DNS zone and return a data structure of records.
#
# get_zone(zone, file, origin, [previous], [zone_type])
#
# Parameters:
#   zone      - zone structure (or 0/false when called recursively for an
#               $INCLUDE file, in which case only the file is read)
#   file      - zone file path; if empty, the zone is fetched via dig instead
#   origin    - zone origin name (absolute, or '.' for the root)
#   previous  - optional previous record, used to resolve a record that
#               omits its owner name (inherits the prior record's fqdn)
#   zone_type - optional zone type ('master', 'stub', ...); when omitted it
#               is looked up from the zone's 'members' values
#
# Returns a list of record hash refs, each with keys such as 'line', 'eline',
# 'num', 'fqdn', 'class', 'type', 'ttl', 'values' and 'comment'.
# $GENERATE directives appear as pseudo-records of type '$GEN'.
#
# For dynamic zones (or when no file is given) the records are obtained with
# dig (zone transfer, or NS+SOA queries for stub zones); otherwise the zone
# file itself is read and tokenized.  For master zones the %sd_zones global
# is also populated with the names of apparent subdomains.
#
sub get_zone {
	my $num = my $gen = my $prv = my $i = my $j = 0;
	my $lnum = my $glen = my $merge = my $merged_2 = my $oset = 0;
	my $soa_seen = my $dynamic_zone = my $default_ttl = my $ttl = 0;
	my $token_count = my $no_file = my $want_sd = 0;
	my $origin = my $inc_origin = my $file = my $line = my $comment = '';
	my $zone = my $zone_name = my $type = my $cmd_line = '';
	my $real_file = my $orig = my $sd = '';
	my @records = ();
	my @tokens = my @linenum = my @offsets = my @comments = my @inc = ();
	my %sd_names = my %stub_recs = ();

	$zone = $_[0];
	$origin = $zone_name = $_[2];
	# $orig keeps the original zone origin (for subdomain matching below);
	# it stays empty for the root zone.
	$orig = $origin unless ($origin eq '.');
	if ($_[1]) {
		$file = $_[1];
		$real_file = $config{'chroot'} . &absolute_path($file);
	}
	else {
		$no_file++;
	}
	if (defined($_[4])) {
		$type = $_[4];
	}
	else {
		$type = &find_value('type', $zone->{'members'});
	}
	# Subdomain collection is only wanted for master zones.
	$want_sd++ if ($type eq 'master');
	if ($zone && (($dynamic_zone = &dynamic_zone($zone, 0)) || $no_file)) {
		# Dynamic zone (or no file given): query the records from the
		# running name server with dig rather than reading a file.
		$cmd_line = $config{'dig_cmd'} . ' +nocomments +noidentify +nocmd +nostats ';
		if ($type eq 'stub') {
			#
			# Stub zones only need the NS and SOA record, the
			# relevant A and AAAA records will be automatically
			# included in the answer. If, however, named is not
			# running then we will have to wait for each query to
			# timeout, and this can be many seconds. To avoid this,
			# we simply return when named has stopped. For other
			# zone types they will get a connection refused error
			# straight away, and so return straight away.
			#
			if (&named_running() == 0) {
				return @records;
			}
			$cmd_line .= '+noauthority ' . $origin . ' ns @' . $config{'ns_if_query'} . ' ' . $origin . ' soa @' . $config{'ns_if_query'};
		}
		else {
			# All other dynamic zones: do a full zone transfer.
			$cmd_line .= '-t axfr ' . $origin . ' @' . $config{'ns_if_transfer'};
		}
		unless (open(FILE, "$cmd_line |")) {
			$line = $!;
			&terror('xfr_failed', $zone_name, $line);
		}
	}
	else {
		if (open(FILE, $real_file)) {
			#
			# If we allow dynamic zones to be treated as static
			# ones when named is stopped, then we must delete any
			# journal file that may exist because it is assumed we
			# are about to make changes to the zone file.
			#
			if ($access{'dyn_as_static'} && &named_running() == 0 &&
			    $type eq 'master') {
				unlink $real_file . '.jnl';
			}
		}
	}

	#
	# First pass: tokenize the whole input.  Parallel arrays record each
	# token's source line (@linenum), column offset (@offsets) and any
	# trailing comment on its line (@comments).
	#
	while (defined($line = <FILE>)) {
		chomp($line);
		$glen = $merge = 0;
		$comment = $merged_2 = '';
		$line =~ s/\s*$//o;
		#
		# Strip out comments (# is not a valid comment separator here!).
		#
		if ($line =~ /(?<!\\);/o &&
		    ($line =~ /^((?:[^;"]+|(?:"(?:[^"]*)"))*);(.*)/o ||
		     $line =~ /^((?:[^;\\]|\\.)*);(.*)/o ||
		     $line =~ /^((?:(?:[^;"\\]|\\.)+|(?:"(?:[^"\\]|\\.)*"))*);(.*)/o)) {
			$line = $1;
			$comment = $2;
		}
		# Now split the line into tokens.
		$oset = 0;
		while (1) {
			# $merge stays 1 for quoted strings and plain words,
			# which may be juxtaposed and must be merged into one
			# token; it is cleared (via the ($merge = 0) trick) for
			# a bare '(' or ')' which is always a token by itself.
			$merge = 1;
			if ($line =~ /^(\s*)"((?:[^"\\]|\\.)*)"(.*)/o ||
			    $line =~ /^(\s*)((?:[^\s()"\\]|\\.)+)(.*)/o ||
			    ($merge = 0) ||
			    $line =~ /^(\s*)([()])(.*)/o) {
				if ($glen == 0) {
					$oset += length($1);
				}
				else {
					$glen += length($1);
				}
				$glen += length($2);
				$merged_2 .= $2;
				$line = $3;
				if (! $merge || $line =~ /^([\s()]|$)/o) {
					# Token is complete; flush it.
					push(@tokens, $merged_2);
					push(@linenum, $lnum);
					push(@offsets, $oset);
					push(@comments, $comment);
					$comment = $merged_2 = '';
					$oset += $glen;
					$glen = 0;
				}
			}
			else {
				last;
			}
		}
		$lnum++;
	}
	close(FILE);

	#
	# Now parse into data structures.
	#
	$i = 0;
	while ($i < @tokens) {
		if (uc($tokens[$i]) eq '$ORIGIN') {
			#
			# The $ORIGIN directive (may be relative or absolute).
			#
			$i++;
			unless ($tokens[$i] =~ /\.$/o) {
				$tokens[$i] .= '.';
				$tokens[$i] .= $origin unless ($origin eq '.');
			}
			$origin = $tokens[$i];
			$i++;
		}
		elsif (uc($tokens[$i]) eq '$INCLUDE') {
			# Recursively parse the included file.  The two forms
			# are distinguished by whether a third token appears on
			# the same source line as the directive.
			if ($linenum[$i + 1] == $linenum[$i + 2]) {
				#
				# $INCLUDE zonefile origin
				#
				unless ($tokens[$i + 2] =~ /\.$/o) {
					$tokens[$i + 2] .= '.';
					$tokens[$i + 2] .= $origin unless ($origin eq '.');
				}
				$inc_origin = $tokens[$i + 2];
				@inc = &get_zone(0, $tokens[$i + 1],
						 $inc_origin,
				       (@records ? $records[$#records] : undef),
						 $type);
				$i += 3;
			}
			else {
				#
				# $INCLUDE zonefile
				#
				@inc = &get_zone(0, $tokens[$i + 1], $origin,
				       (@records ? $records[$#records] : undef),
						 $type);
				$i += 2;
			}
			# Renumber the included records into this zone's
			# sequence before appending them.
			foreach $j (@inc) {
				$j->{'num'} = $num++;
			}
			push(@records, @inc);
		}
		elsif (uc($tokens[$i]) eq '$GENERATE') {
			#
			# Generate directive...add it as a special record.
			#
			$gen = { 'line' => $linenum[$i], 'num' => $num++,
				 'type' => '$GEN', 'fqdn' => $tokens[$i + 2],
				 'values' => [ ] };
			local @generate = ();
			# Consume the rest of the $GENERATE line.
			while ($linenum[++$i] == $gen->{'line'}) {
				push (@{ $gen->{values} }, $tokens[$i]);
				push(@generate, $tokens[$i]);
			}
			$gen->{'generate'} = \@generate;
			push(@records, $gen);
		}
		elsif (uc($tokens[$i]) eq '$TTL') {
			#
			# TTL directive
			#
			$i++;
			$ttl = &convert_time(0, $tokens[$i]);
			# A $TTL before the SOA sets the zone's default TTL.
			$default_ttl = $ttl unless ($soa_seen);
			$i++;
		}
		elsif ($tokens[$i] =~ /^\$(\S+)/o) {
			#
			# Some other special directive - skip to end of line.
			#
			$j = $linenum[$i];
				while($linenum[$i] == $j) {
				$i++;
			}
		}
		else {
			#
			# An ordinary resource record.  Work out which of the
			# optional leading fields (name, TTL, class) are
			# present; a token at column 0 is an owner name, one
			# indented (offset > 0) is not.
			#
			local @values = ();
			local %rec = ();
			$rec{'line'} = $linenum[$i];
			$rec{'comment'} = $comments[$i];
			if (uc($tokens[$i]) eq 'IN' && $offsets[$i] > 0) {
				# Starting with a class.
				$rec{'class'} = uc($tokens[$i]);
				$i++;
			}
			elsif ($tokens[$i] =~ /^\d/o &&
			       $tokens[$i] !~ /\.in-addr\.arpa\.?$/io &&
			       $offsets[$i] > 0) {
				# Starting with a TTL and class.  (A leading
				# digit alone is not enough - reverse-zone
				# names also start with digits.)
				$rec{'ttl'} = $tokens[$i];
				$rec{'class'} = uc($tokens[$i + 1]);
				$i += 2;
			}
			elsif (uc($tokens[$i + 1]) eq 'IN') {
				# Starting with a name and class.
				$rec{'fqdn'} = $tokens[$i];
				$rec{'class'} = uc($tokens[$i + 1]);
				$i += 2;
			}
			elsif ($offsets[$i] > 0) {
				# Starting with nothing.
				$rec{'class'} = 'IN';
			}
			elsif ($tokens[$i + 1] =~ /^\d/o &&
			       uc($tokens[$i + 2]) eq 'IN') {
				# Starting with a name, TTL and class.
				$rec{'fqdn'} = $tokens[$i];
				$rec{'ttl'} = $tokens[$i + 1];
				$rec{'class'} = 'IN';
				$i += 3;
			}
			elsif ($tokens[$i + 1] =~ /^\d/o) {
				# Starting with a name and TTL.
				$rec{'fqdn'} = $tokens[$i];
				$rec{'ttl'} = $tokens[$i + 1];
				$rec{'class'} = 'IN';
				$i += 2;
			}
			else {
				# Starting with a name.
				$rec{'fqdn'} = $tokens[$i];
				$rec{'class'} = 'IN';
				$rec{'ttl'} = $ttl unless ($ttl == $default_ttl);
				$i++;
			}
			$rec{'type'} = uc($tokens[$i++]);
			if ($rec{'fqdn'}) {
				# Canonicalize the owner name: expand '@',
				# qualify relative names with the current
				# origin, and lower-case the result.
				$rec{'fqdn'} = $origin if ($rec{'fqdn'} eq '@');
				unless ($rec{'fqdn'} =~ /\.$/o) {
					$rec{'fqdn'} .= '.';
					$rec{'fqdn'} .= $origin unless ($origin eq '.');
				}
				$rec{'fqdn'} = lc($rec{'fqdn'});
				if ($want_sd && $rec{'type'} ne 'SOA' &&
						$rec{'type'} ne 'NS') {
					# Collect apparent subdomain names
					# (up to two labels below the origin)
					# for the %sd_zones global.  SRV and
					# DNAME owners need special handling.
					$sd = '';
					if ($rec{'type'} eq 'DNAME') {
						if ($rec{'fqdn'} =~ /(([^.]+\.)?[^.]+)\.$orig$/i) {
							$sd = $1;
							$sd =~ s/^\*\.//o;
						}
					}
					elsif ($rec{'type'} eq 'SRV') {
						if ($rec{'fqdn'} =~ /^_[^.]+\._[^.]+.*\.(([^.]+\.)?[^.]+)\.$orig$/i) {
							$sd = $1;
						}
						else {
							if ($rec{'fqdn'} =~ /\.(([^.]+\.)?[^.]+)\.$orig$/i) {
								$sd = $1;
							}
						}
					}
					else {
						if ($rec{'fqdn'} =~ /\.(([^.]+\.)?[^.]+)\.$orig$/i) {
							$sd = $1;
						}
					}
					if ($sd) {
						unless (exists($sd_names{$sd})) {
							# Record both the full
							# subdomain and its
							# parent label.
							$sd_names{$sd} = 1;
							$sd =~ s/^[^.]+\.//o;
							$sd_names{$sd} = 1;
						}
					}
				}
			}
			else {
				# No owner name: inherit it from the previous
				# record (or the caller-supplied one).
				$prv = ($#records >= 0) ? $records[$#records]
							: $_[3];
				unless ($prv) {
					&terror('efirst', $linenum[$i] + 1, $file);
				}
				$rec{'fqdn'} = $prv->{'fqdn'};
			}
			if ($rec{'type'} eq 'SOA') {
				#
				# Zone transfers start and end with the SOA
				# record. We only need to record one though.
				# If we don't see the second one then the
				# transfer has not succeeded.
				#
				$soa_seen++;
				last if ($soa_seen == 2);
			}
			#
			# Now read values until either an end of line
			# or a ')' is found.
			#
			$token_count = 0;
			$j = $linenum[$i];
			while ($linenum[$i] == $j && $i < @tokens) {
				if ($tokens[$i] eq '(') {
					# Parenthesized value list: may span
					# multiple lines, ends at ')'.
					while (1) {
						if (++$i >= @tokens) {
							&terror('xfr_invalid_rr', $rec{'line'});
						}
						elsif ($tokens[$i] eq ')') {
							$i++;
							last;
						}
						# Domain-name values (as told
						# by convert_value) get the
						# same canonicalization as
						# owner names.
						if (&convert_value($rec{'type'}, $token_count) >= 0) {
							$tokens[$i] = $origin if ($tokens[$i] eq '@');
							unless ($tokens[$i] =~ /\.$/o) {
								$tokens[$i] .= '.';
								$tokens[$i] .= $origin unless ($origin eq '.');
							}
							$tokens[$i] = lc($tokens[$i]);
						}
						$token_count++;
						push(@values, $tokens[$i]);
					}
					last;
				}
				else {
					if (&convert_value($rec{'type'}, $token_count) >= 0) {
						$tokens[$i] = $origin if ($tokens[$i] eq '@');
						unless ($tokens[$i] =~ /\.$/o) {
							$tokens[$i] .= '.';
							$tokens[$i] .= $origin unless ($origin eq '.');
						}
						$tokens[$i] = lc($tokens[$i]);
					}
					$token_count++;
					push(@values, $tokens[$i++]);
				}
			}
			#
			# Unfortunately, for stub zones we are likely to get
			# duplicate records returned since we are, in effect,
			# making two queries. To avoid showing two records
			# which are the same, we sift through the ones already
			# seen. Although this could be expensive, hopefully a
			# stub zone will have very few entries.
			#
			if ($type eq 'stub' && $rec{'type'} ne 'SOA') {
				if (exists($stub_recs{$rec{'fqdn'}}{$values[0]})) {
					next;
				}
				else {
					$stub_recs{$rec{'fqdn'}}{$values[0]}++;
				}
			}
			$rec{'values'} = \@values;
			$rec{'eline'} = $linenum[$i - 1];
			$rec{'num'} = $num++;
			push(@records, \%rec);
		}
	}
	# A transfer that showed only one SOA record was cut short.
	if ($dynamic_zone && $soa_seen == 1 && $type ne 'stub') {
		&terror('xfr_incomplete', $zone_name);
	}
	if ($want_sd) {
		# Publish the collected subdomain names for this zone.
		$sd_zones{$zone_name} = ();
		%{ $sd_zones{$zone_name} } = map { $_ => 1 } keys(%sd_names);
	}
	return @records;
}

#
# Create a new resource record.
#
# create_record(zone, file, name, ttl, class, type, values, comment)
#
# For a dynamic zone the record is added via nsupdate; for a static zone it
# is appended to the zone file with make_record().  The TTL defaults to the
# zone's default TTL for dynamic updates.  Returns nothing.
#
sub create_record {
	my $lref = my $dynamic_zone = my $default_ttl = my $ttl = 0;
	my $zone = my $file = my $zone_ref = my $str = my $err = '';

	$zone = $_[0];
	$file = $_[1];
	$zone_ref = &get_zone_data($zone);
	$dynamic_zone = $zone_ref->[1];
	$default_ttl = $zone_ref->[2];
	$ttl = &convert_time(0, $_[3]) if (defined($_[3]));
	if ($dynamic_zone) {
		if ($dynamic_zone == 1) {
			$ttl = $default_ttl unless ($ttl);
			# Build an nsupdate script and pipe it through the
			# nsupdate command.
			$str = 'local ' . $config{'ns_if_update'} . "\n";
			$str .= 'zone ' . $zone->{'value'} . "\n";
			$str .= "update add $_[2] $ttl $_[4] $_[5] $_[6]\n\n";
			$err = `echo "$str" | $config{'nsupdate_cmd'} -d 2>&1`;
			&check_update_err($err) if ($?);
		}
	}
	else {
		# Static zone: append the new record to the zone file.
		$_[3] = $ttl;
		$lref = &read_file_lines($config{'chroot'} .
							&absolute_path($file));
		push(@$lref, &make_record(@_[2 .. $#_]));
		&flush_file_lines();
	}
	return;
}

#
# Modify (update) an existing record.
#
# modify_record(zone, file, &old, name, ttl, class, type, values, comment)
#
# For a dynamic zone this deletes the old record and adds the new one via
# nsupdate (SOA records are replaced without a delete).
# NOTE(review): this function is truncated in this view of the source.
#
sub modify_record {
	my $lref = my $lines = my $v = 0;
	my $dynamic_zone = my $default_ttl = my $ttl = 0;
	my $zone = my $file = my $zone_ref = my $old = my $str = my $err = '';
	my $values = '';

	$zone = $_[0];
	$file = $_[1];
	$old = $_[2];
	$zone_ref = &get_zone_data($zone);
	$dynamic_zone = $zone_ref->[1];
	$default_ttl = $zone_ref->[2];
	$ttl = &convert_time(0, $_[4]) if (defined($_[4]));
	if ($dynamic_zone) {
		if ($dynamic_zone == 1) {
			$ttl = $default_ttl unless ($ttl);
			#
			# We need to delete the old record rather than just
			# changing the current one because the name and its
			# value(s) may have changed. Thus we must, in effect,
			# remove the old record first - except for SOA records.
			#
			if ($_[6] ne 'SOA') {
				$values = '';
				for ($v = 0; defined($old->{'values'}->[$v]); $v++) {
					$values .= ' ' . $old->{'values'}->[$v];
				}
				if ($v == 0) {		# No values seen.
					&terror('update_no_data');
				}
				$str = 'local ' . $config{'ns_if_update'} . "\n";
				$str .= 'zone ' . $zone->{'value'} . "\n";
				$str .= "prereq yxrrset $old->{'fqdn'} $old->{'class'} $old->{'type'}\n";
				$str .= "update delete $old->{'fqdn'} $old->{'class'} $old->{'type'} $values\n";
			}
			$str .= "update add $_[3] $ttl $_[5] $_[6] $_[7]\n\n";

?? 快捷鍵說明

復(fù)制代碼 Ctrl + C
搜索代碼 Ctrl + F
全屏模式 F11
切換主題 Ctrl + Shift + D
顯示快捷鍵 ?
增大字號 Ctrl + =
減小字號 Ctrl + -
亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频
不卡一区二区中文字幕| 91精品一区二区三区在线观看| 国产麻豆视频精品| 日本91福利区| 另类人妖一区二区av| 精品一区二区三区在线播放| 久久精品国产澳门| 紧缚奴在线一区二区三区| 久久66热偷产精品| 精品无码三级在线观看视频| 国产一区二区在线观看视频| 国产一区二区三区视频在线播放| 九九久久精品视频| 国产精品伊人色| 成人午夜视频网站| 色综合天天综合| 欧美在线观看视频一区二区| 欧美日韩精品高清| 欧美xfplay| 日本一区二区三区电影| 亚洲欧美一区二区在线观看| 亚洲精品一二三区| 99精品一区二区| 欧美专区日韩专区| 91精品麻豆日日躁夜夜躁| 精品国产一二三区| 国产精品久久久久久久裸模| 亚洲综合另类小说| 美国毛片一区二区三区| 国产 日韩 欧美大片| 色先锋久久av资源部| 日韩视频一区二区三区在线播放| 2023国产一二三区日本精品2022| 中文字幕巨乱亚洲| 亚洲国产精品一区二区www在线 | 国产精品国产三级国产普通话三级| 自拍偷拍国产精品| 日韩国产欧美三级| 国产成人免费视频一区| 欧美在线免费视屏| 久久免费国产精品| 艳妇臀荡乳欲伦亚洲一区| 免费观看久久久4p| 成人91在线观看| 4438x亚洲最大成人网| 国产精品区一区二区三区| 亚洲午夜日本在线观看| 国产精品99久久久久久久女警 | 国产视频在线观看一区二区三区| 综合在线观看色| 麻豆精品一区二区综合av| www.日韩精品| 欧美一区二区三区四区视频| 中文字幕中文字幕一区| 日本在线不卡视频一二三区| 波多野结衣欧美| 日韩免费福利电影在线观看| 亚洲久草在线视频| 国产综合色在线| 欧美日韩国产欧美日美国产精品| 欧美国产国产综合| 日本v片在线高清不卡在线观看| eeuss鲁片一区二区三区| 日韩免费一区二区| 亚洲午夜羞羞片| 99久久精品国产麻豆演员表| 久久综合色之久久综合| 午夜影视日本亚洲欧洲精品| 97超碰欧美中文字幕| 2023国产精品自拍| 轻轻草成人在线| 欧美性猛交xxxx乱大交退制版| 欧美国产一区视频在线观看| 久久精品理论片| 欧美美女网站色| 一区二区成人在线视频| 成人一区二区在线观看| 精品国产123| 日韩av一区二区三区四区| 在线观看一区二区视频| 中文字幕一区日韩精品欧美| 国产精品一线二线三线精华| 91精品国产91热久久久做人人| 亚洲一区在线观看视频| 成年人午夜久久久| 国产喂奶挤奶一区二区三区| 久久99精品国产麻豆不卡| 3d成人h动漫网站入口| 亚洲一区视频在线观看视频| 99视频精品免费视频| 国产精品无码永久免费888| 久久91精品久久久久久秒播| 欧美一区三区二区| 日韩电影在线观看电影| 欧美日韩激情在线| 午夜影视日本亚洲欧洲精品| 欧美性猛片xxxx免费看久爱| 亚洲影院在线观看| 欧美视频一二三区| 亚洲一区二区三区中文字幕| 91黄视频在线| av在线一区二区三区| 国产午夜精品久久久久久免费视 | 亚洲三级小视频| 99视频精品免费视频| 中文字幕在线视频一区| 成人不卡免费av| 自拍偷拍亚洲综合| 91视频在线观看免费| 亚洲人成网站精品片在线观看| 播五月开心婷婷综合| 亚洲欧美日本韩国| 在线影视一区二区三区| 亚洲国产另类av| 91精品国产欧美一区二区| 久久精品国产99国产精品| 久久影院视频免费| 高清beeg欧美| 亚洲欧美一区二区不卡| 欧美色电影在线| 蜜臀久久久久久久| 欧美精品一区二区三区视频| 国产成人日日夜夜| 亚洲欧美偷拍卡通变态| 欧美日韩国产高清一区二区三区 | 首页欧美精品中文字幕| 日韩视频免费直播| 丁香一区二区三区| 亚洲精品国久久99热| 91.xcao| 韩国女主播成人在线| 国产精品国产三级国产aⅴ无密码 国产精品国产三级国产aⅴ原创 | 亚洲电影激情视频网站| 一区二区三区蜜桃网| 欧美高清你懂得| 国产毛片精品国产一区二区三区| 国产精品萝li| 欧美日韩视频在线一区二区 | 欧美日韩国产片| 国产一区美女在线| 亚洲精品成人悠悠色影视| 3d成人h动漫网站入口| 国产成人亚洲综合色影视| 一区二区三区欧美久久| 欧美xxxx在线观看| 99精品视频在线免费观看| 日韩专区一卡二卡| 国产精品麻豆一区二区| 欧美伦理电影网| 从欧美一区二区三区| 亚洲成人一区二区| 国产亚洲一区字幕| 欧美视频一二三区| 盗摄精品av一区二区三区| 午夜精品一区二区三区三上悠亚| 久久久亚洲精华液精华液精华液| 91成人在线免费观看| 国产一区二区主播在线| 亚洲福利国产精品| 国产精品天美传媒| 欧美电影在线免费观看| www.欧美色图| 久久精品理论片| 亚洲国产一区二区在线播放| 
国产色一区二区| 69堂亚洲精品首页| 99久久精品国产观看| 国产一区三区三区| 日韩中文欧美在线| 一区二区三区欧美| 国产精品美女久久久久久久| 欧美岛国在线观看| 欧美色精品在线视频| 99这里都是精品| 国产成人一级电影| 精品中文av资源站在线观看| 一区二区三区视频在线看| 欧美极品少妇xxxxⅹ高跟鞋| 91精品国产91综合久久蜜臀| 在线视频一区二区三区| 成人爱爱电影网址| 国产精品亚洲人在线观看| 美国av一区二区| 三级久久三级久久| 亚洲一级二级三级在线免费观看| 欧美国产一区视频在线观看| 精品国产亚洲一区二区三区在线观看| 欧美日韩一区二区三区在线| 91免费版在线| 99久久精品免费看| 成人性视频网站| 国产91在线看| 国产精品正在播放| 国产精一区二区三区| 精品一区二区三区视频在线观看| 日本不卡视频一二三区| 日韩vs国产vs欧美| 婷婷六月综合网| 天堂在线亚洲视频| 日韩精品91亚洲二区在线观看|