亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频

? 歡迎來到蟲蟲下載站! | ?? 資源下載 ?? 資源專輯 ?? 關于我們
? 蟲蟲下載站

?? pmc.pm

?? 一個論文管理系統
?? PM
字號:
# $Id: PMC.pm,v 1.5 2005/10/17 16:17:57 ben Exp $
#
# Copyright 2005 Nature Publishing Group
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# The Bibliotech::CitationSource::PMC class retrieves citation data for articles
# on pubmedcentral.org using an open archives initiative protocol for metadata
# harvesting (OAI-PMH).

package Bibliotech::CitationSource::PMC;
use strict;
use warnings;
use Bibliotech::CitationSource;
use base 'Bibliotech::CitationSource';
use Bibliotech::CitationSource::Simple;
use Data::Dumper;
use XML::LibXML;
use XML::LibXML::NodeList;
use HTTP::OAI::Harvester;

use constant VERSION      => '2.0';                                       # version for Harvester Identify
use constant PMC_BASE_URL => 'http://www.pubmedcentral.gov/oai/oai.cgi';  # baseURL for Harvester Identify
use constant META_PREFIX  => 'pmc_fm';                                    # return metadata only
use constant ID_PREFIX    => 'oai:pubmedcentral.gov';                     # prefix for Harvester GetRecord() identifier

sub api_version {
  1;
}

sub name {
  'PMC';
}

sub version {
  '$Revision: 1.5 $';
}

# Decide whether this module can handle $uri.
# Returns 0 (no), 1 (PMC article id in query), or 2 (PubMed id in query —
# handled by delegating to the Pubmed citation source).
sub understands {
  my ($self, $uri) = @_;

  # check the host
  return 0 unless $uri->scheme =~ /^http$/i;
  return 0 unless $uri->host =~ m/^(www\.)?pubmedcentral(\.nih)?\.(gov|org)$/;

  # check there's a query
  return 0 unless $uri->query;

  # check the path: article/page render, or PDF blob render
  return 0 unless ($uri->path =~ m/articlerender\.fcgi/ ||
                   $uri->path =~ m/pagerender\.fcgi/ ||
                   ($uri->path =~ m/picrender\.fcgi/ && $uri->query =~ /blobtype=pdf/i));

  # finally, check the query for an article page request
  return 1 if $uri->query =~ m/artid=[0-9]+/;
  return 2 if $uri->query =~ m/pubmedid=[0-9]+/;
  return 0;
}

# Return a ResultList of citation data for $uri, or undef on any failure
# (the reason is recorded via errstr).
sub citations {
  my ($self, $uri) = @_;

  my $understands = $self->understands($uri);
  return undef unless $understands;

  # pubmedid URLs are handed off to the Pubmed citation source
  if ($understands == 2) {
    $uri->query =~ m/pubmedid=([0-9]+)/;
    my %id = (db => 'pubmed', pubmed => $1);
    return $self->citations_id_switch('Pubmed', \%id);
  }

  my $art_id = $self->get_art_id($uri);
  return undef unless $art_id;

  my $metadata = $self->metadata($art_id);
  return undef unless $metadata;

  # was: indirect object syntax "new Class(...)" — use arrow form
  return Bibliotech::CitationSource::ResultList->new(
      Bibliotech::CitationSource::Result::Simple->new($metadata));
}

#
# The PubMed Central OAI service (PMC-OAI) provides access to metadata of all
# items in the PubMed Central (PMC) archive, as well as to the full text of a
# subset of these items.
#
# Peak hours for requests are Monday to Friday, 5:00 AM to 9:00 PM, U.S.
# Eastern time. Do not make more than one request every 3 seconds, even at
# off-peak times.
#
# Fetch and validate the metadata hash for one PMC article id; undef on failure.
sub metadata {
  my ($self, $art_id) = @_;

  # harvest the PMC-OAI static repository with the Identify method, at baseURL
  # (we already know the Identify object)
  my $h = HTTP::OAI::Harvester->new(
      repository => HTTP::OAI::Identify->new(baseURL => PMC_BASE_URL, version => VERSION)
  );

  # get corresponding record for $art_id from repository
  my ($gr) = $h->GetRecord(
      identifier     => ID_PREFIX . ":" . $art_id,  # required
      metadataPrefix => META_PREFIX,                # required
  );
  if ($gr->errors) {
    $self->errstr('GetRecord Error for ' . $art_id);
    return undef;
  }

  # get first record from GetRecord object (first record stored in response)
  # ??how likely will it be to have multiple records returned for an artid??
  my ($rec) = $gr->next;
  unless ($rec) {
    $self->errstr("No records");
    return undef;
  }

  # get the parsed DOM tree and extract the metadata from it
  my ($dom) = $rec->metadata->dom;
  my $metadata = $self->build_metadata($dom);
  return undef unless $metadata;

  # check that it's worth returning
  unless ($metadata->{'journal'} && $metadata->{'pubdate'}) {
    $self->errstr('Insufficient metadata extracted for artid: ' . $art_id);
    return undef;
  }
  return $metadata;
}

# Extract the artid query parameter from an http URI; undef when absent.
sub get_art_id {
  my ($self, $uri) = @_;
  my %q_hash = $uri->query_form;
  return undef unless $uri->scheme eq 'http' && keys %q_hash;
  return $q_hash{'artid'};
}

# index 0 is a placeholder so that month number 1 => "Jan"
my @monthnames = ("", "Jan", "Feb", "Mar", "Apr", "May", "Jun",
                  "Jul", "Aug", "Sep", "Oct", "Nov", "Dec");

# Walk the pmc_fm DOM and build the citation metadata hash
# (title, journal, pages, volume/issue, ids, pubdate, authors).
sub build_metadata {
  my ($self, $dom) = @_;

  my $root = $dom->getDocumentElement;
  unless ($root) {
    $self->errstr("no root");
    return undef;  # was falling through and crashing on an undef $root
  }

  # grab journal-meta node
  my $jmeta        = getFirstElement($root, 'journal-meta');
  my $journaltitle = getFirstElement($jmeta, 'journal-title');

  # get print issue number (pub-type may be absent — guard against undef)
  my $issn = "";
  foreach my $i ($jmeta->getElementsByLocalName('issn')) {
    my $pubtype = $i->getAttribute('pub-type');
    if (defined $pubtype and $pubtype eq "ppub") {
      $issn = $i->string_value;
    }
  }

  # now grab article-meta node
  my $artmeta = getFirstElement($root, 'article-meta');
  my $title   = getFirstElement($artmeta, 'article-title');
  my $fpage   = getFirstElement($artmeta, 'fpage');
  my $lpage   = getFirstElement($artmeta, 'lpage');
  my $vol     = getFirstElement($artmeta, 'volume');
  my $issue   = getFirstElement($artmeta, 'issue');

  # convert nodes to strings, checking for undef first
  ($journaltitle, $title, $fpage, $lpage, $vol, $issue) =
      map { $_->string_value if $_ } ($journaltitle, $title, $fpage, $lpage, $vol, $issue);

  # sort out page range; pages may be non-numeric (e.g. "e42"), so compare as strings
  my $page;
  $page = $fpage if $fpage;
  $page = $fpage . ' - ' . $lpage if $fpage && $lpage && $fpage ne $lpage;

  # get identifiers
  my ($pmid, $doi);
  foreach my $i ($artmeta->getElementsByLocalName('article-id')) {
    my $pubtype = $i->getAttribute('pub-id-type');
    next unless defined $pubtype;
    $pmid = $i->string_value if $pubtype eq "pmid";
    $doi  = $i->string_value if $pubtype eq "doi";
  }

  # get pub date: prefer whichever of print (ppub) / electronic (epub)
  # carries more date components
  my ($day, $month, $year) = ('', '', '');
  my ($pday, $pmonth, $pyear, $eday, $emonth, $eyear);
  foreach my $pd ($artmeta->getElementsByLocalName('pub-date')) {
    my $pubtype = $pd->getAttribute('pub-type');
    next unless defined $pubtype;
    if ($pubtype eq "ppub") {
      $pday   = getFirstElement($pd, 'day');
      $pmonth = getFirstElement($pd, 'month');
      $pyear  = getFirstElement($pd, 'year');
    }
    if ($pubtype eq "epub") {
      $eday   = getFirstElement($pd, 'day');
      $emonth = getFirstElement($pd, 'month');
      $eyear  = getFirstElement($pd, 'year');
    }
  }
  my @pcount = grep { defined $_ } ($pday, $pmonth, $pyear);
  my @ecount = grep { defined $_ } ($eday, $emonth, $eyear);
  if (@ecount > @pcount) {
    $day   = $eday->string_value   if $eday;
    $month = $emonth->string_value if $emonth;
    $year  = $eyear->string_value  if $eyear;
  }
  else {
    $day   = $pday->string_value   if $pday;
    $month = $pmonth->string_value if $pmonth;
    $year  = $pyear->string_value  if $pyear;
  }

  # map a numeric month to its name; leave blank for missing/odd values
  my $monthname = ($month =~ /^\d+$/ && $month >= 1 && $month <= 12)
      ? $monthnames[$month] : '';

  # get the author info
  my @contrib_groups = $artmeta->getElementsByLocalName('contrib-group');
  my $authors;
  $authors = getAuthors(@contrib_groups) if @contrib_groups;

  return {
    title   => $title,
    pubdate => "$day $monthname $year",
    journal => { name => $journaltitle,
                 issn => $issn,
               },
    page    => $page,
    volume  => $vol,
    issue   => $issue,
    pubmed  => $pmid,
    doi     => $doi,
    authors => $authors,
  };
}

# Collect author names from every contrib-group passed in.
# (Previously only the first group was unpacked, silently dropping the rest.)
# Returns an arrayref of { forename, lastname } hashes, or undef when empty.
sub getAuthors {
  my (@contrib_groups) = @_;
  my @auList;

  # build names for each contrib whose contrib-type = "author" (others: collab)
  foreach my $group (@contrib_groups) {
    foreach my $c ($group->getElementsByLocalName('contrib')) {
      my $type = $c->getAttribute('contrib-type');
      next unless defined $type and $type eq "author";
      next unless getFirstElement($c, 'name');

      # given-names / surname may individually be absent — guard method calls
      my $given   = getFirstElement($c, 'given-names');
      my $surname = getFirstElement($c, 'surname');
      my $name = {};
      $name->{'forename'} = $given->string_value   if $given;
      $name->{'lastname'} = $surname->string_value if $surname;
      push @auList, $name if %$name;
    }
  }

  return @auList ? \@auList : undef;
}

#
# get the first element in the array returned by getElementsByLocalName
#
sub getFirstElement {
  my ($node, $name) = @_;
  my @values = $node->getElementsByLocalName($name);
  return $values[0];
}

#sub errstr {
#    my ($self, $err) = @_;
#
#    print STDERR $self->name . " " . $err . "\n";
#}

# true!
1;

?? 快捷鍵說明

復制代碼 Ctrl + C
搜索代碼 Ctrl + F
全屏模式 F11
切換主題 Ctrl + Shift + D
顯示快捷鍵 ?
增大字號 Ctrl + =
減小字號 Ctrl + -
亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频
欧美日韩国产一级| 国产成人精品免费一区二区| 国产精品久久夜| 国产色婷婷亚洲99精品小说| 精品国产乱码久久久久久牛牛| 91精品国产综合久久香蕉的特点| 欧美性受xxxx| 欧美精品在线观看一区二区| 精品视频在线看| 欧美一级一区二区| 精品国产三级a在线观看| 欧美大片一区二区三区| 精品噜噜噜噜久久久久久久久试看| 日韩精品一区二区三区视频| 久久综合一区二区| 日本一二三不卡| 成人欧美一区二区三区黑人麻豆 | 国产在线精品免费av| 国产真实乱对白精彩久久| 国产美女精品人人做人人爽| www.日韩大片| 欧美电影影音先锋| 久久丝袜美腿综合| 亚洲欧美日韩精品久久久久| 丝袜诱惑制服诱惑色一区在线观看 | 日韩黄色免费网站| 狠狠色伊人亚洲综合成人| 波多野结衣在线一区| 91国偷自产一区二区三区成为亚洲经典 | 国产精品久久久久三级| 午夜一区二区三区视频| 精品无人码麻豆乱码1区2区| 激情伊人五月天久久综合| 不卡区在线中文字幕| 欧美日韩国产乱码电影| 国产午夜精品一区二区三区嫩草 | 这里只有精品99re| 欧美极品少妇xxxxⅹ高跟鞋| 亚洲在线一区二区三区| 久久精品999| 色香蕉久久蜜桃| 日韩精品一区在线| 一区二区三区视频在线看| 国产一区二区三区精品欧美日韩一区二区三区| 国产ts人妖一区二区| 欧美日韩在线一区二区| 国产精品人成在线观看免费| 奇米777欧美一区二区| a在线欧美一区| 日韩一二三区不卡| 一级特黄大欧美久久久| 国产精品一区二区在线播放| 欧美午夜宅男影院| 国产精品美女久久久久久久久 | 亚洲男人的天堂av| 国产大陆亚洲精品国产| 在线电影院国产精品| 亚洲女人小视频在线观看| 国产成人在线视频免费播放| 欧美高清激情brazzers| 亚洲精品午夜久久久| 成人三级伦理片| 国产欧美一区二区精品仙草咪| 蜜臀av性久久久久蜜臀av麻豆| 欧美少妇bbb| 一区二区三区在线观看网站| 成人午夜在线免费| 久久久久久黄色| 久久电影国产免费久久电影| 91麻豆精品国产91久久久久| 亚洲成av人在线观看| 色婷婷国产精品综合在线观看| 中文字幕一区二区三区四区不卡| 美脚の诱脚舐め脚责91| 欧美videossexotv100| 久久精品国产一区二区| 精品国产乱码久久久久久牛牛| 极品尤物av久久免费看| 久久奇米777| 盗摄精品av一区二区三区| 国产精品欧美一级免费| 99热精品国产| 黄色日韩三级电影| 欧美不卡一区二区三区四区| 国产在线国偷精品免费看| 久久久精品日韩欧美| 国产jizzjizz一区二区| 亚洲天天做日日做天天谢日日欢 | 老司机精品视频在线| 精品区一区二区| 大白屁股一区二区视频| 亚洲三级在线看| 欧美日本乱大交xxxxx| 久久99九九99精品| 国产精品每日更新| 欧美日韩在线一区二区| 久久精品久久综合| 亚洲欧美中日韩| 欧美区在线观看| 久久99国产精品麻豆| 国产欧美日韩亚州综合| 色婷婷激情久久| 精品一区二区三区久久久| 国产拍欧美日韩视频二区| 欧美综合一区二区| 久久成人18免费观看| 亚洲欧美一区二区三区极速播放| 777亚洲妇女| 97se亚洲国产综合自在线观| 亚洲第一激情av| 国产精品美女久久久久久久网站| 欧美日韩国产在线观看| 处破女av一区二区| 香蕉成人啪国产精品视频综合网| 久久综合久久综合久久| 欧美中文一区二区三区| 国产成人自拍网| 蜜桃一区二区三区在线观看| 中文字幕一区二区三区色视频| 51精品国自产在线| 91亚洲男人天堂| 激情六月婷婷综合| 日韩精品每日更新| 亚洲精品视频免费看| 国产亚洲成aⅴ人片在线观看 | 国模少妇一区二区三区| 午夜精品久久久久久久99水蜜桃 | 国产露脸91国语对白| 午夜视频久久久久久| 成人免费一区二区三区视频 | 精品精品欲导航| 欧美福利一区二区| 色婷婷亚洲综合| 成人av网址在线| 国产美女视频91| 狠狠色狠狠色综合日日91app| 亚洲韩国精品一区| 亚洲精品日日夜夜| 亚洲欧美综合色| 国产精品大尺度| 中文字幕欧美日本乱码一线二线| 日韩免费电影网站| 宅男噜噜噜66一区二区66| 欧美三级电影网站| 欧美在线一二三| 日本丰满少妇一区二区三区| 91色porny| 日本精品一级二级| 本田岬高潮一区二区三区| 国产乱码精品一区二区三区av| 精品一区二区三区免费毛片爱| 一区二区三区精品视频在线| 一区二区三区四区不卡视频| 一区二区三区四区激情| 亚洲一区电影777| 亚洲国产中文字幕| 五月婷婷激情综合| 美女视频黄免费的久久| 极品少妇一区二区三区精品视频| 激情综合网av| 成人黄色a**站在线观看| 
粉嫩欧美一区二区三区高清影视| 国产成人精品午夜视频免费| 国产成人av一区二区三区在线观看| 日韩精品一级中文字幕精品视频免费观看| 亚洲综合一区二区三区| 天天av天天翘天天综合网色鬼国产| 天堂成人国产精品一区| 免费成人在线观看| 国产精品影视天天线| 99久久免费视频.com| 欧洲一区二区三区免费视频| 91精品国产91综合久久蜜臀| 亚洲精品在线观看视频| 亚洲欧洲精品天堂一级 | 日韩欧美亚洲国产精品字幕久久久| 欧美一区二区三区四区五区| 精品国产乱码久久久久久蜜臀| 久久久久久久av麻豆果冻| 亚洲男人电影天堂| 免费的成人av| 99视频精品免费视频| 欧美精品在线一区二区三区| 久久亚洲捆绑美女| 尤物在线观看一区| 卡一卡二国产精品| 99久久99久久精品国产片果冻| 欧美日韩免费高清一区色橹橹| 久久久久久麻豆| 一区二区在线免费| 久久99精品国产.久久久久久| www.综合网.com| 日韩精品一区二区三区视频在线观看| 国产精品九色蝌蚪自拍| 麻豆精品视频在线观看| 99国产精品国产精品毛片| 日韩欧美激情一区| 亚洲一区二区欧美| 成人18视频日本| 久久综合久久综合亚洲|