

distbindata.cc

Torch tracking code: a Torch3 vision program that computes a distance between two bindata files.
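The command line set up in main() below takes a template bindata file as its first argument, followed by one or more probe bindata files (or a list file), plus options such as -verbose, -score, -use_mean and one metric flag (-pearson, -canberra, -mahalanobis, -td, ...). A typical invocation might look like the line below; the binary name is an assumption, since it depends on how Torch3 vision is built:

    distbindata template.bindata probe.bindata -pearson -score scores.txt -verbose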
const char *help = "\
progname: distbindata.cc\n\
code2html: This program computes a distance between 2 bindata files.\n\
version: Torch3 vision2.0, 2004-2006\n\
(c) Sebastien Marcel (marcel@idiap.ch)\n";

// core
#include "string_utils.h"

// datasets
#include "FileBinDataSet.h"

// machines
#include "ConnectedMachine.h"
#include "Linear.h"
#include "Tanh.h"
#include "LogSoftMax.h"

// normalisation
#include "MyMeanVarNorm.h"

// metrics
#include "Pearson.h"
#include "Canberra.h"
#include "NormalizeCorrelation.h"
#include "StandardCorrelation.h"
#include "StandardCovariance.h"
#include "ChiSquare.h"
#include "TangentDistance.h"
#include "Mahanalobis.h"

// eigen
#include "PCAMachine.h"

// misc
#include "CmdLine.h"
#include "FileListCmdOption.h"

using namespace Torch;

real mMlpOneHot(int n_inputs, real *x, real *y, ConnectedMachine *mlp, MyMeanVarNorm *mv_norm, Sequence *seq, bool diff, bool delta)
{
    if(diff)
        for(int i = 0 ; i < n_inputs ; i++) seq->frames[0][i] = x[i] - y[i];
    else if(delta)
    {
        int j = 0;
        for(int i = 0 ; i < n_inputs ; i++, j++) seq->frames[0][j] = x[i] - y[i];
        for(int i = 0 ; i < n_inputs ; i++, j++) seq->frames[0][j] = y[i];
    }
    else
    {
        int j = 0;
        for(int i = 0 ; i < n_inputs ; i++, j++) seq->frames[0][j] = x[i];
        for(int i = 0 ; i < n_inputs ; i++, j++) seq->frames[0][j] = y[i];
    }

    mv_norm->preProcessInputs(seq);
    mlp->forward(seq);

    return mlp->outputs->frames[0][0] - mlp->outputs->frames[0][1];
}

int main(int argc, char **argv)
{
    char *template_file;
    char *model_file;
    char *score_filename;
    char *norm_model_filename;
    bool use_mean_template;
    bool verbose;
    bool one_score_per_file;
    //
    bool mahalanobis;
    bool canberra;
    bool pearson;
    bool nc;
    bool stdcor;
    bool stdcov;
    bool chisquare;
    bool td;
    bool mlpmetric1hot;
    //
    bool diff;
    bool delta;
    int dim;
    //
    int width;
    int height;
    //
    Allocator *allocator = new Allocator;
    DiskXFile::setLittleEndianMode();

    //=================== The command-line ==========================
    FileListCmdOption filelist("file name", "the list files or one data file");
    filelist.isArgument(true);

    // Construct the command line
    CmdLine cmd;
    cmd.setBOption("write log", false);

    // Put the help line at the beginning
    cmd.info(help);

    // Train mode
    cmd.addText("\nArguments:");
    cmd.addSCmdArg("template", &template_file, "the template file to compare with");
    cmd.addCmdOption(&filelist);
    cmd.addText("\nOptions:");
    cmd.addBCmdOption("-verbose", &verbose, false, "verbose", true);
    cmd.addBCmdOption("-use_mean", &use_mean_template, false, "use the mean model", true);
    cmd.addBCmdOption("-one_score_per_file", &one_score_per_file, false, "computes one score per input file", true);
    cmd.addSCmdOption("-score", &score_filename, "", "score filename");
    cmd.addSCmdOption("-norm", &norm_model_filename, "", "norm model filename");
    cmd.addText("\nFeatures:");
    cmd.addBCmdOption("-diff", &diff, false, "diff input features", true);
    cmd.addBCmdOption("-delta", &delta, false, "delta input features", true);
    cmd.addICmdOption("-dim", &dim, -1, "dimension to use", true);
    cmd.addText("\nMetrics:");
    cmd.addBCmdOption("-mahalanobis", &mahalanobis, false, "Mahalanobis metric", true);
    cmd.addBCmdOption("-canberra", &canberra, false, "Canberra metric", true);
    cmd.addBCmdOption("-pearson", &pearson, false, "one minus Pearson correlation", true);
    cmd.addBCmdOption("-nc", &nc, false, "Normalized correlation", true);
    cmd.addBCmdOption("-stdcor", &stdcor, false, "Standard Correlation", true);
    cmd.addBCmdOption("-stdcov", &stdcov, false, "Standard Covariance", true);
    cmd.addBCmdOption("-chisquare", &chisquare, false, "Chi Square", true);
    cmd.addBCmdOption("-td", &td, false, "tangent distance", true);
    cmd.addICmdOption("-width", &width, -1, "width of the image for tangent distance", true);
    cmd.addICmdOption("-height", &height, -1, "height of the image for tangent distance", true);
    cmd.addBCmdOption("-mlpmetric1hot", &mlpmetric1hot, false, "mlpmetric1hot distance");
    cmd.addSCmdOption("-model", &model_file, "", "model filename");

    // Read the command line
    cmd.read(argc, argv);

    //
    if(verbose)
    {
        if(mahalanobis) print("Using Mahalanobis-cosine metric with PCA model %s\n", model_file);
        else if(td) print("Using Tangent distance on %dx%d images\n", width, height);
        else if(canberra) print("Using Canberra metric\n");
        else if(pearson) print("Using one minus Pearson correlation\n");
        else if(nc) print("Using Normalized correlation\n");
        else if(stdcor) print("Using Standard Correlation\n");
        else if(stdcov) print("Using Standard Covariance\n");
        else if(chisquare) print("Using Chi Square\n");
        else if(mlpmetric1hot) print("Using One hot MLP metric with model %s\n", model_file);
        else print("No metric chosen, setting to Euclidean by default\n");

        print(" + n_filenames = %d\n", filelist.n_files);
        for(int i = 0 ; i < filelist.n_files ; i++)
            print("   filename[%d] = %s\n", i, filelist.file_names[i]);
    }

    // load the template
    int n_inputs_template;
    int n_patterns_model;

    DiskXFile model(template_file, "r");
    model.read(&n_patterns_model, sizeof(int), 1);
    model.read(&n_inputs_template, sizeof(int), 1);

    if(verbose)
    {
        print(" Number of inputs = %d\n", n_inputs_template);
        print(" Number of reference patterns = %d\n", n_patterns_model);
    }

    real **ref_model = new real*[n_patterns_model];
    real *mean_model = new real [n_inputs_template];
    for(int j=0; j< n_inputs_template; j++) mean_model[j] = 0.0;
    for(int p = 0 ; p < n_patterns_model ; p++)
    {
        ref_model[p] = new real [n_inputs_template];
        model.read(ref_model[p], sizeof(real), n_inputs_template);
        for(int j=0; j< n_inputs_template; j++)
        {
            mean_model[j] += ref_model[p][j];
        }
    }
    for(int j=0; j< n_inputs_template; j++) mean_model[j] /= (real) n_patterns_model;

    real *inputs = new real [n_inputs_template];

    // load the normalization
    DiskXFile *normfile = NULL;
    real mu = 0.0;
    real sigma = 1.0;
    if(strcmp(norm_model_filename, "") != 0)
    {
        normfile = new(allocator) DiskXFile(norm_model_filename, "r");
        normfile->read(&mu, sizeof(real), 1);
        normfile->read(&sigma, sizeof(real), 1);
        print("Norm model (%s): mu=%g \t sigma = %g\n", norm_model_filename, mu, sigma);
    }

    //
    DiskXFile *scorefile = NULL;
    if(strcmp(score_filename, "") != 0) scorefile = new(allocator) DiskXFile(score_filename, "w");

    // create the metric
    Metric *metric = NULL;
    ConnectedMachine *mlp = NULL;
    MyMeanVarNorm *mv_norm = NULL;
    Sequence *seq;
    PCAMachine *pca_machine = NULL;

    int dim_ = dim;
    if((dim_ == -1) || (dim_ > n_inputs_template)) dim_ = n_inputs_template;

    if(canberra) metric = new mCanberra(dim_);
    else if(pearson) metric = new mPearson(dim_);
    else if(nc) metric = new mNC(dim_);
    else if(stdcor) metric = new mStdCorrelation(dim_);
    else if(stdcov) metric = new mStdCovariance(dim_);
    else if(chisquare) metric = new mChiSquare(dim_);
    else if(td)
    {
        if(width != -1 && height != -1 && width * height == n_inputs_template)
            metric = new mTangentDistance(width, height);
        else error("width(%d) or height (%d) incorrect for Tangent Distance", width, height);
    }
    else if(mahalanobis)
    {
        if(strcmp(model_file, ""))
        {
            pca_machine = new PCAMachine(n_inputs_template);

            DiskXFile *file = NULL;
            file = new DiskXFile(model_file, "r");
            pca_machine->loadXFile(file);
            delete file;

            pca_machine->setIOption("verbose_level", 1);
            pca_machine->setROption("variance", -1.0);
            pca_machine->init();

            if(dim > 0) pca_machine->n_outputs = dim;

            metric = new mMahanalobisCosine(n_inputs_template, pca_machine);
        }
        else error("No PCA model available for Mahalanobis");
    }
    else if(mlpmetric1hot)
    {
        if(strcmp(model_file, ""))
        {
            int n_inputs_;
            int n_hu;
            int n_outputs;

            print("Loading One hot MLP metric\n");

            DiskXFile mlpmodel(model_file, "r");
            mlpmodel.taggedRead(&n_inputs_, sizeof(int), 1, "N_INPUTS");
            mlpmodel.taggedRead(&n_hu, sizeof(int), 1, "N_HU");
            mlpmodel.taggedRead(&n_outputs, sizeof(int), 1, "N_OUTPUTS");

            print(" Number of inputs = %d\n", n_inputs_);
            print(" Number of hidden units = %d\n", n_hu);
            print(" Number of outputs = %d\n", n_outputs);

            if(diff)
            {
                print("Using diff features.\n");
                if(n_inputs_ != n_inputs_template) error("Number of inputs incorrect.");
            }
            else
            {
                if(delta) print("Using delta features.\n");
                if(n_inputs_ != 2*n_inputs_template) error("Number of inputs incorrect.");
            }
            if(n_outputs != 2) error("Number of outputs incorrect.");

            //
            mlp = new(allocator) ConnectedMachine;
            Linear *c1 = new(allocator) Linear(n_inputs_, n_hu);
            Tanh *c2 = new(allocator) Tanh(n_hu);
            Linear *c3 = new(allocator) Linear(n_hu, n_outputs);
            GradientMachine *c4 = new(allocator) LogSoftMax(n_outputs);
            mlp->addFCL(c1);
            mlp->addFCL(c2);
            mlp->addFCL(c3);
            mlp->addFCL(c4);
            mlp->build();

            //
            mv_norm = new(allocator) MyMeanVarNorm(n_inputs_, 1);

            //
            mv_norm->loadXFile(&mlpmodel);
            mlp->loadXFile(&mlpmodel);

            seq = new(allocator) Sequence(1, n_inputs_);
        }
        else error("No model available");
    }
    else metric = new mEuclidean(dim_);

    for(int i = 0 ; i < filelist.n_files ; i++)
    {
        if(verbose) print(" + filename[%d] = %s\n", i, filelist.file_names[i]);

        char *temp = strBaseName(filelist.file_names[i]);
        char *file_name = strRemoveSuffix(temp);

        if(scorefile != NULL)
            if(one_score_per_file) scorefile->printf("%s ", file_name);

        int n_inputs;
        int n_patterns;

        // Test the file
        DiskXFile *file = new DiskXFile(filelist.file_names[i], "r");
        file->read(&n_patterns, sizeof(int), 1);
        file->read(&n_inputs, sizeof(int), 1);

        if(verbose)
        {
            print("Reading bindata file (%s)\n", filelist.file_names[i]);
            print("   n_inputs = %d\n", n_inputs);
            print("   n_patterns = %d\n", n_patterns);
        }

        if(n_inputs != n_inputs_template)
            error("Incorrect number of inputs (%d <> %d) !", n_inputs, n_inputs_template);

        real min_ = +1000.0;
        real max_ = -1000.0;
        real sum_ = 0.0;
        real avg;

        for(int j=0; j< n_patterns; j++)
        {
            if(!one_score_per_file) scorefile->printf("%s_%03d ", file_name, j);

            file->read(inputs, sizeof(real), n_inputs);

            real d = 0.0;

            if(use_mean_template)
            {
                if(mlpmetric1hot) d = -mMlpOneHot(n_inputs, inputs, mean_model, mlp, mv_norm, seq, diff, delta);
                else d = metric->measure(inputs, mean_model);
            }
            else
            {
                for(int p = 0 ; p < n_patterns_model ; p++)
                {
                    //if(strcmp(mlpmetric1hot, "")) d = -mMlpOneHot(n_inputs, inputs, ref_model[p], mlp, mv_norm, seq, diff, delta);
                    //else d += mEuclidean(n_inputs, inputs, ref_model[p]);
                    if(mlpmetric1hot) d += -mMlpOneHot(n_inputs, inputs, ref_model[p], mlp, mv_norm, seq, diff, delta);
                    else d += metric->measure(inputs, ref_model[p]);
                }
                d /= (real) n_patterns_model;
            }

            if(!one_score_per_file)
            {
                real z = -d;
                if(strcmp(norm_model_filename, "") != 0)
                {
                    z -= mu;
                    z /= sigma;
                }
                scorefile->printf("%g\n", z);
            }

            sum_ += d;
            //
            if(d < min_) min_ = d;
            if(d > max_) max_ = d;
        }
        avg = sum_/(real)n_patterns;

        //
        if(verbose)
        {
            print("Outputs:\n");
            print("  min = %g\n", min_);
            print("  max = %g\n", max_);
            print("  sum = %g\n", sum_);
            print("  avg = %g\n", avg);
        }

        if(scorefile != NULL)
        {
            if(one_score_per_file)
            {
                real z = -avg;
                if(strcmp(norm_model_filename, "") != 0)
                {
                    z -= mu;
                    z /= sigma;
                }
                scorefile->printf("%g ", z);
            }
        }

        //
        delete file;

        //
        if(scorefile != NULL)
            if(one_score_per_file) scorefile->printf("\n");
    }

    //
    for(int p = 0 ; p < n_patterns_model ; p++) delete [] ref_model[p];
    delete [] ref_model;
    delete [] inputs;
    delete metric;

    //
    delete allocator;

    return(0);
}
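The program reads each bindata file as a header of two ints, the number of patterns followed by the number of inputs per pattern, and then n_patterns blocks of n_inputs reals, in little-endian byte order (see DiskXFile::setLittleEndianMode() above). The sketch below is not part of the original listing; it shows one way such a file could be produced with the standard library, assuming Torch's real type is float (the default build) and a little-endian host. The helper name write_bindata is hypothetical.

// Minimal sketch: write a bindata file compatible with what distbindata.cc reads.
// Assumptions: real == float, little-endian host, all patterns have the same length.
#include <cstdio>
#include <vector>

bool write_bindata(const char *path, const std::vector<std::vector<float> > &patterns)
{
    if(patterns.empty()) return false;

    int n_patterns = (int) patterns.size();
    int n_inputs = (int) patterns[0].size();

    FILE *f = fopen(path, "wb");
    if(f == NULL) return false;

    // header: number of patterns, then number of inputs per pattern
    fwrite(&n_patterns, sizeof(int), 1, f);
    fwrite(&n_inputs, sizeof(int), 1, f);

    // body: each pattern stored as n_inputs consecutive floats
    for(int p = 0; p < n_patterns; p++)
        fwrite(&patterns[p][0], sizeof(float), n_inputs, f);

    fclose(f);
    return true;
}

A file produced this way could then be passed either as the template argument or as a probe file in an invocation like the example shown above the listing.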
