cluster2.c

Description: complete collection of clustering algorithms, with data sets included
Language: C
Page 1 of 4
  nst = clset->nst;             /* get the numerical statistics */
  switch (mode & 0xf) {         /* evaluate the initialization mode */
    case CLS_CENTER:            /* -- center of the data space */
      c = clset->cls->ctr;      /* compute the center */
      nst_center(nst, c);       /* of the data space */
      for (p = clset->cls +(k = clset->clscnt); --k > 0; )
        vec_copy((--p)->ctr, c, clset->incnt);
      break;                    /* copy the center to all clusters */
    case CLS_DIAG:              /* -- diagonal of the data space */
    case CLS_LATIN:             /* -- latin hypercube sampling */
      for (i = clset->incnt; --i >= 0; ) {
        m = nst_max(nst, i);    /* compute value decrement */
        d = (m -nst_min(nst, i)) / clset->clscnt;
        x = m -0.5*d;           /* compute last value */
        for (p = clset->cls +(k = clset->clscnt); --k >= 0; ) {
          (--p)->ctr[i] = x; x -= d; }
      }                         /* set equally spaced values */
      if (mode == CLS_DIAG)     /* if only to set the diagonal, */
        break;                  /* there is nothing else to be done */
    /* case CLS_LATIN: */       /* -- latin hypercube sampling */
      p = clset->cls;           /* shuffle elements of the centers */
      for (n = clset->clscnt; --n > 0; ) {
        for (i = clset->incnt; --i >= 0; ) {
          k = (int)((n+1) *randfn());
          if      (k > n) k = n;   /* compute a random index in */
          else if (k < 0) k = 0;   /* the remaining set of centers */
          x           = p[k].ctr[i];
          p[k].ctr[i] = p[n].ctr[i];
          p[n].ctr[i] = x;      /* exchange the i-th elements of the */
        }                       /* k-th and the n-th cluster center */
      } break;                  /* (shuffle dimensions independently) */
    case CLS_POINTS:            /* -- given points in the data space */
      if (vec) nst_norm(nst, vec, clset->vec);
      vec = clset->vec;         /* scale given vector to the buffer */
      if (clset->init >= clset->clscnt)
        clset->init = 0;        /* if all clusters are init., restart */
      p = clset->cls +clset->init++;
      vec_copy(p->ctr, vec, clset->incnt);
      break;                    /* copy the given vector */
    case CLS_UNIFORM:           /* -- uniformly distributed */
    default:                    /* (this is also the default) */
      nst_spans(nst, b = clset->vec);  /* get the value spans */
      for (p = clset->cls +(k = clset->clscnt); --k >= 0; )
        for (c = (--p)->ctr, i = clset->incnt; --i >= 0; )
          c[i] = nst_min(nst, i) +b[i] *randfn();
      break;                    /* set cluster centers to random */
  }                             /* points in the data space */

  /* --- scale the coordinates --- */
  if ((mode & 0xf) != CLS_POINTS)
    for (p = clset->cls +(k = clset->clscnt); --k >= 0; ) {
      --p; nst_norm(nst, p->ctr, p->ctr); }
  if (((mode & 0xf) != CLS_POINTS)
  ||  (clset->init  >= clset->clscnt)) {
    /* --- add a random offset --- */
    if (range > 0) {            /* if a range for offsets is given */
      for (p = clset->cls +(k = clset->clscnt); --k >= 0; )
        for (c = (--p)->ctr +(i = clset->incnt); --i >= 0; )
          *--c += (2 *randfn() -1) *range;
    }                           /* add a random offset to all values */
    /* --- normalize the centers --- */
    if (mode & CLS_UNIT)        /* if centers on the unit sphere */
      _normctr(clset, 0);       /* normalize the cluster centers */
  }
}  /* cls_init() */
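The CLS_DIAG / CLS_LATIN branch above places, in every input dimension, one value per cluster at equally spaced cell midpoints of the data range; for Latin hypercube sampling these values are then shuffled independently per dimension, so each cluster ends up in a different cell of every dimension. The following self-contained sketch illustrates only that idea; it is not part of cluster2.c, and the dimension count, value ranges and randfn() below are illustrative assumptions, not the library's API.

#include <stdio.h>
#include <stdlib.h>
#include <time.h>

#define CLSCNT 4                /* number of clusters   (illustrative) */
#define INCNT  3                /* number of dimensions (illustrative) */

static double randfn (void)     /* uniform random number in [0,1) */
{ return (double)rand() /((double)RAND_MAX +1.0); }

int main (void)
{                               /* --- Latin hypercube center init. */
  double ctr[CLSCNT][INCNT];    /* cluster centers */
  double min[INCNT] = { 0, 0,  0   };  /* per-dimension data ranges */
  double max[INCNT] = { 1, 10, 100 };  /* (illustrative values) */
  int    i, k, n;               /* loop variables */
  double d, x;                  /* value decrement and buffer */

  srand((unsigned)time(NULL));  /* seed the random number generator */
  for (i = 0; i < INCNT; i++) { /* equally spaced values per dimension */
    d = (max[i] -min[i]) /CLSCNT;
    x = max[i] -0.5*d;          /* start at the last cell's midpoint */
    for (k = CLSCNT; --k >= 0; ) { ctr[k][i] = x; x -= d; }
  }
  for (n = CLSCNT; --n > 0; )   /* shuffle each dimension independently */
    for (i = 0; i < INCNT; i++) {
      k = (int)((n+1) *randfn());      /* random index in [0,n] */
      if (k > n) k = n;                /* guard against rounding */
      x = ctr[k][i]; ctr[k][i] = ctr[n][i]; ctr[n][i] = x;
    }
  for (k = 0; k < CLSCNT; k++) {/* print the resulting centers */
    for (i = 0; i < INCNT; i++) printf("%8.3f", ctr[k][i]);
    printf("\n");
  }
  return 0;
}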
/*--------------------------------------------------------------------*/

void cls_method (CLSET *clset, int method)
{                               /* --- set parameter update method */
  int     i;                    /* loop variable */
  CLUSTER *p;                   /* to traverse the clusters */
  double  t;                    /* initialization value */

  assert(clset                  /* check the function arguments */
     && ((method & CLS_METHOD)   >= CLS_GRADIENT)
     && ((method & CLS_METHOD)   <= CLS_BACKPROP)
     && ((method & CLS_MODIFIER) >= CLS_NONE)
     && ((method & CLS_MODIFIER) <= CLS_QUICK));
  clset->method = method;       /* note the parameter update method */
  method &= CLS_MODIFIER;       /* get the update modifier */
  if (method > CLS_EXPAND) {    /* if one of the higher methods */
    t = (method == CLS_ADAPTIVE) ? 1 : 0;
    for (p = clset->cls +(i = clset->clscnt); --i >= 0; ) {
      --p; mat_init(p->chv, MAT_VALUE, &t);
           mat_init(p->bfv, MAT_ZERO,  NULL);
    }                           /* initialize the change matrix */
  }                             /* and the buffer matrix */
}  /* cls_method() */

/*--------------------------------------------------------------------*/

void cls_regular (CLSET *clset, const double *params)
{                               /* --- set regularization parameters */
  int i;                        /* loop variable */

  assert(clset && params);      /* check the function arguments */
  for (i = 5; --i >= 0; )       /* copy the parameters */
    clset->regps[i] = params[i];
}  /* cls_regular() */

/*--------------------------------------------------------------------*/

void cls_lrate (CLSET *clset, const double *lrates, const double *decays)
{                               /* --- set learning rate parameters */
  int i;                        /* loop variable */

  assert(clset);                /* check the function arguments */
  if (lrates) {                 /* if learning rates are given, */
    for (i = 3; --i >= 0; )     /* copy them to the cluster set */
      clset->lrates[i] = lrates[i];
  }
  if (decays) {                 /* if decay parameters are given, */
    for (i = 3; --i >= 0; )     /* copy them to the cluster set */
      clset->decays[i] = decays[i];
  }
}  /* cls_lrate() */

/*--------------------------------------------------------------------*/

int cls_aggr (CLSET *clset, const double *vec, double weight)
{                               /* --- aggregate a data vector */
  int      i, n;                /* cluster index, loop variable */
  CLUSTER  *c;                  /* to traverse the clusters */
  MATADDFN *add;                /* aggregation function */
  double   msd;                 /* membership degree */
  double   exp;                 /* adaptation exponent */

  assert(clset);                /* check the function arguments */
  i = cls_exec(clset,vec,NULL); /* compute degrees of membership */
  vec = clset->vec;             /* get the buffered vector */
  exp = fabs(clset->msexp);     /* and the adaptation exponent */
  if      (clset->type & CLS_COVARS)          add = mat_addmp;
  else if (clset->type & (CLS_VARS|CLS_SIZE)) add = mat_addsv;
  else                                        add = mat_addvec;

  /* --- alternating estimation --- */
  /* Aggregate the data vectors for the estimation step. */
  if ((clset->method & CLS_METHOD) == CLS_ALTOPT) {
    if (exp <= 0) {             /* if hard/crisp clustering */
      c = clset->cls +i;        /* get the cluster to assign to */
      add(c->smp, vec, weight); }    /* sum the weighted vector */
    else {                      /* if fuzzy/prob. clustering */
      for (c = clset->cls +(n = clset->clscnt); --n >= 0; ) {
        msd = (--c)->msd;       /* traverse the clusters */
        if (msd <= 0) continue; /* skip cluster with zero membership */
        if      (exp == 2) msd *= msd;
        else if (exp != 1) msd = pow(msd, exp);
        msd *= weight;          /* compute the data point weight */
        add(c->smp, vec, msd);  /* sum the data vector weighted */
      }                         /* with the degree of membership */
    } }
  /* --- competitive learning --- */
  /* Aggregate the difference vectors for the update step. */
  else if ((clset->method & CLS_METHOD) == CLS_COMPLRN) {
    if (exp <= 0) {             /* if hard/crisp clustering */
      c = clset->cls +i;        /* get the cluster to assign to */
      add(c->smp, c->dif, weight); } /* sum the weighted vector */
    else {                      /* if fuzzy/prob. clustering */
      for (c = clset->cls +(n = clset->clscnt); --n >= 0; ) {
        msd = (--c)->msd;       /* traverse the clusters */
        if (msd <= 0) continue; /* skip cluster with zero membership */
        if      (exp == 2) msd *= msd;
        else if (exp != 1) msd = pow(msd, exp);
        msd *= weight;          /* compute the data point weight */
        add(c->smp, c->dif, msd);
      }                         /* sum the difference vector weighted */
    } }                         /* with the degree of membership */
  /* --- gradient based update --- */
  else {                        /* if (method == CLS_GRADIENT) */
    /* ... to be done ... */
  }
  return i;                     /* return index of best cluster */
}  /* cls_aggr() */

/*--------------------------------------------------------------------*/

void cls_bkprop (CLSET *clset, const double *errs)
{                               /* --- backpropagate errors */
  int     i, n;                 /* loop variables, cluster index */
  int     type;                 /* cluster type flags */
  CLUSTER *p;                   /* to traverse the clusters */
  double  *c, *d, t;            /* to access the vectors, buffers */

  assert(clset && errs);        /* check the function arguments */
  type = clset->type;           /* get the cluster type flags */
  for (p = clset->cls +(n = clset->clscnt); --n >= 0; ) {
    d = (--p)->dif;             /* traverse the clusters */
    t = -errs[n] *clset->drvfn(p->d2, clset->rfnps, p->msd);
    if      (type & CLS_COVARS){/* -- if adaptable covariances */
      mat_mulmv(clset->buf, p->inv, d);
      mat_addmpx(p->smp, d, t); /* compute derivative terms */
      d = clset->buf; }         /* and get the buffered result */
    else if (type & CLS_VARS) { /* -- if adaptable variances */
      mat_muldv(clset->buf, p->inv, d);
      mat_addsvx(p->smp, d, t); /* compute derivative terms */
      d = clset->buf; }         /* and get the buffered result */
    else if (type & CLS_SIZE) { /* -- if adaptable isotropic var. */
      t = t /p->var;            /* include the variance */
      mat_inc(p->smp, 0, 0, -t *p->d2);
    }                           /* sum the variance gradients */
    t *= -2;                    /* compute center update factor */
    c = p->sum +(i = clset->incnt);
    for (d += i; --i >= 0; ) *--c += *--d *t;
  }                             /* sum the center gradients */
}  /* cls_bkprop() */

/*--------------------------------------------------------------------*/

double cls_update (CLSET *clset, int conly)
{                               /* --- update a set of clusters */
  int     n, type;              /* loop variable, type buffer */
  CLUSTER *p;                   /* to traverse the clusters */
  double  sum = 0;              /* sum of cluster weights */

  assert(clset);                /* check the function argument */
  type = clset->type;           /* note and replace the cluster type */
  if (conly) clset->type = CLS_CENTER;

  /* --- determine the update weights --- */
  if ((clset->method & CLS_METHOD) != CLS_BACKPROP) {
    for (p = clset->cls +(n = clset->clscnt); --n >= 0; ) {
      --p; p->nw = mat_weight(p->smp);
      if (p->nw >= MINWEIGHT) { p->d2 =  0; }
      else { p->nw = MINWEIGHT; p->d2 = -1; }
      sum += p->nw;             /* get and adapt the cluster weights */
    }                           /* and sum them for a normalization */
    if (sum <= 0) return 0;     /* check for a proper update */
    clset->msd[1] = 1.0/sum;    /* note the normalization factor */
  }

  /* --- compute new parameters --- */
  switch (clset->method & CLS_METHOD) {
    case CLS_ALTOPT  : _altopt  (clset); break;
    case CLS_COMPLRN : _complrn (clset); break;
    case CLS_BACKPROP: _backprop(clset); break;
    default          : _gradient(clset); break;
  }                             /* call the approp. update function */

  /* --- regularize the parameters --- */
  _regshape (clset);            /* regularize cluster shapes, */
  _regsize  (clset);            /* cluster sizes, and */
  _regweight(clset);            /* cluster weights */

  /* --- update the parameters --- */
  sum = ((clset->method & CLS_MODIFIER) == CLS_NONE)
      ? _stdupd(clset)          /* update the cluster parameters */
      : _nnupd (clset);         /* with the given update modifier */
  if (conly) clset->type = type;/* restore the cluster type */
  if (clset->method & CLS_ORIGIN) { /* if cluster centers at origin, */
    _zeroctr(clset, 1); return 1; } /* zero the center vectors */
  if (clset->method & CLS_UNIT)     /* if centers on unit sphere, */
    _normctr(clset, 0);             /* (re)normalize the vectors */
  return sum;                   /* return the maximal change */
}  /* cls_update() */
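With the alternating optimization method (CLS_ALTOPT), cls_aggr() accumulates each (weighted) data vector into the statistics of the cluster(s) it belongs to, and cls_update() then turns the accumulated sums into new cluster parameters; one pass over the data followed by one update is one step of alternating optimization, as in (fuzzy) c-means. The sketch below shows the same aggregate-then-update pattern for plain hard c-means on one-dimensional data; it is self-contained, uses none of the library's types or functions, and all names and values in it are illustrative only.

#include <stdio.h>
#include <math.h>

#define CLSCNT 2                /* number of clusters    (illustrative) */
#define DATCNT 6                /* number of data points (illustrative) */

int main (void)
{                               /* --- one-dim. hard c-means */
  double data[DATCNT] = { 1.0, 1.2, 0.8, 5.0, 5.3, 4.7 };
  double ctr [CLSCNT] = { 0.0, 6.0 };   /* initial cluster centers */
  double smp [CLSCNT], nw[CLSCNT];      /* aggregated sums and weights */
  int    i, k, best, epoch;             /* loop variables */

  for (epoch = 0; epoch < 10; epoch++) {
    for (k = 0; k < CLSCNT; k++) smp[k] = nw[k] = 0;
    for (i = 0; i < DATCNT; i++) {      /* -- aggregation (cf. cls_aggr) */
      best = 0;                         /* assign to the closest center */
      for (k = 1; k < CLSCNT; k++)
        if (fabs(data[i] -ctr[k]) < fabs(data[i] -ctr[best])) best = k;
      smp[best] += data[i];             /* sum the data point */
      nw [best] += 1.0;                 /* with unit weight */
    }
    for (k = 0; k < CLSCNT; k++)        /* -- estimation (cf. cls_update) */
      if (nw[k] > 0) ctr[k] = smp[k] /nw[k];
  }                                     /* recompute centers from sums */
  for (k = 0; k < CLSCNT; k++)
    printf("center %d: %.3f\n", k, ctr[k]);
  return 0;
}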
