亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频

? 歡迎來到蟲蟲下載站! | ?? 資源下載 ?? 資源專輯 ?? 關于我們
? 蟲蟲下載站

?? ann.pas

?? ANN And Hopfield Neural Network
?? PAS
字號:
//Copyright Ramesh Sabeti - sabeti@reazon.com

unit ANN;

// Two small neural-network implementations:
//  * TFeedForward - a layered backpropagation network built from
//    TNode/TEdge objects.
//  * THopfield - a fully connected Hopfield associative memory over
//    NUM_PIXELS binary (+1/-1) units.

interface

uses Math, Dialogs, SysUtils, classes;

const
  NUM_FEATURES = 2;   // dimensionality of a TFeatureVector
  NUM_PIXELS = 100;   // units (pixels) in a Hopfield pattern
type
  // One Hopfield pattern: an integer state per pixel.
  TCharMatrix = array [0..NUM_PIXELS - 1] of Integer;
  TNeuralNet = class;

  // Hopfield associative memory with Hebbian learning.
  THopfield = class
  private
    // Connection weights; the diagonal stays zero (no self-feedback).
    WeightMatrix: array [0..NUM_PIXELS - 1, 0..NUM_PIXELS - 1] of Extended;
    State,
    NewState: TCharMatrix;  // NewState is not referenced in this unit
    procedure Converge;     // empty stub; convergence is handled in Retrieve
  public
    constructor Create;
    procedure Learn(Sample: TCharMatrix);
    function Retrieve(Probe: TCharMatrix): TCharMatrix;// var Output: array of Extended);
    function Hamming: Integer;
    function Normalize(New: Extended; Prev: Integer): Integer;
    procedure Clear;
  end;

  // Role of a node within the feed-forward network.
  TNodeType = (ndIn, ndHidden, ndOut);

  // A single neuron: weighted-sum input (Net), sigmoid output (NOut) and
  // the backpropagation error term (Delta).
  TNode = class
    ANN: TNeuralNet;
    NType: TNodeType;
    Net, NOut,
    Delta: Extended;
    EdgesIn, EdgesOut: TList;  // of TEdge
  public
    fTag: String;
    constructor Create(NeuralNet: TNeuralNet; NodeType: TNodeType; Tag: string);
    // NOTE(review): not marked 'override', so TObject.Free never reaches
    // this destructor - confirm and consider adding 'override'.
    destructor Destroy;
    procedure Update;
    function GetNet: Extended;
    function GetNOut: Extended;

  end;

  // One layer of nodes (an untyped TList holding TNode references).
  TNodeVector = class(TList);

  // A weighted connection between two nodes. OldWeight preserves the
  // pre-update weight for use during the same backpropagation pass.
  TEdge = class
    LearningFactor,
    OldWeight,
    Weight : Extended;
    FromNode, ToNode: TNode;
  public
    fTag: String;
    constructor Create(NodeFrom, NodeTo: TNode; Tag: String);
    procedure UpdateWeight;
  end;

  // Base class holding the learning rate shared by all edges.
  TNeuralNet = class
  private
    fLearningFactor : Extended;
  public
    constructor Create(LearningFactor: Extended);
    // NOTE(review): not marked 'override' - see TNode.Destroy note.
    destructor Destroy;
    procedure Connect(NodeA, NodeB: TNode);
  end;

  // Layered, fully connected feed-forward network trained with
  // backpropagation.
  TFeedForward = class(TNeuralNet)
    Layers: TList;  // of TNodeVector
  public
    constructor Create(NumNodes: array of Integer; LearningFactor: Extended);
    // NOTE(review): not marked 'override' - see TNode.Destroy note.
    destructor Destroy;
    procedure Feed(Sample: array of Extended);
    function Train( Sample: array of Extended;
                     DesiredOut: array of Extended): Extended;
    procedure BackProp( Sample: array of Extended;
                        DesiredOut: array of Extended);
    function Error(DesiredOut: array of Extended): Extended;

  end;

  // Square matrix over the feature space (not referenced in this unit).
  TMatrix = array [0..NUM_FEATURES - 1, 0..NUM_FEATURES - 1] of Extended;
  TFeatureVector = array [0 .. NUM_FEATURES - 1] of Extended;
  // One labelled sample: feature vector, true class (OrigClass) and the
  // class assigned by a classifier (CompClass; -1 = not yet classified).
  TFeature = record
    Vector: TFeatureVector;
    OrigClass, CompClass: Integer;
  end;

  // Per-class classification statistics produced by ResultAnalysis.
  TAnalysis = record
    Total,
    Classified,
    MisClassified: Integer;
    PercentClassified,
    PercentMisClassified: Extended;
  end;


  // Fills Samples with Gaussian (RandG) feature vectors labelled classNum.
  procedure InitClass ( var Samples: array of TFeature;
                        classNum: Integer;
                        Means, StdDevs: array of Extended);

  // Tallies classification results per class into Results.
  procedure ResultAnalysis( Samples: array of TFeature;
                            var Results: array of TAnalysis;
                            TrainingSetCounts: array of Integer);

implementation

// Creates a node that belongs to NeuralNet with the given role and debug
// tag. Both edge lists start empty; edges are attached later through
// TNeuralNet.Connect.
constructor TNode.Create(NeuralNet: TNeuralNet; NodeType: TNodeType; Tag: string);
begin
  inherited Create;
  ANN := NeuralNet;
  NType := NodeType;
  fTag := Tag;
  EdgesIn := TList.Create;
  EdgesOut := TList.Create;
  Delta := 0;
end;

// Stores the learning rate that every edge of this network will use
// when updating its weight (see TEdge.UpdateWeight).
constructor TNeuralNet.Create(LearningFactor: Extended);
begin
  Inherited Create;
  fLearningFactor := LearningFactor;
end;

// Builds a layered, fully connected feed-forward network.
// NumNodes[i] is the node count of layer i: the first entry is the input
// layer, the last the output layer, everything between is hidden.
// Consecutive layers are fully connected; edge weights are randomly
// initialised in TEdge.Create.
constructor TFeedForward.Create( NumNodes: array of Integer;
                                 LearningFactor: Extended);
var
  Layer, Node, Target: Integer;
  Kind: TNodeType;
  Vector: TNodeVector;
begin
  inherited Create(LearningFactor);
  Layers := TList.Create;

  // Build every layer and populate it with nodes of the right role.
  for Layer := Low(NumNodes) to High(NumNodes) do begin
    Vector := TNodeVector.Create;
    Layers.Add(Vector);

    if Layer = Low(NumNodes) then
      Kind := ndIn
    else if Layer = High(NumNodes) then
      Kind := ndOut
    else
      Kind := ndHidden;

    // Node tags are "layer,node" with nodes numbered from 1.
    for Node := 1 to NumNodes[Layer] do
      Vector.Add(TNode.Create(Self, Kind,
                              IntToStr(Layer) + ',' + IntToStr(Node)));
  end;

  // Fully connect each node to every node of the following layer.
  Randomize;  // seeds the RNG used for the edge weights
  for Layer := Low(NumNodes) to High(NumNodes) - 1 do
    for Node := 0 to NumNodes[Layer] - 1 do
      for Target := 0 to NumNodes[Layer + 1] - 1 do
        Connect(TNode(TNodeVector(Layers.Items[Layer]).Items[Node]),
                TNode(TNodeVector(Layers.Items[Layer + 1]).Items[Target]));
end;

// Creates a weighted edge from NodeA to NodeB and registers it with both
// endpoints, so it is reachable from NodeA.EdgesOut and NodeB.EdgesIn.
procedure TNeuralNet.Connect( NodeA, NodeB: TNode);
var
  Link: TEdge;
begin
  Link := TEdge.Create(NodeA, NodeB, NodeA.fTag + '->' + NodeB.fTag);
  NodeB.EdgesIn.Add(Link);
  NodeA.EdgesOut.Add(Link);
end;

// Creates an edge between two nodes with a random initial weight.
constructor TEdge.Create(NodeFrom, NodeTo: TNode; Tag: string);
begin
  inherited Create;
  FromNode := NodeFrom;
  ToNode := NodeTo;
  fTag := Tag;
  // Random(100)/100 yields one of the 100 values 0.00 .. 0.99.
  Weight := Random(100) / 100;
end;

// Fills Samples with Gaussian-distributed feature vectors for one class.
// Feature j of every sample is drawn from N(Means[j], StdDevs[j]).
// OrigClass is set to classNum; CompClass is reset to -1, meaning
// "not yet classified".
procedure InitClass ( var Samples: array of TFeature;
                      classNum: Integer;
                      Means, StdDevs: array of Extended);
var
  SampleIdx, FeatIdx: Integer;
begin
  for SampleIdx := Low(Samples) to High(Samples) do begin
    for FeatIdx := Low(TFeatureVector) to High(TFeatureVector) do
      Samples[SampleIdx].Vector[FeatIdx] := RandG(Means[FeatIdx], StdDevs[FeatIdx]);

    Samples[SampleIdx].OrigClass := classNum;
    Samples[SampleIdx].CompClass := -1;
  end;
end;

// Tallies per-class classification results.
// For each class, Classified/MisClassified count samples of that
// ORIGINAL class that were classified correctly or not, while Total
// counts how many samples the classifier ASSIGNED to that class.
// Percentages are relative to TrainingSetCounts[class].
// Fix: the original indexed Results with CompClass unconditionally;
// CompClass is -1 for unclassified samples (see InitClass), which was
// an out-of-range access. Such samples are now skipped for Total
// (they still count as misclassified for their original class).
procedure ResultAnalysis( Samples: array of TFeature;
                          var Results: array of TAnalysis;
                          TrainingSetCounts: array of Integer);
var
  i, Cls: Integer;
begin
  // Reset all counters.
  for i := Low(Results) to High(Results) do begin
    Results[i].Classified := 0;
    Results[i].Misclassified := 0;
    Results[i].Total := 0;
  end;

  // Count classified and misclassified per original class.
  for i := Low(Samples) to High(Samples) do begin
    if Samples[i].OrigClass = Samples[i].CompClass then
      Results[Samples[i].OrigClass].Classified :=
        Results[Samples[i].OrigClass].Classified + 1
    else
      Results[Samples[i].OrigClass].Misclassified :=
        Results[Samples[i].OrigClass].Misclassified + 1;

    // Guard against CompClass = -1 (unclassified) before indexing.
    Cls := Samples[i].CompClass;
    if (Cls >= Low(Results)) and (Cls <= High(Results)) then
      Results[Cls].Total := Results[Cls].Total + 1;
  end;

  // Convert the counts to percentages of each class's training set.
  for i := Low(Results) to High(Results) do begin
    Results[i].PercentClassified := Results[i].Classified /
                                    TrainingSetCounts[i] * 100;
    Results[i].PercentMisClassified := Results[i].MisClassified /
                                       TrainingSetCounts[i] * 100;
  end;
end;


// Releases the network object. No fields are owned at this level.
// NOTE(review): declared without 'override' in the interface, so calling
// Free on a TNeuralNet reference dispatches to TObject.Destroy and never
// reaches this body - confirm and consider adding 'override'.
destructor TNeuralNet.Destroy;
begin
  inherited Destroy;
end;

// Releases the layer containers.
// Fixes two issues in the original: the layer items (TNodeVector) were
// cast to TNode before Free - harmless at runtime thanks to virtual
// dispatch, but misleading - and the Layers list itself was never freed.
// NOTE(review): the TNode objects inside each layer are still leaked
// here, as in the original; a full ownership cleanup would also destroy
// the nodes and their edges - confirm intended ownership.
destructor TFeedForward.Destroy;
var
  i: Integer;
begin
  for i := 0 to Layers.Count - 1 do
    TNodeVector(Layers.Items[i]).Free;
  Layers.Free;  // the original leaked the list object itself

  inherited Destroy;
end;

// Destroys the node, its outgoing edges, and its edge lists.
// Every edge is reachable from two nodes (FromNode.EdgesOut and
// ToNode.EdgesIn); the original freed edges from BOTH lists, so tearing
// down all nodes freed each edge twice. The FromNode is now treated as
// the owner, so each edge is freed exactly once, and the two TList
// containers (leaked in the original) are freed as well.
// NOTE(review): declared without 'override' in the interface, so Free
// does not reach this destructor - confirm and consider adding 'override'.
destructor TNode.Destroy;
var
  i: Integer;
begin
  // Free only the edges this node owns (its outgoing ones).
  for i := 0 to EdgesOut.Count - 1 do
    TEdge(EdgesOut.Items[i]).Free;

  EdgesIn.Free;
  EdgesOut.Free;

  inherited Destroy;
end;

// Recomputes this node's activation.
// Hidden and output nodes first accumulate the weighted outputs of all
// predecessor nodes into Net; input nodes keep the Net value that
// TFeedForward.Feed assigned directly. In every case NOut becomes the
// logistic sigmoid of Net.
procedure TNode.Update;
var
  EdgeIdx: Integer;
  InEdge: TEdge;
begin
  if NType <> ndIn then begin
    Net := 0;
    for EdgeIdx := 0 to EdgesIn.Count - 1 do begin
      InEdge := TEdge(EdgesIn.Items[EdgeIdx]);
      Net := Net + InEdge.FromNode.NOut * InEdge.Weight;
    end;
  end;
  NOut := 1 / (1 + Exp(-Net));
end;

// Returns the node's raw (pre-sigmoid) weighted input sum.
function TNode.GetNet: Extended;
begin
  Result := Net;
end;

// Returns the node's sigmoid output as computed by the last Update call.
function TNode.GetNOut: Extended;
begin
  Result := NOut;
end;

//This function trains the net on one sample, and returns the error measure
function TFeedForward.Train( Sample: array of Extended;
                              DesiredOut: array of Extended): Extended;
begin
  Feed(Sample);
  Result := Error(DesiredOut);
  BackProp(Sample, DesiredOut);
end;

// Error measure of the last forward pass: the sum of squared differences
// between DesiredOut and the output-layer activations, divided by
// (N - 1) output nodes as in the original formula.
// Fix: with a single output node the original divided by zero; the
// denominator is now clamped to at least 1.
function TFeedForward.Error(DesiredOut: array of Extended): Extended;
var
  OutLayer: TNodeVector;
  i: Integer;
begin
  OutLayer := Layers.Items[Layers.Count - 1];
  Result := 0;
  for i := 0 to OutLayer.Count - 1 do
    Result := Result + Sqr(DesiredOut[i] - TNode(OutLayer.Items[i]).NOut);

  // Original divided by Count - 1, which is 0 for a one-output network.
  Result := Result / Max(OutLayer.Count - 1, 1);
end;

// One backpropagation pass: computes each node's delta from the output
// layer backwards and updates the incoming edge weights as it goes.
// Hidden deltas use EdgeOut.OldWeight - the outgoing weights as they
// were BEFORE this pass updated them (preserved by TEdge.UpdateWeight).
// Note: Sample is unused; the pass works from the activations that Feed
// left on the nodes.
// Fix: the original iterated down to the input layer (downto 0); input
// nodes have no incoming edges and their deltas are never read, so that
// work was wasted. The loop now stops at the first hidden layer.
procedure TFeedForward.Backprop( Sample: array of Extended;
                                 DesiredOut: array of Extended);
var
  i, j, k, l: Integer;
  CurLayer: TNodeVector;
  CurNode, NextNode : TNode;
  CurEdge, EdgeOut: TEdge;
  Accum: Extended;
begin
  // Walk from the output layer back to the first hidden layer.
  for j := Layers.Count - 1 downto 1 do begin
    CurLayer := TNodeVector(Layers.Items[j]);
    for i := 0 to CurLayer.Count - 1 do begin
      CurNode := TNode(CurLayer.Items[i]);
      if CurNode.NType = ndOut then begin
        // Output delta: (target - actual) * sigmoid derivative.
        CurNode.Delta := (DesiredOut[i] - CurNode.NOut) *
                         CurNode.NOut * (1 - CurNode.NOut);
      end
      else begin
        // Hidden delta: weighted sum of successor deltas, using the
        // pre-update outgoing weights.
        Accum := 0;
        for l := 0 to CurNode.EdgesOut.Count - 1 do begin
          EdgeOut := TEdge(CurNode.EdgesOut.Items[l]);  // explicit cast (original relied on untyped Pointer assignment)
          NextNode := EdgeOut.ToNode;
          Accum := Accum + (EdgeOut.OldWeight * NextNode.Delta);
        end;
        CurNode.Delta := CurNode.NOut * (1 - CurNode.NOut) * Accum;
      end;

      // Apply the gradient step to every incoming edge of this node.
      for k := 0 to CurNode.EdgesIn.Count - 1 do begin
        CurEdge := TEdge(CurNode.EdgesIn.Items[k]);
        CurEdge.UpdateWeight;
      end;
    end;
  end;
end;

// Applies one gradient step to this edge. The pre-update weight is
// preserved in OldWeight so that earlier layers of the same
// backpropagation pass can still read it.
procedure TEdge.UpdateWeight;
var
  Step: Extended;
begin
  OldWeight := Weight;
  Step := ToNode.ANN.fLearningFactor * ToNode.Delta * FromNode.NOut;
  Weight := Weight + Step;
end;

// Forward pass: loads Sample into the input layer, then updates every
// subsequent layer in order so each node's NOut reflects the new input.
procedure TFeedForward.Feed( Sample: array of Extended );
var
  LayerIdx, NodeIdx: Integer;
  Layer: TNodeVector;
  Node: TNode;
begin
  // Input layer: Net is assigned straight from the sample; Update then
  // applies the sigmoid (input nodes skip the weighted-sum step).
  Layer := TNodeVector(Layers.Items[0]);
  for NodeIdx := Low(Sample) to High(Sample) do begin
    Node := TNode(Layer.Items[NodeIdx]);
    Node.Net := Sample[NodeIdx];
    Node.Update;
  end;

  // Propagate through the hidden and output layers.
  for LayerIdx := 1 to Layers.Count - 1 do begin
    Layer := TNodeVector(Layers.Items[LayerIdx]);
    for NodeIdx := 0 to Layer.Count - 1 do
      TNode(Layer.Items[NodeIdx]).Update;
  end;
end;

// Empty stub - never implemented. Convergence is handled inline in
// Retrieve, which iterates serial updates until the Hamming criterion
// is met.
procedure THopfield.Converge;
begin

end;

// Creates a Hopfield network with the weight matrix zeroed.
// (The original declared unused locals i, j; removed.)
constructor THopfield.Create;
begin
  inherited;
  Clear;  // zero all connection weights
end;

// Hebbian learning: adds the outer product of Sample with itself to the
// weight matrix, leaving the diagonal (self-feedback) untouched at zero.
// NOTE(review): Sample entries are presumably +1/-1 per the Hopfield
// convention implied by Normalize - confirm against callers.
// (The original declared an unused local k; removed.)
procedure THopfield.Learn(Sample: TCharMatrix);
var
  i, j: Integer;
begin
  for i := Low(Sample) to High(Sample) do
    for j := Low(Sample) to High(Sample) do
      if i <> j then   // skip self-feedback weights
        WeightMatrix[i, j] := WeightMatrix[i, j] + Sample[i] * Sample[j];
end;

// Recalls the stored pattern nearest to Probe using serial Hopfield
// updates (one node per iteration, in cyclic order), for at most 5000
// iterations or until the Hamming criterion reports convergence.
// Fixes the original, where the update index j was read before being
// initialised (undefined start index). Also removes dead locals
// (x, Converged, the unused NodeSet) and a Randomize call that had no
// matching use of Random.
function THopfield.Retrieve( Probe: TCharMatrix): TCharMatrix;
var
  i, j, iter: Integer;
  Sum: Extended;
  NewVal: Integer;
begin
  // Start from the probe pattern.
  State := Probe;

  j := -1;  // so the first update targets node 0 (j was uninitialised before)
  iter := 0;
  while iter < 5000 do begin
    Inc(iter);

    j := (j + 1) mod NUM_PIXELS;  // serial update: next node in cyclic order

    // Weighted input to node j from the current state.
    Sum := 0;
    for i := Low(State) to High(State) do
      Sum := Sum + (WeightMatrix[j, i] * State[i]);

    // Threshold; an exactly-zero input leaves the node unchanged.
    NewVal := Normalize(Sum, State[j]);
    if NewVal <> State[j] then begin
      State[j] := NewVal;
      if Hamming < 10 then  // few unstable nodes remain: treat as converged
        Break;
    end;
  end;

  Result := State;
end;


// Counts unstable nodes: for each node i it recomputes the weighted
// input sum over all other nodes and compares the thresholded value
// with the node's current state. The count of nodes that would flip
// serves as the convergence measure used by Retrieve.
// Fix: the original inner product multiplied WeightMatrix[i, j] by
// State[i] instead of State[j], so the activation did not depend on the
// other nodes' states at all. (The per-element array Y is also replaced
// by a scalar accumulator.)
function THopfield.Hamming: Integer;
var
  i, j: Integer;
  Activation: Extended;
begin
  Result := 0;
  for i := Low(State) to High(State) do begin
    Activation := 0;
    for j := Low(State) to High(State) do
      Activation := Activation + WeightMatrix[i, j] * State[j];

    if Normalize(Activation, State[i]) <> State[i] then
      Inc(Result);  // node i is unstable: it would flip on update
  end;
end;

// Thresholds a weighted input sum to a binary node state:
// positive -> +1, negative -> -1, exactly zero -> keep Prev unchanged.
function THopfield.Normalize(New: Extended; Prev: Integer): Integer;
begin
  Result := Prev;   // default: a zero input leaves the state as it was
  if New > 0 then
    Result := 1
  else if New < 0 then
    Result := -1;
end;

// Resets every connection weight to zero.
procedure THopfield.Clear;
var
  Row, Col: integer;
begin
  for Row := 0 to NUM_PIXELS - 1 do
    for Col := 0 to NUM_PIXELS - 1 do
      WeightMatrix[Row, Col] := 0;
end;

end.

?? 快捷鍵說明

復制代碼 Ctrl + C
搜索代碼 Ctrl + F
全屏模式 F11
切換主題 Ctrl + Shift + D
顯示快捷鍵 ?
增大字號 Ctrl + =
減小字號 Ctrl + -
亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频
国产亚洲精品资源在线26u| 日日噜噜夜夜狠狠视频欧美人| 美女www一区二区| 久久蜜桃av一区二区天堂| 精品亚洲aⅴ乱码一区二区三区| 欧美精品一区视频| 色999日韩国产欧美一区二区| 中文字幕在线观看一区| 777午夜精品视频在线播放| 激情久久五月天| 亚洲v精品v日韩v欧美v专区| www国产成人免费观看视频 深夜成人网| 国产精品自拍网站| 国产美女在线精品| 美女视频黄a大片欧美| 玉足女爽爽91| 国产精品免费久久| 久久在线免费观看| 精品裸体舞一区二区三区| 91官网在线免费观看| 国产成人在线视频网站| 男男视频亚洲欧美| 亚洲午夜影视影院在线观看| 国产精品美女久久久久aⅴ国产馆| 欧美精品免费视频| 9191久久久久久久久久久| 91黄色免费观看| 色婷婷国产精品久久包臀| 99久久精品国产麻豆演员表| 成人激情文学综合网| 高清beeg欧美| 成人激情免费视频| 色吧成人激情小说| 欧美日韩国产在线观看| 久久久久久电影| 精品无人区卡一卡二卡三乱码免费卡| 国产精品无圣光一区二区| 成人欧美一区二区三区视频网页 | 欧美电视剧免费观看| 91原创在线视频| 国产91综合一区在线观看| 裸体健美xxxx欧美裸体表演| 一区二区三区日韩欧美| 国产精品九色蝌蚪自拍| 久久久久久久久免费| 精品sm捆绑视频| 久久99久久99| 狠狠网亚洲精品| www.亚洲色图.com| 精品国产区一区| 亚洲一区在线看| 精品在线一区二区| 色婷婷久久一区二区三区麻豆| 日韩三级免费观看| 亚洲人亚洲人成电影网站色| 视频一区欧美精品| 成人精品视频.| 欧美丰满高潮xxxx喷水动漫| 国产亚洲精久久久久久| 一区二区三区.www| 成人一级黄色片| 亚洲一区二区中文在线| 中文字幕成人av| 国产综合久久久久影院| 国产精品乱码人人做人人爱| 色94色欧美sute亚洲13| 亚洲成人午夜电影| 欧美中文字幕一区二区三区 | 一区二区视频免费在线观看| 国产在线精品一区二区| 精品日韩欧美在线| 捆绑变态av一区二区三区| 国产精品99久久不卡二区| 精品精品欲导航| 美女视频黄a大片欧美| 国产欧美日韩卡一| 在线视频国内一区二区| 亚洲无人区一区| 色乱码一区二区三区88| 亚洲第一电影网| 在线播放中文一区| 捆绑变态av一区二区三区| 欧美一区二区播放| 九一九一国产精品| 亚洲欧洲三级电影| 日本伦理一区二区| 日韩av在线免费观看不卡| 91精品国产福利在线观看| 亚洲欧美电影一区二区| 欧美久久久影院| 韩日欧美一区二区三区| 国产农村妇女毛片精品久久麻豆| 在线免费视频一区二区| 九九在线精品视频| 日欧美一区二区| 一区二区视频在线看| 国产三级一区二区| 国产日韩欧美一区二区三区乱码 | 亚洲不卡av一区二区三区| 欧美视频在线一区| 国产精品一区二区三区四区| 亚洲成人资源在线| 亚洲视频资源在线| 国产日本欧美一区二区| 在线播放国产精品二区一二区四区| 国产sm精品调教视频网站| 极品少妇xxxx偷拍精品少妇| 欧美激情资源网| 欧美性感一类影片在线播放| 国产69精品久久久久毛片| 日日摸夜夜添夜夜添精品视频| 一区二区三区欧美久久| 久久九九久久九九| 久久久噜噜噜久久中文字幕色伊伊| 日韩欧美电影在线| 欧美一区二区视频网站| 欧美日韩中文字幕一区| 欧美怡红院视频| 91行情网站电视在线观看高清版| 成人性色生活片免费看爆迷你毛片| 久久91精品国产91久久小草 | 亚洲人亚洲人成电影网站色| 亚洲欧美色一区| 亚洲综合一二三区| 国产成人在线免费观看| 亚洲影视在线播放| 免费在线视频一区| 国产白丝网站精品污在线入口| 99综合电影在线视频| 欧美性猛片aaaaaaa做受| 欧美嫩在线观看| 国产日韩精品一区二区浪潮av| 亚洲一区二区三区在线播放| 久久99国产精品免费| 色综合中文字幕国产 | 久久99精品国产麻豆婷婷| 国产一区二区调教| 欧美性猛交xxxxxx富婆| 久久久精品中文字幕麻豆发布| 国产精品久久久久影院| 美女性感视频久久| 欧美日韩aaaaaa| 亚洲国产精品一区二区久久恐怖片 | 麻豆91在线播放| 色嗨嗨av一区二区三区| 精品国产乱码久久久久久影片| 五月综合激情网| 欧美xxxxx牲另类人与| 一区二区三区自拍| 不卡高清视频专区| 久久一日本道色综合| 中文字幕亚洲区| 亚洲国产精品久久久男人的天堂| 亚洲成国产人片在线观看| 成人国产在线观看| 亚洲人成网站色在线观看| 福利视频网站一区二区三区| 欧美一级欧美一级在线播放| 欧美韩国日本不卡| 成人精品gif动图一区| 亚洲国产高清aⅴ视频| av午夜一区麻豆| 
亚洲一区二区三区在线看| 欧美日韩激情一区二区| 日韩av电影天堂| 欧美电视剧在线观看完整版| 国产美女娇喘av呻吟久久| 国产欧美日韩在线看| 色综合久久综合网97色综合| 亚洲成av人**亚洲成av**| 5566中文字幕一区二区电影| 激情六月婷婷综合| 亚洲三级在线免费| 日韩欧美一级二级三级| 9i看片成人免费高清| 视频一区欧美日韩| 亚洲欧美日韩成人高清在线一区| 91性感美女视频| 国产麻豆精品久久一二三| 亚洲激情欧美激情| 国产亚洲欧美日韩俺去了| www.99精品| 国产剧情在线观看一区二区| 午夜精品一区在线观看| 在线视频亚洲一区| 国产精品系列在线观看| 亚洲大片精品永久免费| 国产精品电影院| 欧洲av一区二区嗯嗯嗯啊| 东方欧美亚洲色图在线| 亚州成人在线电影| 亚洲一区二区三区四区五区中文| 国产精品免费丝袜| 国产欧美日韩精品a在线观看| 欧美一级黄色大片| 日韩美女视频一区二区在线观看| 欧美色倩网站大全免费| 欧美日韩不卡在线| av不卡一区二区三区| 国产99久久久精品|