OpenCores Subversion repository: artificial_neural_network
URL: https://opencores.org/ocsvn/artificial_neural_network/artificial_neural_network/trunk

File: ANN_kernel/RTL_VHDL_files/ann.vhd
Diff between revisions 6 and 8


Unified diff of revisions 6 and 8: lines prefixed with '-' appear only in revision 6, lines prefixed with '+' appear only in revision 8, and unprefixed lines are common context.

@@ rev 6 line 26 / rev 8 line 26 @@
 use work.layers_pkg.all;
 
 entity ann is
    generic
    (
+      WBinit  : boolean := false;
       Nlayer  : integer := 2;   ---- Number of layers
       NbitW   : natural := 16;  ---- Bit width of weights and biases
       NumIn   : natural := 64;  ---- Number of inputs to the network
       NbitIn  : natural := 8;   ---- Bit width of the inputs
       NumN    : int_vector;   ------ Number of neurons in each layer
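
The only functional change in this hunk is the new WBinit generic. Judging from its name and from the way it is forwarded to every layer instance in the hunks below, it looks like an elaboration-time switch that decides whether weight and bias memories are preloaded with initial values. The following self-contained sketch shows that general pattern; apart from WBinit and NbitW, every name in it is invented for illustration and is not taken from the repository.

library ieee;
use ieee.std_logic_1164.all;
use ieee.numeric_std.all;

-- Illustrative sketch only: a small weight memory whose initial contents are
-- chosen at elaboration time by a boolean generic, the kind of switch that
-- WBinit appears to provide. Not code from the artificial_neural_network core.
entity weight_mem_sketch is
   generic
   (
      WBinit : boolean := false;   -- true: preload weights, false: start cleared
      NbitW  : natural := 16;      -- bit width of each weight word
      Depth  : natural := 4        -- number of weight words (illustrative)
   );
   port
   (
      clk   : in  std_logic;
      addr  : in  natural range 0 to Depth-1;
      w_out : out std_logic_vector(NbitW-1 downto 0)
   );
end entity weight_mem_sketch;

architecture rtl of weight_mem_sketch is
   type w_array is array (0 to Depth-1) of std_logic_vector(NbitW-1 downto 0);

   -- Elaboration-time choice of the initial memory image.
   function init_mem return w_array is
      variable m : w_array := (others => (others => '0'));
   begin
      if WBinit then
         for k in m'range loop
            m(k) := std_logic_vector(to_unsigned(k + 1, NbitW));  -- placeholder values
         end loop;
      end if;
      return m;
   end function;

   signal mem : w_array := init_mem;
begin
   process (clk)
   begin
      if rising_edge(clk) then
         w_out <= mem(addr);
      end if;
   end process;
end architecture rtl;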

@@ rev 6 line 115 / rev 8 line 116 @@
 if ltype_v(0) = "SP" generate
 
 first_layerSP_top_inst: entity work.layerSP_top
    generic map
    (
+      WBinit  => WBinit,
+      LNum    => 0,
       NumN    => NumN(0),   -- Number of neurons in the first layer
       NumIn   => NumIn,   ---- Number of inputs of the first layer
       NbitIn  => NbitIn,   --- Bit width of the input data
       NbitW   => NbitW,   ---- Bit width of weights and biases
       NbitOut => NbitO(0),  -- Bit width of the first layer output
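
The two new associations imply that layerSP_top (and, further down, layerPS_top) gained matching WBinit and LNum generics in the same revision. Their declarations live in the layer source files and are not part of this diff, so the fragment below is only a guess at the shape of the extended generic clause: the generic names come from the generic map above, while the defaults, the natural subtype for LNum, and the entity name layer_top_sketch are invented.

-- Assumed shape of the generic additions on the layer entities (illustrative).
entity layer_top_sketch is
   generic
   (
      WBinit  : boolean := false;  -- forward of the network-level init switch
      LNum    : natural := 0;      -- index of this layer within the network
      NumN    : natural := 8;      -- number of neurons in this layer
      NumIn   : natural := 64;     -- number of inputs to this layer
      NbitIn  : natural := 8;      -- bit width of the input data
      NbitW   : natural := 16;     -- bit width of weights and biases
      NbitOut : natural := 8       -- bit width of the layer output
   );
end entity layer_top_sketch;

Inside such a layer, LNum would plausibly index into per-layer initial weight data whenever WBinit is true, but that behaviour is not visible in this diff.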

@@ rev 6 line 176 / rev 8 line 179 @@
       -- Instantiate single activation function of the previous layer (i-1):
 activation_function_inst: entity work.activation_function
       generic map
       (
          f_type => ftype_v(i-1), -- Activation function type of the previous layer (i-1)
-         Nbit   => NbitO(i-1)   --- Bit width of the outputs of the previous layer (i-1)
+         Nbit   => NbitO(i-1),   -- Bit width of the outputs of the previous layer (i-1)
+         lsbit  => LSbit(i-1)    -- least significant bit of activation function
       )
       port map
       (
          reset   => reset,
          clk     => clk,
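
The new lsbit association, driven from an LSbit vector indexed per layer, suggests that the activation function now receives the position of the least significant bit it should keep when reducing the wide neuron output to its own input width, i.e. a per-layer fixed-point scaling choice. The sketch below illustrates that kind of slicing; the entity, port names, and widths are invented, and only the idea of an lsbit generic comes from this diff.

library ieee;
use ieee.std_logic_1164.all;

-- Illustrative sketch only: select the fixed-point window of a wide neuron
-- output that is handed to an activation lookup, controlled by an lsbit generic.
entity act_slice_sketch is
   generic
   (
      Nbit  : natural := 16;  -- width of the incoming neuron output
      Nout  : natural := 8;   -- width of the value passed to the activation function
      lsbit : natural := 4    -- index of the least significant bit that is kept
   );
   port
   (
      acc_in : in  std_logic_vector(Nbit-1 downto 0);
      act_in : out std_logic_vector(Nout-1 downto 0)
   );
end entity act_slice_sketch;

architecture rtl of act_slice_sketch is
begin
   -- Keep Nout bits starting at lsbit; bits below lsbit are dropped as extra
   -- fractional precision. Requires lsbit + Nout <= Nbit.
   act_in <= acc_in(lsbit + Nout - 1 downto lsbit);
end architecture rtl;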

@@ rev 6 line 200 / rev 8 line 204 @@
       -- Instantiate single activation function of the previous layer (i-1):
 activation_function_inst: entity work.activation_function
       generic map
       (
          f_type => ftype_v(i-1), -- Activation function type of the previous layer (i-1)
-         Nbit   => NbitO(i-1)   --- Bit width of the outputs of the previous layer (i-1)
+         Nbit   => NbitO(i-1),   -- Bit width of the outputs of the previous layer (i-1)
+         lsbit  => LSbit(i-1)    -- least significant bit of activation function
       )
       port map
       (
          reset   => reset,
          clk     => clk,

@@ rev 6 line 224 / rev 8 line 229 @@
       -- First of the parallel activation functions. This is the one which generates the output data validation
 act_function_inst_0: entity work.activation_function
       generic map
       (
          f_type => ftype_v(i-1), -- Activation function type of the previous layer (i-1)
-         Nbit   => NbitO(i-1)   --- Bit width of the outputs of the previous layer (i-1)
+         Nbit   => NbitO(i-1),   -- Bit width of the outputs of the previous layer (i-1)
+         lsbit  => LSbit(i-1)    -- least significant bit of activation function
       )
       port map
       (
          reset   => reset,
          clk     => clk,
Line 243... Line 249...
      for j in 1 to NumN(i-1)-1 generate
      for j in 1 to NumN(i-1)-1 generate
activation_function_inst: entity work.activation_function
activation_function_inst: entity work.activation_function
         generic map
         generic map
         (
         (
            f_type => ftype_v(i-1), -- Activation function type of the previous layer (i-1)
            f_type => ftype_v(i-1), -- Activation function type of the previous layer (i-1)
            Nbit   => NbitO(i-1)   --- Bit width of the outputs of the previous layer (i-1)
            Nbit   => NbitO(i-1) ,  -- Bit width of the outputs of the previous layer (i-1)
 
            lsbit  => LSbit(i-1)    -- least significant bit of activation function
         )
         )
         port map
         port map
         (
         (
            reset   => reset,
            reset   => reset,
            clk     => clk,
            clk     => clk,

@@ rev 6 line 268 / rev 8 line 275 @@
       -- Instantiate single activation function of the previous layer (i-1):
 activation_function_inst: entity work.activation_function
       generic map
       (
          f_type => ftype_v(i-1),
-         Nbit   => NbitO(i-1)
+         Nbit   => NbitO(i-1),
+         lsbit  => LSbit(i-1)    -- least significant bit of activation function
       )
       port map
       (
          reset   => reset,
          clk     => clk,

@@ rev 6 line 307 / rev 8 line 315 @@
 SP_case:
    if ltype_v(i) = "SP" generate
 layerSP_top_inst: entity work.layerSP_top
       generic map
       (
+         WBinit  => WBinit,
+         LNum    => i,
          NumN    => NumN(i),   --- Number of neurons in layer (i)
          NumIn   => NumN(i-1),  -- Number of inputs, is the number of neurons in previous layer (i-1)
          NbitIn  => NbitO(i-1), -- Bit width of the input data, is the bit width of output data of layer (i-1)
          NbitW   => NbitW,   ----- Bit width of weights and biases
          NbitOut => NbitO(i),   -- Bit width of layer (i) output
Line 343... Line 353...
PS_case:
PS_case:
   if ltype_v(i) = "PS" generate
   if ltype_v(i) = "PS" generate
layerPS_top_inst: entity work.layerPS_top
layerPS_top_inst: entity work.layerPS_top
      generic map
      generic map
      (
      (
 
         WBinit  => WBinit ,
 
         LNum    => i ,
         NumN    => NumN(i),   --- Number of neurons in layer (i)
         NumN    => NumN(i),   --- Number of neurons in layer (i)
         NumIn   => NumN(i-1),  -- Number of inputs, is the number of neurons in previous layer (i-1)
         NumIn   => NumN(i-1),  -- Number of inputs, is the number of neurons in previous layer (i-1)
         NbitIn  => NbitO(i-1), -- Bit width of the input data, is the bit width of output data of layer (i-1)
         NbitIn  => NbitO(i-1), -- Bit width of the input data, is the bit width of output data of layer (i-1)
         NbitW   => NbitW,   ----- Bit width of weights and biases
         NbitW   => NbitW,   ----- Bit width of weights and biases
         NbitOut => NbitO(i),   -- Bit width of layer (i) output
         NbitOut => NbitO(i),   -- Bit width of layer (i) output

@@ rev 6 line 378 / rev 8 line 390 @@
    -- Parallel-input parallel-output layer:
 PP_case:
    if ltype_v(i) = "PP" generate
       -- TODO: instance a full parallel layer. At current version this layer type has not been developed.
       -- synthesis translate_off
-      assert l_type(i) /= "PP"
-         report "Current version does not accept parallel-input parallel-output (PP) layer type."
-         severity failure;
+      --assert l_type(i) /= "PP"
+      --   report "Current version does not accept parallel-input parallel-output (PP) layer type."
+      --   severity failure;
       -- synthesis translate_on
       -- TODO: delete above lines when instantiate the parallel-input parallel-output layer.
    end generate;
 
 end generate; -- layers_insts
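
Revision 8 also comments out the simulation-only guard that refused a "PP" entry in ltype_v, so an unsupported parallel-input parallel-output layer now appears to elaborate to an empty generate block instead of stopping the simulation. For reference, the guard pattern used in revision 6 is shown below as a self-contained snippet; the entity and constant names are illustrative and not from the repository.

-- Illustrative only: the simulation-time guard pattern that revision 8 disables.
-- The translate_off/on pragmas hide the assertion from synthesis tools.
entity pp_guard_sketch is
end entity pp_guard_sketch;

architecture sim of pp_guard_sketch is
   constant layer_type : string(1 to 2) := "PP";  -- stand-in for ltype_v(i)
begin
   -- synthesis translate_off
   assert layer_type /= "PP"
      report "Current version does not accept parallel-input parallel-output (PP) layer type."
      severity failure;
   -- synthesis translate_on
end architecture sim;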

@@ rev 6 line 412 / rev 8 line 424 @@
 
 last_activation_function_inst: entity work.activation_function
       generic map
       (
          f_type => ftype_v(Nlayer-1), -- Activation function type of the last layer (Nlayer-1)
-         Nbit   => NbitO(Nlayer-1)   --- Bit width of the outputs of the last layer (Nlayer-1)
+         Nbit   => NbitO(Nlayer-1),   -- Bit width of the outputs of the last layer (Nlayer-1)
+         lsbit  => LSbit(Nlayer-1)    -- least significant bit of activation function
       )
       port map
       (
          reset   => reset,
          clk     => clk,

@@ rev 6 line 433 / rev 8 line 446 @@
 if (ltype_v(Nlayer-1)(2) = 'S') generate
 last_activation_function_inst: entity work.activation_function
       generic map
       (
          f_type => ftype_v(Nlayer-1), -- Activation function type of the last layer (Nlayer-1)
-         Nbit   => NbitO(Nlayer-1)   --- Bit width of the outputs of the last layer (Nlayer-1)
+         Nbit   => NbitO(Nlayer-1),   -- Bit width of the outputs of the last layer (Nlayer-1)
+         lsbit  => LSbit(Nlayer-1)    -- least significant bit of activation function
       )
       port map
       (
          reset   => reset,
          clk     => clk,
