From d8bc60ede1528b07071f09169606a8aba9bbbfeb Mon Sep 17 00:00:00 2001 From: xnevk03 Date: Thu, 5 Sep 2024 13:29:12 +0200 Subject: [PATCH 01/17] mem [FEATURE]: Add mem_clear component --- comp/base/mem/mem_clear/Modules.tcl | 11 +++ comp/base/mem/mem_clear/mem_clear.vhd | 108 ++++++++++++++++++++++++++ comp/base/mem/mem_clear/readme.rst | 33 ++++++++ doc/source/memory.rst | 4 + 4 files changed, 156 insertions(+) create mode 100644 comp/base/mem/mem_clear/Modules.tcl create mode 100644 comp/base/mem/mem_clear/mem_clear.vhd create mode 100644 comp/base/mem/mem_clear/readme.rst diff --git a/comp/base/mem/mem_clear/Modules.tcl b/comp/base/mem/mem_clear/Modules.tcl new file mode 100644 index 000000000..f23c50fe6 --- /dev/null +++ b/comp/base/mem/mem_clear/Modules.tcl @@ -0,0 +1,11 @@ +# Modules.tcl: Components include script +# Copyright (C) 2024 CESNET +# Author(s): Lukas Nevrkla +# +# SPDX-License-Identifier: BSD-3-Clause + +# Packages +lappend PACKAGES "$OFM_PATH/comp/base/pkg/math_pack.vhd" +lappend PACKAGES "$OFM_PATH/comp/base/pkg/type_pack.vhd" + +lappend MOD "$ENTITY_BASE/mem_clear.vhd" diff --git a/comp/base/mem/mem_clear/mem_clear.vhd b/comp/base/mem/mem_clear/mem_clear.vhd new file mode 100644 index 000000000..ac19d8b4b --- /dev/null +++ b/comp/base/mem/mem_clear/mem_clear.vhd @@ -0,0 +1,108 @@ +-- mem_clear.vhd: Unit for clearing BRAM memories +-- Copyright (C) 2024 CESNET z. s. p. o. +-- Author(s): Lukas Nevrkla +-- +-- SPDX-License-Identifier: BSD-3-Clause + +library ieee; +use ieee.std_logic_1164.all; +use ieee.numeric_std.all; + +use work.math_pack.all; +use work.type_pack.all; + +entity MEM_CLEAR is +generic ( + DATA_WIDTH : integer := 32; + ITEMS : integer := 512; + -- Will disable memory clearing during RST + CLEAR_EN : boolean := true +); +port ( + CLK : in std_logic; + RST : in std_logic; + + -- All addresses were generated + CLEAR_DONE : out std_logic; + -- Clear address given by CLEAR_ADDR + CLEAR_WR : out std_logic; + CLEAR_ADDR : out std_logic_vector(log2(ITEMS) - 1 downto 0) +); +end entity; + +architecture FULL of MEM_CLEAR is + + type FSM_STATES_T is ( + CLEAR, + RUNNING + ); + + -- State machine -- + + signal curr_state : FSM_STATES_T; + signal next_state : FSM_STATES_T; + + signal addr_i : std_logic_vector(log2(ITEMS)-1 downto 0); + signal addr_r : std_logic_vector(log2(ITEMS)-1 downto 0); + signal rst_r : std_logic; + + begin + + CLEAR_ADDR <= addr_i; + + reg_p : process (CLK) + begin + if (rising_edge(CLK)) then + addr_r <= addr_i; + rst_r <= RST; + end if; + end process; + + ------------------- + -- STATE MACHINE -- + ------------------- + + state_reg_p : process (CLK) + begin + if (rising_edge(CLK)) then + if (RST = '1') then + if (CLEAR_EN = true) then + curr_state <= CLEAR; + else + curr_state <= RUNNING; + end if; + else + curr_state <= next_state; + end if; + end if; + end process; + + -- Output logic + process (all) + begin + CLEAR_DONE <= '0'; + CLEAR_WR <= '0'; + next_state <= curr_state; + + case curr_state is + when CLEAR => + if (RST = '0') then + CLEAR_wR <= '1'; + + if (rst_r = '1') then + addr_i <= (others => '0'); + else + addr_i <= std_logic_vector(unsigned(addr_r) + 1); + end if; + + if (unsigned(addr_i) = (ITEMS - 1)) then + next_state <= RUNNING; + end if; + end if; + + when RUNNING => + CLEAR_DONE <= '1'; + end case; + end process; + +end architecture; diff --git a/comp/base/mem/mem_clear/readme.rst b/comp/base/mem/mem_clear/readme.rst new file mode 100644 index 000000000..961169ed3 --- /dev/null +++ b/comp/base/mem/mem_clear/readme.rst @@ 
-0,0 +1,33 @@ +.. _mem_clear: + +Memory clear +------------ + +Simple component that will generate addresses for memory clearing when RST is asserted. + +Component port and generics description +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. vhdl:autoentity:: MEM_CLEAR + :noautogenerics: + + +Instance template +^^^^^^^^^^^^^^^^^ + +.. code-block:: + + data_clear_i : entity work.MEM_CLEAR + generic map ( + DATA_WIDTH => BOX_WIDTH, + ITEMS => BOX_CNT, + CLEAR_EN => CLEAR_BY_RST + ) + port map ( + CLK => CLK, + RST => RST, + + CLEAR_DONE => RST_DONE, + CLEAR_WR => wr_clear, + CLEAR_ADDR => wr_addr_clear + ); diff --git a/doc/source/memory.rst b/doc/source/memory.rst index 14e253585..c29648f05 100644 --- a/doc/source/memory.rst +++ b/doc/source/memory.rst @@ -75,6 +75,9 @@ Allows setting type of memory (LUT, BRAM, URAM) or automatic mode. Optimized for **SP_URAM_XILINX** - Structural implementation of single clock URAM memory based on Xilinx specific primitives with one read/write port. Only for Xilinx UltraScale+ FPGAs. +**_CLEAR** - Wrap around **** with additional clear logic. +Detailed :ref:`documentation can be found here`. + .. toctree:: :maxdepth: 1 :hidden: @@ -83,6 +86,7 @@ Allows setting type of memory (LUT, BRAM, URAM) or automatic mode. Optimized for comp/base/mem/sdp_bram/readme comp/base/mem/mp_bram/readme comp/base/mem/lvt_mem/readme + comp/base/mem/mem_clear/readme .. comp/base/mem/ References From 13eba82f61c848ce176b79c9479fa5ee40f1b7f3 Mon Sep 17 00:00:00 2001 From: xnevk03 Date: Thu, 5 Sep 2024 13:02:10 +0200 Subject: [PATCH 02/17] histogramer [BUGFIX]: Fix histogram box update by refactoring --- comp/debug/histogramer/Modules.tcl | 9 +- comp/debug/histogramer/histogramer.vhd | 420 +++++++++++-------------- 2 files changed, 193 insertions(+), 236 deletions(-) diff --git a/comp/debug/histogramer/Modules.tcl b/comp/debug/histogramer/Modules.tcl index 18d21178f..727dd738e 100644 --- a/comp/debug/histogramer/Modules.tcl +++ b/comp/debug/histogramer/Modules.tcl @@ -1,5 +1,5 @@ # Modules.tcl: Components include script -# Copyright (C) 2021 CESNET z. s. p. o. +# Copyright (C) 2024 CESNET z. s. p. o. # Author(s): Lukas Nevrkla # # SPDX-License-Identifier: BSD-3-Clause @@ -7,14 +7,17 @@ # Paths to components set CNT_BASE "$OFM_PATH/comp/base/logic/cnt" +set SDP_BRAM_BASE "$OFM_PATH/comp/base/mem/sdp_bram" set DP_BRAM_BASE "$OFM_PATH/comp/base/mem/dp_bram" +set MEM_CLEAR_BASE "$OFM_PATH/comp/base/mem/mem_clear" # Packages lappend PACKAGES "$OFM_PATH/comp/base/pkg/math_pack.vhd" lappend PACKAGES "$OFM_PATH/comp/base/pkg/type_pack.vhd" -lappend COMPONENTS [ list "CNT" $CNT_BASE "FULL" ] -lappend COMPONENTS [ list "DP_BRAM" $DP_BRAM_BASE "FULL" ] +lappend COMPONENTS [ list "CNT" $CNT_BASE "FULL" ] +lappend COMPONENTS [ list "DP_BRAM" $DP_BRAM_BASE "FULL" ] +lappend COMPONENTS [ list "MEM_CLEAR" $MEM_CLEAR_BASE "FULL" ] # Source files for implemented component lappend MOD "$ENTITY_BASE/histogramer.vhd" diff --git a/comp/debug/histogramer/histogramer.vhd b/comp/debug/histogramer/histogramer.vhd index 1c48a9208..50e5abf18 100644 --- a/comp/debug/histogramer/histogramer.vhd +++ b/comp/debug/histogramer/histogramer.vhd @@ -1,10 +1,9 @@ -- histogramer.vhd: Component for creating histograms --- Copyright (C) 2022 CESNET z. s. p. o. +-- Copyright (C) 2024 CESNET z. s. p. o. 
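--
-- Overview (documentation comment; the sketch and walk-through below are illustrative only):
--
-- * Box selection: the top log2(BOX_CNT) bits of INPUT address the box, e.g. with
--   INPUT_WIDTH = 8 and BOX_CNT = 32 the value 0xB7 = "10110111" lands in box
--   "10110" = 22.
-- * Saturation: boxes are incremented with the saturating add_f function, e.g. for
--   BOX_WIDTH = 4, x"E" + x"3" gives x"F" instead of wrapping, so a box stuck at
--   2**BOX_WIDTH - 1 indicates a probable overflow.
-- * Clearing: while RST_DONE = '0' the BRAM write port is driven by the MEM_CLEAR
--   component instead of the pipeline, as in the muxes later in this file:
--
--       wr_i      <= wr      when (RST_DONE = '1') else wr_clear;
--       wr_addr_i <= wr_addr when (RST_DONE = '1') else wr_addr_clear;
--       wr_data_i <= wr_data when (RST_DONE = '1') else (others => '0');
--
-- * Collisions: a request whose box is still in flight merges the write-back value
--   into the matching pipeline stage and ignores the stale BRAM read.
--   Illustrative walk-through: three back-to-back writes to one box holding 5
--   produce write-backs 6, 7 and 8, because each younger request adds the value
--   just written back rather than the outdated memory content.
--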
-- Author(s): Lukas Nevrkla -- -- SPDX-License-Identifier: BSD-3-Clause - library ieee; use ieee.std_logic_1164.all; use ieee.numeric_std.all; @@ -12,16 +11,16 @@ use ieee.numeric_std.all; use work.math_pack.all; use work.type_pack.all; - -- .. vhdl:autogenerics:: HISTOGRAMER entity HISTOGRAMER is generic ( - -- Width of input values + -- Input values width INPUT_WIDTH : integer; - -- Width of one histogram box (number of values in a given range) + -- Histogram box width (number of occurences in a given range) -- Box probably overflowed when its value equals 2**BOX_WIDTH-1 BOX_WIDTH : integer; -- Number of histogram boxes (defines histogram precision) + -- Must be power of 2 BOX_CNT : integer; -- Defines if read or write should occur when both happen at the same time READ_PRIOR : boolean := false; @@ -60,78 +59,140 @@ end entity; -- ========================================================================= architecture FULL of HISTOGRAMER is - --------------- - -- Constants -- - --------------- -- Should equal BRAM latency constant PIPELINE_ITEMS : integer := 2; constant ADDR_WIDTH : integer := log2(BOX_CNT); - constant ADDR_MAX : std_logic_vector(ADDR_WIDTH - 1 downto 0) := std_logic_vector(to_unsigned(BOX_CNT - 1, ADDR_WIDTH)); - constant MAX_BOX_VAL : std_logic_vector(BOX_WIDTH - 1 downto 0) := (others => '1'); - constant MAX_BOX_VAL_LONG : unsigned := unsigned(MAX_BOX_VAL); + type PIPELINE_T is record + vld : std_logic; + is_read : std_logic; + collision : std_logic; + addr : std_logic_vector(ADDR_WIDTH - 1 downto 0); + box : std_logic_vector(BOX_WIDTH - 1 downto 0); + end record; + + type PIPELINE_ARRAY_T is array (integer range <>) of PIPELINE_T; + + constant STAGES : positive := 2; + + -- Last pip_in is the output + signal pip_in : PIPELINE_ARRAY_T(STAGES downto 0); + signal pip_out : PIPELINE_ARRAY_T(STAGES - 1 downto -1); + signal fin_data : PIPELINE_T; + signal new_data : PIPELINE_T; + + signal collision_i : std_logic_vector(STAGES downto 0); + signal collision : std_logic_vector(STAGES downto 0); + + signal rd_data_vld : std_logic; + signal rd_data : std_logic_vector(BOX_WIDTH - 1 downto 0); + + signal wr : std_logic; + signal wr_i : std_logic; + signal wr_clear : std_logic; + signal wr_erase : std_logic; + signal wr_data_i : std_logic_vector(BOX_WIDTH - 1 downto 0); + signal wr_data : std_logic_vector(BOX_WIDTH - 1 downto 0); + signal wr_addr : std_logic_vector(ADDR_WIDTH - 1 downto 0); + signal wr_addr_i : std_logic_vector(ADDR_WIDTH - 1 downto 0); + signal wr_addr_clear : std_logic_vector(ADDR_WIDTH - 1 downto 0); + + -- Add + handle overflow + -- When overflow occurs, the value is set to maximum + function add_f(a : std_logic_vector ; b : std_logic_vector) return std_logic_vector is + constant DATA_WIDTH : integer := a'length; + -- Width is larger by 1 bit to detect overflow + variable tmp : std_logic_vector(DATA_WIDTH downto 0); + variable res : std_logic_vector(DATA_WIDTH - 1 downto 0); + begin + tmp := std_logic_vector(unsigned('0' & a) + unsigned('0' & b)); + res := std_logic_vector(tmp(DATA_WIDTH - 1 downto 0)) when (tmp(DATA_WIDTH) = '0') else + (others => '1'); + return res; + end function; - ------------- - -- Signals -- - ------------- + function first_one_f(bits : std_logic_vector) + return std_logic_vector is + begin + return bits and std_logic_vector(unsigned(not bits) + 1); + end; + + function last_one_f(bits : std_logic_vector) + return std_logic_vector is + constant DATA_WIDTH : integer := bits'length; + variable in_rot : std_logic_vector(DATA_WIDTH - 1 downto 
0); + variable first_one : std_logic_vector(DATA_WIDTH - 1 downto 0); + variable out_rot : std_logic_vector(DATA_WIDTH - 1 downto 0); + begin + for i in bits'range loop + in_rot(i) := bits(bits'length - 1 - i); + end loop; - signal input_write : std_logic; - signal input_read : std_logic; + first_one := first_one_f(in_rot); - signal pipeline_box : slv_array_t(PIPELINE_ITEMS downto 0)(BOX_WIDTH - 1 downto 0); - signal pipeline_addr : slv_array_t(PIPELINE_ITEMS downto 0)(ADDR_WIDTH - 1 downto 0); - signal pipeline_vld : std_logic_vector(PIPELINE_ITEMS downto 0); - signal pipeline_read : std_logic_vector(PIPELINE_ITEMS downto 0); + for i in bits'range loop + out_rot(i) := first_one(bits'length - 1 - i); + end loop; - signal input_pipeline_box : std_logic_vector(BOX_WIDTH - 1 downto 0); + return out_rot; + end; - signal last_pipeline_box : std_logic_vector(BOX_WIDTH - 1 downto 0); - signal last_pipeline_addr : std_logic_vector(ADDR_WIDTH - 1 downto 0); - signal last_pipeline_vld : std_logic; - signal last_pipeline_read : std_logic; +begin - -- For overflow detection - signal pipeline_box_incr : slv_array_t(PIPELINE_ITEMS downto 0)(BOX_WIDTH - 1 downto 0); - signal pipeline_box_res_tmp : std_logic_vector(BOX_WIDTH - 1 downto 0); - signal pipeline_box_res : std_logic_vector(BOX_WIDTH - 1 downto 0); + assert INPUT_WIDTH >= log2(BOX_CNT) + report "Histogramer: there are more histogram boxes then possible states of the input" & + " (input width: " & integer'image(INPUT_WIDTH) & ", box_cnt: " & integer'image(BOX_CNT) & ")!" + severity FAILURE; - signal clear_result : std_logic; + assert 2 ** log2(BOX_CNT) = BOX_CNT + report "Histogramer: BOX CNT is not power of 2!" + severity FAILURE; - signal colision_index : std_logic_vector(PIPELINE_ITEMS - 1 downto 0); - signal colision : std_logic; - signal colision_last : std_logic; + -- Pipeline core -- + ------------------- - signal feadback_to_first : std_logic; + pipeline_g : for i in STAGES - 1 downto 0 generate + pipeline_p : process (CLK) + begin + if (rising_edge(CLK)) then + if (RST = '1' or RST_DONE = '0') then + pip_out(i).vld <= '0'; + pip_out(i).addr <= (others => '0'); + elsif (pip_in(i).vld = '1') then + pip_out(i) <= pip_in(i); + else + pip_out(i).vld <= '0'; + end if; + end if; + end process; + end generate; - signal bram_read : std_logic; - signal bram_read_data_vld : std_logic; - signal bram_read_data : std_logic_vector(BOX_WIDTH - 1 downto 0); - signal bram_read_addr : std_logic_vector(ADDR_WIDTH - 1 downto 0); + -- Pipeline input -- + -------------------- - signal bram_write : std_logic; - signal bram_write_data : std_logic_vector(BOX_WIDTH - 1 downto 0); - signal bram_write_addr : std_logic_vector(ADDR_WIDTH - 1 downto 0); + new_data.vld <= (INPUT_VLD or READ_REQ); + new_data.collision <= '0'; - signal bram_clear_done : std_logic; - signal bram_clear_addr : std_logic_vector(ADDR_WIDTH - 1 downto 0); + -- MSB bits selects histogram box + new_data.addr <= READ_ADDR when (new_data.is_read = '1') else + INPUT(INPUT_WIDTH - 1 downto INPUT_WIDTH - ADDR_WIDTH); - function add_handle_overflow(a : std_logic_vector ; b : std_logic_vector) return std_logic_vector is - variable w : integer := a'length; - variable tmp : unsigned(w downto 0); - variable res : std_logic_vector(w - 1 downto 0); - begin - tmp := unsigned('0' & a) + unsigned('0' & b); - res := std_logic_vector(tmp(BOX_WIDTH - 1 downto 0)) when (tmp(w) = '0') else - (others => '1'); - return res; - end function; + read_prior_g : if (READ_PRIOR = true) generate + new_data.is_read <= 
READ_REQ; + else generate + new_data.is_read <= READ_REQ and not INPUT_VLD; + end generate; -begin - ------------------------- - -- Component instances -- - ------------------------- + -- Write will increment box by 1 + new_data.box <= (others => '0') when (new_data.is_read = '1') else + std_logic_vector(to_unsigned(1, new_data.box'length)); + + pip_out(-1) <= new_data; + + -- Histogram memory -- + ---------------------- data_i : entity work.DP_BRAM_BEHAV generic map ( @@ -143,199 +204,92 @@ begin RST => RST, PIPE_ENA => '1', - REA => bram_read, + REA => pip_in(0).vld, WEA => '0', - ADDRA => bram_read_addr, + ADDRA => pip_in(0).addr, DIA => (others => '0'), - DOA => bram_read_data, - DOA_DV => bram_read_data_vld, + DOA => rd_data, + DOA_DV => rd_data_vld, PIPE_ENB => '1', REB => '0', - WEB => bram_write, - ADDRB => bram_write_addr, - DIB => bram_write_data + WEB => wr_i, + ADDRB => wr_addr_i, + DIB => wr_data_i ); - ------------------------- - -- Combinational logic -- - ------------------------- - - -- Input management -- - ---------------------- - - -- Selection between read/write - read_prior_g : if (READ_PRIOR = true) generate - input_write <= INPUT_VLD and not READ_REQ and bram_clear_done; - input_read <= READ_REQ and bram_clear_done; - end generate; - write_prior_g : if (READ_PRIOR = false) generate - input_write <= INPUT_VLD and bram_clear_done; - input_read <= READ_REQ and not INPUT_VLD and bram_clear_done; - end generate; - - -- Command selection - pipeline_vld(0) <= (input_write or input_read) and (not colision or feadback_to_first); - pipeline_read(0) <= input_read and (not colision or feadback_to_first); - -- Select histogram box (adress) by cutting value - pipeline_addr(0) <= INPUT(INPUT_WIDTH - 1 downto INPUT_WIDTH - ADDR_WIDTH) when (input_write = '1') else - READ_ADDR; - -- Box initial value - input_pipeline_box <= std_logic_vector(to_unsigned(1, BOX_WIDTH)) when (input_write = '1') else - (others => '0'); - -- Colision between last and the first box => join to the first box - -- Join to the last box would need adder and ovf detection before bram write port - -- which would have bad timing - -- Adding register would need next colision detection and the same problem would occur again - pipeline_box(0) <= input_pipeline_box when (feadback_to_first = '0') else - add_handle_overflow(input_pipeline_box, last_pipeline_box); - - -- Colision detection -- - ------------------------ - - pipeline_colision_g : for i in PIPELINE_ITEMS - 1 downto 0 generate - colision_index(i) <= '1' when (pipeline_addr(0) = pipeline_addr(i + 1) and pipeline_vld(i + 1) = '1' and (INPUT_VLD = '1' or READ_REQ = '1')) else - '0'; - end generate; - colision_last <= '1' when (pipeline_addr(0) = last_pipeline_addr and last_pipeline_vld = '1' and (INPUT_VLD = '1' or READ_REQ = '1')) else - '0'; - colision <= (or colision_index); -- or colision_last; - - feadback_to_first <= colision_last and not clear_result; - - -- BRAM management -- - --------------------- + wr_i <= wr when (RST_DONE = '1') else + wr_clear; + wr_addr_i <= wr_addr when (RST_DONE = '1') else + wr_addr_clear; + wr_data_i <= wr_data when (RST_DONE = '1') else + (others => '0'); - bram_read <= pipeline_vld(0) or pipeline_read(0); - bram_read_addr <= pipeline_addr(0); - - bram_write <= (last_pipeline_vld and not feadback_to_first) when (bram_clear_done = '1') else - '1'; - bram_write_addr <= last_pipeline_addr when (bram_clear_done = '1') else - bram_clear_addr; - bram_write_data <= (others => '0') when (clear_result = '1' or bram_clear_done = '0') 
else - last_pipeline_box; + data_clear_i : entity work.MEM_CLEAR + generic map ( + DATA_WIDTH => BOX_WIDTH, + ITEMS => BOX_CNT, + CLEAR_EN => CLEAR_BY_RST + ) + port map ( + CLK => CLK, + RST => RST, - -- Clear by read -- - ------------------- + CLEAR_DONE => RST_DONE, + CLEAR_WR => wr_clear, + CLEAR_ADDR => wr_addr_clear + ); - -- Clear by read detection - clear_by_read_g : if (CLEAR_BY_READ = true) generate - clear_result <= last_pipeline_read or (colision_last and input_read); - end generate; - dont_clear_by_read_g : if (CLEAR_BY_READ = false) generate - clear_result <= '0'; - end generate; + -- Collision detection (between pipeline and write back) -- + ----------------------------------------------------------- - -- Pipeline management -- - ------------------------- + -- If collision occurs (same adress is beeing edited), + -- write-back value will be also saved in coresponding pipeline stage + -- Corresponding pipeline stage will ignore BRAM read data - -- Increment with overflow detection - pipeline_incr_g : for i in PIPELINE_ITEMS downto 0 generate - pipeline_box_incr(i) <= add_handle_overflow(pipeline_box(i), std_logic_vector(to_unsigned(1, pipeline_box(0)'length))); + collision_g : for i in STAGES - 1 downto 0 generate + collision_i(i) <= '1' when (pip_in(i).addr = fin_data.addr and pip_in(i).vld = '1' and fin_data.vld = '1') else + '0'; end generate; - - -- Result creation (BRAM read data + pipeline box + handle last colision + handle overflow) - pipeline_box_res_tmp<= pipeline_box(PIPELINE_ITEMS) when (colision_index(PIPELINE_ITEMS - 1) = '0' or input_read = '1') else - pipeline_box_incr(PIPELINE_ITEMS); - pipeline_box_res <= add_handle_overflow(pipeline_box_res_tmp, bram_read_data); - - -- Output -- - ------------ - - READ_BOX <= last_pipeline_box; - READ_BOX_VLD <= ((last_pipeline_read and last_pipeline_vld) or (colision_last and input_read)) and not feadback_to_first; - RST_DONE <= bram_clear_done; - - - --------------- - -- Registers -- - --------------- - - -- Pipeline -- - -------------- - - pipeline_g : for i in PIPELINE_ITEMS downto 1 generate - pipeline_p : process(CLK) - begin - if (rising_edge(CLK)) then - if (RST = '1') then - pipeline_vld(i) <= '0'; - pipeline_read(i) <= '0'; - pipeline_addr(i) <= (others => '0'); - else - pipeline_vld(i) <= pipeline_vld(i - 1); - pipeline_read(i) <= pipeline_read(i - 1); - pipeline_addr(i) <= pipeline_addr(i - 1); - - -- Collision detected - if (i > 1 and colision_index(i - 2) = '1') then - if (input_read = '1') then - pipeline_read(i) <= '1'; - pipeline_box(i) <= pipeline_box(i - 1); - else - pipeline_box(i) <= pipeline_box_incr(i - 1); - end if; - else - pipeline_box(i) <= pipeline_box(i - 1); - end if; - end if; - end if; - end process; + collision_i(STAGES) <= '0'; + + -- When multiple collisions occurs, handle only the closest one + -- The other collision will be handeled when the closest collision will be at the end + collision <= last_one_f(collision_i); + + pip_data_g : for i in STAGES downto 0 generate + pip_in(i).vld <= pip_out(i - 1).vld; + pip_in(i).is_read <= pip_out(i - 1).is_read; + pip_in(i).addr <= pip_out(i - 1).addr; + pip_in(i).collision <= collision(i) or pip_out(i - 1).collision; + + pip_data_box_g : if i = STAGES generate + -- Last stage -- + -- Read data should be valid at exactly this point + -- If collision occured, don't use read data + pip_in(i).box <= add_f(pip_out(i - 1).box, rd_data) when (fin_data.collision = '0') else + pip_out(i - 1).box; + else generate + -- 2 special cases can occur during 
collision with the last stage + -- * Write at the last stage wrote a new value => update current box + -- * Read at the last stage caused clear of the box => write only current box (don't add with old box value) + pip_in(i).box <= pip_out(i - 1).box when (collision(i) = '0' or wr_erase = '1') else + add_f(pip_out(i - 1).box, fin_data.box); + end generate; end generate; - last_pipeline_box_data_p : process(CLK) - begin - if (rising_edge(CLK)) then - last_pipeline_addr <= pipeline_addr(PIPELINE_ITEMS); - last_pipeline_box <= pipeline_box_res; - last_pipeline_read <= pipeline_read(PIPELINE_ITEMS); - - if (colision_index(PIPELINE_ITEMS - 1) = '1' and input_read = '1') then - last_pipeline_read <= '1'; - end if; - end if; - end process; - - last_pipeline_box_p : process(CLK) - begin - if (rising_edge(CLK)) then - if (RST = '1') then - last_pipeline_vld <= '0'; - else - last_pipeline_vld <= pipeline_vld(PIPELINE_ITEMS); - end if; - end if; - end process; - - -- Clear by RST -- + -- Output phase -- ------------------ - clear_addr_p : process(CLK) - begin - if (rising_edge(CLK)) then - if (RST = '1' or bram_clear_done = '1') then - bram_clear_addr <= (others => '0'); - else - bram_clear_addr <= std_logic_vector(unsigned(bram_clear_addr) + 1); - end if; - end if; - end process; + fin_data <= pip_in(STAGES); + READ_BOX_VLD <= fin_data.vld and fin_data.is_read; + READ_BOX <= fin_data.box; - clear_by_rst_g : if (CLEAR_BY_RST = true) generate - clear_done_p : process(CLK) - begin - if (rising_edge(CLK)) then - if (RST = '1') then - bram_clear_done <= '0'; - elsif (bram_clear_addr = ADDR_MAX) then - bram_clear_done <= '1'; - end if; - end if; - end process; - end generate; - dont_clear_by_rst_g : if (CLEAR_BY_RST = false) generate - bram_clear_done <= '1'; - end generate; + wr <= fin_data.vld; + wr_erase <= '1' when (CLEAR_BY_READ = true and fin_data.is_read = '1') else + '0'; + wr_addr <= fin_data.addr; + wr_data <= fin_data.box when (wr_erase = '0') else + (others => '0'); end architecture; From 346db72d126dfbadfed49c30f16085d6e6e3d707 Mon Sep 17 00:00:00 2001 From: xnevk03 Date: Thu, 5 Sep 2024 13:03:48 +0200 Subject: [PATCH 03/17] histogramer [FEATURE]: Add verifications --- comp/debug/histogramer/harp.toml | 51 +++ comp/debug/histogramer/uvm/Modules.tcl | 17 + comp/debug/histogramer/uvm/signals.fdo | 295 ++++++++++++++++++ comp/debug/histogramer/uvm/signals_sig.fdo | 18 ++ comp/debug/histogramer/uvm/tbench/dut.sv | 70 +++++ comp/debug/histogramer/uvm/tbench/env/env.sv | 146 +++++++++ .../debug/histogramer/uvm/tbench/env/model.sv | 112 +++++++ comp/debug/histogramer/uvm/tbench/env/pkg.sv | 22 ++ .../histogramer/uvm/tbench/env/scoreboard.sv | 151 +++++++++ .../histogramer/uvm/tbench/env/sequencer.sv | 42 +++ .../debug/histogramer/uvm/tbench/testbench.sv | 117 +++++++ .../histogramer/uvm/tbench/tests/const.sv | 36 +++ .../debug/histogramer/uvm/tbench/tests/pkg.sv | 22 ++ .../histogramer/uvm/tbench/tests/sequence.sv | 90 ++++++ .../uvm/tbench/tests/sequence_virt.sv | 94 ++++++ .../histogramer/uvm/tbench/tests/test.sv | 132 ++++++++ comp/debug/histogramer/uvm/top_level.fdo | 45 +++ 17 files changed, 1460 insertions(+) create mode 100644 comp/debug/histogramer/harp.toml create mode 100644 comp/debug/histogramer/uvm/Modules.tcl create mode 100644 comp/debug/histogramer/uvm/signals.fdo create mode 100644 comp/debug/histogramer/uvm/signals_sig.fdo create mode 100644 comp/debug/histogramer/uvm/tbench/dut.sv create mode 100644 comp/debug/histogramer/uvm/tbench/env/env.sv create mode 100644 
comp/debug/histogramer/uvm/tbench/env/model.sv create mode 100644 comp/debug/histogramer/uvm/tbench/env/pkg.sv create mode 100644 comp/debug/histogramer/uvm/tbench/env/scoreboard.sv create mode 100644 comp/debug/histogramer/uvm/tbench/env/sequencer.sv create mode 100644 comp/debug/histogramer/uvm/tbench/testbench.sv create mode 100644 comp/debug/histogramer/uvm/tbench/tests/const.sv create mode 100644 comp/debug/histogramer/uvm/tbench/tests/pkg.sv create mode 100644 comp/debug/histogramer/uvm/tbench/tests/sequence.sv create mode 100644 comp/debug/histogramer/uvm/tbench/tests/sequence_virt.sv create mode 100644 comp/debug/histogramer/uvm/tbench/tests/test.sv create mode 100644 comp/debug/histogramer/uvm/top_level.fdo diff --git a/comp/debug/histogramer/harp.toml b/comp/debug/histogramer/harp.toml new file mode 100644 index 000000000..accdd5824 --- /dev/null +++ b/comp/debug/histogramer/harp.toml @@ -0,0 +1,51 @@ +# rtlproject.toml: File verification and synthesis combinations +# Copyright (C) 2024 CESNET z. s. p. o. +# Author(s): Lukas Nevrkla +# +# SPDX-License-Identifier: BSD-3-Clause + +[generics] +asserts = [ + """(2 ** INPUT_WIDTH >= BOX_CNT)""" +] +[settings.default] + +INPUT_WIDTH = 8 +BOX_WIDTH = 32 +BOX_CNT = 32 +READ_PRIOR = 0 +CLEAR_BY_READ = 1 +CLEAR_BY_RST = 1 +DEVICE = "ULTRASCALE" + +[settings.box_cnt] +type = "list" +BOX_CNT = [4, 8, 16, 2048] + +[settings.box_width] +type = "list" +BOX_WIDTH = [1, 4, 256] + +[settings.input_width] +type = "list" +INPUT_WIDTH = [2, 4, 512] + +[settings.read_prior] +type = "list" +READ_PRIOR = [0, 1] + +[settings.clear_by_read] +type = "list" +CLEAR_BY_READ = [0, 1] + +[[ver.combinations]] +name = "basic" +settings = ["box_cnt", "box_width", "input_width"] #, "read_prior", "clear_by_read"] + +[ver.settings] +tests = ["test_basic"] + +[[synth.combinations]] +name = "Test differenct box cnt" +settings = ["box_cnt", "box_width", "input_width"] #, "read_prior", "clear_by_read"] + diff --git a/comp/debug/histogramer/uvm/Modules.tcl b/comp/debug/histogramer/uvm/Modules.tcl new file mode 100644 index 000000000..42cdea984 --- /dev/null +++ b/comp/debug/histogramer/uvm/Modules.tcl @@ -0,0 +1,17 @@ +# Modules.tcl: Components include script +# Copyright (C) 2022 CESNET z. s. p. o. +# Author: Lukas Nevrkla +# +# SPDX-License-Identifier: BSD-3-Clause + +# Set paths + +lappend COMPONENTS [ list "SV_MVB_UVM_BASE" "$OFM_PATH/comp/uvm/mvb" "FULL"] +lappend COMPONENTS [ list "SV_LOGIC_VECTOR" "$OFM_PATH/comp/uvm/logic_vector" "FULL"] +lappend COMPONENTS [ list "SV_LOGIC_VECTOR_MVB" "$OFM_PATH/comp/uvm/logic_vector_mvb" "FULL"] + +lappend MOD "$ENTITY_BASE/tbench/env/pkg.sv" +lappend MOD "$ENTITY_BASE/tbench/tests/pkg.sv" + +lappend MOD "$ENTITY_BASE/tbench/dut.sv" +lappend MOD "$ENTITY_BASE/tbench/testbench.sv" diff --git a/comp/debug/histogramer/uvm/signals.fdo b/comp/debug/histogramer/uvm/signals.fdo new file mode 100644 index 000000000..17ea30417 --- /dev/null +++ b/comp/debug/histogramer/uvm/signals.fdo @@ -0,0 +1,295 @@ +# signals.fdo : Include file with signals +# Copyright (C) 2022 CESNET z. s. p. o. 
+# Author: Lukas Nevrkla +# +# SPDX-License-Identifier: BSD-3-Clause + +proc all {NAME PATH} { + add_wave "-noupdate -hex" $PATH/CLK + add_wave "-noupdate -hex" $PATH/RST + + add wave -divider "Main interface" + add_wave "-noupdate -hex" $PATH/RDY + add_wave "-noupdate -hex" $PATH/req_delayed + add_wave "-noupdate -hex" $PATH/is_modify_reg + add_wave "-noupdate -hex" $PATH/symbol_reg + add_wave "-noupdate -hex " $PATH/new_item_*_reg + add_wave "-noupdate -hex" $PATH/VLD + add_wave "-noupdate -hex " $PATH/RESP_* + + add wave -divider "Memory interface" + add_wave "-noupdate -hex " $PATH/mem_manager_i/MEM_RDY + add_wave "-noupdate -hex " $PATH/mem_manager_i/MEM_READ + add_wave "-noupdate -hex " $PATH/mem_manager_i/MEM_WRITE + add_wave "-noupdate -hex " $PATH/mem_manager_i/MEM_ADDR + add_wave "-noupdate -hex " $PATH/mem_manager_i/MEM_BURST + add_wave "-noupdate -hex " $PATH/mem_manager_i/MEM_WRITE_DATA + add_wave "-noupdate -hex " $PATH/mem_manager_i/MEM_READ_DATA_VLD + add_wave "-noupdate -hex " $PATH/mem_manager_i/MEM_READ_DATA + + add wave -divider "FAULTS" + add_wave "-noupdate -hex " $PATH/FAULT_SORT_ERR + + add wave -divider "FSMs" + add_wave "-noupdate -hex -label ROOT_MANAGER" $PATH/root_manager_i/curr_state + add_wave "-noupdate -hex -label LEAF_EDITOR" $PATH/leaf_manager_i/leaf_editor_i/leaf_editor_core_i/curr_state + add_wave "-noupdate -hex -label MEM_CMD_GEN" $PATH/mem_manager_i/mem_cmd_gen_i/curr_state + add_wave "-noupdate -hex -label MEM_DRIVER" $PATH/mem_manager_i/mem_driver_i/curr_state + + add wave -divider "Root_manager interface" + add_wave "-noupdate -hex " $PATH/root_manager_i/MAIN_VLD + add_wave "-noupdate -hex " $PATH/root_manager_i/EXTRA_VLD + add_wave "-noupdate -hex " $PATH/root_manager_i/POSSIBLE_ACTION + add_wave "-noupdate -hex " $PATH/root_manager_i/TREE_EMPTY + add_wave "-noupdate -hex " $PATH/root_manager_i/MEM_OFFSET + add_wave "-noupdate -hex " $PATH/root_manager_i/ITEM_CNT + add_wave "-noupdate -hex " $PATH/root_manager_i/IS_UPDATE_VLD + add_wave "-noupdate -hex " $PATH/root_manager_i/IS_UPDATE + add_wave "-noupdate -hex " $PATH/root_manager_i/IS_SPLIT_PRICE + add_wave "-noupdate -hex " $PATH/root_manager_i/IS_ROOT_PRICE + add_wave "-noupdate -hex " $PATH/root_manager_i/EXTRA_PRICE + add_wave "-noupdate -hex " $PATH/root_manager_i/DEBUG_NEW_ROOT_VLD + add_wave "-noupdate -hex " $PATH/root_manager_i/DEBUG_NEW_ROOT + add_wave "-noupdate -hex " $PATH/root_manager_i/new_root + add_wave "-noupdate -hex " $PATH/root_manager_i/old_root + + add wave -divider "Leaf_manager interface" + add_wave "-noupdate -hex " $PATH/leaf_manager_i/REQ + add_wave "-noupdate -hex " $PATH/leaf_manager_i/VLD + add_wave "-noupdate -hex " $PATH/leaf_manager_i/IS_LAST_OUT + add_wave "-noupdate -hex " $PATH/leaf_manager_i/RESULT_ITEMS + add_wave "-noupdate -hex " $PATH/leaf_manager_i/FOUND_ITEM + add_wave "-noupdate -hex " $PATH/leaf_manager_i/EXTRA_VLD + add_wave "-noupdate -hex " $PATH/leaf_manager_i/IS_UPDATE + + add wave -divider "Root_manager" + add wave -divider "" + add_wave "-group { root_manager } -noupdate -hex" $PATH/root_manager_i/* + + add wave -divider "" + add_wave "-group { root_loader } -noupdate -hex" $PATH/root_manager_i/root_loader_i/* + + add wave -divider "" + add_wave "-group { range_select } -noupdate -hex" $PATH/root_manager_i/root_loader_i/range_select_i/* + + add wave -divider "" + add_wave "-group { ctx_loader } -noupdate -hex" $PATH/root_manager_i/ctx_loader_i/* + + add wave -divider "" + add_wave "-group { ctx_transformer } -noupdate -hex" 
$PATH/root_manager_i/ctx_transformer_i/* + + add wave -divider "" + add_wave "-group { ctx_processor } -noupdate -hex" $PATH/root_manager_i/ctx_processor_i/* + + add wave -divider "" + add_wave "-group { root_editor } -noupdate -hex" $PATH/root_manager_i/ctx_processor_i/root_editor_i/* + + add wave -divider "" + add_wave "-group { redistribute_check } -noupdate -hex" $PATH/root_manager_i/ctx_loader_i/redistribute_check_i/* + + + add wave -divider "Leaf_manager output" + add wave -divider "" + add_wave "-group { leaf_manager } -noupdate -hex" $PATH/leaf_manager_i/* + + add_wave "-group { editor } -noupdate -hex" $PATH/leaf_manager_i/editor_i/* + + add wave -divider "" + add_wave "-group { item_cnt_decoder } -noupdate -hex" $PATH/leaf_manager_i/item_cnt_decoder_i/* + + add wave -divider "" + add_wave "-group { range_select_leaf } -noupdate -hex" $PATH/leaf_manager_i/range_select_i/* + + add wave -divider "" + add_wave "-group { burst_editor } -noupdate -hex" $PATH/leaf_manager_i/burst_editor_i/* + + add wave -divider "" + add_wave "-group { list_editor } -noupdate -hex" $PATH/leaf_manager_i/burst_editor_i/list_editor_i/* + + add wave -divider "" + add_wave "-group { list_check } -noupdate -hex" $PATH/leaf_manager_i/list_check_i/* + + + add wave -divider "Mem_manager" + # add_wave "-noupdate -hex " $PATH/mem_manager_i/RDY + # add_wave "-noupdate -hex " $PATH/mem_manager_i/MAIN_REQ + # add_wave "-noupdate -hex " $PATH/mem_manager_i/EXTRA_REQ + # add_wave "-noupdate -hex " $PATH/mem_manager_i/POSSIBLE_ACTION + # add_wave "-noupdate -hex " $PATH/mem_manager_i/TREE_EMPTY + # add_wave "-noupdate -hex " $PATH/mem_manager_i/MEM_OFFSET + # add_wave "-noupdate -hex " $PATH/mem_manager_i/ITEM_CNT + # add wave -divider "Mem_manager root output" + # add_wave "-noupdate -hex " $PATH/mem_manager_i/ROOT_IS_UPDATE_VLD + # add_wave "-noupdate -hex " $PATH/mem_manager_i/ROOT_IS_UPDATE + # add_wave "-noupdate -hex " $PATH/mem_manager_i/IS_SPLIT_PRICE + # add_wave "-noupdate -hex " $PATH/mem_manager_i/IS_ROOT_PRICE + # add_wave "-noupdate -hex " $PATH/mem_manager_i/ROOT_PRICE + # add wave -divider "Mem_manager leaf interface" + # add_wave "-noupdate -hex " $PATH/mem_manager_i/ITEM_FOUND + # add_wave "-noupdate -hex " $PATH/mem_manager_i/IS_UPDATE + # add_wave "-noupdate -hex " $PATH/mem_manager_i/EDITED_DATA_VLD + # add_wave "-noupdate -hex " $PATH/mem_manager_i/EDITED_DATA_IS_LAST + # add_wave "-noupdate -hex " $PATH/mem_manager_i/EDITED_DATA + # add_wave "-noupdate -hex " $PATH/mem_manager_i/NEW_DATA_VLD + # add_wave "-noupdate -hex " $PATH/mem_manager_i/NEW_DATA + add wave -divider "Mem_manager mem_cmd_gen" + add_wave "-noupdate -hex " $PATH/mem_manager_i/mem_cmd_gen_i/RDY + add_wave "-noupdate -hex " $PATH/mem_manager_i/mem_cmd_gen_i/IS_SPLIT_PRICE + add_wave "-noupdate -hex " $PATH/mem_manager_i/mem_cmd_gen_i/IS_ROOT_PRICE + # add wave -divider "Mem_manager mem_cmd_gen leaf interface" + # add_wave "-noupdate -hex " $PATH/mem_manager_i/mem_cmd_gen_i/ITEM_FOUND + # add_wave "-noupdate -hex " $PATH/mem_manager_i/mem_cmd_gen_i/IS_UPDATE + # add_wave "-noupdate -hex " $PATH/mem_manager_i/mem_cmd_gen_i/EDITED_DATA_VLD + + add wave -divider "Mem_manager mem_driver" + add_wave "-noupdate -hex " $PATH/mem_manager_i/mem_driver_i/RDY + add_wave "-noupdate -hex " $PATH/mem_manager_i/mem_driver_i/REQ + add_wave "-noupdate -hex " $PATH/mem_manager_i/mem_driver_i/IS_WRITE + add_wave "-noupdate -hex " $PATH/mem_manager_i/mem_driver_i/MEM_OFFSET + add_wave "-noupdate -hex " $PATH/mem_manager_i/mem_driver_i/ITEM_CNT + 
add_wave "-noupdate -hex " $PATH/mem_manager_i/mem_driver_i/ADDR_OFFSET + add_wave "-noupdate -hex " $PATH/mem_manager_i/mem_driver_i/ITEMS_OFFSET + add_wave "-noupdate -hex " $PATH/mem_manager_i/mem_driver_i/BURST_DECR + add_wave "-noupdate -hex " $PATH/mem_manager_i/mem_driver_i/ROOT_ADDR + + # add wave -divider "Mem_manager mem_driver leaf interface" + # add_wave "-noupdate -hex " $PATH/mem_manager_i/mem_driver_i/INPUT_DATA_VLD + # add_wave "-noupdate -hex " $PATH/mem_manager_i/mem_driver_i/INPUT_DATA + # add_wave "-noupdate -hex " $PATH/mem_manager_i/mem_driver_i/OUTPUT_DATA_VLD + # add_wave "-noupdate -hex " $PATH/mem_manager_i/mem_driver_i/OUTPUT_DATA + # + # add wave -divider "Mem_manager mem_driver mem interface" + # add_wave "-noupdate -hex " $PATH/mem_manager_i/mem_driver_i/MEM_READ + # add_wave "-noupdate -hex " $PATH/mem_manager_i/mem_driver_i/MEM_WRITE + # add_wave "-noupdate -hex " $PATH/mem_manager_i/mem_driver_i/MEM_WRITE_DATA + # add_wave "-noupdate -hex " $PATH/mem_manager_i/mem_driver_i/MEM_ADDR + # add_wave "-noupdate -hex " $PATH/mem_manager_i/mem_driver_i/MEM_BURST + # add_wave "-noupdate -hex " $PATH/mem_manager_i/mem_driver_i/MEM_RDY + # add_wave "-noupdate -hex " $PATH/mem_manager_i/mem_driver_i/MEM_READ_DATA_VLD + # add_wave "-noupdate -hex " $PATH/mem_manager_i/mem_driver_i/MEM_READ_DATA + + add wave -divider "" + add_wave "-group { mem_manager } -noupdate -hex" $PATH/mem_manager_i/* + add wave -divider "" + add_wave "-group { mem_cmd_gen } -noupdate -hex" $PATH/mem_manager_i/mem_cmd_gen_i/* + add wave -divider "" + add_wave "-group { mem_driver } -noupdate -hex" $PATH/mem_manager_i/mem_driver_i/* + add wave -divider "" + add_wave "-group { drop_fifo } -noupdate -hex" $PATH/mem_manager_i/mem_driver_i/drop_fifo_i/* + + + + + + + ## add wave -divider "$NAME" + ## add_wave "-noupdate -hex" $PATH/* + # + #add_wave "-noupdate -hex" $PATH/CLK + #add_wave "-noupdate -hex" $PATH/RST + # + #add wave -divider "Main interface" + #add_wave "-noupdate -hex" $PATH/RDY + #add_wave "-noupdate -hex" $PATH/REQ + #add_wave "-noupdate -hex" $PATH/VLD + #add_wave "-noupdate -hex" $PATH/IS_MODIFY + #add_wave "-noupdate -hex" $PATH/ROOT_ADDR + #add_wave "-noupdate -hex " $PATH/NEW_ITEM_* + #add_wave "-noupdate -hex " $PATH/FOUND_ITEM_* + # + #add wave -divider "Memory interface" + ## add_wave "-noupdate -hex " $PATH/MEM_* + #add_wave "-noupdate -hex " $PATH/MEM_READ* + #add_wave "-noupdate -hex " $PATH/../MEM_* + # + # + #add wave -divider "Root_manager output" + #add_wave "-noupdate -hex " $PATH/root_manager_i/MAIN_VLD + #add_wave "-noupdate -hex " $PATH/root_manager_i/EXTRA_VLD + #add_wave "-noupdate -hex " $PATH/root_manager_i/POSSIBLE_ACTION + #add_wave "-noupdate -hex " $PATH/root_manager_i/TREE_EMPTY + #add_wave "-noupdate -hex " $PATH/root_manager_i/MEM_OFFSET + #add_wave "-noupdate -hex " $PATH/root_manager_i/ITEM_CNT + #add_wave "-noupdate -hex " $PATH/root_manager_i/curr_state + #add_wave "-noupdate -hex " $PATH/root_manager_i/new_root + #add_wave "-noupdate -hex " $PATH/root_manager_i/old_root + # + #add wave -divider "" + #add_wave "-group { root_manager } -noupdate -hex" $PATH/root_manager_i/* + # + #add wave -divider "" + #add_wave "-group { root_editor } -noupdate -hex" $PATH/root_manager_i/root_editor_i/* + # + #add wave -divider "" + #add_wave "-group { redistribute_check } -noupdate -hex" $PATH/root_manager_i/redistribute_check_i/* + # + # + # + #add wave -divider "Leaf_manager output" + #add_wave "-noupdate -hex " $PATH/leaf_manager_i/EXTRA_VLD + #add_wave "-noupdate 
-hex " $PATH/leaf_manager_i/IS_UPDATE + #add_wave "-noupdate -hex " $PATH/leaf_manager_i/FOUND_ITEM + #add_wave "-noupdate -hex " $PATH/leaf_manager_i/ITEMS + #add_wave "-noupdate -hex " $PATH/leaf_manager_i/RESULT_ITEMS + #add_wave "-noupdate -hex " $PATH/leaf_manager_i/VLD + #add_wave "-noupdate -hex " $PATH/leaf_manager_i/IS_LAST_OUT + #add_wave "-noupdate -hex " $PATH/leaf_manager_i/leaf_editor_i/curr_state + # + #add wave -divider "" + #add_wave "-group { leaf_manager } -noupdate -hex" $PATH/leaf_manager_i/* + # + #add wave -divider "" + #add_wave "-group { leaf_editor } -noupdate -hex" $PATH/leaf_manager_i/leaf_editor_i/* + # + #add wave -divider "" + #add_wave "-group { item_cnt_decoder } -noupdate -hex" $PATH/leaf_manager_i/item_cnt_decoder_i/* + # + # + # + #add wave -divider "Mem_manager output" + #add_wave "-noupdate -hex " $PATH/mem_manager_i/LEAF_PROCESSED + #add_wave "-noupdate -hex " $PATH/mem_manager_i/ROOT_IS_UPDATE + #add_wave "-noupdate -hex " $PATH/mem_manager_i/EXTRA_PRICE + #add_wave "-noupdate -hex " $PATH/mem_manager_i/fifo_empty + #add_wave "-noupdate -hex " $PATH/mem_manager_i/fifo_wr + #add_wave "-noupdate -hex " $PATH/mem_manager_i/fifo_block_wr + #add_wave "-noupdate -hex " $PATH/mem_manager_i/fifo_rd + #add_wave "-noupdate -hex " $PATH/mem_manager_i/EDITED_DATA_VLD + #add_wave "-noupdate -hex " $PATH/mem_manager_i/ITEM_FOUND + #add_wave "-noupdate -hex " $PATH/mem_manager_i/IS_UPDATE + # + #add wave -divider "Mem_manager main fsm" + #add_wave "-noupdate -hex " $PATH/mem_manager_i/cmd_gen_i/curr_state + #add_wave "-noupdate -hex " $PATH/mem_manager_i/cmd_done + #add_wave "-noupdate -hex " $PATH/mem_manager_i/cmd_activate + #add_wave "-noupdate -hex " $PATH/mem_manager_i/cmd_mem_offset + #add_wave "-noupdate -hex " $PATH/mem_manager_i/cmd_item_cnt + #add_wave "-noupdate -hex " $PATH/mem_manager_i/is_write + #add_wave "-noupdate -hex " $PATH/mem_manager_i/wait_for_found + #add_wave "-noupdate -hex " $PATH/mem_manager_i/can_update + #add_wave "-noupdate -hex " $PATH/mem_manager_i/addr_from_sum + #add_wave "-noupdate -hex " $PATH/mem_manager_i/just_load + #add_wave "-noupdate -hex " $PATH/mem_manager_i/only_update + # + #add wave -divider "Mem_manager second layer" + #add_wave "-noupdate -hex " $PATH/mem_manager_i/curr_state + #add_wave "-noupdate -hex " $PATH/mem_manager_i/save_addr + #add_wave "-noupdate -hex " $PATH/mem_manager_i/skip_word + #add_wave "-noupdate -hex " $PATH/mem_manager_i/cmd_running + #add_wave "-noupdate -hex " $PATH/mem_manager_i/cmd_en + #add_wave "-noupdate -hex " $PATH/mem_manager_i/update_running + #add_wave "-noupdate -hex " $PATH/mem_manager_i/burst_zero + #add_wave "-noupdate -hex " $PATH/mem_manager_i/addr_reg + #add_wave "-noupdate -hex " $PATH/mem_manager_i/burst_reg + #add_wave "-noupdate -hex " $PATH/mem_manager_i/req_burst_done + #add_wave "-noupdate -hex " $PATH/mem_manager_i/req_burst_cnt + #add_wave "-noupdate -hex " $PATH/mem_manager_i/possible_action_reg + # + #add wave -divider "" + #add_wave "-group { mem_manager } -noupdate -hex" $PATH/mem_manager_i/* + # + #add wave -divider "" + #add_wave "-group { uut } -noupdate -hex" $PATH/* + +} + diff --git a/comp/debug/histogramer/uvm/signals_sig.fdo b/comp/debug/histogramer/uvm/signals_sig.fdo new file mode 100644 index 000000000..de7c1555b --- /dev/null +++ b/comp/debug/histogramer/uvm/signals_sig.fdo @@ -0,0 +1,18 @@ +# signal_sig.fdo : Include file with signals +# Copyright (C) 2022 CESNET z. s. p. o. 
+# Author: Lukas Nevrkla +# +# SPDX-License-Identifier: BSD-3-Clause + +source "./signals.fdo" +view wave +delete wave * + +add_wave "-noupdate -hex " /testbench/DUT_U/VHDL_DUT_U/CLK +add_wave "-noupdate -hex " /testbench/DUT_U/VHDL_DUT_U/RST + +add_wave "-group { uut } -noupdate -hex" /testbench/DUT_U/VHDL_DUT_U/* + +## all ALL /testbench/DUT_U/VHDL_DUT_U/ + +config wave -signalnamewidth 1 diff --git a/comp/debug/histogramer/uvm/tbench/dut.sv b/comp/debug/histogramer/uvm/tbench/dut.sv new file mode 100644 index 000000000..10b0bf0ff --- /dev/null +++ b/comp/debug/histogramer/uvm/tbench/dut.sv @@ -0,0 +1,70 @@ +//-- dut.sv: Design under test +//-- Copyright (C) 2022 CESNET z. s. p. o. +//-- Author: Lukas Nevrkla + +//-- SPDX-License-Identifier: BSD-3-Clause + +import test::*; + +module DUT #( + int unsigned INPUT_WIDTH, + int unsigned BOX_WIDTH, + int unsigned BOX_CNT, + logic READ_PRIOR, + logic CLEAR_BY_READ, + logic CLEAR_BY_RST, + string DEVICE, + int unsigned REQ_WIDTH, + int unsigned RESP_WIDTH +)( + input logic CLK, + input logic RST, + mvb_if.dut_rx mvb_req, + mvb_if.dut_tx mvb_resp +); + + logic RST_DONE; + logic INPUT_VLD; + logic [INPUT_WIDTH - 1 : 0] INPUT; + logic READ_REQ; + logic [$clog2(BOX_CNT) - 1 : 0] READ_ADDR; + logic READ_BOX_VLD; + logic [BOX_WIDTH - 1 : 0] READ_BOX; + + logic req; + logic read_req; + + assign mvb_req.DST_RDY = RST_DONE; + assign req = mvb_req.SRC_RDY & mvb_req.VLD; + assign {read_req, INPUT, READ_ADDR} = mvb_req.DATA; + + assign INPUT_VLD = req & ! read_req; + assign READ_REQ = req & read_req; + + assign mvb_resp.DST_RDY = 2'b1; + assign mvb_resp.SRC_RDY = READ_BOX_VLD; + assign mvb_resp.VLD = READ_BOX_VLD; + assign mvb_resp.DATA = READ_BOX; + + HISTOGRAMER #( + .INPUT_WIDTH (INPUT_WIDTH ), + .BOX_WIDTH (BOX_WIDTH ), + .BOX_CNT (BOX_CNT ), + .READ_PRIOR (READ_PRIOR ), + .CLEAR_BY_READ (CLEAR_BY_READ), + .CLEAR_BY_RST (CLEAR_BY_RST ) + ) VHDL_DUT_U ( + .CLK (CLK ), + .RST (RST ), + .RST_DONE (RST_DONE ), + + .INPUT_VLD (INPUT_VLD ), + .INPUT (INPUT ), + + .READ_REQ (READ_REQ ), + .READ_ADDR (READ_ADDR ), + + .READ_BOX_VLD (READ_BOX_VLD), + .READ_BOX (READ_BOX ) + ); +endmodule diff --git a/comp/debug/histogramer/uvm/tbench/env/env.sv b/comp/debug/histogramer/uvm/tbench/env/env.sv new file mode 100644 index 000000000..e1ef8089b --- /dev/null +++ b/comp/debug/histogramer/uvm/tbench/env/env.sv @@ -0,0 +1,146 @@ +//-- env.sv: Verification environment +//-- Copyright (C) 2022 CESNET z. s. p. o. +//-- Author: Lukas Nevrkla + +//-- SPDX-License-Identifier: BSD-3-Clause + +// Environment for functional verification of encode. +// This environment containts two mii agents. 
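// Concretely, it wraps an active logic_vector_mvb RX environment driving request
// words, a passive logic_vector_mvb TX environment observing read responses, a reset
// agent, MVB coverage collectors, a virtual sequencer and a scoreboard that holds
// the reference model.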
+class env #( + INPUT_WIDTH, + BOX_WIDTH, + BOX_CNT, + READ_PRIOR, + CLEAR_BY_READ, + CLEAR_BY_RST, + DEVICE, + REQ_WIDTH, + RESP_WIDTH, + ADDR_WIDTH +) extends uvm_env; + + `uvm_component_param_utils(uvm_histogramer::env #( + INPUT_WIDTH, + BOX_WIDTH, + BOX_CNT, + READ_PRIOR, + CLEAR_BY_READ, + CLEAR_BY_RST, + DEVICE, + REQ_WIDTH, + RESP_WIDTH, + ADDR_WIDTH + )); + + uvm_logic_vector_mvb::env_rx #(1, REQ_WIDTH) req_env; + uvm_logic_vector_mvb::config_item cfg_req; + uvm_logic_vector_mvb::env_tx #(1, RESP_WIDTH) resp_env; + uvm_logic_vector_mvb::config_item cfg_resp; + + uvm_reset::agent m_reset; + uvm_reset::config_item m_config_reset; + + uvm_histogramer::virt_sequencer #( + INPUT_WIDTH, + BOX_WIDTH, + BOX_CNT, + READ_PRIOR, + CLEAR_BY_READ, + CLEAR_BY_RST, + DEVICE, + REQ_WIDTH, + RESP_WIDTH, + ADDR_WIDTH + ) vscr; + + scoreboard #( + INPUT_WIDTH, + BOX_WIDTH, + BOX_CNT, + READ_PRIOR, + CLEAR_BY_READ, + CLEAR_BY_RST, + DEVICE, + REQ_WIDTH, + RESP_WIDTH, + ADDR_WIDTH + ) m_scoreboard; + + uvm_mvb::coverage #(1, REQ_WIDTH) m_cover_req; + uvm_mvb::coverage #(1, RESP_WIDTH) m_cover_resp; + + // Constructor of environment. + function new(string name, uvm_component parent); + super.new(name, parent); + endfunction + + // Create base components of environment. + function void build_phase(uvm_phase phase); + + m_cover_req = new("m_cover_req"); + m_cover_resp = new("m_cover_resp"); + + cfg_req = new; + cfg_resp = new; + + cfg_req.active = UVM_ACTIVE; + cfg_resp.active = UVM_PASSIVE; + + cfg_req.interface_name = "vif_req"; + cfg_resp.interface_name = "vif_resp"; + + m_config_reset = new; + m_config_reset.active = UVM_ACTIVE; + m_config_reset.interface_name = "vif_reset"; + + uvm_config_db #(uvm_reset::config_item)::set(this, "m_reset", "m_config", m_config_reset); + m_reset = uvm_reset::agent::type_id::create("m_reset", this); + + uvm_config_db #(uvm_logic_vector_mvb::config_item)::set(this, "req_env", "m_config", cfg_req); + uvm_config_db #(uvm_logic_vector_mvb::config_item)::set(this, "resp_env", "m_config", cfg_resp); + + req_env = uvm_logic_vector_mvb::env_rx #(1, REQ_WIDTH) ::type_id::create("req_env", this); + resp_env = uvm_logic_vector_mvb::env_tx #(1, RESP_WIDTH)::type_id::create("resp_env", this); + + vscr = uvm_histogramer::virt_sequencer#( + INPUT_WIDTH, + BOX_WIDTH, + BOX_CNT, + READ_PRIOR, + CLEAR_BY_READ, + CLEAR_BY_RST, + DEVICE, + REQ_WIDTH, + RESP_WIDTH, + ADDR_WIDTH + )::type_id::create("vscr", this); + m_scoreboard = scoreboard#( + INPUT_WIDTH, + BOX_WIDTH, + BOX_CNT, + READ_PRIOR, + CLEAR_BY_READ, + CLEAR_BY_RST, + DEVICE, + REQ_WIDTH, + RESP_WIDTH, + ADDR_WIDTH + )::type_id::create("m_scoreboard", this); + endfunction + + // Connect agent's ports with ports from scoreboard. 
+ function void connect_phase(uvm_phase phase); + + req_env.analysis_port.connect(m_scoreboard.in_data); + resp_env.m_logic_vector_agent.analysis_port.connect(m_scoreboard.out_data); + + req_env.m_mvb_agent.analysis_port.connect(m_cover_req.analysis_export); + resp_env.m_mvb_agent.analysis_port.connect(m_cover_resp.analysis_export); + + m_reset.sync_connect(req_env.reset_sync); + m_reset.sync_connect(resp_env.reset_sync); + + vscr.m_reset = m_reset.m_sequencer; + vscr.req_sqr = req_env.m_sequencer; + endfunction +endclass diff --git a/comp/debug/histogramer/uvm/tbench/env/model.sv b/comp/debug/histogramer/uvm/tbench/env/model.sv new file mode 100644 index 000000000..c75a9327b --- /dev/null +++ b/comp/debug/histogramer/uvm/tbench/env/model.sv @@ -0,0 +1,112 @@ +//-- model.sv: Model of implementation +//-- Copyright (C) 2021 CESNET z. s. p. o. +//-- Author: Lukas Nevrkla + +//-- SPDX-License-Identifier: BSD-3-Clause + +`include "../tests/const.sv" + +class model #( + INPUT_WIDTH, + BOX_WIDTH, + BOX_CNT, + READ_PRIOR, + CLEAR_BY_READ, + CLEAR_BY_RST, + DEVICE, + REQ_WIDTH, + RESP_WIDTH, + ADDR_WIDTH +) extends uvm_component; + `uvm_component_param_utils(uvm_histogramer::model #( + INPUT_WIDTH, + BOX_WIDTH, + BOX_CNT, + READ_PRIOR, + CLEAR_BY_READ, + CLEAR_BY_RST, + DEVICE, + REQ_WIDTH, + RESP_WIDTH, + ADDR_WIDTH + )) + + typedef logic unsigned [INPUT_WIDTH - 1 : 0] value_t; + typedef logic unsigned [BOX_WIDTH - 1 : 0] box_t; + typedef logic unsigned [$clog2(BOX_CNT) - 1 : 0] addr_t; + + uvm_tlm_analysis_fifo #(uvm_logic_vector::sequence_item#(REQ_WIDTH)) in_data; + uvm_analysis_port #(uvm_logic_vector::sequence_item#(RESP_WIDTH)) out_data; + + box_t boxes[addr_t]; + + function new(string name = "model", uvm_component parent = null); + super.new(name, parent); + + in_data = new("in_data", this); + out_data = new("out_data", this); + endfunction + + function int unsigned used(); + int unsigned ret = 0; + ret |= (in_data.used() != 0); + return ret; + endfunction + + task run_phase(uvm_phase phase); + forever begin + run_model(); + end + endtask + + task run_model(); + uvm_logic_vector::sequence_item#(REQ_WIDTH) tr_in_data; + uvm_logic_vector::sequence_item#(RESP_WIDTH) tr_out_data; + + logic read_req; + value_t val; + addr_t addr; + box_t box; + + in_data.get(tr_in_data); + {read_req, val, addr} = tr_in_data.data; + + if (read_req) begin + box = read(addr); + + tr_out_data = uvm_logic_vector::sequence_item#(RESP_WIDTH)::type_id::create("tr_out_data"); + tr_out_data.data = 0; + tr_out_data.data = {box}; + out_data.write(tr_out_data); + end else begin + new_val(val); + end + endtask + + function void new_val(value_t val); + addr_t addr = val >> (INPUT_WIDTH - $clog2(BOX_CNT)); + + //$write("New val %d (addr %d)\n", val, addr); + + if ( ! boxes.exists(addr)) + boxes[addr] = 0; + + // Handle box overflow + if (boxes[addr] != 2 ** BOX_WIDTH - 1) + boxes[addr] ++; + endfunction + + function box_t read(addr_t addr); + box_t box = 0; + + if (boxes.exists(addr)) + box = boxes[addr]; + + if (CLEAR_BY_READ) + boxes[addr] = 0; + + //$write("Read addr %d, box %d\n", addr, box); + return box; + endfunction + +endclass diff --git a/comp/debug/histogramer/uvm/tbench/env/pkg.sv b/comp/debug/histogramer/uvm/tbench/env/pkg.sv new file mode 100644 index 000000000..07d47c549 --- /dev/null +++ b/comp/debug/histogramer/uvm/tbench/env/pkg.sv @@ -0,0 +1,22 @@ +//-- pkg.sv: Package for environment +//-- Copyright (C) 2022 CESNET z. s. p. o. 
+//-- Author: Lukas Nevrkla + +//-- SPDX-License-Identifier: BSD-3-Clause + +`ifndef UVM_HISTOGRAMER +`define uvm_histogramer + +package uvm_histogramer; + + `include "uvm_macros.svh" + import uvm_pkg::*; + + `include "sequencer.sv" + `include "model.sv" + `include "scoreboard.sv" + `include "env.sv" + +endpackage + +`endif diff --git a/comp/debug/histogramer/uvm/tbench/env/scoreboard.sv b/comp/debug/histogramer/uvm/tbench/env/scoreboard.sv new file mode 100644 index 000000000..74330dd35 --- /dev/null +++ b/comp/debug/histogramer/uvm/tbench/env/scoreboard.sv @@ -0,0 +1,151 @@ +//-- scoreboard.sv: Scoreboard for verification +//-- Copyright (C) 2022 CESNET z. s. p. o. +//-- Author: Lukas Nevrkla + +//-- SPDX-License-Identifier: BSD-3-Clause + +//TODO +`include "../tests/const.sv" + +class scoreboard #( + INPUT_WIDTH, + BOX_WIDTH, + BOX_CNT, + READ_PRIOR, + CLEAR_BY_READ, + CLEAR_BY_RST, + DEVICE, + REQ_WIDTH, + RESP_WIDTH, + ADDR_WIDTH +) extends uvm_scoreboard; + + `uvm_component_utils(uvm_histogramer::scoreboard #( + INPUT_WIDTH, + BOX_WIDTH, + BOX_CNT, + READ_PRIOR, + CLEAR_BY_READ, + CLEAR_BY_RST, + DEVICE, + REQ_WIDTH, + RESP_WIDTH, + ADDR_WIDTH + )) + // Analysis components. + uvm_analysis_export #(uvm_logic_vector::sequence_item#(REQ_WIDTH)) in_data; + uvm_analysis_export #(uvm_logic_vector::sequence_item#(RESP_WIDTH)) out_data; + + uvm_tlm_analysis_fifo #(uvm_logic_vector::sequence_item#(RESP_WIDTH)) dut_data; + uvm_tlm_analysis_fifo #(uvm_logic_vector::sequence_item#(RESP_WIDTH)) model_data; + + model #( + INPUT_WIDTH, + BOX_WIDTH, + BOX_CNT, + READ_PRIOR, + CLEAR_BY_READ, + CLEAR_BY_RST, + DEVICE, + REQ_WIDTH, + RESP_WIDTH, + ADDR_WIDTH + ) m_model; + + string msg = "\n"; + local int unsigned compared_data = 0; + local int unsigned errors_data = 0; + + // Contructor of scoreboard. 
+ function new(string name, uvm_component parent); + super.new(name, parent); + + in_data = new("in_data", this); + out_data = new("out_data", this); + + dut_data = new("dut_data", this); + model_data = new("model_data", this); + endfunction + + function void build_phase(uvm_phase phase); + m_model = model#( + INPUT_WIDTH, + BOX_WIDTH, + BOX_CNT, + READ_PRIOR, + CLEAR_BY_READ, + CLEAR_BY_RST, + DEVICE, + REQ_WIDTH, + RESP_WIDTH, + ADDR_WIDTH + )::type_id::create("m_model", this); + endfunction + + function int unsigned used(); + int unsigned ret = 0; + + ret |= (dut_data.used() != 0); + ret |= (model_data.used() != 0); + ret |= (m_model.used() != 0); + + return ret; + endfunction + + function void connect_phase(uvm_phase phase); + in_data.connect(m_model.in_data.analysis_export); + m_model.out_data.connect(model_data.analysis_export); + out_data.connect(dut_data.analysis_export); + endfunction + + + task run_phase(uvm_phase phase); + fork + compare_data(); + join + endtask + + function automatic string resp_to_string(logic [RESP_WIDTH - 1 : 0] resp); + string res = ""; + logic [BOX_WIDTH - 1 : 0] box; + + box = resp; + + $swrite(res, "Box value = %d\n", box); + return res; + endfunction + + task compare_data(); + uvm_logic_vector::sequence_item#(RESP_WIDTH) tr_dut_data; + uvm_logic_vector::sequence_item#(RESP_WIDTH) tr_model_data; + forever begin + + model_data.get(tr_model_data); + dut_data.get(tr_dut_data); + compared_data++; + + if (tr_model_data.compare(tr_dut_data) == 0) begin + errors_data++; + $swrite(msg, "\Output does'nt match\n\tModel:\n%s\n\n\tDUT:\n%s", resp_to_string(tr_model_data.data), resp_to_string(tr_dut_data.data)); + `uvm_fatal(get_type_name(), $sformatf("%s", msg)) + end + end + endtask + + virtual function void report_phase(uvm_phase phase); + + $swrite(msg, "%s\n\n--- STATUS ---\n", msg); + $swrite(msg, "%sData: Compared/errors: %0d/%0d \n", msg, compared_data, errors_data); + $swrite(msg, "%s\n", msg); + $swrite(msg, "%sCount of items inside dut data fifo: %0d \n", msg, dut_data.used()); + $swrite(msg, "%sCount of items inside model data fifo: %0d \n", msg, model_data.used()); + $swrite(msg, "%sErrors: %0d \n", msg, errors_data); + + if (errors_data == 0 && this.used() == 0) begin + `uvm_info(get_type_name(), $sformatf("%s\n\n\t---------------------------------------\n\t---- VERIFICATION SUCCESS ----\n\t---------------------------------------", msg), UVM_NONE) + end else begin + `uvm_info(get_type_name(), $sformatf("%s\n\n\t---------------------------------------\n\t---- VERIFICATION FAIL ----\n\t---------------------------------------", msg), UVM_NONE) + end + endfunction + +endclass diff --git a/comp/debug/histogramer/uvm/tbench/env/sequencer.sv b/comp/debug/histogramer/uvm/tbench/env/sequencer.sv new file mode 100644 index 000000000..df99bd80f --- /dev/null +++ b/comp/debug/histogramer/uvm/tbench/env/sequencer.sv @@ -0,0 +1,42 @@ +// sequencer.sv: Virtual sequencer +// Copyright (C) 2023 CESNET z. s. p. o. 
+// Author: Lukas Nevrkla + +// SPDX-License-Identifier: BSD-3-Clause + + +`include "../tests/const.sv" + +class virt_sequencer #( + INPUT_WIDTH, + BOX_WIDTH, + BOX_CNT, + READ_PRIOR, + CLEAR_BY_READ, + CLEAR_BY_RST, + DEVICE, + REQ_WIDTH, + RESP_WIDTH, + ADDR_WIDTH +) extends uvm_sequencer; + `uvm_component_param_utils(uvm_histogramer::virt_sequencer #( + INPUT_WIDTH, + BOX_WIDTH, + BOX_CNT, + READ_PRIOR, + CLEAR_BY_READ, + CLEAR_BY_RST, + DEVICE, + REQ_WIDTH, + RESP_WIDTH, + ADDR_WIDTH + )) + + uvm_reset::sequencer m_reset; + uvm_logic_vector::sequencer #(REQ_WIDTH) req_sqr; + + function new(string name = "virt_sequencer", uvm_component parent); + super.new(name, parent); + endfunction + +endclass diff --git a/comp/debug/histogramer/uvm/tbench/testbench.sv b/comp/debug/histogramer/uvm/tbench/testbench.sv new file mode 100644 index 000000000..f48bdf544 --- /dev/null +++ b/comp/debug/histogramer/uvm/tbench/testbench.sv @@ -0,0 +1,117 @@ +//-- tbench.sv: Testbench +//-- Copyright (C) 2022 CESNET z. s. p. o. +//-- Author: Lukas Nevrkla + +//-- SPDX-License-Identifier: BSD-3-Clause + +import uvm_pkg::*; +`include "uvm_macros.svh" +import test::*; + +module testbench #( + int unsigned INPUT_WIDTH, + int unsigned BOX_WIDTH, + int unsigned BOX_CNT, + logic READ_PRIOR, + logic CLEAR_BY_READ, + logic CLEAR_BY_RST, + string DEVICE +); + + localparam int unsigned REQ_WIDTH = 1 + INPUT_WIDTH + $clog2(BOX_CNT); + localparam int unsigned RESP_WIDTH = BOX_WIDTH; + + localparam int unsigned ADDR_WIDTH = $clog2(BOX_CNT); + + typedef uvm_component_registry#(test::ex_test #( + INPUT_WIDTH, + BOX_WIDTH, + BOX_CNT, + READ_PRIOR, + CLEAR_BY_READ, + CLEAR_BY_RST, + DEVICE, + REQ_WIDTH, + RESP_WIDTH, + ADDR_WIDTH + ), "test::ex_test") type_id; + + // ------------------------------------------------------------------------------------------------------------------------------------------------------------------- + // Signals + logic CLK = 0; + + // ------------------------------------------------------------------------------------------------------------------------------------------------------------------- + // Interfaces + reset_if reset(CLK); + + mvb_if #(1, REQ_WIDTH) mvb_req (CLK); + mvb_if #(1, RESP_WIDTH) mvb_resp (CLK); + + // ------------------------------------------------------------------------------------------------------------------------------------------------------------------- + // Define clock period + always #(CLK_PERIOD) CLK = ~CLK; + + // ------------------------------------------------------------------------------------------------------------------------------------------------------------------- + // Start of tests + initial begin + uvm_root m_root; + // Configuration of database + uvm_config_db#(virtual reset_if)::set(null, "", "vif_reset", reset); + + uvm_config_db#(virtual mvb_if #(1, REQ_WIDTH)) ::set(null, "", "vif_req", mvb_req); + uvm_config_db#(virtual mvb_if #(1, RESP_WIDTH))::set(null, "", "vif_resp", mvb_resp); + + m_root = uvm_root::get(); + m_root.finish_on_completion = 0; + m_root.set_report_id_action_hier("ILLEGALNAME", UVM_NO_ACTION); + + // Stop reporting for us unusefull information + uvm_config_db#(int) ::set(null, "", "recording_detail", 0); + uvm_config_db#(uvm_bitstream_t)::set(null, "", "recording_detail", 0); + + run_test(); + $write("Verification finished successfully!\n"); + $stop(2); + end + + // ------------------------------------------------------------------------------------------------------------------------------------------------------------------- 
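    // The request word carried on mvb_req.DATA is the concatenation
    // {read_req, value, address}, which is why REQ_WIDTH above is
    // 1 + INPUT_WIDTH + $clog2(BOX_CNT); dut.sv unpacks it in the same order and
    // tests/sequence.sv builds it the same way. The helper below is only an
    // illustrative sketch of that layout and is not used elsewhere in the bench:
    function automatic logic [REQ_WIDTH - 1 : 0] pack_write_req(logic [INPUT_WIDTH - 1 : 0] value);
        // read_req = 0 selects a write; the address field is ignored for writes
        return {1'b0, value, {ADDR_WIDTH{1'b0}}};
    endfunction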
+ // DUT + DUT #( + .INPUT_WIDTH (INPUT_WIDTH), + .BOX_WIDTH (BOX_WIDTH), + .BOX_CNT (BOX_CNT), + .READ_PRIOR (READ_PRIOR), + .CLEAR_BY_READ(CLEAR_BY_READ), + .CLEAR_BY_RST (CLEAR_BY_RST), + .DEVICE (DEVICE), + .REQ_WIDTH (REQ_WIDTH), + .RESP_WIDTH (RESP_WIDTH) + ) DUT_U ( + .CLK (CLK), + .RST (reset.RESET), + .mvb_req (mvb_req), + .mvb_resp (mvb_resp) + ); + + // ------------------------------------------------------------------------------------------------------------------------------------------------------------------- + // Properties + + mvb_property #( + .ITEMS (1), + .ITEM_WIDTH (REQ_WIDTH) + ) + property_wr ( + .RESET (reset.RESET), + .vif (mvb_req) + ); + + mvb_property #( + .ITEMS (1), + .ITEM_WIDTH (RESP_WIDTH) + ) + property_rd( + .RESET (reset.RESET), + .vif (mvb_resp) + ); + +endmodule diff --git a/comp/debug/histogramer/uvm/tbench/tests/const.sv b/comp/debug/histogramer/uvm/tbench/tests/const.sv new file mode 100644 index 000000000..7ecad1bfe --- /dev/null +++ b/comp/debug/histogramer/uvm/tbench/tests/const.sv @@ -0,0 +1,36 @@ +//-- const.sv: Package with global constants +//-- Copyright (C) 2022 CESNET z. s. p. o. +//-- Author(s): Lukas Nevrkla + +//-- SPDX-License-Identifier: BSD-3-Clause + +`ifndef CONST +`define CONST +/* + // DUT constants + parameter INPUT_WIDTH = 8; + parameter BOX_WIDTH = 32; + parameter BOX_CNT = 32; + parameter READ_PRIOR = 0; + parameter CLEAR_BY_READ = 1; + parameter CLEAR_BY_RST = 1; + parameter DEVICE = "none"; +*/ + // Test constants + parameter CLK_PERIOD = 1ns; + parameter RESET_CLKS = 10; + parameter DEBUG_TIME = 2000; + + parameter READ_OCCURENCE = 1; + parameter WRITE_OCCURENCE = 100; + parameter RAND_SEQ_REPEATS = 5000; +/* + parameter REQ_WIDTH = 1 + INPUT_WIDTH + $clog2(BOX_CNT); + parameter RESP_WIDTH = BOX_WIDTH; +*/ +/* + typedef logic unsigned [INPUT_WIDTH - 1 : 0] value_t; + typedef logic unsigned [BOX_WIDTH - 1 : 0] box_t; + typedef logic unsigned [$clog2(BOX_CNT) - 1 : 0] addr_t; +*/ +`endif diff --git a/comp/debug/histogramer/uvm/tbench/tests/pkg.sv b/comp/debug/histogramer/uvm/tbench/tests/pkg.sv new file mode 100644 index 000000000..62e21a4f7 --- /dev/null +++ b/comp/debug/histogramer/uvm/tbench/tests/pkg.sv @@ -0,0 +1,22 @@ +//-- pkg.sv: Test package +//-- Copyright (C) 2022 CESNET z. s. p. o. +//-- Author: Lukas Nevrkla + +//-- SPDX-License-Identifier: BSD-3-Clause + +`ifndef UVM_HISTOGRAMER_TEST +`define UVM_HISTOGRAMER_TEST + +package test; + + `include "uvm_macros.svh" + import uvm_pkg::*; + + `include "const.sv" + + `include "sequence.sv" + `include "sequence_virt.sv" + `include "test.sv" + +endpackage +`endif diff --git a/comp/debug/histogramer/uvm/tbench/tests/sequence.sv b/comp/debug/histogramer/uvm/tbench/tests/sequence.sv new file mode 100644 index 000000000..206de299f --- /dev/null +++ b/comp/debug/histogramer/uvm/tbench/tests/sequence.sv @@ -0,0 +1,90 @@ +//-- sequence.sv: Package with global constants +//-- Copyright (C) 2022 CESNET z. s. p. o. 
+//-- Author(s): Lukas Nevrkla + +//-- SPDX-License-Identifier: BSD-3-Clause + +`include "const.sv" + +class sequence_base #(DATA_WIDTH, INPUT_WIDTH, ADDR_WIDTH) extends uvm_sequence #(uvm_logic_vector::sequence_item #(DATA_WIDTH)); + `uvm_object_utils(test::sequence_base#(DATA_WIDTH, INPUT_WIDTH, ADDR_WIDTH)) + + uvm_logic_vector::sequence_item #(DATA_WIDTH) req; + + // Constructor - creates new instance of this class + function new(string name = "sequence_base"); + super.new(name); + + req = uvm_logic_vector::sequence_item#(DATA_WIDTH)::type_id::create("req"); + endfunction + + task write_req(logic [INPUT_WIDTH - 1 : 0] val); + logic read_req = 0; + logic [ADDR_WIDTH - 1 : 0] addr = 0; + + start_item(req); + req.data = {read_req, val, addr}; + finish_item(req); + endtask + + task read_req(logic [ADDR_WIDTH - 1 : 0] addr); + logic read_req = 1; + logic [INPUT_WIDTH - 1 : 0] val = 0; + + start_item(req); + req.data = {read_req, val, addr}; + finish_item(req); + endtask +endclass + +class sequence_read #(DATA_WIDTH, INPUT_WIDTH, ADDR_WIDTH) extends sequence_base #(DATA_WIDTH, INPUT_WIDTH, ADDR_WIDTH); + `uvm_object_utils(test::sequence_read#(DATA_WIDTH, INPUT_WIDTH, ADDR_WIDTH)) + + // Constructor - creates new instance of this class + function new(string name = "sequence_simple_const"); + super.new(name); + endfunction + + // Generates transactions + task body; + for (int addr = 0 ; addr < 2 ** ADDR_WIDTH; addr ++) + read_req(addr); + + // Test clear by read + for (int addr = 0 ; addr < 2 ** ADDR_WIDTH; addr ++) + read_req(addr); + endtask + +endclass + +class sequence_rand #(DATA_WIDTH, INPUT_WIDTH, ADDR_WIDTH, READ_OCCURENCE, WRITE_OCCURENCE) extends sequence_base #(DATA_WIDTH, INPUT_WIDTH, ADDR_WIDTH); + `uvm_object_utils(test::sequence_rand#(DATA_WIDTH, INPUT_WIDTH, ADDR_WIDTH, READ_OCCURENCE, WRITE_OCCURENCE)) + + rand bit read; + rand logic [ADDR_WIDTH - 1 : 0] addr; + rand logic [INPUT_WIDTH - 1 : 0] val; + + constraint constr { + read dist { + 0 := WRITE_OCCURENCE, + 1 := READ_OCCURENCE + }; + } + + function new(string name = "sequence_random"); + super.new(name); + endfunction + + task body; + req = uvm_logic_vector::sequence_item#(DATA_WIDTH)::type_id::create("req"); + `uvm_info(get_full_name(), "sequence_rand is running", UVM_DEBUG) + + if (read) + read_req(addr); + else + write_req(val); + endtask + +endclass + + diff --git a/comp/debug/histogramer/uvm/tbench/tests/sequence_virt.sv b/comp/debug/histogramer/uvm/tbench/tests/sequence_virt.sv new file mode 100644 index 000000000..f58200117 --- /dev/null +++ b/comp/debug/histogramer/uvm/tbench/tests/sequence_virt.sv @@ -0,0 +1,94 @@ +// sequence.sv: Virtual sequence +// Copyright (C) 2023 CESNET z. s. p. o. 
+// Author(s): Lukas Nevrkla + +// SPDX-License-Identifier: BSD-3-Clause + +class virt_sequence #( + INPUT_WIDTH, + BOX_WIDTH, + BOX_CNT, + READ_PRIOR, + CLEAR_BY_READ, + CLEAR_BY_RST, + DEVICE, + REQ_WIDTH, + RESP_WIDTH, + ADDR_WIDTH +) extends uvm_sequence; + + `uvm_object_param_utils(test::virt_sequence #( + INPUT_WIDTH, + BOX_WIDTH, + BOX_CNT, + READ_PRIOR, + CLEAR_BY_READ, + CLEAR_BY_RST, + DEVICE, + REQ_WIDTH, + RESP_WIDTH, + ADDR_WIDTH + )) + `uvm_declare_p_sequencer(uvm_histogramer::virt_sequencer #( + INPUT_WIDTH, + BOX_WIDTH, + BOX_CNT, + READ_PRIOR, + CLEAR_BY_READ, + CLEAR_BY_RST, + DEVICE, + REQ_WIDTH, + RESP_WIDTH, + ADDR_WIDTH + )) + + uvm_reset::sequence_start m_reset; + test::sequence_read #(REQ_WIDTH, INPUT_WIDTH, ADDR_WIDTH) seq_read; + test::sequence_rand #(REQ_WIDTH, INPUT_WIDTH, ADDR_WIDTH, READ_OCCURENCE, WRITE_OCCURENCE) seq_rand; + + function new (string name = "virt_sequence"); + super.new(name); + endfunction + + virtual function void init(); + m_reset = uvm_reset::sequence_start::type_id::create("m_reset"); + + seq_read = test::sequence_read#(REQ_WIDTH, INPUT_WIDTH, ADDR_WIDTH)::type_id::create("seq_cread"); + seq_rand = test::sequence_rand#(REQ_WIDTH, INPUT_WIDTH, ADDR_WIDTH, READ_OCCURENCE, WRITE_OCCURENCE)::type_id::create("seq_rand"); + endfunction + + virtual task run_reset(); + m_reset.randomize(); + m_reset.start(p_sequencer.m_reset); + endtask + + task run_seq_read(); + seq_read.start(p_sequencer.req_sqr); + endtask + + task run_seq_rand(); + for(int i = 0; i < RAND_SEQ_REPEATS; i++) begin + seq_rand.randomize(); + seq_rand.start(p_sequencer.req_sqr); + end + endtask + + task body(); + init(); + + fork + run_reset(); + join_none + + #(100ns); + + fork + run_seq_rand(); + join + + fork + run_seq_read(); + join + endtask + +endclass diff --git a/comp/debug/histogramer/uvm/tbench/tests/test.sv b/comp/debug/histogramer/uvm/tbench/tests/test.sv new file mode 100644 index 000000000..d93286151 --- /dev/null +++ b/comp/debug/histogramer/uvm/tbench/tests/test.sv @@ -0,0 +1,132 @@ +//-- test.sv: Verification test +//-- Copyright (C) 2022 CESNET z. s. p. o. 
+//-- Author: Lukas Nevrkla + +//-- SPDX-License-Identifier: BSD-3-Clause + + +class ex_test #( + INPUT_WIDTH, + BOX_WIDTH, + BOX_CNT, + READ_PRIOR, + CLEAR_BY_READ, + CLEAR_BY_RST, + DEVICE, + REQ_WIDTH, + RESP_WIDTH, + ADDR_WIDTH + ) extends uvm_test; + typedef uvm_component_registry#(test::ex_test #( + INPUT_WIDTH, + BOX_WIDTH, + BOX_CNT, + READ_PRIOR, + CLEAR_BY_READ, + CLEAR_BY_RST, + DEVICE, + REQ_WIDTH, + RESP_WIDTH, + ADDR_WIDTH + ), "test::ex_test") type_id; + + bit timeout; + uvm_histogramer::env #( + INPUT_WIDTH, + BOX_WIDTH, + BOX_CNT, + READ_PRIOR, + CLEAR_BY_READ, + CLEAR_BY_RST, + DEVICE, + REQ_WIDTH, + RESP_WIDTH, + ADDR_WIDTH + ) m_env; + + // ------------------------------------------------------------------------ + // Functions + function new(string name, uvm_component parent); + super.new(name, parent); + endfunction + + function void build_phase(uvm_phase phase); + m_env = uvm_histogramer::env#( + INPUT_WIDTH, + BOX_WIDTH, + BOX_CNT, + READ_PRIOR, + CLEAR_BY_READ, + CLEAR_BY_RST, + DEVICE, + REQ_WIDTH, + RESP_WIDTH, + ADDR_WIDTH + )::type_id::create("m_env", this); + endfunction + + task test_wait_timeout(int unsigned time_length); + #(time_length*1us); + endtask + + task test_wait_result(); + do begin + #(600ns); + end while (m_env.m_scoreboard.used() != 0); + timeout = 0; + endtask + + // ------------------------------------------------------------------------ + // Create environment and Run sequences o their sequencers + task run_seq_rx(uvm_phase phase); + virt_sequence #( + INPUT_WIDTH, + BOX_WIDTH, + BOX_CNT, + READ_PRIOR, + CLEAR_BY_READ, + CLEAR_BY_RST, + DEVICE, + REQ_WIDTH, + RESP_WIDTH, + ADDR_WIDTH + ) m_vseq; + + phase.raise_objection(this, "Start of rx sequence"); + + m_vseq = virt_sequence#( + INPUT_WIDTH, + BOX_WIDTH, + BOX_CNT, + READ_PRIOR, + CLEAR_BY_READ, + CLEAR_BY_RST, + DEVICE, + REQ_WIDTH, + RESP_WIDTH, + ADDR_WIDTH + )::type_id::create("m_vseq"); + + assert(m_vseq.randomize()); + m_vseq.start(m_env.vscr); + + timeout = 1; + fork + test_wait_timeout(DEBUG_TIME); + test_wait_result(); + join_any; + + phase.drop_objection(this, "End of rx sequence"); + endtask + + virtual task run_phase(uvm_phase phase); + run_seq_rx(phase); + endtask + + function void report_phase(uvm_phase phase); + `uvm_info(this.get_full_name(), {"\n\tTEST : ", this.get_type_name(), " END\n"}, UVM_NONE); + if (timeout) begin + `uvm_error(this.get_full_name(), "\n\t===================================================\n\tTIMEOUT SOME PACKET STUCK IN DESIGN\n\t===================================================\n\n"); + end + endfunction +endclass diff --git a/comp/debug/histogramer/uvm/top_level.fdo b/comp/debug/histogramer/uvm/top_level.fdo new file mode 100644 index 000000000..57541d11f --- /dev/null +++ b/comp/debug/histogramer/uvm/top_level.fdo @@ -0,0 +1,45 @@ +# top_level.fdo: Top Level Function simulation file +# Copyright (C) 2022 CESNET z. s. p. o. +# Author(s): Lukas Nevrkla +# +# SPDX-License-Identifier: BSD-3-Clause + +set FIRMWARE_BASE "../../../../" + +set DUT_BASE ".." +set DUT_UVM_BASE "." 
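The random stimulus above is heavily write-biased: the ``dist`` constraint in ``sequence_rand`` selects a read with weight ``READ_OCCURENCE = 1`` against ``WRITE_OCCURENCE = 100``, and the virtual sequence repeats it ``RAND_SEQ_REPEATS = 5000`` times, so roughly 50 reads are interleaved with the writes before the final read-out pass. A quick Python sanity check of that mix (illustrative only, not part of the testbench):

.. code-block:: python

    import random

    READ_OCCURENCE = 1
    WRITE_OCCURENCE = 100
    RAND_SEQ_REPEATS = 5000

    # Model the SystemVerilog dist constraint with weighted sampling
    reads = sum(random.choices([1, 0],
                               weights=[READ_OCCURENCE, WRITE_OCCURENCE],
                               k=RAND_SEQ_REPEATS))
    expected = RAND_SEQ_REPEATS * READ_OCCURENCE / (READ_OCCURENCE + WRITE_OCCURENCE)
    print(f"{reads} reads out of {RAND_SEQ_REPEATS} transactions (expected ~{expected:.0f})")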
+ +set TB_FILE "./tbench/testbench.sv" +set SIG_FILE "./signals_sig.fdo" + +lappend COMPONENTS \ + [list "DUT" $DUT_BASE "FULL"]\ + [list "DUT_UVM" $DUT_UVM_BASE "FULL"]\ + +# Disable Code Coverage +set SIM_FLAGS(CODE_COVERAGE) false + +set SIM_FLAGS(UVM_ENABLE) true +set SIM_FLAGS(UVM_TEST) "test::ex_test" +set SIM_FLAGS(DEBUG) true +#set SIM_FLAGS(UVM_TEST) "test::speed" +set SIM_FLAGS(UVM_VERBOSITY) UVM_NONE + +# Global include file for compilation +source "$FIRMWARE_BASE/build/Modelsim.inc.fdo" + +# Suppress warnings from std_arith library +puts "Std_arith Warnings - Disabled" +set StdArithNoWarnings 1 + +# Suppress warnings from numeric_std library +puts "Numeric_std Warnings - Disabled" +set NumericStdNoWarnings 1 + +nb_sim_run + +# Reports +if {$SIM_FLAGS(CODE_COVERAGE)} { + coverage save -instance /testbench/DUT_U -assert -directive -cvg -code bcefst -verbose actual.ucdb + vcover merge final.ucdb final.ucdb actual.ucdb +} From 6f1dbee4f447d5bebd8e0c89af8b8a70109bafbe Mon Sep 17 00:00:00 2001 From: xnevk03 Date: Thu, 5 Sep 2024 13:05:04 +0200 Subject: [PATCH 04/17] latency_meter [FEATURE]: Add metadata interfaces --- comp/debug/latency_meter/latency_meter.vhd | 116 +++++++++++++-------- 1 file changed, 70 insertions(+), 46 deletions(-) diff --git a/comp/debug/latency_meter/latency_meter.vhd b/comp/debug/latency_meter/latency_meter.vhd index f685a62ff..8045f3880 100644 --- a/comp/debug/latency_meter/latency_meter.vhd +++ b/comp/debug/latency_meter/latency_meter.vhd @@ -17,6 +17,9 @@ generic ( DATA_WIDTH : integer; -- Defines max. number of parallel events that can be measured MAX_PARALEL_EVENTS : integer := 1; + + START_META_WIDTH : integer := 1; + END_META_WIDTH : integer := 1; DEVICE : string := "ULTRASCALE" ); port( @@ -24,10 +27,15 @@ port( RST : in std_logic; START_EVENT : in std_logic; + START_EVENT_META : in std_logic_vector(START_META_WIDTH - 1 downto 0) := (others => '0'); + END_EVENT : in std_logic; + END_EVENT_META : in std_logic_vector(END_META_WIDTH - 1 downto 0) := (others => '0'); LATENCY_VLD : out std_logic; LATENCY : out std_logic_vector(DATA_WIDTH - 1 downto 0); + LATENCY_START_META : out std_logic_vector(START_META_WIDTH - 1 downto 0); + LATENCY_END_META : out std_logic_vector(END_META_WIDTH - 1 downto 0); -- Signals that no more paralel events can be curently measured FIFO_FULL : out std_logic; @@ -38,7 +46,21 @@ end entity; architecture FULL of LATENCY_METER is + type OUTPUT_T is record + start_event : std_logic; + end_event : std_logic; + tick_cnt : std_logic_vector(DATA_WIDTH - 1 downto 0); + start_meta : std_logic_vector(START_META_WIDTH - 1 downto 0); + end_meta : std_logic_vector(END_META_WIDTH - 1 downto 0); + end record; + + type OUTPUT_ARRAY_T is array (integer range <>) of OUTPUT_T; + constant DATA_MAX : std_logic_vector(DATA_WIDTH - 1 downto 0) := (others => '1'); + constant OUTPUT_STAGES : positive := 3; + + signal fifo_out : std_logic_vector(DATA_WIDTH + START_META_WIDTH - 1 downto 0); + signal start_meta_i : std_logic_vector(START_META_WIDTH - 1 downto 0); signal tick_cnt : std_logic_vector(DATA_WIDTH - 1 downto 0); signal start_ticks : std_logic_vector(DATA_WIDTH - 1 downto 0); @@ -47,17 +69,9 @@ architecture FULL of LATENCY_METER is signal fifo_empty : std_logic; signal zero_delay : std_logic; - signal end_event_delay_0 : std_logic; - signal end_event_delay_1 : std_logic; - signal end_event_delay_2 : std_logic; - - signal start_event_delay_0 : std_logic; - signal start_event_delay_1 : std_logic; - signal start_event_delay_2 : std_logic; - - signal 
tick_cnt_delay_0 : std_logic_vector(DATA_WIDTH - 1 downto 0); - signal tick_cnt_delay_1 : std_logic_vector(DATA_WIDTH - 1 downto 0); - signal tick_cnt_delay_2 : std_logic_vector(DATA_WIDTH - 1 downto 0); + signal output_in : OUTPUT_ARRAY_T(OUTPUT_STAGES - 1 downto 0); + signal output_out : OUTPUT_ARRAY_T(OUTPUT_STAGES - 1 downto 0); + signal fin_out : OUTPUT_T; begin @@ -67,24 +81,26 @@ begin fifo_i : entity work.FIFOX generic map ( - DATA_WIDTH => DATA_WIDTH, + DATA_WIDTH => DATA_WIDTH + START_META_WIDTH, ITEMS => MAX_PARALEL_EVENTS, DEVICE => DEVICE ) port map ( CLK => CLK, RESET => RST, - - DI => tick_cnt, + + DI => (tick_cnt, START_EVENT_META), WR => START_EVENT, FULL => FIFO_FULL, STATUS => FIFO_ITEMS, - - DO => start_ticks, - RD => end_event_delay_2, + + DO => fifo_out, + RD => fin_out.end_event, EMPTY => fifo_empty ); + (start_ticks, start_meta_i) <= fifo_out; + ------------------------- -- Combinational logic -- ------------------------- @@ -92,10 +108,10 @@ begin tick_limit <= '1' when (tick_cnt = DATA_MAX) else '0'; - tick_ovf <= '1' when (tick_cnt_delay_2 < start_ticks) else - '0'; + tick_ovf <= '1' when (fin_out.tick_cnt < start_ticks) else + '0'; - zero_delay <= start_event_delay_2 and end_event_delay_2 and fifo_empty; + zero_delay <= fin_out.start_event and fin_out.end_event and fifo_empty; --------------- -- Registers -- @@ -112,36 +128,44 @@ begin end if; end process; - latency_p : process(CLK) - begin - if (rising_edge(CLK)) then - LATENCY <= (others => '0') when (zero_delay = '1') else - std_logic_vector(unsigned(tick_cnt_delay_2) - unsigned(start_ticks)) when (tick_ovf = '0') else - std_logic_vector(unsigned(tick_cnt_delay_2) + unsigned(DATA_MAX) - unsigned(start_ticks) + 1); - end if; - end process; + output_g : for i in OUTPUT_STAGES - 1 downto 0 generate + output_p : process (CLK) + begin + if (rising_edge(CLK)) then + if (RST = '1') then + output_out(i).start_event <= '0'; + output_out(i).end_event <= '0'; + else + output_out(i) <= output_in(i); + end if; + end if; + end process; - latency_vld_p : process(CLK) - begin - if (rising_edge(CLK)) then - LATENCY_VLD <= end_event_delay_2; - end if; - end process; + output_copy_g : if i > 0 generate + output_in(i) <= output_out(i - 1); + end generate; + end generate; - delay_p : process(CLK) - begin - if (rising_edge(CLK)) then - end_event_delay_0 <= END_EVENT; - end_event_delay_1 <= end_event_delay_0; - end_event_delay_2 <= end_event_delay_1; + output_in(0).start_event <= START_EVENT; + output_in(0).start_meta <= start_meta_i; + + output_in(0).end_event <= END_EVENT; + output_in(0).end_meta <= END_EVENT_META; - start_event_delay_0 <= START_EVENT; - start_event_delay_1 <= start_event_delay_0; - start_event_delay_2 <= start_event_delay_1; + output_in(0).tick_cnt <= tick_cnt; - tick_cnt_delay_0 <= tick_cnt; - tick_cnt_delay_1 <= tick_cnt_delay_0; - tick_cnt_delay_2 <= tick_cnt_delay_1; + fin_out <= output_out(OUTPUT_STAGES - 1); + + latency_vld_p : process(CLK) + begin + if (rising_edge(CLK)) then + LATENCY_VLD <= fin_out.end_event; + LATENCY <= (others => '0') when (zero_delay = '1') else + std_logic_vector(unsigned(fin_out.tick_cnt) - unsigned(start_ticks)) when (tick_ovf = '0') else + std_logic_vector(unsigned(fin_out.tick_cnt) + unsigned(DATA_MAX) - unsigned(start_ticks) + 1); + + LATENCY_START_META <= fin_out.start_meta; + LATENCY_END_META <= fin_out.end_meta; end if; end process; From 0c74843947016ae8e782959b3d078dde1e5b7655 Mon Sep 17 00:00:00 2001 From: xnevk03 Date: Thu, 19 Sep 2024 19:34:30 +0200 Subject: [PATCH 
05/17] latency_meter [STYLE]: Fix white spaces --- comp/debug/latency_meter/latency_meter.vhd | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/comp/debug/latency_meter/latency_meter.vhd b/comp/debug/latency_meter/latency_meter.vhd index 8045f3880..0b132ebf9 100644 --- a/comp/debug/latency_meter/latency_meter.vhd +++ b/comp/debug/latency_meter/latency_meter.vhd @@ -88,12 +88,12 @@ begin port map ( CLK => CLK, RESET => RST, - + DI => (tick_cnt, START_EVENT_META), WR => START_EVENT, FULL => FIFO_FULL, STATUS => FIFO_ITEMS, - + DO => fifo_out, RD => fin_out.end_event, EMPTY => fifo_empty @@ -108,8 +108,8 @@ begin tick_limit <= '1' when (tick_cnt = DATA_MAX) else '0'; - tick_ovf <= '1' when (fin_out.tick_cnt < start_ticks) else - '0'; + tick_ovf <= '1' when (fin_out.tick_cnt < start_ticks) else + '0'; zero_delay <= fin_out.start_event and fin_out.end_event and fifo_empty; @@ -132,7 +132,7 @@ begin output_p : process (CLK) begin if (rising_edge(CLK)) then - if (RST = '1') then + if (RST = '1') then output_out(i).start_event <= '0'; output_out(i).end_event <= '0'; else @@ -148,20 +148,20 @@ begin output_in(0).start_event <= START_EVENT; output_in(0).start_meta <= start_meta_i; - + output_in(0).end_event <= END_EVENT; output_in(0).end_meta <= END_EVENT_META; output_in(0).tick_cnt <= tick_cnt; fin_out <= output_out(OUTPUT_STAGES - 1); - + latency_vld_p : process(CLK) - begin - if (rising_edge(CLK)) then + begin + if (rising_edge(CLK)) then LATENCY_VLD <= fin_out.end_event; - LATENCY <= (others => '0') when (zero_delay = '1') else - std_logic_vector(unsigned(fin_out.tick_cnt) - unsigned(start_ticks)) when (tick_ovf = '0') else + LATENCY <= (others => '0') when (zero_delay = '1') else + std_logic_vector(unsigned(fin_out.tick_cnt) - unsigned(start_ticks)) when (tick_ovf = '0') else std_logic_vector(unsigned(fin_out.tick_cnt) + unsigned(DATA_MAX) - unsigned(start_ticks) + 1); LATENCY_START_META <= fin_out.start_meta; From 63a0a1bf046e5d6f80e080c266b456c91aa33e77 Mon Sep 17 00:00:00 2001 From: xnevk03 Date: Mon, 9 Sep 2024 13:53:11 +0200 Subject: [PATCH 06/17] data_logger [BUGFIX]: Add register for internal reset to improve timing --- comp/debug/data_logger/data_logger.vhd | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/comp/debug/data_logger/data_logger.vhd b/comp/debug/data_logger/data_logger.vhd index f3d489d33..2b4612238 100644 --- a/comp/debug/data_logger/data_logger.vhd +++ b/comp/debug/data_logger/data_logger.vhd @@ -350,8 +350,9 @@ begin generic map( INPUT_WIDTH => VALUE_WIDTH(i), BOX_WIDTH => HIST_BOX_WIDTH(i), - BOX_CNT => HIST_BOX_CNT(i) - --READ_PRIOR => READ_PRIOR + BOX_CNT => HIST_BOX_CNT(i), + -- TODO: new value can be lost when read occurs! 
+ READ_PRIOR => true ) port map( CLK => CLK, @@ -380,8 +381,13 @@ begin -- RST management -- -------------------- - rst_intern <= RST or mi_ctrl_reg(CTRL_RST_BIT); - SW_RST <= mi_ctrl_reg(CTRL_RST_BIT); + rst_p : process(CLK) + begin + if (rising_edge(CLK)) then + rst_intern <= RST or mi_ctrl_reg(CTRL_RST_BIT); + SW_RST <= mi_ctrl_reg(CTRL_RST_BIT); + end if; + end process; rst_done_g : if (VALUE_CNT > 0) generate rst_done_intern <= and rst_done_vec; From 546dceaa5fc6f7ed748cd89717b92f59000e1085 Mon Sep 17 00:00:00 2001 From: xnevk03 Date: Mon, 23 Sep 2024 10:48:50 +0200 Subject: [PATCH 07/17] data_logger/sw [STYLE]: Move packages used only in mem_tester inside mem_tester folder and fix import statement --- comp/debug/data_logger/sw/graph_gen/__init__.py | 0 comp/debug/data_logger/sw/logger_tools/__init__.py | 0 comp/debug/data_logger/sw/pdf_gen/__init__.py | 0 .../sw/graph_gen => mem_tester/sw}/graph_gen.py | 0 .../sw/logger_tools => mem_tester/sw}/logger_tools.py | 0 .../{data_logger/sw/pdf_gen => mem_tester/sw}/pdf_gen.py | 0 comp/debug/mem_tester/sw/report_gen.py | 6 +++--- 7 files changed, 3 insertions(+), 3 deletions(-) delete mode 100644 comp/debug/data_logger/sw/graph_gen/__init__.py delete mode 100644 comp/debug/data_logger/sw/logger_tools/__init__.py delete mode 100644 comp/debug/data_logger/sw/pdf_gen/__init__.py rename comp/debug/{data_logger/sw/graph_gen => mem_tester/sw}/graph_gen.py (100%) rename comp/debug/{data_logger/sw/logger_tools => mem_tester/sw}/logger_tools.py (100%) rename comp/debug/{data_logger/sw/pdf_gen => mem_tester/sw}/pdf_gen.py (100%) diff --git a/comp/debug/data_logger/sw/graph_gen/__init__.py b/comp/debug/data_logger/sw/graph_gen/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/comp/debug/data_logger/sw/logger_tools/__init__.py b/comp/debug/data_logger/sw/logger_tools/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/comp/debug/data_logger/sw/pdf_gen/__init__.py b/comp/debug/data_logger/sw/pdf_gen/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/comp/debug/data_logger/sw/graph_gen/graph_gen.py b/comp/debug/mem_tester/sw/graph_gen.py similarity index 100% rename from comp/debug/data_logger/sw/graph_gen/graph_gen.py rename to comp/debug/mem_tester/sw/graph_gen.py diff --git a/comp/debug/data_logger/sw/logger_tools/logger_tools.py b/comp/debug/mem_tester/sw/logger_tools.py similarity index 100% rename from comp/debug/data_logger/sw/logger_tools/logger_tools.py rename to comp/debug/mem_tester/sw/logger_tools.py diff --git a/comp/debug/data_logger/sw/pdf_gen/pdf_gen.py b/comp/debug/mem_tester/sw/pdf_gen.py similarity index 100% rename from comp/debug/data_logger/sw/pdf_gen/pdf_gen.py rename to comp/debug/mem_tester/sw/pdf_gen.py diff --git a/comp/debug/mem_tester/sw/report_gen.py b/comp/debug/mem_tester/sw/report_gen.py index 4ae44d24d..787f13cbe 100644 --- a/comp/debug/mem_tester/sw/report_gen.py +++ b/comp/debug/mem_tester/sw/report_gen.py @@ -15,9 +15,9 @@ import nfb from mem_tester import MemTester from mem_logger.mem_logger import MemLogger -from logger_tools.logger_tools import LoggerTools -from graph_gen.graph_gen import GraphGen -from pdf_gen.pdf_gen import PDFGen +from logger_tools import LoggerTools +from graph_gen import GraphGen +from pdf_gen import PDFGen class ReportGen: From 741ef566c795735b8355c834f46c7b8394149b02 Mon Sep 17 00:00:00 2001 From: xnevk03 Date: Mon, 23 Sep 2024 10:54:35 +0200 Subject: [PATCH 08/17] mem_tester/sw [STYLE]: Add file header description, 
remove commented code, edit style of saved plots --- comp/debug/mem_tester/sw/graph_gen.py | 4 +- comp/debug/mem_tester/sw/logger_tools.py | 79 +----------------------- comp/debug/mem_tester/sw/pdf_gen.py | 3 +- 3 files changed, 7 insertions(+), 79 deletions(-) diff --git a/comp/debug/mem_tester/sw/graph_gen.py b/comp/debug/mem_tester/sw/graph_gen.py index 9a6dcc76a..c0b851bb9 100644 --- a/comp/debug/mem_tester/sw/graph_gen.py +++ b/comp/debug/mem_tester/sw/graph_gen.py @@ -1,6 +1,8 @@ #!/usr/bin/env python3 # Copyright (C) 2022 CESNET z. s. p. o. # Author(s): Lukas Nevrkla +# +# Package for simple graph generation import os import numpy as np @@ -39,7 +41,7 @@ def set_ylabel(self, label, index=None): def plot_save(self, file_name): for i in self.output: - plt.savefig(self.folder + file_name + i) + plt.savefig(self.folder + file_name + i, bbox_inches='tight') plt.close() def basic_plot(self, x, y, style='o-', index=None, colors=None, width=1): diff --git a/comp/debug/mem_tester/sw/logger_tools.py b/comp/debug/mem_tester/sw/logger_tools.py index c28c12962..07905c74c 100644 --- a/comp/debug/mem_tester/sw/logger_tools.py +++ b/comp/debug/mem_tester/sw/logger_tools.py @@ -1,6 +1,8 @@ #!/usr/bin/env python3 # Copyright (C) 2022 CESNET z. s. p. o. # Author(s): Lukas Nevrkla +# +# Simple tools for parsing statistics import numpy as np @@ -53,80 +55,3 @@ def dict_to_numpy(self, d): res[i] = d[k] return res - -# ## Printing ## -# -# def printValue(self, name, key=None, value=None, norm='from main', normV=None): -# stat = self.origStat[-1] #self.stat[-1] -# if normV is None: -# normV = stat[norm] -# -# if key is None and value is None: -# return f"{name:<25}\n" -# -# if value is not None: -# val = value -# rel = val / normV * 100 -# elif key not in stat or normV <= 0: -# val = "-" -# rel = "-" -# else: -# val = stat[key] -# rel = val / normV * 100 -# -# return f"{name:<25} = {val:<15} [{rel:<.5} %]\n" -# -# def getHist(self, key): -# hist = self.origStat[-1][key] -# hist = {float(k) : v for k,v in hist.items()} -# return hist -# -# def sortHist(self, key): -# hist = self.getHist(key) -# hist = sorted(hist.items(), key=lambda i: i[0]) -# return hist -# -# def printHistRaw(self, hist, cut=None): -# txt = "" -# if len(hist) == 0: -# return " -\n" -# hist = {float(k) : v for k,v in hist.items()} -# sort = sorted(hist.items(), key=lambda i: i[0]) -# if cut is not None and len(sort) > 2 * cut + 1: -# sort = sort[:cut] + [('...', '...')] + sort[len(sort) - cut:] -# -# for k, v in sort: -# txt += f" {k} = {v}\n" -# return txt -# -# def topNHistRaw(self, hist, N=5): -# hist = {float(k) : v for k,v in hist.items()} -# arr = {k: v for k,v in sorted(hist.items(), key=lambda i: i[0])} -# top = 0 -# for k,v in arr.items(): -# if k < N: -# top += v -# else: -# break -# # TODO: div N? 
-# topRel = float(top) / self.origStat[-1]['from main'] / N * 100 -# return top, topRel -# -# def topNHist(self, hist, N=5): -# if len(hist) < N: -# return "" -# else: -# top, topRel = self.topNHistRaw(hist, N) -# return f" top {N} = {top} [{topRel:.2E} %]\n" -# -# def printHist(self, name, key, cut=5, N=None): -# stat= self.origStat[-1] #self.stat[-1] -# txt = "" -# txt += f"{name}:\n" -# if key not in stat: -# txt += "-\n" -# else: -# if N is not None: -# txt += self.topNHist(stat[key], N=N) -# txt += self.printHistRaw(stat[key], cut=cut) -# return txt diff --git a/comp/debug/mem_tester/sw/pdf_gen.py b/comp/debug/mem_tester/sw/pdf_gen.py index 61d926f86..93768b941 100644 --- a/comp/debug/mem_tester/sw/pdf_gen.py +++ b/comp/debug/mem_tester/sw/pdf_gen.py @@ -1,7 +1,8 @@ #!/usr/bin/env python3 # Copyright (C) 2022 CESNET z. s. p. o. # Author(s): Lukas Nevrkla - +# +# Simple package for generating PDF reports using Markdown format class PDFGen: def __init__(self): From f2fcd046caffff0f2dacfc41d3eddff4c4f1ce65 Mon Sep 17 00:00:00 2001 From: xnevk03 Date: Tue, 10 Sep 2024 07:33:30 +0200 Subject: [PATCH 09/17] data_logger/logger_stats [FEATURE]: Add package for loading data_logger statistics --- .../data_logger/sw/logger_stats/__init__.py | 0 .../sw/logger_stats/logger_stats.py | 919 ++++++++++++++++++ 2 files changed, 919 insertions(+) create mode 100644 comp/debug/data_logger/sw/logger_stats/__init__.py create mode 100644 comp/debug/data_logger/sw/logger_stats/logger_stats.py diff --git a/comp/debug/data_logger/sw/logger_stats/__init__.py b/comp/debug/data_logger/sw/logger_stats/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/comp/debug/data_logger/sw/logger_stats/logger_stats.py b/comp/debug/data_logger/sw/logger_stats/logger_stats.py new file mode 100644 index 000000000..35ec0054f --- /dev/null +++ b/comp/debug/data_logger/sw/logger_stats/logger_stats.py @@ -0,0 +1,919 @@ +#!/usr/bin/env python3 +# Copyright (C) 2022 CESNET z. s. p. o. +# Author(s): Lukas Nevrkla +# +# Package for structured loading and saving statistics from data_logger + +import json +import numpy as np +import pandas as pd +from typing import List, Callable, Any, Optional + + +# Common conversion functions # + +Multipliers = { + 'k': 10**3, + 'M': 10**6, + 'G': 10**9, + 'T': 10**12, +} + + +TimeUnits = { + 'd': 24 * 3600, + 'h': 3600, + 'min': 60, + 's': 1, + 'ms': 1 / 10**3, + 'us': 1 / 10**6, + 'ns': 1 / 10**9, +} + + +def ConvertDefault(v): + return v + + +def ConvertTime(freq : float, units : str = "ns") -> Callable[[float], float]: + """ + Convert numeric value to time [ns] with specified CLK frequency [Hz] + + Parameters + ---------- + freq : float + Frequency of the FPGA clocks [HZ] + units : str + In which units should be time returned ('h', 'min', 's', 'ms', 'us', 'ns') + + Returns + ------- + Callable [[float], float] + Conversion function + """ + + if units not in TimeUnits: + raise Exception(f"Unit {units} is not recognized") + else: + mult = 1 / TimeUnits[units] + + def res(v): + return v / freq * mult + + return res + + +def ConvertStates(states : List[Any]) -> Callable[[float], Any]: + """ + Convert numeric value to discrete states (for example strings) + + Parameters + ---------- + states : List[Any] + Value 'i' will be converted to item at list's i-th index + + Returns + ------- + Callable[[float], Any] + Conversion function + """ + + def res(val): + assert 0 <= val, f"Negative value {val} cannot be converted to state!" 
+ if val < len(states): + return states[int(val)] + else: + return str(val) + + return res + + +# Common format functions # + +def FormatDefault( + units : str = '', + decimal : int = 0, + only_last : bool = False +) -> Callable[[Any], str]: + """ + Default formatting function for single valued statistics (for example counter interface) + + Parameters + ---------- + units : str + Custom units string that will be appended at the end of statistic + decimal : int + Number of decimal places for printing + only_last : bool + Print only the latest measurement (else print sum of the measurements) + + Returns + ------- + Callable[[Any], Str] + Conversion function + """ + + def res(v): + if v is None: + return "-" + + if isinstance(v, (list, pd.core.series.Series, np.ndarray)): + if only_last or not isinstance(v, (int, float)): + v = v[-1] + else: + v = sum(v) + + unit_str = f" {units}" if len(units) > 0 else '' + if isinstance(v, (int, float)): + val_str = f"{v:.{decimal}f}" + else: + val_str = str(v) + + return f"{val_str}{unit_str}" + + return res + + +def FormatDefaultValue( + format : Callable[[Any], str] = FormatDefault(), + units : str = "" +) -> Callable[[Any], str]: + """ + Default formatting function for value interface + + Parameters + ---------- + format : Callable[[Any], str] + Formatting function for min, max, avg statistics + units : str + Custom units string that will be appended at the end of statistic + + Returns + ------- + Callable[[Any], Str] + Conversion function + """ + + return lambda data: f"<{format(data['min'])} / {format(data['avg'])} / {format(data['max'])}> {units}" + + +class LoggerStats: + """ + Class for structured loading and printing data_logger statistics + + - Multiple data_loggers can be observed + - Statistics can be organized in tree structure (using nested LoggerStats classes) + - Custom names and conversion / format functions can be provided + + Default statistics types: + - `Constant(index, name)` + - `Counter(index, name)` + - `TimeCounter(index, freq, name)` - counter measuring time / latency + - `FlowTimeCounter(index_words, index_ticks, freq, word_bits, name)` - 2 counters measuring data flow + - `Value(index, name)` + - `ValueCMD(index, name, cmd_width, cmds)` - same as Value, but histogram is split to `2**cmd_width` types specified by MSB bits + - `Custom(name, data)` - statistic's value will be specified during creation or during loading + - `CustomJSON(name)` - statistic's value will be specified externally by JSON string + + Providing data_logger classes: + + - Data_loggers can be provided to each leaf node manualy + - Providing data_logger to parent node will copy logger to all sub-nodes + - Loggers provided in leaf nodes won't be overridden by setting parent node's logger + + Example: + + ``` + stats_a = LoggerStats('Stats A', logger=logger_a) + stats_b = LoggerStats('Stats B', logger=logger_b) + + stats = LoggerStats('Root stats') + stats.add_stat(stats_a) + stats.add_stat(stats_b) + + stats_a.add_stat(Value(7, 'Name A') + stats_a.add_stat(Value(42, 'Name B', convert=ConvertTime(FREQ) + ... + + stats.add_stats( + name='Stats C', + names=C_names, + indexes=list(range(7)), + constructor=lambda i, n: Stats.Counter(i, n) + ) + ... 
+ + stats.load() + print(stats.to_str()) + stats.save('stats.npz') + ``` + """ + + StrOffset = 2 + + def __init__(self, name : str, logger=None): + """ + Initialize statistics node (root node or sub-node) + + Parameters + ---------- + name : str + Node name + logger : DataLogger class + Default data_logger for all sub-statistics in this node + """ + + self.name = name + self.logger = logger + + self.stats = [] + self.time = [] + + def calc_stats(data): + return data + + self.calc_stats = calc_stats + + def add_stat(self, stat): + """ + Add new statistic under this node. + Nested node can be created by passing LoggerStats class. + """ + + stat.set_logger(self.logger) + self.stats.append(stat) + + def add_stats( + self, + indexes : List[int], + names : List[str], + constructor : Callable[[int, str], Any], + name : Optional[str] = None, + logger=None + ): + """ + Add list of the new statistics. + If name is None (by default), statistics will be added to the current node. + Otherwise subnode with a given name will be created. + + Example: + + reqs = ['wr req cnt', 'wr req words', ...] + stats.add_stats( + name='Requests', + names=reqs, + indexes=list(range(len(reqs))), + constructor=lambda i, n: Stats.Counter(i, n) + ) + """ + + if name is not None: + group = LoggerStats(name, logger) + self.add_stat(group) + else: + group = self + + for i, name in zip(indexes, names): + group.add_stat(constructor(i, name)) + + def add_calc_stats(self, calc_stats): + """ + Add callback that will transform statistics after each logging + + Parameters + ---------- + calc_stats : Callable[[data], data] + Callback + """ + + self.calc_stats = calc_stats + + def set_logger(self, logger): + """ + Set default data_logger for this node + """ + + if self.logger is None: + self.logger = logger + + for s in self.stats: + s.set_logger(logger) + + def load(self, time : Optional[float] = None): + """ + Load statistics + + All the statistics except Constant keep the full history of all load calls + + Parameters + ---------- + time : float + If specified, new statistic with logging time will be added ('Log time') + """ + + if time is not None: + self.time.append(time) + + for s in self.stats: + s.load() + + self.set_data(self.calc_stats(self.data())) + + def data(self): + """ + Get all statistics from this node + + Data format: `{sub-stat-name: sub-stat-data, ...}` + """ + + res = {s.name: s.data() for s in self.stats} + if len(self.time) > 0: + res['Log time'] = self.time + return res + + def set_data(self, data): + """ + Set all statistics to new values + """ + + for s in self.stats: + s.set_data(data[s.name]) + + # Add new statistics + stat_names = map(lambda s: s.name, self.stats) + for key in data: + if key not in stat_names: + self.add_stat(Custom(name=key, data=data[key])) + + if 'Log time' in data: + self.time = data['Log time'] + + def __getitem__(self, key): + return self.data()[key] + + def __setitem__(self, key, value): + data = self.data() + data[key] = value + self.set_data(data) + + def to_str(self, prefix_len=None, offset=0): + """ + Get all statistics in string format + """ + + if prefix_len is None: + prefix_len = self._prefix_len() + + res = [f"{' ' * offset}{self.name}:\n"] + res += map(lambda s: s.to_str(prefix_len, offset + self.StrOffset), self.stats) + return ''.join(res) + '\n' + + def _prefix_len(self): + if len(self.stats) == 0: + return self.StrOffset + else: + return max(map(lambda s: s._prefix_len(), self.stats)) + self.StrOffset + + def save(self, file): + """ + Save all statistics in compressed 
numpy format (.npz) + """ + + data = self.data() + np.savez_compressed(file, np.array(data, dtype=object)) + + def load_file(self, file): + """ + Load all statistics from .npz file + """ + + data = np.load(file, allow_pickle=True)['arr_0'].item() + self.set_data(data) + + +class DefaultStat: + def __init__( + self, + name : str, + logger=None, + convert=ConvertDefault, + format=FormatDefault() + ): + """ + Parameters + ---------- + index : int + Constant position inside data_logger statistic port + name : str + Statistics name + logger : DataLogger class + DataLogger class + convert : Callable[[float], float] + Optional conversion function + format : Callable[[float], str] + Optional format function + """ + + self.name = name + self.logger = logger + self.convert = convert + self.format = format + + self._raw_data = None + self._data = [] + + def set_logger(self, logger): + if self.logger is None: + self.logger = logger + + def data(self): + return self._data + + def set_data(self, data): + self._data = data + + def to_str(self, prefix=40, offset=0): + spaces = prefix - len(self.name) - offset + return f"{' ' * offset}{self.name}{' ' * spaces}: {self.format(self._data)}\n" + + def _prefix_len(self): + return len(self.name) + + def load(self): + assert self.logger is not None, f"Data Logger needs to be specified for stat {self.name}" + + +class Constant(DefaultStat): + """ + Constant provided in data_logger's CTRLI port + + Assumes that each constant have width of MI_DATA_WIDTH! + + Data format: `x` + + - Data contains constant value from latest call of the load function + """ + + def __init__(self, index : int, *args, **kwargs): + """ + Parameters + ---------- + index : int + Constant position inside data_logger CTRLI port + name : str + Statistics name + logger : DataLogger class + DataLogger class + convert : Callable[[float], float] + Optional conversion function + format : Callable[[float], str] + Optional format function + """ + + super().__init__(*args, format=format, **kwargs) + self.index = index + + self._raw_data = None + self._data = None + + def load(self): + super().load() + + ctrli = self.logger.load_ctrl(0) + data = self.logger.get_bits(ctrli, self.logger.mi_width, self.logger.mi_width * self.index) + + self._raw_data = data + self._data = self.convert(self._raw_data) + + +class Counter(DefaultStat): + """ + Data logger's counter statistics + + Data format: `[x, y, ...]` + + - Data contains counter's value from each load call + """ + + def __init__(self, index : int, *args, **kwargs): + """ + Parameters + ---------- + index : int + Counter index inside data_logger + name : str + Statistics name + logger : DataLogger class + DataLogger class + convert : Callable[[float], float] + Optional conversion function + format : Callable[[float], str] + Optional format function + """ + + super().__init__(*args, **kwargs) + self.index = index + + def load(self): + super().load() + + self._raw_data = self.logger.load_cnter(self.index) + self._data.append(self.convert(self._raw_data)) + + +class TimeCounter(DefaultStat): + """ + Data logger's statistics for measuring time / latency of some operation + + Data format: `[x, y, ...]` + + - Data contains counter's value from each load call + """ + + def __init__(self, index : int, freq : float, *args, units : str = 's', **kwargs): + """ + Parameters + ---------- + index int + Counter measuring number of clock cycles for which operation occurred + freq float + Frequency of the FPGA clocks [HZ] + name str + Statistics name + units str + Time 
units ('h', 'min', 's', 'ms', 'us', 'ns') + logger DataLogger + DataLogger class + convert : Callable[[float], float]) + Optional conversion function + format : Callable[[float], str]) + Optional format function + """ + + if 'format' not in kwargs: + kwargs['format'] = FormatDefault(units=units, decimal=3) + super().__init__(*args, **kwargs) + self.index = index + self.freq = freq + self.units = units + + def load(self): + super().load() + + ticks = self.logger.load_cnter(self.index) + time_s = ConvertTime(self.freq, units=self.units)(ticks) + + self._raw_data = time_s + self._data.append(self.convert(self._raw_data)) + + +class FlowCounter(DefaultStat): + """ + Data logger's statistics for measuring data flow using two counters (number of words and number of ticks) + + Data flow units are: Gb/s + + Data format: `[x, y, ...]` + + - Data contains counter's value from each load call + """ + + def __init__( + self, + index_words : int, + index_ticks : int, + freq : float, + word_bits : float = 1, + units : str = 'Gb/s', + *args, **kwargs + ): + """ + Parameters + ---------- + index_words : int + Counter measuring number of data packets + index_ticks : int + Counter measuring number of clock cycles during which communication occurred + freq : float + Frequency of the FPGA clocks [HZ] + word_bits : float + Number of bits inside one data word + name : str + Statistics name + logger : DataLogger + DataLogger class + convert : Callable[[float], float] + Optional conversion function + format : Callable[[float], str] + Optional format function + """ + + if 'format' not in kwargs: + kwargs['format'] = FormatDefault(units='Gb/s', decimal=3) + super().__init__(*args, **kwargs) + self.index_words = index_words + self.index_ticks = index_ticks + self.freq = freq + self.word_bits = word_bits + self.units = units + + def load(self): + super().load() + + words = self.logger.load_cnter(self.index_words) + ticks = self.logger.load_cnter(self.index_ticks) + + self._raw_data = self._convert(words, ticks) + self._data.append(self.convert(self._raw_data)) + + def _convert(self, words, ticks): + DataUnits = { + 'b': self.word_bits, # Bits + 'B': self.word_bits / 8, # Bytes + 'T': 1, # Transfers + 'p': 1, # Packets + } + + try: + if self.units[2] == '/': + mult = Multipliers[self.units[0]] + data = DataUnits[self.units[1]] + t = self.units[3:] + else: + mult = 1 + data = DataUnits[self.units[0]] + t = self.units[2:] + except KeyError: + raise Exception(f"Unit {self.units} is not recognized") + + time = ConvertTime(self.freq, units=t)(ticks) + if time == 0: + return 0 + if mult == 0: + return 0 + + return words * data / time / mult + + +class Value(DefaultStat): + """ + Data logger's value statistics + + Data format: + + ``` + { + 'min': [x, y, ...], + 'max': [x, y, ...], + 'avg': [x, y, ...], + 'hist': "np.array with shape: (time, boxes)", + 'hist_x': "list with values corresponding to the middles of each histogram box" + } + ``` + """ + + def __init__(self, index : int, *args, format=FormatDefaultValue(), **kwargs): + """ + Parameters + ---------- + index : int + Value interface index + name : str + Statistics name + logger : DataLogger + DataLogger class + convert : Callable[[float], float] + Optional conversion function + format : Callable[[float], str] + Optional format function + """ + + super().__init__(*args, format=format, **kwargs) + self.index = index + self._data = { + 'min': [], + 'avg': [], + 'max': [], + 'hist': None, + 'hist_x': None + } + + def load(self): + super().load() + + self._raw_data = 
self.logger.load_value(self.index) + + self.width = self.logger.config["VALUE_WIDTH"][self.index] + self.value_en = self.logger.config["VALUE_EN"][self.index] + self.sum_extra_width = self.logger.config["SUM_EXTRA_WIDTH"][self.index] + self.hist_box_cnt = self.logger.config["HIST_BOX_CNT"][self.index] + self.hist_box_width = self.logger.config["HIST_BOX_WIDTH"][self.index] + self.hist_step = self.logger.config["HIST_STEP"][self.index] + + metrics = ['min', 'avg', 'max', 'hist'] + ens = ['MIN', 'SUM', 'MAX', 'HIST'] + for m, en in zip(metrics, ens): + if not self.value_en[en]: + continue + + if m == 'hist': + x = [self.convert((i + 0.5) * self.hist_step) for i in range(0, self.hist_box_cnt)] + y = np.array([self._raw_data['hist']]) + + self._data['hist_x'] = x + + if self._data['hist'] is None: + self._data['hist'] = y + else: + self._data['hist'] = np.append(self._data['hist'], y, axis=0) + else: + self._data[m].append(self.convert(self._raw_data[m])) + + +class ValueCMD(DefaultStat): + """ + Same as value statistics, but splits each histogram box to `2 ** cmd_width` measurements (commands). + + Data format: + + ``` + { + 'cmd_0': { ... same as Value ... }, + 'cmd_1': { ... same as Value ... }, + ... + } + ``` + """ + + def __init__( + self, + index : int, + *args, + cmd_width : int, + cmds : List[str], + format=FormatDefaultValue(), + **kwargs + ): + """ + Parameters + ---------- + index : int + Value interface index + name : str + Statistics name + cmd_width : int + MSB bits in data_loggers value statistics represent different commands + cmds : List[str] + The list with commands names + logger : DataLogger + DataLogger class + convert : Callable[[float], float] + Optional conversion function + format : Callable[[float], str] + Optional format function + """ + + super().__init__(*args, format=format, **kwargs) + self.index = index + + self.cmd_width = cmd_width + self.cmds = cmds + + self._data = {} + for cmd in self.cmds: + self._data[cmd] = { + 'min': [], + 'avg': [], + 'max': [], + 'hist': None, + 'hist_x': None + } + + def to_str(self, prefix=40, offset=0): + spaces = prefix - len(self.name) - offset + res = f"{' ' * offset}{self.name}:\n" + + for cmd in self.cmds: + spaces = prefix - len(cmd) - offset - LoggerStats.StrOffset + res += f"{' ' * (offset + LoggerStats.StrOffset)}{cmd}{' ' * (spaces)}: " + res += f"{self.format(self._data[cmd])}\n" + + return res + + def _prefix_len(self): + return max(len(self.name), *list(map(lambda x: len(x), self.cmds))) + LoggerStats.StrOffset + + def load(self): + super().load() + + self._raw_data = self.logger.load_value(self.index) + + self.width = self.logger.config["VALUE_WIDTH"][self.index] + self.value_en = self.logger.config["VALUE_EN"][self.index] + self.sum_extra_width = self.logger.config["SUM_EXTRA_WIDTH"][self.index] + self.hist_box_cnt = self.logger.config["HIST_BOX_CNT"][self.index] + self.hist_box_width = self.logger.config["HIST_BOX_WIDTH"][self.index] + self.hist_step = self.logger.config["HIST_STEP"][self.index] + + self.hist_box_cnt //= 2 ** self.cmd_width + + x = [self.convert((i + 0.5) * self.hist_step) for i in range(0, self.hist_box_cnt)] + + for i, cmd in enumerate(self.cmds): + if not self.value_en['HIST']: + continue + + y = self._raw_data['hist'][i * self.hist_box_cnt : (i + 1) * self.hist_box_cnt] + y = np.array(y) + + if self._data[cmd]['hist'] is None: + self._data[cmd]['hist'] = y + else: + self._data[cmd]['hist'] = np.append(self._data[cmd]['hist'], y, axis=0) + + self._data[cmd]['hist_x'] = x + + y = np.array(y) + # 
Indexes of the non zero items + non_zero = np.nonzero(y)[0] + + if len(non_zero) == 0: + min = 0 + max = 0 + avg = 0 + else: + min = x[non_zero[0]] + max = x[non_zero[-1]] + avg = np.dot(x, y) / np.sum(y) + + self._data[cmd]['min'].append(min) + self._data[cmd]['max'].append(max) + self._data[cmd]['avg'].append(avg) + + +class Custom(DefaultStat): + """ + Statistic with externally specified value (using python object) + + Value can be specified during construction and during load + """ + + def __init__(self, *args, data=None, **kwargs): + """ + Parameters + ---------- + name : str + Statistics name + data : object + Statistic data + logger : DataLogger + DataLogger class + convert : Callable[[float], float] + Optional conversion function + format : Callable[[float], str] + Optional format function + """ + + if 'format' not in kwargs: + kwargs['format'] = FormatDefault(only_last=True) + super().__init__(*args, **kwargs) + self._data = [data] + + def load(self, data=None): + if data is not None: + self._data.append(data) + + +class CustomJSON(Custom): + """ + Statistic with externally specified value (using JSON string) + + Value can be specified during construction and during load + """ + + def __init__(self, *args, data=None, **kwargs): + """ + Parameters + ---------- + name : str + Statistics name + data : object + Statistic data + logger : DataLogger + DataLogger class + convert : Callable[[float], float] + Optional conversion function + format : Callable[[float], str] + Optional format function + """ + + super().__init__(*args, **kwargs) + if data is None: + self._data = [] + else: + self._data = [json.loads(data)] + + def load(self, data=None): + if data is not None: + data = json.loads(data) + self._data.append(data) From b7a0f82a9158ffdbd082b0416476655acdbe2fb6 Mon Sep 17 00:00:00 2001 From: xnevk03 Date: Tue, 10 Sep 2024 07:34:55 +0200 Subject: [PATCH 10/17] data_logger/graph_tools [FEATURE]: Add simple package for faster plotting data_logger's statistics --- .../data_logger/sw/graph_tools/__init__.py | 0 .../data_logger/sw/graph_tools/graph_tools.py | 250 ++++++++++++++++++ 2 files changed, 250 insertions(+) create mode 100644 comp/debug/data_logger/sw/graph_tools/__init__.py create mode 100644 comp/debug/data_logger/sw/graph_tools/graph_tools.py diff --git a/comp/debug/data_logger/sw/graph_tools/__init__.py b/comp/debug/data_logger/sw/graph_tools/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/comp/debug/data_logger/sw/graph_tools/graph_tools.py b/comp/debug/data_logger/sw/graph_tools/graph_tools.py new file mode 100644 index 000000000..eaeb2beab --- /dev/null +++ b/comp/debug/data_logger/sw/graph_tools/graph_tools.py @@ -0,0 +1,250 @@ +#!/usr/bin/env python3 +# Copyright (C) 2022 CESNET z. s. p. o. 
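The ``ValueCMD`` statistic above reduces each command's slice of the histogram to min / avg / max: box centres are ``(i + 0.5) * HIST_STEP``, the minimum and maximum come from the first and last non-zero boxes, and the average is the count-weighted mean. A small standalone illustration of that reduction (synthetic numbers, not taken from real hardware):

.. code-block:: python

    import numpy as np

    hist_step, box_cnt = 4, 8
    x = [(i + 0.5) * hist_step for i in range(box_cnt)]   # box centres: 2, 6, 10, ...
    y = np.array([0, 0, 3, 5, 1, 0, 0, 0])                # counts for one command

    non_zero = np.nonzero(y)[0]                           # indexes of non-zero boxes
    stat_min = x[non_zero[0]]                             # 10.0
    stat_max = x[non_zero[-1]]                            # 18.0
    stat_avg = np.dot(x, y) / np.sum(y)                   # ~13.1

    print(stat_min, stat_avg, stat_max)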
+# Author(s): Lukas Nevrkla +# +# Package for plotting statistics from logger_stats + + +import numpy as np +import pandas as pd +import matplotlib.pyplot as plt +from matplotlib.colors import LogNorm +import seaborn as sns + + +def load_data(file_name : str): + """ + Load `*.npz` file created by logger_stats + + Parameters + ---------- + file_name (str): *.npz file with statistics + """ + + return np.load(file_name, allow_pickle=True)['arr_0'].item() + + +def plot_counter(data, x, y, title, log=False, diff=True): + """ + Plot historical values of data_logger's counter + + Parameters + ---------- + data : dict + Data in shape `{'Stat A': list, ...}` + x : str + X axis label + y : str + Y axis label + log : bool + Make Y axis logarithmic + diff : bool + Plot only differences between historical measurements (data_logger increment counters) + + Example: + + ``` + from graph_tools.graph_tools import * + + stats = load_data('stats.npz') + + node = pd.DataFrame.from_dict(stats['Stats A']['Counters']) + selected = ['Counter A', 'Counter B'] + + # Plot single counter + plot_counter(node['Counter X'], 'Time', 'Requests', 'Plot title') + + # Plot multiple counters + plot_counter(node[selected], 'Time', 'Requests', 'Plot title') + ``` + """ + + data = pd.DataFrame(data) + + if diff: + data = data.diff() + + plt.figure(figsize=(20, 6)) + sns.lineplot(data=data) + plt.title(title) + plt.xlabel(x) + plt.ylabel(y) + if log: + plt.yscale('log') + + +def plot_value(data, x : str, y : str, title : str, zoom : bool = True, log : bool = False): + """ + Plot histogram of data_logger's value interface + + Parameters + ---------- + data : dict + Data in shape `{'hist_x': list, 'hist': 2D array}` + x : str + X axis label + y : str + Y axis label + log : bool + Make Y axis logarithmic + zoom : bool + Zoom only to non zero area of the histogram + """ + + data = pd.DataFrame({'x': data['hist_x'], 'y': np.array(data['hist']).sum(axis=0)}) + + plt.figure(figsize=(20, 6)) + sns.lineplot(data=data, x='x', y='y') + plt.title(title) + plt.xlabel(x) + plt.ylabel(y) + if log: + plt.yscale('log') + + plt.fill_between(data['x'], data['y'], alpha=0.3, color="skyblue") + + if zoom: + non_zero_indices = np.where(np.array(data['y']) > 0)[0] + if len(non_zero_indices) > 1: + plt.xlim(data['x'][non_zero_indices[0]], data['x'][non_zero_indices[-1]]) + + +def downsize(data, x, y, ticks): + orig_x, orig_y = data.shape + + if orig_x <= x or orig_y <= y: + return data, ticks + + tile_x = orig_x // x + tile_y = orig_y // y + + print(x, y, tile_x, tile_y) + + tiles = data.reshape(-1, tile_x, y, tile_y) + tiles = tiles.transpose(0, 2, 1, 3) + tiles = tiles.sum(axis=3).sum(axis=2) + + #pad = tile_x - orig_x % x + #print(pad, tile_x, orig_x, x) + #ticks = np.pad(ticks, (0, pad), mode='constant', constant_values=0) + ticks = ticks.reshape(-1, y).mean(axis=1) + + return (tiles, ticks) + + +def trim_zeros(data, ticks): + # First non zero row + first_index = np.any(data != 0, axis=1).argmax() + last_index = data.shape[0] - np.any(data != 0, axis=1)[::-1].argmax() - 1 + + if first_index + 2 > last_index: + first_index = max(0, first_index - 2) + last_index = min(data.shape[0], last_index + 2) + + return (data[first_index : last_index + 1], ticks[first_index : last_index + 1]) + + +def downsize_ratio(data, max_x, max_y): + x, y = data.shape + + while x > max_x: + if x / 2 != x // 2: + break + else: + x //= 2 + + while y > max_y: + if y / 2 != y // 2: + break + else: + y //= 2 + + return (x, y) + + +def plot_value_2d( + data, + x : str, + y : str, 
+ title : str, + zoom : bool = True, + log : bool = False, + downsize_size=None, + ticks=None +): + """ + Plot 2D histogram of data_logger's value interface history + + Parameters + ---------- + data : dict + Data in shape `{'hist_x': list, 'hist': 2D array}` + x : str + X axis label + y : str + Y axis label + log : bool + Make Y axis logarithmic + zoom : bool + Zoom only to non zero area of the histogram + ticks : [float] + Custom values for x axis + """ + + x_ticks = list(map(lambda x: round(x), data['hist_x'])) + hist = np.array(data['hist']).transpose(1, 0) + + if (hist == 0).all(): + print(f"Plot {title} contains all zeros") + return + + if zoom: + hist, x_ticks = trim_zeros(hist, x_ticks) + + if downsize_size is not None: + x_size, y_size = downsize_ratio(hist, *downsize_size) + hist, x_ticks = downsize(hist, x_size, y_size, np.array(x_ticks)) + + if len(x_ticks) <= 1: + print(f"More logs are needed ({hist})") + return + + if ticks is None: + ticks = [f'{i}' for i in range(0, hist.shape[1])] + + data = pd.DataFrame( + hist, + index=[x_ticks[i] for i in range(0, hist.shape[0])], + columns=ticks + ) + + def min_max_normalize(df): + return (df - df.min()) / (df.max() - df.min()) + data = data.apply(min_max_normalize) + + norm = None if not log else LogNorm() + plt.figure(figsize=(20, 8)) + sns.heatmap(data=data, norm=norm, cmap='jet') + plt.title(title) + plt.xlabel(x) + plt.ylabel(y) + + # Reduce number of ticks + for ind, label in enumerate(plt.gca().get_yticklabels()): + label.set_visible(ind % 5 == 0) + + +def plot_value_both(*args, **kwargs): + plot_value(*args, **kwargs) + plot_value_2d(*args, **kwargs) + + +def only_numeric(data): + df = pd.DataFrame(data) + mask = df.apply(lambda row: row.apply(lambda x: pd.api.types.is_numeric_dtype(type(x))).all(), axis=1) + df = df[mask] + return df + + +def legend(labels, loc='upper left', **kwargs): + plt.legend(labels, bbox_to_anchor=(1, 1), loc=loc, **kwargs) From ffb66e49858c6c6450f2d60a9af0f858f42e3db7 Mon Sep 17 00:00:00 2001 From: xnevk03 Date: Tue, 10 Sep 2024 07:37:38 +0200 Subject: [PATCH 11/17] data_logger [FEATURE]: Update setup.py --- comp/debug/data_logger/sw/setup.py | 31 +++++++++++++----------------- 1 file changed, 13 insertions(+), 18 deletions(-) diff --git a/comp/debug/data_logger/sw/setup.py b/comp/debug/data_logger/sw/setup.py index 0f6ddd70e..fdab559ac 100644 --- a/comp/debug/data_logger/sw/setup.py +++ b/comp/debug/data_logger/sw/setup.py @@ -1,29 +1,24 @@ #!/usr/bin/env python3 -# Copyright (C) 2022 CESNET z. s. p. o. +# Copyright (C) 2024 CESNET z. s. p. o. 
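Both plotting helpers above consume the dictionary layout produced by the value statistics in ``logger_stats``: ``hist_x`` holds the box centres and ``hist`` holds one row of box counts per ``load()`` call. A synthetic, self-contained example of driving them (illustrative data only, assuming the package is installed as shown in the readme):

.. code-block:: python

    import numpy as np
    from graph_tools.graph_tools import plot_value, plot_value_2d

    fake = {
        'hist_x': [float(i) for i in range(16)],          # 16 box centres
        'hist': np.random.poisson(3, size=(10, 16)),      # 10 load() calls x 16 boxes
    }

    # 1D histogram summed over the whole history
    plot_value(fake, 'Latency [ns]', 'Occurrences', 'Synthetic histogram', log=True)
    # 2D heatmap: x axis is the log index, y axis the histogram box value
    plot_value_2d(fake, 'Log index', 'Latency [ns]', 'Synthetic history')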
# Author(s): Lukas Nevrkla -from setuptools import setup - -# Install: -# python3 setup.py install --user -# Must be in package directory +from setuptools import setup, find_packages setup( name='logger_tools', - version='0.1.1', - description='Package with data_logger tools', - author='Lukáš Nevrkla', + version='1.0.0', + author='Lukas Nevrkla', author_email='xnevrk03@stud.fit.vutbr.cz', - packages=[ - 'data_logger', - 'mem_logger', - "graph_gen", - "logger_tools", - "pdf_gen", - ], + description='SW tools for data_logger FPGA component', + packages=find_packages(), install_requires=[ - 'nfb', + 'numpy', + 'pandas', 'matplotlib', - 'numpy' + 'seaborn', + 'Pillow' ], + #extras_require={ + # 'nfb': ['nfb'] + #}, ) From 66373362146cbf29bcee8b3b710bc383d775d280 Mon Sep 17 00:00:00 2001 From: xnevk03 Date: Tue, 10 Sep 2024 07:38:22 +0200 Subject: [PATCH 12/17] data_logger [DOC]: Add documentation of new python packages --- comp/debug/data_logger/readme.rst | 116 ++++++++++++++++++++++++++++-- 1 file changed, 109 insertions(+), 7 deletions(-) diff --git a/comp/debug/data_logger/readme.rst b/comp/debug/data_logger/readme.rst index 41013525c..1454a13e2 100644 --- a/comp/debug/data_logger/readme.rst +++ b/comp/debug/data_logger/readme.rst @@ -171,22 +171,124 @@ Instance template (full usage) Control SW ^^^^^^^^^^ -Folder ``data_logger/sw/`` contains ``Python3`` package that provides: +Folder ``data_logger/sw/`` contains following ``Python3`` packages: -* Module for basic interaction with ``DATA_LOGGER`` -* Modules for ``DATA_LOGGER`` wraps like ``MEM_LOGGER`` -* Simple graph generator based on `matplotlib` library -* Simple PDF / Markdown report generator -* Common tools +* ``data_logger`` ... basic interaction with ``DATA_LOGGER`` +* ``mem_logger`` ... basic interaction with ``MEM_LOGGER`` +* ``logger_stats`` ... loading firmware statistics (multiple ``DATA_LOGGERS`` can be organized in tree hierarchy) +* ``graph_tools`` ... simple plot functions for statistics from ``logger_stats`` Package can be installed using this command: * You also need to install ``python nfb`` package .. code-block:: + python3 -m pip install --upgrade pip + # Install nfb: + cd swbase/pynfb + python3 -m pip install Cython + python3 -m pip install . + cd - + + # Install this package: cd data_logger/sw - python3 setup.py install --user + python3 -m pip install . + +Example usage of ``logger_stats`` (for more usage see `mem_logger/mem_logger.py`): + +.. code-block:: + + + import logger_stats as Stats + from data_logger.data_logger import DataLogger + + def create_stats(): + # Create DataLoggers + logger_0 = DataLogger(index=0, dev='/dev/nfb0') + logger_1 = DataLogger(index=1, dev='/dev/nfb0') + + # Create Stats hierarchy + stats = Stats.LoggerStats('Example stats') + stats_0 = Stats.LoggerStats('Logger 0 stats', logger=logger_0) + stats_1 = Stats.LoggerStats('Logger 1 stats', logger=logger_1) + stats.add_stat(stats_0) + stats.add_stat(stats_1) + + # Add basic statistics + stats_0.add_stat(Stats.Constant(index=7, name='X')) + stats_0.add_stat(Stats.Counter(index=7, name='Y')) + stats_0.add_stat(Stats.Value(index=7, name='Z')) + + # FSM state statistic + def fms_convert(v): + states = [ + 'IDLE', + ... + ] + if v >= len(states): + return "???" 
+ else: + return states[int(v)] + + fsm_format = Stats.FormatDefaultValue(format=Stats.FormatNone) + stats_1.add_stat(Stats.Value(2, 'FSM states', convert=fms_convert, format=fsm_format)) + + # Latency statistic + FREQ = 200 * 10**6 + time_conv = Stats.ConvertTime(FREQ) + time_form = Stats.FormatDefaultValue(units='ns') + stats_1.add_stat(Stats.Value(9, 'Latency', convert=time_conv, format=time_form)) + + # Add value statistic which includes multiple commands + CMDS = [ + 'CMD_A', + ... + ] + stats_1.add_stat(Stats.ValueCMD(7, 'Latency of CMDs', cmd_width=2, cmds=CMDS, convert=time_conv, format=time_form)) + + # Add multiple counters + counters = [ + 'Counter A', + ... + ] + stats_1.add_stats( + name='Counters', + names=counters, + indexes=list(range(len(counters))), + constructor=lambda i, n: Stats.Counter(i, n) + ) + + return stats + + + stats = create_stats() + stats.load() + print(stats.to_str()) + stats.save('stats.npz') + + +Example usage of ``graph_tools``: + + from graph_tools.graph_tools import load_data, plot_counter, plot_value, plot_value_2d + + stats = load_data('stats.npz') + + node = pd.DataFrame.from_dict(stats['Stats A']['Counters']) + selected = ['Counter A', 'Counter B'] + + # Plot single counter + plot_counter(node['Counter X'], 'Time', 'Requests', 'Plot title') + + # Plot multiple counters + plot_counter(node[selected], 'Time', 'Requests', 'Plot title') + + # Plot histogram of the value interface + plot_value(node['Value A'], 'Time', 'Blocks', 'Title' log=True) + + # Plot 2D histogram of the value interface history + plot_value_2d(node['Value A'], 'Time', 'Blocks', 'Title' log=True) + MI address space From aa92aff887611cb7174a518a127ee4753b5baafe Mon Sep 17 00:00:00 2001 From: xnevk03 Date: Tue, 10 Sep 2024 07:30:39 +0200 Subject: [PATCH 13/17] data_logger [STYLE]: Fix data_logger package style --- .../data_logger/sw/data_logger/__init__.py | 3 - .../data_logger/sw/data_logger/data_logger.py | 103 +++++++++--------- 2 files changed, 54 insertions(+), 52 deletions(-) diff --git a/comp/debug/data_logger/sw/data_logger/__init__.py b/comp/debug/data_logger/sw/data_logger/__init__.py index fb2bf4a6b..e69de29bb 100644 --- a/comp/debug/data_logger/sw/data_logger/__init__.py +++ b/comp/debug/data_logger/sw/data_logger/__init__.py @@ -1,3 +0,0 @@ -from data_logger import data_logger - -__all__ = ["data_logger"] diff --git a/comp/debug/data_logger/sw/data_logger/data_logger.py b/comp/debug/data_logger/sw/data_logger/data_logger.py index 3b2c16f3a..c6d8ab67c 100644 --- a/comp/debug/data_logger/sw/data_logger/data_logger.py +++ b/comp/debug/data_logger/sw/data_logger/data_logger.py @@ -1,6 +1,9 @@ #!/usr/bin/env python3 # Copyright (C) 2022 CESNET z. s. p. o. 
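Editor's note (not part of the patch): the refactor below funnels the copy-pasted `last_stat` / `last_index` / `last_slice` / `last_hist_addr` checks into a single `_select()` helper that caches the most recently written selection registers, so repeated reads of the same statistic no longer re-issue identical MI writes. A stripped-down sketch of that caching idea — class names and register offsets here are invented for illustration only:

```python
# Editorial sketch of the write-caching pattern used by _select() below.
class _ToyComp:
    """Counts write32 calls instead of touching real MI registers."""
    def __init__(self):
        self.writes = 0

    def write32(self, reg, value):
        self.writes += 1


class ToySelector:
    _REG_STATS = 0x04   # invented offsets, just for the demo
    _REG_INDEX = 0x08

    def __init__(self, comp):
        self._comp = comp
        self._last = {}

    def select(self, **regs):
        # Write a selection register only when its value actually changes
        for name, value in regs.items():
            if value is not None and self._last.get(name) != value:
                self._comp.write32(getattr(self, f"_REG_{name.upper()}"), value)
                self._last[name] = value


comp = _ToyComp()
sel = ToySelector(comp)
for _ in range(100):
    sel.select(stats=3, index=0)   # same selection every iteration
print(comp.writes)                 # -> 2 writes instead of 200
```

The real `_select()` in the diff below applies the same idea to the stat/index/slice/hist_addr registers and is invalidated by `_rst_selection()` after a component reset.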
# Author(s): Lukas Nevrkla +# +# Data_logger package for accessing data_logger component in FPGA + import nfb import math @@ -54,13 +57,30 @@ class DataLogger(nfb.BaseComp): def __init__(self, **kwargs): super().__init__(**kwargs) + self._rst_selection() + self.load_config() + + def _select(self, stat=None, index=None, slice=None, hist_addr=None): + if stat is not None and self.last_stat != stat: + self._comp.write32(self._REG_STATS, stat) + self.last_stat = stat + if index is not None and self.last_index != index: + self._comp.write32(self._REG_INDEX, index) + self.last_index = index + if slice is not None and self.last_slice != slice: + self._comp.write32(self._REG_SLICE, slice) + self.last_slice = slice + if hist_addr is not None and self.last_hist_addr != hist_addr: + self._comp.write32(self._REG_HIST, hist_addr) + self.last_hist_addr = hist_addr + + def _rst_selection(self): self.last_stat = None self.last_index = None self.last_slice = None self.last_hist_addr = None - self.config = self.load_config() - self.mi_width = self.config["MI_DATA_WIDTH"] + self.load_config() def main_ctrl_read(self): return { @@ -74,36 +94,23 @@ def rst(self): if not self._comp.wait_for_bit(self._REG_CTRL, self._BIT_RST_DONE, level=True): print("Err: Could not reset data_logger!", file=sys.stderr) - self.last_stat = None - self.last_index = None - self.last_slice = None - self.last_hist_addr = None + self._rst_selection() - def load_slices(self, width): + def _load_slices(self, width): slices = math.ceil(width / self.config["MI_DATA_WIDTH"]) value = 0 for i in range(0, slices): - if self.last_slice != i: - self._comp.write32(self._REG_SLICE, i) - self.last_slice = i - + self._select(slice=i) value += self._comp.read32(self._REG_VALUE) << (i * self.config["MI_DATA_WIDTH"]) return value def stat_read(self, stat, index=0, en_slices=True): - if self.last_stat != stat: - self._comp.write32(self._REG_STATS, stat) - self.last_stat = stat - if self.last_index != index: - self._comp.write32(self._REG_INDEX, index) - self.last_index = index + self._select(stat=stat, index=index) if not en_slices: - if self.last_slice != 0: - self._comp.write32(self._REG_SLICE, 0) - self.last_slice = 0 + self._select(slice=0) return self._comp.read32(self._REG_VALUE) if stat == self._ID_CTRLO: @@ -121,21 +128,13 @@ def stat_read(self, stat, index=0, en_slices=True): else: width = self.config["MI_DATA_WIDTH"] - return self.load_slices(width) + return self._load_slices(width) def hist_read(self, index, addr): - if self.last_stat != self._ID_VALUE_HIST: - self._comp.write32(self._REG_STATS, self._ID_VALUE_HIST) - self.last_stat = self._ID_VALUE_HIST - if self.last_index != index: - self._comp.write32(self._REG_INDEX, index) - self.last_index = index - if self.last_hist_addr != addr: - self._comp.write32(self._REG_HIST, addr) - self.last_hist_addr = addr - + self._select(stat=self._ID_VALUE_HIST, index=index, hist_addr=addr) width = self.config["HIST_BOX_WIDTH"][index] - return self.load_slices(width) + + return self._load_slices(width) def load_config(self): config = {} @@ -168,28 +167,29 @@ def load_config(self): config["HIST_BOX_WIDTH"] .append(self.stat_read(self._ID_HIST_BOX_WIDTH, i, en_slices=False)) hist_max = 2 ** config["VALUE_WIDTH"][i] - hist_step = hist_max / config["HIST_BOX_CNT"][i] + hist_box_cnt = config["HIST_BOX_CNT"][i] + hist_step = hist_max / hist_box_cnt if hist_box_cnt != 0 else 0 config["HIST_STEP"].append(hist_step) - return config + self.mi_width = config["MI_DATA_WIDTH"] + self.config = config def 
load_ctrl(self, out): id = self._ID_CTRLO if out else self._ID_CTRLI return self.stat_read(id, 0) + def load_ctrlo(self): + return self.stat_read(self._ID_CTRLO, 0) + + def load_ctrli(self): + return self.stat_read(self._ID_CTRLI, 0) + def set_ctrlo(self, val): - if self.last_stat != self._ID_CTRLO: - self._comp.write32(self._REG_STATS, self._ID_CTRLO) - self.last_stat = self._ID_CTRLO - if self.last_index != 0: - self._comp.write32(self._REG_INDEX, 0) - self.last_index = 0 + self._select(stat=self._ID_CTRLO, index=0) slices = math.ceil(self.config["CTRLO_WIDTH"] / self.mi_width) for i in range(0, slices): - if self.last_slice != i: - self._comp.write32(self._REG_SLICE, i) - self.last_slice = i + self._select(slice=i) slice = self.get_bits(val, self.mi_width, i * self.mi_width) self._comp.write32(self._REG_VALUE, slice) @@ -246,7 +246,7 @@ def stats_to_str(self, hist=False): stats['cnter_' + str(i)] = self.load_cnter(i) for i in range(0, self.config['VALUE_CNT']): val = self.load_value(i) - if not hist: + if not hist and 'hist' in val: del val['hist'] stats['value_' + str(i)] = val return json.dumps(stats, indent=4) @@ -259,14 +259,19 @@ def parseParams(): access = parser.add_argument_group('card access arguments') access.add_argument( - '-d', '--device', default=nfb.libnfb.Nfb.default_device, - metavar='device', help="device with target FPGA card") + '-d', '--device', default=nfb.libnfb.Nfb.default_dev_path, + metavar='device', help="""device with target FPGA card""" + ) access.add_argument( - '-i', '--index', type=int, metavar='index', default=0, help="index inside DevTree") + '-i', '--index', type=int, metavar='index', default=0, + help="""index inside DevTree""" + ) common = parser.add_argument_group('common arguments') - #common.add_argument('-p', '--print', action='store_true', help = """print registers""") - common.add_argument('--rst', action='store_true', help="reset mem_tester and mem_logger") + common.add_argument( + '--rst', action='store_true', + help="reset mem_tester and mem_logger" + ) args = parser.parse_args() return args From ad79f3ab526b9265a68c6fccf339c67b79e5dd9b Mon Sep 17 00:00:00 2001 From: xnevk03 Date: Wed, 11 Sep 2024 13:46:15 +0200 Subject: [PATCH 14/17] mem_logger [BUGFIX]: Fix default histogram boxes, that caused data_logger assertion to fail --- comp/debug/data_logger/mem_logger/mem_logger.vhd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/comp/debug/data_logger/mem_logger/mem_logger.vhd b/comp/debug/data_logger/mem_logger/mem_logger.vhd index d07cd0473..b22e268c5 100644 --- a/comp/debug/data_logger/mem_logger/mem_logger.vhd +++ b/comp/debug/data_logger/mem_logger/mem_logger.vhd @@ -54,7 +54,7 @@ generic ( MI_ADDR_WIDTH : integer := 32; -- Specify read latency histogram precision - HISTOGRAM_BOXES : integer := 255; + HISTOGRAM_BOXES : integer := 256; -- Specify maximum paraller read requests MAX_PARALEL_READS : integer := 128; -- Specify read latency ticks count width From 288ef704401f185c3a6e14d447b67651814ff4c3 Mon Sep 17 00:00:00 2001 From: xnevk03 Date: Wed, 11 Sep 2024 13:46:57 +0200 Subject: [PATCH 15/17] mem_logger [FEATURE]: Load statistics using new logger_stats package --- .../data_logger/sw/mem_logger/__init__.py | 3 - .../data_logger/sw/mem_logger/mem_logger.py | 342 ++++++++---------- comp/debug/mem_tester/sw/mem_tester.py | 130 +++++-- comp/debug/mem_tester/sw/report_gen.py | 120 +++--- comp/debug/mem_tester/sw/test_mem_tester.py | 17 +- 5 files changed, 333 insertions(+), 279 deletions(-) diff --git 
a/comp/debug/data_logger/sw/mem_logger/__init__.py b/comp/debug/data_logger/sw/mem_logger/__init__.py index 45ac9b479..e69de29bb 100644 --- a/comp/debug/data_logger/sw/mem_logger/__init__.py +++ b/comp/debug/data_logger/sw/mem_logger/__init__.py @@ -1,3 +0,0 @@ -from mem_logger import mem_logger - -__all__ = ["mem_logger"] diff --git a/comp/debug/data_logger/sw/mem_logger/mem_logger.py b/comp/debug/data_logger/sw/mem_logger/mem_logger.py index ccabffb8e..db83b3d08 100644 --- a/comp/debug/data_logger/sw/mem_logger/mem_logger.py +++ b/comp/debug/data_logger/sw/mem_logger/mem_logger.py @@ -1,207 +1,165 @@ #!/usr/bin/env python3 # Copyright (C) 2022 CESNET z. s. p. o. # Author(s): Lukas Nevrkla +# +# Package for loading statistics from mem_logger component -import json import argparse +import numpy as np import nfb from data_logger.data_logger import DataLogger +import logger_stats.logger_stats as Stats class MemLogger(DataLogger): DT_COMPATIBLE = "netcope,mem_logger" - _BIT_LATENCY_TO_FIRST = 0 - def __init__(self, **kwargs): try: super().__init__(**kwargs) + except Exception as e: + raise Exception(f"ERROR while opening MemLogger component:\n {e}") - ctrli = self.load_ctrl(False) - self.config["MEM_DATA_WIDTH"] = self.get_bits(ctrli, self.mi_width, self.mi_width * 0) - self.config["MEM_ADDR_WIDTH"] = self.get_bits(ctrli, self.mi_width, self.mi_width * 1) - self.config["MEM_BURST_WIDTH"] = self.get_bits(ctrli, self.mi_width, self.mi_width * 2) - self.config["MEM_FREQ_KHZ"] = self.get_bits(ctrli, self.mi_width, self.mi_width * 3) - - except Exception: - print("ERROR while opening MemLogger component!\nMaybe unsupported FPGA firmware?!") - exit(1) + self.stats = self.init_stats() def set_config(self, latency_to_first): self.set_ctrlo(latency_to_first & 1) - def ticks_to_s(self, ticks): - return ticks / (self.config["MEM_FREQ_KHZ"] * 1000.0) - - def ticks_to_flow(self, words, ticks): - s = self.ticks_to_s(ticks) - if s == 0: - return 0 - return (words * self.config["MEM_DATA_WIDTH"]) / s - - # Remove leading and trailing zeros - def trim_hist(self, data): - res = {} - res_tmp = {} - - started = False - for k, v in data.items(): - if v != 0 or started: - res_tmp[k] = v - started = True - elif v == 0 or not started: - res_tmp = {k: v} - if v != 0: - res = res_tmp.copy() - - return res - - def latency_hist_step(self): - hist_max = 2 ** self.config["VALUE_WIDTH"][0] - hist_step = hist_max / self.config["HIST_BOX_CNT"][0] - return self.ticks_to_s(hist_step - 1) * 10**9 - - def load_stats(self): - stats = {} - - ctrlo = self.load_ctrl(True) - stats["latency_to_first"] = (ctrlo >> self._BIT_LATENCY_TO_FIRST) & 1 - - # Cnters - stats["wait"] = self.load_cnter(0) - stats["request_hold"] = self.load_cnter(1) - stats["rdy_hold_write"] = self.load_cnter(2) - stats["rdy_hold_read"] = self.load_cnter(3) - - stats["wr_ticks"] = self.load_cnter(4) - stats["rd_ticks"] = self.load_cnter(5) - stats["total_ticks"] = self.load_cnter(6) - stats["wr_req_cnt"] = self.load_cnter(7) - stats["wr_req_words"] = self.load_cnter(8) - stats["rd_req_cnt"] = self.load_cnter(9) - stats["rd_req_words"] = self.load_cnter(10) - stats["rd_resp_words"] = self.load_cnter(11) - stats["err_zero_burst"] = self.load_cnter(12) - stats["err_simult_rw"] = self.load_cnter(13) - - # Values - stats["latency"] = self.load_value(0) + def init_stats(self): + stats = Stats.LoggerStats('Mem logger', logger=self) + + # Constants # + + constants = [ + "MEM_DATA_WIDTH", + "MEM_ADDR_WIDTH", + "MEM_BURST_WIDTH", + "MEM_FREQ_KHZ", + ] + stats.add_stats( + 
name='Constants', + names=constants, + indexes=list(range(len(constants))), + constructor=lambda i, n: Stats.Constant(i, n) + ) + stats.load() + + freq = stats['Constants']['MEM_FREQ_KHZ'] * 1000.0 + word_b = stats['Constants']['MEM_DATA_WIDTH'] + + # Counters # + + counters = [ + "no req + not rdy", + "no req + rdy", + "wr + not rdy", + "rd + not rdy", + "wr ticks", + "rd ticks", + "total ticks", + "wr req cnt", + "wr req words", + "rd req cnt", + "rd req words", + "rd resp words", + "err zero burst", + "err simult rw", + ] + + def counter_i(name): + return counters.index(name) + + # Requests # + + reqs = ['wr req cnt', 'wr req words', 'rd req cnt', 'rd req words', 'rd resp words'] + stats.add_stats( + name='Requests', + names=reqs, + indexes=list(map(counter_i, reqs)), + constructor=lambda i, n: Stats.Counter(i, n) + ) + + # Data flow # + + stats_flow = Stats.LoggerStats('Data flow') + stats.add_stat(stats_flow) + + stats_flow.add_stat(Stats.FlowCounter( + index_words=counter_i('wr req words'), index_ticks=counter_i('wr ticks'), + freq=freq, word_bits=word_b, name='wr flow' + )) + stats_flow.add_stat(Stats.FlowCounter( + index_words=counter_i('rd resp words'), index_ticks=counter_i('rd ticks'), + freq=freq, word_bits=word_b, name='rd flow' + )) + + # Values # + + stats_val = Stats.LoggerStats('Values') + stats.add_stat(stats_val) + + stats_val.add_stat(Stats.Value( + index=0, name='latency', + convert=Stats.ConvertTime(freq, units='ns'), + format=Stats.FormatDefaultValue(units='ns', format=Stats.FormatDefault(decimal=1)) + )) if self.config["VALUE_CNT"] > 1: - stats["paralel_read"] = self.load_value(1) - - # Calculate time and flow - stats["wr_time_ms"] = self.ticks_to_s(stats['wr_ticks']) * 10**3 - stats["wr_flow_gbs"] = self.ticks_to_flow(stats['wr_req_words'], stats['wr_ticks']) / 10**9 - - stats["rd_time_ms"] = self.ticks_to_s(stats['rd_ticks']) * 10**3 - stats["rd_flow_gbs"] = self.ticks_to_flow(stats['rd_resp_words'], stats['rd_ticks']) / 10**9 - - stats["total_time_ms"] = self.ticks_to_s(stats['total_ticks']) * 10**3 - stats["total_flow_gbs"] = self.ticks_to_flow(stats['wr_req_words'] + stats['rd_resp_words'], stats['total_ticks']) / 10**9 - - # Calculate latency - stats["latency"]["min_ns"] = self.ticks_to_s(stats["latency"]["min"]) * 10**9 - stats["latency"]["max_ns"] = self.ticks_to_s(stats["latency"]["max"]) * 10**9 - stats["latency"]["avg_ns"] = self.ticks_to_s(stats["latency"]["avg"]) * 10**9 - - # Calculate latency histogram - hist_step = self.config["HIST_STEP"][0] - stats["latency"]["hist_ns"] = {} + stats_val.add_stat(Stats.Value(index=1, name='paralel reads')) + + # Duration # + + stats_dur = Stats.LoggerStats('Test duration') + stats.add_stat(stats_dur) + + stats_dur.add_stat(Stats.TimeCounter( + index=counter_i('wr ticks'), freq=freq, + name='wr time', units='ms' + )) + stats_dur.add_stat(Stats.TimeCounter( + index=counter_i('rd ticks'), freq=freq, + name='rd time', units='ms' + )) + stats_dur.add_stat(Stats.TimeCounter( + index=counter_i('total ticks'), freq=freq, + name='total time', units='ms' + )) + + # Errors # + + errs = ['err zero burst', 'err simult rw'] + stats.add_stats( + name='Errors', + names=errs, + indexes=list(map(counter_i, errs)), + constructor=lambda i, n: Stats.Counter(i, n) + ) + + # Ready signal status # + + rdy_status = ["no req + not rdy", "no req + rdy", "wr + not rdy", "rd + not rdy"] + stats.add_stats( + name='Ready signal status', + names=rdy_status, + indexes=list(map(counter_i, rdy_status)), + constructor=lambda i, n: Stats.Counter(i, n) 
+ ) + + # Special statistics # + + BIT_LATENCY_TO_FIRST = 0 + ctrlo = self.load_ctrl(True) + latency_to_first = (ctrlo >> BIT_LATENCY_TO_FIRST) & 1 + stats.add_stat(Stats.Custom(name='latency to first word', data=latency_to_first)) - for i, v in enumerate(stats["latency"]["hist"]): - end = self.ticks_to_s((i + 1) * hist_step - 1) * 10**9 - stats["latency"]["hist_ns"][end] = v - stats["latency"]["hist_ns"] = self.trim_hist(stats["latency"]["hist_ns"]) + stats.add_calc_stats(self.calc_stats) return stats - def stats_to_json(self, stats): - res = json.dumps(stats, indent=4) - print(res) - - def line_to_str(self, txt, val, unit=""): - if isinstance(val, int): - val = f"{val:<15}" - elif isinstance(val, float): - val = f"{val:< .2f}" - - if unit != "": - unit = f"[{unit}]" - return f"{txt:<20} {val} {unit}\n" - - def stats_to_str(self, stats): - res = "" - res += "Mem_logger statistics:\n" - res += "----------------------\n" - res += self.line_to_str("write requests ", stats['wr_req_cnt']) - res += self.line_to_str(" write words ", stats['wr_req_words']) - res += self.line_to_str("read requests ", stats['rd_req_cnt']) - res += self.line_to_str(" requested words", stats['rd_req_words']) - res += self.line_to_str(" received words ", stats['rd_resp_words']) - res += "Handshakes:\n" - res += self.line_to_str(" avmm rdy hold ", stats['rdy_hold_read'] + stats['rdy_hold_write']) - res += self.line_to_str(" avmm rdy hold (rd) ", stats['rdy_hold_read']) - res += self.line_to_str(" avmm rdy hold (wr) ", stats['rdy_hold_write']) - res += self.line_to_str(" no request ", stats["request_hold"]) - res += self.line_to_str(" wait ", stats["wait"]) - res += "Flow:\n" - res += self.line_to_str(" write", stats['wr_flow_gbs'], "Gb/s") - res += self.line_to_str(" read ", stats['rd_flow_gbs'], "Gb/s") - res += self.line_to_str(" total", stats['total_flow_gbs'], "Gb/s") - res += "Time:\n" - res += self.line_to_str(" write", stats['wr_time_ms'], "ms") - res += self.line_to_str(" read ", stats['rd_time_ms'], "ms") - res += self.line_to_str(" total", stats['total_time_ms'], "ms") - res += "Latency:\n" - res += self.line_to_str(" min", stats['latency']["min_ns"], "ns") - res += self.line_to_str(" max", stats['latency']["max_ns"], "ns") - res += self.line_to_str(" avg", stats['latency']["avg_ns"], "ns") - res += " histogram [ns]:\n" - if len(stats['latency']['hist_ns']) > 0: - prev = 0 - for k, v in stats['latency']['hist_ns'].items(): - if v != 0: - res += self.line_to_str(f" {prev:> 6.1f} -{k:> 6.1f} ...", v) - prev = k - res += "Errors:\n" - res += self.line_to_str(" zero burst count", stats['err_zero_burst']) - res += self.line_to_str(" simultaneous r+w", stats['err_simult_rw']) - - if self.config['VALUE_CNT'] > 1: - res += "Paralel reads count:\n" - res += self.line_to_str(" min", stats['paralel_read']["min"], "") - res += self.line_to_str(" max", stats['paralel_read']["max"], "") - res += self.line_to_str(" avg", stats['paralel_read']["avg"], "") - - hist_step = self.config["HIST_STEP"][1] - prev = 0 - for i, v in enumerate(stats["paralel_read"]["hist"]): - if v != 0: - res += self.line_to_str(f" {prev:> 6.1f} -{hist_step * (i + 1):> 6.1f} ...", v) - prev = hist_step * (i + 1) - - return res - - def config_to_str(self): - res = "" - res += "Mem_logger config:\n" - res += "------------------\n" - res += f"MEM_DATA_WIDTH: {self.config['MEM_DATA_WIDTH']}\n" - res += f"MEM_ADDR_WIDTH: {self.config['MEM_ADDR_WIDTH']}\n" - res += f"MEM_BURST_WIDTH {self.config['MEM_BURST_WIDTH']}\n" - res += f"MEM_FREQ_KHZ: 
{self.config['MEM_FREQ_KHZ']}\n" - res += f"LATENCY_WIDTH: {self.config['VALUE_WIDTH'][0]}\n" - res += f"HIST_BOX_CNT: {self.config['HIST_BOX_CNT'][0]}\n" - res += f"HIST_BOX_WIDTH: {self.config['HIST_BOX_WIDTH'][0]}\n" - res += "\n" - return res - - def print(self): - print(self.config_to_str()) - - stats = self.load_stats() - print(self.stats_to_str(stats)) + def calc_stats(self, data): + data['Data flow']['total flow'] = np.array(data['Data flow']['wr flow']) + np.array(data['Data flow']['rd flow']) + + return data def parseParams(): @@ -210,13 +168,24 @@ def parseParams(): ) access = parser.add_argument_group('card access arguments') - access.add_argument('-d', '--device', default=nfb.libnfb.Nfb.default_device, - metavar='device', help="""device with target FPGA card""") - access.add_argument('-i', '--index', type=int, metavar='index', default=0, help="""index inside DevTree""") + access.add_argument( + '-d', '--device', default=nfb.libnfb.Nfb.default_dev_path, + metavar='device', help="""device with target FPGA card""" + ) + access.add_argument( + '-i', '--index', type=int, metavar='index', + default=0, help="""index inside DevTree""" + ) common = parser.add_argument_group('common arguments') - #common.add_argument('-p', '--print', action='store_true', help = """print registers""") - common.add_argument('--rst', action='store_true', help="""reset mem_tester and mem_logger""") + common.add_argument( + '--rst', action='store_true', + help="""reset mem_tester and mem_logger""" + ) + common.add_argument( + '-j', '--stats-json', action='store_true', + help="""prints mem_logger statistics in json""" + ) args = parser.parse_args() return args @@ -224,11 +193,14 @@ def parseParams(): if __name__ == '__main__': args = parseParams() logger = MemLogger(dev=args.device, index=args.index) + logger.stats.load() - #if args.print: - logger.print() + print(logger.stats.to_str()) if args.rst: logger.rst() + if args.stats_json: + print(logger.stats.data()) + #logger.set_config(latency_to_first=True) diff --git a/comp/debug/mem_tester/sw/mem_tester.py b/comp/debug/mem_tester/sw/mem_tester.py index 8cf19101d..b27acd5f3 100644 --- a/comp/debug/mem_tester/sw/mem_tester.py +++ b/comp/debug/mem_tester/sw/mem_tester.py @@ -62,7 +62,7 @@ def __init__(self, mem_logger, **kwargs): self.last_test_config = None @staticmethod - def compatible_cnt(dev=nfb.libnfb.Nfb.default_device, comp=None): + def compatible_cnt(dev=nfb.libnfb.Nfb.default_dev_path, comp=None): dev = nfb.open(dev) nodes = dev.fdt_get_compatible(comp if comp is not None else MemTester.DT_COMPATIBLE) return len(nodes) @@ -163,7 +163,7 @@ def config_test( auto_precharge=False, refresh_period=None, ): - if burst_cnt > 2 ** self.mem_logger.config["MEM_BURST_WIDTH"] - 1: + if burst_cnt > 2 ** self.mem_logger.stats['Constants']["MEM_BURST_WIDTH"] - 1: print(f"Burst count {burst_cnt} is too large", file=sys.stderr) return @@ -181,7 +181,7 @@ def config_test( self._comp.write32(self._REG_CTRL_IN, ctrli) addr_lim = 0 - max_addr = 2 ** self.mem_logger.config["MEM_ADDR_WIDTH"] * addr_lim_scale + max_addr = 2 ** self.mem_logger.stats['Constants']["MEM_ADDR_WIDTH"] * addr_lim_scale if addr_lim_scale >= 1.0: max_addr -= 2 * burst_cnt addr_lim = int((max_addr // burst_cnt) * burst_cnt) @@ -210,8 +210,8 @@ def check_test_result(self, config, status, stats): errs += f"{status['err_cnt']} words were wrong\n" if status["ecc_err_occ"]: errs += "ECC error occurred\n" - if stats["rd_req_words"] != stats["rd_resp_words"]: - errs += f"{stats['rd_req_words'] - 
stats['rd_resp_words']} words were not received\n" + if stats['Requests']["rd req words"] != stats['Requests']["rd resp words"]: + errs += f"{stats['Requests']['rd req words'] - stats['Requests']['rd resp words']} words were not received\n" if not status["test_succ"] and errs == "" and not config["rand_addr"]: errs += "Unknown error occurred\n" return errs @@ -219,7 +219,8 @@ def check_test_result(self, config, status, stats): def get_test_result(self): config = self.last_test_config status = self.load_status() - stats = self.mem_logger.load_stats() + self.mem_logger.stats.load() + stats = self.mem_logger.stats.data() errs = self.check_test_result(config, status, stats) return config, status, stats, errs @@ -236,13 +237,13 @@ def test_result_to_str(self, config, status, stats, errs): res += "\nErrors:\n" res += errs res += "\n" - res += self.mem_logger.stats_to_str(stats) + res += self.mem_logger.stats.to_str() return res def amm_gen_set_buff(self, burst, data): prev_addr = self._comp.read32(self._REG_AMM_GEN_ADDR) - mi_width = self.mem_logger.config["MI_DATA_WIDTH"] - slices = math.ceil(self.mem_logger.config["MEM_DATA_WIDTH"] / mi_width) + mi_width = self.mem_logger.stats['Constants']["MI_DATA_WIDTH"] + slices = math.ceil(self.mem_logger.stats['Constants']["MEM_DATA_WIDTH"] / mi_width) for s in range(0, slices): slice = self.mem_logger.get_bits(data, mi_width, mi_width * s) @@ -253,8 +254,8 @@ def amm_gen_set_buff(self, burst, data): self._comp.write32(self._REG_AMM_GEN_ADDR, prev_addr) def amm_gen_get_buff(self): - mi_width = self.mem_logger.config["MI_DATA_WIDTH"] - slices = math.ceil(self.mem_logger.config["MEM_DATA_WIDTH"] / mi_width) + mi_width = self.mem_logger.stats['Constants']["MI_DATA_WIDTH"] + slices = math.ceil(self.mem_logger.stats['Constants']["MEM_DATA_WIDTH"] / mi_width) prev_addr = self._comp.read32(self._REG_AMM_GEN_ADDR) burst = self._comp.read32(self._REG_AMM_GEN_BURST) @@ -314,34 +315,92 @@ def parseParams(): ) access = parser.add_argument_group('card access arguments') - access.add_argument('-d', '--device', default=nfb.libnfb.Nfb.default_device, - metavar='device', help="""device with target FPGA card.""") - access.add_argument('-i', '--index', type=int, metavar='index', default=0, help="""mem_tester index inside DevTree.""") - access.add_argument('-I', '--logger-index', type=int, metavar='index', default=None, help="""mem_logger index inside DevTree.""") + access.add_argument( + '-d', '--device', default=nfb.libnfb.Nfb.default_dev_path, + metavar='device', help="""device with target FPGA card.""" + ) + access.add_argument( + '-i', '--index', type=int, metavar='index', default=0, + help="""mem_tester index inside DevTree.""" + ) + access.add_argument( + '-I', '--logger-index', type=int, metavar='index', default=None, + help="""mem_logger index inside DevTree.""" + ) common = parser.add_argument_group('common arguments') - common.add_argument('-p', '--print', action='store_true', help="""print registers""") - common.add_argument('--rst', action='store_true', help="""reset mem_tester and mem_logger""") - common.add_argument('--rst-tester', action='store_true', help="""reset mem_tester""") - common.add_argument('--rst-logger', action='store_true', help="""reset mem_logger""") - common.add_argument('--rst-emif', action='store_true', help="""reset memory driver""") + common.add_argument( + '-p', '--print', action='store_true', + help="""print registers""" + ) + common.add_argument( + '--rst', action='store_true', + help="""reset mem_tester and mem_logger""" + ) + 
common.add_argument( + '--rst-tester', action='store_true', + help="""reset mem_tester""" + ) + common.add_argument( + '--rst-logger', action='store_true', + help="""reset mem_logger""" + ) + common.add_argument( + '--rst-emif', action='store_true', + help="""reset memory driver""" + ) test = parser.add_argument_group('test related arguments') - #test.add_argument('-t', '--test', action='store_true', help = """run test""") - test.add_argument('-r', '--rand', action='store_true', help="""use random indexing during test""") - test.add_argument('-b', '--burst', default=4, type=int, help="""burst count during test""") - test.add_argument('-s', '--scale', default=1.0, type=float, help="""tested address space (1.0 = whole)""") - test.add_argument('-o', '--one-simult', action='store_true', help="""use only one simultaneous read during test""") - test.add_argument('-f', '--to-first', action='store_true', help="""measure latency to the first received word""") - test.add_argument('--auto-precharge', action='store_true', help="""use auto precharge during test""") - test.add_argument('--refresh', default=None, type=int, help="""set refresh period in ticks""") + test.add_argument( + '-r', '--rand', action='store_true', + help="""use random indexing during test""" + ) + test.add_argument( + '-b', '--burst', default=4, type=int, + help="""burst count during test""" + ) + test.add_argument( + '-s', '--scale', default=1.0, type=float, + help="""tested address space (1.0 = whole)""" + ) + test.add_argument( + '-o', '--one-simult', action='store_true', + help="""use only one simultaneous read during test""" + ) + test.add_argument( + '-f', '--to-first', action='store_true', + help="""measure latency to the first received word""" + ) + test.add_argument( + '--auto-precharge', action='store_true', + help="""use auto precharge during test""" + ) + test.add_argument( + '--refresh', default=None, type=int, + help="""set refresh period in ticks""" + ) other = parser.add_argument_group('amm_gen control arguments') - other.add_argument('--set-buff', metavar=('burst', 'data'), type=int, nargs=2, help="""set specific burst data in amm_gen buffer""") - other.add_argument('--get-buff', action='store_true', help="""print amm_gen buffer""") - other.add_argument('--gen-wr', metavar='addr', type=int, help="""writes amm_gen buffer to specific address""") - other.add_argument('--gen-rd', metavar='addr', type=int, help="""reads memory data to amm_gen buffer""") - other.add_argument('--gen-burst', type=int, help="""sets burst count for amm_gen""") + other.add_argument( + '--set-buff', metavar=('burst', 'data'), type=int, nargs=2, + help="""set specific burst data in amm_gen buffer""" + ) + other.add_argument( + '--get-buff', action='store_true', + help="""print amm_gen buffer""" + ) + other.add_argument( + '--gen-wr', metavar='addr', type=int, + help="""writes amm_gen buffer to specific address""" + ) + other.add_argument( + '--gen-rd', metavar='addr', type=int, + help="""reads memory data to amm_gen buffer""" + ) + other.add_argument( + '--gen-burst', type=int, + help="""sets burst count for amm_gen""" + ) args = parser.parse_args() return args @@ -359,9 +418,8 @@ def parseParams(): if args.print: status = tester.load_status() print(tester.status_to_str(status)) - print(tester.mem_logger.config_to_str()) - stats = tester.mem_logger.load_stats() - print(tester.mem_logger.stats_to_str(stats)) + tester.mem_logger.stats.load() + print(tester.mem_logger.stats.to_str()) print(tester.amm_gen_to_str()) elif args.rst or 
args.rst_tester: diff --git a/comp/debug/mem_tester/sw/report_gen.py b/comp/debug/mem_tester/sw/report_gen.py index 787f13cbe..86305e901 100644 --- a/comp/debug/mem_tester/sw/report_gen.py +++ b/comp/debug/mem_tester/sw/report_gen.py @@ -9,7 +9,6 @@ import subprocess import argparse -import json import numpy as np import nfb @@ -21,9 +20,10 @@ class ReportGen: - def __init__(self, graph_gen, dev="/dev/nfb0"): + def __init__(self, graph_gen, dev="/dev/nfb0", logger_offset=0): self.graph_gen = graph_gen self.dev = dev + self.logger_offset = logger_offset self.iterCnt = 0 self.currIter = 0 @@ -37,15 +37,15 @@ def __init__(self, graph_gen, dev="/dev/nfb0"): assert self.tester_cnt <= self.logger_cnt self.logger_config = [] - for i in range(0, self.logger_cnt): + for i in range(0, self.tester_cnt): self.open(i) - self.logger_config.append(self.mem_tester.mem_logger.config) + self.logger_config.append(self.mem_tester.mem_logger.stats['Constants']) self.data = { 'info': { 'dev': self.dev, 'tester_comp': self.tester_comp, - 'tester_cnt': self.logger_comp, + 'tester_cnt': self.tester_cnt, 'logger_comp': self.logger_comp, 'logger_cnt': self.logger_cnt, 'logger_config': self.logger_config, @@ -53,7 +53,7 @@ def __init__(self, graph_gen, dev="/dev/nfb0"): } def open(self, index): - logger = MemLogger(dev=self.dev, index=index) + logger = MemLogger(dev=self.dev, index=(index + self.logger_offset)) self.mem_tester = MemTester(logger, dev=self.dev, index=index) def test(self, key, descript, index, params, test_param=None, param_values=None): @@ -62,7 +62,7 @@ def test(self, key, descript, index, params, test_param=None, param_values=None) 'params': params, 'test_param': test_param, 'param_values': param_values, - 'stats': [], + 'stats': None, 'status': [], 'errs': [], } @@ -79,21 +79,23 @@ def test(self, key, descript, index, params, test_param=None, param_values=None) self.mem_tester.execute_test() config, status, stats, errs = self.mem_tester.get_test_result() - stats['latency'].pop('hist') - - data['stats'].append(stats) + data['stats'] = stats data['status'].append(status) data['errs'].append(errs) - data['stats'] = tools.parse_dict_list(data['stats']) + #data['stats'] = tools.parse_dict_list(data['stats']) data['status'] = tools.parse_dict_list(data['status']) if key not in self.data: self.data[key] = {} self.data[key][index] = data - def get_burst_seq(self, index, cnt, burst_scale=1.0): + def get_burst_seq(self, index, cnt, burst_scale=1.0, max_burst=None): burst_width = self.logger_config[index]['MEM_BURST_WIDTH'] - burst_lim = int((2 ** burst_width - 1) * burst_scale) + if max_burst is None: + burst_lim = int((2 ** burst_width - 1) * burst_scale) + else: + burst_lim = max_burst + res = np.linspace(1, burst_lim, cnt) return [int(i) for i in res] @@ -136,10 +138,22 @@ def print_progress(progress, txt='Complete', prefix='Progress', decimals=1, leng def parseParams(): parser = argparse.ArgumentParser(description="""Report generator for mem_tester component""") - parser.add_argument('-d', '--device', default=nfb.libnfb.Nfb.default_device, - metavar='device', help="""device with target FPGA card.""") - parser.add_argument('format', nargs='?', default='pdf', choices=['md', 'pdf'], - help="""Format of the output report)""") + parser.add_argument( + '-d', '--device', default=nfb.libnfb.Nfb.default_dev_path, + metavar='device', help="""device with target FPGA card.""" + ) + parser.add_argument( + 'format', nargs='?', default='pdf', choices=['md', 'pdf'], + help="""Format of the output report""" + ) + 
parser.add_argument( + '--max-burst', type=int, default=None, + help="""Max burst count that will be tested""" + ) + parser.add_argument( + '--logger-offset', type=int, default=0, + help="""Offset for mem_logger compatible (if there is more loggers then testers)""" + ) args = parser.parse_args() return args @@ -148,7 +162,9 @@ def run_cmd(cmd): return subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE).stdout.read().strip().decode("utf-8") -def latency_table(pdf, bursts, latencies): +def latency_table(pdf, bursts, data): + latencies = data['Values']['latency'] + header = ['Latency x burst'] data = [ ['min [ns]'], @@ -157,25 +173,25 @@ def latency_table(pdf, bursts, latencies): ] for i, b in enumerate(bursts): header.append(f'{b:<.0f} [B]') - data[0].append(f"{latencies['latency']['min_ns'][i]:<.1f}") - data[1].append(f"{latencies['latency']['avg_ns'][i]:<.1f}") - data[2].append(f"{latencies['latency']['max_ns'][i]:<.1f}") + data[0].append(f"{latencies['min'][i]:<.1f}") + data[1].append(f"{latencies['avg'][i]:<.1f}") + data[2].append(f"{latencies['max'][i]:<.1f}") pdf.table(header, data) if __name__ == '__main__': args = parseParams() - data_file = 'data.json' + data_file = 'data.npz' report_file = 'mem_tester_report' img_path = 'fig/' tools = LoggerTools() graph_gen = GraphGen(folder=img_path, ratio=(13, 6), output=[".png"]) - gen = ReportGen(graph_gen, dev=args.device) + gen = ReportGen(graph_gen, dev=args.device, logger_offset=args.logger_offset) pdf = PDFGen() - burst_seq = gen.get_burst_seq(0, 50, burst_scale=0.25) + burst_seq = gen.get_burst_seq(0, 50, burst_scale=0.25, max_burst=args.max_burst) test_params = {'burst_cnt': burst_seq[0]} progress = [0, 12 * gen.tester_cnt, False] addr_scale = 0.05 @@ -204,8 +220,9 @@ def latency_table(pdf, bursts, latencies): ## Get data ## print_progress(progress, 'processing data') - with open(data_file, 'w') as f: - f.write(json.dumps(gen.data, sort_keys=True, indent=4)) + #with open(data_file, 'w') as f: + # f.write(json.dumps(gen.data, sort_keys=True, indent=4)) + np.savez_compressed(data_file, np.array(gen.data, dtype=object)) seq_data = gen.data['seq-burst'][index]['stats'] rand_data = gen.data['rand-burst'][index]['stats'] @@ -214,17 +231,17 @@ def latency_table(pdf, bursts, latencies): ## Plot ## - mem_width = gen.mem_tester.mem_logger.config["MEM_DATA_WIDTH"] / 8 + mem_width = gen.mem_tester.mem_logger.stats['Constants']["MEM_DATA_WIDTH"] / 8 burst_seq_b = [i * mem_width for i in burst_seq] # Plot data flow print_progress(progress, 'generating graphs') graph_gen.init_plots() # title="Data flow") graph_gen.basic_plot(burst_seq_b, [ - seq_data['wr_flow_gbs'], - seq_data['rd_flow_gbs'], - rand_data['wr_flow_gbs'], - rand_data['rd_flow_gbs'], + seq_data['Data flow']['wr flow'], + seq_data['Data flow']['rd flow'], + rand_data['Data flow']['wr flow'], + rand_data['Data flow']['rd flow'], ], colors=[ 'royalblue', 'green', @@ -254,17 +271,22 @@ def latency_table(pdf, bursts, latencies): data = rand_data_one_simult # Prepare data - hist = data["latency"]["hist_ns"] - hist_arr = tools.dict_to_numpy(hist) - offset = gen.mem_tester.mem_logger.latency_hist_step() / 2 - limits = (min(burst_seq_b), max(burst_seq_b), min(data['latency']['min_ns']) - offset, max(data['latency']['max_ns']) - offset) - hist_arr /= np.array(data["rd_req_cnt"]) + hist_x = data['Values']["latency"]["hist_x"] + hist_arr = data['Values']["latency"]["hist"] + req_cnt = np.array(data['Requests']["rd req cnt"]).reshape(-1, 1) + + hist_arr = hist_arr / req_cnt + offset = hist_x[1] 
- hist_x[0] + limits = ( + min(burst_seq_b), max(burst_seq_b), + min(data['Values']['latency']['min']) - offset, + max(data['Values']['latency']['max']) - offset + ) graph_gen.init_plots() # title="Read latency") graph_gen.basic_plot(burst_seq_b, [ - data['latency']["min_ns"], - #data['latency']["avg_ns"], - data['latency']["max_ns"], + data['Values']['latency']["min"], + data['Values']['latency']["max"], ], style='--', colors=['black', 'black', 'black'], width=3) graph_gen.plot_2d(hist_arr, limits=limits, min=0, log=True) graph_gen.set_xlabel("burst size [B]") @@ -305,7 +327,7 @@ def latency_table(pdf, bursts, latencies): header = ['Interface', 'DATA WIDTH', 'ADDRESS WIDTH', 'BURST WIDTH', 'Frequency [MHz]'] data = [] - for i in range(info['logger_cnt']): + for i in range(info['tester_cnt']): data.append([ i, info['logger_config'][i]['MEM_DATA_WIDTH'], @@ -332,15 +354,15 @@ def get_row(txt, data, unit): return [txt, f"{data[0]:<.2f} {unit}", f"{data[1]:<.2f} {unit}"] data = [] - data.append(get_row("total time", full_data['total_time_ms'], "ms")) - data.append(get_row("write time", full_data['wr_time_ms'], "ms")) - data.append(get_row("read time", full_data['rd_time_ms'], "ms")) - data.append(get_row("total data flow", full_data['total_flow_gbs'], "Gbps")) - data.append(get_row("write data flow", full_data['wr_flow_gbs'], "Gbps")) - data.append(get_row("read data flow", full_data['rd_flow_gbs'], "Gbps")) - data.append(get_row("min read latency", full_data['latency']['min_ns'], "ns")) - data.append(get_row("avg read latency", full_data['latency']['avg_ns'], "ns")) - data.append(get_row("max read latency", full_data['latency']['max_ns'], "ns")) + data.append(get_row("total time", full_data['Test duration']['total time'], "ms")) + data.append(get_row("write time", full_data['Test duration']['wr time'], "ms")) + data.append(get_row("read time", full_data['Test duration']['rd time'], "ms")) + data.append(get_row("total data flow", full_data['Data flow']['total flow'], "Gbps")) + data.append(get_row("write data flow", full_data['Data flow']['wr flow'], "Gbps")) + data.append(get_row("read data flow", full_data['Data flow']['rd flow'], "Gbps")) + data.append(get_row("min read latency", full_data['Values']['latency']['min'], "ns")) + data.append(get_row("avg read latency", full_data['Values']['latency']['avg'], "ns")) + data.append(get_row("max read latency", full_data['Values']['latency']['max'], "ns")) pdf.table(header, data) pdf.heading(2, "Maximum data flow") diff --git a/comp/debug/mem_tester/sw/test_mem_tester.py b/comp/debug/mem_tester/sw/test_mem_tester.py index 09d7f6f2f..777d8f855 100644 --- a/comp/debug/mem_tester/sw/test_mem_tester.py +++ b/comp/debug/mem_tester/sw/test_mem_tester.py @@ -15,6 +15,8 @@ ######### device = '/dev/nfb0' +logger_offset = 0 +allow_more_loggers = False def comp_cnt(comp=MemTester.DT_COMPATIBLE): @@ -28,11 +30,12 @@ def test_comp_cnt(): logger_cnt = comp_cnt(comp='netcope,mem_logger') # todo assert logger_cnt > 0, "No mem_logger found" - assert logger_cnt == tester_cnt, "Number of mem_testers does not match number of mem_loggers" + if not allow_more_loggers: + assert logger_cnt == tester_cnt, "Number of mem_testers does not match number of mem_loggers" def open(index): - logger = MemLogger(index=index, dev=device) + logger = MemLogger(index=index + logger_offset, dev=device) tester = MemTester(logger, index=index, dev=device) return tester @@ -55,7 +58,7 @@ def test_status(): def test_config(): for i in range(0, comp_cnt()): tester = open(i) - config = 
tester.mem_logger.config + config = tester.mem_logger.stats['Constants'] assert config["MEM_DATA_WIDTH"] > 0, f"Invalid MEM_DATA_WIDTH ({config['MEM_DATA_WIDTH']})" assert config["MEM_ADDR_WIDTH"] > 0, f"Invalid MEM_ADDR_WIDTH ({config['MEM_ADDR_WIDTH']})" @@ -66,8 +69,10 @@ def test_config(): def test_seq(): for i in range(0, comp_cnt()): tester = open(i) + config = tester.mem_logger.stats['Constants'] + min_burst = 1 - max_burst = 2 ** tester.mem_logger.config["MEM_BURST_WIDTH"] - 1 + max_burst = 2 ** config["MEM_BURST_WIDTH"] - 1 for b in (min_burst, max_burst): print() @@ -79,8 +84,8 @@ def test_seq(): print(tester.test_result_to_str(config, status, stats, errs)) assert errs == '', f"Errors occurred during test:\n {errs}" - assert stats['wr_req_cnt'] > 0, "No write requests were send" - assert stats['rd_req_cnt'] > 0, "No read requests were send" + assert sum(stats['Requests']['wr req cnt']) > 0, "No write requests were send" + assert sum(stats['Requests']['rd req cnt']) > 0, "No read requests were send" #assert stats['latency']['max_ns'] < 2000, "Too large maximum latency" #if b == min_burst: # assert stats['latency']['avg_ns'] < 500, "Too large average latency" From 93d138f9fef4d9494722bc31f3da68e8d060f31e Mon Sep 17 00:00:00 2001 From: xnevk03 Date: Thu, 19 Sep 2024 19:44:55 +0200 Subject: [PATCH 16/17] tests/jenkins [FEATURE]: Add misc group --- tests/jenkins/ver_misc_tools.jenkins | 53 ++++++++++++++++++++++++++++ 1 file changed, 53 insertions(+) create mode 100644 tests/jenkins/ver_misc_tools.jenkins diff --git a/tests/jenkins/ver_misc_tools.jenkins b/tests/jenkins/ver_misc_tools.jenkins new file mode 100644 index 000000000..6c1f0040f --- /dev/null +++ b/tests/jenkins/ver_misc_tools.jenkins @@ -0,0 +1,53 @@ +/* + * ver_misc_tools.jenkins: Jenkins configuration script + * Copyright (C) 2021 CESNET z. s. p. o. 
+ * Author(s): Jakub Cabal + * + * SPDX-License-Identifier: BSD-3-Clause + */ + +// Clean previous builds +library 'liberouter' +cleanBuilds() + +// ///////////////////////////////////////////////////////////////////////////// +// BASIC-VERIFICATION +// ///////////////////////////////////////////////////////////////////////////// +// Add engines for automatic Jenkins verifications into the following list +// FORMAT: +// [ 'name', 'path_to_ver', 'script.fdo' ], +def COMPONENTS_VER = [\ +] +// ///////////////////////////////////////////////////////////////////////////// + +// ///////////////////////////////////////////////////////////////////////////// +// MULTI-VERIFICATION +// ///////////////////////////////////////////////////////////////////////////// +// Add engines for automatic Jenkins verifications into the following list +// FORMAT: +// [ 'name' , 'path_to_ver' , 'fdo_file.fdo' , 'test_pkg.sv/.vhd' , 'settings.py' ] +def COMPONENTS_MULTIVER = [\ +] +// ///////////////////////////////////////////////////////////////////////////// + +// ///////////////////////////////////////////////////////////////////////////// +// HARP-VERIFICATION +// ///////////////////////////////////////////////////////////////////////////// +// Add engines for automatic Jenkins verifications into the following list +// FORMAT: +// [ 'name' , 'path_to_comp' ], +def COMPONENTS_HARP = [\ + ['HISTOGRAMER', 'comp/debug/histogramer'],\ +] + +// Run component verifications using common script +node('preklad') { + lock(label:"resources-${env.NODE_NAME}", quantity: 1) { + // fetch sources from GIT + checkout scm + def common_run = load "tests/jenkins/common.jenkins" + common_run.commonPrekladHarpRun(COMPONENTS_HARP) + //common_run.commonPrekladVerRun(COMPONENTS_VER) + //common_run.commonPrekladMultiVerRun(COMPONENTS_MULTIVER) + } +} From 38ae0b4b2cec33e8f534da7a9c6fd27d1d259432 Mon Sep 17 00:00:00 2001 From: xnevk03 Date: Sun, 27 Oct 2024 09:02:36 +0100 Subject: [PATCH 17/17] CHANGELOG: Describe changes of data_logger and histogramer --- CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index e7d1b5a1a..01d4d3da5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,6 +8,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] - Improve documentation - Improve documentation looks +- Data_logger: Added packages for statistics processing +- Histogramer: Fixed histogram box update ## [0.7.2] - 2024-10-17
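Editor's note (not part of the patch series): to tie the SW changes in this series together, a hedged end-to-end sketch of reading the new statistics tree from a running card. Only calls that appear in the patches above are used; the import paths assume the `data_logger/sw` packages are installed as described in the readme and that `mem_tester.py` (comp/debug/mem_tester/sw) is on `PYTHONPATH`; device path and indices are illustrative.

```python
# Editorial sketch: dump the mem_logger statistics tree and the mem_tester
# status.  Requires an accessible NFB card with the corresponding firmware.
from mem_logger.mem_logger import MemLogger   # installed from data_logger/sw
from mem_tester import MemTester              # comp/debug/mem_tester/sw/mem_tester.py

DEV = '/dev/nfb0'

logger = MemLogger(dev=DEV, index=0)
logger.stats.load()
print(logger.stats.to_str())
print("word width:", logger.stats['Constants']['MEM_DATA_WIDTH'], "b")

# Statistics can be stored and post-processed offline with graph_tools
logger.stats.save('mem_stats.npz')

tester = MemTester(logger, dev=DEV, index=0)
print(tester.status_to_str(tester.load_status()))

# A full measurement goes through tester.config_test(...), execute_test()
# and get_test_result(); see mem_tester.py's __main__ and test_mem_tester.py
# for the exact parameters.
```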