| @@ -340,4 +340,5 @@ project/plugins/project/ | |||
| # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml | |||
| hs_err_pid* | |||
| *.fir | |||
| *.fir | |||
| *.json | |||
| @@ -22,26 +22,6 @@ | |||
| ** TODO Exercise 1 | |||
| ** TODO Exercise 2 | |||
| * Exercise 0 | |||
| ** Practical | |||
| ** Combinatorial | |||
| *** Get the absolute simplest circuit (mux) working | |||
| *** TODO Draw RTL for a circuit with two multiplexers | |||
| *** Assemble a circuit that uses two multiplexers | |||
| ** State | |||
| *** TODO Draw RTL for a simple counter | |||
| *** Implement a simple counter | |||
| *** TODO Implement a simple register reader (registerAvleser) | |||
| *** Draw RTL of the register reader | |||
| *** Implement daisyVec | |||
| ** Putting it together | |||
| *** Create three daisyVecs that are filled and read in a test. | |||
| *** Combine the homemade mux with the daisyVecs | |||
| *** Combine into a parameterizable matrix, with accompanying tests. | |||
| *** Instantiate two matrices | |||
| *** Multiply them | |||
| * Tutorials | |||
| https://github.com/ucb-bar/generator-bootcamp | |||
| https://github.com/ucb-bar/chisel-tutorial/wiki/chisel-installation | |||
| @@ -50,3 +50,4 @@ scalacOptions ++= scalacOptionsVersion(scalaVersion.value) | |||
| scalacOptions ++= Seq("-language:reflectiveCalls") | |||
| javacOptions ++= javacOptionsVersion(scalaVersion.value) | |||
| @@ -4,52 +4,6 @@ import chisel3.core.Input | |||
| import chisel3.iotesters.PeekPokeTester | |||
| object CoreMain { | |||
| def main(args: Array[String]): Unit = { | |||
| iotesters.Driver.execute(args, () => new mySelector(10)){ | |||
| c => new mySelectorTest(c) | |||
| } | |||
| } | |||
| } | |||
| class Tile(data_width: Int, cols: Int, rows: Int) extends Module{ | |||
| val io = IO(new Bundle { | |||
| val data_in = Input(UInt(data_width.W)) | |||
| val reset = Input(Bool()) | |||
| val data_out = Output(UInt(data_width.W)) | |||
| val data_out_delayed = Output(UInt(data_width.W)) | |||
| }) | |||
| val data_reg = Reg(init=UInt(0, width = data_width)) | |||
| io.data_out := io.data_in | |||
| data_reg := io.data_in | |||
| io.data_out_delayed := data_reg | |||
| } | |||
| class myTest(c: Tile) extends PeekPokeTester(c) { | |||
| poke(c.io.data_in, 0) | |||
| peek(c.io.data_out_delayed) | |||
| step(1) | |||
| poke(c.io.data_in, 1) | |||
| peek(c.io.data_out) | |||
| peek(c.io.data_out_delayed) | |||
| step(1) | |||
| poke(c.io.data_in, 2) | |||
| peek(c.io.data_out) | |||
| peek(c.io.data_out_delayed) | |||
| step(1) | |||
| poke(c.io.data_in, 3) | |||
| peek(c.io.data_out) | |||
| peek(c.io.data_out_delayed) | |||
| } | |||
| object Extras { | |||
| def somefun(someval: Int) : Unit = {} | |||
| @@ -101,8 +101,10 @@ class mySelector(numValues: Int) extends Module { | |||
| val counter = RegInit(UInt(Chisel.log2Up(numValues).W), 0.U) | |||
| val nextOutputIsFresh = RegInit(Bool(), true.B) | |||
| // Generate random values. Using the when keyword we choose which random | |||
| // value should drive the dataOut signal | |||
| /** | |||
| Generate random values. Using the when keyword we choose which random | |||
| value should drive the dataOut signal | |||
| */ | |||
| io.dataOut := 0.U | |||
| List.fill(numValues)(scala.util.Random.nextInt(100)).zipWithIndex.foreach { | |||
| case(rand, idx) => | |||
| @@ -114,9 +116,11 @@ class mySelector(numValues: Int) extends Module { | |||
| } | |||
| } | |||
| // While chisel comes with an inbuilt Counter, we implement ours the old fashion way | |||
| // There are far more elegant ways of implementing this, read the chisel docs, discuss | |||
| // best practice among yourselves and experiment! | |||
| /** | |||
| While chisel comes with an inbuilt Counter, we implement ours the old-fashioned way. | |||
| There are far more elegant ways of implementing this; read the chisel docs, discuss | |||
| best practice among yourselves and experiment! | |||
| */ | |||
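| // As a sketch of the more idiomatic route (not required here), chisel3.util.Counter | |||
| // can do the wrap-around bookkeeping for us, e.g. | |||
| //   val (counterValue, wrapped) = Counter(io.next, numValues) | |||
| // where `wrapped` is true.B on the cycle the counter wraps back to zero. | |||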
| nextOutputIsFresh := true.B | |||
| when(io.next === true.B){ | |||
| when(counter < (numValues - 1).U){ | |||
| @@ -1,7 +1,6 @@ | |||
| package Core | |||
| import chisel3._ | |||
| import chisel3.core.Input | |||
| import chisel3.iotesters.PeekPokeTester | |||
| import chisel3.util.Counter | |||
| /** | |||
| @@ -17,12 +16,20 @@ class daisyDot(elements: Int, dataWidth: Int) extends Module{ | |||
| val outputValid = Output(Bool()) | |||
| }) | |||
| /** | |||
| Keep track of how many elements have been accumulated. As the interface has no | |||
| indicator that data can be invalid, it should always be assumed that data IS valid. | |||
| This in turn means that the counter should tick on every cycle | |||
| */ | |||
| val counter = Counter(elements) | |||
| val accumulator = RegInit(UInt(dataWidth.W), 0.U) | |||
| /** | |||
| Your implementation here | |||
| */ | |||
| // Increment the value of the accumulator with the product of data in A and B | |||
| // When the counter reaches elements set output valid to true and flush the accumulator | |||
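| // One possible shape of that logic, given only as a hedged sketch (io.dataInA, | |||
| // io.dataInB and io.dataOut follow the test bench; Counter.inc() returns true.B | |||
| // on the cycle the counter wraps): | |||
| // | |||
| //   val product = io.dataInA * io.dataInB | |||
| //   accumulator := accumulator + product | |||
| //   io.dataOut := accumulator + product | |||
| //   io.outputValid := false.B | |||
| //   when(counter.inc()) { | |||
| //     io.outputValid := true.B | |||
| //     accumulator := 0.U  // flush, ready for the next dot product | |||
| //   } | |||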
| /** | |||
| LF | |||
| @@ -2,46 +2,43 @@ package Core | |||
| import chisel3._ | |||
| import chisel3.core.Input | |||
| import chisel3.iotesters.PeekPokeTester | |||
| import utilz._ | |||
| /** | |||
| DaisyGrids hold n daisyVecs. Unlike the daisyVecs, daisyGrids have a select signal for selecting | |||
| which daisyVec to work on, but these daisyVecs cannot be controlled from the outside. | |||
| */ | |||
| class daisyGrid(rows: Int, cols: Int, dataWidth: Int) extends Module{ | |||
| class daisyGrid(dims: Dims, dataWidth: Int) extends Module{ | |||
| val io = IO(new Bundle { | |||
| val readEnable = Input(Bool()) | |||
| val writeEnable = Input(Bool()) | |||
| val dataIn = Input(UInt(dataWidth.W)) | |||
| val rowSelect = Input(UInt(8.W)) | |||
| val dataOut = Output(UInt(dataWidth.W)) | |||
| }) | |||
| val currentRowIndex = RegInit(UInt(8.W), 0.U) | |||
| val currentColIndex = RegInit(UInt(8.W), 0.U) | |||
| val memRows = Array.fill(rows){ Module(new daisyVector(cols, dataWidth)).io } | |||
| val elements = rows*cols | |||
| val rows = Array.fill(dims.rows){ Module(new daisyVector(dims.cols, dataWidth)).io } | |||
| /** | |||
| Your implementation here | |||
| */ | |||
| /** | |||
| LF | |||
| */ | |||
| io.dataOut := 0.U | |||
| for(ii <- 0 until rows){ | |||
| for(ii <- 0 until dims.rows){ | |||
| memRows(ii).readEnable := 0.U | |||
| memRows(ii).dataIn := io.dataIn | |||
| rows(ii).writeEnable := 0.U | |||
| rows(ii).dataIn := io.dataIn | |||
| when(io.rowSelect === ii.U ){ | |||
| memRows(ii).readEnable := io.readEnable | |||
| io.dataOut := memRows(ii).dataOut | |||
| rows(ii).writeEnable := io.writeEnable | |||
| io.dataOut := rows(ii).dataOut | |||
| } | |||
| } | |||
| } | |||
| @@ -2,26 +2,30 @@ package Core | |||
| import chisel3._ | |||
| import chisel3.core.Input | |||
| import chisel3.iotesters.PeekPokeTester | |||
| import utilz._ | |||
| /** | |||
| The daisy multiplier creates two daisy grids, one transposed, and multiplies them. | |||
| */ | |||
| class daisyMultiplier(val rowsA: Int, val colsA: Int, val rowsB: Int, val colsB: Int, val dataWidth: Int) extends Module { | |||
| class daisyMultiplier(dims: Dims, dataWidth: Int) extends Module { | |||
| val io = IO(new Bundle { | |||
| val dataInA = Input(UInt(dataWidth.W)) | |||
| val readEnableA = Input(Bool()) | |||
| val writeEnableA = Input(Bool()) | |||
| val dataInB = Input(UInt(dataWidth.W)) | |||
| val readEnableB = Input(Bool()) | |||
| val writeEnableB = Input(Bool()) | |||
| val dataOut = Output(UInt(dataWidth.W)) | |||
| val dataValid = Output(Bool()) | |||
| val done = Output(Bool()) | |||
| }) | |||
| // How many cycles does it take to fill the matrices with data? | |||
| /** | |||
| Your implementation here | |||
| */ | |||
| val rowCounter = RegInit(UInt(8.W), 0.U) | |||
| val colCounter = RegInit(UInt(8.W), 0.U) | |||
| @@ -33,20 +37,25 @@ class daisyMultiplier(val rowsA: Int, val colsA: Int, val rowsB: Int, val colsB: | |||
| val resultReady = RegInit(Bool(), false.B) | |||
| //////////////////////////////////////// | |||
| //////////////////////////////////////// | |||
| /// We transpose matrix B. This means that if both matrices read the same input | |||
| /// stream then they will end up transposed. | |||
| val matrixA = Module(new daisyGrid(rowsA, colsA, dataWidth)).io | |||
| val matrixB = Module(new daisyGrid(colsB, rowsB, dataWidth)).io | |||
| /** | |||
| We follow the same principle as in the vector matrix multiplication, but do | |||
| NOT transpose the dimensions. | |||
| When writing a multiplier for a 3x2 matrix it is implicit that this means a | |||
| 3x2 matrix multiplied by a 2x3 matrix, returning a 3x3 matrix. By not transposing | |||
| the dimensions we get the same effect as in VecMat. | |||
| */ | |||
| val matrixA = Module(new daisyGrid(dims, dataWidth)).io | |||
| val matrixB = Module(new daisyGrid(dims, dataWidth)).io | |||
| matrixA.dataIn := io.dataInA | |||
| matrixA.readEnable := io.readEnableA | |||
| matrixA.writeEnable := io.writeEnableA | |||
| matrixB.dataIn := io.dataInB | |||
| matrixB.readEnable := io.readEnableB | |||
| printf("matrix A data in: %d\n", matrixB.dataIn) | |||
| matrixB.writeEnable := io.writeEnableB | |||
| //////////////////////////////////////// | |||
| @@ -54,16 +63,16 @@ class daisyMultiplier(val rowsA: Int, val colsA: Int, val rowsB: Int, val colsB: | |||
| /// Set up counter statemachine | |||
| io.done := false.B | |||
| when(colCounter === (colsA - 1).U){ | |||
| when(colCounter === (dims.cols - 1).U){ | |||
| colCounter := 0.U | |||
| when(rowCounter === (rowsA - 1).U){ | |||
| when(rowCounter === (dims.rows - 1).U){ | |||
| rowCounter := 0.U | |||
| calculating := true.B | |||
| when(calculating === true.B){ | |||
| when(rowOutputCounter === (rowsA - 1).U){ | |||
| when(rowOutputCounter === (dims.rows - 1).U){ | |||
| io.done := true.B | |||
| }.otherwise{ | |||
| rowOutputCounter := rowOutputCounter + 1.U | |||
| @@ -99,7 +108,7 @@ class daisyMultiplier(val rowsA: Int, val colsA: Int, val rowsB: Int, val colsB: | |||
| resultReady := false.B | |||
| io.dataValid := false.B | |||
| when(calculating === true.B){ | |||
| when(colCounter === (colsA - 1).U){ | |||
| when(colCounter === (dims.cols - 1).U){ | |||
| resultReady := true.B | |||
| } | |||
| } | |||
| @@ -117,18 +126,3 @@ class daisyMultiplier(val rowsA: Int, val colsA: Int, val rowsB: Int, val colsB: | |||
| } | |||
| io.dataOut := accumulator | |||
| } | |||
| class daisyMultiplierTest(c: daisyMultiplier) extends PeekPokeTester(c) { | |||
| poke(c.io.readEnableA, 1) | |||
| poke(c.io.readEnableB, 1) | |||
| for(ii <- 0 until 6){ | |||
| println("data in:") | |||
| poke(c.io.dataInA, (ii/2) + 1) | |||
| poke(c.io.dataInB, (ii/2) + 1) | |||
| println("fill counters") | |||
| step(1) | |||
| println("////////////////////\n") | |||
| } | |||
| } | |||
| @@ -4,21 +4,37 @@ import chisel3.core.Input | |||
| import chisel3.iotesters.PeekPokeTester | |||
| /** | |||
| DaisyVectors are not indexed. They have no control inputs or outputs, only data. | |||
| DaisyVectors are not indexed externally. They have no control inputs or outputs, only data. | |||
| */ | |||
| class daisyVector(elements: Int, dataWidth: Int) extends Module{ | |||
| val io = IO(new Bundle { | |||
| val readEnable = Input(Bool()) | |||
| val writeEnable = Input(Bool()) | |||
| val dataIn = Input(UInt(dataWidth.W)) | |||
| val dataOut = Output(UInt(dataWidth.W)) | |||
| }) | |||
| /** | |||
| Although the vector is not accessible by index externally, an internal index is necessary. | |||
| It is initialized to the value 0. | |||
| */ | |||
| val currentIndex = RegInit(UInt(8.W), 0.U) | |||
| val memory = Array.fill(elements)(RegInit(UInt(dataWidth.W), 0.U)) | |||
| /** | |||
| Your implementation here | |||
| */ | |||
| // Cycle the currentIndex register; it should be equal to the current (cycle % elements) | |||
| // Connect the selected output to io.dataOut | |||
| // Connect writeEnable to the selected memory (selectable with memory(currentIndex)) | |||
| /** | |||
| LF | |||
| */ | |||
| when(currentIndex === (elements - 1).U ){ | |||
| currentIndex := 0.U | |||
| }.otherwise{ | |||
| @@ -30,7 +46,7 @@ class daisyVector(elements: Int, dataWidth: Int) extends Module{ | |||
| for(ii <- 0 until elements){ | |||
| when(currentIndex === ii.U){ | |||
| when(io.readEnable === true.B){ | |||
| when(io.writeEnable === true.B){ | |||
| memory(ii) := io.dataIn | |||
| } | |||
| io.dataOut := memory(ii) | |||
| @@ -5,39 +5,86 @@ import chisel3._ | |||
| import chisel3.core.Input | |||
| import chisel3.iotesters.PeekPokeTester | |||
| import chisel3.util.Counter | |||
| import utilz._ | |||
| /** | |||
| The daisy multiplier creates two daisy grids, one transposed, and multiplies them. | |||
| */ | |||
| class daisyVecMat(val lengthA: Int, val rowsB: Int, val colsB: Int, val dataWidth: Int) extends Module { | |||
| class daisyVecMat(matrixDims: Dims, dataWidth: Int) extends Module { | |||
| val io = IO(new Bundle { | |||
| val io = IO( | |||
| new Bundle { | |||
| val dataInA = Input(UInt(dataWidth.W)) | |||
| val readEnableA = Input(Bool()) | |||
| val dataInA = Input(UInt(dataWidth.W)) | |||
| val writeEnableA = Input(Bool()) | |||
| val dataInB = Input(UInt(dataWidth.W)) | |||
| val readEnableB = Input(Bool()) | |||
| val dataInB = Input(UInt(dataWidth.W)) | |||
| val writeEnableB = Input(Bool()) | |||
| val dataOut = Output(UInt(dataWidth.W)) | |||
| val dataValid = Output(Bool()) | |||
| val done = Output(Bool()) | |||
| }) | |||
| val dataOut = Output(UInt(dataWidth.W)) | |||
| val dataValid = Output(Bool()) | |||
| val done = Output(Bool()) | |||
| // How many cycles does it take to fill the matrices with data? | |||
| } | |||
| ) | |||
| /** | |||
| The dimensions are transposed because this is a vector * matrix multiplication: | |||
|               [1, 2] | |||
|   [a, b, c] x [3, 4] | |||
|               [5, 6] | |||
| Here the vector will output a, b, c, a, b, c, a... | |||
| The matrix is the type you made in the last exercise, so it is actually just 3 more vectors | |||
| of length 2. In cycle 0 the values {1, 3, 5} may be selected, in cycle 1 {2, 4, 6} | |||
| can be selected. | |||
| However, you can make up for the impedance mismatch by transposing the matrix, storing | |||
| the data in 2 vectors of length 3 instead. | |||
| In memory matrixB will look like [1, 3, 5] | |||
|                                  [2, 4, 6] | |||
| For a correct result, it is up to the user to input the data for matrixB in a transposed | |||
| manner. This is done in the tests; you don't need to worry about it. | |||
| */ | |||
| val dims = matrixDims.transposed | |||
| // basic linAlg | |||
| val lengthA = dims.cols | |||
| //////////////////////////////////////// | |||
| //////////////////////////////////////// | |||
| /// We transpose matrix B. | |||
| val vecA = Module(new daisyVector(lengthA, dataWidth)).io | |||
| val matrixB = Module(new daisyGrid(colsB, rowsB, dataWidth)).io | |||
| val matrixB = Module(new daisyGrid(dims, dataWidth)).io | |||
| val dotProductCalculator = Module(new daisyDot(lengthA, dataWidth)).io | |||
| val dataIsLoaded = RegInit(Bool(), false.B) | |||
| /** | |||
| Your implementation here | |||
| */ | |||
| // Create counters to keep track of when the matrix and vector have gotten all the data. | |||
| // You can assume that writeEnable will be synchronized with the vectors, i.e. for a vector | |||
| // of length 3 writeEnable can only go from true to false and vice versa at T = 0, 3, 6, 9, etc. | |||
| // Create counters to keep track of how far along the computation is. | |||
| // Set up the correct rowSelect for matrixB | |||
| // Wire up write enables for matrixB and vecA | |||
| /** | |||
| In the solution I used the following to keep track of state. | |||
| You can use these if you want to, or do it however you see fit. | |||
| */ | |||
| // val currentCol = Counter(dims.cols) | |||
| // val rowSel = Counter(dims.rows) | |||
| // val aReady = RegInit(Bool(), false.B) | |||
| // val bReady = RegInit(Bool(), false.B) | |||
| // val isDone = RegInit(Bool(), false.B) | |||
| // val (inputCounterB, counterBWrapped) = Counter(io.writeEnableB, (dims.elements) - 1) | |||
| // val (numOutputted, numOutputtedWrapped) = Counter(dataValid, lengthA) | |||
| // val (inputCounterA, counterAWrapped) = Counter(io.writeEnableA, lengthA - 1) | |||
| /** | |||
| LF | |||
| @@ -49,10 +96,10 @@ class daisyVecMat(val lengthA: Int, val rowsB: Int, val colsB: Int, val dataWidt | |||
| //////////////////////////////////////// | |||
| /// Wire components | |||
| vecA.dataIn := io.dataInA | |||
| vecA.readEnable := io.readEnableA | |||
| vecA.writeEnable := io.writeEnableA | |||
| matrixB.dataIn := io.dataInB | |||
| matrixB.readEnable := io.readEnableB | |||
| matrixB.writeEnable := io.writeEnableB | |||
| io.dataOut := dotProductCalculator.dataOut | |||
| @@ -66,9 +113,14 @@ class daisyVecMat(val lengthA: Int, val rowsB: Int, val colsB: Int, val dataWidt | |||
| //////////////////////////////////////// | |||
| //////////////////////////////////////// | |||
| /// Select the correct row | |||
| val (currentCol, colDone) = Counter(true.B, colsB) | |||
| val (rowSel, _) = Counter(colDone, rowsB) | |||
| matrixB.rowSelect := rowSel | |||
| val currentCol = Counter(dims.cols) | |||
| val rowSel = Counter(dims.rows) | |||
| when(currentCol.inc()){ | |||
| rowSel.inc() | |||
| } | |||
| matrixB.rowSelect := rowSel.value | |||
| //////////////////////////////////////// | |||
| @@ -77,10 +129,10 @@ class daisyVecMat(val lengthA: Int, val rowsB: Int, val colsB: Int, val dataWidt | |||
| val aReady = RegInit(Bool(), false.B) | |||
| val bReady = RegInit(Bool(), false.B) | |||
| val (inputCounterA, counterAWrapped) = Counter(io.readEnableA, lengthA - 1) | |||
| val (inputCounterA, counterAWrapped) = Counter(io.writeEnableA, lengthA - 1) | |||
| when(counterAWrapped){ aReady := true.B } | |||
| val (inputCounterB, counterBWrapped) = Counter(io.readEnableB, colsB*rowsB) | |||
| val (inputCounterB, counterBWrapped) = Counter(io.writeEnableB, (dims.elements) - 1) | |||
| when(counterBWrapped){ bReady := true.B } | |||
| dataIsLoaded := aReady & bReady | |||
| @@ -94,24 +146,5 @@ class daisyVecMat(val lengthA: Int, val rowsB: Int, val colsB: Int, val dataWidt | |||
| when(numOutputtedWrapped){ isDone := true.B } | |||
| // printf(p"dataInA = ${io.dataInA}\n") | |||
| // printf(p"validA = ${io.readEnableA}\n") | |||
| // printf(p"dataInB = ${io.dataInB}\n") | |||
| // printf(p"validB = ${io.readEnableB}\n") | |||
| // printf(p"validOut = ${io.dataValid}\n") | |||
| // printf(p"data loaded = ${dataIsLoaded}\n") | |||
| // printf(p"aReady = ${aReady}\n") | |||
| // printf(p"bReady = ${bReady}\n") | |||
| // printf(p"counter A = ${inputCounterA}\n") | |||
| // printf(p"counter B = ${inputCounterB}\n") | |||
| // printf(p"out = ${dotProductCalculator.dataOut}\n\n") | |||
| io.done := isDone | |||
| } | |||
| @@ -3,6 +3,9 @@ | |||
| FPGAs using chisel. | |||
| In this exercise you will implement a circuit capable of performing matrix | |||
| matrix multiplication in the chisel hardware description language. | |||
| HAND IN YOUR CODE IN AN ARCHIVE NAMED WITH YOUR USERNAME (e.g. peteraa_ex0). | |||
| PLEASE ENSURE THAT THE TESTS CAN BE RUN AFTER UNZIPPING. | |||
| * Your first component | |||
| There are two types of digital components: Combinatorial and stateful. | |||
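| As a rough illustration (a sketch only, with made-up module names; this is not part of | |||
| the exercise skeleton), a mux is purely combinatorial while a register introduces state: | |||
| #+begin_src scala | |||
| import chisel3._ | |||
| // Combinatorial: the output depends only on the current inputs. | |||
| class SimpleMux extends Module { | |||
|   val io = IO(new Bundle { | |||
|     val sel = Input(Bool()) | |||
|     val a   = Input(UInt(8.W)) | |||
|     val b   = Input(UInt(8.W)) | |||
|     val out = Output(UInt(8.W)) | |||
|   }) | |||
|   io.out := Mux(io.sel, io.a, io.b) | |||
| } | |||
| // Stateful: the register remembers last cycle's input. | |||
| class DelayByOne extends Module { | |||
|   val io = IO(new Bundle { | |||
|     val in  = Input(UInt(8.W)) | |||
|     val out = Output(UInt(8.W)) | |||
|   }) | |||
|   io.out := RegNext(io.in, 0.U) | |||
| } | |||
| #+end_src | |||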
| @@ -176,18 +179,25 @@ | |||
| From the figure the principle of operation becomes clearer [Inkscape drawing, rm sketch] | |||
| To test your implementation you can run testOnly Core.daisyVecSpec in sbt | |||
| To test your implementation you can run | |||
| sbt> testOnly Core.daisyVecSpec | |||
| in your sbt console | |||
| ** Task 2 - Dot Product | |||
| Your next task is to implement daisyDot. | |||
| daisyDot should calculate the dot product of two vectors, inA and inB. Ensure that validOut | |||
| is only asserted when you have a result. Ensure that your accumulator gets flushed after | |||
| calculating your dot product. | |||
| Your next task is to implement a dot product calculator. daisyDot should | |||
| calculate the dot product of two vectors, inA and inB. Ensure that validOut | |||
| is only asserted when you have a result. Ensure that your accumulator gets | |||
| flushed after calculating your dot product. | |||
| Implement the dot product calculator in daisyDot.scala | |||
| To test your implementation you can run | |||
| sbt> testOnly Core.daisyDotSpec | |||
| in your sbt console | |||
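| If it helps, here is a plain-Scala reference of the expected cycle-by-cycle behaviour | |||
| (an illustration only; streamingDot is a made-up name and not part of the skeleton): | |||
| #+begin_src scala | |||
| // One (accumulated value, validOut) pair per element pair; validOut is only | |||
| // true on the last element, after which the accumulator is conceptually flushed. | |||
| def streamingDot(as: List[Int], bs: List[Int]): List[(Int, Boolean)] = { | |||
|   val products = (as, bs).zipped.map(_ * _) | |||
|   products.scanLeft(0)(_ + _).tail.zipWithIndex.map { | |||
|     case (acc, idx) => (acc, idx == as.length - 1) | |||
|   } | |||
| } | |||
| // streamingDot(List(1, 2, 3), List(4, 5, 6)) | |||
| // => List((4, false), (14, false), (32, true)) | |||
| #+end_src | |||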
| ** Task 3 - Vector Matrix multiplication | |||
| Having implemented a dot product calculator, a vector matrix multiplier is not that different. | |||
| In imperative code we get something like this: | |||
| Having implemented a dot product calculator, a vector matrix multiplier is | |||
| not that different. In imperative code we get something like this: | |||
| #+begin_src scala | |||
| type Matrix[A] = List[List[A]] | |||
| @@ -202,22 +212,51 @@ | |||
| } | |||
| #+end_src | |||
| This is just repeated application of dotProduct. | |||
| Since vector matrix multiplication is the dotproduct of the vector and the rows of the matrix, | |||
| This is just repeated application of dotProduct. Since vector matrix | |||
| multiplication takes the dot product of the vector with each column of the matrix, | |||
| the matrix must be transposed. | |||
| The skeleton code contains more hints if this did not make any sense. | |||
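| As a small sketch of this idea (vecMatMultiply is a made-up name; dotProduct and | |||
| Matrix are the helpers from the skeleton's utilz.scala): | |||
| #+begin_src scala | |||
| // Vector * matrix as repeated dot products over the transposed matrix. | |||
| def vecMatMultiply(v: List[Int], m: Matrix): List[Int] = | |||
|   m.transpose.map(row => dotProduct(v, row)) | |||
| // vecMatMultiply(List(1, 2, 3), List(List(1, 2), List(3, 4), List(5, 6))) | |||
| // => List(1*1 + 2*3 + 3*5, 1*2 + 2*4 + 3*6) = List(22, 28) | |||
| #+end_src | |||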
| *** Subtask 1 - representing a matrix | |||
| Like the dot product calculator, the first step is to implement a register bank for storing a matrix. | |||
| This can be done by creating n vectors from Task 1 and then select which row is the 'current' row. | |||
| Like the dot product calculator, the first step is to implement a register | |||
| bank for storing a matrix. This can be done by creating n vectors from Task | |||
| 1 and then selecting which row is the 'current' row. | |||
| Implement this in daisyGrid.scala | |||
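| The wiring sketched below mirrors that idea (a hedged sketch only; RowSelectGrid, nRows, | |||
| nCols and dataWidth are placeholder names, daisyVector is the module from Task 1): | |||
| #+begin_src scala | |||
| import chisel3._ | |||
| // A grid of nRows daisyVectors where rowSelect picks the active one. | |||
| class RowSelectGrid(nRows: Int, nCols: Int, dataWidth: Int) extends Module { | |||
|   val io = IO(new Bundle { | |||
|     val writeEnable = Input(Bool()) | |||
|     val dataIn      = Input(UInt(dataWidth.W)) | |||
|     val rowSelect   = Input(UInt(8.W)) | |||
|     val dataOut     = Output(UInt(dataWidth.W)) | |||
|   }) | |||
|   val vecs = Seq.fill(nRows)(Module(new daisyVector(nCols, dataWidth)).io) | |||
|   io.dataOut := 0.U | |||
|   for (i <- 0 until nRows) { | |||
|     vecs(i).dataIn      := io.dataIn | |||
|     vecs(i).writeEnable := false.B | |||
|     when(io.rowSelect === i.U) { | |||
|       vecs(i).writeEnable := io.writeEnable | |||
|       io.dataOut          := vecs(i).dataOut | |||
|     } | |||
|   } | |||
| } | |||
| #+end_src | |||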
| The matrix representation you have created in this task allows you to select which row to read, but | |||
| not which column. This isn't very efficient when you want to read an entire column since you would have | |||
| to wait a full cycle for each row. | |||
| The way we deal with this is noticing that when multiplying two matrices we work on a row basis in | |||
| matrix A, and column basis on matrix B. If we simply transpose matrix B, then accessing its rows is | |||
| the same as accessing the columns of matrix B. | |||
| The matrix representation you have created in this task allows you to select | |||
| which row to read, but not which column. This isn't very efficient when you | |||
| want to read an entire column, since you would have to wait a full cycle for | |||
| each row. The way we deal with this is to notice that when multiplying two | |||
| matrices we work on a row basis in matrix A, and on a column basis in matrix B. | |||
| If we simply transpose matrix B, then accessing its rows is the same as | |||
| accessing the columns of the original matrix B. | |||
| A consequence of this is that the API exposed by your matrix multiplier requires matrix B to be transposed. | |||
| A consequence of this is that the API exposed by your matrix multiplier | |||
| requires matrix B to be transposed. | |||
| *** Subtask 2 - vector matrix multiplication | |||
| You now have the necessary pieces to create a vector matrix multiplier. | |||
| Your implementation should have a vector and a matrix (grid). | |||
| Input for the vector is in order; input for the matrix is transposed. | |||
| Implement this in daisyVecMat.scala | |||
| ** Task 4 - Matrix Matrix multiplication | |||
| You can now implement a matrix matrix multiplier. | |||
| You can (and should) reuse the code for this module from the vector matrix | |||
| multiplier. | |||
| Implement this in daisyMatMul.scala | |||
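| In software terms the reuse looks like this (a sketch, building on the hypothetical | |||
| vecMatMultiply shown under Task 3 and the Matrix alias from utilz.scala): | |||
| #+begin_src scala | |||
| // Matrix * matrix is just vector * matrix applied to every row of the first matrix. | |||
| def matMulViaVecMat(ma: Matrix, mb: Matrix): Matrix = | |||
|   ma.map(row => vecMatMultiply(row, mb)) | |||
| #+end_src | |||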
| When all tests are green you are good to go. | |||
| ** Bonus exercise - Introspection on code quality and design choices | |||
| This "exercise" has no deliverable, but you should spend some time thinking about | |||
| which parts gave you the most trouble and what you can do to change your approach. | |||
| In addition, the implementation you were railroaded into has a flaw that led to | |||
| unnecessary code duplication when going from a vector matrix multiplier to a matrix | |||
| matrix multiplier. | |||
| Why did this happen, and how could this have been avoided? | |||
| @@ -0,0 +1,45 @@ | |||
| package Core | |||
| object utilz { | |||
| type Matrix = List[List[Int]] | |||
| def genMatrix(dims: Dims): Matrix = | |||
| List.fill(dims.rows)( | |||
| List.fill(dims.cols)(scala.util.Random.nextInt(5)) | |||
| ) | |||
| case class Dims(rows: Int, cols: Int){ | |||
| val elements = rows*cols | |||
| def transposed = Dims(cols, rows) | |||
| } | |||
| def printVector(v: List[Int]): String = | |||
| v.mkString("[","\t","]") | |||
| def printMatrix(m: List[List[Int]]): String = | |||
| m.map(printVector).mkString("\n") | |||
| /** | |||
| Prints all the IOs of a Module. | |||
| Typically I'd fix the signature to Map[A,B]. | |||
| ex: | |||
| ``` | |||
| CycleTask[daisyVecMat]( | |||
| 10, | |||
| _ => println(s"at step $n"), | |||
| d => println(printModuleIO(d.peek(d.dut.io))), | |||
| ) | |||
| ``` | |||
| */ | |||
| def printModuleIO[A,B](m: scala.collection.mutable.LinkedHashMap[A,B]): String = | |||
| m.toList.map{ case(x,y) => "" + x.toString() + " -> " + y.toString() }.reverse.mkString("\n") | |||
| def dotProduct(xs: List[Int], ys: List[Int]): Int = | |||
| (for ((x, y) <- xs zip ys) yield x * y).sum | |||
| def matrixMultiply(ma: Matrix, mb: Matrix): Matrix = | |||
| ma.map(mav => mb.transpose.map(mbv => dotProduct(mav,mbv))) | |||
| } | |||
| @@ -0,0 +1,82 @@ | |||
| package Core | |||
| import chisel3._ | |||
| import chisel3.iotesters._ | |||
| import org.scalatest.{Matchers, FlatSpec} | |||
| import testUtils._ | |||
| class daisyDotSpec extends FlatSpec with Matchers { | |||
| behavior of "daisy vector" | |||
| it should "Only signal valid output at end of calculation" in { | |||
| val ins = (0 to 20).map(ii => | |||
| CycleTask[daisyDot]( | |||
| ii, | |||
| d => d.poke(d.dut.io.dataInA, 0), | |||
| d => d.poke(d.dut.io.dataInB, 0), | |||
| d => d.expect(d.dut.io.outputValid, if((ii % 3) == 2) 1 else 0), | |||
| ) | |||
| ) | |||
| iotesters.Driver.execute(() => new daisyDot(3, 32), new TesterOptionsManager) { c => | |||
| IoSpec[daisyDot](ins, c).myTester | |||
| } should be(true) | |||
| } | |||
| it should "Be able to count to 3" in { | |||
| val ins = (0 to 20).map(ii => | |||
| CycleTask[daisyDot]( | |||
| ii, | |||
| d => d.poke(d.dut.io.dataInA, 1), | |||
| d => d.poke(d.dut.io.dataInB, 1), | |||
| d => d.expect(d.dut.io.outputValid, if((ii % 3) == 2) 1 else 0), | |||
| d => if(d.peek(d.dut.io.outputValid) == 1) | |||
| d.expect(d.dut.io.dataOut, 3) | |||
| ) | |||
| ) | |||
| iotesters.Driver.execute(() => new daisyDot(3, 32), new TesterOptionsManager) { c => | |||
| IoSpec[daisyDot](ins, c).myTester | |||
| } should be(true) | |||
| } | |||
| it should "Be able to calculate dot products" in { | |||
| def createProblem(vecLen: Int): List[CycleTask[daisyDot]] = { | |||
| val in1 = List.fill(vecLen)(scala.util.Random.nextInt(10)) | |||
| val in2 = List.fill(vecLen)(scala.util.Random.nextInt(10)) | |||
| val dotProduct = (in1, in2).zipped.map(_*_).sum | |||
| (in1, in2, (0 to vecLen)).zipped.map{ | |||
| case(a, b, idx) => | |||
| CycleTask[daisyDot]( | |||
| idx, | |||
| d => d.poke(d.dut.io.dataInA, a), | |||
| d => d.poke(d.dut.io.dataInB, b), | |||
| d => if(d.peek(d.dut.io.outputValid) == 1) | |||
| d.expect(d.dut.io.dataOut, dotProduct) | |||
| ) | |||
| } | |||
| } | |||
| def createProblems(vecLen: Int): List[CycleTask[daisyDot]] = | |||
| List.fill(10)(createProblem(vecLen)).zipWithIndex.map{ case(probs, idx) => | |||
| probs.map(_.delay(3*idx)) | |||
| }.flatten | |||
| iotesters.Driver.execute(() => new daisyDot(3, 32), new TesterOptionsManager) { c => | |||
| IoSpec[daisyDot](createProblems(3), c).myTester | |||
| } should be(true) | |||
| } | |||
| } | |||
| @@ -0,0 +1,82 @@ | |||
| package Core | |||
| import chisel3._ | |||
| import chisel3.iotesters._ | |||
| import org.scalatest.{Matchers, FlatSpec} | |||
| import testUtils._ | |||
| import utilz._ | |||
| class daisyGridSpec extends FlatSpec with Matchers { | |||
| behavior of "daisy grid" | |||
| def writeRowCheck(dims: Dims, rowSel: Int => Int): Seq[CycleTask[daisyGrid]] = { | |||
| (0 until dims.cols).map( n => | |||
| CycleTask[daisyGrid]( | |||
| n, | |||
| d => d.poke(d.dut.io.dataIn, n), | |||
| d => d.poke(d.dut.io.writeEnable, 1), | |||
| d => d.poke(d.dut.io.rowSelect, rowSel(n))) | |||
| ) ++ | |||
| (0 until dims.cols*2).map( n => | |||
| CycleTask[daisyGrid]( | |||
| n, | |||
| d => d.poke(d.dut.io.dataIn, 0), | |||
| d => d.poke(d.dut.io.writeEnable, 0), | |||
| d => d.poke(d.dut.io.rowSelect, rowSel(n)), | |||
| d => d.expect(d.dut.io.dataOut, n % dims.cols)).delay(dims.cols) | |||
| ) | |||
| } | |||
| val dims = Dims(rows = 4, cols = 5) | |||
| it should "work like a regular daisyVec when row select is fixed to 0" in { | |||
| iotesters.Driver.execute(() => new daisyGrid(dims, 32), new TesterOptionsManager) { c => | |||
| IoSpec[daisyGrid](writeRowCheck(dims, _ => 0), c).myTester | |||
| } should be(true) | |||
| } | |||
| it should "work like a regular daisyVec when row select is fixed to 1" in { | |||
| iotesters.Driver.execute(() => new daisyGrid(dims, 32), new TesterOptionsManager) { c => | |||
| IoSpec[daisyGrid](writeRowCheck(dims, _ => 1), c).myTester | |||
| } should be(true) | |||
| } | |||
| it should "be able to write a matrix and output it" in { | |||
| iotesters.Driver.execute(() => new daisyGrid(dims, 32), new TesterOptionsManager) { c => | |||
| def writeMatrix(matrix: Matrix): List[CycleTask[daisyGrid]] = { | |||
| (0 until dims.elements).toList.zipWithIndex.map{ case(n, idx) => | |||
| val row = n / dims.cols | |||
| CycleTask[daisyGrid]( | |||
| n, | |||
| d => d.poke(d.dut.io.dataIn, n), | |||
| d => d.poke(d.dut.io.writeEnable, 1), | |||
| d => d.poke(d.dut.io.rowSelect, row)) | |||
| } | |||
| } | |||
| def readMatrix(matrix: Matrix): List[CycleTask[daisyGrid]] = { | |||
| (0 until dims.elements).toList.zipWithIndex.map{ case(n, idx) => | |||
| val row = n / dims.cols | |||
| CycleTask[daisyGrid]( | |||
| n, | |||
| d => d.poke(d.dut.io.dataIn, 0), | |||
| d => d.poke(d.dut.io.writeEnable, 0), | |||
| d => d.poke(d.dut.io.rowSelect, row), | |||
| d => d.expect(d.dut.io.dataOut, n)) | |||
| } | |||
| } | |||
| val m = genMatrix(Dims(rows = 4, cols = 5)) | |||
| val ins = writeMatrix(m) ++ readMatrix(m).map(_.delay(dims.elements)) | |||
| IoSpec[daisyGrid](ins, c).myTester | |||
| } should be(true) | |||
| } | |||
| } | |||
| @@ -0,0 +1,112 @@ | |||
| package Core | |||
| import chisel3._ | |||
| import chisel3.iotesters._ | |||
| import org.scalatest.{Matchers, FlatSpec} | |||
| import testUtils._ | |||
| import utilz._ | |||
| class daisyMatMulSpec extends FlatSpec with Matchers { | |||
| def generateProblem(dims: Dims): List[CycleTask[daisyMultiplier]] = { | |||
| val matrixA = genMatrix(dims) | |||
| val matrixB = genMatrix(dims).transpose | |||
| val answers = matrixMultiply(matrixA, matrixB) | |||
| println("Multiplying matrix A") | |||
| println(printMatrix(matrixA)) | |||
| println("with matrix B") | |||
| println(printMatrix(matrixB)) | |||
| println("The input order of matrix B is") | |||
| println(printMatrix(matrixB.transpose)) | |||
| println("Expected output is") | |||
| println(printMatrix(answers)) | |||
| val matrixInputA = matrixA.flatten.zipWithIndex.map{ | |||
| case(in, idx) => | |||
| CycleTask[daisyMultiplier]( | |||
| idx, | |||
| d => d.poke(d.dut.io.dataInA, in), | |||
| d => d.poke(d.dut.io.writeEnableA, 1) | |||
| ) | |||
| } | |||
| val matrixInputB = matrixB.transpose.flatten.zipWithIndex.map{ | |||
| case(in, idx) => | |||
| CycleTask[daisyMultiplier]( | |||
| idx, | |||
| d => d.poke(d.dut.io.dataInB, in), | |||
| d => d.poke(d.dut.io.writeEnableB, 1) | |||
| ) | |||
| } | |||
| val disableInputs = List( | |||
| CycleTask[daisyMultiplier]( | |||
| dims.elements, | |||
| d => d.poke(d.dut.io.writeEnableA, 0) | |||
| ), | |||
| CycleTask[daisyMultiplier]( | |||
| dims.elements, | |||
| d => d.poke(d.dut.io.writeEnableB, 0) | |||
| ) | |||
| ) | |||
| val checkValid1 = (0 until dims.elements).map( n => | |||
| CycleTask[daisyMultiplier]( | |||
| n, | |||
| d => d.expect(d.dut.io.dataValid, 0, "data valid should not be asserted before data is ready") | |||
| ) | |||
| ).toList | |||
| val checkValid2 = (0 until dims.rows * dims.rows * dims.cols).map{ n => | |||
| val shouldBeValid = (n % dims.cols) == dims.cols - 1 | |||
| val answerRowIndex = n/(dims.rows*dims.cols) | |||
| val answerColIndex = ((n-1)/(dims.cols)) % dims.rows | |||
| val expectedOutput = answers(answerRowIndex)(answerColIndex) | |||
| CycleTask[daisyMultiplier]( | |||
| n, | |||
| d => if(!shouldBeValid) | |||
| d.expect(d.dut.io.dataValid, 0) | |||
| else { | |||
| d.expect(d.dut.io.dataValid, 1) | |||
| d.expect(d.dut.io.dataOut, expectedOutput) | |||
| } | |||
| ).delay(dims.elements + 1) | |||
| }.toList | |||
| // adds a lot of annoying noise | |||
| // val peekDebug = (0 until 20).map(n => | |||
| // CycleTask[daisyMultiplier]( | |||
| // n, | |||
| // _ => println(s"at step $n"), | |||
| // d => println(printModuleIO(d.peek(d.dut.io))), | |||
| // _ => println(), | |||
| // ) | |||
| // ).toList | |||
| matrixInputA ::: matrixInputB ::: disableInputs ::: checkValid1 ::: checkValid2 // ::: peekDebug | |||
| } | |||
| behavior of "mat multiplier" | |||
| val dims = Dims(rows = 3, cols = 2) | |||
| it should "work" in { | |||
| iotesters.Driver.execute(() => new daisyMultiplier(dims, 32), new TesterOptionsManager) { c => | |||
| IoSpec[daisyMultiplier](generateProblem(Dims(rows = 3, cols = 2)), c).myTester | |||
| } should be(true) | |||
| } | |||
| } | |||
| @@ -0,0 +1,116 @@ | |||
| package Core | |||
| import chisel3._ | |||
| import chisel3.iotesters._ | |||
| import org.scalatest.{Matchers, FlatSpec} | |||
| import testUtils._ | |||
| import utilz._ | |||
| class daisyVecMatSpec extends FlatSpec with Matchers { | |||
| def generateProblem(dims: Dims): List[CycleTask[daisyVecMat]] = { | |||
| // for a vector of length A, the matrix must have A rows | |||
| val matrixB = genMatrix(dims).transpose | |||
| val vecA = List.fill(dims.rows)(scala.util.Random.nextInt(5)) | |||
| def answers: List[Int] = matrixB.map( col => | |||
| (col, vecA).zipped.map(_*_).sum) | |||
| println("multiplying vector: ") | |||
| println(printVector(vecA)) | |||
| println("with matrix:") | |||
| println(printMatrix(matrixB.transpose)) | |||
| println("which should equal") | |||
| println(printVector(answers)) | |||
| println("Input order of matrix:") | |||
| println(printMatrix(matrixB)) | |||
| val vecInput = vecA.zipWithIndex.map{ | |||
| case(in, idx) => | |||
| CycleTask[daisyVecMat]( | |||
| idx, | |||
| d => d.poke(d.dut.io.dataInA, in), | |||
| d => d.poke(d.dut.io.writeEnableA, 1) | |||
| ) | |||
| } | |||
| val matrixInput = matrixB.flatten.zipWithIndex.map{ | |||
| case(in, idx) => | |||
| CycleTask[daisyVecMat]( | |||
| idx, | |||
| d => d.poke(d.dut.io.dataInB, in), | |||
| d => d.poke(d.dut.io.writeEnableB, 1) | |||
| ) | |||
| } | |||
| val inputDisablers = List( | |||
| CycleTask[daisyVecMat]( | |||
| dims.rows, | |||
| d => d.poke(d.dut.io.writeEnableA, 0) | |||
| ), | |||
| CycleTask[daisyVecMat]( | |||
| dims.elements, | |||
| d => d.poke(d.dut.io.writeEnableB, 0) | |||
| ) | |||
| ) | |||
| val checkValid1 = (0 until dims.elements).map( n => | |||
| CycleTask[daisyVecMat]( | |||
| n, | |||
| d => d.expect(d.dut.io.dataValid, 0, "data valid should not be asserted before data is ready") | |||
| ) | |||
| ).toList | |||
| val checkValid2 = (0 until dims.elements).map{ n => | |||
| val shouldBeValid = (n % dims.rows) == dims.rows - 1 | |||
| val whichOutput = answers( (n/dims.rows) ) | |||
| CycleTask[daisyVecMat]( | |||
| n, | |||
| d => if(!shouldBeValid) | |||
| d.expect(d.dut.io.dataValid, 0) | |||
| else { | |||
| d.expect(d.dut.io.dataValid, 1) | |||
| d.expect(d.dut.io.dataOut, whichOutput) | |||
| } | |||
| ).delay(dims.elements) | |||
| }.toList | |||
| // adds a lot of annoying noise | |||
| // val peekDebug = (0 until 20).map(n => | |||
| // CycleTask[daisyVecMat]( | |||
| // n, | |||
| // _ => println(s"at step $n"), | |||
| // d => println(printModuleIO(d.peek(d.dut.io))), | |||
| // _ => println(), | |||
| // ) | |||
| // ).toList | |||
| vecInput ::: matrixInput ::: inputDisablers ::: checkValid1 ::: checkValid2 // ::: peekDebug | |||
| } | |||
| behavior of "vec mat multiplier" | |||
| val dims = Dims(rows = 3, cols = 2) | |||
| it should "work" in { | |||
| iotesters.Driver.execute(() => new daisyVecMat(dims, 32), new TesterOptionsManager) { c => | |||
| IoSpec[daisyVecMat](generateProblem(Dims(rows = 3, cols = 2)), c).myTester | |||
| } should be(true) | |||
| } | |||
| } | |||
| @@ -0,0 +1,84 @@ | |||
| package Core | |||
| import chisel3._ | |||
| import chisel3.iotesters._ | |||
| import org.scalatest.{Matchers, FlatSpec} | |||
| import testUtils._ | |||
| class daisyVecSpec extends FlatSpec with Matchers { | |||
| behavior of "daisy vector" | |||
| it should "not write when write enable is low" in { | |||
| val ins = (0 to 10).map(ii => | |||
| CycleTask[daisyVector]( | |||
| ii, | |||
| d => d.poke(d.dut.io.dataIn, 0), | |||
| d => d.poke(d.dut.io.writeEnable, 0), | |||
| d => d.expect(d.dut.io.dataOut, 0)) | |||
| ).toList | |||
| iotesters.Driver.execute(() => new daisyVector(4, 32), new TesterOptionsManager) { c => | |||
| IoSpec[daisyVector](ins, c).myTester | |||
| } should be(true) | |||
| } | |||
| it should "write only when write enable is asserted" in { | |||
| val ins = | |||
| (0 until 4).map(ii => | |||
| CycleTask[daisyVector]( | |||
| ii, | |||
| _ => println("inputting 2s'"), | |||
| d => d.poke(d.dut.io.dataIn, 2), | |||
| d => d.poke(d.dut.io.writeEnable, 1))) ++ | |||
| (0 until 6).map(ii => | |||
| CycleTask[daisyVector]( | |||
| ii + 4, | |||
| _ => println("Checking output is 2"), | |||
| d => d.poke(d.dut.io.writeEnable, 0), | |||
| d => d.expect(d.dut.io.dataOut, 2) | |||
| )) | |||
| iotesters.Driver.execute(() => new daisyVector(4, 32), new TesterOptionsManager) { c => | |||
| IoSpec[daisyVector](ins, c).myTester | |||
| } should be(true) | |||
| } | |||
| it should "Work in general" in { | |||
| val ins = { | |||
| val inputs = List.fill(10)(scala.util.Random.nextInt(10000)) | |||
| println(inputs) | |||
| val in = inputs.zipWithIndex.map{ case(in,idx) => | |||
| CycleTask[daisyVector]( | |||
| idx, | |||
| d => d.poke(d.dut.io.dataIn, in), | |||
| d => d.poke(d.dut.io.writeEnable, 1) | |||
| ) | |||
| } | |||
| val out = inputs.zipWithIndex.map{ case(expected, idx) => | |||
| CycleTask[daisyVector]( | |||
| idx + 4, | |||
| d => d.expect(d.dut.io.dataOut, expected) | |||
| ) | |||
| } | |||
| in ::: out | |||
| } | |||
| iotesters.Driver.execute(() => new daisyVector(4, 32), new TesterOptionsManager) { c => | |||
| IoSpec[daisyVector](ins, c).myTester | |||
| } should be(true) | |||
| } | |||
| } | |||
| @@ -1,317 +1,74 @@ | |||
| package Core | |||
| import chisel3._ | |||
| import chisel3.util._ | |||
| import chisel3.core.Input | |||
| import chisel3.iotesters._ | |||
| import org.scalatest.{Matchers, FlatSpec} | |||
| class daisyVectorTest(c: daisyVector, inputs: List[(Int, Int, Int)]) extends PeekPokeTester(c) { | |||
| (inputs).foreach { | |||
| case(enIn, dataIn, dataOut) => { | |||
| poke(c.io.readEnable, enIn) | |||
| poke(c.io.dataIn, dataIn) | |||
| expect(c.io.dataOut, dataOut) | |||
| step(1) | |||
| } | |||
| } | |||
| } | |||
| class daisyDotTest(c: daisyDot, inputs: List[(Int, Int, Option[Int], Int)]) extends PeekPokeTester(c) { | |||
| (inputs).foreach { | |||
| case(inA, inB, dataOut, dataValid) => { | |||
| poke(c.io.dataInA, inA) | |||
| poke(c.io.dataInB, inB) | |||
| dataOut.foreach { expect(c.io.dataOut, _) } | |||
| expect(c.io.outputValid, dataValid) | |||
| step(1) | |||
| } | |||
| } | |||
| } | |||
| class daisyGridTest(c: daisyGrid, inputs: List[(Int, Int, Int, Int)]) extends PeekPokeTester(c) { | |||
| (inputs).foreach { | |||
| case(readEnable, dataIn, readRow, dataOut) => { | |||
| poke(c.io.readEnable, readEnable) | |||
| poke(c.io.dataIn, dataIn) | |||
| poke(c.io.rowSelect, readRow) | |||
| expect(c.io.dataOut, dataOut) | |||
| step(1) | |||
| } | |||
| } | |||
| } | |||
| class daisyVecMatTest(c: daisyVecMat, inputs: List[(Int,Int,Int,Int,Option[Int],Int,Int)]) extends PeekPokeTester(c) { | |||
| (inputs).foreach { | |||
| case(dataInA, readEnableA, dataInB, readEnableB, dataOutExpect, dataValidExpect, doneExpect) => { | |||
| poke(c.io.dataInA, dataInA) | |||
| poke(c.io.dataInB, dataInB) | |||
| poke(c.io.readEnableA, readEnableA) | |||
| poke(c.io.readEnableB, readEnableB) | |||
| expect(c.io.dataValid, dataValidExpect) | |||
| expect(c.io.done, doneExpect) | |||
| dataOutExpect.foreach { expect(c.io.dataOut, _) } | |||
| step(1) | |||
| } | |||
| } | |||
| } | |||
| class daisyVecSpec extends FlatSpec with Matchers { | |||
| val input1 = List.fill(10)((0, 0x45, 0)) | |||
| val input2 = input1 ++ List( | |||
| // enableIn, dataIn, expected | |||
| (1, 2, 0), | |||
| (1, 2, 0), | |||
| (1, 2, 0), | |||
| (1, 2, 0), | |||
| (0, 0, 2), | |||
| (0, 0, 2), | |||
| (0, 0, 2), | |||
| (0, 0, 2), | |||
| (0, 0, 2), | |||
| (0, 0, 2), | |||
| (0, 0, 2), | |||
| (0, 0, 2)) | |||
| object testUtils { | |||
| /** | |||
| Somewhat unintuitively named, a CycleTask is a list of test tasks at some time step. | |||
| In order to not have to supply a list, the scala varargs syntax (*) is used. | |||
| As an example, at step 13 we want to input a value to a signal in: (PeekPokeTester[T] => Unit) | |||
| and check an output out: (PeekPokeTester[T] => Unit, with the possibility of a test failure exception). | |||
| Thanks to varargs syntax this would be | |||
| CycleTask[MyModule](13, in, out) | |||
| val input3 = { | |||
| val inputs = List.fill(100)(scala.util.Random.nextInt(10000)) | |||
| val withExpected = (List.fill(4)(0) ++ inputs) zip inputs | |||
| val withEnabled = withExpected.map{ case(expected, in) => (1, in, expected) } | |||
| Sometimes it is convenient to delay a bunch of checks by some set number of cycles. | |||
| For instance, assume a component needs 10 cycles to set up, but it is more convenient | |||
| to write tests from T = 0; we do that and then call .delay(10) to ensure the T = 0 for the | |||
| tasks is actually T = 10. | |||
| */ | |||
| case class CycleTask[T <: Module](step: Int, run: PeekPokeTester[T] => Unit*){ | |||
| withEnabled | |||
| } | |||
| behavior of "daisy vector" | |||
| it should "not read when read enable is low" in { | |||
| iotesters.Driver.execute(() => new daisyVector(4, 32), new TesterOptionsManager) { c => | |||
| new daisyVectorTest(c, input1) | |||
| } should be(true) | |||
| } | |||
| it should "read only when read enable is asserted" in { | |||
| iotesters.Driver.execute(() => new daisyVector(4, 32), new TesterOptionsManager) { c => | |||
| new daisyVectorTest(c, input2) | |||
| } should be(true) | |||
| } | |||
| it should "Work in general" in { | |||
| iotesters.Driver.execute(() => new daisyVector(4, 32), new TesterOptionsManager) { c => | |||
| new daisyVectorTest(c, input3) | |||
| } should be(true) | |||
| } | |||
| } | |||
| class daisyDotSpec extends FlatSpec with Matchers { | |||
| behavior of "daisy vector" | |||
| val input1 = List( | |||
| (0, 0, None, 0), | |||
| (0, 0, None, 0), | |||
| (0, 0, None, 1), | |||
| (0, 0, None, 0), | |||
| (0, 0, None, 0), | |||
| (0, 0, None, 1), | |||
| (0, 0, None, 0), | |||
| (0, 0, None, 0), | |||
| (0, 0, None, 1)) | |||
| it should "Only signal valid output at end of calculation" in { | |||
| iotesters.Driver.execute(() => new daisyDot(3, 32), new TesterOptionsManager) { c => | |||
| new daisyDotTest(c, input1) | |||
| } should be(true) | |||
| } | |||
| val input2 = List( | |||
| (1, 0, None, 0), | |||
| (1, 0, None, 0), | |||
| (1, 0, Some(3), 1), | |||
| (1, 0, None, 0), | |||
| (1, 0, None, 0), | |||
| (1, 0, Some(3), 1), | |||
| (1, 0, None, 0), | |||
| (1, 0, None, 0), | |||
| (1, 0, Some(3), 1)) | |||
| it should "Be able to count to 3" in { | |||
| iotesters.Driver.execute(() => new daisyDot(3, 32), new TesterOptionsManager) { c => | |||
| new daisyDotTest(c, input1) | |||
| } should be(true) | |||
| // :_* is necessary for calling varargs with an explicit list | |||
| def delay(by: Int) = CycleTask[T](step + by, run:_*) | |||
| } | |||
| def createProblem(vecLen: Int): List[(Int, Int, Option[Int], Int)] = { | |||
| val in1 = List.fill(vecLen)(scala.util.Random.nextInt(10)) | |||
| val in2 = List.fill(vecLen)(scala.util.Random.nextInt(10)) | |||
| val dotProduct = (in1, in2).zipped.map(_*_).sum | |||
| /** | |||
| Takes in a list of cycle tasks, groups them by the time step at which they execute, and runs until all cycle tasks are done | |||
| */ | |||
| case class IoSpec[T <: Module]( | |||
| instructions: Seq[CycleTask[T]], | |||
| component: T | |||
| ){ | |||
| val lastStep = instructions.maxBy(_.step).step | |||
| val instructionsMap = instructions.groupBy(_.step) | |||
| (in1, in2, (0 to vecLen)).zipped.map{ | |||
| case(a, b, idx) => | |||
| val dpExpect = if(idx == (vecLen - 1)) Some(dotProduct) else None | |||
| val outExpect = if(idx == (vecLen - 1)) 1 else 0 | |||
| (a, b, dpExpect, outExpect) | |||
| } | |||
| } | |||
| def createProblems(vecLen: Int): List[(Int, Int, Option[Int], Int)] = | |||
| List.fill(10)(createProblem(vecLen)).flatten | |||
| it should "Be able to calculate dot products" in { | |||
| iotesters.Driver.execute(() => new daisyDot(10, 32), new TesterOptionsManager) { c => | |||
| new daisyDotTest(c, createProblems(10)) | |||
| } should be(true) | |||
| } | |||
| } | |||
| class daisyGridSpec extends FlatSpec with Matchers { | |||
| type Matrix[A] = List[List[A]] | |||
| behavior of "daisy grid" | |||
| def genMatrix(dims: (Int,Int)): Matrix[Int] = | |||
| List.fill(dims._1)( | |||
| List.fill(dims._2)(scala.util.Random.nextInt(100)) | |||
| ) | |||
| def readRowCheck(dims: (Int,Int)): List[(Int,Int,Int,Int)] = { | |||
| // readEn, dataIn, readRow, expected dataOut | |||
| List.fill(dims._1 - 1)(( 1, 1, 0, 0)) ++ | |||
| List.fill(dims._1 - 1)((0, 0, 0, 1)) ++ | |||
| List.fill(dims._1 - 1)((0, 0, 0, 1)) | |||
| } | |||
| def readRow2Check(dims: (Int,Int)): List[(Int,Int,Int,Int)] = { | |||
| // readEn, dataIn, readRow, expected dataOut | |||
| List.fill(dims._1 - 1)(( 1, 1, 1, 0)) ++ | |||
| List.fill(dims._1 - 1)((0, 0, 1, 1)) ++ | |||
| List.fill(dims._1 - 1)((0, 0, 1, 1)) | |||
| } | |||
| def readMatrix(dims: (Int,Int)): List[(Int,Int,Int,Int)] = { | |||
| val m = genMatrix(dims) | |||
| val input = m.zipWithIndex.map{ case(row, rowIdx) => | |||
| row.zipWithIndex.map{ case(a, colIdx) => | |||
| // readEn, dataIn, readRow, expected dataOut | |||
| ( 1, a, rowIdx, 0) | |||
| class tester(c: T) extends PeekPokeTester(c) | |||
| val myTester: PeekPokeTester[T] = new tester(component) { | |||
| for(ii <- 0 to lastStep){ | |||
| instructionsMap.getOrElse(ii, Nil).foreach(_.run.foreach(t => t(this))) | |||
| step(1) | |||
| } | |||
| }.flatten | |||
| val output = m.zipWithIndex.map{ case(row, rowIdx) => | |||
| row.zipWithIndex.map{ case(a, colIdx) => | |||
| // readEn, dataIn, readRow, expected dataOut | |||
| ( 0, 0, rowIdx, a) | |||
| } | |||
| }.flatten | |||
| input ++ output | |||
| } | |||
| it should "work like a regular daisyVec when row select is fixed to 0" in { | |||
| iotesters.Driver.execute(() => new daisyGrid(5, 4, 32), new TesterOptionsManager) { c => | |||
| new daisyGridTest(c, readRowCheck((5,4))) | |||
| } should be(true) | |||
| } | |||
| it should "work like a regular daisyVec when row select is fixed to 1" in { | |||
| iotesters.Driver.execute(() => new daisyGrid(5, 4, 32), new TesterOptionsManager) { c => | |||
| new daisyGridTest(c, readRow2Check((5,4))) | |||
| } should be(true) | |||
| } | |||
| } | |||
| it should "be able to read a matrix" in { | |||
| iotesters.Driver.execute(() => new daisyGrid(5, 4, 32), new TesterOptionsManager) { c => | |||
| new daisyGridTest(c, readMatrix((5,4))) | |||
| } should be(true) | |||
| } | |||
| } | |||
| class testUtilSpec extends FlatSpec with Matchers { | |||
| import testUtils._ | |||
| class daisyVecMatSpec extends FlatSpec with Matchers { | |||
| type Matrix[A] = List[List[A]] | |||
| def genMatrix(dims: (Int,Int)): Matrix[Int] = | |||
| List.fill(dims._1)( | |||
| List.fill(dims._2)(scala.util.Random.nextInt(4)) | |||
| ) | |||
| def generateInputs(dims: (Int,Int)): List[(Int,Int,Int,Int,Option[Int],Int,Int)] = { | |||
| val matrixB = genMatrix(dims) | |||
| val vecA = genMatrix((1, (dims._1))).head | |||
| println("multiplying: ") | |||
| println(vecA.mkString("[","\t","]")) | |||
| println("matrix:") | |||
| matrixB.foreach { row => | |||
| println(row.mkString("[","\t","]")) | |||
| } | |||
| def answers: List[Int] = matrixB.transpose.map( col => | |||
| (col, vecA).zipped.map(_*_).sum | |||
| val ins = List[CycleTask[daisyVector]]( | |||
| CycleTask( | |||
| 1, | |||
| d => d.poke(d.dut.io.dataIn, 1), | |||
| d => d.expect(d.dut.io.dataOut, 0, s"fail at step ${d.t}") | |||
| ) | |||
| ) | |||
| println("should equal") | |||
| println(answers.mkString("[","\t","]")) | |||
| val vecAndMatrixInput = (matrixB.head zip vecA).map{ | |||
| case(m, v) => | |||
| (v, 1, m, 1, None, 0, 0) | |||
| } | |||
| val matrixInput = matrixB.tail.flatten.map{ m => | |||
| (0, 0, m, 1, None, 0, 0) | |||
| } | |||
| val checkOutput = answers.map( a => | |||
| { | |||
| val filler = List.fill(dims._2 - 1)((0, 0, 0, 0, None, 0, 0)) | |||
| val check = List((0, 0, 0, 0, Some(a), 0, 0)) | |||
| filler ++ check | |||
| }).flatten | |||
| vecAndMatrixInput ::: matrixInput ::: checkOutput | |||
| } | |||
| behavior of "my simple test harness attempt" | |||
| it should "not NPE" in { | |||
| iotesters.Driver.execute(() => new daisyVector(4, 32), new TesterOptionsManager) { c => | |||
| val myTest = IoSpec[daisyVector](ins, c) | |||
| behavior of "vec mat multiplier" | |||
| myTest.myTester | |||
| it should "compile" in { | |||
| iotesters.Driver.execute(() => new daisyVecMat(5, 4, 5, 32), new TesterOptionsManager) { c => | |||
| new daisyVecMatTest(c, Nil) | |||
| } should be(true) | |||
| } | |||
| it should "Not assert valid output when loading data" in { | |||
| iotesters.Driver.execute(() => new daisyVecMat(5, 4, 5, 32), new TesterOptionsManager) { c => | |||
| new daisyVecMatTest(c, generateInputs((5,4))) | |||
| } should be(true) | |||
| } | |||
| } | |||