[diderot] View of /branches/charisee/src/compiler/tree-il-forvis-basevis/lowOp-to-treeOp.sml

Revision 3692
Sun Mar 20 01:17:08 2016 UTC by cchiw
File size: 12654 byte(s)
tree-il-forvis-base vis and charisee
 (*
 * tree-ir-for-vis-newir
 * This module translates LowIR (low-il) operators to TreeIR (tree-il) operators.
 * When there is a LowIR vector op, it is broken into HW-supported TreeIR vector operations,
 *     e.g., A6+B6 => Mux[A4+B4, A2+B2]
 * The following variables are used:
 *     isAligned/A : bool     - is the array aligned?
 *     isFill      : bool     - is the vector padded with zeros, i.e., a length-3 vector represented with length 4?
 *     nSize       : int      - the size of the vector operation (e.g., 4)
 *     oSize       : int      - the size of the original arguments to the vector operation, if less than the new size (e.g., 3)
 *     pieces      : int list - the sizes of the vector operations, e.g., 2 -> [2], 6 -> [4, 2], 3 -> [4]
 *)
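
(* A minimal, hypothetical sketch (not part of this module) of how a vector
 * width could be split into HW-supported pieces, assuming that 4-wide and
 * 2-wide vectors are the sizes the HW supports; the actual pieces list is
 * computed by the caller and passed in through the (isFill, _, pieces) tuple.
 *
 *   fun splitWidth 0 = []
 *     | splitWidth n =
 *         if n >= 4 then 4 :: splitWidth (n - 4)   (* take a full 4-wide piece *)
 *         else if n > 2 then [4]                   (* pad, e.g., 3 -> [4] with isFill = true *)
 *         else [n]                                 (* 1 -> [1], 2 -> [2] *)
 *
 *   splitWidth 6 = [4, 2]     splitWidth 3 = [4]     splitWidth 2 = [2]
 *)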

 (* FIXME: add signature!!! *) 
signature lott = sig
val vecToTree : LowOps.rator * LowIR.var -> TreeIR.stm list * TreeIR.stm list * LowIR.var * (LowIR.var * TreeIR.exp * TreeIR.var) list
end


structure LowOpToTreeOp = 
  struct
    local
      structure Src = LowIR
      structure SrcOp = LowOps
      structure SrcTy = LowTypes
      structure SrcV = Src.Var
      structure SrcSV = Src.StateVar
      structure SrcGV = Src.GlobalVar
(*
structure DstOp = LowOps
structure DstTy = LowTypes
*)

      structure DstOp = TreeOps
      structure DstTy = TreeTypes
      structure Dst = TreeIR
      structure DstV = Dst.Var
      structure DstSV = Dst.StateVar
      structure DstGV = Dst.GlobalVar
      structure Ty = TreeIR.Ty
    in

     (**************************************) 
     (* isAlignedLoad : bool * ty -> bool
    * Do we load this array assuming it is aligned?
    * Decides whether isAligned is true when creating E_LoadArr.
    * Currently, it is only true when the vector argument can be represented
    * directly in the HW and is not zero-padded.
    * Fix here to change the alignment policy.
    *)
    fun isAlignedLoad (isFill, Ty.TensorTy [_]) = (not isFill)
      | isAlignedLoad _ = false

     (* isAlignedStore : bool * int -> bool
    * Do we store this array assuming it is aligned?
    * Decides whether isAligned is true when creating S_StoreVec and E_Mux.
    * Currently, it is true whenever the vector is not zero-padded.
    * Fix here to change the alignment policy.
    *)
    fun isAlignedStore (isFill, _) = (not isFill)
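
    (* For illustration (not in the original file), how these predicates behave
     * under the conventions above: a 4-wide vector stored exactly (isFill = false)
     * is treated as aligned, while a 3-wide vector padded to 4 (isFill = true) is not:
     *
     *   isAlignedLoad  (false, Ty.TensorTy [4])    = true
     *   isAlignedLoad  (true,  Ty.TensorTy [4])    = false
     *   isAlignedLoad  (false, Ty.TensorTy [2, 3]) = false
     *   isAlignedStore (false, 1)                  = true
     *   isAlignedStore (true,  2)                  = false
     *)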

     (**************************************) 
     (* mkStmt : TreeIR.var * bool * int * int list * TreeIR.exp list -> TreeIR.stm
    * Makes the top-level TreeIR statement.
    * If the lhs is a global then S_StoreVec is used; otherwise S_Assign is used.
    *)
    (*local kind*)
    fun mkStmtLocal (lhs, isFill, oSize, pieces, ops) = let
        val alignedStore = isAlignedStore (isFill, length pieces)
        in
            Dst.S_Assign ([lhs], Dst.E_Mux (alignedStore, isFill, oSize, pieces, ops))
        end

    (*global kind*)
    fun mkStmtGlob (lhs, isFill, oSize, pieces, ops) = let
        val alignedStore = isAlignedStore (isFill, length pieces)
        in
            Dst.S_StoreVec (Dst.E_Var lhs, 0, alignedStore, isFill, oSize, Ty.TensorTy[oSize], pieces, ops)
        end

    (*FIXME: variables no longer have kinds: assumes local kind*)
    fun mkStmt e = mkStmtLocal e
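
    (* A hypothetical illustration (the names lhs6, a4, and a2 are made up): for a
     * 6-wide result held in pieces [4, 2] with no zero padding, mkStmt produces a
     * single assignment of a mux over the per-piece expressions:
     *
     *   mkStmt (lhs6, false, 6, [4, 2], [a4, a2])
     *     = Dst.S_Assign ([lhs6], Dst.E_Mux (true, false, 6, [4, 2], [a4, a2]))
     *
     * where the first field of the E_Mux is isAlignedStore (false, 2) = true.
     *)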

     (* getArg : bool * TreeIR.exp * int * int * int * IntInf.int -> TreeIR.exp
    *  Gets the argument for the operation:
    *  if the argument is a mux, then we select the piece needed for the operation;
    *  if the argument is a local var, then we assume it is already the right size;
    *  if the argument is a global var or state var, then we load the array with the given offset and sizes;
    *  otherwise the argument was never loaded and we raise an error.
    *)
    fun getArg (isFill, t, count, nSize, oSize, offset) =  (case t
        of Dst.E_Mux (_, _, _, _, ops) => List.nth (ops, count)
        | Dst.E_Var _ => t
        | Dst.E_State  stv
            => Dst.E_LoadArr(isAlignedLoad (isFill, DstSV.ty stv), nSize, oSize, t, Dst.E_Lit (Literal.Int offset))
        | Dst.E_Global gbv
            => Dst.E_LoadArr(isAlignedLoad (isFill, DstGV.ty gbv), nSize, oSize, t, Dst.E_Lit (Literal.Int offset))
        | _ => raise Fail "unexpected argument to vector operation"
         (*end case*)) 
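
    (* For illustration (the names a4, a2, and g are made up): if the argument is
     * the result of an earlier split, held as a mux over pieces [4, 2], then for the
     * second piece (count = 1) getArg simply selects that piece; if it is a global,
     * it is loaded from memory at the running offset instead:
     *
     *   getArg (false, Dst.E_Mux (true, false, 6, [4, 2], [a4, a2]), 1, 2, 6, 4)
     *     = a2
     *   getArg (false, Dst.E_Global g, 1, 2, 6, 4)
     *     = Dst.E_LoadArr (isAlignedLoad (false, DstGV.ty g), 2, 6,
     *                      Dst.E_Global g, Dst.E_Lit (Literal.Int 4))
     *)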

  (**************************************) 
    (*Special functions needed to transform specific operators*)

    (* transformProjectLast ()
    * ProjectLast is an op that slices a vector out of a higher-order tensor.
    * That vector could be in pieces,
    * so we load each piece and the next step wraps a mux around them.
    * We can assume the higher-order tensor is stored as an array.
    * This rewrites the ProjectLast op as E_LoadArr expressions.
    *)
    fun transformProjectLast (origrator, argVec, x) = let
        val  (lhs, oSize, _,  (isFill, _, pieces) ) = x
        (* previous: val SrcOp.ProjectLast (_, _, indTy, argTy) = origrator *)
        val SrcOp.ProjectLast (argTy, indTy) = origrator
        val alignedLd = isAlignedLoad (isFill,  argTy)
        (*shift index position based on type of argument*)

        val shift =  (case  (indTy, argTy) 
            of  ([i], Ty.TensorTy[_, m]) => i*m
            |  ([i, j], Ty.TensorTy[_, m, n]) => (i*n*m) + (j*n) 
            |  ([i, j, k], Ty.TensorTy[_, m, n, p]) => (i*m*n*p) + (j*p*n) + (k*p) 
            |  _ => raise Fail "ProjectLast: tensor of an unhandled size"  (*can add more sizes in the future*)
             (*end case *) ) 
        fun mkLoad  ([], _, code) = code
          | mkLoad  (nSize::es, offset, code) = 
                mkLoad  (es, offset + IntInf.fromInt nSize, 
                code@[Dst.E_LoadArr (alignedLd, nSize, oSize, argVec, Dst.E_Lit (Literal.Int offset))])
        val ops = mkLoad  (pieces, IntInf.fromInt shift, []) 
        in
            mkStmt (lhs, isFill, oSize, pieces, ops) 
        end
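
    (* A worked example (the tensor sizes are made up): projecting the last axis of a
     * tensor whose type matches Ty.TensorTy [_, 3, 3] (so m = n = 3) at indices [1, 2]
     * gives
     *
     *   shift = 1*3*3 + 2*3 = 15
     *
     * and, with oSize = 3 and pieces = [4] (a zero-padded 3-vector), mkLoad emits a
     * single load at that offset:
     *
     *   [Dst.E_LoadArr (alignedLd, 4, 3, argVec, Dst.E_Lit (Literal.Int 15))]
     *)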
    (* transformGenVec : argVecs * argsS * x -> TreeIR.stm
    *   where argVecs pairs each vector argument with its TreeIR expression,
    *   argsS holds scalar arguments that are passed through unchanged, and
    *   x = (lhs, oSize, SOME rator, (isFill, _, pieces)).
    * Takes the original vector op and breaks it into "pieces", which are HW-supported sizes;
    * gets the arguments with getArg (), which either loads them or finds them (inside a Mux),
    * and then puts them inside the TreeIR op ("rator").
    * VSum is a special case (see transformVSum) because it maintains both the new and old size.
    *)
    fun transformGenVec (argVecs, argsS, x) = let
        val  (lhs, oSize, SOME dstrator,  (isFill, _, pieces)) = x

        val c = List.map   (fn  (_, exp) => exp) argVecs
        fun createOps  ([], _, _, code) = List.rev code
        | createOps  (nSize::es, count, offset, code) =
            let
                val exps = List.map  (fn exp => getArg (isFill,  exp, count, nSize, oSize, offset) ) c
                val code0 =  (nSize, argsS@exps)
            in
                createOps  (es, count+1, offset + IntInf.fromInt nSize, code0::code)
            end
        val code = createOps  (pieces, 0, IntInf.fromInt 0, [])
        val ops = List.map  (fn (nSize, args) => Dst.E_Op (dstrator nSize, args) ) code
        in
            mkStmt (lhs, isFill, oSize, pieces, ops)
        end
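
    (* A hypothetical walk-through (the operator and argument names are made up):
     * splitting a 6-wide addition over pieces [4, 2], with vector arguments a and b
     * and no scalar arguments, createOps yields
     *
     *   [(4, [getArg (..., a, 0, 4, 6, 0), getArg (..., b, 0, 4, 6, 0)]),
     *    (2, [getArg (..., a, 1, 2, 6, 4), getArg (..., b, 1, 2, 6, 4)])]
     *
     * which becomes [Dst.E_Op (dstrator 4, args4), Dst.E_Op (dstrator 2, args2)] and
     * is finally wrapped by mkStmt into one mux assignment, i.e., the
     * A6+B6 => Mux[A4+B4, A2+B2] example from the header comment.
     *)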

    (* transformVSum () transforms the VSum operation.
    * Works much like transformGenVec (),
    * except that RAdd () adds the resulting expressions together and the sum is then assigned.
    *)
    fun transformVSum (argVec, x) = let
        val  (lhs, oSize, _,  (isFill, _, pieces) ) = x

        val ops =  (case pieces
            of [nSize] => let
                val op2 = DstOp.VSum ([nSize], oSize) 
                val arg2 = getArg (isFill, argVec, 0, nSize, oSize, 0)
                in
                    [Dst.E_Op (op2, [arg2])]
                end
            | _ => let
                fun createOps  ([], _, _, code) = List.rev code
                | createOps  (nSize::es, count, offset, code) = let
                    val argsLd = getArg (isFill, argVec, count, nSize, oSize, offset)
                    val exp =  (nSize, [argsLd]) 
                    in
                        createOps  (es, count+1, offset + IntInf.fromInt nSize, exp::code)
                    end
                val indexAt = IntInf.fromInt 0
                val code = createOps  (pieces, 0, indexAt, []) 
                val args = List.foldr op@ []  (List.map  (fn (_, args) => args) code) 
                in
                    [Dst.E_Op (DstOp.VSum (pieces, oSize), args) ]
                end
            (*end case*))
        fun RAdd [e1] = e1
        | RAdd (e1::e2::es) = RAdd (Dst.E_Op (DstOp.RAdd, [e1, e2])::es)
        | RAdd _ = raise Fail "RAdd applied to an empty list"
        in
            Dst.S_Assign ([lhs], RAdd ops) 
        end
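
    (* For illustration (the names lhs, arg4, and arg2 are made up): summing a 6-wide
     * vector held in pieces [4, 2] loads (or selects) both pieces and passes them to
     * a single VSum that records both the piece sizes and the original size:
     *
     *   ops  = [Dst.E_Op (DstOp.VSum ([4, 2], 6), [arg4, arg2])]
     *   stmt = Dst.S_Assign ([lhs], RAdd ops)   (* RAdd [e] = e here *)
     *
     * RAdd only combines expressions with DstOp.RAdd when ops has more than one entry.
     *)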

     (**************************************) 
    fun vecToTree (e1, x) = (case e1
        of (SOME (SrcOp.ProjectLast e), [(_, argVec)])               => transformProjectLast (SrcOp.ProjectLast e, argVec, x)
        | (SOME (SrcOp.VSum _), [(_, argVec)])                       => transformVSum (argVec, x)
        | (SOME (SrcOp.VScale _), (_, argSca)::argVecs)              => transformGenVec (argVecs, [argSca], x)
        | (SOME (SrcOp.Lerp (Ty.TensorTy [_])), [a, b, (_, argSca)]) => transformGenVec ([a, b], [argSca], x)
        | (SOME _, _)                                                => raise Fail "unknown special operator"
        | (NONE, argVecs)                                            => transformGenVec (argVecs, [], x)
        (* end case *))
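
    (* For illustration (the bindings are made up): scaling a 6-wide vector v by a
     * scalar s arrives here as (SOME (SrcOp.VScale ...), [(xs, s), (xv, v)]); the
     * scalar is peeled off into the argsS list and the vector pairs go on to
     * transformGenVec, while plain vector ops (NONE) pass all of their arguments
     * through as vectors.
     *)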

     (**************************************) 
     (* consVecToTree : int * int * int list * TreeIR.exp list * bool -> TreeIR.exp
     * Takes the Cons of a vector and returns a TreeIR.exp inside an E_Mux.
     * When isFill is true, the vector is padded with zeros.
     *)
    fun consVecToTree (_, oSize, [nSize], args, true) = let
        val nArg = length (args) 
        val n = nSize-nArg
        val newArgs = List.tabulate (n,  (fn _ => Dst.E_Lit (Literal.Int 0)))
        val op1 = Dst.E_ConsE (nSize, oSize, args@newArgs)
        val aligned = isAlignedStore (true, 1)
        in
            Dst.E_Mux (aligned, true, oSize, [nSize], [op1])
        end
    | consVecToTree (_, _, _, _, true) = raise Fail "consVecToTree: isFill with more than one piece"
    | consVecToTree (nSize, oSize, pieces, args, isFill) = let
        val aligned = isAlignedStore (isFill, length pieces)
        fun createOps  ([], _, _, _) = []
        | createOps  (nSize::es, offset, arg, code) = 
            (Dst.E_ConsE (nSize, nSize, List.take (arg, nSize) ))::
            createOps  (es, offset + IntInf.fromInt nSize, List.drop (arg, nSize), code)
        val ops = createOps  (pieces, 0, args, []) 
        in
            Dst.E_Mux (aligned, isFill, oSize, pieces, ops)
        end
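
    (* A worked example (the argument names are made up): a cons of three scalar
     * expressions [e1, e2, e3] padded to a 4-wide vector (isFill = true):
     *
     *   consVecToTree (4, 3, [4], [e1, e2, e3], true)
     *     = Dst.E_Mux (false, true, 3, [4],
     *                  [Dst.E_ConsE (4, 3, [e1, e2, e3, Dst.E_Lit (Literal.Int 0)])])
     *
     * With isFill = false and pieces = [4, 2], six arguments would instead be split
     * into two E_ConsE expressions of widths 4 and 2.
     *)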
 (***************************************************************************) 
     (*Low-IL operators to Tree-IL operators*) 
    fun expandOp rator =  (case rator
        of SrcOp.IAdd => DstOp.IAdd
        | SrcOp.ISub => DstOp.ISub
        | SrcOp.IMul => DstOp.IMul
        | SrcOp.IDiv => DstOp.IDiv
        | SrcOp.INeg => DstOp.INeg
        | SrcOp.Abs ty => DstOp.Abs ty
        | SrcOp.LT ty => DstOp.LT  ty
        | SrcOp.LTE ty => DstOp.LTE  ty
        | SrcOp.EQ ty => DstOp.EQ  ty
        | SrcOp.NEQ ty => DstOp.NEQ  ty
        | SrcOp.GT ty => DstOp.GT  ty
        | SrcOp.GTE ty => DstOp.GTE  ty
        | SrcOp.Not => DstOp.Not
        | SrcOp.Max ty   => DstOp.Max ty
        | SrcOp.Min ty  => DstOp.Min ty
        | SrcOp.Clamp ty => DstOp.Clamp ty
        | SrcOp.Lerp ty => DstOp.Lerp  ty
        | SrcOp.Sqrt => DstOp.Sqrt
        | SrcOp.Cos => DstOp.Cos
        | SrcOp.ArcCos => DstOp.ArcCos
        | SrcOp.Sine => DstOp.Sine
        | SrcOp.ArcSin => DstOp.ArcSin
        | SrcOp.Tan => DstOp.Tan
        | SrcOp.ArcTan => DstOp.ArcTan
        | SrcOp.Zero ty => DstOp.Zero  ty
        | SrcOp.EigenVals2x2 => DstOp.EigenVals2x2
        | SrcOp.EigenVals3x3 => DstOp.EigenVals3x3
        | SrcOp.EigenVecs2x2 => DstOp.EigenVecs2x2
        | SrcOp.EigenVecs3x3 => DstOp.EigenVecs3x3
        | SrcOp.Select (ty as SrcTy.TupleTy tys, i) => DstOp.Select ( ty, i) 
        | SrcOp.Index  (ty, i ) => DstOp.Index  ( ty, i) 
        | SrcOp.Subscript ty => DstOp.Subscript  ty
        | SrcOp.Ceiling d => DstOp.Ceiling d
        | SrcOp.Floor d => DstOp.Floor d
        | SrcOp.Round d => DstOp.Round d
        | SrcOp.Trunc d => DstOp.Trunc d
        | SrcOp.IntToReal => DstOp.IntToReal
        | SrcOp.RealToInt d => DstOp.RealToInt d
        | SrcOp.Inside info => DstOp.Inside info
        | SrcOp.Translate V => DstOp.Translate V
        | SrcOp.RAdd => DstOp.RAdd
        | SrcOp.RSub => DstOp.RSub
        | SrcOp.RMul => DstOp.RMul
        | SrcOp.RDiv => DstOp.RDiv

        | SrcOp.Print tys => DstOp.Print tys
(*
         (*missing low-ir op*)
        | SrcOp.LoadImage args => DstOp.LoadImage args
        | SrcOp.Exp => DstOp.Exp
        | SrcOp.PrincipleEvec ty => DstOp.PrincipleEvec ty
	    | SrcOp.powInt => DstOp.powInt  (*missing low-ir op*)
        | SrcOp.Normalize d => (DstOp.Normalize d)
        | SrcOp.imgAddr (v, indexAt, dim) => DstOp.imgAddr (v, indexAt, dim)
        | SrcOp.baseAddr V => DstOp.baseAddr V (*missing low-ir op*)
	    | SrcOp.Input inp => DstOp.Input inp
*)
        | rator => raise Fail  ("bogus operator " ^ SrcOp.toString rator) 
       (* end case *) ) 
    end

  end
