(* evalKrn
 * Evaluate an EIN kernel expression to low-IL ops: iterate over the range of
 * the kernel support, determine the differentiation level, and evaluate the
 * kernel's polynomial segments.
 *)
structure EvalKrn = struct
  local
    structure DstIL = LowIL
    structure DstTy = LowILTypes
    structure Var = LowIL.Var
    structure E = Ein
    structure H = Helper
    structure IMap = IntRedBlackMap
  in

  (* thin re-exports of Helper operations used below *)
  fun find e = H.find e
  fun intToReal n = H.intToReal n
  fun assgnCons e = H.assgnCons e
  fun indexTensor e = H.indexTensor e
  fun mkAddVec e = H.mkAddVec e
  fun mkSubSca e = H.mkSubSca e
  fun mkProdVec e = H.mkProdVec e
  (* NOTE(review): argument order is rearranged before delegating to
   * H.mkDotVec — verify against Helper's signature *)
  fun mkDotVec (avail, a, b, last) = H.mkDotVec (avail, last, [a, b])
  fun err str = raise Fail (str)

  (* integer-keyed map helpers; the map binds summation-index ids to offsets *)
  fun lookup k d = IMap.find (d, k)
  fun insert (k, v) d = IMap.insert (d, k, v)

  (* convert a rational to a FloatLit.float value.  We do this by long
   * division with a cutoff when we get to 12 digits (the final digit is
   * rounded, with carries propagated upward).
   *)
  fun ratToFloat r = (case Rational.explode r
         of {sign=0, ...} => FloatLit.zero false
          | {sign, num, denom=1} => FloatLit.fromInt (IntInf.fromInt sign * num)
          | {sign, num, denom} => let
              (* normalize so that num <= denom *)
              val (denom, exp) = let
                    fun lp (n, denom) = if (denom < num)
                          then lp (n+1, denom*10)
                          else (denom, n)
                    in
                      lp (1, denom)
                    end
              (* normalize so that num <= denom < 10*num *)
              val (num, exp) = let
                    fun lp (n, num) = if (10*num < denom)
                          then lp (n-1, 10*num)
                          else (num, n)
                    in
                      lp (exp, num)
                    end
              (* divide num/denom, computing the resulting digits *)
              fun divLp (n, a) = let
                    val (q, r) = IntInf.divMod (a, denom)
                    in
                      if (r = 0) then (q, [])
                      else if (n < 12)
                        then let
                          val (d, dd) = divLp (n+1, 10*r)
                          in
                            (* a returned digit of 10 is a carry out of the
                             * rounded tail *)
                            if (d < 10) then (q, (IntInf.toInt d) :: dd)
                            else (q+1, 0 :: dd)
                          end
                      else if (IntInf.div (10*r, denom) < 5) then (q, [])
                        else (q+1, []) (* round up *)
                    end
              val digits = let
                    val (d, dd) = divLp (0, num)
                    in
                      (IntInf.toInt d) :: dd
                    end
              in
                FloatLit.fromDigits{isNeg = (sign < 0), digits = digits, exp = exp}
              end
        (* end case *))

  (* expand the EvalKernel operations into vector operations.  The parameters
   * are
   *    result -- the lhs variable to store the result
   *    d      -- the vector width of the operation, which should be equal
   *              to twice the support of the kernel
   *    h      -- the kernel
   *    k      -- the derivative of the kernel to evaluate
   *
   * The generated code is computing
   *
   *    result = a_0 + x*(a_1 + x*(a_2 + ... x*a_n) ... )
   *
   * as a d-wide vector operation, where n is the degree of the kth derivative
   * of h and the a_i are coefficient vectors that have an element for each
   * piece of h.  The computation is implemented as follows
   *
   *    m_n     = x * a_n
   *    s_{n-1} = a_{n-1} + m_n
   *    m_{n-1} = x * s_{n-1}
   *    s_{n-2} = a_{n-2} + m_{n-1}
   *    m_{n-2} = x * s_{n-2}
   *    ...
   *    s_1     = a_1 + m_2
   *    m_1     = x * s_1
   *    result  = a_0 + m_1
   *
   * Note that the coefficient vectors are flipped (cf high-to-low/probe.sml).
   *
   * NOTE(review): the `pre` parameter is never referenced in the body; the
   * caller (evalK below) passes a generated name string here — confirm
   * whether it was meant to prefix the fresh variable names.
   *)
  fun expandEvalKernel (avail, pre, d, h, k, x) = let
        val {isCont, segs} = Kernel.curve (h, k)
        (* degree of polynomial *)
        val deg = List.length (hd segs) - 1
        (* segs is length 2*support; each inner list is the list of
         * polynomial coefficients for one piece of the kernel *)
        val segs = Vector.fromList (List.rev (List.map Vector.fromList segs))
        fun coefficient d i =
              Literal.Float (ratToFloat (Vector.sub (Vector.sub (segs, i), d)))
        val ty = DstTy.vecTy d
        val coeffs = List.tabulate (deg+1, fn i => Var.new ("P"^Int.toString i, ty))
        (* materialize each coefficient vector P_i as a CONS of d float
         * literals, threading the availability map through *)
        val (_, avail, coeffs) = let
              fun mk (x, (i::xs, avail, current)) = let
                    val lits = List.tabulate (d, coefficient i)
                    val vars = List.tabulate (d, fn _ => Var.new ("_f", DstTy.TensorTy []))
                    val vars = ListPair.map
                          (fn (x, lit) => (AvailRHS.addAssign avail (x, DstIL.LIT lit)))
                            (vars, lits)
                    val var = AvailRHS.addAssign avail (x, DstIL.CONS (Var.ty x, vars))
                    in
                      (xs, avail, var::current)
                    end
              in
                (List.foldr mk (List.tabulate (deg+1, fn e => e), avail, []) (List.rev coeffs))
              end
        (* get dot product and addition of list of coeffs: Horner evaluation,
         * alternating multiply-by-x and add down the coefficient list *)
        fun mkdot (avail, [e2, e1]) = let
              val (avail, vA) = mkProdVec (avail, d, [x, e2])
              in
                mkAddVec (avail, d, [e1, vA])
              end
          | mkdot (avail, e2::e1::es) = let
              val (avail, vA) = mkProdVec (avail, d, [x, e2])
              val (avail, vB) = mkAddVec (avail, d, [e1, vA])
              in
                mkdot (avail, vB::es)
              end
          | mkdot (avail, [e1]) = mkProdVec (avail, d, [x, e1])
            (* NOTE(review): only the empty list reaches this catch-all —
             * the one-item case is handled above, although the message
             * says otherwise *)
          | mkdot _ = raise Fail "0 or 1 item in Kernel coeffs"
        in
          mkdot (avail, coeffs)
        end

  (* mkkrns : dict * string * E.params * Var list * sum_id list
   *          * (E.mu*E.mu) list * Kernel * int * int * int * int
   * kernels
   *
   * comments on the nested functions:
   *
   * evalDels : dictionary * (E.mu*E.mu) list -> int
   *    evaluate each delta and therefore each differentiation level for
   *    each kernel
   *
   * mkSimpleOp : dict * string * E.params * Var list * E.body
   *    -> Var * LowIL.assign list
   *    turn a position expression into a low-IL op
   *
   * mkpos : (E.kernel_id*int*E.pos) list * Var list * Var list * dict * int
   *         * LowIL.assign list * int
   *    -> Var * LowIL.assign list
   *    bind summation indices by creating mapp and evaluate the position
   *
   * consfn : Var list list * Var list * LowIL.assign list
   *    -> Var * LowIL.assign list
   *    cons everything on the list, makes vectors
   *
   * evalK : (E.kernel_id*int*E.pos) list * var list * int * int * param_id
   *         * LowIL.assign list
   *    -> Var * LowIL.assign list
   *    evaluate kernel with segments
   *)
  fun mkkrns (avail, mappOrig, params, args, krns, h, sid, lb, range0, range1) = let
        (* sum the delta contributions to get one differentiation level *)
        fun evalDels (mapp, dels) =
              List.foldl (fn (x, y) => x+y) 0
                (List.map (fn (i, j) => H.deltaToInt (mapp, i, j)) dels)
        (* lower a position of the shape (tensor - value); only this shape is
         * matched — anything else raises Match *)
        fun mkSimpleOp (avail, mapp, params, args,
              E.Op2 (E.Sub, E.Tensor (t1, ix1), E.Value v1)) = let
              val (avail, vA) =
                    indexTensor (avail, mapp, (params, args, t1, ix1, DstTy.TensorTy []))
              in (case (find (v1, mapp))
                   of 0 => (avail, vA) (* offset 0: no subtraction needed *)
                    | j => let
                        val (avail, vB) = intToReal (avail, j)
                        in
                          mkSubSca (avail, [vA, vB])
                        end
                  (* end case *))
              end
        (* for each kernel, iterate offset i from range0 down to 0, binding
         * summation index n to lb+i in the map and evaluating the position;
         * at i = 0 the accumulated list is flushed, n is rebound to 0, and
         * the next kernel starts with i reset to range0 *)
        fun mkpos (avail, k, fin, rest, dict, i, n) = (case (k, i)
               of ([], _) => (avail, List.rev fin)
                | ((_, _, pos1)::ks, 0) => let
                    val mapp = insert (n, lb) dict
                    val (avail, rest') = mkSimpleOp (avail, mapp, params, args, pos1)
                    val e = rest@[rest']
                    val mapp' = insert (n, 0) dict
                    in
                      mkpos (avail, ks, e::fin, [], mapp', range0, n+1)
                    end
                | ((_, _, pos1)::es, _) => let
                    val mapp = insert (n, lb+i) dict
                    val (avail, rest') = mkSimpleOp (avail, mapp, params, args, pos1)
                    in
                      mkpos (avail, k, fin, rest@[rest'], dict, i-1, n)
                    end
              (* end case *))
        (* cons each per-kernel list of positions into one vector h<n> *)
        fun consfn (avail, [], _, rest) = (avail, List.rev rest)
          | consfn (avail, e1::es, n, rest) = let
              val (avail, vA) =
                    assgnCons (avail, "h"^Int.toString(n), [length (e1)], List.rev e1)
              in
                consfn (avail, es, n+1, vA::rest)
              end
        (* evaluate each kernel (derivative level dk) at its position vector;
         * the two lists must have equal length *)
        fun evalK (avail, [], [], newId) = (avail, List.rev newId)
          | evalK (avail, kn::kns, x::xs, newId) = let
              val (_, dk, pos) = kn
              (* the direction comes from the constant index on the tensor
               * being probed, defaulting to 0 *)
              val directionX = (case pos
                     of E.Op2 (E.Sub, E.Tensor (_, [E.C directionX]), _) => directionX
                      | _ => 0
                    (* end case *))
              val name = String.concat["h", Int.toString(directionX), "_", Int.toString dk]
              val (avail, id) = expandEvalKernel (avail, name, range1, h, dk, x)
              in
                evalK (avail, kns, xs, id::newId)
              end
          | evalK _ = raise Fail "Non-equal variable list, error in mkKrns"
        (* resolve each kernel's deltas to a concrete differentiation level *)
        val newkrns =
              List.map (fn (id, d1, pos) => (id, evalDels (mappOrig, d1), pos)) krns
        val (avail, lftkrn) = mkpos (avail, newkrns, [], [], mappOrig, range0, sid)
        val (avail, lft) = consfn (avail, lftkrn, 0, [])
        in
          evalK (avail, newkrns, lft, [])
        end

  end (* local *)
end
(* end of file: EvalKrn *)