SCM Repository
View of /trunk/src/compiler/high-to-mid/probe.sml
Parent Directory
|
Revision Log
Revision 374 -
(download)
(annotate)
Sun Oct 3 18:56:32 2010 UTC (11 years, 8 months ago) by jhr
File size: 8098 byte(s)
Log message:
Debugging probe.sml.
(* probe.sml
 *
 * COPYRIGHT (c) 2010 The Diderot Project (http://diderot.cs.uchicago.edu)
 * All rights reserved.
 *
 * Expansion of probe operations in the HighIL to MidIL translation.
 *
 * A probe evaluates a (possibly differentiated) convolution field D^k (v * h)
 * at a world-space position.  This module emits the MidIL assignments that
 * (1) transform the position into image space, (2) load the neighborhood of
 * voxels around it, (3) evaluate the reconstruction kernel (and its
 * derivatives) per axis, and (4) reduce voxels against kernel weights into
 * the result tensor.
 *
 * NOTE(review): this revision contains debug `print` tracing (see the
 * `val _ = print ...` bindings below); the commit log says "Debugging
 * probe.sml", so presumably these are temporary — confirm before shipping.
 *)

structure Probe : sig

    (* expand (result, fld, pos) returns the MidIL assignment list that
     * evaluates the field definition `fld` at position `pos`, leaving the
     * value in `result`.  Only FieldDef.CONV is handled; SUM raises Fail
     * and NEG is commented out below.
     *)
    val expand : MidIL.var * FieldDef.field_def * MidIL.var -> MidIL.assign list

  end = struct

    structure SrcIL = HighIL
    structure SrcOp = HighOps
    structure DstIL = MidIL
    structure DstOp = MidOps
    structure DstV = DstIL.Var
    structure VMap = SrcIL.Var.Map
    structure IT = Shape        (* iteration trees over tensor/voxel shapes *)

  (* generate a new variable indexed by dimension; e.g. newVar_dim("h", 0)
   * names the variable after axis 0 (via Partials.axisToString).
   *)
    fun newVar_dim (prefix, d) = DstV.new (prefix ^ Partials.axisToString(Partials.axis d))

  (* small builders for MidIL assignments *)
    fun assign (x, rator, args) = (x, DstIL.OP(rator, args))
    fun cons (x, args) = (x, DstIL.CONS args)
    fun realLit (x, i) = (x, DstIL.LIT(Literal.Float(FloatLit.fromInt i)))
    fun intLit (x, i) = (x, DstIL.LIT(Literal.Int(IntInf.fromInt i)))

  (* generate code for evaluating a single element of a probe operation.
   * `pdOp` (a Partials.D) gives, per axis, the derivative order of the kernel
   * to evaluate; `result` receives the reduced scalar for this element.
   *)
    fun probeElem {
          dim,          (* dimension of space *)
          h, s,         (* kernel h with support s *)
          n, f,         (* Dst vars for integer and fractional components of position *)
          voxIter       (* iterator over voxels *)
        } (result, pdOp) = let
        (* generate the variables that hold the convolution coefficients;
         * one per axis, named h/dh/d<k>h according to the derivative order.
         *)
          val convCoeffs = let
                val Partials.D l = pdOp
                fun mkVar (_, []) = []
                  | mkVar (i, d::dd) = (case d
                       of 0 => newVar_dim("h", i) :: mkVar(i+1, dd)
                        | 1 => newVar_dim("dh", i) :: mkVar(i+1, dd)
                        | _ => newVar_dim(concat["d", Int.toString d, "h"], i) :: mkVar(i+1, dd)
                      (* end case *))
                in
                  mkVar (0, l)
                end
        (* NOTE(review): debug tracing — remove when probe.sml is stable *)
          val _ = print(concat[
                  "probeElem: ", Partials.partialToString pdOp, " in ",
                  Int.toString(List.length convCoeffs), "D space\n"
                ])
        (* for each dimension, we evaluate the kernel at the coordinates for that axis:
         * the argument vector `a` holds f_d + (s-1), ..., f_d - s, and EvalKernel
         * produces the 2*s coefficients for that axis (k = derivative order).
         *)
          val coeffCode = let
                fun gen (x, k, (d, code)) = let
                      val d = d-1       (* process axes right-to-left via foldr *)
                      val fd = newVar_dim ("f", d)
                      val a = DstV.new "a"
                    (* offsets s-1 down to -s, paired with fresh temporaries *)
                      val tmps = List.tabulate(2*s,
                            fn i => (DstV.new("t"^Int.toString i), s - (i+1)))
                      fun mkArg ((t, n), code) = let
                            val t' = DstV.new "r"
                            in
                              realLit (t', n) ::
                              assign (t, DstOp.Add DstOp.realTy, [fd, t']) :: code
                            end
                      val code = cons(a, List.map #1 tmps) ::
                            assign(x, DstOp.EvalKernel(2*s, h, k), [a]) :: code
                      val code = assign(fd, DstOp.Select(dim, d), [f]) ::
                            List.foldr mkArg code tmps
                      in
                        (d, code)
                      end
                val Partials.D l = pdOp
                in
                  #2 (ListPair.foldr gen (dim, []) (convCoeffs, l))
                end
        (* generate the reduction code: walk the voxel tree, dotting each level
         * against the per-axis coefficient vectors (innermost level uses the
         * loaded voxel vectors directly).
         * NOTE(review): the inner `lp` match is non-exhaustive; it relies on
         * the tree being built with exactly 2*s children per node — confirm.
         *)
          fun genReduce (result, [hh], IT.LF{vox, offsets}, code) =
                assign (result, DstOp.Dot(2*s), [vox, hh]) :: code
            | genReduce (result, hh::r, IT.ND(_, kids), code) = let
                val tv = DstV.new "tv"
                val tmps = List.tabulate(2*s, fn i => DstV.new("t"^Int.toString i))
                fun lp ([], [], code) = code
                  | lp (t::ts, kid::kids, code) = genReduce(t, r, kid, lp(ts, kids, code))
                val code = cons(tv, tmps) ::
                      assign(result, DstOp.Dot(2*s), [hh, tv]) :: code
                in
                  lp (tmps, kids, code)
                end
            | genReduce _ = raise Fail "genReduce"
          val reduceCode = genReduce (result, convCoeffs, voxIter, [])
          in
            coeffCode @ reduceCode
          end

  (* generate code for probing the field (D^k (v * h)) at pos *)
    fun probe (result, (k, v, h), pos) = let
          val ImageInfo.ImgInfo{dim, ty=([], ty), ...} = v
          val dimTy = DstOp.VecTy dim
          val s = Kernel.support h
        (* generate the transform code: map pos to image space, then split it
         * into integer (n) and fractional (f) parts.
         *)
          val x = DstV.new "x"          (* image-space position *)
          val f = DstV.new "f"          (* fractional part of x *)
          val nd = DstV.new "nd"        (* floor(x), still real-valued *)
          val n = DstV.new "n"          (* integer part of x *)
          val transformCode = [
                  assign(x, DstOp.Transform v, [pos]),
                  assign(nd, DstOp.Floor dim, [x]),
                  assign(f, DstOp.Sub dimTy, [x, nd]),
                  assign(n, DstOp.TruncToInt dim, [nd])
                ]
        (* generate the shape of the differentiation tensor with variables
         * representing the elements; each leaf pairs a fresh result variable
         * with the partial-derivative operator for that element.
         *)
          val diffIter = let
                val partial = Partials.partial dim
                fun f (i, axes) = Partials.axis i :: axes
                fun g axes = (
                      DstV.new(String.concat("r" :: List.map Partials.axisToString axes)),
                      partial axes)
                in
                  IT.create (k, dim, fn _ => (), f, g, [])
                end
        (* NOTE(review): debug tracing of diffIter — remove when stable *)
          val _ = let
                val indentWid = ref 2
                fun inc () = (indentWid := !indentWid + 2)
                fun dec () = (indentWid := !indentWid - 2)
                fun indent () = print(CharVector.tabulate(!indentWid, fn _ => #" "))
                fun nd () = (indent(); print "ND\n");
                fun lf (x, partial) = (
                      indent();
                      print(concat["LF(", DstV.toString x, ", ", Partials.partialToString partial, ")\n"]))
                fun pr (Shape.ND(attr, kids)) = (nd attr; inc(); List.app pr kids; dec())
                  | pr (Shape.LF attr) = lf attr
                in
                  print "diffIter:\n"; pr diffIter
                end
        (* generate code to load the voxel data; since we use a vector load
         * operation to load the fastest dimension, the height of the tree is
         * one less than the dimension of space.
         *)
          val voxIter = let
                fun f (i, (offsets, id)) = (i - (s - 1) :: offsets, i::id)
                fun g (offsets, id) = {
                        offsets = ~(s-1) :: offsets,
                        vox = DstV.new(String.concat("v" :: List.map Int.toString id))
                      }
                in
                  IT.create (dim-1, 2*s, fn _ => (), f, g, ([], []))
                end
        (* NOTE(review): debug tracing of voxIter — remove when stable *)
          val _ = let
                val indentWid = ref 2
                fun inc () = (indentWid := !indentWid + 2)
                fun dec () = (indentWid := !indentWid - 2)
                fun indent () = print(CharVector.tabulate(!indentWid, fn _ => #" "))
                fun nd () = (indent(); print "ND\n");
                fun lf {offsets, vox} = (
                      indent();
                      print "LF{offsets = [";
                      print(String.concatWith "," (List.map Int.toString offsets));
                      print "], vox = ";
                      print(DstV.toString vox);
                      print "}\n")
                fun pr (Shape.ND(attr, kids)) = (nd attr; inc(); List.app pr kids; dec())
                  | pr (Shape.LF attr) = lf attr
                in
                  print "voxIter:\n"; pr voxIter
                end
        (* for each leaf of the voxel tree, compute the per-axis integer
         * indices (n_i + offset_i) and emit an address computation plus a
         * vector load of 2*s voxels.
         *)
          val loadCode = let
                fun genCode ({offsets, vox}, code) = let
                      fun computeIndices (_, []) = ([], [])
                        | computeIndices (i, offset::offsets) = let
                            val index = newVar_dim("i", i)
                            val t1 = DstV.new "t1"
                            val t2 = DstV.new "t2"
                            val (indices, code) = computeIndices (i+1, offsets)
                          (* NOTE(review): Select width here is 2*s, but `n`
                           * is built as a dim-wide vector above — confirm the
                           * width argument is intended (looks like it should
                           * be dim, not 2*s).
                           *)
                            val code = intLit(t1, offset) ::
                                  assign(t2, DstOp.Select(2*s, i), [n]) ::
                                  assign(index, DstOp.Add(DstOp.IntTy), [t1, t2]) :: code
                            val indices = index::indices
                            in
                              (indices, code)
                            end
                      val (indices, indicesCode) = computeIndices (0, offsets)
                      val a = DstV.new "a"
                      in
                        indicesCode @ [
                            assign(a, DstOp.VoxelAddress v, indices),
                            assign(vox, DstOp.LoadVoxels(ty, 2*s), [a])
                          ] @ code
                      end
                in
                  IT.foldr genCode [] voxIter
                end
        (* generate code to evaluate and construct the result tensor *)
          val probeElem = probeElem {dim = dim, h = h, s = s, n = n, f = f, voxIter = voxIter}
          fun genProbe (result, IT.ND(_, kids as (IT.LF _)::_), code) = let
              (* the kids will all be leaves *)
                fun genProbeCode (IT.LF arg, code) = probeElem arg @ code
                fun getProbeVar (IT.LF(t, _)) = t
                in
                  List.foldr genProbeCode (cons (result, List.map getProbeVar kids) :: code) kids
                end
            | genProbe (result, IT.ND(_, kids), code) = let
                val tmps = List.tabulate(dim, fn i => DstV.new("t"^Int.toString i))
                val code = cons(result, tmps) :: code
                fun lp ([], [], code) = code
                  | lp (t::ts, kid::kids, code) = genProbe(t, kid, lp(ts, kids, code))
                in
                  lp (tmps, kids, code)
                end
            | genProbe (result, IT.LF(t, pdOp), code) = (* for scalar fields *)
                probeElem (result, pdOp) @ code
          val probeCode = genProbe (result, diffIter, [])
          in
            transformCode @ loadCode @ probeCode
          end

  (* see the signature comment: dispatch on the field definition.  Only
   * convolution fields are expanded; NEG is disabled (commented out) and
   * SUM is unimplemented.
   *)
    fun expand (result, fld, pos) = let
          fun expand' (result, FieldDef.CONV(k, v, h)) = probe (result, (k, v, h), pos)
(* should push negation down to probe operation
            | expand' (result, FieldDef.NEG fld) = let
                val r = DstV.new "value"
                val stms = expand' (r, fld)
                val ty = ??
                in
                  expand' (r, fld) @ [assign(r, DstOp.Neg ty, [r])]
                end
*)
          (* NOTE(review): binder `dlf2` looks like a typo for `fld2`; it is
           * unused (the case just raises), so it is harmless as written.
           *)
            | expand' (result, FieldDef.SUM(fld1, dlf2)) = raise Fail "expandInside: SUM"
          in
            expand' (result, fld)
          end

  end
root@smlnj-gforge.cs.uchicago.edu | ViewVC Help |
Powered by ViewVC 1.0.0 |