~ s/RDB2RDF/SparqlToSql/g
author Eric Prud'hommeaux <eric@w3.org>
date Sun, 31 Jan 2010 11:12:27 -0500
changeset 148 541248a96d42
parent 147 e4597da0affe
child 149 4735e186c47b
~ s/RDB2RDF/SparqlToSql/g
src/main/scala/RDB2RDFMain.scala
src/main/scala/SparqlToSql.scala
src/test/scala/RDB2RDFTest.scala
src/test/scala/SparqlToSqlTest.scala
--- a/src/main/scala/RDB2RDFMain.scala	Sun Jan 31 10:39:08 2010 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,701 +0,0 @@
-/* RDB2RDF: convert SPARQL queries to sound SQL queries.
- *
- * Please read from the bottom -- i.e. apply calls mapGraphPattern with the root
- * graph pattern. mapGraphPattern handles all the graph pattern types in SPARQL,
- * effectively performing the Convert Graph Patterns step in SPARQL 1.0 12.2.1
- *   <http://www.w3.org/TR/rdf-sparql-query/#convertGraphPattern>
- * with the target semantics in SQL instead of SPARQL.
- */
-
-package w3c.sw.rdb2rdf
-
-import scala.util.parsing.combinator._
-import java.net.URI
-import w3c.sw.sql
-import w3c.sw.sql.PrettySql.toPrettySql
-import w3c.sw.sparql
-import w3c.sw.util
-
-case class StemURI(s:String)
-case class PrimaryKey(attr:sql.Attribute)
-
-sealed abstract class Binding
-case class RDFNode(relvarattr:sql.RelVarAttr) extends Binding
-case class Str(relvarattr:sql.RelVarAttr) extends Binding
-case class Int(relvarattr:sql.RelVarAttr) extends Binding
-case class Enum(relvarattr:sql.RelVarAttr) extends Binding
-
-object RDB2RDF {
-  case class R2RState(joins:util.AddOrderedSet[sql.Join], varmap:Map[sparql.Var, SQL2RDFValueMapper], exprs:Set[sql.Expression])
-
-  sealed abstract class FullOrPartialBinding
-  case class FullBinding(relvarattr:sql.RelVarAttr) extends FullOrPartialBinding
-  case class BindingConstraint(expr:sql.RelationalExpression, relvarattr:sql.RelVarAttr)
-  case class PartialBinding(binders:Set[BindingConstraint]) extends FullOrPartialBinding
-
-  def toExpr(against:FullOrPartialBinding):sql.Expression = {
-    /* if (g_union1._DISJOINT_ != 0, g_union1.who, if (g_union2._DISJOINT_ != 3, g_union2.who, NULL)) */
-    against match {
-      case FullBinding(relvarattr) =>
-	sql.PrimaryExpressionAttr(relvarattr)
-      case PartialBinding(binders) =>
-	binders.toList.reverse.foldLeft(sql.ConstNULL():sql.Expression)((exp, binding) => {
-	  val BindingConstraint(expr, relvarattr) = binding
-	  sql.IfElse(expr, sql.PrimaryExpressionAttr(relvarattr), exp)
-	})
-    }
-  }
-  def addExpr(binding:FullOrPartialBinding, relVarAttr:sql.RelVarAttr, expr:sql.RelationalExpression):FullOrPartialBinding = {
-    binding match {
-      case FullBinding(relvarattr) =>
-	binding
-      case PartialBinding(binders) =>
-	PartialBinding(binders + BindingConstraint(expr, relVarAttr))
-    }
-  }
-  def toConstraint999(constrainMe:sql.RelVarAttr, against:FullOrPartialBinding):sql.Expression = {
-    against match {
-      case FullBinding(relvarattr) =>
-	sql.RelationalExpressionEq(sql.PrimaryExpressionAttr(constrainMe),
-				   sql.PrimaryExpressionAttr(relvarattr))
-      case PartialBinding(binders) =>
-	sql.ExprConjunction({binders.map(b => {
-	  val BindingConstraint(expr, relvarattr) = b
-	  sql.ExprDisjunction(Set(expr,
-				  sql.RelationalExpressionEq(sql.PrimaryExpressionAttr(constrainMe),
-							     sql.PrimaryExpressionAttr(relvarattr))))})})
-    }
-  }
-
-  sealed abstract class SQL2RDFValueMapper(binding:FullOrPartialBinding)
-  case class IntMapper(binding:FullOrPartialBinding) extends SQL2RDFValueMapper(binding)
-  case class StringMapper(binding:FullOrPartialBinding) extends SQL2RDFValueMapper(binding)
-  case class DateMapper(binding:FullOrPartialBinding) extends SQL2RDFValueMapper(binding)
-  case class RDFNoder(relation:sql.Relation, binding:FullOrPartialBinding) extends SQL2RDFValueMapper(binding)
-  case class RDFBNoder(relation:sql.Relation, binding:FullOrPartialBinding) extends SQL2RDFValueMapper(binding)
-
-  case class NodeUri(stem:Stem, rel:Rel, attr:Attr, v:CellValue)
-  case class Stem(s:String) {
-    override def toString = "" + s
-  }
-  case class Rel(s:String) { // !! NUKE
-    override def toString = "" + s
-  }
-  case class Attr(s:String) {
-    override def toString = "" + s
-  }
-  case class CellValue(s:String)
-  case class PUri(stem:Stem, rel:Rel, attr:Attr) {
-    override def toString = "<" + stem + "/" + rel + "#" + attr + ">"
-  }
-  /* stemURI + '/' + (\w+) + '#' (\w+) */
-  def parsePredicateURI(u:sparql.Uri):PUri = {
-    val x:String = u.s
-    val uri = new URI(x)
-    val path = uri.getPath().split("/").toList.filterNot(_ == "")
-    val subPath = path.slice(0, path.size - 1).mkString("/")
-    val stem = uri.getScheme() + "://" + uri.getAuthority + "/" + subPath
-    PUri(Stem(stem), Rel(path.last), Attr(uri.getFragment))
-  }
-
-  /* stemURI + '/' (\w+) '/' (\w+) '.' (\w+) '#record' */
-  def parseObjectURI(u:sparql.Uri):NodeUri = {
-    val x:String = u.s
-    val uri = new URI(x)
-    val path = uri.getPath().split("/").toList.filterNot(_ == "")
-    val subPath = path.slice(0, path.size - 2).mkString("/")
-    val rel = path(path.size - 2)
-    val attrPair = path(path.size-1).split("\\.")
-    val stem = uri.getScheme() + "://" + uri.getAuthority + "/" + subPath
-    assert("record" == uri.getFragment)
-    NodeUri(Stem(stem), Rel(rel), Attr(attrPair(0)), CellValue(attrPair(1)))
-  }
-/*
-Sparql.parseObjectURI(
-Sparql.parsePredicateURI(
-*/
-  def relVarFromS(s:sparql.S):sql.RelVar = {
-    s match {
-      case sparql.SUri(ob) => relVarFromNode(ob)
-      case sparql.SVar(v) => relVarFromVar(v)
-    }
-  }
-
-  def relVarFromO(o:sparql.O):sql.RelVar = {
-    o match {
-      case sparql.OUri(ob) => relVarFromNode(ob)
-      case sparql.OVar(v) => relVarFromVar(v)
-      case sparql.OLit(l) => relVarFromLiteral(l)
-    }
-  }
-
-  def relVarFromNode(u:sparql.Uri):sql.RelVar = {
-    val NodeUri(stem, rel, Attr(a), CellValue(v)) = parseObjectURI(u)
-    sql.RelVar(sql.Name("R_" + a + v))
-  }
-
-  def relVarFromLiteral(l:sparql.Literal):sql.RelVar = {
-    sql.RelVar(sql.Name("R_" + l.lit.lexicalForm))
-  }
-
-  def relVarFromVar(vr:sparql.Var):sql.RelVar = {
-    val sparql.Var(v) = vr
-    sql.RelVar(sql.Name("R_" + v))
-  }
-
-  def attrAliasNameFromVar(v:sparql.Var):sql.Name = sql.Name("" + v.s)
-
-  def uriConstraint(state:R2RState, constrainMe:sql.RelVarAttr, u:NodeUri, enforceForeignKeys:Boolean):R2RState = {
-    val relvar =
-      if (enforceForeignKeys)
-	sql.RelVarAttr(constrainMe.relvar, sql.Attribute(sql.Name(u.attr.s)))
-      else
-	constrainMe
-    R2RState(state.joins,
-	     state.varmap,
-	     state.exprs + sql.RelationalExpressionEq(sql.PrimaryExpressionAttr(relvar),
-						      sql.PrimaryExpressionTyped(sql.Datatype.INTEGER,sql.Name(u.v.s))))
-  }
-
-  def literalConstraint(state:R2RState, constrainMe:sql.RelVarAttr, lit:sparql.Literal, dt:sql.Datatype):R2RState = {
-    R2RState(state.joins,
-	     state.varmap,
-	     state.exprs + sql.RelationalExpressionEq(sql.PrimaryExpressionAttr(constrainMe),
-						      sql.PrimaryExpressionTyped(dt,sql.Name(lit.lit.lexicalForm))))    
-  }
-
-  /** varConstraint
-   * examples:
-   * SELECT ?emp WHERE { ?emp emp:manager <http://hr.example/our/favorite/DB/Employee/id.18#record> ; emp:name ?name }
-   * SQL Results                     SPARQL Results
-   * A_emp A_name    ?emp                                                      ?name
-   * 4     "Bob"     <http://hr.example/our/favorite/DB/Employee/id.4#record>  "Bob"^^xsd:string
-   * 6     "Sue"     <http://hr.example/our/favorite/DB/Employee/id.6#record>  "Sue"^^xsd:string
-   * 
-   * type String -> RDFStringConstructor // adds ^^xsd:string
-   * type primary key -> RDFNodeConstructor // prefixes with stemURI + relation + attribute and adds #record
-   * */
-  def varConstraint(state:R2RState, alias:sql.RelVar, optAttr:Option[sql.Attribute], v:sparql.Var, db:sql.DatabaseDesc, rel:sql.Relation):R2RState = {
-    /* e.g.                                 Employee      _emp.id            
-    **                                      Employee      _emp.lastName      
-    **                                      Employee      _emp.manager       
-    */
-    val constrainMe = if (optAttr.isDefined) sql.RelVarAttr(alias, optAttr.get) else sql.RelVarAttr(alias, sql.Attribute(sql.Name("_no_such_attribute")))
-    val reldesc = db.relationdescs(rel)
-    val boundTo = FullBinding(constrainMe)
-    val binding = reldesc.primarykey match {
-      case Some(sql.Attribute(constrainMe.attribute.n)) => RDFNoder(rel, boundTo)
-      case _ => {
-	// e.g. sql.Attribute(sql.Name("id")) or None
-	if (reldesc.attributes.contains(constrainMe.attribute)) {
-	  reldesc.attributes(constrainMe.attribute) match {
-	    case sql.ForeignKey(fkrel, fkattr) => RDFNoder(rel, boundTo)
-	    case sql.Value(sql.Datatype("Int")) => IntMapper(boundTo)
-	    case sql.Value(sql.Datatype("String")) => StringMapper(boundTo)
-	    case sql.Value(sql.Datatype("Date")) => DateMapper(boundTo)
-	  }
-	} else {
-	  RDFBNoder(rel, boundTo)
-	}
-      }
-    }
-
-    if (state.varmap.contains(v) && state.varmap(v) != constrainMe) {
-      /* The variable has already been bound to another attribute. */
-      /* Constrain it against the initial binding for this variable. */
-      val constraint = sql.RelationalExpressionEq(sql.PrimaryExpressionAttr(constrainMe),
-						  sql.PrimaryExpressionAttr(varToAttribute(state.varmap, v)))
-      R2RState(state.joins, state.varmap, 
-	       if (varToAttributeDisjoints(state.varmap, v).size > 0) {
-		 state.exprs ++ {varToAttributeDisjoints(state.varmap, v) map ((d) => sql.ExprDisjunction(Set(d, constraint)))}
-	       } else
-		 state.exprs + constraint
-	     )
-    } else {
-      /* This is a new variable or a replacement binding for an old variable. */
-      R2RState(state.joins, state.varmap + (v -> binding), state.exprs)
-    }
-  }
-
-  def toString(relvarattr:sql.RelVarAttr) : String = {
-    relvarattr.relvar.n.s + "." + relvarattr.attribute.n.s
-  }
-  // def toString(mapper:SQL2RDFValueMapper) : String = {
-  //   mapper match {
-  //     case IntMapper(relvar, disjoints) => "INT: " + toString(relvar)
-  //     case StringMapper(relvar, disjoints) => "STRING: " + toString(relvar)
-  //     case DateMapper(relvar, disjoints) => "DATE: " + toString(relvar)
-  //     case RDFNoder(relation, relvar, disjoints) => "RDFNoder: " + relation.n.s + ", " + toString(relvar)
-  //     case RDFBNoder(relation, relvar, disjoints) => "RDFBNoder: " + relation.n.s + ", " + toString(relvar)
-  //   }
-  // }
-
-  /* bindOnPredicate: map a given triple to one or two joined tables, variable
-   * bindings to RelVarAttrs, and constraints if those variables were
-   * already bound. */
-  def bindOnPredicate(db:sql.DatabaseDesc, stateP:R2RState, triple:sparql.TriplePattern, enforceForeignKeys:Boolean):R2RState = {
-    val sparql.TriplePattern(s, p, o) = triple
-    p match {
-      case sparql.PVar(v) => error("variable predicates require tedious enumeration; too tedious for me.")
-      case sparql.PUri(uri) => {
-	val PUri(stem, spRel, spAttr) = parsePredicateURI(uri)
-	/* Attributes that come from the predicate: */
-	val rel = sql.Relation(sql.Name(spRel.s))
-	val attr = sql.Attribute(sql.Name(spAttr.s))
-	val relvar = relVarFromS(s)
-
-	/* Attributes that come from the subject: */
-	val objattr = sql.RelVarAttr(relvar, attr)
-	val state_postSubj = s match {
-	  case sparql.SUri(u) => uriConstraint(stateP, sql.RelVarAttr(relvar, db.relationdescs(rel).primarykey.get), parseObjectURI(u), true)
-	  case sparql.SVar(v) => varConstraint(stateP, relvar, db.relationdescs(rel).primarykey, v, db, rel)
-	}
-	val state_subjJoin = R2RState(state_postSubj.joins + sql.InnerJoin(sql.AliasedResource(rel,relvar), None), state_postSubj.varmap, state_postSubj.exprs)
-
-	try { db.relationdescs(rel).attributes(attr) } catch {
-	  case e:java.util.NoSuchElementException =>
-	    throw new Exception("error processing { " + s + " " + p + " " + o + " } :db.relationdescs(" + rel + ").attributes(" + attr + ") not found in " + db)
-	}
-	val (targetattr:sql.RelVarAttr, targetrel, dt, state_fkeys:R2RState) = db.relationdescs(rel).attributes(attr) match {
-	  case sql.ForeignKey(fkrel, fkattr) => {
-	    try { db.relationdescs(fkrel).attributes(fkattr) } catch {
-	      case e:java.util.NoSuchElementException =>
-		throw new Exception("db.relationdescs(" + fkrel + ").attributes(" + fkattr + ") not found in " + db)
-	    }
-	    val fkdt = db.relationdescs(fkrel).attributes(fkattr) match {
-	      case sql.ForeignKey(dfkrel, dfkattr) => error("foreign key " + rel.n + "." + attr.n + 
-							"->" + fkrel.n + "." + fkattr.n + 
-							"->" + dfkrel.n + "." + dfkattr.n)
-	      case sql.Value(x) => x
-	    }
-	    if (enforceForeignKeys) {
-	      val oRelVar = relVarFromO(o)
-	      val fkaliasattr = sql.RelVarAttr(oRelVar, fkattr)
-	      val state_t = R2RState(state_subjJoin.joins + sql.InnerJoin(sql.AliasedResource(fkrel,oRelVar), None),
-				     state_subjJoin.varmap,
-				     state_subjJoin.exprs + sql.RelationalExpressionEq(sql.PrimaryExpressionAttr(fkaliasattr),
-										       sql.PrimaryExpressionAttr(objattr)))
-
-	      (fkaliasattr, fkrel, fkdt, state_t)
-	    } else {
-	      (objattr, rel, fkdt, state_subjJoin)
-	    }
-	  }
-	  case sql.Value(dt) => (objattr, rel, dt, state_subjJoin)
-	}
-	o match {
-	  case sparql.OLit(l) => literalConstraint(state_fkeys, targetattr, l, dt)
-	  case sparql.OUri(u) => uriConstraint    (state_fkeys, targetattr, parseObjectURI(u), enforceForeignKeys)
-	  case sparql.OVar(v) => varConstraint    (state_fkeys, targetattr.relvar, Some(targetattr.attribute), v, db, targetrel)
-	}
-      }
-    }
-  }
-
-  def findVars(triple:sparql.TriplePattern):Set[sparql.Var] = {
-    val sparql.TriplePattern(s, p, o) = triple
-    val varS:Set[sparql.Var] = s match {
-      case sparql.SVar(v) => Set(v)
-      case _              => Set()
-    }
-    val varO:Set[sparql.Var] = o match {
-      case sparql.OVar(v) => Set(v)
-      case _              => Set()
-    }
-    varS ++ varO
-  }
-
-  def findVars(gp:sparql.GraphPattern):Set[sparql.Var] = {
-    gp match {
-      case sparql.TableFilter(gp2:sparql.GraphPattern, expr:sparql.Expression) =>
-	findVars(gp2)
-
-      case sparql.TriplesBlock(triplepatterns) =>
-	/* Examine each triple, updating the compilation state. */
-	triplepatterns.foldLeft(Set[sparql.Var]())((x, y) => x ++ findVars(y))
-
-      case sparql.TableConjunction(list) =>
-	/* Examine each triple, updating the compilation state. */
-	list.foldLeft(Set[sparql.Var]())((x, y) => x ++ findVars(y))
-
-      case sparql.OptionalGraphPattern(gp2) =>
-	/* Examine each triple, updating the compilation state. */
-	findVars(gp2)
-
-      case x => error("no code to handle " + x)
-    }
-  }
-
-  def bindingConstraintToAttribute(constraint:BindingConstraint):sql.RelVarAttr = {
-    val BindingConstraint(expr:sql.RelationalExpression, relvarattr:sql.RelVarAttr) = constraint;
-    relvarattr
-  }
-  def bindingToAttribute(binding:FullOrPartialBinding):sql.RelVarAttr = {
-    binding match {
-      case FullBinding(relvarattr:sql.RelVarAttr) => relvarattr
-      case PartialBinding(binders) => bindingConstraintToAttribute(binders.toList(0))
-    }
-  }
-  def varToAttribute(varmap:Map[sparql.Var, SQL2RDFValueMapper], vvar:sparql.Var):sql.RelVarAttr = {
-    varmap(vvar) match {
-      case IntMapper(binding) => bindingToAttribute(binding)
-      case StringMapper(binding) => bindingToAttribute(binding)
-      case DateMapper(binding) => bindingToAttribute(binding)
-      case RDFNoder(relation, binding) => bindingToAttribute(binding)
-      case RDFBNoder(relation, binding) =>  bindingToAttribute(binding) // error("BNode should not need relvar " + relvar)
-    }
-  }
-
-  def bindingConstraintToExpression(constraint:BindingConstraint):sql.RelationalExpression = {
-    val BindingConstraint(expr:sql.RelationalExpression, relvarattr:sql.RelVarAttr) = constraint;
-    expr
-  }
-  def bindingToDisjoints(binding:FullOrPartialBinding):Set[sql.RelationalExpression] = {
-    binding match {
-      case FullBinding(relvarattr:sql.RelVarAttr) => Set[sql.RelationalExpression]()
-      case PartialBinding(binders) => binders.map({b => bindingConstraintToExpression(b)})
-    }
-  }
-  def varToAttributeDisjoints(varmap:Map[sparql.Var, SQL2RDFValueMapper], vvar:sparql.Var):Set[sql.RelationalExpression] = {
-    varmap(vvar) match {
-      case IntMapper(binding) => bindingToDisjoints(binding)
-      case StringMapper(binding) => bindingToDisjoints(binding)
-      case DateMapper(binding) => bindingToDisjoints(binding)
-      case RDFNoder(relation, binding) => bindingToDisjoints(binding)
-      case RDFBNoder(relation, binding) =>  bindingToDisjoints(binding) // error("BNode should not need relvar " + relvar)
-    }
-  }
-
-  def varToConcat(varmap:Map[sparql.Var, SQL2RDFValueMapper], vvar:sparql.Var, stem:StemURI):sql.Expression = {
-    varmap(vvar) match {
-      case IntMapper(binding) => sql.PrimaryExpressionAttr(bindingToAttribute(binding))
-      case StringMapper(binding) => 
-	sql.Concat(List(sql.PrimaryExpressionTyped(sql.Datatype("String"),sql.Name("'")),
-		    sql.PrimaryExpressionAttr(bindingToAttribute(binding)),
-		    sql.PrimaryExpressionTyped(sql.Datatype("String"),sql.Name("'^^<http://www.w3.org/2001/XMLSchema#string>"))))
-      case DateMapper(binding) => sql.PrimaryExpressionAttr(bindingToAttribute(binding))
-      case RDFNoder(relation, binding) => 
-	sql.Concat(List(sql.PrimaryExpressionTyped(sql.Datatype("String"),sql.Name(stem.s)),
-		    sql.PrimaryExpressionTyped(sql.Datatype("String"),relation.n),
-		    sql.PrimaryExpressionTyped(sql.Datatype("String"),sql.Name("/")),
-		    sql.PrimaryExpressionTyped(sql.Datatype("String"),bindingToAttribute(binding).attribute.n),
-		    sql.PrimaryExpressionTyped(sql.Datatype("String"),sql.Name(".")),
-		    sql.PrimaryExpressionAttr(bindingToAttribute(binding)),
-		    sql.PrimaryExpressionTyped(sql.Datatype("String"),sql.Name("#record"))))
-      case RDFBNoder(relation, binding) => 
-	sql.Concat(List(sql.PrimaryExpressionTyped(sql.Datatype("String"),sql.Name("_:")),
-		    sql.PrimaryExpressionTyped(sql.Datatype("String"),relation.n),
-		    sql.PrimaryExpressionTyped(sql.Datatype("String"),sql.Name(".")),
-		    sql.PrimaryExpressionTyped(sql.Datatype("String"),bindingToAttribute(binding).attribute.n),
-		    sql.PrimaryExpressionTyped(sql.Datatype("String"),sql.Name(".")),
-		    sql.PrimaryExpressionAttr(bindingToAttribute(binding))))
-    }
-    
-  }
-
-  def filter2expr(varmap:Map[sparql.Var, SQL2RDFValueMapper], f:sparql.PrimaryExpression):sql.RelationalExpression = {
-    val (lTerm:sparql.Term, rTerm:sparql.Term, sqlexpr) = f match { // sqlexpr::((sql.RelVarAttr,sql.PrimaryExpressionAttr)=>sql.RelationalExpression)
-      case sparql.PrimaryExpressionEq(l, r) => (l.term, r.term, sql.RelationalExpressionEq(_,_))
-      case sparql.PrimaryExpressionLt(l, r) => (l.term, r.term, sql.RelationalExpressionLt(_,_))
-    }
-// sql.RelationalExpressionEq(_,_) === (x,y) => sql.RelationalExpressionEq(x,y)
-    lTerm match {
-      // does not handle FILTER (<x> = ?v)
-      case sparql.TermUri(obj) => error("only SPARQL PrimaryExpressions with a variable on the left have been implemented: punting on " + f)
-      // FILTER (?v = <x> && ?v = ?x && ?v = 7)
-      case sparql.TermVar(v) => { // :sparql.Var
-	val l = varToAttribute(varmap, v)
-	val r = rTerm match {
-	  case sparql.TermUri(obj) => null // :sparql.Uri
-	  case sparql.TermVar(v) => { // :sparql.Var
-	    sql.PrimaryExpressionAttr(varToAttribute(varmap, v))
-	  }
-	  case sparql.TermLit(lit) => null // :sparql.Literal => sql.PrimaryExpressionTyped(sql.Datatype, lit.n)
-	}
-	sqlexpr(sql.PrimaryExpressionAttr(l), r)
-      }
-      // does not handle FILTER (7 = ?v)
-      case sparql.TermLit(lit) => error("only SPARQL PrimaryExpressions with a variable on the left have been implemented: punting on " + f)
-    }
-  }
-
-  /* subselectVars: Promote variables in OPTIONAL or UNION subselects to the
-   * outer varmap/expressions.
-   * <outerState> could be <myState> -- splitting roles could make proofs easier?
-   */
-  def subselectVars(myState:R2RState, v:sparql.Var, optionalAlias:sql.RelVar,
-		    optionalCond:sql.RelationalExpression,
-		    outerVarmap:Map[sparql.Var, SQL2RDFValueMapper],
-		    nestedVarmap:Map[sparql.Var, SQL2RDFValueMapper],
-		    isOpt:Boolean):R2RState = {
-    val varAliasAttr = sql.RelVarAttr(optionalAlias, sql.Attribute(attrAliasNameFromVar(v)))
-    if (myState.varmap.contains(v)) {
-      /* The variable has already been bound. */
-      val newMap:Map[sparql.Var, SQL2RDFValueMapper] = if (varToAttribute(myState.varmap, v) == varAliasAttr) {
-	/* Same var was bound earlier. */
-	Map(v -> { myState.varmap(v) match {
-	  case IntMapper(binding)      => IntMapper(addExpr(binding, varAliasAttr, optionalCond))
-	  case StringMapper(binding)   => StringMapper(addExpr(binding, varAliasAttr, optionalCond))
-	  case DateMapper(binding)     => DateMapper(addExpr(binding, varAliasAttr, optionalCond))
-	  case RDFNoder(rel, binding)  => RDFNoder(rel, addExpr(binding, varAliasAttr, optionalCond))
-	  case RDFBNoder(rel, binding) => RDFBNoder(rel, addExpr(binding, varAliasAttr, optionalCond))
-	} } )
-      } else
-	Map()
-      val newConstraints =
-	if (varToAttribute(outerVarmap, v) != varAliasAttr) {
-	  /* Constraint against binding from earlier GP. */
-	  val constraint = sql.RelationalExpressionEq(sql.PrimaryExpressionAttr(varAliasAttr),
-						      sql.PrimaryExpressionAttr(varToAttribute(outerVarmap, v)))
-	  if (varToAttributeDisjoints(outerVarmap, v).size > 0)
-	    // (union0._DISJOINT_ != 0 OR union0.x=union1.x) AND (union1._DISJOINT_ != 2 OR union0.x=union1.x)
-	    varToAttributeDisjoints(outerVarmap, v) map ((d) =>
-	      sql.ExprDisjunction({
-		if (isOpt) Set(d, constraint)
-		else Set(sql.ExprConjunction(Set(d, optionalCond)), constraint)
-	      }))
-		else {
-		  if (isOpt) Set(constraint)
-		  else Set(sql.ExprDisjunction(Set(optionalCond, constraint)))
-		}
-	} else {
-	  Set()
-	}
-      R2RState(myState.joins, myState.varmap ++ newMap, myState.exprs ++ newConstraints)
-    } else {
-      /* This variable is new to the outer context. */
-      val p = PartialBinding(Set(BindingConstraint(optionalCond, varAliasAttr)))
-      val mapper:SQL2RDFValueMapper = nestedVarmap(v) match {
-	case IntMapper(_)      => IntMapper(p)
-	case StringMapper(_)   => StringMapper(p)
-	case DateMapper(_)   => DateMapper(p)
-	case RDFNoder(rel, _)  => RDFNoder(rel, p)
-	case RDFBNoder(rel, _) => RDFBNoder(rel, p)
-      }
-      R2RState(myState.joins, myState.varmap + (v -> mapper), myState.exprs)
-    }
-  }
-
-  def synthesizeOuterJoin(initState:R2RState, gp:sparql.GraphPattern, negate:Boolean, db:sql.DatabaseDesc, enforceForeignKeys:Boolean):R2RState = {
-    /* SPARQL OPTIONALs are treated as SQL subselects.
-     * Set up initial state for this subselect.
-     */
-    val leftJoinAlias = sql.RelVar(sql.Name("G_opt" + initState.joins.size))
-    val initDisjoints:Set[sql.Select] = Set()
-    val emptyState = R2RState(
-      util.AddOrderedSet[sql.Join](), 
-      Map[sparql.Var, SQL2RDFValueMapper](), 
-      Set[sql.Expression]()
-    )
-
-    /* Create the select for the nested graph pattern.
-     */
-    val optionalState = mapGraphPattern(db, emptyState, gp, enforceForeignKeys)
-    val optionalVars = findVars(gp)
-    val disjointNo = sql.NamedAttribute(sql.PrimaryExpressionTyped(sql.Datatype.INTEGER,sql.Name("" + initState.joins.size)),
-					sql.AttrAlias(sql.Name("_DISJOINT_")))
-    val leftJoinVars = findVars(gp).toList
-    val attrlist:Set[sql.NamedAttribute] = leftJoinVars.foldLeft(Set(disjointNo))((attrs, v) =>
-      attrs ++ Set(sql.NamedAttribute(varToAttribute(optionalState.varmap, v), sql.AttrAlias(attrAliasNameFromVar(v))))
-      										)
-    val subselect = sql.Select(
-      sql.AttributeList(attrlist),
-      sql.TableList(optionalState.joins),
-      optionalState.exprs.size match {
-      	case 0 => None
-      	case 1 => Some(optionalState.exprs.toList(0))
-      	case _ => Some(sql.ExprConjunction(optionalState.exprs))
-      }
-    )
-
-    /* Create a condition to test if this OPTIONAL was matched (called
-     * _DISJOINT_ as OPTIONAL behaves pretty much like a disjunction).
-     */
-    val optionalCond = sql.RelationalExpressionNull(sql.PrimaryExpressionAttr(
-      sql.RelVarAttr(leftJoinAlias, sql.Attribute(sql.Name("_DISJOINT_")))))
-
-    /* Bind variables to the attributes projected from the subselect; handle
-     * corefs (equivalence with earlier bindings).
-     */
-    val outerState2 =
-      optionalVars.foldLeft(
-	R2RState(initState.joins,
-		 initState.varmap,
-		 Set[sql.Expression]()))((myState, v) => 
-		   subselectVars(myState, v, leftJoinAlias, optionalCond,
-				 initState.varmap, optionalState.varmap, true))
-
-    /* The final state includes the subselect as a join, the variables bound
-     * to subselect projection, and no new expressions. The expressions
-     * derived from corefs are conditions for the LEFT OUTER JOIN.
-     */
-    val join = sql.LeftOuterJoin(sql.AliasedResource(sql.Subselect(subselect), leftJoinAlias), 
-	 outerState2.exprs.size match {
-	   case 0 =>
-	     sql.RelationalExpressionEq(sql.PrimaryExpressionTyped(sql.Datatype.INTEGER,sql.Name("1")),
-					sql.PrimaryExpressionTyped(sql.Datatype.INTEGER,sql.Name("1")))
-	     // /* Require corefs unless we have a leading OPTIONAL. */
-	     // if (...)
-	     // else
-	     //   error ("Nested GP has no variables shared with its context; cowardly refusing to join ON 1.")
-	   case 1 => outerState2.exprs.toList(0)
-	   case _ => sql.ExprConjunction(outerState2.exprs)
-	 }
-       )
-    val exprs =
-      if (negate) {
-	initState.exprs + sql.RelationalExpressionNull(sql.PrimaryExpressionAttr(sql.RelVarAttr(leftJoinAlias, sql.Attribute(sql.Name("_DISJOINT_")))))
-      } else initState.exprs
-    R2RState(initState.joins + join, outerState2.varmap, exprs)
-  }
-
-  def mapGraphPattern(db:sql.DatabaseDesc, state:R2RState, gp:sparql.GraphPattern, enforceForeignKeys:Boolean):R2RState = {
-    gp match {
-      case sparql.TableFilter(gp2:sparql.GraphPattern, expr:sparql.Expression) => {
-	val state2 = mapGraphPattern(db, state, gp2, enforceForeignKeys)
-
-	/* Add constraints for all the FILTERS */
-	val filterExprs:Set[sql.RelationalExpression] =
-	  expr.conjuncts.toSet map ((x:sparql.PrimaryExpression) => filter2expr(state2.varmap, x))
-
-	R2RState(state2.joins, state2.varmap, state2.exprs ++ filterExprs)
-      }
-      case sparql.TriplesBlock(triplepatterns) => {
-	/* Examine each triple, updating the compilation state. */
-	val state2 = triplepatterns.foldLeft(state)((incState,s) => bindOnPredicate(db, incState, s, enforceForeignKeys))
-	val nullExprs = findVars(gp).foldLeft(Set[sql.Expression]())((s, vvar) => {
-	  if (varToAttributeDisjoints(state2.varmap, vvar).size == 0)
-	    s ++ Set(sql.RelationalExpressionNotNull(sql.PrimaryExpressionAttr(varToAttribute(state2.varmap, vvar))))
-	  else
-	    s
-	})
-	R2RState(state2.joins, state2.varmap, state2.exprs ++ nullExprs)
-      }
-      case sparql.TableConjunction(list) => {
-	list.foldLeft(state)((incState,s) => mapGraphPattern(db, incState, s, enforceForeignKeys))
-      }
-      case sparql.TableDisjunction(disjoints) => {
-	/* SPARQL UNIONs are treated as SQL subselects.
-	 * Set up initial state for this subselect.
-	 */
-	val unionAlias = sql.RelVar(sql.Name("G_union" + state.joins.size))
-	val emptyState = R2RState(
-	  util.AddOrderedSet[sql.Join](), 
-	  Map[sparql.Var, SQL2RDFValueMapper](), 
-	  Set[sql.Expression]()
-	)
-	val unionVars = disjoints.foldLeft(Set[sparql.Var]())((mySet,disjoint) =>
-	  mySet ++ findVars(disjoint)).toList
-
-	/* Map the disjoints to subselects.
-	 * <no> is used for uniquely naming flags in the SELECTs used to
-	 * indicate which disjoint produced a tuple.
-	 */
-	val (subselects, _) = disjoints.foldLeft((Set[sql.Select](), 0))((incPair,disjoint) => {
-	  val (subselects, no) = incPair
-	  val disjointState = mapGraphPattern(db, emptyState, disjoint, enforceForeignKeys)
-	  val disjointNo = sql.NamedAttribute(sql.PrimaryExpressionTyped(sql.Datatype.INTEGER,sql.Name("" + no)),
-					      sql.AttrAlias(sql.Name("_DISJOINT_")))
-
-	  val attrlist:Set[sql.NamedAttribute] = unionVars.foldLeft(Set(disjointNo))((attrs, v) => {
-	    val attrOrNull = if (disjointState.varmap.contains(v)) varToAttribute(disjointState.varmap, v) else sql.ConstNULL()
-	    attrs ++ Set(sql.NamedAttribute(attrOrNull, sql.AttrAlias(attrAliasNameFromVar(v))))
-	  })
-
-	  val subselect = sql.Select(
-	    sql.AttributeList(attrlist),
-	    sql.TableList(disjointState.joins),
-	    disjointState.exprs.size match {
-	      case 0 => None
-	      case 1 => Some(disjointState.exprs.toList(0))
-	      case _ => Some(sql.ExprConjunction(disjointState.exprs))
-	    }
-	  )
-	  (subselects + subselect, no+1)
-	})
-
-	/* Bind variables to the attributes projected from the subselect; handle
-	 * corefs (equivalence with earlier bindings).
-	 * <no> is used for uniquely naming flags in the SELECTs used to
-	 * indicate which disjoint produced a tuple.
-	 * <state2> will have no additional tables in the TableList.
-	 */
-	val (state2, _) = disjoints.foldLeft((state, 0))((incPair,disjoint) => {
-	  val (outerState, no) = incPair
-	  val disjointState = mapGraphPattern(db, emptyState, disjoint, enforceForeignKeys)
-	  val disjointVars = findVars(disjoint)
-
-	  /* Create a condition to test whether this disjoint was matched
-	   * (the _DISJOINT_ flag records which branch produced the tuple).
-	   */
-	  val disjointCond = sql.RelationalExpressionNe(sql.PrimaryExpressionAttr(sql.RelVarAttr(unionAlias, sql.Attribute(sql.Name("_DISJOINT_")))),
-							sql.PrimaryExpressionTyped(sql.Datatype.INTEGER,sql.Name("" + no)))
-	  val outerState2 = disjointVars.foldLeft(outerState)((myState, v) =>
-	      subselectVars(myState, v, unionAlias, disjointCond, outerState.varmap, disjointState.varmap, false))
-	  (outerState2, no+1)
-	})
-	val subselect = sql.Subselect(sql.Union(subselects))
-	R2RState(state.joins + sql.InnerJoin(sql.AliasedResource(subselect,unionAlias), None), state2.varmap, state2.exprs)
-      }
-      case sparql.OptionalGraphPattern(gp) => {
-	/* state_postLeadingTable: create an initial table if the first conjoint is optional.
-	 * e.g. ... FROM (SELECT 1 AS _EMPTY_) AS _EMPTY_ LEFT OUTER JOIN ...
-	 */
-	val state_postLeadingTable =
-	  if (state.joins.size == 0)
-	    R2RState(state.joins + sql.InnerJoin(sql.AliasedResource(sql.Subselect(
-	      sql.Select(
-		sql.AttributeList(Set(sql.NamedAttribute(sql.PrimaryExpressionTyped(sql.Datatype.INTEGER,sql.Name("1")),
-							 sql.AttrAlias(sql.Name("_EMPTY_"))))),
-		sql.TableList(util.AddOrderedSet()),
-		None
-	      )), sql.RelVar(sql.Name("_EMPTY_"))), None), state.varmap, state.exprs)
-	  else
-	    state
-	synthesizeOuterJoin(state_postLeadingTable, gp, false, db, enforceForeignKeys)
-      }
-      case sparql.MinusGraphPattern(gp) => {
-	if (state.joins.size == 0) state
-	else synthesizeOuterJoin(state, gp, true, db, enforceForeignKeys)
-      }
-      case sparql.GraphGraphPattern(gp) => error("no code to handle GraphGraphPatterns (" + gp + ")")
-    }
-  }
-
-  def apply (db:sql.DatabaseDesc, sparquery:sparql.Select, stem:StemURI, enforceForeignKeys:Boolean, concat:Boolean) : sql.Select = {
-    val sparql.Select(attrs, triples) = sparquery
-
-    /* Create an object to hold our compilation state. */
-    val initState = R2RState(
-      util.AddOrderedSet[sql.Join](), 
-      Map[sparql.Var, SQL2RDFValueMapper](), 
-      Set[sql.Expression]()
-    )
-
-    val r2rState = mapGraphPattern(db, initState, sparquery.gp, enforceForeignKeys)
-
-    /* Select the attributes corresponding to the variables
-     * in the SPARQL SELECT.  */
-    val attrlist:Set[sql.NamedAttribute] = attrs.attributelist.foldLeft(Set[sql.NamedAttribute]())((attrs, v) => 
-      attrs + sql.NamedAttribute({
-	if (concat) varToConcat(r2rState.varmap, v, stem)
-	else varToAttribute(r2rState.varmap, v)
-      } , sql.AttrAlias(attrAliasNameFromVar(v))
-      ))
-
-    /* Construct the generated query as an abstract syntax. */
-    val select = sql.Select(
-      sql.AttributeList(attrlist),
-      sql.TableList(r2rState.joins),
-      r2rState.exprs.size match {
-	case 0 => None
-	case 1 => Some(r2rState.exprs.toList(0))
-	case _ => Some(sql.ExprConjunction(r2rState.exprs))
-      }
-    )
-    // println("r2rState.varmap: " + r2rState.varmap)
-    // println("select.expression: " + select.expression)
-    select.makePretty()
-  }
-}
-
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/main/scala/SparqlToSql.scala	Sun Jan 31 11:12:27 2010 -0500
@@ -0,0 +1,701 @@
+/* SparqlToSql: convert SPARQL queries to sound SQL queries.
+ *
+ * Please read from the bottom -- i.e. apply calls mapGraphPattern with the root
+ * graph pattern. mapGraphPattern handles all the graph pattern types in SPARQL,
+ * effectively performing the Convert Graph Patterns step in SPARQL 1.0 12.2.1
+ *   <http://www.w3.org/TR/rdf-sparql-query/#convertGraphPattern>
+ * with the target semantics in SQL instead of SPARQL.
+ */
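+/* A minimal usage sketch (illustrative; `db` and `query` stand for a parsed
+ * sql.DatabaseDesc and sparql.Select obtained elsewhere, and the stem URI is
+ * an assumed example value):
+ *
+ *   val generated:sql.Select =
+ *     SparqlToSql(db, query, StemURI("http://hr.example/DB/"), true, true)
+ *
+ * The two booleans are the enforceForeignKeys and concat arguments of apply,
+ * defined at the bottom of this file.
+ */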
+
+package w3c.sw.rdb2rdf
+
+import scala.util.parsing.combinator._
+import java.net.URI
+import w3c.sw.sql
+import w3c.sw.sql.PrettySql.toPrettySql
+import w3c.sw.sparql
+import w3c.sw.util
+
+case class StemURI(s:String)
+case class PrimaryKey(attr:sql.Attribute)
+
+sealed abstract class Binding
+case class RDFNode(relvarattr:sql.RelVarAttr) extends Binding
+case class Str(relvarattr:sql.RelVarAttr) extends Binding
+case class Int(relvarattr:sql.RelVarAttr) extends Binding
+case class Enum(relvarattr:sql.RelVarAttr) extends Binding
+
+object SparqlToSql {
+  case class R2RState(joins:util.AddOrderedSet[sql.Join], varmap:Map[sparql.Var, SQL2RDFValueMapper], exprs:Set[sql.Expression])
+
+  sealed abstract class FullOrPartialBinding
+  case class FullBinding(relvarattr:sql.RelVarAttr) extends FullOrPartialBinding
+  case class BindingConstraint(expr:sql.RelationalExpression, relvarattr:sql.RelVarAttr)
+  case class PartialBinding(binders:Set[BindingConstraint]) extends FullOrPartialBinding
+
+  def toExpr(against:FullOrPartialBinding):sql.Expression = {
+    /* if (g_union1._DISJOINT_ != 0, g_union1.who, if (g_union2._DISJOINT_ != 3, g_union2.who, NULL)) */
+    against match {
+      case FullBinding(relvarattr) =>
+	sql.PrimaryExpressionAttr(relvarattr)
+      case PartialBinding(binders) =>
+	binders.toList.reverse.foldLeft(sql.ConstNULL():sql.Expression)((exp, binding) => {
+	  val BindingConstraint(expr, relvarattr) = binding
+	  sql.IfElse(expr, sql.PrimaryExpressionAttr(relvarattr), exp)
+	})
+    }
+  }
+  def addExpr(binding:FullOrPartialBinding, relVarAttr:sql.RelVarAttr, expr:sql.RelationalExpression):FullOrPartialBinding = {
+    binding match {
+      case FullBinding(relvarattr) =>
+	binding
+      case PartialBinding(binders) =>
+	PartialBinding(binders + BindingConstraint(expr, relVarAttr))
+    }
+  }
+  def toConstraint999(constrainMe:sql.RelVarAttr, against:FullOrPartialBinding):sql.Expression = {
+    against match {
+      case FullBinding(relvarattr) =>
+	sql.RelationalExpressionEq(sql.PrimaryExpressionAttr(constrainMe),
+				   sql.PrimaryExpressionAttr(relvarattr))
+      case PartialBinding(binders) =>
+	sql.ExprConjunction({binders.map(b => {
+	  val BindingConstraint(expr, relvarattr) = b
+	  sql.ExprDisjunction(Set(expr,
+				  sql.RelationalExpressionEq(sql.PrimaryExpressionAttr(constrainMe),
+							     sql.PrimaryExpressionAttr(relvarattr))))})})
+    }
+  }
+
+  sealed abstract class SQL2RDFValueMapper(binding:FullOrPartialBinding)
+  case class IntMapper(binding:FullOrPartialBinding) extends SQL2RDFValueMapper(binding)
+  case class StringMapper(binding:FullOrPartialBinding) extends SQL2RDFValueMapper(binding)
+  case class DateMapper(binding:FullOrPartialBinding) extends SQL2RDFValueMapper(binding)
+  case class RDFNoder(relation:sql.Relation, binding:FullOrPartialBinding) extends SQL2RDFValueMapper(binding)
+  case class RDFBNoder(relation:sql.Relation, binding:FullOrPartialBinding) extends SQL2RDFValueMapper(binding)
+
+  case class NodeUri(stem:Stem, rel:Rel, attr:Attr, v:CellValue)
+  case class Stem(s:String) {
+    override def toString = "" + s
+  }
+  case class Rel(s:String) { // !! NUKE
+    override def toString = "" + s
+  }
+  case class Attr(s:String) {
+    override def toString = "" + s
+  }
+  case class CellValue(s:String)
+  case class PUri(stem:Stem, rel:Rel, attr:Attr) {
+    override def toString = "<" + stem + "/" + rel + "#" + attr + ">"
+  }
+  /* stemURI + '/' + (\w+) + '#' (\w+) */
+  def parsePredicateURI(u:sparql.Uri):PUri = {
+    val x:String = u.s
+    val uri = new URI(x)
+    val path = uri.getPath().split("/").toList.filterNot(_ == "")
+    val subPath = path.slice(0, path.size - 1).mkString("/")
+    val stem = uri.getScheme() + "://" + uri.getAuthority + "/" + subPath
+    PUri(Stem(stem), Rel(path.last), Attr(uri.getFragment))
+  }
+
+  /* stemURI + '/' (\w+) '/' (\w+) '.' (\w+) '#record' */
+  def parseObjectURI(u:sparql.Uri):NodeUri = {
+    val x:String = u.s
+    val uri = new URI(x)
+    val path = uri.getPath().split("/").toList.filterNot(_ == "")
+    val subPath = path.slice(0, path.size - 2).mkString("/")
+    val rel = path(path.size - 2)
+    val attrPair = path(path.size-1).split("\\.")
+    val stem = uri.getScheme() + "://" + uri.getAuthority + "/" + subPath
+    assert("record" == uri.getFragment)
+    NodeUri(Stem(stem), Rel(rel), Attr(attrPair(0)), CellValue(attrPair(1)))
+  }
+/*
+Sparql.parseObjectURI(
+Sparql.parsePredicateURI(
+*/
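+  /* Illustrative examples of the URI layouts parsed above (values assumed,
+   * following the stemURI patterns documented on each parser, not taken from
+   * a test):
+   *
+   *   parsePredicateURI(sparql.Uri("http://hr.example/DB/Employee#manager"))
+   *     == PUri(Stem("http://hr.example/DB"), Rel("Employee"), Attr("manager"))
+   *
+   *   parseObjectURI(sparql.Uri("http://hr.example/DB/Employee/id.18#record"))
+   *     == NodeUri(Stem("http://hr.example/DB"), Rel("Employee"),
+   *                Attr("id"), CellValue("18"))
+   */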
+  def relVarFromS(s:sparql.S):sql.RelVar = {
+    s match {
+      case sparql.SUri(ob) => relVarFromNode(ob)
+      case sparql.SVar(v) => relVarFromVar(v)
+    }
+  }
+
+  def relVarFromO(o:sparql.O):sql.RelVar = {
+    o match {
+      case sparql.OUri(ob) => relVarFromNode(ob)
+      case sparql.OVar(v) => relVarFromVar(v)
+      case sparql.OLit(l) => relVarFromLiteral(l)
+    }
+  }
+
+  def relVarFromNode(u:sparql.Uri):sql.RelVar = {
+    val NodeUri(stem, rel, Attr(a), CellValue(v)) = parseObjectURI(u)
+    sql.RelVar(sql.Name("R_" + a + v))
+  }
+
+  def relVarFromLiteral(l:sparql.Literal):sql.RelVar = {
+    sql.RelVar(sql.Name("R_" + l.lit.lexicalForm))
+  }
+
+  def relVarFromVar(vr:sparql.Var):sql.RelVar = {
+    val sparql.Var(v) = vr
+    sql.RelVar(sql.Name("R_" + v))
+  }
+
+  def attrAliasNameFromVar(v:sparql.Var):sql.Name = sql.Name("" + v.s)
+
+  def uriConstraint(state:R2RState, constrainMe:sql.RelVarAttr, u:NodeUri, enforceForeignKeys:Boolean):R2RState = {
+    val relvar =
+      if (enforceForeignKeys)
+	sql.RelVarAttr(constrainMe.relvar, sql.Attribute(sql.Name(u.attr.s)))
+      else
+	constrainMe
+    R2RState(state.joins,
+	     state.varmap,
+	     state.exprs + sql.RelationalExpressionEq(sql.PrimaryExpressionAttr(relvar),
+						      sql.PrimaryExpressionTyped(sql.Datatype.INTEGER,sql.Name(u.v.s))))
+  }
+
+  def literalConstraint(state:R2RState, constrainMe:sql.RelVarAttr, lit:sparql.Literal, dt:sql.Datatype):R2RState = {
+    R2RState(state.joins,
+	     state.varmap,
+	     state.exprs + sql.RelationalExpressionEq(sql.PrimaryExpressionAttr(constrainMe),
+						      sql.PrimaryExpressionTyped(dt,sql.Name(lit.lit.lexicalForm))))    
+  }
+
+  /** varConstraint
+   * examples:
+   * SELECT ?emp WHERE { ?emp emp:manager <http://hr.example/our/favorite/DB/Employee/id.18#record> ; emp:name ?name }
+   * SQL Results                     SPARQL Results
+   * A_emp A_name    ?emp                                                      ?name
+   * 4     "Bob"     <http://hr.example/our/favorite/DB/Employee/id.4#record>  "Bob"^^xsd:string
+   * 6     "Sue"     <http://hr.example/our/favorite/DB/Employee/id.6#record>  "Sue"^^xsd:string
+   * 
+   * type String -> RDFStringConstructor // adds ^^xsd:string
+   * type primary key -> RDFNodeConstructor // prefixes with stemURI + relation + attribute and adds #record
+   * */
+  def varConstraint(state:R2RState, alias:sql.RelVar, optAttr:Option[sql.Attribute], v:sparql.Var, db:sql.DatabaseDesc, rel:sql.Relation):R2RState = {
+    /* e.g.                                 Employee      _emp.id            
+    **                                      Employee      _emp.lastName      
+    **                                      Employee      _emp.manager       
+    */
+    val constrainMe = if (optAttr.isDefined) sql.RelVarAttr(alias, optAttr.get) else sql.RelVarAttr(alias, sql.Attribute(sql.Name("_no_such_attribute")))
+    val reldesc = db.relationdescs(rel)
+    val boundTo = FullBinding(constrainMe)
+    val binding = reldesc.primarykey match {
+      case Some(sql.Attribute(constrainMe.attribute.n)) => RDFNoder(rel, boundTo)
+      case _ => {
+	// e.g. sql.Attribute(sql.Name("id")) or None
+	if (reldesc.attributes.contains(constrainMe.attribute)) {
+	  reldesc.attributes(constrainMe.attribute) match {
+	    case sql.ForeignKey(fkrel, fkattr) => RDFNoder(rel, boundTo)
+	    case sql.Value(sql.Datatype("Int")) => IntMapper(boundTo)
+	    case sql.Value(sql.Datatype("String")) => StringMapper(boundTo)
+	    case sql.Value(sql.Datatype("Date")) => DateMapper(boundTo)
+	  }
+	} else {
+	  RDFBNoder(rel, boundTo)
+	}
+      }
+    }
+
+    if (state.varmap.contains(v) && state.varmap(v) != constrainMe) {
+      /* The variable has already been bound to another attribute. */
+      /* Constrain it against the initial binding for this variable. */
+      val constraint = sql.RelationalExpressionEq(sql.PrimaryExpressionAttr(constrainMe),
+						  sql.PrimaryExpressionAttr(varToAttribute(state.varmap, v)))
+      R2RState(state.joins, state.varmap, 
+	       if (varToAttributeDisjoints(state.varmap, v).size > 0) {
+		 state.exprs ++ {varToAttributeDisjoints(state.varmap, v) map ((d) => sql.ExprDisjunction(Set(d, constraint)))}
+	       } else
+		 state.exprs + constraint
+	     )
+    } else {
+      /* This is a new variable or a replacement binding for an old variable. */
+      R2RState(state.joins, state.varmap + (v -> binding), state.exprs)
+    }
+  }
+
+  def toString(relvarattr:sql.RelVarAttr) : String = {
+    relvarattr.relvar.n.s + "." + relvarattr.attribute.n.s
+  }
+  // def toString(mapper:SQL2RDFValueMapper) : String = {
+  //   mapper match {
+  //     case IntMapper(relvar, disjoints) => "INT: " + toString(relvar)
+  //     case StringMapper(relvar, disjoints) => "STRING: " + toString(relvar)
+  //     case DateMapper(relvar, disjoints) => "DATE: " + toString(relvar)
+  //     case RDFNoder(relation, relvar, disjoints) => "RDFNoder: " + relation.n.s + ", " + toString(relvar)
+  //     case RDFBNoder(relation, relvar, disjoints) => "RDFBNoder: " + relation.n.s + ", " + toString(relvar)
+  //   }
+  // }
+
+  /* bindOnPredicate: map a given triple to one or two joined tables, variable
+   * bindings to RelVarAttrs, and constraints if those variables were
+   * already bound. */
+  def bindOnPredicate(db:sql.DatabaseDesc, stateP:R2RState, triple:sparql.TriplePattern, enforceForeignKeys:Boolean):R2RState = {
+    val sparql.TriplePattern(s, p, o) = triple
+    p match {
+      case sparql.PVar(v) => error("variable predicates require tedious enumeration; too tedious for me.")
+      case sparql.PUri(uri) => {
+	val PUri(stem, spRel, spAttr) = parsePredicateURI(uri)
+	/* Attributes that come from the predicate: */
+	val rel = sql.Relation(sql.Name(spRel.s))
+	val attr = sql.Attribute(sql.Name(spAttr.s))
+	val relvar = relVarFromS(s)
+
+	/* Attributes that come from the subject: */
+	val objattr = sql.RelVarAttr(relvar, attr)
+	val state_postSubj = s match {
+	  case sparql.SUri(u) => uriConstraint(stateP, sql.RelVarAttr(relvar, db.relationdescs(rel).primarykey.get), parseObjectURI(u), true)
+	  case sparql.SVar(v) => varConstraint(stateP, relvar, db.relationdescs(rel).primarykey, v, db, rel)
+	}
+	val state_subjJoin = R2RState(state_postSubj.joins + sql.InnerJoin(sql.AliasedResource(rel,relvar), None), state_postSubj.varmap, state_postSubj.exprs)
+
+	try { db.relationdescs(rel).attributes(attr) } catch {
+	  case e:java.util.NoSuchElementException =>
+	    throw new Exception("error processing { " + s + " " + p + " " + o + " } :db.relationdescs(" + rel + ").attributes(" + attr + ") not found in " + db)
+	}
+	val (targetattr:sql.RelVarAttr, targetrel, dt, state_fkeys:R2RState) = db.relationdescs(rel).attributes(attr) match {
+	  case sql.ForeignKey(fkrel, fkattr) => {
+	    try { db.relationdescs(fkrel).attributes(fkattr) } catch {
+	      case e:java.util.NoSuchElementException =>
+		throw new Exception("db.relationdescs(" + fkrel + ").attributes(" + fkattr + ") not found in " + db)
+	    }
+	    val fkdt = db.relationdescs(fkrel).attributes(fkattr) match {
+	      case sql.ForeignKey(dfkrel, dfkattr) => error("foreign key " + rel.n + "." + attr.n + 
+							"->" + fkrel.n + "." + fkattr.n + 
+							"->" + dfkrel.n + "." + dfkattr.n)
+	      case sql.Value(x) => x
+	    }
+	    if (enforceForeignKeys) {
+	      val oRelVar = relVarFromO(o)
+	      val fkaliasattr = sql.RelVarAttr(oRelVar, fkattr)
+	      val state_t = R2RState(state_subjJoin.joins + sql.InnerJoin(sql.AliasedResource(fkrel,oRelVar), None),
+				     state_subjJoin.varmap,
+				     state_subjJoin.exprs + sql.RelationalExpressionEq(sql.PrimaryExpressionAttr(fkaliasattr),
+										       sql.PrimaryExpressionAttr(objattr)))
+
+	      (fkaliasattr, fkrel, fkdt, state_t)
+	    } else {
+	      (objattr, rel, fkdt, state_subjJoin)
+	    }
+	  }
+	  case sql.Value(dt) => (objattr, rel, dt, state_subjJoin)
+	}
+	o match {
+	  case sparql.OLit(l) => literalConstraint(state_fkeys, targetattr, l, dt)
+	  case sparql.OUri(u) => uriConstraint    (state_fkeys, targetattr, parseObjectURI(u), enforceForeignKeys)
+	  case sparql.OVar(v) => varConstraint    (state_fkeys, targetattr.relvar, Some(targetattr.attribute), v, db, targetrel)
+	}
+      }
+    }
+  }
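+  /* Rough sketch of the state built for one triple pattern (illustrative;
+   * assumes an Employee relation whose manager attribute is a foreign key to
+   * Employee.id, and enforceForeignKeys=true):
+   *
+   *   { ?emp <http://hr.example/DB/Employee#manager> ?boss }
+   *
+   * adds InnerJoins for Employee AS R_emp and Employee AS R_boss, the
+   * expression R_boss.id=R_emp.manager, and varmap entries binding ?emp to
+   * R_emp.id and ?boss to R_boss.id (both as RDFNoder(Employee, ...)).
+   */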
+
+  def findVars(triple:sparql.TriplePattern):Set[sparql.Var] = {
+    val sparql.TriplePattern(s, p, o) = triple
+    val varS:Set[sparql.Var] = s match {
+      case sparql.SVar(v) => Set(v)
+      case _              => Set()
+    }
+    val varO:Set[sparql.Var] = o match {
+      case sparql.OVar(v) => Set(v)
+      case _              => Set()
+    }
+    varS ++ varO
+  }
+
+  def findVars(gp:sparql.GraphPattern):Set[sparql.Var] = {
+    gp match {
+      case sparql.TableFilter(gp2:sparql.GraphPattern, expr:sparql.Expression) =>
+	findVars(gp2)
+
+      case sparql.TriplesBlock(triplepatterns) =>
+	/* Examine each triple, updating the compilation state. */
+	triplepatterns.foldLeft(Set[sparql.Var]())((x, y) => x ++ findVars(y))
+
+      case sparql.TableConjunction(list) =>
+	/* Examine each triple, updating the compilation state. */
+	list.foldLeft(Set[sparql.Var]())((x, y) => x ++ findVars(y))
+
+      case sparql.OptionalGraphPattern(gp2) =>
+	/* Examine each triple, updating the compilation state. */
+	findVars(gp2)
+
+      case x => error("no code to handle " + x)
+    }
+  }
+
+  def bindingConstraintToAttribute(constraint:BindingConstraint):sql.RelVarAttr = {
+    val BindingConstraint(expr:sql.RelationalExpression, relvarattr:sql.RelVarAttr) = constraint;
+    relvarattr
+  }
+  def bindingToAttribute(binding:FullOrPartialBinding):sql.RelVarAttr = {
+    binding match {
+      case FullBinding(relvarattr:sql.RelVarAttr) => relvarattr
+      case PartialBinding(binders) => bindingConstraintToAttribute(binders.toList(0))
+    }
+  }
+  def varToAttribute(varmap:Map[sparql.Var, SQL2RDFValueMapper], vvar:sparql.Var):sql.RelVarAttr = {
+    varmap(vvar) match {
+      case IntMapper(binding) => bindingToAttribute(binding)
+      case StringMapper(binding) => bindingToAttribute(binding)
+      case DateMapper(binding) => bindingToAttribute(binding)
+      case RDFNoder(relation, binding) => bindingToAttribute(binding)
+      case RDFBNoder(relation, binding) =>  bindingToAttribute(binding) // error("BNode should not need relvar " + relvar)
+    }
+  }
+
+  def bindingConstraintToExpression(constraint:BindingConstraint):sql.RelationalExpression = {
+    val BindingConstraint(expr:sql.RelationalExpression, relvarattr:sql.RelVarAttr) = constraint;
+    expr
+  }
+  def bindingToDisjoints(binding:FullOrPartialBinding):Set[sql.RelationalExpression] = {
+    binding match {
+      case FullBinding(relvarattr:sql.RelVarAttr) => Set[sql.RelationalExpression]()
+      case PartialBinding(binders) => binders.map({b => bindingConstraintToExpression(b)})
+    }
+  }
+  def varToAttributeDisjoints(varmap:Map[sparql.Var, SQL2RDFValueMapper], vvar:sparql.Var):Set[sql.RelationalExpression] = {
+    varmap(vvar) match {
+      case IntMapper(binding) => bindingToDisjoints(binding)
+      case StringMapper(binding) => bindingToDisjoints(binding)
+      case DateMapper(binding) => bindingToDisjoints(binding)
+      case RDFNoder(relation, binding) => bindingToDisjoints(binding)
+      case RDFBNoder(relation, binding) =>  bindingToDisjoints(binding) // error("BNode should not need relvar " + relvar)
+    }
+  }
+
+  def varToConcat(varmap:Map[sparql.Var, SQL2RDFValueMapper], vvar:sparql.Var, stem:StemURI):sql.Expression = {
+    varmap(vvar) match {
+      case IntMapper(binding) => sql.PrimaryExpressionAttr(bindingToAttribute(binding))
+      case StringMapper(binding) => 
+	sql.Concat(List(sql.PrimaryExpressionTyped(sql.Datatype("String"),sql.Name("'")),
+		    sql.PrimaryExpressionAttr(bindingToAttribute(binding)),
+		    sql.PrimaryExpressionTyped(sql.Datatype("String"),sql.Name("'^^<http://www.w3.org/2001/XMLSchema#string>"))))
+      case DateMapper(binding) => sql.PrimaryExpressionAttr(bindingToAttribute(binding))
+      case RDFNoder(relation, binding) => 
+	sql.Concat(List(sql.PrimaryExpressionTyped(sql.Datatype("String"),sql.Name(stem.s)),
+		    sql.PrimaryExpressionTyped(sql.Datatype("String"),relation.n),
+		    sql.PrimaryExpressionTyped(sql.Datatype("String"),sql.Name("/")),
+		    sql.PrimaryExpressionTyped(sql.Datatype("String"),bindingToAttribute(binding).attribute.n),
+		    sql.PrimaryExpressionTyped(sql.Datatype("String"),sql.Name(".")),
+		    sql.PrimaryExpressionAttr(bindingToAttribute(binding)),
+		    sql.PrimaryExpressionTyped(sql.Datatype("String"),sql.Name("#record"))))
+      case RDFBNoder(relation, binding) => 
+	sql.Concat(List(sql.PrimaryExpressionTyped(sql.Datatype("String"),sql.Name("_:")),
+		    sql.PrimaryExpressionTyped(sql.Datatype("String"),relation.n),
+		    sql.PrimaryExpressionTyped(sql.Datatype("String"),sql.Name(".")),
+		    sql.PrimaryExpressionTyped(sql.Datatype("String"),bindingToAttribute(binding).attribute.n),
+		    sql.PrimaryExpressionTyped(sql.Datatype("String"),sql.Name(".")),
+		    sql.PrimaryExpressionAttr(bindingToAttribute(binding))))
+    }
+    
+  }
+
+  def filter2expr(varmap:Map[sparql.Var, SQL2RDFValueMapper], f:sparql.PrimaryExpression):sql.RelationalExpression = {
+    val (lTerm:sparql.Term, rTerm:sparql.Term, sqlexpr) = f match { // sqlexpr::((sql.RelVarAttr,sql.PrimaryExpressionAttr)=>sql.RelationalExpression)
+      case sparql.PrimaryExpressionEq(l, r) => (l.term, r.term, sql.RelationalExpressionEq(_,_))
+      case sparql.PrimaryExpressionLt(l, r) => (l.term, r.term, sql.RelationalExpressionLt(_,_))
+    }
+// sql.RelationalExpressionEq(_,_) === (x,y) => sql.RelationalExpressionEq(x,y)
+    lTerm match {
+      // does not handle FILTER (<x> = ?v)
+      case sparql.TermUri(obj) => error("only SPARQL PrimaryExpressions with a variable on the left have been implemented: punting on " + f)
+      // FILTER (?v = <x> && ?v = ?x && ?v = 7)
+      case sparql.TermVar(v) => { // :sparql.Var
+	val l = varToAttribute(varmap, v)
+	val r = rTerm match {
+	  case sparql.TermUri(obj) => null // :sparql.Uri
+	  case sparql.TermVar(v) => { // :sparql.Var
+	    sql.PrimaryExpressionAttr(varToAttribute(varmap, v))
+	  }
+	  case sparql.TermLit(lit) => null // :sparql.Literal => sql.PrimaryExpressionTyped(sql.Datatype, lit.n)
+	}
+	sqlexpr(sql.PrimaryExpressionAttr(l), r)
+      }
+      // does not handle FILTER (7 = ?v)
+      case sparql.TermLit(lit) => error("only SPARQL PrimaryExpressions with a variable on the left have been implemented: punting on " + f)
+    }
+  }
+
+  /* subselectVars: Promote variables in OPTIONAL or UNION subselects to the
+   * outer varmap/expressions.
+   * <outerState> could be <myState> -- splitting roles could make proofs easier?
+   */
+  def subselectVars(myState:R2RState, v:sparql.Var, optionalAlias:sql.RelVar,
+		    optionalCond:sql.RelationalExpression,
+		    outerVarmap:Map[sparql.Var, SQL2RDFValueMapper],
+		    nestedVarmap:Map[sparql.Var, SQL2RDFValueMapper],
+		    isOpt:Boolean):R2RState = {
+    val varAliasAttr = sql.RelVarAttr(optionalAlias, sql.Attribute(attrAliasNameFromVar(v)))
+    if (myState.varmap.contains(v)) {
+      /* The variable has already been bound. */
+      val newMap:Map[sparql.Var, SQL2RDFValueMapper] = if (varToAttribute(myState.varmap, v) == varAliasAttr) {
+	/* Same var was bound earlier. */
+	Map(v -> { myState.varmap(v) match {
+	  case IntMapper(binding)      => IntMapper(addExpr(binding, varAliasAttr, optionalCond))
+	  case StringMapper(binding)   => StringMapper(addExpr(binding, varAliasAttr, optionalCond))
+	  case DateMapper(binding)     => DateMapper(addExpr(binding, varAliasAttr, optionalCond))
+	  case RDFNoder(rel, binding)  => RDFNoder(rel, addExpr(binding, varAliasAttr, optionalCond))
+	  case RDFBNoder(rel, binding) => RDFBNoder(rel, addExpr(binding, varAliasAttr, optionalCond))
+	} } )
+      } else
+	Map()
+      val newConstraints =
+	if (varToAttribute(outerVarmap, v) != varAliasAttr) {
+	  /* Constraint against binding from earlier GP. */
+	  val constraint = sql.RelationalExpressionEq(sql.PrimaryExpressionAttr(varAliasAttr),
+						      sql.PrimaryExpressionAttr(varToAttribute(outerVarmap, v)))
+	  if (varToAttributeDisjoints(outerVarmap, v).size > 0)
+	    // (union0._DISJOINT_ != 0 OR union0.x=union1.x) AND (union1._DISJOINT_ != 2 OR union0.x=union1.x)
+	    varToAttributeDisjoints(outerVarmap, v) map ((d) =>
+	      sql.ExprDisjunction({
+		if (isOpt) Set(d, constraint)
+		else Set(sql.ExprConjunction(Set(d, optionalCond)), constraint)
+	      }))
+		else {
+		  if (isOpt) Set(constraint)
+		  else Set(sql.ExprDisjunction(Set(optionalCond, constraint)))
+		}
+	} else {
+	  Set()
+	}
+      R2RState(myState.joins, myState.varmap ++ newMap, myState.exprs ++ newConstraints)
+    } else {
+      /* This variable is new to the outer context. */
+      val p = PartialBinding(Set(BindingConstraint(optionalCond, varAliasAttr)))
+      val mapper:SQL2RDFValueMapper = nestedVarmap(v) match {
+	case IntMapper(_)      => IntMapper(p)
+	case StringMapper(_)   => StringMapper(p)
+	case DateMapper(_)   => DateMapper(p)
+	case RDFNoder(rel, _)  => RDFNoder(rel, p)
+	case RDFBNoder(rel, _) => RDFBNoder(rel, p)
+      }
+      R2RState(myState.joins, myState.varmap + (v -> mapper), myState.exprs)
+    }
+  }
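+  /* For example (illustrative): after both branches of a two-branch UNION
+   * binding ?who have been mapped, varmap(?who) is a PartialBinding holding
+   * one BindingConstraint per branch; toExpr renders such a binding as the
+   * nested IF(... _DISJOINT_ ..., ..., NULL) shown in the comment above
+   * toExpr.
+   */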
+
+  def synthesizeOuterJoin(initState:R2RState, gp:sparql.GraphPattern, negate:Boolean, db:sql.DatabaseDesc, enforceForeignKeys:Boolean):R2RState = {
+    /* SPARQL OPTIONALs are treated as SQL subselects.
+     * Set up initial state for this subselect.
+     */
+    val leftJoinAlias = sql.RelVar(sql.Name("G_opt" + initState.joins.size))
+    val initDisjoints:Set[sql.Select] = Set()
+    val emptyState = R2RState(
+      util.AddOrderedSet[sql.Join](), 
+      Map[sparql.Var, SQL2RDFValueMapper](), 
+      Set[sql.Expression]()
+    )
+
+    /* Create the select for the nested graph pattern.
+     */
+    val optionalState = mapGraphPattern(db, emptyState, gp, enforceForeignKeys)
+    val optionalVars = findVars(gp)
+    val disjointNo = sql.NamedAttribute(sql.PrimaryExpressionTyped(sql.Datatype.INTEGER,sql.Name("" + initState.joins.size)),
+					sql.AttrAlias(sql.Name("_DISJOINT_")))
+    val leftJoinVars = findVars(gp).toList
+    val attrlist:Set[sql.NamedAttribute] = leftJoinVars.foldLeft(Set(disjointNo))((attrs, v) =>
+      attrs ++ Set(sql.NamedAttribute(varToAttribute(optionalState.varmap, v), sql.AttrAlias(attrAliasNameFromVar(v))))
+      										)
+    val subselect = sql.Select(
+      sql.AttributeList(attrlist),
+      sql.TableList(optionalState.joins),
+      optionalState.exprs.size match {
+      	case 0 => None
+      	case 1 => Some(optionalState.exprs.toList(0))
+      	case _ => Some(sql.ExprConjunction(optionalState.exprs))
+      }
+    )
+
+    /* Create a condition that is true when this OPTIONAL did not match,
+     * i.e. its flag comes back NULL from the LEFT OUTER JOIN (the flag is
+     * called _DISJOINT_ because OPTIONAL behaves much like a disjunction).
+     */
+    val optionalCond = sql.RelationalExpressionNull(sql.PrimaryExpressionAttr(
+      sql.RelVarAttr(leftJoinAlias, sql.Attribute(sql.Name("_DISJOINT_")))))
+
+    /* Bind variables to the attributes projected from the subselect; handle
+     * corefs (equivalence with earlier bindings).
+     */
+    val outerState2 =
+      optionalVars.foldLeft(
+	R2RState(initState.joins,
+		 initState.varmap,
+		 Set[sql.Expression]()))((myState, v) => 
+		   subselectVars(myState, v, leftJoinAlias, optionalCond,
+				 initState.varmap, optionalState.varmap, true))
+
+    /* The final state includes the subselect as a join, the variables bound
+     * to the subselect projection, and no new expressions. The expressions
+     * derived from corefs become conditions on the LEFT OUTER JOIN.
+     */
+    val join = sql.LeftOuterJoin(sql.AliasedResource(sql.Subselect(subselect), leftJoinAlias), 
+	 outerState2.exprs.size match {
+	   case 0 =>
+	     sql.RelationalExpressionEq(sql.PrimaryExpressionTyped(sql.Datatype.INTEGER,sql.Name("1")),
+					sql.PrimaryExpressionTyped(sql.Datatype.INTEGER,sql.Name("1")))
+	     // /* Require corefs unless we have a leading OPTIONAL. */
+	     // if (...)
+	     // else
+	     //   error ("Nested GP has no variables shared with its context; cowardly refusing to join ON 1.")
+	   case 1 => outerState2.exprs.toList(0)
+	   case _ => sql.ExprConjunction(outerState2.exprs)
+	 }
+       )
+    val exprs =
+      if (negate) {
+	initState.exprs + sql.RelationalExpressionNull(sql.PrimaryExpressionAttr(sql.RelVarAttr(leftJoinAlias, sql.Attribute(sql.Name("_DISJOINT_")))))
+      } else initState.exprs
+    R2RState(initState.joins + join, outerState2.varmap, exprs)
+  }
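+  /* Editorial sketch of the shape synthesizeOuterJoin produces (compare the
+   * optJoin1 and leadOpt1 tests):
+   *   ... FROM Employee AS R_emp
+   *       LEFT OUTER JOIN (SELECT ..., 1 AS _DISJOINT_ FROM ...) AS G_opt1
+   *                       ON G_opt1.emp=R_emp.empid
+   * The ON condition carries the coref equalities collected by subselectVars;
+   * with negate=true (MINUS) the outer WHERE additionally requires
+   * G_opt1._DISJOINT_ IS NULL.
+   */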
+
+  def mapGraphPattern(db:sql.DatabaseDesc, state:R2RState, gp:sparql.GraphPattern, enforceForeignKeys:Boolean):R2RState = {
+    gp match {
+      case sparql.TableFilter(gp2:sparql.GraphPattern, expr:sparql.Expression) => {
+	val state2 = mapGraphPattern(db, state, gp2, enforceForeignKeys)
+
+	/* Add constraints for all the FILTERS */
+	val filterExprs:Set[sql.RelationalExpression] =
+	  expr.conjuncts.toSet map ((x:sparql.PrimaryExpression) => filter2expr(state2.varmap, x))
+
+	R2RState(state2.joins, state2.varmap, state2.exprs ++ filterExprs)
+      }
+      case sparql.TriplesBlock(triplepatterns) => {
+	/* Examine each triple, updating the compilation state. */
+	val state2 = triplepatterns.foldLeft(state)((incState,s) => bindOnPredicate(db, incState, s, enforceForeignKeys))
+	val nullExprs = findVars(gp).foldLeft(Set[sql.Expression]())((s, vvar) => {
+	  if (varToAttributeDisjoints(state2.varmap, vvar).size == 0)
+	    s ++ Set(sql.RelationalExpressionNotNull(sql.PrimaryExpressionAttr(varToAttribute(state2.varmap, vvar))))
+	  else
+	    s
+	})
+	R2RState(state2.joins, state2.varmap, state2.exprs ++ nullExprs)
+      }
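+      /* Editorial note on the TriplesBlock case above: the IS NOT NULL guards
+       * exclude rows whose bound column is NULL (a NULL cell asserts no
+       * triple); variables already guarded by _DISJOINT_ conditions from a
+       * surrounding UNION/OPTIONAL subselect are left alone.
+       */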
+      case sparql.TableConjunction(list) => {
+	list.foldLeft(state)((incState,s) => mapGraphPattern(db, incState, s, enforceForeignKeys))
+      }
+      case sparql.TableDisjunction(disjoints) => {
+	/* SPARQL UNIONs are treated as SQL subselects.
+	 * Set up initial state for this subselect.
+	 */
+	val unionAlias = sql.RelVar(sql.Name("G_union" + state.joins.size))
+	val emptyState = R2RState(
+	  util.AddOrderedSet[sql.Join](), 
+	  Map[sparql.Var, SQL2RDFValueMapper](), 
+	  Set[sql.Expression]()
+	)
+	val unionVars = disjoints.foldLeft(Set[sparql.Var]())((mySet,disjoint) =>
+	  mySet ++ findVars(disjoint)).toList
+
+	/* Map the disjoints to subselects.
+	 * <no> is used for uniquely naming flags in the SELECTs used to
+	 * indicate which disjoint produced a tuple.
+	 */
+	val (subselects, _) = disjoints.foldLeft((Set[sql.Select](), 0))((incPair,disjoint) => {
+	  val (subselects, no) = incPair
+	  val disjointState = mapGraphPattern(db, emptyState, disjoint, enforceForeignKeys)
+	  val disjointNo = sql.NamedAttribute(sql.PrimaryExpressionTyped(sql.Datatype.INTEGER,sql.Name("" + no)),
+					      sql.AttrAlias(sql.Name("_DISJOINT_")))
+
+	  val attrlist:Set[sql.NamedAttribute] = unionVars.foldLeft(Set(disjointNo))((attrs, v) => {
+	    val attrOrNull = if (disjointState.varmap.contains(v)) varToAttribute(disjointState.varmap, v) else sql.ConstNULL()
+	    attrs ++ Set(sql.NamedAttribute(attrOrNull, sql.AttrAlias(attrAliasNameFromVar(v))))
+	  })
+
+	  val subselect = sql.Select(
+	    sql.AttributeList(attrlist),
+	    sql.TableList(disjointState.joins),
+	    disjointState.exprs.size match {
+	      case 0 => None
+	      case 1 => Some(disjointState.exprs.toList(0))
+	      case _ => Some(sql.ExprConjunction(disjointState.exprs))
+	    }
+	  )
+	  (subselects + subselect, no+1)
+	})
+
+	/* Bind variables to the attributes projected from the subselect; handle
+	 * corefs (equivalence with earlier bindings).
+	 * <no> is used for uniquely naming flags in the SELECTs used to
+	 * indicate which disjoint produced a tuple.
+	 * <state2> will have no additional tables in the TableList.
+	 */
+	val (state2, _) = disjoints.foldLeft((state, 0))((incPair,disjoint) => {
+	  val (outerState, no) = incPair
+	  val disjointState = mapGraphPattern(db, emptyState, disjoint, enforceForeignKeys)
+	  val disjointVars = findVars(disjoint)
+
+	  /* Create a condition that is true when this tuple came from some
+	   * other disjoint: coref constraints for this disjoint's variables
+	   * only need to hold when its _DISJOINT_ flag equals <no>.
+	   */
+	  val disjointCond = sql.RelationalExpressionNe(sql.PrimaryExpressionAttr(sql.RelVarAttr(unionAlias, sql.Attribute(sql.Name("_DISJOINT_")))),
+							sql.PrimaryExpressionTyped(sql.Datatype.INTEGER,sql.Name("" + no)))
+	  val outerState2 = disjointVars.foldLeft(outerState)((myState, v) =>
+	      subselectVars(myState, v, unionAlias, disjointCond, outerState.varmap, disjointState.varmap, false))
+	  (outerState2, no+1)
+	})
+	val subselect = sql.Subselect(sql.Union(subselects))
+	R2RState(state.joins + sql.InnerJoin(sql.AliasedResource(subselect,unionAlias), None), state2.varmap, state2.exprs)
+      }
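+      /* Editorial sketch of the TableDisjunction output (compare the disj1
+       * test):
+       *   INNER JOIN (SELECT 0 AS _DISJOINT_, ... FROM ...
+       *               UNION
+       *               SELECT 1 AS _DISJOINT_, ... FROM ...) AS G_union1
+       * Each branch projects the full set of union variables, padding the
+       * ones it does not bind with NULL, and corefs with the outer pattern
+       * are guarded by _DISJOINT_!=<no> tests in the outer WHERE clause.
+       */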
+      case sparql.OptionalGraphPattern(gp) => {
+	/* state_postLeadingTable: create an initial table if the first conjoint is optional.
+	 * e.g. ... FROM (SELECT 1 AS _EMPTY_) AS _EMPTY_ LEFT OUTER JOIN ...
+	 */
+	val state_postLeadingTable =
+	  if (state.joins.size == 0)
+	    R2RState(state.joins + sql.InnerJoin(sql.AliasedResource(sql.Subselect(
+	      sql.Select(
+		sql.AttributeList(Set(sql.NamedAttribute(sql.PrimaryExpressionTyped(sql.Datatype.INTEGER,sql.Name("1")),
+							 sql.AttrAlias(sql.Name("_EMPTY_"))))),
+		sql.TableList(util.AddOrderedSet()),
+		None
+	      )), sql.RelVar(sql.Name("_EMPTY_"))), None), state.varmap, state.exprs)
+	  else
+	    state
+	synthesizeOuterJoin(state_postLeadingTable, gp, false, db, enforceForeignKeys)
+      }
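+      /* Editorial note: the synthetic (SELECT 1 AS _EMPTY_) AS _EMPTY_ relation
+       * above only exists so that a leading OPTIONAL has a left operand for its
+       * LEFT OUTER JOIN, as seen in the leadOpt1 test.
+       */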
+      case sparql.MinusGraphPattern(gp) => {
+	if (state.joins.size == 0) state
+	else synthesizeOuterJoin(state, gp, true, db, enforceForeignKeys)
+      }
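+      /* Editorial note: MINUS reuses the OPTIONAL machinery with negate=true,
+       * so "A MINUS {B}" becomes a LEFT OUTER JOIN against B's subselect plus a
+       * G_optN._DISJOINT_ IS NULL requirement (compare the lists-notBound
+       * test); a MINUS with no preceding joins is dropped as a no-op.
+       */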
+      case sparql.GraphGraphPattern(gp) => error("no code to handle GraphGraphPatterns (" + gp + ")")
+    }
+  }
+
+  def apply (db:sql.DatabaseDesc, sparquery:sparql.Select, stem:StemURI, enforceForeignKeys:Boolean, concat:Boolean) : sql.Select = {
+    val sparql.Select(attrs, triples) = sparquery
+
+    /* Create an object to hold our compilation state. */
+    val initState = R2RState(
+      util.AddOrderedSet[sql.Join](), 
+      Map[sparql.Var, SQL2RDFValueMapper](), 
+      Set[sql.Expression]()
+    )
+
+    val r2rState = mapGraphPattern(db, initState, sparquery.gp, enforceForeignKeys)
+
+    /* Select the attributes corresponding to the variables
+     * in the SPARQL SELECT.  */
+    val attrlist:Set[sql.NamedAttribute] = attrs.attributelist.foldLeft(Set[sql.NamedAttribute]())((attrs, v) => 
+      attrs + sql.NamedAttribute({
+	if (concat) varToConcat(r2rState.varmap, v, stem)
+	else varToAttribute(r2rState.varmap, v)
+      } , sql.AttrAlias(attrAliasNameFromVar(v))
+      ))
+
+    /* Construct the generated query as an abstract syntax. */
+    val select = sql.Select(
+      sql.AttributeList(attrlist),
+      sql.TableList(r2rState.joins),
+      r2rState.exprs.size match {
+	case 0 => None
+	case 1 => Some(r2rState.exprs.toList(0))
+	case _ => Some(sql.ExprConjunction(r2rState.exprs))
+      }
+    )
+    // println("r2rState.varmap: " + r2rState.varmap)
+    // println("select.expression: " + select.expression)
+    select.makePretty()
+  }
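+  /* Editorial usage sketch, mirroring the test suite in SparqlToSqlTest.scala;
+   * the enclosing object is assumed to be the one this changeset renames to
+   * SparqlToSql:
+   *
+   *   val sparqlParser = Sparql()
+   *   val query = sparqlParser.parseAll(sparqlParser.select, "SELECT ...").get
+   *   val sqlAst = SparqlToSql(db, query, StemURI("http://hr.example/DB/"),
+   *                            true,  // enforceForeignKeys: join through FK targets
+   *                            false) // concat: true would CONCAT() turtle node strings
+   *   // sqlAst is a sql.Select abstract syntax tree (after makePretty)
+   */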
+}
+
--- a/src/test/scala/RDB2RDFTest.scala	Sun Jan 31 10:39:08 2010 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,1035 +0,0 @@
-/* RDB2RDFTest: transform SPARQL to SQL and compare against a reference query.
- * $Id$
- */
-
-package w3c.sw
-
-import org.scalatest.FunSuite
-import java.net.URI
-import w3c.sw.sql.{Sql,DatabaseDesc,Relation,RelationDesc,Attribute,Value,Datatype,ForeignKey,Name}
-import w3c.sw.sparql.Sparql
-import w3c.sw.rdb2rdf.{RDB2RDF,StemURI}
-
-/* The RDB2RDFTest class transforms SPARQL queries to a relational data
- * structure and compares them to a structure parsed from SQL.
- */
-class RDB2RDFTest extends FunSuite {
-
-  /* These tests use a schema and queries designed to work with this
-   * example database:
-       Employee+----------+------------+---------+
-       | empid | lastName | birthday   | manager |
-       +-------+----------+------------+---------+
-       |    18 | Johnson  | 1969-11-08 |    NULL |    TaskAssignments--------+
-       |   253 | Smith    | 1979-01-18 |      18 |    | id | task | employee |
-       |   255 | Jones    | 1981-03-24 |     253 |    +----+------+----------+
-       |    19 | Xu       | 1966-11-08 |    NULL |    |  1 |    1 |       18 |
-       |   254 | Ishita   | 1971-10-31 |     253 |    |  2 |    2 |      253 |
-       +-------+----------+------------+---------+    |  3 |    3 |       19 |
-       					              |  4 |    4 |      253 |
-       Tasks----+--------+------+		      |  5 |    1 |      253 |
-       | taskid | name   | lead |		      |  6 |    2 |      255 |
-       +--------+--------+------+		      |  7 |    3 |      255 |
-       |      1 | widget |   18 |		      |  8 |    4 |      254 |
-       |      2 | dingus |  253 |		      +----+------+----------+
-       |      3 | thingy |   18 |
-       |      4 | toy    |  253 |
-       +--------+--------+------+
- */
-
-  val db:DatabaseDesc = DatabaseDesc(
-    Map(Relation("Employee") -> 
-	RelationDesc(Option(Attribute("empid")), 
-		     Map(Attribute("empid") -> Value(Datatype.INTEGER),
-			 Attribute("lastName") -> Value(Datatype.STRING),
-			 Attribute("birthday") -> Value(Datatype.DATE),
-			 Attribute("manager") -> ForeignKey(Relation("Employee"), Attribute("empid")))),
-	Relation("Tasks") -> 
-	RelationDesc(Option(Attribute("taskid")),
-		     Map(Attribute("taskid") -> Value(Datatype.INTEGER),
-			 Attribute("name") -> Value(Datatype.STRING),
-			 Attribute("lead") -> ForeignKey(Relation("Employee"), Attribute("empid")))),
-	Relation("TaskAssignments") -> 
-	RelationDesc(Option(Attribute("id")),
-		     Map(Attribute("task") -> ForeignKey(Relation("Tasks"), Attribute("taskid")), 
-			 Attribute("employee") -> ForeignKey(Relation("Employee"),  Attribute("empid"))))
-      ))
-
-/* The reference RDF representation (queriable with the SPARQL in the tests) is:
- */
-
-  val dbAsTurtle = """
-PREFIX empP : <http://hr.example/DB/Employee#>
-PREFIX task : <http://hr.example/DB/Tasks#>
-PREFIX tass : <http://hr.example/DB/TaskAssignments#>
-PREFIX xsd : <http://www.w3.org/2001/XMLSchema#>
-
-<http://hr.example/DB/Employee/empid.18#record>
-   empP:lastName "Johnson"^^xsd:string ; empP:birthday "1969-11-08"^^xsd:date .
-<http://hr.example/DB/Employee/empid.253#record>
-   empP:lastName "Smith"^^xsd:string ; empP:birthday "1969-11-08"^^xsd:date ;
-   empP:manager <http://hr.example/DB/Employee/empid.18#record> .
-<http://hr.example/DB/Employee/empid.255#record>
-   empP:lastName "Jones"^^xsd:string ; empP:birthday "1981-03-24"^^xsd:date ;
-   empP:manager <http://hr.example/DB/Employee/empid.253#record> .
-<http://hr.example/DB/Employee/empid.19#record>
-   empP:lastName "Xu"^^xsd:string ; empP:birthday "1966-11-08"^^xsd:date .
-<http://hr.example/DB/Employee/empid.254#record>
-   empP:lastName "Ishita"^^xsd:string ; empP:birthday "1971-10-31"^^xsd:date ;
-   empP:manager <http://hr.example/DB/Employee/empid.253#record> .
-
-<http://hr.example/DB/Tasks/taskid.1#record>
-   task:lastName "widget"^^xsd:string ; 
-   task:manager <http://hr.example/DB/Employee/empid.18#record> .
-<http://hr.example/DB/Tasks/taskid.2#record>
-   task:lastName "dingus"^^xsd:string ; 
-   task:manager <http://hr.example/DB/Employee/empid.253#record> .
-<http://hr.example/DB/Tasks/taskid.3#record>
-   task:lastName "thingy"^^xsd:string ; 
-   task:manager <http://hr.example/DB/Employee/empid.18#record> .
-<http://hr.example/DB/Tasks/taskid.4#record>
-   task:lastName "toy"^^xsd:string ; 
-   task:manager <http://hr.example/DB/Employee/empid.253#record> .
-
-<http://hr.example/DB/TaskAssignment/id.1#record>
-   tass:task <http://hr.example/DB/Tasks/taskid.1#record>
-   tass:employee <http://hr.example/DB/Employee/empid.18#record> .
-<http://hr.example/DB/TaskAssignment/id.2#record>
-   tass:task <http://hr.example/DB/Tasks/taskid.2#record>
-   tass:employee <http://hr.example/DB/Employee/empid.253#record> .
-<http://hr.example/DB/TaskAssignment/id.3#record>
-   tass:task <http://hr.example/DB/Tasks/taskid.3#record>
-   tass:employee <http://hr.example/DB/Employee/empid.19#record> .
-<http://hr.example/DB/TaskAssignment/id.4#record>
-   tass:task <http://hr.example/DB/Tasks/taskid.4#record>
-   tass:employee <http://hr.example/DB/Employee/empid.253#record> .
-<http://hr.example/DB/TaskAssignment/id.5#record>
-   tass:task <http://hr.example/DB/Tasks/taskid.1#record>
-   tass:employee <http://hr.example/DB/Employee/empid.253#record> .
-<http://hr.example/DB/TaskAssignment/id.6#record>
-   tass:task <http://hr.example/DB/Tasks/taskid.2#record>
-   tass:employee <http://hr.example/DB/Employee/empid.255#record> .
-<http://hr.example/DB/TaskAssignment/id.7#record>
-   tass:task <http://hr.example/DB/Tasks/taskid.3#record>
-   tass:employee <http://hr.example/DB/Employee/empid.255#record> .
-<http://hr.example/DB/TaskAssignment/id.8#record>
-   tass:task <http://hr.example/DB/Tasks/taskid.4#record>
-   tass:employee <http://hr.example/DB/Employee/empid.254#record> .
-"""
-  /* The obvious test is that the results from the SPARQL query and the
-   * relational query match.
-   *
-   * Data can be converted to turtle strings, or left as native formats for
-   * mapping the the querier. The first examples constrast queries relying
-   * on a post-query transformation against those returing turtle atoms.
-   */
-
-  test("decompose a predicate uri in stem, rel and attr") {
-    val uri = sparql.Uri("http://hr.example/our/favorite/DB/Employee#lastName")
-    val puri:RDB2RDF.PUri = RDB2RDF.parsePredicateURI(uri)
-    assert(puri === RDB2RDF.PUri(RDB2RDF.Stem("http://hr.example/our/favorite/DB"),
-				 RDB2RDF.Rel("Employee"),
-				 RDB2RDF.Attr("lastName")))
-  }
-
-  test("decompose a object uri in stem, rel and attr") {
-    val uri = sparql.Uri("http://hr.example/our/favorite/DB/Employee/id.18#record")
-    val objuri:RDB2RDF.NodeUri = RDB2RDF.parseObjectURI(uri)
-    assert(objuri === RDB2RDF.NodeUri(RDB2RDF.Stem("http://hr.example/our/favorite/DB"),
-				      RDB2RDF.Rel("Employee"),
-				      RDB2RDF.Attr("id"),
-				      RDB2RDF.CellValue("18")))
-  }
-
-  /* Disable turtle string-izing (RDB2RDF parm 5) and return native format: */
-  test("?s <p> <x>") {
-    val sparqlParser = Sparql()
-    val sparqlSelect = sparqlParser.parseAll(sparqlParser.select, """
-PREFIX empP : <http://hr.example/DB/Employee#>
-SELECT ?emp {
-?emp  empP:manager    <http://hr.example/DB/Employee/empid.18#record>
-}
-""").get
-    val sqlParser = Sql()
-    val parsed = sqlParser.parseAll(sqlParser.select, """
-SELECT R_emp.empid AS emp
-       FROM Employee AS R_emp
-            INNER JOIN Employee AS R_empid18
- WHERE R_empid18.empid=R_emp.manager AND R_empid18.empid=18 AND R_emp.empid IS NOT NULL
-""").get
-    val generated = RDB2RDF(db, sparqlSelect, StemURI("http://hr.example/DB/"), true, false)
-    assert(generated === parsed)
-    val output = """
-+-----+
-| emp |
-+-----+
-| 253 | 
-+-----+
-"""
-  }
-
-  /* Enable turtle string-izing and test URI generation: */
-  test("SELECT <x> { ?sf <p> <x>} (in-SQL Nodizer)") {
-    val sparqlParser = Sparql()
-    val sparqlSelect = sparqlParser.parseAll(sparqlParser.select, """
-PREFIX empP : <http://hr.example/DB/Employee#>
-SELECT ?emp {
-?emp  empP:manager    <http://hr.example/DB/Employee/empid.18#record>
-}
-""").get
-    val sqlParser = Sql()
-    val parsed = sqlParser.parseAll(sqlParser.select, """
-SELECT CONCAT("http://hr.example/DB/", "Employee", "/", "empid", ".", R_emp.empid, "#record") AS emp
-       FROM Employee AS R_emp
-            INNER JOIN Employee AS R_empid18
- WHERE R_empid18.empid=R_emp.manager AND R_empid18.empid=18 AND R_emp.empid IS NOT NULL
-""").get
-    val generated = RDB2RDF(db, sparqlSelect, StemURI("http://hr.example/DB/"), true, true)
-    assert(generated === parsed)
-    val output = """
-+------------------------------------------------+
-| emp                                            |
-+------------------------------------------------+
-| http://hr.example/DB/Employee/empid.253#record | 
-+------------------------------------------------+
-"""
-  }
-
-  /* Enable turtle string-izing and test RDFLiteral generation: */
-  test("SELECT <x> { ?sf <p> \"asdf\"} (in-SQL Nodizer)") {
-    val sparqlParser = Sparql()
-    val sparqlSelect = sparqlParser.parseAll(sparqlParser.select, """
-PREFIX empP : <http://hr.example/DB/Employee#>
-SELECT ?name {
-?emp  empP:lastName  ?name
-}
-""").get
-    val sqlParser = Sql()
-    val parsed = sqlParser.parseAll(sqlParser.select, """
-SELECT CONCAT("'", R_emp.lastName, "'^^<http://www.w3.org/2001/XMLSchema#string>") AS name
-       FROM Employee AS R_emp
- WHERE R_emp.empid IS NOT NULL AND R_emp.lastName IS NOT NULL
-""").get
-    val generated = RDB2RDF(db, sparqlSelect, StemURI("http://hr.example/DB/"), true, true)
-    assert(generated === parsed)
-    val output = """
-+----------------------------------------------------+
-| name                                               |
-+----------------------------------------------------+
-| Johnson^^<http://www.w3.org/2001/XMLSchema#string> | 
-| Smith^^<http://www.w3.org/2001/XMLSchema#string>   | 
-| Jones^^<http://www.w3.org/2001/XMLSchema#string>   | 
-| Xu^^<http://www.w3.org/2001/XMLSchema#string>      | 
-| Ishita^^<http://www.w3.org/2001/XMLSchema#string>  | 
-+----------------------------------------------------+
-"""
-  }
-
-  test("<s> <p> ?x") {
-    val sparqlParser = Sparql()
-    val sparqlSelect = sparqlParser.parseAll(sparqlParser.select, """
-PREFIX empP : <http://hr.example/DB/Employee#>
-SELECT ?manager {
-<http://hr.example/DB/Employee/empid.253#record>  empP:manager    ?manager
-}
-""").get
-    val sqlParser = Sql()
-    val parsed = sqlParser.parseAll(sqlParser.select, """
-SELECT R_manager.empid AS manager
-       FROM Employee AS R_empid253
-            INNER JOIN Employee AS R_manager
- WHERE R_manager.empid=R_empid253.manager AND R_empid253.empid=253
-""").get
-    val generated = RDB2RDF(db, sparqlSelect, StemURI("http://hr.example/DB/"), true, false)
-    assert(generated === parsed)
-    val output = """
-+---------+
-| manager |
-+---------+
-|      18 | 
-+---------+
-"""
-  }
-
-  test("?s <p> 18") {
-    /* Literal foreign keys should probably throw an error,
-     * instead does what user meant. */
-    val sparqlParser = Sparql()
-    val sparqlSelect = sparqlParser.parseAll(sparqlParser.select, """
-PREFIX empP : <http://hr.example/DB/Employee#>
-PREFIX xsd : <http://www.w3.org/2001/XMLSchema#>
-SELECT ?emp {
-?emp  empP:manager    "18"^^xsd:integer
-}
-""").get
-    val sqlParser = Sql()
-    val parsed = sqlParser.parseAll(sqlParser.select, """
-SELECT R_emp.empid AS emp
-       FROM Employee AS R_emp
-            INNER JOIN Employee AS R_18
- WHERE R_18.empid=R_emp.manager AND R_18.empid=18 AND R_emp.empid IS NOT NULL
-""").get
-    val generated = RDB2RDF(db, sparqlSelect, StemURI("http://hr.example/DB/"), true, false)
-    assert(generated === parsed)
-    val output = """
-+-----+
-| emp |
-+-----+
-| 253 | 
-+-----+
-"""
-  }
-
-  test("?s1 <p> ?x . ?s2 <p> ?x") {
-    val sparqlParser = Sparql()
-    val sparqlSelect = sparqlParser.parseAll(sparqlParser.select, """
-PREFIX tass : <http://hr.example/DB/TaskAssignments#>
-SELECT ?task1 ?task2 {
-   ?task1  tass:employee    ?who .
-   ?task2  tass:employee    ?who
-   FILTER(?task1 < ?task2)
-}
-""").get
-    val sqlParser = Sql()
-    val parsed = sqlParser.parseAll(sqlParser.select, """
-SELECT R_task1.id AS task1, R_task2.id AS task2
-  FROM TaskAssignments AS R_task1
-       INNER JOIN Employee AS R_who ON R_who.empid=R_task1.employee
-       INNER JOIN TaskAssignments AS R_task2 ON R_who.empid=R_task2.employee
- WHERE R_task1.id<R_task2.id 
-""").get
-    val generated = RDB2RDF(db, sparqlSelect, StemURI("http://hr.example/DB/"), true, false)
-    assert(generated === parsed)
-    val output = """
-+-------+-------+
-| task1 | task2 |
-+-------+-------+
-|     2 |     4 | 
-|     2 |     5 | 
-|     4 |     5 | 
-|     6 |     7 | 
-+-------+-------+
-"""
-  }
-
-  test("transform SQLbgp") {
-    val sparqlParser = Sparql()
-    val sparqlSelect = sparqlParser.parseAll(sparqlParser.select, """
-PREFIX empP : <http://hr.example/DB/Employee#>
-SELECT ?empName ?manageName {
-?emp      empP:lastName   ?empName .
-?emp      empP:manager    ?manager .
-?manager  empP:lastName   ?manageName 
-}
-""").get
-    val sqlParser = Sql()
-    val parsed = sqlParser.parseAll(sqlParser.select, """
-SELECT R_emp.lastName AS empName, R_manager.lastName AS manageName
-       FROM Employee AS R_emp
-            INNER JOIN Employee AS R_manager ON R_manager.empid=R_emp.manager
- WHERE R_emp    .lastName IS NOT NULL AND R_manager.lastName IS NOT NULL
-""").get
-    val generated = RDB2RDF(db, sparqlSelect, StemURI("http://hr.example/DB/"), true, false)
-    assert(generated === parsed)
-    val output = """
-+---------+------------+
-| empName | manageName |
-+---------+------------+
-| Smith   | Johnson    | 
-| Jones   | Smith      | 
-| Ishita  | Smith      | 
-+---------+------------+
-"""
-  }
-
-  test("transform tup1 no-enforce") {
-    val sparqlParser = Sparql()
-    val sparqlSelect = sparqlParser.parseAll(sparqlParser.select, """
-PREFIX empP : <http://hr.example/DB/Employee#>
-SELECT ?empName {
- ?emp      empP:lastName   ?empName .
- ?emp      empP:manager    <http://hr.example/DB/Employee/empid.18#record>
- }
-""").get
-    val sqlParser = Sql()
-    val parsed = sqlParser.parseAll(sqlParser.select, """
-SELECT R_emp.lastName AS empName
-       FROM Employee AS R_emp
- WHERE R_emp.manager=18 AND R_emp.lastName IS NOT NULL
-""").get
-    val generated = RDB2RDF(db, sparqlSelect, StemURI("http://hr.example/DB/"), false, false)
-    assert(generated === parsed)
-    val output = """
-+---------+
-| empName |
-+---------+
-| Smith   | 
-+---------+
-"""
-  }
-
-  test("transform tup1 enforce") {
-    val sparqlParser = Sparql()
-    val sparqlSelect = sparqlParser.parseAll(sparqlParser.select, """
-PREFIX empP : <http://hr.example/DB/Employee#>
-SELECT ?empName {
- ?emp      empP:lastName   ?empName .
- ?emp      empP:manager    <http://hr.example/DB/Employee/empid.18#record>
- }
-""").get
-    val sqlParser = Sql()
-    val parsed = sqlParser.parseAll(sqlParser.select, """
-SELECT R_emp.lastName AS empName
-       FROM Employee AS R_emp
-            INNER JOIN Employee AS R_empid18
- WHERE R_empid18.empid=R_emp.manager AND R_empid18.empid=18 AND R_emp.lastName IS NOT NULL
-""").get
-    val generated = RDB2RDF(db, sparqlSelect, StemURI("http://hr.example/DB/"), true, false)
-    assert(generated === parsed)
-    val output = """
-+---------+
-| empName |
-+---------+
-| Smith   | 
-+---------+
-"""
-  }
-
-
-  test("transform litConst1") {
-    val sparqlParser = Sparql()
-    val sparqlSelect = sparqlParser.parseAll(sparqlParser.select, """
-PREFIX empP : <http://hr.example/DB/Employee#>
-PREFIX xsd : <http://www.w3.org/2001/XMLSchema#>
-SELECT ?empName {
- ?emp      empP:lastName   ?empName .
- ?emp      empP:manager    ?manager .
- ?manager  empP:lastName   "Johnson"^^xsd:string
- }
-""").get
-    val sqlParser = Sql()
-    val parsed = sqlParser.parseAll(sqlParser.select, """
-SELECT R_emp.lastName AS empName
-  FROM Employee AS R_emp
-       INNER JOIN Employee AS R_manager
-WHERE R_manager.empid=R_emp.manager AND R_manager.lastName="Johnson" AND R_emp.lastName IS NOT NULL
-""").get
-    val generated = RDB2RDF(db, sparqlSelect, StemURI("http://hr.example/DB/"), true, false)
-    assert(generated === parsed)
-    val output = """
-+---------+
-| empName |
-+---------+
-| Smith   | 
-+---------+
-"""
-  }
-
-  test("transform filter1") {
-    val sparqlParser = Sparql()
-    val sparqlSelect = sparqlParser.parseAll(sparqlParser.select, """
-PREFIX empP : <http://hr.example/DB/Employee#>
-PREFIX task : <http://hr.example/DB/Tasks#>
-PREFIX tass : <http://hr.example/DB/TaskAssignments#>
-SELECT ?empName ?grandManagName {
-         ?emp          empP:lastName   ?empName .
-         ?emp          empP:birthday   ?empBday .
-         ?lower        tass:employee  ?emp .
-         ?lower        tass:task      ?ltask .
-         ?ltask        task:lead      ?taskLead .
-         ?taskLead     empP:birthday   ?manBday .
-         ?upper        tass:employee  ?taskLead .
-         ?upper        tass:task      ?utask .
-         ?utask        task:lead      ?grandManager .
-         ?grandManager empP:birthday   ?grandManBday .
-         ?grandManager empP:lastName   ?grandManagName
-         FILTER (?manBday < ?empBday && ?grandManBday < ?manBday)
-}
-""").get
-    val sqlParser = Sql()
-    val parsed = sqlParser.parseAll(sqlParser.select, """
-SELECT R_emp.lastName AS empName,
-       R_grandManager.lastName AS grandManagName
-  FROM Employee AS R_emp
-       INNER JOIN TaskAssignments AS R_lower ON R_emp.empid=R_lower.employee
-       INNER JOIN Tasks AS R_ltask ON R_ltask.taskid=R_lower.task
-       INNER JOIN Employee AS R_taskLead ON R_taskLead.empid=R_ltask.lead
-       INNER JOIN TaskAssignments AS R_upper ON R_taskLead.empid=R_upper.employee
-       INNER JOIN Tasks AS R_utask ON R_utask.taskid=R_upper.task
-       INNER JOIN Employee AS R_grandManager ON R_grandManager.empid=R_utask.lead
- WHERE R_taskLead.birthday<R_emp.birthday AND R_grandManager.birthday<R_taskLead.birthday
-   AND R_emp.lastName IS NOT NULL AND R_grandManager.lastName IS NOT NULL
-""").get
-    val generated = RDB2RDF(db, sparqlSelect, StemURI("http://hr.example/DB/"), true, false)
-    assert(generated === parsed)
-    val output = """
-+---------+----------------+
-| empName | grandManagName |
-+---------+----------------+
-| Jones   | Johnson        | 
-+---------+----------------+
-"""
-  }
-
-  /* Employess above and below Smith */
-  test("transform disj1") {
-    val sparqlParser = Sparql()
-    val sparqlSelect = sparqlParser.parseAll(sparqlParser.select, """
-PREFIX empP : <http://hr.example/DB/Employee#>
-PREFIX task : <http://hr.example/DB/Tasks#>
-PREFIX tass : <http://hr.example/DB/TaskAssignments#>
-SELECT ?name
- WHERE { ?who empP:lastName "Smith"^^xsd:string
-         { ?above   tass:employee  ?who .
-           ?above   tass:task      ?atask .
-           ?atask   task:lead      ?taskLead .
-           ?taskLead empP:lastName ?name }
-         UNION
-         { ?below   tass:task      ?btask .
-           ?btask   task:lead      ?who .
-           ?below   tass:employee  ?managed .
-           ?managed empP:lastName  ?name } }
-""").get
-    val sqlParser = Sql()
-    val parsed = sqlParser.parseAll(sqlParser.select, """
-SELECT G_union1.name AS name
-  FROM Employee AS R_who
-       INNER JOIN (
-         SELECT 0 AS _DISJOINT_, R_above.id AS above, R_above.task AS atask,
-                R_above.employee AS who, R_taskLead.lastName AS name, 
-                R_atask.lead AS taskLead,
-                NULL AS below, NULL AS btask, NULL AS managed
-           FROM TaskAssignments AS R_above
-                INNER JOIN Tasks AS R_atask ON R_atask.taskid=R_above.task
-                INNER JOIN Employee AS R_taskLead ON R_taskLead.empid=R_atask.lead
-          WHERE R_above.employee IS NOT NULL
-            AND R_taskLead.lastName IS NOT NULL
-       UNION
-         SELECT 1 AS _DISJOINT_, NULL AS above, NULL AS atask,
-                R_btask.lead AS who,R_managed.lastName AS name,
-                NULL AS taskLead,
-                R_below.id AS below, R_below.task AS btask,
-                R_below.employee AS managed
-           FROM TaskAssignments AS R_below
-                INNER JOIN Tasks AS R_btask ON R_btask.taskid=R_below.task
-                INNER JOIN Employee AS R_managed ON R_managed.empid=R_below.employee
-          WHERE R_managed.lastName IS NOT NULL
-            AND R_btask.lead IS NOT NULL
-                       ) AS G_union1
- WHERE R_who.lastName="Smith"
-       AND R_who.empid IS NOT NULL
-       AND (G_union1._DISJOINT_!=0 OR G_union1.who=R_who.empid)
-       AND (G_union1._DISJOINT_!=1 OR G_union1.who=R_who.empid)
-""").get
-    val generated = RDB2RDF(db, sparqlSelect, StemURI("http://hr.example/DB/"), false, false)
-    assert(generated === parsed)
-    val output = """
-+---------+
-| name    |
-+---------+
-| Johnson | 
-| Smith   | 
-| Smith   | 
-| Smith   | 
-| Jones   | 
-| Smith   | 
-| Ishita  | 
-+---------+
-"""
-  }
-
-  test("transform assymDisj1") {
-    val sparqlParser = Sparql()
-    val sparqlSelect = sparqlParser.parseAll(sparqlParser.select, """
-PREFIX empP : <http://hr.example/DB/Employee#>
-PREFIX task : <http://hr.example/DB/Tasks#>
-PREFIX tass : <http://hr.example/DB/TaskAssignments#>
-PREFIX xsd : <http://www.w3.org/2001/XMLSchema#>
-SELECT ?name ?bday
- WHERE { { ?above   tass:employee  ?who .
-           ?above   tass:task      ?atask .
-           ?atask   task:lead      ?taskLead .
-           ?taskLead empP:lastName  ?name }
-         UNION
-         { ?below   tass:task     ?btask .
-           ?btask   task:lead     ?who .
-           ?below   tass:employee ?managed .
-           ?managed empP:lastName  ?name .
-           ?managed empP:birthday  ?bday } 
-         ?who empP:lastName "Smith"^^xsd:string .
-         ?who empP:birthday ?bday }
-""").get
-    val sqlParser = Sql()
-    val parsed = sqlParser.parseAll(sqlParser.select, """
-SELECT G_union0.name AS name, G_union0.bday AS bday
-  FROM (
-         SELECT R_above.id AS above, R_above.task AS atask, NULL AS bday, NULL AS below,
-                NULL AS btask, NULL AS managed, R_taskLead.lastName AS name,
-                R_atask.lead AS taskLead, R_above.employee AS who, 0 AS _DISJOINT_
-           FROM TaskAssignments AS R_above
-                INNER JOIN Tasks AS R_atask ON R_atask.taskid=R_above.task
-                INNER JOIN Employee AS R_taskLead ON R_taskLead.empid=R_atask.lead
-          WHERE R_above.employee IS NOT NULL AND R_taskLead.lastName IS NOT NULL
-       UNION
-         SELECT NULL AS above, NULL AS atask, R_managed.birthday AS bday, R_below.id AS below,
-                R_below.task AS btask, R_below.employee AS managed, R_managed.lastName AS name,
-                NULL AS taskLead, R_btask.lead AS who, 1 AS _DISJOINT_
-           FROM TaskAssignments AS R_below
-                INNER JOIN Tasks AS R_btask ON R_btask.taskid=R_below.task
-                INNER JOIN Employee AS R_managed ON R_managed.empid=R_below.employee
-          WHERE R_btask.lead IS NOT NULL AND R_managed.birthday IS NOT NULL AND R_managed.lastName IS NOT NULL
-                  ) AS G_union0
-       INNER JOIN Employee AS R_who
- WHERE (G_union0._DISJOINT_!=0 OR R_who.empid=G_union0.who)
-   AND (G_union0._DISJOINT_!=1 OR R_who.birthday=G_union0.bday)
-   AND (G_union0._DISJOINT_!=1 OR R_who.empid=G_union0.who)
-   AND R_who.lastName="Smith"
-""").get // !!!    AND (G_union0.bday IS NOT NULL) AND (G_union0.who IS NOT NULL)
-    val generated = RDB2RDF(db, sparqlSelect, StemURI("http://hr.example/DB/"), false, false)
-    assert(generated === parsed)
-    val output = """
-"""
-  }
-
-  test("transform assymDisj1 reversed") {
-    val sparqlParser = Sparql()
-    val sparqlSelect = sparqlParser.parseAll(sparqlParser.select, """
-PREFIX empP : <http://hr.example/DB/Employee#>
-PREFIX task : <http://hr.example/DB/Tasks#>
-PREFIX tass : <http://hr.example/DB/TaskAssignments#>
-PREFIX xsd : <http://www.w3.org/2001/XMLSchema#>
-SELECT ?name
- WHERE {
-         ?who empP:lastName "Smith"^^xsd:string .
-         ?who empP:birthday ?bday
-         { ?above   tass:employee ?who .
-           ?above   tass:task     ?atask .
-           ?atask   task:lead     ?taskLead .
-           ?taskLead empP:lastName  ?name }
-         UNION
-         { ?below   tass:task     ?btask .
-           ?btask   task:lead     ?who .
-           ?below   tass:employee ?managed .
-           ?managed empP:lastName  ?name .
-           ?managed empP:birthday  ?bday } 
-       }
-""").get
-    val sqlParser = Sql()
-    val parsed = sqlParser.parseAll(sqlParser.select, """
-SELECT G_union1.name AS name
-  FROM Employee AS R_who
-       INNER JOIN (
-         SELECT R_above.id AS above, R_above.task AS atask, NULL AS bday, NULL AS below,
-                NULL AS btask, NULL AS managed, R_taskLead.lastName AS name,
-                R_atask.lead AS taskLead, R_above.employee AS who, 0 AS _DISJOINT_
-           FROM TaskAssignments AS R_above
-                INNER JOIN Tasks AS R_atask ON R_atask.taskid=R_above.task
-                INNER JOIN Employee AS R_taskLead ON R_taskLead.empid=R_atask.lead
-          WHERE R_above.employee IS NOT NULL AND R_taskLead.lastName IS NOT NULL
-       UNION
-         SELECT NULL AS above, NULL AS atask, R_managed.birthday AS bday, R_below.id AS below,
-                R_below.task AS btask, R_below.employee AS managed, R_managed.lastName AS name,
-                NULL AS taskLead, R_btask.lead AS who, 1 AS _DISJOINT_
-           FROM TaskAssignments AS R_below
-                INNER JOIN Tasks AS R_btask ON R_btask.taskid=R_below.task
-                INNER JOIN Employee AS R_managed ON R_managed.empid=R_below.employee
-          WHERE R_btask.lead IS NOT NULL AND R_managed.birthday IS NOT NULL AND R_managed.lastName IS NOT NULL
-                  ) AS G_union1
- WHERE (G_union1._DISJOINT_!=0 OR G_union1.who=R_who.empid)
-   AND (G_union1._DISJOINT_!=1 OR G_union1.bday=R_who.birthday)
-   AND (G_union1._DISJOINT_!=1 OR G_union1.who=R_who.empid)
-   AND R_who.birthday IS NOT NULL AND R_who.lastName="Smith"
-""").get
-    val generated = RDB2RDF(db, sparqlSelect, StemURI("http://hr.example/DB/"), false, false)
-    assert(generated === parsed)
-    val output = """
-"""
-  }
-
-  test("transform assymDisj1 interspersed") {
-    val sparqlParser = Sparql()
-    val sparqlSelect = sparqlParser.parseAll(sparqlParser.select, """
-PREFIX empP : <http://hr.example/DB/Employee#>
-PREFIX task : <http://hr.example/DB/Tasks#>
-PREFIX tass : <http://hr.example/DB/TaskAssignments#>
-PREFIX xsd : <http://www.w3.org/2001/XMLSchema#>
-SELECT ?name
- WHERE {
-         ?who empP:lastName "Smith"^^xsd:string
-         { ?above   tass:employee  ?who .
-           ?above   tass:task      ?atask .
-           ?atask   task:lead      ?taskLead .
-           ?taskLead empP:lastName ?name }
-         UNION
-         { ?below   tass:task     ?btask .
-           ?btask   task:lead     ?who .
-           ?below   tass:employee ?managed .
-           ?managed empP:lastName ?name .
-           ?managed empP:birthday ?bday } 
-         ?who empP:birthday ?bday
-       }
-""").get
-    val sqlParser = Sql()
-    val parsed = sqlParser.parseAll(sqlParser.select, """
-SELECT G_union1.name AS name
-  FROM Employee AS R_who
-       INNER JOIN (
-         SELECT R_above.id AS above, R_above.task AS atask, NULL AS bday, NULL AS below,
-                NULL AS btask, NULL AS managed, R_taskLead.lastName AS name,
-                R_atask.lead AS taskLead, R_above.employee AS who, 0 AS _DISJOINT_
-           FROM TaskAssignments AS R_above
-                INNER JOIN Tasks AS R_atask ON R_atask.taskid=R_above.task
-                INNER JOIN Employee AS R_taskLead ON R_taskLead.empid=R_atask.lead
-          WHERE R_above.employee IS NOT NULL AND R_taskLead.lastName IS NOT NULL
-       UNION
-         SELECT NULL AS above, NULL AS atask, R_managed.birthday AS bday, R_below.id AS below,
-                R_below.task AS btask, R_below.employee AS managed, R_managed.lastName AS name,
-                NULL AS taskLead, R_btask.lead AS who, 1 AS _DISJOINT_
-           FROM TaskAssignments AS R_below
-                INNER JOIN Tasks AS R_btask ON R_btask.taskid=R_below.task
-                INNER JOIN Employee AS R_managed ON R_managed.empid=R_below.employee
-          WHERE R_btask.lead IS NOT NULL AND R_managed.birthday IS NOT NULL AND R_managed.lastName IS NOT NULL
-                  ) AS G_union1
- WHERE (G_union1._DISJOINT_!=0 OR G_union1.who=R_who.empid)
-   AND (G_union1._DISJOINT_!=1 OR G_union1.who=R_who.empid)
-   AND (G_union1._DISJOINT_!=1 OR R_who.birthday=G_union1.bday)
-   AND R_who.lastName="Smith"
-""").get
-    val generated = RDB2RDF(db, sparqlSelect, StemURI("http://hr.example/DB/"), false, false)
-    assert(generated === parsed)
-    val output = """
-"""
-  }
-
-  test("transform optJoin1") {
-    val sparqlParser = Sparql()
-    val sparqlSelect = sparqlParser.parseAll(sparqlParser.select, """
-PREFIX emplP: <http://hr.example/DB/Employee#>
-
-SELECT ?empName ?managName ?grandManagName
- WHERE {      ?emp          emplP:lastName   ?empName
-   OPTIONAL { ?emp          emplP:manager    ?manager .
-              ?manager      emplP:lastName   ?managName .
-              ?manager      emplP:manager    ?grandManager .
-              ?grandManager emplP:lastName   ?grandManagName } }
-""").get
-    val sqlParser = Sql()
-    val parsed = sqlParser.parseAll(sqlParser.select, """
-SELECT R_emp.lastName AS empName, G_opt1.grandManagName AS grandManagName, G_opt1.managName AS managName
-  FROM Employee AS R_emp
-       LEFT OUTER JOIN (
-       SELECT R_emp.empid AS emp, R_grandManager.lastName AS grandManagName, R_manager.manager AS grandManager, R_manager.lastName AS managName, R_emp.manager AS manager, 1 AS _DISJOINT_
-         FROM Employee AS R_emp
-              INNER JOIN Employee AS R_manager ON R_manager.empid=R_emp.manager
-              INNER JOIN Employee AS R_grandManager ON R_grandManager.empid=R_manager.manager
-        WHERE R_emp.empid IS NOT NULL AND R_grandManager.lastName IS NOT NULL AND R_manager.lastName IS NOT NULL
-                  ) AS G_opt1 ON G_opt1.emp=R_emp.empid
- WHERE R_emp.empid IS NOT NULL AND R_emp.lastName IS NOT NULL
-""").get
-    val generated = RDB2RDF(db, sparqlSelect, StemURI("http://hr.example/DB/"), false, false)
-    assert(generated === parsed)
-    val output = """
-"""
-  }
-
-  test("transform leadOpt1") {
-    val sparqlParser = Sparql()
-    val sparqlSelect = sparqlParser.parseAll(sparqlParser.select, """
-PREFIX emplP: <http://hr.example/DB/Employee#>
-
-SELECT ?empName ?grandManagName
- WHERE { OPTIONAL { ?taskLead     emplP:manager    ?emp .
-                    ?taskLead     emplP:manager    ?grandManager .
-                    ?grandManager emplP:lastName   ?grandManagName } 
-                  ?emp            emplP:lastName   ?empName }
-""").get
-    val sqlParser = Sql()
-    val parsed = sqlParser.parseAll(sqlParser.select, """
-SELECT R_emp.lastName AS empName, G_opt1.grandManagName AS grandManagName
-  FROM ( SELECT 1 AS _EMPTY_ ) AS _EMPTY_
-       LEFT OUTER JOIN (
-       SELECT R_taskLead.manager AS emp, R_grandManager.lastName AS grandManagName, R_taskLead.manager AS grandManager, R_taskLead.empid AS taskLead, 1 AS _DISJOINT_
-         FROM Employee AS R_taskLead
-              INNER JOIN Employee AS R_grandManager ON R_grandManager.empid=R_taskLead.manager
-        WHERE R_grandManager.lastName IS NOT NULL
-                  ) AS G_opt1 ON 1=1
-       INNER JOIN Employee AS R_emp
- WHERE (G_opt1._DISJOINT_ IS NULL OR R_emp.empid=G_opt1.emp)
-   AND R_emp.lastName IS NOT NULL
-""").get
-    val generated = RDB2RDF(db, sparqlSelect, StemURI("http://hr.example/DB/"), false, false)
-    assert(generated === parsed)
-    val output = """
-"""
-  }
-
-  test("transform nestOpt") {
-    val sparqlParser = Sparql()
-    val sparqlSelect = sparqlParser.parseAll(sparqlParser.select, """
-PREFIX emplP: <http://hr.example/DB/Employee#>
-
-SELECT ?empName ?managName ?grandManagName
- WHERE {          ?emp            emplP:lastName   ?empName
-       OPTIONAL { ?emp            emplP:manager    ?manager .
-                  ?manager        emplP:lastName   ?managName
-         OPTIONAL { ?manager      emplP:manager    ?grandManager .
-                    ?grandManager emplP:lastName   ?grandManagName } }
-       }
-""").get
-    val sqlParser = Sql()
-    val parsed = sqlParser.parseAll(sqlParser.select, """
-SELECT R_emp.lastName AS empName, G_opt1.grandManagName AS grandManagName, G_opt1.managName AS managName
-  FROM Employee AS R_emp
-       LEFT OUTER JOIN (
-       SELECT R_emp.empid AS emp, G_opt2.grandManagName AS grandManagName, G_opt2.grandManager AS grandManager, R_manager.lastName AS managName, R_emp.manager AS manager, 1 AS _DISJOINT_
-         FROM Employee AS R_emp
-              INNER JOIN Employee AS R_manager ON R_manager.empid=R_emp.manager
-              LEFT OUTER JOIN (
-              SELECT R_grandManager.lastName AS grandManagName, R_manager.manager AS grandManager, R_manager.empid AS manager, 2 AS _DISJOINT_
-                FROM Employee AS R_manager
-                     INNER JOIN Employee AS R_grandManager ON R_grandManager.empid=R_manager.manager
-               WHERE R_grandManager.lastName IS NOT NULL AND R_manager.empid IS NOT NULL
-                         ) AS G_opt2 ON G_opt2.manager=R_emp.manager
-        WHERE R_emp.empid IS NOT NULL
-          AND R_manager.lastName IS NOT NULL
-                  ) AS G_opt1 ON G_opt1.emp=R_emp.empid
- WHERE R_emp.empid IS NOT NULL AND R_emp.lastName IS NOT NULL
-""").get
-    val generated = RDB2RDF(db, sparqlSelect, StemURI("http://hr.example/DB/"), false, false)
-    assert(generated === parsed)
-    val output = """
-"""
-  }
-
-  test("transform equivOpt1") {
-    val sparqlParser = Sparql()
-    val sparqlSelect = sparqlParser.parseAll(sparqlParser.select, """
-PREFIX emplP: <http://hr.example/DB/Employee#>
-
-SELECT ?emp1Name ?emp2Name ?emp3Name
- WHERE { ?emp1     emplP:lastName   ?emp1Name
-         OPTIONAL { ?emp1     emplP:birthday   ?birthday }
-         ?emp2     emplP:lastName   ?emp2Name
-         OPTIONAL { ?emp2     emplP:birthday   ?birthday }
-         ?emp3     emplP:lastName   ?emp3Name .
-         ?emp3     emplP:birthday   ?birthday .
-         ?emp4     emplP:lastName   ?emp4Name .
-         ?emp4     emplP:birthday   ?birthday
-         FILTER ( ?emp1Name < ?emp2Name && ?emp2Name < ?emp3Name && ?emp3Name < ?emp4Name) }
-""").get
-    val sqlParser = Sql()
-    val parsed = sqlParser.parseAll(sqlParser.select, """
-SELECT R_emp1.lastName AS emp1Name, R_emp2.lastName AS emp2Name, R_emp3.lastName AS emp3Name
-  FROM Employee AS R_emp1
-       LEFT OUTER JOIN (
-       SELECT R_emp1.birthday AS birthday, R_emp1.empid AS emp1, 1 AS _DISJOINT_
-         FROM Employee AS R_emp1
-        WHERE R_emp1.birthday IS NOT NULL AND R_emp1.empid IS NOT NULL
-                  ) AS G_opt1 ON G_opt1.emp1=R_emp1.empid
-       INNER JOIN Employee AS R_emp2
-       LEFT OUTER JOIN (
-       SELECT R_emp2.birthday AS birthday, R_emp2.empid AS emp2, 3 AS _DISJOINT_
-         FROM Employee AS R_emp2
-        WHERE R_emp2.birthday IS NOT NULL AND R_emp2.empid IS NOT NULL
-                  ) AS G_opt3 ON (G_opt1._DISJOINT_ IS NULL OR G_opt3.birthday=G_opt1.birthday) AND G_opt3.emp2=R_emp2.empid
-       INNER JOIN Employee AS R_emp3
-       INNER JOIN Employee AS R_emp4
- WHERE (G_opt1._DISJOINT_ IS NULL OR R_emp3.birthday=G_opt1.birthday)
-   AND (G_opt1._DISJOINT_ IS NULL OR R_emp4.birthday=G_opt1.birthday)
-   AND R_emp1.lastName<R_emp2.lastName
-   AND R_emp2.empid IS NOT NULL AND R_emp2.lastName<R_emp3.lastName
-   AND R_emp3.lastName<R_emp4.lastName AND R_emp1.empid IS NOT NULL
-""").get
-    val generated = RDB2RDF(db, sparqlSelect, StemURI("http://hr.example/DB/"), false, false)
-    assert(generated === parsed)
-    val output = """
-"""
-  }
-
-  val hosp1:DatabaseDesc = DatabaseDesc(
-    Map(Relation("Person") -> 
-	RelationDesc(Option(Attribute("ID")), 
-		     Map(Attribute("ID") -> Value(Datatype.INTEGER),
-			 Attribute("MiddleName") -> Value(Datatype.STRING),
-			 Attribute("DateOfBirth") -> Value(Datatype.DATE),
-			 Attribute("SexDE") -> ForeignKey(Relation("Sex_DE"), Attribute("ID")))),
-	Relation("Sex_DE") -> 
-	RelationDesc(Option(Attribute("ID")),
-		     Map(Attribute("ID") -> Value(Datatype.INTEGER),
-			 Attribute("EntryName") -> Value(Datatype.STRING))),
-	Relation("Item_Medication") -> 
-	RelationDesc(Option(Attribute("ID")),
-		     Map(Attribute("ID") -> Value(Datatype.INTEGER),
-			 Attribute("PatientID") -> ForeignKey(Relation("Person"),  Attribute("ID")),
-			 Attribute("PerformedDTTM") -> Value(Datatype.DATE),
-			 Attribute("EntryName") -> Value(Datatype.STRING))),
-	Relation("Medication") -> 
-	RelationDesc(Option(Attribute("ID")),
-		     Map(Attribute("ID") -> Value(Datatype.INTEGER),
-			 Attribute("ItemID") -> ForeignKey(Relation("Item_Medication"),  Attribute("ID")),
-			 Attribute("MedDictDE") -> ForeignKey(Relation("Medication_DE"), Attribute("ID")))),
-	Relation("Medication_DE") -> 
-	RelationDesc(Option(Attribute("ID")),
-		     Map(Attribute("ID") -> Value(Datatype.INTEGER),
-			 Attribute("NDC") -> Value(Datatype.INTEGER))),
-	Relation("NDCcodes") -> 
-	RelationDesc(Option(Attribute("ID")),
-		     Map(Attribute("ID") -> Value(Datatype.INTEGER),
-			 Attribute("NDC") -> Value(Datatype.INTEGER),
-			 Attribute("ingredient") -> Value(Datatype.INTEGER)))
-      ))
-
-  test("swobjects/tests/healthCare/lists-notBound/db.rq AS OPTIONAL") {
-    val sparqlParser = Sparql()
-    val sparqlSelect = sparqlParser.parseAll(sparqlParser.select, """
-PREFIX Person: <http://hospital.example/DB/Person#>
-PREFIX Sex_DE: <http://hospital.example/DB/Sex_DE#>
-PREFIX Item_Medication: <http://hospital.example/DB/Item_Medication#>
-PREFIX Medication: <http://hospital.example/DB/Medication#>
-PREFIX Medication_DE: <http://hospital.example/DB/Medication_DE#>
-PREFIX NDCcodes: <http://hospital.example/DB/NDCcodes#>
-PREFIX xsd : <http://www.w3.org/2001/XMLSchema#>
-
-SELECT ?patient
- WHERE {
-    ?patient Person:MiddleName ?middleName .
-    ?patient Person:DateOfBirth ?dob .
-    ?patient Person:SexDE ?sexEntry .
-    ?sexEntry Sex_DE:EntryName ?sex .
-
-    ?indicItem Item_Medication:PatientID ?patient .
-    ?indicItem Item_Medication:PerformedDTTM ?indicDate .
-    ?indicItem Item_Medication:EntryName ?takes .
-    ?indicMed Medication:ItemID ?indicItem .
-    ?indicMed Medication:MedDictDE ?indicDE .
-    ?indicDE Medication_DE:NDC ?indicNDC .
-    ?indicCode NDCcodes:NDC ?indicNDC .
-    ?indicCode NDCcodes:ingredient "6809"^^xsd:integer
-
-    OPTIONAL {
-        ?disqualItem Item_Medication:PatientID ?patient .
-        ?disqualItem Item_Medication:PerformedDTTM ?disqualDate .
-        ?disqualItem Item_Medication:EntryName ?takes .
-        ?disqualMed Medication:ItemID ?disqualItem .
-        ?disqualMed Medication:MedDictDE ?disqualDE .
-        ?disqualDE Medication_DE:NDC ?disqualNDC .
-        ?disqualCode NDCcodes:NDC ?disqualNDC .
-        ?disqualCode NDCcodes:ingredient "11289"^^xsd:integer
-    }
-      }
-""").get // 	FILTER (!BOUND(?disqualItem))
-    val sqlParser = Sql()
-    val parsed = sqlParser.parseAll(sqlParser.select, """
-SELECT R_patient.ID AS patient
-  FROM Person AS R_patient
-       INNER JOIN Sex_DE AS R_sexEntry ON R_sexEntry.ID=R_patient.SexDE
-       INNER JOIN Item_Medication AS R_indicItem ON R_indicItem.PatientID=R_patient.ID
-       INNER JOIN Medication AS R_indicMed ON R_indicMed.ItemID=R_indicItem.ID
-       INNER JOIN Medication_DE AS R_indicDE ON R_indicDE.ID=R_indicMed.MedDictDE
-       INNER JOIN NDCcodes AS R_indicCode ON R_indicCode.NDC=R_indicDE.NDC
-       LEFT OUTER JOIN (
-   SELECT R_disqualCode.ID AS disqualCode, R_disqualMed.MedDictDE AS disqualDE,
-          R_disqualItem.PerformedDTTM AS disqualDate, R_disqualItem.ID AS disqualItem,
-          R_disqualMed.ID AS disqualMed, R_disqualDE.NDC AS disqualNDC,
-          R_disqualItem.PatientID AS patient, R_disqualItem.EntryName AS takes, 6 AS _DISJOINT_
-     FROM Item_Medication AS R_disqualItem
-          INNER JOIN Medication AS R_disqualMed ON R_disqualMed.ItemID=R_disqualItem.ID
-          INNER JOIN Medication_DE AS R_disqualDE ON R_disqualDE.ID=R_disqualMed.MedDictDE
-          INNER JOIN NDCcodes AS R_disqualCode ON R_disqualCode.NDC=R_disqualDE.NDC
-    WHERE R_disqualCode.ingredient=11289
-      AND R_disqualItem.EntryName IS NOT NULL
-      AND R_disqualItem.PatientID IS NOT NULL
-      AND R_disqualItem.PerformedDTTM IS NOT NULL
-              ) AS G_opt6 ON G_opt6.patient=R_patient.ID
-                         AND G_opt6.takes=R_indicItem.EntryName
- WHERE R_indicCode.ingredient=6809
-   AND R_indicItem.EntryName IS NOT NULL
-   AND R_indicItem.PerformedDTTM IS NOT NULL
-   AND R_patient.DateOfBirth IS NOT NULL
-   AND R_patient.MiddleName IS NOT NULL
-   AND R_sexEntry.EntryName IS NOT NULL
-""").get //    AND G_opt6.patient IS NULL
-    val generated = RDB2RDF(hosp1, sparqlSelect, StemURI("http://hospital.example/DB/"), false, false)
-    assert(generated === parsed)
-    val output = """
-"""
-  }
-
-  test("swobjects/tests/healthCare/lists-notBound/db.rq") {
-    val sparqlParser = Sparql()
-    val sparqlSelect = sparqlParser.parseAll(sparqlParser.select, """
-PREFIX Person: <http://hospital.example/DB/Person#>
-PREFIX Sex_DE: <http://hospital.example/DB/Sex_DE#>
-PREFIX Item_Medication: <http://hospital.example/DB/Item_Medication#>
-PREFIX Medication: <http://hospital.example/DB/Medication#>
-PREFIX Medication_DE: <http://hospital.example/DB/Medication_DE#>
-PREFIX NDCcodes: <http://hospital.example/DB/NDCcodes#>
-PREFIX xsd : <http://www.w3.org/2001/XMLSchema#>
-
-SELECT ?patient
- WHERE {
-    ?patient Person:MiddleName ?middleName .
-    ?patient Person:DateOfBirth ?dob .
-    ?patient Person:SexDE ?sexEntry .
-    ?sexEntry Sex_DE:EntryName ?sex .
-
-    ?indicItem Item_Medication:PatientID ?patient .
-    ?indicItem Item_Medication:PerformedDTTM ?indicDate .
-    ?indicItem Item_Medication:EntryName ?takes .
-    ?indicMed Medication:ItemID ?indicItem .
-    ?indicMed Medication:MedDictDE ?indicDE .
-    ?indicDE Medication_DE:NDC ?indicNDC .
-    ?indicCode NDCcodes:NDC ?indicNDC .
-    ?indicCode NDCcodes:ingredient "6809"^^xsd:integer
-
-    MINUS {
-        ?disqualItem Item_Medication:PatientID ?patient .
-        ?disqualItem Item_Medication:PerformedDTTM ?disqualDate .
-        ?disqualItem Item_Medication:EntryName ?takes .
-        ?disqualMed Medication:ItemID ?disqualItem .
-        ?disqualMed Medication:MedDictDE ?disqualDE .
-        ?disqualDE Medication_DE:NDC ?disqualNDC .
-        ?disqualCode NDCcodes:NDC ?disqualNDC .
-        ?disqualCode NDCcodes:ingredient "11289"^^xsd:integer
-    }
-      }
-""").get
-    val sqlParser = Sql()
-    val parsed = sqlParser.parseAll(sqlParser.select, """
-SELECT R_patient.ID AS patient
-  FROM Person AS R_patient
-       INNER JOIN Sex_DE AS R_sexEntry ON R_sexEntry.ID=R_patient.SexDE
-       INNER JOIN Item_Medication AS R_indicItem ON R_indicItem.PatientID=R_patient.ID
-       INNER JOIN Medication AS R_indicMed ON R_indicMed.ItemID=R_indicItem.ID
-       INNER JOIN Medication_DE AS R_indicDE ON R_indicDE.ID=R_indicMed.MedDictDE
-       INNER JOIN NDCcodes AS R_indicCode ON R_indicCode.NDC=R_indicDE.NDC
-       LEFT OUTER JOIN (
-   SELECT R_disqualCode.ID AS disqualCode, R_disqualMed.MedDictDE AS disqualDE,
-          R_disqualItem.PerformedDTTM AS disqualDate, R_disqualItem.ID AS disqualItem,
-          R_disqualMed.ID AS disqualMed, R_disqualDE.NDC AS disqualNDC,
-          R_disqualItem.PatientID AS patient, R_disqualItem.EntryName AS takes, 6 AS _DISJOINT_
-     FROM Item_Medication AS R_disqualItem
-          INNER JOIN Medication AS R_disqualMed ON R_disqualMed.ItemID=R_disqualItem.ID
-          INNER JOIN Medication_DE AS R_disqualDE ON R_disqualDE.ID=R_disqualMed.MedDictDE
-          INNER JOIN NDCcodes AS R_disqualCode ON R_disqualCode.NDC=R_disqualDE.NDC
-    WHERE R_disqualCode.ingredient=11289
-      AND R_disqualItem.EntryName IS NOT NULL
-      AND R_disqualItem.PatientID IS NOT NULL
-      AND R_disqualItem.PerformedDTTM IS NOT NULL
-              ) AS G_opt6 ON G_opt6.patient=R_patient.ID
-                         AND G_opt6.takes=R_indicItem.EntryName
- WHERE G_opt6._DISJOINT_ IS NULL
-   AND R_indicCode.ingredient=6809
-   AND R_indicItem.EntryName IS NOT NULL
-   AND R_indicItem.PerformedDTTM IS NOT NULL
-   AND R_patient.DateOfBirth IS NOT NULL
-   AND R_patient.MiddleName IS NOT NULL
-   AND R_sexEntry.EntryName IS NOT NULL
-""").get
-    val generated = RDB2RDF(hosp1, sparqlSelect, StemURI("http://hospital.example/DB/"), false, false)
-    assert(generated === parsed)
-    val output = """
-"""
-  }
-
-}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/test/scala/SparqlToSqlTest.scala	Sun Jan 31 11:12:27 2010 -0500
@@ -0,0 +1,1035 @@
+/* SparqlToSqlTest: transform SPARQL to SQL and compare against a reference query.
+ * $Id$
+ */
+
+package w3c.sw
+
+import org.scalatest.FunSuite
+import java.net.URI
+import w3c.sw.sql.{Sql,DatabaseDesc,Relation,RelationDesc,Attribute,Value,Datatype,ForeignKey,Name}
+import w3c.sw.sparql.Sparql
+import w3c.sw.rdb2rdf.{SparqlToSql,StemURI}
+
+/* The SparqlToSqlTest class transforms SPARQL queries to a relational data
+ * structure and compares them to a structure parsed from SQL.
+ */
+class SparqlToSqlTest extends FunSuite {
+
+  /* These tests use a schema and queries designed to work with this
+   * example database:
+       Employee+----------+------------+---------+
+       | empid | lastName | birthday   | manager |
+       +-------+----------+------------+---------+
+       |    18 | Johnson  | 1969-11-08 |    NULL |    TaskAssignments--------+
+       |   253 | Smith    | 1979-01-18 |      18 |    | id | task | employee |
+       |   255 | Jones    | 1981-03-24 |     253 |    +----+------+----------+
+       |    19 | Xu       | 1966-11-08 |    NULL |    |  1 |    1 |       18 |
+       |   254 | Ishita   | 1971-10-31 |     253 |    |  2 |    2 |      253 |
+       +-------+----------+------------+---------+    |  3 |    3 |       19 |
+       					              |  4 |    4 |      253 |
+       Tasks----+--------+------+		      |  5 |    1 |      253 |
+       | taskid | name   | lead |		      |  6 |    2 |      255 |
+       +--------+--------+------+		      |  7 |    3 |      255 |
+       |      1 | widget |   18 |		      |  8 |    4 |      254 |
+       |      2 | dingus |  253 |		      +----+------+----------+
+       |      3 | thingy |   18 |
+       |      4 | toy    |  253 |
+       +--------+--------+------+
+ */
+
+  val db:DatabaseDesc = DatabaseDesc(
+    Map(Relation("Employee") -> 
+	RelationDesc(Option(Attribute("empid")), 
+		     Map(Attribute("empid") -> Value(Datatype.INTEGER),
+			 Attribute("lastName") -> Value(Datatype.STRING),
+			 Attribute("birthday") -> Value(Datatype.DATE),
+			 Attribute("manager") -> ForeignKey(Relation("Employee"), Attribute("empid")))),
+	Relation("Tasks") -> 
+	RelationDesc(Option(Attribute("taskid")),
+		     Map(Attribute("taskid") -> Value(Datatype.INTEGER),
+			 Attribute("name") -> Value(Datatype.STRING),
+			 Attribute("lead") -> ForeignKey(Relation("Employee"), Attribute("empid")))),
+	Relation("TaskAssignments") -> 
+	RelationDesc(Option(Attribute("id")),
+		     Map(Attribute("task") -> ForeignKey(Relation("Tasks"), Attribute("taskid")), 
+			 Attribute("employee") -> ForeignKey(Relation("Employee"),  Attribute("empid"))))
+      ))
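+  /* Editorial note: with StemURI("http://hr.example/DB/"), attributes map to
+   * predicates like <http://hr.example/DB/Employee#lastName> and rows map to
+   * node URIs like <http://hr.example/DB/Employee/empid.18#record>, which is
+   * what the prefixes and terms in the queries below rely on.
+   */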
+
+/* The reference RDF representation (queriable with the SPARQL in the tests) is:
+ */
+
+  val dbAsTurtle = """
+PREFIX empP : <http://hr.example/DB/Employee#>
+PREFIX task : <http://hr.example/DB/Tasks#>
+PREFIX tass : <http://hr.example/DB/TaskAssignments#>
+PREFIX xsd : <http://www.w3.org/2001/XMLSchema#>
+
+<http://hr.example/DB/Employee/empid.18#record>
+   empP:lastName "Johnson"^^xsd:string ; empP:birthday "1969-11-08"^^xsd:date .
+<http://hr.example/DB/Employee/empid.253#record>
+   empP:lastName "Smith"^^xsd:string ; empP:birthday "1969-11-08"^^xsd:date ;
+   empP:manager <http://hr.example/DB/Employee/empid.18#record> .
+<http://hr.example/DB/Employee/empid.255#record>
+   empP:lastName "Jones"^^xsd:string ; empP:birthday "1981-03-24"^^xsd:date ;
+   empP:manager <http://hr.example/DB/Employee/empid.253#record> .
+<http://hr.example/DB/Employee/empid.19#record>
+   empP:lastName "Xu"^^xsd:string ; empP:birthday "1966-11-08"^^xsd:date .
+<http://hr.example/DB/Employee/empid.254#record>
+   empP:lastName "Ishita"^^xsd:string ; empP:birthday "1971-10-31"^^xsd:date ;
+   empP:manager <http://hr.example/DB/Employee/empid.253#record> .
+
+<http://hr.example/DB/Tasks/taskid.1#record>
+   task:lastName "widget"^^xsd:string ; 
+   task:manager <http://hr.example/DB/Employee/empid.18#record> .
+<http://hr.example/DB/Tasks/taskid.2#record>
+   task:lastName "dingus"^^xsd:string ; 
+   task:manager <http://hr.example/DB/Employee/empid.253#record> .
+<http://hr.example/DB/Tasks/taskid.3#record>
+   task:lastName "thingy"^^xsd:string ; 
+   task:manager <http://hr.example/DB/Employee/empid.18#record> .
+<http://hr.example/DB/Tasks/taskid.4#record>
+   task:lastName "toy"^^xsd:string ; 
+   task:manager <http://hr.example/DB/Employee/empid.253#record> .
+
+<http://hr.example/DB/TaskAssignments/id.1#record>
+   tass:task <http://hr.example/DB/Tasks/taskid.1#record> ;
+   tass:employee <http://hr.example/DB/Employee/empid.18#record> .
+<http://hr.example/DB/TaskAssignments/id.2#record>
+   tass:task <http://hr.example/DB/Tasks/taskid.2#record> ;
+   tass:employee <http://hr.example/DB/Employee/empid.253#record> .
+<http://hr.example/DB/TaskAssignments/id.3#record>
+   tass:task <http://hr.example/DB/Tasks/taskid.3#record> ;
+   tass:employee <http://hr.example/DB/Employee/empid.19#record> .
+<http://hr.example/DB/TaskAssignments/id.4#record>
+   tass:task <http://hr.example/DB/Tasks/taskid.4#record> ;
+   tass:employee <http://hr.example/DB/Employee/empid.253#record> .
+<http://hr.example/DB/TaskAssignments/id.5#record>
+   tass:task <http://hr.example/DB/Tasks/taskid.1#record> ;
+   tass:employee <http://hr.example/DB/Employee/empid.253#record> .
+<http://hr.example/DB/TaskAssignments/id.6#record>
+   tass:task <http://hr.example/DB/Tasks/taskid.2#record> ;
+   tass:employee <http://hr.example/DB/Employee/empid.255#record> .
+<http://hr.example/DB/TaskAssignments/id.7#record>
+   tass:task <http://hr.example/DB/Tasks/taskid.3#record> ;
+   tass:employee <http://hr.example/DB/Employee/empid.255#record> .
+<http://hr.example/DB/TaskAssignments/id.8#record>
+   tass:task <http://hr.example/DB/Tasks/taskid.4#record> ;
+   tass:employee <http://hr.example/DB/Employee/empid.254#record> .
+"""
+  /* The obvious test is that the results from the SPARQL query and the
+   * relational query match.
+   *
+   * Data can be converted to turtle strings, or left in native formats for
+   * the querier to map. The first examples contrast queries relying
+   * on a post-query transformation against those returning turtle atoms.
+   */
+
+  test("decompose a predicate uri in stem, rel and attr") {
+    val uri = sparql.Uri("http://hr.example/our/favorite/DB/Employee#lastName")
+    val puri:SparqlToSql.PUri = SparqlToSql.parsePredicateURI(uri)
+    assert(puri === SparqlToSql.PUri(SparqlToSql.Stem("http://hr.example/our/favorite/DB"),
+				     SparqlToSql.Rel("Employee"),
+				     SparqlToSql.Attr("lastName")))
+  }
+
+  test("decompose a object uri in stem, rel and attr") {
+    val uri = sparql.Uri("http://hr.example/our/favorite/DB/Employee/id.18#record")
+    val objuri:SparqlToScala.NodeUri = SparqlToScala.parseObjectURI(uri)
+    assert(objuri === SparqlToScala.NodeUri(SparqlToScala.Stem("http://hr.example/our/favorite/DB"),
+				      SparqlToScala.Rel("Employee"),
+				      SparqlToScala.Attr("id"),
+				      SparqlToScala.CellValue("18")))
+  }
+
+  /* Disable turtle string-izing (SparqlToSql param 5) and return native format: */
+  test("?s <p> <x>") {
+    val sparqlParser = Sparql()
+    val sparqlSelect = sparqlParser.parseAll(sparqlParser.select, """
+PREFIX empP : <http://hr.example/DB/Employee#>
+SELECT ?emp {
+?emp  empP:manager    <http://hr.example/DB/Employee/empid.18#record>
+}
+""").get
+    val sqlParser = Sql()
+    val parsed = sqlParser.parseAll(sqlParser.select, """
+SELECT R_emp.empid AS emp
+       FROM Employee AS R_emp
+            INNER JOIN Employee AS R_empid18
+ WHERE R_empid18.empid=R_emp.manager AND R_empid18.empid=18 AND R_emp.empid IS NOT NULL
+""").get
+    val generated = SparqlToSql(db, sparqlSelect, StemURI("http://hr.example/DB/"), true, false)
+    assert(generated === parsed)
+    val output = """
++-----+
+| emp |
++-----+
+| 253 | 
++-----+
+"""
+  }
+
+  /* Enable turtle string-izing and test URI generation: */
+  test("SELECT <x> { ?sf <p> <x>} (in-SQL Nodizer)") {
+    val sparqlParser = Sparql()
+    val sparqlSelect = sparqlParser.parseAll(sparqlParser.select, """
+PREFIX empP : <http://hr.example/DB/Employee#>
+SELECT ?emp {
+?emp  empP:manager    <http://hr.example/DB/Employee/empid.18#record>
+}
+""").get
+    val sqlParser = Sql()
+    val parsed = sqlParser.parseAll(sqlParser.select, """
+SELECT CONCAT("http://hr.example/DB/", "Employee", "/", "empid", ".", R_emp.empid, "#record") AS emp
+       FROM Employee AS R_emp
+            INNER JOIN Employee AS R_empid18
+ WHERE R_empid18.empid=R_emp.manager AND R_empid18.empid=18 AND R_emp.empid IS NOT NULL
+""").get
+    val generated = SparqlToSql(db, sparqlSelect, StemURI("http://hr.example/DB/"), true, true)
+    assert(generated === parsed)
+    val output = """
++------------------------------------------------+
+| emp                                            |
++------------------------------------------------+
+| http://hr.example/DB/Employee/empid.253#record | 
++------------------------------------------------+
+"""
+  }
+
+  /* Enable turtle string-izing and test RDFLiteral generation: */
+  test("SELECT <x> { ?sf <p> \"asdf\"} (in-SQL Nodizer)") {
+    val sparqlParser = Sparql()
+    val sparqlSelect = sparqlParser.parseAll(sparqlParser.select, """
+PREFIX empP : <http://hr.example/DB/Employee#>
+SELECT ?name {
+?emp  empP:lastName  ?name
+}
+""").get
+    val sqlParser = Sql()
+    val parsed = sqlParser.parseAll(sqlParser.select, """
+SELECT CONCAT("'", R_emp.lastName, "'^^<http://www.w3.org/2001/XMLSchema#string>") AS name
+       FROM Employee AS R_emp
+ WHERE R_emp.empid IS NOT NULL AND R_emp.lastName IS NOT NULL
+""").get
+    val generated = SparqlToSql(db, sparqlSelect, StemURI("http://hr.example/DB/"), true, true)
+    assert(generated === parsed)
+    val output = """
++----------------------------------------------------+
+| name                                               |
++----------------------------------------------------+
+| Johnson^^<http://www.w3.org/2001/XMLSchema#string> | 
+| Smith^^<http://www.w3.org/2001/XMLSchema#string>   | 
+| Jones^^<http://www.w3.org/2001/XMLSchema#string>   | 
+| Xu^^<http://www.w3.org/2001/XMLSchema#string>      | 
+| Ishita^^<http://www.w3.org/2001/XMLSchema#string>  | 
++----------------------------------------------------+
+"""
+  }
+
+  test("<s> <p> ?x") {
+    val sparqlParser = Sparql()
+    val sparqlSelect = sparqlParser.parseAll(sparqlParser.select, """
+PREFIX empP : <http://hr.example/DB/Employee#>
+SELECT ?manager {
+<http://hr.example/DB/Employee/empid.253#record>  empP:manager    ?manager
+}
+""").get
+    val sqlParser = Sql()
+    val parsed = sqlParser.parseAll(sqlParser.select, """
+SELECT R_manager.empid AS manager
+       FROM Employee AS R_empid253
+            INNER JOIN Employee AS R_manager
+ WHERE R_manager.empid=R_empid253.manager AND R_empid253.empid=253
+""").get
+    val generated = SparqlToSql(db, sparqlSelect, StemURI("http://hr.example/DB/"), true, false)
+    assert(generated === parsed)
+    val output = """
++---------+
+| manager |
++---------+
+|      18 | 
++---------+
+"""
+  }
+
+  test("?s <p> 18") {
+    /* Literal foreign keys should probably throw an error;
+     * instead this does what the user presumably meant. */
+    val sparqlParser = Sparql()
+    val sparqlSelect = sparqlParser.parseAll(sparqlParser.select, """
+PREFIX empP : <http://hr.example/DB/Employee#>
+PREFIX xsd : <http://www.w3.org/2001/XMLSchema#>
+SELECT ?emp {
+?emp  empP:manager    "18"^^xsd:integer
+}
+""").get
+    val sqlParser = Sql()
+    val parsed = sqlParser.parseAll(sqlParser.select, """
+SELECT R_emp.empid AS emp
+       FROM Employee AS R_emp
+            INNER JOIN Employee AS R_18
+ WHERE R_18.empid=R_emp.manager AND R_18.empid=18 AND R_emp.empid IS NOT NULL
+""").get
+    val generated = SparqlToSql(db, sparqlSelect, StemURI("http://hr.example/DB/"), true, false)
+    assert(generated === parsed)
+    val output = """
++-----+
+| emp |
++-----+
+| 253 | 
++-----+
+"""
+  }
+
+  test("?s1 <p> ?x . ?s2 <p> ?x") {
+    val sparqlParser = Sparql()
+    val sparqlSelect = sparqlParser.parseAll(sparqlParser.select, """
+PREFIX tass : <http://hr.example/DB/TaskAssignments#>
+SELECT ?task1 ?task2 {
+   ?task1  tass:employee    ?who .
+   ?task2  tass:employee    ?who
+   FILTER(?task1 < ?task2)
+}
+""").get
+    val sqlParser = Sql()
+    val parsed = sqlParser.parseAll(sqlParser.select, """
+SELECT R_task1.id AS task1, R_task2.id AS task2
+  FROM TaskAssignments AS R_task1
+       INNER JOIN Employee AS R_who ON R_who.empid=R_task1.employee
+       INNER JOIN TaskAssignments AS R_task2 ON R_who.empid=R_task2.employee
+ WHERE R_task1.id<R_task2.id 
+""").get
+    val generated = SparqlToSql(db, sparqlSelect, StemURI("http://hr.example/DB/"), true, false)
+    assert(generated === parsed)
+    val output = """
++-------+-------+
+| task1 | task2 |
++-------+-------+
+|     2 |     4 | 
+|     2 |     5 | 
+|     4 |     5 | 
+|     6 |     7 | 
++-------+-------+
+"""
+  }
+
+  test("transform SQLbgp") {
+    val sparqlParser = Sparql()
+    val sparqlSelect = sparqlParser.parseAll(sparqlParser.select, """
+PREFIX empP : <http://hr.example/DB/Employee#>
+SELECT ?empName ?manageName {
+?emp      empP:lastName   ?empName .
+?emp      empP:manager    ?manager .
+?manager  empP:lastName   ?manageName 
+}
+""").get
+    val sqlParser = Sql()
+    val parsed = sqlParser.parseAll(sqlParser.select, """
+SELECT R_emp.lastName AS empName, R_manager.lastName AS manageName
+       FROM Employee AS R_emp
+            INNER JOIN Employee AS R_manager ON R_manager.empid=R_emp.manager
+ WHERE R_emp.lastName IS NOT NULL AND R_manager.lastName IS NOT NULL
+""").get
+    val generated = SparqlToSql(db, sparqlSelect, StemURI("http://hr.example/DB/"), true, false)
+    assert(generated === parsed)
+    val output = """
++---------+------------+
+| empName | manageName |
++---------+------------+
+| Smith   | Johnson    | 
+| Jones   | Smith      | 
+| Ishita  | Smith      | 
++---------+------------+
+"""
+  }
+
+  test("transform tup1 no-enforce") {
+    val sparqlParser = Sparql()
+    val sparqlSelect = sparqlParser.parseAll(sparqlParser.select, """
+PREFIX empP : <http://hr.example/DB/Employee#>
+SELECT ?empName {
+ ?emp      empP:lastName   ?empName .
+ ?emp      empP:manager    <http://hr.example/DB/Employee/empid.18#record>
+ }
+""").get
+    val sqlParser = Sql()
+    val parsed = sqlParser.parseAll(sqlParser.select, """
+SELECT R_emp.lastName AS empName
+       FROM Employee AS R_emp
+ WHERE R_emp.manager=18 AND R_emp.lastName IS NOT NULL
+""").get
+    val generated = SparqlToSql(db, sparqlSelect, StemURI("http://hr.example/DB/"), false, false)
+    assert(generated === parsed)
+    val output = """
++---------+
+| empName |
++---------+
+| Smith   | 
++---------+
+"""
+  }
+
+  test("transform tup1 enforce") {
+    val sparqlParser = Sparql()
+    val sparqlSelect = sparqlParser.parseAll(sparqlParser.select, """
+PREFIX empP : <http://hr.example/DB/Employee#>
+SELECT ?empName {
+ ?emp      empP:lastName   ?empName .
+ ?emp      empP:manager    <http://hr.example/DB/Employee/empid.18#record>
+ }
+""").get
+    val sqlParser = Sql()
+    val parsed = sqlParser.parseAll(sqlParser.select, """
+SELECT R_emp.lastName AS empName
+       FROM Employee AS R_emp
+            INNER JOIN Employee AS R_empid18
+ WHERE R_empid18.empid=R_emp.manager AND R_empid18.empid=18 AND R_emp.lastName IS NOT NULL
+""").get
+    val generated = SparqlToSql(db, sparqlSelect, StemURI("http://hr.example/DB/"), true, false)
+    assert(generated === parsed)
+    val output = """
++---------+
+| empName |
++---------+
+| Smith   | 
++---------+
+"""
+  }
+
+
+  test("transform litConst1") {
+    val sparqlParser = Sparql()
+    val sparqlSelect = sparqlParser.parseAll(sparqlParser.select, """
+PREFIX empP : <http://hr.example/DB/Employee#>
+PREFIX xsd : <http://www.w3.org/2001/XMLSchema#>
+SELECT ?empName {
+ ?emp      empP:lastName   ?empName .
+ ?emp      empP:manager    ?manager .
+ ?manager  empP:lastName   "Johnson"^^xsd:string
+ }
+""").get
+    val sqlParser = Sql()
+    val parsed = sqlParser.parseAll(sqlParser.select, """
+SELECT R_emp.lastName AS empName
+  FROM Employee AS R_emp
+       INNER JOIN Employee AS R_manager
+WHERE R_manager.empid=R_emp.manager AND R_manager.lastName="Johnson" AND R_emp.lastName IS NOT NULL
+""").get
+    val generated = SparqlToSql(db, sparqlSelect, StemURI("http://hr.example/DB/"), true, false)
+    assert(generated === parsed)
+    val output = """
++---------+
+| empName |
++---------+
+| Smith   | 
++---------+
+"""
+  }
+
+  test("transform filter1") {
+    val sparqlParser = Sparql()
+    val sparqlSelect = sparqlParser.parseAll(sparqlParser.select, """
+PREFIX empP : <http://hr.example/DB/Employee#>
+PREFIX task : <http://hr.example/DB/Tasks#>
+PREFIX tass : <http://hr.example/DB/TaskAssignments#>
+SELECT ?empName ?grandManagName {
+         ?emp          empP:lastName   ?empName .
+         ?emp          empP:birthday   ?empBday .
+         ?lower        tass:employee  ?emp .
+         ?lower        tass:task      ?ltask .
+         ?ltask        task:lead      ?taskLead .
+         ?taskLead     empP:birthday   ?manBday .
+         ?upper        tass:employee  ?taskLead .
+         ?upper        tass:task      ?utask .
+         ?utask        task:lead      ?grandManager .
+         ?grandManager empP:birthday   ?grandManBday .
+         ?grandManager empP:lastName   ?grandManagName
+         FILTER (?manBday < ?empBday && ?grandManBday < ?manBday)
+}
+""").get
+    val sqlParser = Sql()
+    val parsed = sqlParser.parseAll(sqlParser.select, """
+SELECT R_emp.lastName AS empName,
+       R_grandManager.lastName AS grandManagName
+  FROM Employee AS R_emp
+       INNER JOIN TaskAssignments AS R_lower ON R_emp.empid=R_lower.employee
+       INNER JOIN Tasks AS R_ltask ON R_ltask.taskid=R_lower.task
+       INNER JOIN Employee AS R_taskLead ON R_taskLead.empid=R_ltask.lead
+       INNER JOIN TaskAssignments AS R_upper ON R_taskLead.empid=R_upper.employee
+       INNER JOIN Tasks AS R_utask ON R_utask.taskid=R_upper.task
+       INNER JOIN Employee AS R_grandManager ON R_grandManager.empid=R_utask.lead
+ WHERE R_taskLead.birthday<R_emp.birthday AND R_grandManager.birthday<R_taskLead.birthday
+   AND R_emp.lastName IS NOT NULL AND R_grandManager.lastName IS NOT NULL
+""").get
+    val generated = SparqlToSql(db, sparqlSelect, StemURI("http://hr.example/DB/"), true, false)
+    assert(generated === parsed)
+    val output = """
++---------+----------------+
+| empName | grandManagName |
++---------+----------------+
+| Jones   | Johnson        | 
++---------+----------------+
+"""
+  }
+
+  /* Employees above and below Smith */
+  test("transform disj1") {
+    val sparqlParser = Sparql()
+    val sparqlSelect = sparqlParser.parseAll(sparqlParser.select, """
+PREFIX empP : <http://hr.example/DB/Employee#>
+PREFIX task : <http://hr.example/DB/Tasks#>
+PREFIX tass : <http://hr.example/DB/TaskAssignments#>
+PREFIX xsd : <http://www.w3.org/2001/XMLSchema#>
+SELECT ?name
+ WHERE { ?who empP:lastName "Smith"^^xsd:string
+         { ?above   tass:employee  ?who .
+           ?above   tass:task      ?atask .
+           ?atask   task:lead      ?taskLead .
+           ?taskLead empP:lastName ?name }
+         UNION
+         { ?below   tass:task      ?btask .
+           ?btask   task:lead      ?who .
+           ?below   tass:employee  ?managed .
+           ?managed empP:lastName  ?name } }
+""").get
+    val sqlParser = Sql()
+    val parsed = sqlParser.parseAll(sqlParser.select, """
+SELECT G_union1.name AS name
+  FROM Employee AS R_who
+       INNER JOIN (
+         SELECT 0 AS _DISJOINT_, R_above.id AS above, R_above.task AS atask,
+                R_above.employee AS who, R_taskLead.lastName AS name, 
+                R_atask.lead AS taskLead,
+                NULL AS below, NULL AS btask, NULL AS managed
+           FROM TaskAssignments AS R_above
+                INNER JOIN Tasks AS R_atask ON R_atask.taskid=R_above.task
+                INNER JOIN Employee AS R_taskLead ON R_taskLead.empid=R_atask.lead
+          WHERE R_above.employee IS NOT NULL
+            AND R_taskLead.lastName IS NOT NULL
+       UNION
+         SELECT 1 AS _DISJOINT_, NULL AS above, NULL AS atask,
+                R_btask.lead AS who,R_managed.lastName AS name,
+                NULL AS taskLead,
+                R_below.id AS below, R_below.task AS btask,
+                R_below.employee AS managed
+           FROM TaskAssignments AS R_below
+                INNER JOIN Tasks AS R_btask ON R_btask.taskid=R_below.task
+                INNER JOIN Employee AS R_managed ON R_managed.empid=R_below.employee
+          WHERE R_managed.lastName IS NOT NULL
+            AND R_btask.lead IS NOT NULL
+                       ) AS G_union1
+ WHERE R_who.lastName="Smith"
+       AND R_who.empid IS NOT NULL
+       AND (G_union1._DISJOINT_!=0 OR G_union1.who=R_who.empid)
+       AND (G_union1._DISJOINT_!=1 OR G_union1.who=R_who.empid)
+""").get
+    val generated = SparqlToSql(db, sparqlSelect, StemURI("http://hr.example/DB/"), false, false)
+    assert(generated === parsed)
+    val output = """
++---------+
+| name    |
++---------+
+| Johnson | 
+| Smith   | 
+| Smith   | 
+| Smith   | 
+| Jones   | 
+| Smith   | 
+| Ishita  | 
++---------+
+"""
+  }
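+
+  /* A note on the UNION translations here and below, read off the expected
+   * SQL: each branch of a SPARQL UNION becomes a SELECT that tags its rows
+   * with a distinct _DISJOINT_ constant (0, 1, ...) and NULL-pads the
+   * variables it does not bind. Constraints tying a shared variable back to
+   * the outer query are then guarded per branch, e.g.
+   *
+   *   (G_union1._DISJOINT_!=0 OR G_union1.who=R_who.empid)
+   *
+   * so each equality is enforced only for rows produced by the branch whose
+   * tag it names.
+   */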
+
+  test("transform assymDisj1") {
+    val sparqlParser = Sparql()
+    val sparqlSelect = sparqlParser.parseAll(sparqlParser.select, """
+PREFIX empP : <http://hr.example/DB/Employee#>
+PREFIX task : <http://hr.example/DB/Tasks#>
+PREFIX tass : <http://hr.example/DB/TaskAssignments#>
+PREFIX xsd : <http://www.w3.org/2001/XMLSchema#>
+SELECT ?name ?bday
+ WHERE { { ?above   tass:employee  ?who .
+           ?above   tass:task      ?atask .
+           ?atask   task:lead      ?taskLead .
+           ?taskLead empP:lastName  ?name }
+         UNION
+         { ?below   tass:task     ?btask .
+           ?btask   task:lead     ?who .
+           ?below   tass:employee ?managed .
+           ?managed empP:lastName  ?name .
+           ?managed empP:birthday  ?bday } 
+         ?who empP:lastName "Smith"^^xsd:string .
+         ?who empP:birthday ?bday }
+""").get
+    val sqlParser = Sql()
+    val parsed = sqlParser.parseAll(sqlParser.select, """
+SELECT G_union0.name AS name, G_union0.bday AS bday
+  FROM (
+         SELECT R_above.id AS above, R_above.task AS atask, NULL AS bday, NULL AS below,
+                NULL AS btask, NULL AS managed, R_taskLead.lastName AS name,
+                R_atask.lead AS taskLead, R_above.employee AS who, 0 AS _DISJOINT_
+           FROM TaskAssignments AS R_above
+                INNER JOIN Tasks AS R_atask ON R_atask.taskid=R_above.task
+                INNER JOIN Employee AS R_taskLead ON R_taskLead.empid=R_atask.lead
+          WHERE R_above.employee IS NOT NULL AND R_taskLead.lastName IS NOT NULL
+       UNION
+         SELECT NULL AS above, NULL AS atask, R_managed.birthday AS bday, R_below.id AS below,
+                R_below.task AS btask, R_below.employee AS managed, R_managed.lastName AS name,
+                NULL AS taskLead, R_btask.lead AS who, 1 AS _DISJOINT_
+           FROM TaskAssignments AS R_below
+                INNER JOIN Tasks AS R_btask ON R_btask.taskid=R_below.task
+                INNER JOIN Employee AS R_managed ON R_managed.empid=R_below.employee
+          WHERE R_btask.lead IS NOT NULL AND R_managed.birthday IS NOT NULL AND R_managed.lastName IS NOT NULL
+                  ) AS G_union0
+       INNER JOIN Employee AS R_who
+ WHERE (G_union0._DISJOINT_!=0 OR R_who.empid=G_union0.who)
+   AND (G_union0._DISJOINT_!=1 OR R_who.birthday=G_union0.bday)
+   AND (G_union0._DISJOINT_!=1 OR R_who.empid=G_union0.who)
+   AND R_who.lastName="Smith"
+""").get // !!!    AND (G_union0.bday IS NOT NULL) AND (G_union0.who IS NOT NULL)
+    val generated = SparqlToSql(db, sparqlSelect, StemURI("http://hr.example/DB/"), false, false)
+    assert(generated === parsed)
+    val output = """
+"""
+  }
+
+  test("transform assymDisj1 reversed") {
+    val sparqlParser = Sparql()
+    val sparqlSelect = sparqlParser.parseAll(sparqlParser.select, """
+PREFIX empP : <http://hr.example/DB/Employee#>
+PREFIX task : <http://hr.example/DB/Tasks#>
+PREFIX tass : <http://hr.example/DB/TaskAssignments#>
+PREFIX xsd : <http://www.w3.org/2001/XMLSchema#>
+SELECT ?name
+ WHERE {
+         ?who empP:lastName "Smith"^^xsd:string .
+         ?who empP:birthday ?bday
+         { ?above   tass:employee ?who .
+           ?above   tass:task     ?atask .
+           ?atask   task:lead     ?taskLead .
+           ?taskLead empP:lastName  ?name }
+         UNION
+         { ?below   tass:task     ?btask .
+           ?btask   task:lead     ?who .
+           ?below   tass:employee ?managed .
+           ?managed empP:lastName  ?name .
+           ?managed empP:birthday  ?bday } 
+       }
+""").get
+    val sqlParser = Sql()
+    val parsed = sqlParser.parseAll(sqlParser.select, """
+SELECT G_union1.name AS name
+  FROM Employee AS R_who
+       INNER JOIN (
+         SELECT R_above.id AS above, R_above.task AS atask, NULL AS bday, NULL AS below,
+                NULL AS btask, NULL AS managed, R_taskLead.lastName AS name,
+                R_atask.lead AS taskLead, R_above.employee AS who, 0 AS _DISJOINT_
+           FROM TaskAssignments AS R_above
+                INNER JOIN Tasks AS R_atask ON R_atask.taskid=R_above.task
+                INNER JOIN Employee AS R_taskLead ON R_taskLead.empid=R_atask.lead
+          WHERE R_above.employee IS NOT NULL AND R_taskLead.lastName IS NOT NULL
+       UNION
+         SELECT NULL AS above, NULL AS atask, R_managed.birthday AS bday, R_below.id AS below,
+                R_below.task AS btask, R_below.employee AS managed, R_managed.lastName AS name,
+                NULL AS taskLead, R_btask.lead AS who, 1 AS _DISJOINT_
+           FROM TaskAssignments AS R_below
+                INNER JOIN Tasks AS R_btask ON R_btask.taskid=R_below.task
+                INNER JOIN Employee AS R_managed ON R_managed.empid=R_below.employee
+          WHERE R_btask.lead IS NOT NULL AND R_managed.birthday IS NOT NULL AND R_managed.lastName IS NOT NULL
+                  ) AS G_union1
+ WHERE (G_union1._DISJOINT_!=0 OR G_union1.who=R_who.empid)
+   AND (G_union1._DISJOINT_!=1 OR G_union1.bday=R_who.birthday)
+   AND (G_union1._DISJOINT_!=1 OR G_union1.who=R_who.empid)
+   AND R_who.birthday IS NOT NULL AND R_who.lastName="Smith"
+""").get
+    val generated = SparqlToSql(db, sparqlSelect, StemURI("http://hr.example/DB/"), false, false)
+    assert(generated === parsed)
+    val output = """
+"""
+  }
+
+  test("transform assymDisj1 interspersed") {
+    val sparqlParser = Sparql()
+    val sparqlSelect = sparqlParser.parseAll(sparqlParser.select, """
+PREFIX empP : <http://hr.example/DB/Employee#>
+PREFIX task : <http://hr.example/DB/Tasks#>
+PREFIX tass : <http://hr.example/DB/TaskAssignments#>
+PREFIX xsd : <http://www.w3.org/2001/XMLSchema#>
+SELECT ?name
+ WHERE {
+         ?who empP:lastName "Smith"^^xsd:string
+         { ?above   tass:employee  ?who .
+           ?above   tass:task      ?atask .
+           ?atask   task:lead      ?taskLead .
+           ?taskLead empP:lastName ?name }
+         UNION
+         { ?below   tass:task     ?btask .
+           ?btask   task:lead     ?who .
+           ?below   tass:employee ?managed .
+           ?managed empP:lastName ?name .
+           ?managed empP:birthday ?bday } 
+         ?who empP:birthday ?bday
+       }
+""").get
+    val sqlParser = Sql()
+    val parsed = sqlParser.parseAll(sqlParser.select, """
+SELECT G_union1.name AS name
+  FROM Employee AS R_who
+       INNER JOIN (
+         SELECT R_above.id AS above, R_above.task AS atask, NULL AS bday, NULL AS below,
+                NULL AS btask, NULL AS managed, R_taskLead.lastName AS name,
+                R_atask.lead AS taskLead, R_above.employee AS who, 0 AS _DISJOINT_
+           FROM TaskAssignments AS R_above
+                INNER JOIN Tasks AS R_atask ON R_atask.taskid=R_above.task
+                INNER JOIN Employee AS R_taskLead ON R_taskLead.empid=R_atask.lead
+          WHERE R_above.employee IS NOT NULL AND R_taskLead.lastName IS NOT NULL
+       UNION
+         SELECT NULL AS above, NULL AS atask, R_managed.birthday AS bday, R_below.id AS below,
+                R_below.task AS btask, R_below.employee AS managed, R_managed.lastName AS name,
+                NULL AS taskLead, R_btask.lead AS who, 1 AS _DISJOINT_
+           FROM TaskAssignments AS R_below
+                INNER JOIN Tasks AS R_btask ON R_btask.taskid=R_below.task
+                INNER JOIN Employee AS R_managed ON R_managed.empid=R_below.employee
+          WHERE R_btask.lead IS NOT NULL AND R_managed.birthday IS NOT NULL AND R_managed.lastName IS NOT NULL
+                  ) AS G_union1
+ WHERE (G_union1._DISJOINT_!=0 OR G_union1.who=R_who.empid)
+   AND (G_union1._DISJOINT_!=1 OR G_union1.who=R_who.empid)
+   AND (G_union1._DISJOINT_!=1 OR R_who.birthday=G_union1.bday)
+   AND R_who.lastName="Smith"
+""").get
+    val generated = SparqlToSql(db, sparqlSelect, StemURI("http://hr.example/DB/"), false, false)
+    assert(generated === parsed)
+    val output = """
+"""
+  }
+
+  test("transform optJoin1") {
+    val sparqlParser = Sparql()
+    val sparqlSelect = sparqlParser.parseAll(sparqlParser.select, """
+PREFIX emplP: <http://hr.example/DB/Employee#>
+
+SELECT ?empName ?managName ?grandManagName
+ WHERE {      ?emp          emplP:lastName   ?empName
+   OPTIONAL { ?emp          emplP:manager    ?manager .
+              ?manager      emplP:lastName   ?managName .
+              ?manager      emplP:manager    ?grandManager .
+              ?grandManager emplP:lastName   ?grandManagName } }
+""").get
+    val sqlParser = Sql()
+    val parsed = sqlParser.parseAll(sqlParser.select, """
+SELECT R_emp.lastName AS empName, G_opt1.grandManagName AS grandManagName, G_opt1.managName AS managName
+  FROM Employee AS R_emp
+       LEFT OUTER JOIN (
+       SELECT R_emp.empid AS emp, R_grandManager.lastName AS grandManagName, R_manager.manager AS grandManager, R_manager.lastName AS managName, R_emp.manager AS manager, 1 AS _DISJOINT_
+         FROM Employee AS R_emp
+              INNER JOIN Employee AS R_manager ON R_manager.empid=R_emp.manager
+              INNER JOIN Employee AS R_grandManager ON R_grandManager.empid=R_manager.manager
+        WHERE R_emp.empid IS NOT NULL AND R_grandManager.lastName IS NOT NULL AND R_manager.lastName IS NOT NULL
+                  ) AS G_opt1 ON G_opt1.emp=R_emp.empid
+ WHERE R_emp.empid IS NOT NULL AND R_emp.lastName IS NOT NULL
+""").get
+    val generated = SparqlToSql(db, sparqlSelect, StemURI("http://hr.example/DB/"), false, false)
+    assert(generated === parsed)
+    val output = """
+"""
+  }
+
+  test("transform leadOpt1") {
+    val sparqlParser = Sparql()
+    val sparqlSelect = sparqlParser.parseAll(sparqlParser.select, """
+PREFIX emplP: <http://hr.example/DB/Employee#>
+
+SELECT ?empName ?grandManagName
+ WHERE { OPTIONAL { ?taskLead     emplP:manager    ?emp .
+                    ?taskLead     emplP:manager    ?grandManager .
+                    ?grandManager emplP:lastName   ?grandManagName } 
+                  ?emp            emplP:lastName   ?empName }
+""").get
+    val sqlParser = Sql()
+    val parsed = sqlParser.parseAll(sqlParser.select, """
+SELECT R_emp.lastName AS empName, G_opt1.grandManagName AS grandManagName
+  FROM ( SELECT 1 AS _EMPTY_ ) AS _EMPTY_
+       LEFT OUTER JOIN (
+       SELECT R_taskLead.manager AS emp, R_grandManager.lastName AS grandManagName, R_taskLead.manager AS grandManager, R_taskLead.empid AS taskLead, 1 AS _DISJOINT_
+         FROM Employee AS R_taskLead
+              INNER JOIN Employee AS R_grandManager ON R_grandManager.empid=R_taskLead.manager
+        WHERE R_grandManager.lastName IS NOT NULL
+                  ) AS G_opt1 ON 1=1
+       INNER JOIN Employee AS R_emp
+ WHERE (G_opt1._DISJOINT_ IS NULL OR R_emp.empid=G_opt1.emp)
+   AND R_emp.lastName IS NOT NULL
+""").get
+    val generated = SparqlToSql(db, sparqlSelect, StemURI("http://hr.example/DB/"), false, false)
+    assert(generated === parsed)
+    val output = """
+"""
+  }
+
+  test("transform nestOpt") {
+    val sparqlParser = Sparql()
+    val sparqlSelect = sparqlParser.parseAll(sparqlParser.select, """
+PREFIX emplP: <http://hr.example/DB/Employee#>
+
+SELECT ?empName ?managName ?grandManagName
+ WHERE {          ?emp            emplP:lastName   ?empName
+       OPTIONAL { ?emp            emplP:manager    ?manager .
+                  ?manager        emplP:lastName   ?managName
+         OPTIONAL { ?manager      emplP:manager    ?grandManager .
+                    ?grandManager emplP:lastName   ?grandManagName } }
+       }
+""").get
+    val sqlParser = Sql()
+    val parsed = sqlParser.parseAll(sqlParser.select, """
+SELECT R_emp.lastName AS empName, G_opt1.grandManagName AS grandManagName, G_opt1.managName AS managName
+  FROM Employee AS R_emp
+       LEFT OUTER JOIN (
+       SELECT R_emp.empid AS emp, G_opt2.grandManagName AS grandManagName, G_opt2.grandManager AS grandManager, R_manager.lastName AS managName, R_emp.manager AS manager, 1 AS _DISJOINT_
+         FROM Employee AS R_emp
+              INNER JOIN Employee AS R_manager ON R_manager.empid=R_emp.manager
+              LEFT OUTER JOIN (
+              SELECT R_grandManager.lastName AS grandManagName, R_manager.manager AS grandManager, R_manager.empid AS manager, 2 AS _DISJOINT_
+                FROM Employee AS R_manager
+                     INNER JOIN Employee AS R_grandManager ON R_grandManager.empid=R_manager.manager
+               WHERE R_grandManager.lastName IS NOT NULL AND R_manager.empid IS NOT NULL
+                         ) AS G_opt2 ON G_opt2.manager=R_emp.manager
+        WHERE R_emp.empid IS NOT NULL
+          AND R_manager.lastName IS NOT NULL
+                  ) AS G_opt1 ON G_opt1.emp=R_emp.empid
+ WHERE R_emp.empid IS NOT NULL AND R_emp.lastName IS NOT NULL
+""").get
+    val generated = SparqlToSql(db, sparqlSelect, StemURI("http://hr.example/DB/"), false, false)
+    assert(generated === parsed)
+    val output = """
+"""
+  }
+
+  test("transform equivOpt1") {
+    val sparqlParser = Sparql()
+    val sparqlSelect = sparqlParser.parseAll(sparqlParser.select, """
+PREFIX emplP: <http://hr.example/DB/Employee#>
+
+SELECT ?emp1Name ?emp2Name ?emp3Name
+ WHERE { ?emp1     emplP:lastName   ?emp1Name
+         OPTIONAL { ?emp1     emplP:birthday   ?birthday }
+         ?emp2     emplP:lastName   ?emp2Name
+         OPTIONAL { ?emp2     emplP:birthday   ?birthday }
+         ?emp3     emplP:lastName   ?emp3Name .
+         ?emp3     emplP:birthday   ?birthday .
+         ?emp4     emplP:lastName   ?emp4Name .
+         ?emp4     emplP:birthday   ?birthday
+         FILTER ( ?emp1Name < ?emp2Name && ?emp2Name < ?emp3Name && ?emp3Name < ?emp4Name) }
+""").get
+    val sqlParser = Sql()
+    val parsed = sqlParser.parseAll(sqlParser.select, """
+SELECT R_emp1.lastName AS emp1Name, R_emp2.lastName AS emp2Name, R_emp3.lastName AS emp3Name
+  FROM Employee AS R_emp1
+       LEFT OUTER JOIN (
+       SELECT R_emp1.birthday AS birthday, R_emp1.empid AS emp1, 1 AS _DISJOINT_
+         FROM Employee AS R_emp1
+        WHERE R_emp1.birthday IS NOT NULL AND R_emp1.empid IS NOT NULL
+                  ) AS G_opt1 ON G_opt1.emp1=R_emp1.empid
+       INNER JOIN Employee AS R_emp2
+       LEFT OUTER JOIN (
+       SELECT R_emp2.birthday AS birthday, R_emp2.empid AS emp2, 3 AS _DISJOINT_
+         FROM Employee AS R_emp2
+        WHERE R_emp2.birthday IS NOT NULL AND R_emp2.empid IS NOT NULL
+                  ) AS G_opt3 ON (G_opt1._DISJOINT_ IS NULL OR G_opt3.birthday=G_opt1.birthday) AND G_opt3.emp2=R_emp2.empid
+       INNER JOIN Employee AS R_emp3
+       INNER JOIN Employee AS R_emp4
+ WHERE (G_opt1._DISJOINT_ IS NULL OR R_emp3.birthday=G_opt1.birthday)
+   AND (G_opt1._DISJOINT_ IS NULL OR R_emp4.birthday=G_opt1.birthday)
+   AND R_emp1.lastName<R_emp2.lastName
+   AND R_emp2.empid IS NOT NULL AND R_emp2.lastName<R_emp3.lastName
+   AND R_emp3.lastName<R_emp4.lastName AND R_emp1.empid IS NOT NULL
+""").get
+    val generated = SparqlToSql(db, sparqlSelect, StemURI("http://hr.example/DB/"), false, false)
+    assert(generated === parsed)
+    val output = """
+"""
+  }
+
+  val hosp1:DatabaseDesc = DatabaseDesc(
+    Map(Relation("Person") -> 
+	RelationDesc(Option(Attribute("ID")), 
+		     Map(Attribute("ID") -> Value(Datatype.INTEGER),
+			 Attribute("MiddleName") -> Value(Datatype.STRING),
+			 Attribute("DateOfBirth") -> Value(Datatype.DATE),
+			 Attribute("SexDE") -> ForeignKey(Relation("Sex_DE"), Attribute("ID")))),
+	Relation("Sex_DE") -> 
+	RelationDesc(Option(Attribute("ID")),
+		     Map(Attribute("ID") -> Value(Datatype.INTEGER),
+			 Attribute("EntryName") -> Value(Datatype.STRING))),
+	Relation("Item_Medication") -> 
+	RelationDesc(Option(Attribute("ID")),
+		     Map(Attribute("ID") -> Value(Datatype.INTEGER),
+			 Attribute("PatientID") -> ForeignKey(Relation("Person"),  Attribute("ID")),
+			 Attribute("PerformedDTTM") -> Value(Datatype.DATE),
+			 Attribute("EntryName") -> Value(Datatype.STRING))),
+	Relation("Medication") -> 
+	RelationDesc(Option(Attribute("ID")),
+		     Map(Attribute("ID") -> Value(Datatype.INTEGER),
+			 Attribute("ItemID") -> ForeignKey(Relation("Item_Medication"),  Attribute("ID")),
+			 Attribute("MedDictDE") -> ForeignKey(Relation("Medication_DE"), Attribute("ID")))),
+	Relation("Medication_DE") -> 
+	RelationDesc(Option(Attribute("ID")),
+		     Map(Attribute("ID") -> Value(Datatype.INTEGER),
+			 Attribute("NDC") -> Value(Datatype.INTEGER))),
+	Relation("NDCcodes") -> 
+	RelationDesc(Option(Attribute("ID")),
+		     Map(Attribute("ID") -> Value(Datatype.INTEGER),
+			 Attribute("NDC") -> Value(Datatype.INTEGER),
+			 Attribute("ingredient") -> Value(Datatype.INTEGER)))
+      ))
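+
+  /* Reading aid for the health-care queries below, following the foreign keys
+   * declared in hosp1: a patient's medications are reached via
+   *
+   *   Person.ID <- Item_Medication.PatientID
+   *   Item_Medication.ID <- Medication.ItemID
+   *   Medication.MedDictDE -> Medication_DE.ID
+   *   Medication_DE.NDC = NDCcodes.NDC   (joined on the NDC value, not a key)
+   *
+   * and the ?indic* and ?disqual* variable groups each walk this chain once.
+   */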
+
+  test("swobjects/tests/healthCare/lists-notBound/db.rq AS OPTIONAL") {
+    val sparqlParser = Sparql()
+    val sparqlSelect = sparqlParser.parseAll(sparqlParser.select, """
+PREFIX Person: <http://hospital.example/DB/Person#>
+PREFIX Sex_DE: <http://hospital.example/DB/Sex_DE#>
+PREFIX Item_Medication: <http://hospital.example/DB/Item_Medication#>
+PREFIX Medication: <http://hospital.example/DB/Medication#>
+PREFIX Medication_DE: <http://hospital.example/DB/Medication_DE#>
+PREFIX NDCcodes: <http://hospital.example/DB/NDCcodes#>
+PREFIX xsd : <http://www.w3.org/2001/XMLSchema#>
+
+SELECT ?patient
+ WHERE {
+    ?patient Person:MiddleName ?middleName .
+    ?patient Person:DateOfBirth ?dob .
+    ?patient Person:SexDE ?sexEntry .
+    ?sexEntry Sex_DE:EntryName ?sex .
+
+    ?indicItem Item_Medication:PatientID ?patient .
+    ?indicItem Item_Medication:PerformedDTTM ?indicDate .
+    ?indicItem Item_Medication:EntryName ?takes .
+    ?indicMed Medication:ItemID ?indicItem .
+    ?indicMed Medication:MedDictDE ?indicDE .
+    ?indicDE Medication_DE:NDC ?indicNDC .
+    ?indicCode NDCcodes:NDC ?indicNDC .
+    ?indicCode NDCcodes:ingredient "6809"^^xsd:integer
+
+    OPTIONAL {
+        ?disqualItem Item_Medication:PatientID ?patient .
+        ?disqualItem Item_Medication:PerformedDTTM ?disqualDate .
+        ?disqualItem Item_Medication:EntryName ?takes .
+        ?disqualMed Medication:ItemID ?disqualItem .
+        ?disqualMed Medication:MedDictDE ?disqualDE .
+        ?disqualDE Medication_DE:NDC ?disqualNDC .
+        ?disqualCode NDCcodes:NDC ?disqualNDC .
+        ?disqualCode NDCcodes:ingredient "11289"^^xsd:integer
+    }
+      }
+""").get // 	FILTER (!BOUND(?disqualItem))
+    val sqlParser = Sql()
+    val parsed = sqlParser.parseAll(sqlParser.select, """
+SELECT R_patient.ID AS patient
+  FROM Person AS R_patient
+       INNER JOIN Sex_DE AS R_sexEntry ON R_sexEntry.ID=R_patient.SexDE
+       INNER JOIN Item_Medication AS R_indicItem ON R_indicItem.PatientID=R_patient.ID
+       INNER JOIN Medication AS R_indicMed ON R_indicMed.ItemID=R_indicItem.ID
+       INNER JOIN Medication_DE AS R_indicDE ON R_indicDE.ID=R_indicMed.MedDictDE
+       INNER JOIN NDCcodes AS R_indicCode ON R_indicCode.NDC=R_indicDE.NDC
+       LEFT OUTER JOIN (
+   SELECT R_disqualCode.ID AS disqualCode, R_disqualMed.MedDictDE AS disqualDE,
+          R_disqualItem.PerformedDTTM AS disqualDate, R_disqualItem.ID AS disqualItem,
+          R_disqualMed.ID AS disqualMed, R_disqualDE.NDC AS disqualNDC,
+          R_disqualItem.PatientID AS patient, R_disqualItem.EntryName AS takes, 6 AS _DISJOINT_
+     FROM Item_Medication AS R_disqualItem
+          INNER JOIN Medication AS R_disqualMed ON R_disqualMed.ItemID=R_disqualItem.ID
+          INNER JOIN Medication_DE AS R_disqualDE ON R_disqualDE.ID=R_disqualMed.MedDictDE
+          INNER JOIN NDCcodes AS R_disqualCode ON R_disqualCode.NDC=R_disqualDE.NDC
+    WHERE R_disqualCode.ingredient=11289
+      AND R_disqualItem.EntryName IS NOT NULL
+      AND R_disqualItem.PatientID IS NOT NULL
+      AND R_disqualItem.PerformedDTTM IS NOT NULL
+              ) AS G_opt6 ON G_opt6.patient=R_patient.ID
+                         AND G_opt6.takes=R_indicItem.EntryName
+ WHERE R_indicCode.ingredient=6809
+   AND R_indicItem.EntryName IS NOT NULL
+   AND R_indicItem.PerformedDTTM IS NOT NULL
+   AND R_patient.DateOfBirth IS NOT NULL
+   AND R_patient.MiddleName IS NOT NULL
+   AND R_sexEntry.EntryName IS NOT NULL
+""").get //    AND G_opt6.patient IS NULL
+    val generated = SparqlToSql(hosp1, sparqlSelect, StemURI("http://hospital.example/DB/"), false, false)
+    assert(generated === parsed)
+    val output = """
+"""
+  }
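+
+  /* The OPTIONAL variant above leaves the !BOUND filter commented out (see the
+   * note after its SPARQL string). The next test states the same intent,
+   * "patient takes no disqualifying medication", with MINUS; comparing the two
+   * expected queries, the only difference is the added
+   * G_opt6._DISJOINT_ IS NULL condition over the LEFT OUTER JOINed subselect.
+   */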
+
+  test("swobjects/tests/healthCare/lists-notBound/db.rq") {
+    val sparqlParser = Sparql()
+    val sparqlSelect = sparqlParser.parseAll(sparqlParser.select, """
+PREFIX Person: <http://hospital.example/DB/Person#>
+PREFIX Sex_DE: <http://hospital.example/DB/Sex_DE#>
+PREFIX Item_Medication: <http://hospital.example/DB/Item_Medication#>
+PREFIX Medication: <http://hospital.example/DB/Medication#>
+PREFIX Medication_DE: <http://hospital.example/DB/Medication_DE#>
+PREFIX NDCcodes: <http://hospital.example/DB/NDCcodes#>
+PREFIX xsd : <http://www.w3.org/2001/XMLSchema#>
+
+SELECT ?patient
+ WHERE {
+    ?patient Person:MiddleName ?middleName .
+    ?patient Person:DateOfBirth ?dob .
+    ?patient Person:SexDE ?sexEntry .
+    ?sexEntry Sex_DE:EntryName ?sex .
+
+    ?indicItem Item_Medication:PatientID ?patient .
+    ?indicItem Item_Medication:PerformedDTTM ?indicDate .
+    ?indicItem Item_Medication:EntryName ?takes .
+    ?indicMed Medication:ItemID ?indicItem .
+    ?indicMed Medication:MedDictDE ?indicDE .
+    ?indicDE Medication_DE:NDC ?indicNDC .
+    ?indicCode NDCcodes:NDC ?indicNDC .
+    ?indicCode NDCcodes:ingredient "6809"^^xsd:integer
+
+    MINUS {
+        ?disqualItem Item_Medication:PatientID ?patient .
+        ?disqualItem Item_Medication:PerformedDTTM ?disqualDate .
+        ?disqualItem Item_Medication:EntryName ?takes .
+        ?disqualMed Medication:ItemID ?disqualItem .
+        ?disqualMed Medication:MedDictDE ?disqualDE .
+        ?disqualDE Medication_DE:NDC ?disqualNDC .
+        ?disqualCode NDCcodes:NDC ?disqualNDC .
+        ?disqualCode NDCcodes:ingredient "11289"^^xsd:integer
+    }
+      }
+""").get
+    val sqlParser = Sql()
+    val parsed = sqlParser.parseAll(sqlParser.select, """
+SELECT R_patient.ID AS patient
+  FROM Person AS R_patient
+       INNER JOIN Sex_DE AS R_sexEntry ON R_sexEntry.ID=R_patient.SexDE
+       INNER JOIN Item_Medication AS R_indicItem ON R_indicItem.PatientID=R_patient.ID
+       INNER JOIN Medication AS R_indicMed ON R_indicMed.ItemID=R_indicItem.ID
+       INNER JOIN Medication_DE AS R_indicDE ON R_indicDE.ID=R_indicMed.MedDictDE
+       INNER JOIN NDCcodes AS R_indicCode ON R_indicCode.NDC=R_indicDE.NDC
+       LEFT OUTER JOIN (
+   SELECT R_disqualCode.ID AS disqualCode, R_disqualMed.MedDictDE AS disqualDE,
+          R_disqualItem.PerformedDTTM AS disqualDate, R_disqualItem.ID AS disqualItem,
+          R_disqualMed.ID AS disqualMed, R_disqualDE.NDC AS disqualNDC,
+          R_disqualItem.PatientID AS patient, R_disqualItem.EntryName AS takes, 6 AS _DISJOINT_
+     FROM Item_Medication AS R_disqualItem
+          INNER JOIN Medication AS R_disqualMed ON R_disqualMed.ItemID=R_disqualItem.ID
+          INNER JOIN Medication_DE AS R_disqualDE ON R_disqualDE.ID=R_disqualMed.MedDictDE
+          INNER JOIN NDCcodes AS R_disqualCode ON R_disqualCode.NDC=R_disqualDE.NDC
+    WHERE R_disqualCode.ingredient=11289
+      AND R_disqualItem.EntryName IS NOT NULL
+      AND R_disqualItem.PatientID IS NOT NULL
+      AND R_disqualItem.PerformedDTTM IS NOT NULL
+              ) AS G_opt6 ON G_opt6.patient=R_patient.ID
+                         AND G_opt6.takes=R_indicItem.EntryName
+ WHERE G_opt6._DISJOINT_ IS NULL
+   AND R_indicCode.ingredient=6809
+   AND R_indicItem.EntryName IS NOT NULL
+   AND R_indicItem.PerformedDTTM IS NOT NULL
+   AND R_patient.DateOfBirth IS NOT NULL
+   AND R_patient.MiddleName IS NOT NULL
+   AND R_sexEntry.EntryName IS NOT NULL
+""").get
+    val generated = SparqlToSql(hosp1, sparqlSelect, StemURI("http://hospital.example/DB/"), false, false)
+    assert(generated === parsed)
+    val output = """
+"""
+  }
+
+}