--- a/src/main/scala/SparqlToSparql.scala Sun Feb 28 15:08:32 2010 -0500
+++ b/src/main/scala/SparqlToSparql.scala Mon May 10 06:01:07 2010 -0400
@@ -37,7 +37,10 @@
case _ => error("not implemented" + gp)
}
}
- def substituteGraphPattern (gp:sparql.GraphPattern, vartermmap:Map[sparql.Var, sparql.Term], varPrefix:String):sparql.GraphPattern = {
+
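+  /** One edition of a rule: a map from the rule's variables to the terms they are bound to. */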
+ type VarMap = Map[sparql.Var, sparql.Term]
+
+ def substituteGraphPattern (gp:sparql.GraphPattern, vartermmap:VarMap, varPrefix:String):sparql.GraphPattern = {
val mapped = vartermmap.foldLeft(gp)((incrementalGP, varterm) => {
val (varr, term) = varterm
substitute(incrementalGP, sparql.TermVar(varr), term)
@@ -59,7 +62,7 @@
substitute(substitute(construct.gp, trigger.s, tp.s), trigger.o, tp.o)
}
}
- case class Bindings (b:Map[sparql.Construct, List[Map[sparql.Var, sparql.Term]]]) {
+ case class Bindings (b:Map[sparql.Construct, List[VarMap]]) {
def countEditions () = {
var count = 0
b.map((constructlist) =>
@@ -95,16 +98,16 @@
}).mkString("\n ", ",\n ", "") + "))"
def ensureGraphPattern (construct:sparql.Construct) = {
if (b.contains(construct)) this
- else Bindings(b + (construct -> List[Map[sparql.Var, sparql.Term]]()))
+ else Bindings(b + (construct -> List[VarMap]()))
}
// val varsS:Option[Bindings] = vars.maybeRebind(construct, v, tos)
- // b:Map[sparql.Construct, List[Map[sparql.Var, sparql.Term]]]
+ // b:Map[sparql.Construct, List[VarMap]]
def mustBind (construct:sparql.Construct, vs:sparql.Term, tos:sparql.Term, vo:sparql.Term, too:sparql.Term):Bindings = {
/* ridiculous traversal for the first viably matching rule edition. */
var matched = false
- val existing:List[Map[sparql.Var, sparql.Term]] = b(construct).map((map) => {
- def _matches (l:sparql.Term, r:sparql.Term):(Boolean, Map[sparql.Var, sparql.Term]) = {
- val empty = Map[sparql.Var, sparql.Term]()
+ val existing:List[VarMap] = b(construct).map((map:VarMap) => {
+ def _matches (l:sparql.Term, r:sparql.Term):(Boolean, VarMap) = {
+ val empty:VarMap = Map[sparql.Var, sparql.Term]()
(l, r) match {
case (v:sparql.TermVar, x) =>
// println("(v:sparql.TermVar, x)" + v.v + ":" + x)
@@ -112,7 +115,7 @@
if (r == map(v.v)) (true, empty)
else (false, empty)
} else {
- (true, Map(v.v -> r))
+ (true, Map[sparql.Var, sparql.Term](v.v -> r))
}
case (x, v:sparql.TermVar) => {
// println("query variable " + v + " known equal to " + x + " at compile time")
@@ -139,13 +142,19 @@
Bindings(b.map((constructlist) => {
val (oldConstr, l) = constructlist
if (oldConstr == construct) {
- def _newBinding (l:sparql.Term, r:sparql.Term):Map[sparql.Var, sparql.Term] = {
- val empty = Map[sparql.Var, sparql.Term]()
+ def _newBinding (l:sparql.Term, r:sparql.Term):VarMap = {
+ val empty:VarMap = Map[sparql.Var, sparql.Term]()
(l, r) match {
case (v:sparql.TermVar, _) =>
- Map(v.v -> r)
+ Map[sparql.Var, sparql.Term](v.v -> r)
+ case (b:sparql.TermBNode, _) => {
+ println(".. synthetic query variable " + b + "")
+ Map[sparql.Var, sparql.Term]()
+ // println("@@ mustBind:_newBinding(BNode) + " + b)
+ // Map(sparql.Var("bnode_" + b.b.s) -> r) // !!!
+ }
case (_, v:sparql.TermVar) => {
- println("query variable " + v + " known equal to " + l + " at compile time")
+ println(".. query variable " + v + " known equal to " + l + " at compile time")
Map[sparql.Var, sparql.Term]()
}
case (_, _) => Map[sparql.Var, sparql.Term]()
@@ -161,7 +170,7 @@
}
}
}
- def createEmptyBindings () = Bindings(Map[sparql.Construct, List[Map[sparql.Var, sparql.Term]]]())
+ def createEmptyBindings () = Bindings(Map[sparql.Construct, List[VarMap]]())
case class RuleMap (rules:Map[sparql.Uri, List[RuleIndex]]) {
def transform (prove:List[sparql.TriplePattern], used:Set[sparql.TriplePattern], varsP:Bindings):Bindings = {
@@ -249,8 +258,10 @@
var _ruleNo = 0
val ruleMap = RuleMap({
constructs.foldLeft(Map[sparql.Uri, List[RuleIndex]]())((m, rule) => {
+ // Register abbreviations for debugging output.
RuleLabels.update(rule.head.toString, "head" + _ruleNo)
RuleLabels.update(rule.gp.toString, "body" + _ruleNo)
+
_ruleNo = _ruleNo + 1
rule.head.triplepatterns.foldLeft(m)((m, tp) => m + ({
tp.p match {
--- a/src/main/scala/SparqlToSql.scala Sun Feb 28 15:08:32 2010 -0500
+++ b/src/main/scala/SparqlToSql.scala Mon May 10 06:01:07 2010 -0400
@@ -1,9 +1,9 @@
-/* SparqlToSql: convert SPARQL queries to sound SQL queries.
- *
+/** SparqlToSql: convert SPARQL queries to sound SQL queries.
+ *
* Please read from the bottom -- i.e. apply calls mapGraphPattern with the root
* graph pattern. mapGraphPattern handles all the graph pattern types in SPARQL,
 * effectively performing the Convert Graph Patterns step in SPARQL 1.0 12.2.1
- * <http://www.w3.org/TR/rdf-sparql-query/#convertGraphPattern>
+ * <a href="http://www.w3.org/TR/rdf-sparql-query/#convertGraphPattern">SPARQL rules for converting graph patterns</a>
* with the target semantics in SQL instead of SPARQL.
*/
@@ -26,16 +26,44 @@
case class Int(relvarattr:sql.RelVarAttr) extends Binding
case class Enum(relvarattr:sql.RelVarAttr) extends Binding
+/**
+ * Converts a SPARQL object to an SQL object equivalent over the direct graph.
+ *
+ * @see {@link w3c.sw.sparql.Sparql Sparql}
+ * @see {@link w3c.sw.sql.Sql#Select Sql}
+ */
object SparqlToSql {
- case class R2RState(joins:util.AddOrderedSet[sql.Join], varmap:Map[sparql.Var, SQL2RDFValueMapper], exprs:Set[sql.Expression])
+ /**
+ * Accumulated state for generating an Sql object.
+ *
+ * @param joins an AddOrderedSet of SQL joins
+   * @param varmap a map from sparql.Assignable to its SQL2RDFValueMapper
+ * @param exprs a set of accumulated SQL expressions
+ */
+ case class R2RState(joins:util.AddOrderedSet[sql.Join], varmap:Map[sparql.Assignable, SQL2RDFValueMapper], exprs:Set[sql.Expression])
+ /**
+   * Binding for a sparql.Var or sparql.BNode
+ */
sealed abstract class FullOrPartialBinding
case class FullBinding(relvarattr:sql.RelVarAttr) extends FullOrPartialBinding
case class BindingConstraint(expr:sql.RelationalExpression, relvarattr:sql.RelVarAttr)
+
+ /**
+   * Partial binding, a variable only (so far) found in OPTIONALs or asymmetric
+ * UNIONs.
+ */
case class PartialBinding(binders:Set[BindingConstraint]) extends FullOrPartialBinding
+ /**
+ * Convert a binding to an SQL expression.
+ * <p/>
+ * example return:
+ * <code>if (g_union1._DISJOINT_ != 0, g_union1.who, if (g_union2._DISJOINT_ != 3, g_union2.who, NULL))</code>
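+   * (a PartialBinding carrying two BindingConstraints); a FullBinding yields just its
+   * attribute, e.g. <code>R_emp.lastName</code>.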
+ * @param against binding to be expressed
+ * @return SQL expression representing that binding
+ */
def toExpr(against:FullOrPartialBinding):sql.Expression = {
- /* if (g_union1._DISJOINT_ != 0, g_union1.who, if (g_union2._DISJOINT_ != 3, g_union2.who, NULL)) */
against match {
case FullBinding(relvarattr) =>
sql.PrimaryExpressionAttr(relvarattr)
@@ -46,6 +74,12 @@
})
}
}
+ /**
+ * Accumulate bindings on previously bound sparql variables.
+ * @param binding previous binding
+ * @param relVarAttr SQL relvar attribute to be bound, e.g. <code>G_union1.who</code>
+   * @param expr binding constraint, e.g. <code>G_opt6._DISJOINT_ IS NULL</code> or <code>G_union1._DISJOINT_!=0</code>
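+   * @return the binding, with expr recorded as an additional constraint on relVarAttr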
+ */
def addExpr(binding:FullOrPartialBinding, relVarAttr:sql.RelVarAttr, expr:sql.RelationalExpression):FullOrPartialBinding = {
binding match {
case FullBinding(relvarattr) =>
@@ -68,28 +102,62 @@
}
}
+ /**
+ * SQL terms representing SPARQL variables and bnodes.
+ */
sealed abstract class SQL2RDFValueMapper(binding:FullOrPartialBinding)
case class IntMapper(binding:FullOrPartialBinding) extends SQL2RDFValueMapper(binding)
case class StringMapper(binding:FullOrPartialBinding) extends SQL2RDFValueMapper(binding)
case class DateMapper(binding:FullOrPartialBinding) extends SQL2RDFValueMapper(binding)
+ /**
+ * map to a URL for a tuple in the database.
+ */
case class RDFNoder(relation:sql.Relation, binding:FullOrPartialBinding) extends SQL2RDFValueMapper(binding)
+ /**
+ * map to a blank node label for a tuple in the database.
+ */
case class RDFBNoder(relation:sql.Relation, binding:FullOrPartialBinding) extends SQL2RDFValueMapper(binding)
+ /**
+ * a URL representing a tuple in a database.
+ */
case class NodeUri(stem:Stem, rel:Rel, attr:Attr, v:CellValue)
+
+ /**
+   * URL stem on which the direct graph is based.
+ * <p/>
+ * e.g. http://mydb.example/theDatabase
+ */
case class Stem(s:String) {
override def toString = "" + s
}
- case class Rel(s:String) { // !! NUKE
+
+ /**
+ * SQL relation (table) name
+ */
+ case class Rel(s:String) {
override def toString = "" + s
}
+ /**
+ * SQL attribute (column) name
+ */
case class Attr(s:String) {
override def toString = "" + s
}
+ /**
+ * value in a database
+ */
case class CellValue(s:String)
+ /**
+ * a URL representing a predicate in a database.
+ */
case class PUri(stem:Stem, rel:Rel, attr:Attr) {
override def toString = "<" + stem + "/" + rel + "#" + attr + ">"
}
- /* stemURI + '/' + (\w+) + '#' (\w+) */
+ /**
+ * parse predicate URL in direct graph into stem, relation and attribute
+ * <pre>stemURI + '/' + (\w+) + '#' (\w+)</pre>
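+   * For instance, with the example stem used elsewhere in this file:
+   * <code>http://hr.example/DB/Employee#lastName</code> =><code>PUri(Stem(http://hr.example/DB), Rel(Employee), Attr(lastName))</code>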
+ */
def parsePredicateURI(u:sparql.Uri):PUri = {
val x:String = u.s
val uri = new URI(x)
@@ -99,7 +167,10 @@
PUri(Stem(stem), Rel(path.last), Attr(uri.getFragment))
}
- /* stemURI + '/' (\w+) '/' (\w+) '.' (\w+) '#record' */
+ /**
+ * parse node URL in direct graph into stem, relation, attribute and value
+ * <pre>stemURI + '/' (\w+) '/' (\w+) '.' (\w+) '#record'</pre>
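+   * For instance:
+   * <code>http://hr.example/DB/Employee/empid.18#record</code> =><code>NodeUri(Stem(http://hr.example/DB), Rel(Employee), Attr(empid), CellValue(18))</code>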
+ */
def parseObjectURI(u:sparql.Uri):NodeUri = {
val x:String = u.s
val uri = new URI(x)
@@ -111,14 +182,18 @@
assert("record" == uri.getFragment)
NodeUri(Stem(stem), Rel(rel), Attr(attrPair(0)), CellValue(attrPair(1)))
}
-/*
-Sparql.parseObjectURI(
-Sparql.parsePredicateURI(
-*/
+ /**
+ * synthesize a relvar name from a SPARQL term.
+ * <p/>
+ * e.g. <code>?emp</code> =><code>R_emp</code>
+ * e.g. <code><http://hr.example/DB/Employee/empid.253#record></code> =><code>R_empid253</code>
+ * e.g. <code>18</code> =><code>R_18</code>
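+   * e.g. <code>_:who</code> =><code>B_who</code> (bnode label <code>who</code> chosen for illustration)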
+ */
def relVarFromTerm(s:sparql.Term):sql.RelVar = {
s match {
case sparql.TermUri(ob) => relVarFromNode(ob)
case sparql.TermVar(v) => relVarFromVar(v)
+ case sparql.TermBNode(v) => relVarFromBNode(v)
case sparql.TermLit(l) => relVarFromLiteral(l)
}
}
@@ -137,11 +212,32 @@
sql.RelVar(sql.Name("R_" + v))
}
- def attrAliasNameFromVar(v:sparql.Var):sql.Name = sql.Name("" + v.s)
+ def relVarFromBNode(vr:sparql.BNode):sql.RelVar = {
+ val sparql.BNode(b) = vr
+ sql.RelVar(sql.Name("B_" + b))
+ }
- def uriConstraint(state:R2RState, constrainMe:sql.RelVarAttr, u:NodeUri, enforeForeignKeys:Boolean):R2RState = {
+ /**
+ * synthesize a SQL name from a SPARQL variable or bnode.
+ * <p/>
+ * e.g. <code>?emp</code> =><code>emp</code>
+ */
+ def attrAliasNameFromVar(v:sparql.Assignable):sql.Name = sql.Name("" + v.s)
+
+ /**
+ * add constraints implied by a URI
+ * @param state state to be appended
+ * @param constrainMe relvar attribute to constrain, e.g. <code>R_empid18.empid</code>
+ * @param u SparqlToSql URL object, e.g. <code>NodeUri(http://hr.example/DB,Employee,empid,CellValue(18))</code>
+ * @return state + expression for the URI
+ * <p/>
+ * u:<code>NodeUri(http://hr.example/DB,Employee,empid,CellValue(18)), true</code> =><code>R_emp.manager=18</code>
+ * u:<code>NodeUri(http://hr.example/DB,Employee,empid,CellValue(18)), false</code> =><code>R_empid18.empid=18</code>
+ * (the latter produces another join on R_empid18 in order to enforce a foreign key.)
+ */
+ def uriConstraint(state:R2RState, constrainMe:sql.RelVarAttr, u:NodeUri, enforceForeignKeys:Boolean):R2RState = {
val relvar =
- if (enforeForeignKeys)
+ if (enforceForeignKeys)
sql.RelVarAttr(constrainMe.relvar, sql.Attribute(sql.Name(u.attr.s)))
else
constrainMe
@@ -151,6 +247,14 @@
sql.PrimaryExpressionTyped(sql.Datatype.INTEGER,sql.Name(u.v.s))))
}
+ /**
+ * add constraints implied by a literal in a SPARQL triple pattern
+ * @param state state to be appended
+ * @param constrainMe relvar attribute to constrain, e.g. <code>R_18.empid</code>
+   * @param lit sparql.Literal, e.g. <code>18</code>
+   * @param dt SQL datatype of the literal, e.g. <code>sql.Datatype.INTEGER</code>
+   * @return state + expression for the literal, e.g. <code>R_18.empid=18</code>
+ */
def literalConstraint(state:R2RState, constrainMe:sql.RelVarAttr, lit:sparql.Literal, dt:sql.Datatype):R2RState = {
R2RState(state.joins,
state.varmap,
@@ -159,54 +263,84 @@
}
/** varConstraint
- * examples:
- * SELECT ?emp WHERE { ?emp emp:manager <http://hr.example/our/favorite/DB/Employee/id.18#record> ; emp:name ?name }
- * SQL Results SPARQL Results
- * A_emp A_name ?emp ?name
- * 4 "Bob" <http://hr.example/our/favorite/DB/Employee/id.4#record> "Bob"^^xsd:string
- * 6 "Sue" <http://hr.example/our/favorite/DB/Employee/id.6#record> "Sue"^^xsd:string
+ * @param state earlier R2RState
+ * @param alias relvar to bind, e.g. Employees AS R_emp
+ * @param optAttr SQL attribute to bind, e.g. Employees.lastName
+ * @param v SPARQL variable or blank node to bind
+ * @param db database description
+ * @param rel SQL relation to bind, e.g. Employee
+ * @return a new R2RState incorporating the new binding
+ * For example, <code>SELECT ?emp WHERE { ?emp emp:lastName ?name }</code> will call varConstraint twice:
*
- * type String -> RDFStringConstructor // adds ^^xsd:string
- * type primary key -> RDFNodeConstructor // prefixes with stemURL + relation + attribute and adds #record
- * */
- def varConstraint(state:R2RState, alias:sql.RelVar, optAttr:Option[sql.Attribute], v:sparql.Var, db:sql.DatabaseDesc, rel:sql.Relation):R2RState = {
- /* e.g. Employee _emp.id
- ** Employee _emp.lastName
- ** Employee _emp.manager
- */
+   * given: (alias:=R_emp, optAttr:=lastName, v:=?name, rel:=Employee) ->
+ * return: (VarAssignable(?name),StringMapper(FullBinding(R_emp.lastName)))
+ * which maps "Smith" to "Smith"^^xsd:string
+ *
+   * given: (alias:=R_emp, optAttr:=empid, v:=?emp, rel:=Employee) ->
+ * return: (VarAssignable(?emp),RDFNoder(Employee,FullBinding(R_emp.empid)))
+ * which maps 4 to <http://hr.example/our/favorite/DB/Employee/id.4#record>
+ */
+ def varConstraint(state:R2RState, alias:sql.RelVar, optAttr:Option[sql.Attribute], v:sparql.Assignable, db:sql.DatabaseDesc, rel:sql.Relation):R2RState = {
val constrainMe = if (optAttr.isDefined) sql.RelVarAttr(alias, optAttr.get) else sql.RelVarAttr(alias, sql.Attribute(sql.Name("_no_such_attribute")))
val reldesc = db.relationdescs(rel)
val boundTo = FullBinding(constrainMe)
+
+ /**
+ * Bind optAttr to an SQL generator like RDFNoder(Employee,FullBinding(R_emp.empid))
+ */
val binding = reldesc.primarykey match {
+ /** <pre>varConstraint(R_emp, Some(empid), VarAssignable(?emp), Employee) -> RDFNoder(Employee,FullBinding(R_emp.empid))</pre> */
case Some(sql.Attribute(constrainMe.attribute.n)) => RDFNoder(rel, boundTo)
case _ => {
- // e.g. sql.Attribute(sql.Name("id")) or None
+
if (reldesc.attributes.contains(constrainMe.attribute)) {
reldesc.attributes(constrainMe.attribute) match {
+ /** varConstraint(R_patient, Some(SexDE), VarAssignable(?_0_sexEntry), Person) -> RDFNoder(Person,FullBinding(R_patient.SexDE)) */
case sql.ForeignKey(fkrel, fkattr) => RDFNoder(rel, boundTo)
+ /** varConstraint(R__0_indicDE, Some(NDC), VarAssignable(?_0_indicNDC), Medication_DE) -> IntMapper(FullBinding(R__0_indicDE.NDC)) */
case sql.Value(sql.Datatype("Int")) => IntMapper(boundTo)
+ /** varConstraint(R_emp, Some(lastName), VarAssignable(?name), Employee) -> StringMapper(FullBinding(R_emp.lastName)) */
case sql.Value(sql.Datatype("String")) => StringMapper(boundTo)
+ /** varConstraint(R_patient, Some(DateOfBirth), VarAssignable(?dob), Person) -> DateMapper(FullBinding(R_patient.DateOfBirth)) */
case sql.Value(sql.Datatype("Date")) => DateMapper(boundTo)
}
} else {
- RDFBNoder(rel, boundTo)
- }
+ /** Default behavior for unknown attributes. */
+ RDFBNoder(rel, boundTo) // @@ untested
+ }
}
}
- if (state.varmap.contains(v) && state.varmap(v) != constrainMe) {
- /* The variable has already been bound to another attribute. */
- /* Constraint against the initial binding for this variable. */
+ if (state.varmap.contains(v) && state.varmap(v) == constrainMe) {
+ /**
+ * No useful contributions for this variable.
+ * (We could re-add the binding; this case is just for clarity of behavior.)
+ */
+ state
+ } else if (state.varmap.contains(v)) {
+ /**
+ * The variable has already been bound to another attribute.
+ * Constraint against the initial binding for this variable.
+ * e.g. <code>R__0_0_indicCode.ID=R__0_0_indicCode.ID</code>
+ */
val constraint = sql.RelationalExpressionEq(sql.PrimaryExpressionAttr(constrainMe),
sql.PrimaryExpressionAttr(varToAttribute(state.varmap, v)))
R2RState(state.joins, state.varmap,
- if (varToAttributeDisjoints(state.varmap, v).size > 0) {
+ if (varToAttributeDisjoints(state.varmap, v).size > 0)
+ /**
+ * Enumerate the set of constraints capturing all sides of a disjoint or option.
+ * e.g. a UNION where two disjoints constrain R_who.empid=G_union0.who:
+ * Set((G_union0._DISJOINT_!=0) OR (R_who.empid=G_union0.who),
+ * (G_union0._DISJOINT_!=1) OR (R_who.empid=G_union0.who))
+ */
state.exprs ++ {varToAttributeDisjoints(state.varmap, v) map ((d) => sql.ExprDisjunction(Set(d, constraint)))}
- } else
+ else
state.exprs + constraint
)
} else {
- /* This is a new variable or a replacement bindinig for an old variable. */
+ /**
+ * Add binding for new variable.
+ */
R2RState(state.joins, state.varmap + (v -> binding), state.exprs)
}
}
@@ -242,9 +376,13 @@
val objattr = sql.RelVarAttr(relvar, attr)
val state_postSubj = s match {
case sparql.TermUri(u) => uriConstraint(stateP, sql.RelVarAttr(relvar, db.relationdescs(rel).primarykey.get), parseObjectURI(u), true)
- case sparql.TermVar(v) => try { varConstraint(stateP, relvar, db.relationdescs(rel).primarykey, v, db, rel) } catch {
- case e:java.util.NoSuchElementException =>
- throw new Exception("error processing { " + s + " " + p + " " + o + " } :db.relationdescs(" + rel + ") not found in " + db)
+ case sparql.TermVar(v) => try { varConstraint(stateP, relvar, db.relationdescs(rel).primarykey, sparql.VarAssignable(v), db, rel) } catch {
+ case e:java.util.NoSuchElementException =>
+ throw new Exception("error processing { " + s + " " + p + " " + o + " } :db.relationdescs(" + rel + ") not found in " + db)
+ }
+ case sparql.TermBNode(b) => try { varConstraint(stateP, relvar, db.relationdescs(rel).primarykey, sparql.BNodeAssignable(b), db, rel) } catch {
+ case e:java.util.NoSuchElementException =>
+ throw new Exception("error processing { " + s + " " + p + " " + o + " } :db.relationdescs(" + rel + ") not found in " + db)
}
case _ => error("illegal SPARQL subject: " + s)
}
@@ -284,7 +422,8 @@
o match {
case sparql.TermLit(l) => literalConstraint(state_fkeys, targetattr, l, dt)
case sparql.TermUri(u) => uriConstraint (state_fkeys, targetattr, parseObjectURI(u), enforceForeignKeys)
- case sparql.TermVar(v) => varConstraint (state_fkeys, targetattr.relvar, Some(targetattr.attribute), v, db, targetrel)
+ case sparql.TermVar(v) => varConstraint (state_fkeys, targetattr.relvar, Some(targetattr.attribute), sparql.VarAssignable(v), db, targetrel)
+ case sparql.TermBNode(b) => varConstraint (state_fkeys, targetattr.relvar, Some(targetattr.attribute), sparql.BNodeAssignable(b), db, targetrel)
}
}
case _ => error("illegal SPARQL predicate: " + p)
@@ -301,7 +440,7 @@
case PartialBinding(binders) => bindingConstraintToAttribute(binders.toList(0))
}
}
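+  /**
+   * Look up the SQL relvar attribute bound to a SPARQL variable or bnode,
+   * e.g. <code>R_emp.lastName</code>; reports an error if vvar has no mapper in varmap.
+   */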
- def varToAttribute(varmap:Map[sparql.Var, SQL2RDFValueMapper], vvar:sparql.Var):sql.RelVarAttr = {
+ def varToAttribute(varmap:Map[sparql.Assignable, SQL2RDFValueMapper], vvar:sparql.Assignable):sql.RelVarAttr = {
val mapper = try { varmap(vvar) } catch {
case e:java.util.NoSuchElementException =>
throw new Exception("mapper for variable " + vvar + " not found in " + varmap)
@@ -325,7 +464,7 @@
case PartialBinding(binders) => binders.map({b => bindingConstraintToExpression(b)})
}
}
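+  /**
+   * Collect the disjointness guards recorded against a variable's or bnode's binding,
+   * e.g. <code>G_union0._DISJOINT_!=0</code>, used when constraining a partially bound variable.
+   */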
- def varToAttributeDisjoints(varmap:Map[sparql.Var, SQL2RDFValueMapper], vvar:sparql.Var):Set[sql.RelationalExpression] = {
+ def varToAttributeDisjoints(varmap:Map[sparql.Assignable, SQL2RDFValueMapper], vvar:sparql.Assignable):Set[sql.RelationalExpression] = {
varmap(vvar) match {
case IntMapper(binding) => bindingToDisjoints(binding)
case StringMapper(binding) => bindingToDisjoints(binding)
@@ -335,7 +474,15 @@
}
}
- def varToConcat(varmap:Map[sparql.Var, SQL2RDFValueMapper], vvar:sparql.Var, stem:StemURI):sql.Expression = {
+ /**
+ * Converts a variable bound to a URL to an SQL expression for that URL.
+ *
+ * @param varmap map from variable to SQL2RDFValueMapper
+   * @param vvar the variable to be represented
+   * @param stem URL stem for the direct graph, e.g. <code>http://mydb.example/theDatabase</code>
+   * @return an SQL expression, typically a CONCAT, constructing that URL
+   * @see SQL2RDFValueMapper
+ */
+ def varToConcat(varmap:Map[sparql.Assignable, SQL2RDFValueMapper], vvar:sparql.Assignable, stem:StemURI):sql.Expression = {
varmap(vvar) match {
case IntMapper(binding) => sql.PrimaryExpressionAttr(bindingToAttribute(binding))
case StringMapper(binding) =>
@@ -362,7 +509,25 @@
}
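+  /**
+   * Build a relational expression comparing an already-bound SQL attribute with a SPARQL term.
+   * @param varmap map from sparql.Assignable to SQL2RDFValueMapper
+   * @param l SQL relvar attribute for the left-hand side, e.g. <code>R_emp.lastName</code>
+   * @param rTerm SPARQL term for the right-hand side: a variable, bnode or typed literal
+   * @param sqlexpr constructor for the relational operator, e.g. <code>sql.RelationalExpressionEq(_,_)</code>
+   * @return sqlexpr applied to l and the SQL form of rTerm
+   */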
- def filter2expr(varmap:Map[sparql.Var, SQL2RDFValueMapper], f:sparql.PrimaryExpression):sql.RelationalExpression = {
+  def assignable2expr(varmap:Map[sparql.Assignable, SQL2RDFValueMapper], l:sql.RelVarAttr, rTerm:sparql.Term, sqlexpr:(sql.PrimaryExpression, sql.PrimaryExpression) => sql.RelationalExpression):sql.RelationalExpression = {
+ val r:sql.PrimaryExpression = rTerm match {
+ case sparql.TermUri(u) => error("not implemented: translating RDF URI to SQL: " + u) // :sparql.Uri
+ case sparql.TermVar(v) => sql.PrimaryExpressionAttr(varToAttribute(varmap, sparql.VarAssignable(v)))
+ case sparql.TermBNode(b) => sql.PrimaryExpressionAttr(varToAttribute(varmap, sparql.BNodeAssignable(b)))
+ case sparql.TermLit(sparql.Literal(rdf.RDFLiteral(lit,rdf.Datatype(dt)))) =>
+ sql.PrimaryExpressionTyped({
+ dt.toString match {
+ case "http://www.w3.org/2001/XMLSchema#string" => sql.Datatype.STRING
+ case "http://www.w3.org/2001/XMLSchema#integer" => sql.Datatype.INTEGER
+ case "http://www.w3.org/2001/XMLSchema#date" => sql.Datatype.DATE
+ case _ => error("unable to translate to RDF literal SQL: \"" + lit + "\"^^<" + dt + ">")
+ }
+ }, lit)
+ }
+ sqlexpr(sql.PrimaryExpressionAttr(l), r)
+ }
+
+ def filter2expr(varmap:Map[sparql.Assignable, SQL2RDFValueMapper], f:sparql.PrimaryExpression):sql.RelationalExpression = {
val (lTerm:sparql.Term, rTerm:sparql.Term, sqlexpr) = f match { // sqlexpr::((sql.RelVarAttr,sql.PrimaryExpressionAttr)=>sql.RelationalExpression)
case sparql.PrimaryExpressionEq(l, r) => (l.term, r.term, sql.RelationalExpressionEq(_,_))
case sparql.PrimaryExpressionLt(l, r) => (l.term, r.term, sql.RelationalExpressionLt(_,_))
@@ -372,25 +537,8 @@
// does not handle FILTER (<x> = ?v)
case sparql.TermUri(obj) => error("only SPARQL PrimaryExpressions with a variable on the left have been implemented: punting on " + f)
// FILTER (?v = <x> && ?v = ?x && ?v = 7)
- case sparql.TermVar(v) => { // :sparql.Var
- val l = varToAttribute(varmap, v)
- val r = rTerm match {
- case sparql.TermUri(u) => error("not implemented: translating RDF URI to SQL: " + u) // :sparql.Uri
- case sparql.TermVar(v) => { // :sparql.Var
- sql.PrimaryExpressionAttr(varToAttribute(varmap, v))
- }
- case sparql.TermLit(sparql.Literal(rdf.RDFLiteral(lit,rdf.Datatype(dt)))) =>
- sql.PrimaryExpressionTyped({
- dt.toString match {
- case "http://www.w3.org/2001/XMLSchema#string" => sql.Datatype.STRING
- case "http://www.w3.org/2001/XMLSchema#integer" => sql.Datatype.INTEGER
- case "http://www.w3.org/2001/XMLSchema#date" => sql.Datatype.DATE
- case _ => error("unable to translate to RDF literal SQL: \"" + lit + "\"^^<" + dt + ">")
- }
- }, lit)
- }
- sqlexpr(sql.PrimaryExpressionAttr(l), r)
- }
+ case sparql.TermVar(v) => assignable2expr(varmap, varToAttribute(varmap, sparql.VarAssignable(v)), rTerm, sqlexpr)
+ case sparql.TermBNode(b) => assignable2expr(varmap, varToAttribute(varmap, sparql.BNodeAssignable(b)), rTerm, sqlexpr)
// does not handle FILTER (7 = ?v)
case sparql.TermLit(lit) => error("only SPARQL PrimaryExpressions with a variable on the left have been implemented: punting on " + f)
}
@@ -400,15 +548,15 @@
* outer varmap/expressions.
 * <outerState> could be <myState> -- splitting roles could make proofs easier?
*/
- def subselectVars(myState:R2RState, v:sparql.Var, optionalAlias:sql.RelVar,
+ def subselectVars(myState:R2RState, v:sparql.Assignable, optionalAlias:sql.RelVar,
optionalCond:sql.RelationalExpression,
- outerVarmap:Map[sparql.Var, SQL2RDFValueMapper],
- nestedVarmap:Map[sparql.Var, SQL2RDFValueMapper],
+ outerVarmap:Map[sparql.Assignable, SQL2RDFValueMapper],
+ nestedVarmap:Map[sparql.Assignable, SQL2RDFValueMapper],
isOpt:Boolean):R2RState = {
val varAliasAttr = sql.RelVarAttr(optionalAlias, sql.Attribute(attrAliasNameFromVar(v)))
if (myState.varmap.contains(v)) {
/* The variable has already been bound. */
- val newMap:Map[sparql.Var, SQL2RDFValueMapper] = if (varToAttribute(myState.varmap, v) == varAliasAttr) {
+ val newMap:Map[sparql.Assignable, SQL2RDFValueMapper] = if (varToAttribute(myState.varmap, v) == varAliasAttr) {
/* Same var was bound earlier. */
Map(v -> { myState.varmap(v) match {
case IntMapper(binding) => IntMapper(addExpr(binding, varAliasAttr, optionalCond))
@@ -461,7 +609,7 @@
val initDisjoints:Set[sql.Select] = Set()
val emptyState = R2RState(
util.AddOrderedSet[sql.Join](),
- Map[sparql.Var, SQL2RDFValueMapper](),
+ Map[sparql.Assignable, SQL2RDFValueMapper](),
Set[sql.Expression]()
)
@@ -473,7 +621,7 @@
sql.AttrAlias(sql.Name("_DISJOINT_")))
val leftJoinVars = gp.findVars.toList
val attrlist:Set[sql.NamedAttribute] = leftJoinVars.foldLeft(Set(disjointNo))((attrs, v) =>
- attrs ++ Set(sql.NamedAttribute(varToAttribute(optionalState.varmap, v), sql.AttrAlias(attrAliasNameFromVar(v))))
+ attrs ++ Set(sql.NamedAttribute(varToAttribute(optionalState.varmap, sparql.VarAssignable(v)), sql.AttrAlias(attrAliasNameFromVar(sparql.VarAssignable(v)))))
)
val subselect = sql.Select(
sql.AttributeList(attrlist),
@@ -499,7 +647,7 @@
R2RState(initState.joins,
initState.varmap,
Set[sql.Expression]()))((myState, v) =>
- subselectVars(myState, v, leftJoinAlias, optionalCond,
+ subselectVars(myState, sparql.VarAssignable(v), leftJoinAlias, optionalCond,
initState.varmap, optionalState.varmap, true))
/* The final state includes the subselect as a join, the variables bound
@@ -540,7 +688,7 @@
case sparql.TriplesBlock(triplepatterns) => {
/* Examine each triple, updating the compilation state. */
val state2 = triplepatterns.foldLeft(state)((incState,s) => bindOnPredicate(db, incState, s, enforceForeignKeys))
- val nullExprs = gp.findVars.foldLeft(Set[sql.Expression]())((s, vvar) => {
+ val nullExprs = gp.findAssignables.foldLeft(Set[sql.Expression]())((s, vvar) => {
if (varToAttributeDisjoints(state2.varmap, vvar).size == 0)
s ++ Set(sql.RelationalExpressionNotNull(sql.PrimaryExpressionAttr(varToAttribute(state2.varmap, vvar))))
else
@@ -558,7 +706,7 @@
val unionAlias = sql.RelVar(sql.Name("G_union" + state.joins.size))
val emptyState = R2RState(
util.AddOrderedSet[sql.Join](),
- Map[sparql.Var, SQL2RDFValueMapper](),
+ Map[sparql.Assignable, SQL2RDFValueMapper](),
Set[sql.Expression]()
)
val unionVars = disjoints.foldLeft(Set[sparql.Var]())((mySet,disjoint) =>
@@ -575,8 +723,8 @@
sql.AttrAlias(sql.Name("_DISJOINT_")))
val attrlist:Set[sql.NamedAttribute] = unionVars.foldLeft(Set(disjointNo))((attrs, v) => {
- val attrOrNull = if (disjointState.varmap.contains(v)) varToAttribute(disjointState.varmap, v) else sql.ConstNULL()
- attrs ++ Set(sql.NamedAttribute(attrOrNull, sql.AttrAlias(attrAliasNameFromVar(v))))
+ val attrOrNull = if (disjointState.varmap.contains(sparql.VarAssignable(v))) varToAttribute(disjointState.varmap, sparql.VarAssignable(v)) else sql.ConstNULL()
+ attrs ++ Set(sql.NamedAttribute(attrOrNull, sql.AttrAlias(attrAliasNameFromVar(sparql.VarAssignable(v)))))
})
val subselect = sql.Select(
@@ -608,7 +756,7 @@
val disjointCond = sql.RelationalExpressionNe(sql.PrimaryExpressionAttr(sql.RelVarAttr(unionAlias, sql.Attribute(sql.Name("_DISJOINT_")))),
sql.PrimaryExpressionTyped(sql.Datatype.INTEGER,sql.Name("" + no)))
val outerState2 = disjointVars.foldLeft(outerState)((myState, v) =>
- subselectVars(myState, v, unionAlias, disjointCond, outerState.varmap, disjointState.varmap, false))
+ subselectVars(myState, sparql.VarAssignable(v), unionAlias, disjointCond, outerState.varmap, disjointState.varmap, false))
(outerState2, no+1)
})
val subselect = sql.Subselect(sql.Union(subselects))
@@ -645,7 +793,7 @@
/* Create an object to hold our compilation state. */
val initState = R2RState(
util.AddOrderedSet[sql.Join](),
- Map[sparql.Var, SQL2RDFValueMapper](),
+ Map[sparql.Assignable, SQL2RDFValueMapper](),
Set[sql.Expression]()
)
@@ -655,9 +803,9 @@
* in the SPARQL SELECT. */
val attrlist:Set[sql.NamedAttribute] = attrs.attributelist.foldLeft(Set[sql.NamedAttribute]())((attrs, v) =>
attrs + sql.NamedAttribute({
- if (concat) varToConcat(r2rState.varmap, v, stem)
- else varToAttribute(r2rState.varmap, v)
- } , sql.AttrAlias(attrAliasNameFromVar(v))
+ if (concat) varToConcat(r2rState.varmap, sparql.VarAssignable(v), stem)
+ else varToAttribute(r2rState.varmap, sparql.VarAssignable(v))
+ } , sql.AttrAlias(attrAliasNameFromVar(sparql.VarAssignable(v)))
))
/* Construct the generated query as an abstract syntax. */