--- a/directmapping/src/main/scala/DirectMapping.scala Sat Feb 12 16:10:50 2011 -0500
+++ b/directmapping/src/main/scala/DirectMapping.scala Sat Feb 12 16:41:13 2011 -0500
@@ -17,7 +17,7 @@
*/
type KeyMap = PartialFunction[CandidateKey, PartialFunction[List[CellValue], Node]]
- def KeyMap(r:Relation):KeyMap = {
+ def keyMapForRelation(r:Relation):KeyMap = {
def ++(m:Map[CandidateKey, Map[List[CellValue], Node]],
pairs:List[(CandidateKey, List[CellValue])],
n:Node):Map[CandidateKey, Map[List[CellValue], Node]] = {
@@ -39,9 +39,9 @@
type NodeMap = PartialFunction[RelName, KeyMap]
- def NodeMap(db:Database):NodeMap = {
+ def dbToNodeMap(db:Database):NodeMap = {
val idxables = db.keySet filter { rn => db(rn).candidates nonEmpty }
- idxables map { rn:RelName => rn -> KeyMap(db(rn)) } toMap
+ idxables map { rn => rn -> keyMapForRelation(db(rn)) } toMap
}
/**
@@ -64,8 +64,8 @@
val s:Node =
r.pk match {
case Some(pk) =>
- /** Table has a primkary key. */
- NodeIRI(nodemap(r.name, pk.attrs, t.notNullLexvalues(pk)))
+          /** Table has a primary key. */
+ NodeIRI(iri(r, pk, t.notNullLexvalues(pk)))
case None =>
            /** Table has no primary key (but has some candidate keys). */
NodeBNode(freshbnode())
@@ -75,7 +75,7 @@
/** The triples-generating functions start with databasemap: */
def directDB (db:Database) : Graph = {
- val nodeMap = NodeMap(db)
+ val nodeMap = dbToNodeMap(db)
Graph(db.keySet flatMap { (rn:RelName) => directR(db(rn), nodeMap, db) })
}
@@ -106,7 +106,7 @@
( scalars(t, r) flatMap { directL(r.name, s, _, r.header, t) } ) +
Triple(SubjectNode(s),
PredicateIRI(IRI("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")),
- ObjectNode(NodeIRI(IRI(UE(relName2string(r.name))))))
+ ObjectNode(NodeIRI(IRI(UE(r)))))
}
// should be done by BNode
@@ -144,18 +144,20 @@
Triple(SubjectNode(s), PredicateIRI(p), o)
}
- // These implicits make nodemap and predicatemap functions prettier.
- implicit def relName2string (rn:RelName) = rn.n
- implicit def attrName2string (rn:AttrName) = rn.n
-
- def nodemap (rn:RelName, as:List[AttrName], ls:List[LexicalValue]) : IRI = {
- val pairs:List[String] = as.zip(ls) map { case (attrName, lexicalValue) => UE(attrName) + "." + UE(lexicalValue.s) }
- IRI(UE(rn) + "/" + pairs.mkString("_") + "#_")
- }
+  // These UE overloads make the iri and predicatemap functions prettier.
+ def UE(s:String):String = s.replaceAll(" ", "+")
+ def UE(rn:RelName):String = UE(rn.n)
+ def UE(r:Relation):String = UE(r.name)
+ def UE(a:AttrName):String = UE(a.n)
def predicatemap (rn:RelName, as:AttrList) : IRI =
IRI(UE(rn) + "#" + as.attrs.mkString("_"))
+ def iri(rn:Relation, as:AttrList, ls:List[LexicalValue]):IRI = {
+ val pairs:List[String] = as.attrs zip ls map { case (attrName, lexicalValue) => UE(attrName) + "." + UE(lexicalValue.s) }
+ IRI(UE(rn) + "/" + pairs.mkString("_") + "#_")
+ }
+
// TODO: aren't they already part of the RDF model?
def XSD (d:Datatype) : IRI =
d match {
@@ -175,7 +177,6 @@
case _ => TypedLiteral(l.s, XSD(d))
}
- def UE (s:String) : String = s.replaceAll(" ", "+")
}
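
A minimal, self-contained sketch of how the new UE overloads and iri(...) compose a subject IRI from a key's attributes and lexical values. The case classes are simplified stand-ins for the project's model types, and iri is narrowed here to take a RelName plus a plain attribute list instead of a Relation and an AttrList, so the names and constructors below are illustrative assumptions rather than the patched code itself.

object IriSketch extends App {
  // Simplified stand-ins (assumptions) for the model types referenced in the patch.
  case class RelName(n: String)
  case class AttrName(n: String)
  case class LexicalValue(s: String)
  case class IRI(s: String)

  // Space-to-plus encoding, as in the new UE(s:String).
  def UE(s: String): String = s.replaceAll(" ", "+")
  def UE(rn: RelName): String = UE(rn.n)
  def UE(a: AttrName): String = UE(a.n)

  // Mirrors the new iri(...): "<relation>/<attr>.<value>[_<attr>.<value>]...#_"
  def iri(rn: RelName, attrs: List[AttrName], ls: List[LexicalValue]): IRI = {
    val pairs = attrs zip ls map { case (a, v) => UE(a) + "." + UE(v.s) }
    IRI(UE(rn) + "/" + pairs.mkString("_") + "#_")
  }

  // A two-attribute candidate key where one lexical value contains a space.
  val subject = iri(RelName("Addresses"),
                    List(AttrName("city"), AttrName("state")),
                    List(LexicalValue("Cambridge MA"), LexicalValue("MA")))
  println(subject.s)  // Addresses/city.Cambridge+MA_state.MA#_
}

Note that UE only handles the space-to-plus case that the patch's replaceAll covers; full percent-encoding of other reserved characters is out of scope here.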