changeset 0:f3fe2188ab5f

Initial port of scardf for Scala 2.8
author Alexandre Bertails <bertails@w3.org>
date Mon, 15 Mar 2010 20:30:12 -0400
parents
children 4319b991f2a3
files .hgignore project/build.properties project/build/Project.scala src/main/scala/net/croz/scardf/Lit.scala src/main/scala/net/croz/scardf/Model.scala src/main/scala/net/croz/scardf/Node.scala src/main/scala/net/croz/scardf/NodeBag.scala src/main/scala/net/croz/scardf/NodeBagConverter.scala src/main/scala/net/croz/scardf/PredicateTree.scala src/main/scala/net/croz/scardf/Prop.scala src/main/scala/net/croz/scardf/PropPath.scala src/main/scala/net/croz/scardf/QVar.scala src/main/scala/net/croz/scardf/RdfList.scala src/main/scala/net/croz/scardf/Res.scala src/main/scala/net/croz/scardf/Sparql.scala src/main/scala/net/croz/scardf/Stmt.scala src/main/scala/net/croz/scardf/Vocabulary.scala src/main/scala/net/croz/scardf/build/Fetcher.scala src/main/scala/net/croz/scardf/query/NTripleHelper.java src/main/scala/net/croz/scardf/query/SparqlQ.scala src/main/scala/net/croz/scardf/query/TripletFactory.scala src/main/scala/net/croz/scardf/util/Log.scala src/test/scala/net/croz/scardf/LitSpec.scala src/test/scala/net/croz/scardf/LocateSpanQuerySpec.scala src/test/scala/net/croz/scardf/NoVarSpec.scala src/test/scala/net/croz/scardf/PropPathSpec.scala src/test/scala/net/croz/scardf/QuerySpecs.scala src/test/scala/net/croz/scardf/ScardfSpecs.scala src/test/scala/net/croz/scardf/example_data.scala
diffstat 29 files changed, 1971 insertions(+), 0 deletions(-) [+]
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/.hgignore	Mon Mar 15 20:30:12 2010 -0400
@@ -0,0 +1,13 @@
+syntax: glob
+target/
+lib_managed/
+src_managed/
+lift_example/
+project/boot/
+.classpath
+.project
+.manager
+*~
+*.class
+*.log
+*\#
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/project/build.properties	Mon Mar 15 20:30:12 2010 -0400
@@ -0,0 +1,9 @@
+#Project properties
+#Thu Feb 25 21:29:24 EST 2010
+project.organization=W3C
+project.name=scardf-2.8
+sbt.version=0.7.1
+project.version=1.0
+def.scala.version=2.7.7
+build.scala.versions=2.8.0.Beta1
+project.initialize=false
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/project/build/Project.scala	Mon Mar 15 20:30:12 2010 -0400
@@ -0,0 +1,15 @@
+import sbt._
+
+class Project(info: ProjectInfo) extends DefaultProject(info) {
+
+  val scalatools = "scala-tools" at "http://scala-tools.org/repo-snapshots"
+
+  val scalatest = "org.scalatest" % "scalatest" % "1.0.1-for-scala-2.8.0.Beta1-with-test-interfaces-0.3-SNAPSHOT" % "test->default"
+  val jena = "com.hp.hpl.jena" % "jena" % "2.6.2"
+  val arq = "com.hp.hpl.jena" % "arq" % "2.8.1"
+  val jodatime = "joda-time" % "joda-time" % "1.6"
+  val commonslogging = "commons-logging" % "commons-logging" % "1.1.1"
+
+  override def compileOptions = super.compileOptions ++ Seq(Unchecked)
+
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/main/scala/net/croz/scardf/Lit.scala	Mon Mar 15 20:30:12 2010 -0400
@@ -0,0 +1,60 @@
+package net.croz.scardf
+
+import com.hp.hpl.jena.rdf.model._
+import org.joda.time.LocalDate
+
+class Lit( val jLiteral: Literal ) extends Node( jLiteral ) {
+  override def rendering: String = {
+    if ( isString )
+      "\"" + asString + "\"" + ( if (lang.isDefined) {"@" + lang.get.code} else "" )
+    else 
+      jLiteral.getLexicalForm
+  }
+  def isString = jLiteral.getValue.getClass == classOf[String]
+
+  val datatype = jLiteral.getDatatype
+  val lang: Option[Lang] = {
+    val langcode = jLiteral.getLanguage
+    if (langcode == "") None else Some( Lang( langcode ) )
+  }
+  
+  override def toString = asString
+}
+
+object Lit {
+  private val jModel = ModelFactory.createDefaultModel
+  private val mapping = scala.collection.mutable.Map[Literal, Lit]()
+
+  def apply( jLiteral: Literal ): Lit = mapping.getOrElseUpdate( jLiteral, new Lit( jLiteral ) )
+  def apply( value: String ): Lit = apply( ResourceFactory.createTypedLiteral( value ) )
+  def apply( value: String, langCode: String ): Lit = apply( jModel.createLiteral( value, langCode ) )
+  def apply( value: String, lang: Lang ): Lit = apply( value, lang.code )
+  def apply( value: Int ): Lit = apply( ResourceFactory.createTypedLiteral( value ) )
+  def apply( value: Boolean ): Lit = apply( ResourceFactory.createTypedLiteral( value ) )
+  def apply( value: Double ): Lit = apply( ResourceFactory.createTypedLiteral( value ) )
+  def apply( value: BigDecimal ): Lit = apply( ResourceFactory.createTypedLiteral( value ) )
+  def apply( value: LocalDate ): Lit = apply( ResourceFactory.createTypedLiteral( value ) )
+  
+  def from( o: Any ): Lit = o match {
+    case None => null
+    case Some( x ) => from( x )
+    case str: String => Lit( str )
+    case i: Int => Lit( i )
+    case b: Boolean => Lit( b )
+    case d: Double => Lit( d )
+    case bd: BigDecimal => Lit( bd )
+    case ld: LocalDate => Lit( ld )
+    case x => throw new RdfConversionException( "Cannot create literal from " + x + ": unknown type" )
+  }
+}
+
+case class RdfConversionException( msg: String ) extends RuntimeException( msg )
+
+case class Lang( val code: String ) {
+  def apply( str: String ) = Lit( str, this )
+}
+
+object Lang {
+  val en = new Lang( "en" )
+  val hr = new Lang( "hr" )
+}
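A minimal usage sketch of the Lit factory above, mirroring the cases exercised by LitSpec further down; the values are arbitrary examples:

  Lit( "example" ).rendering            // "example"  (a plain string literal)
  Lit( "example", Lang.en ).rendering   // "example"@en
  Lit( 1 ).asInt                        // 1
  Lit( true ).rendering                 // true
  Lit from Some( 1.5 )                  // unwraps the Option, yields Lit( 1.5 )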
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/main/scala/net/croz/scardf/Model.scala	Mon Mar 15 20:30:12 2010 -0400
@@ -0,0 +1,151 @@
+package net.croz.scardf
+
+import java.io.StringWriter
+import com.hp.hpl.jena.rdf.model.AnonId
+import com.hp.hpl.jena.rdf.model.ModelFactory
+import com.hp.hpl.jena.rdf.model.Resource
+import com.hp.hpl.jena.rdf.model.RDFList
+import com.hp.hpl.jena.rdf.model.RDFNode
+import com.hp.hpl.jena.rdf.model.Statement
+import com.hp.hpl.jena.rdf.model.Property
+import com.hp.hpl.jena.rdf.model.{Model => JModel}
+import com.hp.hpl.jena.ontology.OntModel
+
+class Model( val jModel: JModel ) extends util.Logging {
+
+  val internalId = Model.rnd.nextLong
+  Model remember this
+
+  var prefix = ""
+  
+  val mapping = scala.collection.mutable.Map[RDFNode, Node]()
+  val stmtMapping = scala.collection.mutable.Map[Statement, Stmt]()
+
+  def this() = this( ModelFactory.createOntologyModel )
+  
+  def regNs( namespaces: Map[String, String] ) = 
+    namespaces foreach { e => jModel.setNsPrefix( e._1, e._2 ) }
+  def regNs( pvMappings: Pair[String, Vocabulary]* ): Unit = 
+    regNs( Map( pvMappings map { p => (p._1, p._2.prefix) }: _* ) )
+    
+  def withPrefix( prefix: String ) = { this.prefix = prefix; this }
+  
+  private def remember( r: Res ) = {
+    //log.info( hashCode + " " + r + " " + mapping )
+    mapping += r.jResource -> r
+    r
+  }
+  
+  def getAnon() = remember( new Res( jModel.createResource( new AnonId() ), this ) )
+  def getAnon( id: String ) = remember( new Res( jModel createResource new AnonId( id ), this ) )
+  
+  def getRes( jRes: Resource ): Res = {
+    //log.info( this + " " + jRes + " " + (mapping get jRes) + " " + mapping )
+    mapping.getOrElseUpdate( jRes, newRes( jRes ) ).asInstanceOf[Res]
+  }
+  
+  def getRes( uri: String ): Res = getRes( jModel.getResource( this.prefix + uri ) )
+  
+  def /( res: Res ) = getRes( res.jResource )
+  
+  private def newRes( jResource: Resource ) = {
+    if ( jResource.canAs( classOf[RDFList] ) )
+      new RdfList( jResource.as( classOf[RDFList] ).asInstanceOf[RDFList], this )
+    else if ( jResource.canAs( classOf[Property] ) )
+      new Prop( jResource.as( classOf[Property] ).asInstanceOf[Property], this )
+    else if ( jResource.isURIResource )
+      new Res( jModel createResource jResource.getURI, this )
+    else
+      getAnon( jResource.getId.getLabelString )
+  }
+  
+  def getProp( jProp: Property ): Prop = 
+    mapping.getOrElseUpdate( jProp, new Prop( jProp, this ) ).asInstanceOf[Prop]
+  def getProp( uri: String ): Prop = getProp( jModel.getProperty( this.prefix + uri ) )
+
+  def getRdfList( jRdfList: RDFList ): RdfList = {
+    //log.info( this, "get list", jRdfList, mapping.get(jRdfList), mapping )
+    val r = mapping.getOrElseUpdate( jRdfList, new RdfList( jRdfList, this ) ).asInstanceOf[RdfList]
+    //log.info( this, jRdfList, mapping.get(jRdfList), mapping )
+    r
+  }
+  
+  def getStatement( jStatement: Statement ): Stmt =
+    stmtMapping.getOrElseUpdate( jStatement, new Stmt( jStatement, this ) )
+
+  def statements = new RichStmtIterator( jModel.listStatements )
+
+  def add( stmt: Stmt ) = jModel add stmt.jStatement
+  
+  def addAll( stmts: List[Stmt] ) = stmts map add
+  
+  def ++( other: Model ) = Model( jModel add other.jModel )
+  
+  def listRes( assignment: Pair[Prop, Any] ) = { 
+    val jp = assignment._1.jProperty
+    val result = assignment._2 match {
+      case n: Node => jModel.listResourcesWithProperty( jp, n.jNode )
+      case o => jModel.listResourcesWithProperty( jp, o )
+    }
+    new RichResIterator( result ) 
+  }
+
+  def local = Model( jModel match {
+    case om: OntModel => om.getBaseModel
+    case m => m
+  } )
+
+  def dump = jModel.write( System.out, "TURTLE" )
+  
+  def dumpAll = jModel match {
+    case om: OntModel => om.writeAll( System.out, "TURTLE", null )
+    case _ => dump
+  }
+  
+  def dumpStatements = for ( s <- statements ) println( s )
+  
+  def dumpedIn( syntax: String ) = {
+    val sw = new StringWriter
+    jModel.write( sw, syntax )
+    sw.toString
+  }
+  
+  def dumped = dumpedIn( "TURTLE" )
+
+  override def equals( that: Any ) = that match {
+    case m: Model => jModel == m.jModel
+    case _ => false
+  }
+  override def hashCode = jModel.hashCode
+  
+  def =~( that: Model ) = jModel isIsomorphicWith that.jModel
+  
+  override def toString = java.lang.Long.toHexString( internalId ) + "/" + jModel.hashCode
+}
+
+object Model {
+  val mapping = scala.collection.mutable.Map[JModel, Model]()
+  val rnd = new java.util.Random()
+  
+  private def remember( m: Model ) = {
+    mapping += m.jModel -> m
+    m
+  }
+
+  def apply(): Model = apply( null )
+  def apply( jm: JModel ): Model = {
+    val rm = if ( jm == null ) new Model 
+             else mapping.getOrElse( jm, new Model( jm ) )
+    rm
+  }
+  def newDefault = new Model( ModelFactory.createDefaultModel )
+  
+  def construct( expr: => Any ) = {
+    implicit val model = new Model
+    expr
+    model
+  }
+  
+  implicit def toRModel( jm: JModel ) = Model( jm )
+  implicit def toJModel( rm: Model ) = rm.jModel
+}
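A sketch of putting a Model together by hand; the example.org URIs below are placeholders, not part of the changeset:

  implicit val m = new Model
  val alice = Res( "http://example.org/#alice" )      // resolved against m
  m.regNs( Map( "ex" -> "http://example.org/#" ) )    // register a namespace prefix
  m.dump                                              // write the model to System.out as Turtle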
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/main/scala/net/croz/scardf/Node.scala	Mon Mar 15 20:30:12 2010 -0400
@@ -0,0 +1,83 @@
+package net.croz.scardf
+
+import com.hp.hpl.jena.rdf.model.Property
+import com.hp.hpl.jena.rdf.model.Resource
+import com.hp.hpl.jena.rdf.model.RDFNode
+import com.hp.hpl.jena.rdf.model.RDFList
+import com.hp.hpl.jena.rdf.model.Literal
+import org.joda.time.LocalDate
+import org.joda.time.DateTime
+import org.joda.time.format.ISODateTimeFormat.date
+import org.joda.time.format.ISODateTimeFormat.dateTime
+
+class Node( val jNode: RDFNode ) {
+
+  def /( p: Prop ): NodeBag = asRes/p
+  def /( pp: PropPath ): NodeBag = asRes/pp
+  def /[T]( converter: NodeConverter[T] ): T = converter( this )
+
+  def isUriResource = jNode.isURIResource
+  def isRes = jNode.isResource
+  def isBlank = jNode.isAnon
+  def isRdfList = jNode.canAs( classOf[RDFList] )
+  def isLit = jNode.isLiteral
+  def isLitOn( lang: Lang ) = 
+    if ( isLit ) asLiteral.getLanguage == lang.code 
+    else false
+  
+  def rendering: String = {
+    if ( isLit ) asLit.rendering
+    else if ( isRes ) asRes.rendering
+    else "?node?"
+  }
+  
+  def lexic = 
+    if ( isLit && asLit.isString ) asString
+    else rendering
+
+  private def asLiteral: Literal = jNode.as( classOf[Literal] ).asInstanceOf[Literal]
+
+  def asRes: Res = Res( jNode.as( classOf[Resource] ).asInstanceOf[Resource] )
+  def asProp: Prop = Prop( jNode.as( classOf[Property] ).asInstanceOf[Property] )
+  def asLit = Lit( asLiteral )
+  def asRdfList: RdfList = RdfList from jNode.as( classOf[RDFList] ).asInstanceOf[RDFList]
+  def asString: String = asLiteral.getString
+  def asBoolean: Boolean = asLiteral.getBoolean
+  def asInt: Int = asLiteral.getInt
+  def asDouble: Double = asLiteral.getDouble
+  def asBigDecimal: BigDecimal = BigDecimal( asLiteral.getLexicalForm )
+  def asLocalDate: LocalDate = date.parseDateTime( asLiteral.getLexicalForm ).toLocalDate
+  def asDateTime: DateTime = dateTime.parseDateTime( asLiteral.getLexicalForm ).toDateTime
+  
+  override def equals( o: Any ) = o match {
+    case that: Node => this.jNode.asNode.sameValueAs( that.jNode.asNode )
+    case _ => false
+  }
+  override def hashCode = jNode.hashCode
+  override def toString = rendering
+}
+
+object Node {
+
+  def apply( jNode: RDFNode ) = wrap( jNode )
+  
+  def from( o: Any ): Node = o match {
+    case n: Node => n
+    case n: RDFNode => wrap( n )
+    case x => Lit from x
+  }
+  
+  private def wrap( jNode: RDFNode ): Node = jNode match {
+    case null        => null
+    case rl: RDFList => RdfList from rl
+    case p: Property => Prop( p )
+    case r: Resource => Res( r )
+    case l: Literal  => Lit( l )
+    case n => throw new RuntimeException( "Unknown type of RDFNode: " + n )
+  }
+  
+  /**
+   * Sorts a list of nodes by their rendering.
+   */
+  def sort( list: List[Node] ): List[Node] = list sortWith { (a, b) => a.rendering < b.rendering }
+}
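A small sketch of Node's wrapping and conversion methods, using literal values only:

  val n: Node = Lit( 42 )
  n.isLit                        // true
  n.asInt                        // 42
  ( Node from "txt" ).asString   // "txt" -- a plain string wrapped as a Lit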
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/main/scala/net/croz/scardf/NodeBag.scala	Mon Mar 15 20:30:12 2010 -0400
@@ -0,0 +1,93 @@
+package net.croz.scardf
+
+object NodeBag {
+  def apply( nodes: Node* ) = new NodeBag( nodes.toList )
+  def apply( nodesIt: Iterator[Node] ) = new NodeBag( nodesIt.toList )
+  implicit def toNodeBag( n: Node ) = new NodeBag( List( n ) )
+  implicit def toNodeBag( nOpt: Option[Node] ) = new NodeBag( nOpt.toList )
+}
+
+/**
+ * An unordered collection of nodes.
+ */
+class NodeBag( val list: List[Node] ) extends Iterable[Node] {
+  
+  /**
+   * Retrieves a node from this bag. It could be any one node contained in the bag.
+   * @throws NoSuchElementException if the bag is empty
+   * @see #nodeOption
+   */
+  def oneNode = list.head
+  
+  /**
+   * Retrieves some node inside this bag, or None if the bag is empty.
+   * @see #oneNode
+   */
+  def nodeOption: Option[Node] = if ( list.isEmpty ) None else Some( oneNode )
+  
+  /**
+   * Retrieves the single node contained in this bag.
+   * @throws RdfTraversalException if the size of this bag is not 1
+   */
+  def singleNode =
+    if ( list.isEmpty ) throw new RdfTraversalException( "Yielded no RDF node" )
+    else if ( list.size > 1 ) throw new RdfTraversalException( "Yielded multiple RDF nodes: " + list )
+    else oneNode 
+
+  /**
+   * Bag of all values of the given predicate, collected from all nodes in this bag.
+   */
+  def /( predicate: Prop ): NodeBag = 
+    new NodeBag( list flatMap { _.asRes.valuesOf( predicate ).toList } )
+  
+  /**
+   * Applies the given converter to this bag, yielding the converted value.
+   */
+  def /[T]( converter: NodeBagConverter[T] ): T = converter( this )
+
+  /**
+   * Filters this bag for nodes that are literals in the given language.
+   */
+  def /( lang: Lang ): NodeBag = this/where( _ isLitOn lang )
+
+  /**
+   * True if all nodes in the bag are convertible to boolean "true".
+   * Returns false for an empty bag.
+   */
+  def ? = this/asBoolean.set == Set( true )
+  
+  /**
+   * Does this bag contain any nodes? Inverse of {@link #isEmpty}
+   */
+  def /? = !list.isEmpty
+  
+  /**
+   * Alias for {@link #singleNode}
+   */
+  def /! = singleNode
+  
+  /**
+   * Simple string representation of the lexical values of all nodes in the bag, separated by spaces.
+   */
+  def % = list.map( _.lexic ).mkString( "", " ", "" )
+  
+  def length = list.length
+  def iterator = list.iterator
+  def contains( n: Node ) = list contains n
+  
+  lazy val sorted = new NodeBag( Node sort list )
+
+  /**
+   * Equal if argument is a NodeBag with the same elements, regardless of order.
+   */
+  override def equals( o: Any ) = o match {
+    case that: NodeBag => this.sorted sameElements that.sorted
+    case _ => false
+  }
+  
+  override lazy val hashCode: Int = this.sorted.hashCode
+  
+  override def toString = list.mkString( "NodeBag(", ", ", ")" )
+}
+
+class RdfTraversalException( msg: String ) extends Exception( msg )
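A sketch of NodeBag traversal, borrowing resources and properties (john, anna, Name, Given, Likes, Spouse) from the test vocabularies defined in example_data.scala:

  anna/Name/Given/asString     // "Anna" -- the single value, via a NodeConverter
  john/Likes/asRes.set         // every resource john likes, as a Set[Res]
  ( john/Spouse ).nodeOption   // Some( node ) if present, None otherwise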
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/main/scala/net/croz/scardf/NodeBagConverter.scala	Mon Mar 15 20:30:12 2010 -0400
@@ -0,0 +1,80 @@
+package net.croz.scardf
+
+import org.joda.time.{ LocalDate, DateTime }
+
+/**
+ * The apply method converts a given node bag into some other object,
+ * using the function given in the constructor.
+ */
+class NodeBagConverter[T]( fn: NodeBag => T ) extends (NodeBag => T) {
+  def apply( bag: NodeBag ) = fn( bag )
+}
+
+/**
+ * Converts a single-node bag to an object of type T (typically a literal value),
+ * given a Node => T function.
+ * Apply method throws an RdfTraversalException if the size of the bag is not 1.
+ */
+class NodeConverter[T]( fn: Node => T )
+extends NodeBagConverter[T]( bag => fn( bag.singleNode ) ) 
+{
+  /**
+   * Constructs another converter which will return None when given an empty bag,
+   * and applies this converter to the single node otherwise.
+   */
+  def option = new NodeBagConverter[Option[T]]( bag => {
+    if ( bag.isEmpty ) None
+    else if ( bag.size > 1 ) throw new RdfTraversalException( "Yielded multiple RDF nodes: " + bag )
+    else Some( this( bag.singleNode ) )
+  } )
+
+  /**
+   * Constructs another converter which will return the given default value for an empty bag,
+   * and applies this converter to the single node otherwise.
+   */
+  def default( defaultValue: T ) = new NodeBagConverter[T]( bag => option( bag ) getOrElse defaultValue )
+
+  /**
+   * Constructs another bag converter which will return an Iterable over all values
+   * converted from the nodes in the given bag using this node converter.
+   */
+  def iterable = new NodeBagConverter[ Iterable[T] ]( _.map { this( _ ) } )
+
+  /**
+   * Constructs another bag converter which will return a set of all values converted from
+   * the nodes in the given bag using this node converter.
+   */
+  def set = new NodeBagConverter[ Set[T] ]( bag => Set.empty ++ bag.map { this( _ ) } )
+}
+
+object asRes extends NodeConverter[Res]( _.asRes )
+object asProp extends NodeConverter[Prop]( _.asProp )
+object asLit extends NodeConverter[Lit]( _.asLit )
+object asString extends NodeConverter[String]( _.asString )
+object asBoolean extends NodeConverter[Boolean]( _.asBoolean )
+object asInt extends NodeConverter[Int]( _.asInt )
+object asDouble extends NodeConverter[Double]( _.asDouble )
+object asBigDecimal extends NodeConverter[BigDecimal]( _.asBigDecimal )
+object asLocalDate extends NodeConverter[LocalDate]( _.asLocalDate )
+object asDateTime extends NodeConverter[DateTime]( _.asDateTime )
+
+/** Converts a single node to an RdfList. */
+object asRdfList extends NodeConverter[RdfList] ( _.asRdfList )
+
+/** Converts single node to a new bag containing its list elements. */
+object asRdfListBag extends NodeConverter[NodeBag] ( _.asRdfList.toNodeBag )
+
+/**
+ * Converts a node bag to another node bag by filtering its nodes.
+ */
+class NodeBagFilter( ffn: Node => Boolean )
+extends NodeBagConverter[NodeBag]( bag => new NodeBag( bag.list filter ffn ) )
+
+/**
+ * Factory object for bag filters.
+ * @see NodeBagFilter
+ */
+object where {
+  def apply( ffn: Node => Boolean ) = new NodeBagFilter( ffn )
+  def apply( assignment: Pair[ Prop, Any ] ) = new NodeBagFilter( _.asRes has assignment )
+}
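A sketch of how the converters compose, again with names from the test vocabularies; option, default and set are defined on NodeConverter above:

  jane/Name/Given/asString.option   // Some( "Jane" ), or None for an empty bag
  jane/Height/asInt.default( 0 )    // the converted value, or 0 when no node is present
  john/Likes/where( _.isRes )       // a filtered NodeBag keeping only resource nodes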
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/main/scala/net/croz/scardf/PredicateTree.scala	Mon Mar 15 20:30:12 2010 -0400
@@ -0,0 +1,125 @@
+package net.croz.scardf
+
+object PredicateTree {
+  
+  val empty = new PredicateTree( Map() )
+  
+  def apply() = empty
+  def apply( pairs: Pair[Prop, PredicateTree]* ) = new PredicateTree( Map( pairs: _* ) )
+  def apply( p: Prop ): PredicateTree = toPredicateTree( p )
+  def apply( pp: PropPath ): PredicateTree = toPredicateTree( pp )
+  def apply( head: PredicateConstruct, tail: PredicateConstruct* ): PredicateTree =
+    ( head :: tail.toList ).map{ from( _ ) }.foldLeft( empty )(_++_)
+
+  def from( pc: PredicateConstruct ) = pc match {
+    case t: PredicateTree => t
+    case pp: PropPath => toPredicateTree( pp )
+    case p: Prop => toPredicateTree( p )
+  }
+  
+  //def strip( anchor: Res ) = 
+  
+  implicit def toPredicateTree( p: Prop ) = new PredicateTree( Map( p -> empty ) )
+  
+  implicit def toPredicateTree( pp: PropPath ): PredicateTree = prependTree( pp, empty )
+  
+  def prependTree( path: PropPath, endTree: PredicateTree ): PredicateTree = PredicateTree( 
+    path.head -> (
+      if ( path.size > 1 ) prependTree( path drop 1, endTree )
+      else endTree
+    )
+  )
+}
+
+/**
+ * A predicate tree is a mapping from properties (URI references) to other (possibly empty) predicate trees.
+ * It should not contain itself, either directly or through contained predicate trees,
+ * as this would lead to infinite loops in its methods.
+ */
+case class PredicateTree( branches: Map[Prop, PredicateTree] ) extends PredicateConstruct {
+  
+  def ++( other: PredicateTree ): PredicateTree = new PredicateTree( {
+    val mergedMap = scala.collection.mutable.Map[Prop, PredicateTree]()
+    mergedMap ++= branches
+    for ( (predicate, subtree) <- other.branches )
+      (branches get predicate) match {
+        case Some( existingTree ) => mergedMap( predicate ) = existingTree ++ subtree
+        case None => mergedMap( predicate ) = subtree
+      }
+    Map.empty ++ mergedMap
+  } )
+  
+  def --( other: PredicateTree ): PredicateTree = new PredicateTree( {
+    val remainMap = scala.collection.mutable.Map[Prop, PredicateTree]()
+    remainMap ++= branches
+    for ( (predicate, subtree) <- other.branches )
+      (branches get predicate) match {
+        case Some( PredicateTree.empty ) => remainMap remove predicate
+        case Some( existingTree ) => remainMap( predicate ) = existingTree -- subtree
+        case None => None
+      }
+    Map.empty ++ remainMap
+  } )
+  
+  def subtree( path: PropPath ): PredicateTree = 
+    if ( path.isEmpty ) this
+    else branches( path.head ).subtree( path drop 1 )
+  
+  def subMerge( path: PropPath, other: PredicateTree ) =
+    //TODO FIX! this does not return the whole graph!
+    subtree( path ) ++ other
+
+  /**
+   * Given a predicate tree T and an RDF graph G with a node N, a subgraph may be constructed with
+   * all triples from G in the form of (N, P, X), where P is any URI reference in the domain of T,
+   * and then by adding more statements recursively for each X with a predicate tree mapped to P in T.
+   */
+  def growFrom( r: Res ) = growIn( r, new Model )
+  private def growIn( root: Res, m: Model ): Res = {
+    val seed = root in m
+    for ( (predicate, subtree) <- branches ) {
+      val newValues = root/predicate map { n: Node => n match {
+        case res: Res => subtree.growIn( res, m )
+        case n => n
+      } }
+      newValues foreach { a: Any => m add seed( predicate -> a ) }
+    }
+    seed
+  }
+  
+  def growTemplateFrom( anchor: Res ) = {
+    val m = new Model()
+    val seed = anchor in m
+    for ( (predicate, subtree) <- branches )
+      m add seed( predicate -> subtree.growTemplateIn( m ) )
+    seed
+  }
+  
+  /**
+   * A template graph is an RDF graph containing only blank and literal nodes,
+   * with one blank node singled out as an anchor.
+   */
+  def growTemplate = growTemplateIn( new Model )
+  
+  private def growTemplateIn( m: Model ): Res = {
+    val seed = m.getAnon
+    for ( (predicate, subtree) <- branches )
+      m add seed( predicate -> subtree.growTemplateIn( m ) )
+    seed
+  }
+  
+  def isEmpty = branches.isEmpty
+  def size = branches.size
+  
+  override def toString = {
+    val bStrs = for ( branch <- branches ) yield {
+      val subtree = branch._2
+      branch._1.uri + (
+        if ( subtree.isEmpty ) "" 
+        else if ( subtree.size == 1 ) " - " + subtree
+        else " -< " + subtree + " >"
+      )
+    }
+    bStrs.mkString( "", "; ", "" )
+  }
+}
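A sketch of building a predicate tree and extracting a subgraph with it, using the same properties as PropPathSpec below:

  val ptree = Spouse~( Name~Given, IsMale )   // spouse's given name and gender
  val extracted = ptree growFrom john         // anchor of a new Model holding just those statements
  val template  = ptree.growTemplate          // anchor of an all-blank template graph of the same shape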
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/main/scala/net/croz/scardf/Prop.scala	Mon Mar 15 20:30:12 2010 -0400
@@ -0,0 +1,50 @@
+package net.croz.scardf
+
+import com.hp.hpl.jena.rdf.model.{Model => JModel}
+import com.hp.hpl.jena.rdf.model.Property
+import com.hp.hpl.jena.datatypes.RDFDatatype
+import com.hp.hpl.jena.datatypes.TypeMapper
+
+class Prop( val jProperty: Property, m: Model ) extends Res( jProperty, m ) with PredicateChain
+{
+  def this( jProperty: Property ) = this( jProperty, Model( jProperty.getModel ) )
+  def of( res: Res ) = res/this/!
+  def ? = new NodeConverter[Boolean]( x => (x/this/!).asBoolean )
+  //def ->>( values: Any* ) = for ( v <- values ) yield (this, v)
+  def ~( p: Prop ) = PropPath( this, p )
+  def ~( subtrees: PredicateTree* ) = PredicateTree( this -> subtrees.reduceLeft( _ ++ _ ) )
+//  def ~( reqs: PredicateTree* )( opts: PredicateTree* ) = OptPredicateTree( reqs:_* )( opts:_* )
+
+  def apply( node: Node ): Node = node.asRes/this/!
+  def update( res: Res, value: Any ) = res state this -> value
+    
+  override def assign( prop: Prop, value: Any ): Prop = {
+    super.assign( prop, value )
+    this
+  }
+  
+  override def in( m: Model ) = m getProp jProperty
+  def withRange( r: Res ) = { assign( RDFS.range, r ); this }
+
+  def datatype: Option[RDFDatatype] = {
+    val ranges = this/RDFS.range
+    if ( !ranges.isEmpty ) {
+      val range = ranges.oneNode.asRes
+      if ( range isOfType RDFS.Datatype ) {
+        val dtype = TypeMapper.getInstance.getTypeByName( range.uri )
+        if ( dtype != null ) return Some( dtype )
+      }
+    }
+    None
+  }
+}
+
+object Prop {
+  def apply( uri: String )( implicit rmodel: Model ) = rmodel getProp uri
+  def apply( p: Property ): Prop = apply( p, Model( p.getModel ) )
+  def apply( p: Property, m: Model ): Prop = m getProp p
+
+  implicit def toProp( p: Property ): Prop = Prop( p )
+  implicit def toPropAnyPair( pa: Pair[Property, Any] ): Pair[Prop, Any] = (toProp( pa._1 ), pa._2)
+  implicit def toJProperty( p: Prop ): Property = p.jProperty
+}
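A sketch of the Prop API above; the namespace and property names are illustrative only:

  implicit val m = new Model
  val name = Prop( "http://example.org/#name" ) withRange XSD.string
  val r = Res( "http://example.org/#someone" )
  name( r ) = "Alice"       // sugar for r.state( name -> "Alice" )
  name of r                 // the single value of name on r, as a Node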
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/main/scala/net/croz/scardf/PropPath.scala	Mon Mar 15 20:30:12 2010 -0400
@@ -0,0 +1,41 @@
+package net.croz.scardf
+
+trait PredicateConstruct
+  
+object PropPath {
+  def apply( props: Prop* ) = new PropPath( props.toList )
+  implicit def toPropPath( prop: Prop ) = PropPath( prop )
+}
+
+/**
+ * List of properties forming a path over resources in a graph,
+ * a.k.a. a predicate chain.
+ */
+case class PropPath( propList: List[Prop] ) extends Seq[Prop] with PredicateChain {
+  def ~( p: Prop ) = new PropPath( propList ::: List( p ) )
+  def ~( pp: PropPath ) = new PropPath( propList ::: pp.propList )
+  def ~( t: PredicateTree ) = PredicateTree.prependTree( this, t )
+  def ~( t: PredicateTree, moretrees: PredicateTree* ): PredicateTree = 
+    this ~ moretrees.foldLeft( t )( _ ++ _ )
+
+  def prepend( other: PropPath ) = PropPath( other.propList ::: propList )
+  def subpath( start: Int, finish: Int ) = PropPath( propList.slice( start, finish ).toList )
+  override def drop(n: Int): PropPath = PropPath( propList drop n )
+  
+  def of( res: Res ) = apply( res )
+  def apply( node: Node ) = node.asRes/this/!
+  def update( res: Res, value: Any ) = if (!isEmpty)
+    res/subpath( 0, propList.size-1 )/asRes state propList.last -> value
+  
+  val length = propList.length
+  def iterator = propList.iterator
+  def apply( index: Int ) = propList( index )
+  
+  override def toString = propList.mkString( "PChain( ", ", ", ")" )
+
+}
+
+trait PredicateChain extends PredicateConstruct {
+  def apply( node: Node ): Node
+  def update( res: Res, value: Any )
+}
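A sketch of PropPath in use, matching the PropPathSpec tests below:

  val pp = Name~Given            // a two-step path
  anna/pp/asString               // "Anna"
  pp( anna )                     // the same value, as a Node
  ( Spouse~Likes ) of john       // Swimming, two links away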
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/main/scala/net/croz/scardf/QVar.scala	Mon Mar 15 20:30:12 2010 -0400
@@ -0,0 +1,33 @@
+package net.croz.scardf
+
+object QVar {
+  //TODO synchronize counter!
+  private var last = 0
+  def next = { last += 1; last }
+  
+  def apply() = new NumQVar
+  def apply( name: String ) = new QVar( name )
+  
+  def rendering( n: String ) = "?" + n
+  
+  implicit def toQVar( s: Symbol ) = new QVar( s.name )
+}
+
+/**
+ * An object representing a variable in queries.
+ * Two variables are equal if their names are equal.
+ */
+class QVar( val name: String ) {
+
+  override def equals( o: Any ) = o match {
+    case that: QVar => this.name == that.name
+    case _ => false
+  }
+  override def hashCode = name.hashCode
+
+  override val toString = "?" + name
+}
+
+class NumQVar extends QVar( "v" + QVar.next )
+
+object X extends QVar( "X" )
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/main/scala/net/croz/scardf/RdfList.scala	Mon Mar 15 20:30:12 2010 -0400
@@ -0,0 +1,31 @@
+package net.croz.scardf
+
+import com.hp.hpl.jena.rdf.model.RDFNode
+import com.hp.hpl.jena.rdf.model.RDFList
+import scala.collection.JavaConversions.JListWrapper
+
+class RdfList( val jRdfList: RDFList, override val model: Model ) extends Res( jRdfList, model )
+with scala.Seq[Node] with util.Logging
+{
+  def toNodeBag: NodeBag = new NodeBag( iterator.toList )
+
+  def jlist: List[RDFNode] = 
+    JListWrapper(jRdfList.asJavaList.asInstanceOf[java.util.List[RDFNode]]).toList
+
+  def length = jRdfList.size
+  def iterator: Iterator[Node] = jlist.map{ n: RDFNode => Node( n ) }.iterator
+  def apply( i: Int ) = Node( jRdfList.get(i) )
+}
+
+object RdfList {
+  def from( l: RDFList ): RdfList = Model( l.getModel ) getRdfList l
+
+  def from( c: Iterable[Any] )( implicit model: Model ): RdfList = 
+    apply( c.toArray: _* )( model )
+  
+  def apply( nodes: Any* )( implicit model: Model ) = {
+    val jNodes = nodes map { Node from _ jNode }
+    val jList = model createList jNodes.toArray
+    model getRdfList jList
+  }
+}
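A sketch of building and reading an rdf:List with the factory above; the element values are arbitrary:

  implicit val m = new Model
  val l = RdfList( 1, 2, 3 )   // each element is wrapped with Node.from
  l.length                     // 3
  l( 0 ).asInt                 // 1
  l.toNodeBag                  // the elements as a NodeBag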
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/main/scala/net/croz/scardf/Res.scala	Mon Mar 15 20:30:12 2010 -0400
@@ -0,0 +1,179 @@
+package net.croz.scardf
+
+import org.joda.time.LocalDate
+import org.joda.time.DateTime
+import org.joda.time.format.ISODateTimeFormat
+import com.hp.hpl.jena.rdf.model.{ RDFNode, Resource, ResIterator }
+import com.hp.hpl.jena.datatypes.xsd.XSDDatatype._
+import scala.collection.mutable.{Set => MSet}
+
+class Res( val jResource: Resource, val model: Model ) 
+extends Node( jResource ) with util.Logging {
+
+  val uri = jResource.getURI
+
+  def apply( a: (Prop, Any) ) = Stmt( this, a._1, a._2 )
+  def apply( assignments: (Prop, Any)* ) =
+    for ( a <- assignments ) yield Stmt( this, a._1, a._2 )
+  
+  def apply( m: Model ) = this in m
+
+  def has( assignment: (Prop, Any) ) = assignment match {
+    case (p, None) => !valuesOf( p ).hasNext
+    case _ => this( assignment )?
+  }
+  
+  override def /( p: Prop ): NodeBag = new NodeBag( valuesOf( p ).toList )
+  override def /( pp: PropPath ): NodeBag = pp.foldLeft( NodeBag( this ) ){ _/_ }
+
+  def valueOf( p: Prop ): Option[Node] = {
+    if ( jResource hasProperty p.jProperty ) 
+      Some( Node( jResource getProperty p.jProperty getObject ) ) 
+    else
+      None
+  }
+
+  def valuesOf( p: Prop ): Iterator[Node] = {
+    log.debug( this + " valuesof " + p + " = " + (jResource listProperties p.jProperty).toList )
+    new RichStmtIterator( jResource listProperties p.jProperty ) map {_.o}
+  }
+
+  def in( m: Model ) = m getRes jResource
+  def local = this in model.local
+  
+  def a( rdfClass: Res ) = state( RDF.Type -> rdfClass )
+  def an( rdfClass: Res ) = a( rdfClass )
+
+  def state( assignments: (Prop, Any)* ) = { 
+    assignments foreach { a => assign( a._1, a._2 ) }
+    this
+  }
+  
+  /**
+   * Assigns the given value to the given property of this resource,
+   * interpreting the value as follows:
+   * <ul>
+   *   <li>a Scardf or Jena node - assign the node itself</li>
+   *   <li>an All object - assign each value contained in it</li>
+   *   <li>an Option - assign the value if defined, skip if None</li>
+   *   <li>a tuple - assign each tuple member</li>
+   *   <li>a String - assign it using the property's declared datatype, or xsd:string if none is given</li>
+   *   <li>a Boolean - assign it as an xsd:boolean literal</li>
+   *   <li>an Int - assign it as an xsd:int literal</li>
+   *   <li>a LocalDate - assign it as an xsd:date literal</li>
+   *   <li>a DateTime - assign it as an xsd:dateTime literal</li>
+   *   <li>otherwise, throw a RuntimeException</li>
+   * </ul>
+   */
+  def assign( prop: Prop, value: Any ): Res = {
+    value match {
+      case n: Node      => jResource.addProperty( prop, n.jNode )
+      case jn: RDFNode  => jResource.addProperty( prop, jn )
+      case all: All     => for ( n <- all.nodes ) assign( prop, n )
+      case Some( x )    => assign( prop, x )
+      case None         => // ignore assignment
+      case (a, b)       => assign( prop, a ); assign( prop, b )
+      case (a, b, c)    => assign( prop, a ); assign( prop, b ); assign( prop, c )
+      case (a, b, c, d) => assign( prop, All( a, b, c, d ) )
+      case s: String    => jResource.addProperty( prop, s, prop.datatype.getOrElse( XSDstring ) )
+      case b: Boolean   => jResource.addProperty( prop, b.toString, XSDboolean )
+      case i: Int       => jResource.addProperty( prop, i.toString, XSDint )
+      case d: LocalDate => jResource.addProperty( prop, d.toString, XSDdate )
+      case d: DateTime  => jResource.addProperty( prop, ISODateTimeFormat.dateTime.print( d ), XSDdateTime )
+      case x            => throw new RuntimeException( x + " of unknown type" )
+    }
+    this
+  }
+  
+  def -( p: Prop ) = ResPropPair( this, p )
+
+  def isOfType( checkType: Res ) = this/RDF.Type contains checkType
+
+  def subgraphed = {
+    val g = new Model
+    val covered = MSet[Res]()
+    spreadTo( g, covered )
+    g getRes this.jResource
+  }
+  
+  def spreadTo( subgraph: Model, covered: MSet[Res] ): Unit = {
+    if ( covered contains this ) {
+      log debug "spreading already covered " + this 
+      return
+    }
+    covered += this
+    log debug "spreading subgraph to " + this
+    val outlinks = new RichStmtIterator(
+      model.local.jModel.listStatements( jResource, null, null: RDFNode )
+    )
+    val connectedNodes = scala.collection.mutable.Set[Node]()
+    for ( s <- outlinks ) {
+      connectedNodes += s.p
+      connectedNodes += s.o
+      subgraph add s
+    }
+    connectedNodes filter { _.isRes } map { _.asRes.spreadTo( subgraph, covered ) }
+  }
+  
+  override def rendering: String =
+    if ( jResource.isAnon ) "_:A" + jResource.getId.getLabelString.replace( ":", "" )
+    else "<" + uri + ">"
+}
+
+object Res {
+  def apply( uri: String )( implicit rmodel: Model ) = rmodel getRes uri
+
+  def apply()( implicit rmodel: Model ) = rmodel.getAnon
+  
+  def apply( r: Resource ): Res = apply( r, Model( r.getModel ) )
+  def apply( r: Resource, m: Model ) = m getRes r
+  
+  implicit def toRes( r: Resource ) = apply( r )
+  implicit def toNodeBag( rri: RichResIterator ) = new NodeBag( rri.toList )
+}
+
+object Anon {
+  def apply( assignments: (Prop, Any)* )( implicit rmodel: Model ) =
+    Res().state( assignments: _* )
+  
+  def apply( rdfClass: Res, assignments: (Prop, Any)* )( implicit rmodel: Model ) = 
+    Res().a( rdfClass ).state( assignments: _* )
+  
+  def apply( id: String )( implicit rmodel: Model ) = rmodel.getAnon( id )
+}
+
+object Blank {
+  val model = new Model
+  
+  def apply( assignments: (Prop, Any)* ): Subgraph = {
+    val subg = new Subgraph
+    assignments.toList map { p => p._2 match {
+      case n: Node => subg += (p._1, n)
+      case sg: Subgraph => subg += (p._1, sg)
+      case x: Any => subg += (p._1, Lit from x)
+    } }
+    subg
+  }
+  
+  implicit def toRModel( sg: Subgraph ) = sg.toModel
+}
+
+case class All( nodes: Any* )
+
+class Subgraph {
+  val root = Blank.model.getAnon
+  var slist = new scala.collection.mutable.ListBuffer[Stmt]()
+  def +=( p: Prop, n: Node ) = slist += Stmt( root, p, n )
+  def +=( p: Prop, sg: Subgraph ) = slist ++= ( Stmt( root, p, sg.root ) :: sg.slist.toList )
+  def toModel = {
+    val rm = new Model
+    rm addAll slist.toList
+    rm
+  }
+}
+
+class RichResIterator( jIterator: ResIterator ) extends Iterator[Res] {
+  override def hasNext = jIterator.hasNext
+  override def next = Res( jIterator.next.asInstanceOf[Resource] )
+}
+
+case class ResPropPair( s: Res, p: Prop )
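A sketch of describing resources with Anon and Res; the class and property URIs below are placeholders:

  implicit val m = new Model
  val Person = Res( "http://example.org/#Person" )
  val name   = Prop( "http://example.org/#name" )
  val knows  = Prop( "http://example.org/#knows" )
  val alice  = Anon( name -> "Alice" ) a Person
  val bob    = Anon( name -> "Bob", knows -> alice )
  bob has ( knows -> alice )    // true
  bob/knows/name/asString       // "Alice"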
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/main/scala/net/croz/scardf/Sparql.scala	Mon Mar 15 20:30:12 2010 -0400
@@ -0,0 +1,87 @@
+package net.croz.scardf
+
+import net.croz.scardf.query._
+import com.hp.hpl.jena.rdf.model.Resource
+import com.hp.hpl.jena.query.{ QuerySolution, ResultSet }
+
+object Sparql {
+  def select( exprs: Any* ) = new SelectQ( exprs )
+  def selectX[T]( c: NodeConverter[T] ) = new SelectOptionQ( c )
+  def selectAllX[T]( c: NodeConverter[T] ) = new SelectIteratorQ( c )
+  
+  def extractRes( r: Res, replaces: Pair[Res, QVar]* ) = new ExtractResQ( r, Map( replaces: _* ) )
+  def extractResList( r: Res, replaces: Pair[Res, QVar]* ) = 
+    new ExtractResListQ( r, Map( replaces: _* ) )
+  def extract( props: Prop* ) = new ExtractQ( props: _* )
+  
+  def ask( triplets: (Any, Any, Any)* ) = new AskQ( triplets: _* )
+  
+  def describe( v: QVar ) = new DescribeQ( v )
+  def descriptionOf( r: Res ) = new DescribeResQ( r ) in r.model
+  
+  def construct( triplets: (Any, Any, Any)* ) = new ConstructQ( triplets: _* )
+  def construct( tempGraph: Model ): ConstructQ = {
+    val triplets = TripletFactory tripletsFrom tempGraph
+    construct( triplets: _* ) where( triplets: _* )
+  }
+  def construct( ptree: PredicateTree ) = new PTreeConstructQ( ptree )
+  
+  def take( expr: Any* ) = new TakeQ( expr: _* )
+}
+
+sealed abstract class QualifiedArguments( val modifier: String, val exprs: Any* )
+case class distinct( override val exprs: Any* ) extends QualifiedArguments( "DISTINCT", exprs )
+case class reduced( override val exprs: Any* ) extends QualifiedArguments( "REDUCED", exprs )
+
+case class QSolution( jSolution: QuerySolution ) {
+  /**
+   * Value of given variable for this solution.
+   * @throws NoSuchElementException if there's no associated value
+   */
+  def apply( v: QVar ): Node = get( v ).get
+  
+  def get( v: QVar ): Option[Node] = get( v.name )
+  
+  /**
+   * Optional value of variable given by its name.
+   */
+  def get( key: String ) = {
+    val solution = jSolution.get( key )
+    if ( solution == null ) None else Some( Node( solution ) )
+  }
+
+  /**
+   * Constructs a map of all variables and their values for this solution.
+   */
+  def toMap: Map[QVar, Node] = {
+    val result = scala.collection.mutable.Map[QVar, Node]()
+    val iterator = jSolution.varNames
+    while ( iterator.hasNext ) {
+      val qvar = QVar( iterator.next.asInstanceOf[String] )
+      get( qvar ) match {
+        case Some( value ) => result += qvar -> value
+        case None => // skip
+      }
+    }
+    Map.empty ++ result
+  }
+}
+
+/**
+ * Iterator over the solutions of a SELECT query result set.
+ */
+case class QResultsIterator( rs: ResultSet ) extends Iterator[QSolution] {
+  def hasNext = rs.hasNext
+  def next = QSolution( rs.nextSolution )
+  
+  /**
+   * Constructs a list of maps of all solutions from this iterator.
+   * @see QSolution#toMap
+   */
+  def solutions = toList map { _.toMap }
+}
+
+abstract class OrderComparator( v: QVar, modifier: String ) {
+  def rendering = modifier + "( " + v + " )"
+}
+case class asc( v: QVar ) extends OrderComparator( v, "ASC" )
+case class desc( v: QVar ) extends OrderComparator( v, "DESC" )
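A sketch of the query DSL entry points above, reusing the variables and test data that appear in LocateSpanQuerySpec below:

  val who = QVar()
  val q = Sparql select( who ) where( (who, Likes, Swimming) ) orderBy( asc( who ) )
  q from FamilyVocabulary.model solutions                           // List of Map[QVar, Node]
  Sparql ask( (john, Likes, Swimming) ) in FamilyVocabulary.model   // Boolean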
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/main/scala/net/croz/scardf/Stmt.scala	Mon Mar 15 20:30:12 2010 -0400
@@ -0,0 +1,24 @@
+package net.croz.scardf
+
+import com.hp.hpl.jena.rdf.model._
+
+case class Stmt( jStatement: Statement, rmodel: Model ) {
+  val s: Res = rmodel getRes jStatement.getSubject
+  val p: Prop = rmodel getProp jStatement.getPredicate
+  val o: Node = Node( jStatement.getObject )
+  
+  def ? : Boolean = rmodel contains jStatement
+}
+
+object Stmt {
+  def apply( s: Res, p: Prop, o: Any ): Stmt =
+    apply( s.model.jModel.createStatement( s.jResource, p.jProperty, (Node from o).jNode ) )
+
+  def apply( js: Statement ): Stmt = Model( js.getModel ).getStatement( js )
+}
+
+class RichStmtIterator( jIterator: StmtIterator ) extends Iterator[Stmt] {
+  override def hasNext = jIterator.hasNext
+  override def next = Stmt( jIterator.next.asInstanceOf[Statement] )
+}
+ 
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/main/scala/net/croz/scardf/Vocabulary.scala	Mon Mar 15 20:30:12 2010 -0400
@@ -0,0 +1,40 @@
+package net.croz.scardf
+
+import com.hp.hpl.jena.rdf.model.Property
+import com.hp.hpl.jena.rdf.model.Resource
+import com.hp.hpl.jena.vocabulary.{RDF => jRDF}
+import com.hp.hpl.jena.vocabulary.{RDFS => jRDFS}
+import com.hp.hpl.jena.vocabulary.{XSD => jXSD}
+
+class Vocabulary( val prefix: String ) {
+  val model = new Model withPrefix prefix
+  
+  def apply( name: String ) = pRes( name )
+  def \( name: String ) = pRes( name )
+  def รท( name: String ) = pRes( name )
+  def ~( name: String ) = pProp( name )  
+  def ^( name: String ) = pProp( name )  
+  def pRes( name: String ) = Res( name )( model )
+  def pProp( name: String ) = Prop( name )( model )
+  def wRes( r: Resource ) = Res( r, model )
+  def wProp( p: Property ) = Prop( p, model )
+}
+
+object RDF extends Vocabulary( jRDF.getURI ) {
+  val Type = wProp( jRDF.`type` )
+  val first = wRes( jRDF.first )
+  val rest = wRes( jRDF.rest )
+}
+
+object RDFS extends Vocabulary( jRDFS.getURI ) {
+  val range = wProp( jRDFS.range )
+  val Datatype = wRes( jRDFS.Datatype )
+}
+
+object XSD extends Vocabulary( jXSD.getURI ) {
+  val string = wRes( jXSD.xstring )
+  val boolean = wRes( jXSD.xboolean )
+  val int = wRes( jXSD.xint )
+  val date = wRes( jXSD.date )
+  val dateTime = wRes( jXSD.dateTime )
+}
\ No newline at end of file
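A sketch of declaring a project vocabulary in the style of the RDF, RDFS and XSD objects above; the FOAF namespace here is only a familiar example:

  object FOAF extends Vocabulary( "http://xmlns.com/foaf/0.1/" ) {
    val name   = pProp( "name" )
    val knows  = pProp( "knows" )
    val Person = pRes( "Person" )
  }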
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/main/scala/net/croz/scardf/build/Fetcher.scala	Mon Mar 15 20:30:12 2010 -0400
@@ -0,0 +1,30 @@
+package net.croz.scardf.build
+
+import net.croz.scardf._
+
+abstract class Fetcher {
+
+  def fetch( in: Res ): Res = {
+    val resultModel = new Model
+    if ( in.isRdfList )
+      fetchList( in.asRdfList, resultModel )
+    else
+      fetch( in, resultModel )
+  }
+
+  def fetchList( inList: RdfList, resultModel: Model ) = {
+    val rlist = new scala.collection.mutable.ListBuffer[Res]()
+    for ( r <- inList )
+      rlist += fetch( r.asRes, resultModel )
+    RdfList( rlist: _* )( resultModel )
+  }
+  
+  def fetch( one: Res, resultModel: Model ): Res
+}
+
+class TakeFetcher( takes: Iterable[_] ) extends Fetcher {
+  def fetch( one: Res, resultModel: Model ): Res = {
+    Sparql take takes to resultModel from one
+    one in resultModel
+  }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/main/scala/net/croz/scardf/query/NTripleHelper.java	Mon Mar 15 20:30:12 2010 -0400
@@ -0,0 +1,14 @@
+package net.croz.scardf.query;
+
+import com.hp.hpl.jena.rdf.model.Resource;
+import com.hp.hpl.jena.rdf.model.impl.NTripleWriter;
+import java.io.*;
+
+class NTripleHelper extends NTripleWriter {
+  public static String ntRendering( Resource r ) {
+    StringWriter sw = new StringWriter();
+    PrintWriter pw = new PrintWriter( sw );
+    NTripleWriter.writeResource( r, pw );
+    return sw.toString();
+  }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/main/scala/net/croz/scardf/query/SparqlQ.scala	Mon Mar 15 20:30:12 2010 -0400
@@ -0,0 +1,222 @@
+package net.croz.scardf.query
+
+import net.croz.scardf._
+import com.hp.hpl.jena.rdf.model.Resource
+import com.hp.hpl.jena.query.{
+  Query, QueryExecution, QueryExecutionFactory, QueryFactory, QueryParseException, QuerySolutionMap
+}
+
+abstract class SparqlQ[+T <: SparqlQ[T]] extends util.Logging {
+  var conditions = new StringBuffer()
+  var optConditions = new StringBuffer()
+  var orderBySeq = new StringBuffer()
+  var upperBound = 0
+  var offsetAmount = 0
+
+  //TODO! move these to BaseSelectQ?
+  
+  def where( triples: (Any, Any, Any)* ): T = appendTriplets( conditions, triples: _* )
+  def optional( triples: (Any, Any, Any)* ): T = appendTriplets( optConditions, triples: _* )
+  
+  def orderBy( comparators: OrderComparator* ): T = {
+    for ( c <- comparators ) orderBySeq append c.rendering + " "
+    this.asInstanceOf[T]
+  }
+  
+  def limit( n: Int ) = {
+    upperBound = n
+    this.asInstanceOf[T]
+  }
+
+  def offset( n: Int ) = {
+    offsetAmount = n    
+    this.asInstanceOf[T]
+  }
+    
+  def appendTriplet( sbuffer: StringBuffer, s: Any, p: Any, o: Any ): T = {
+    val segments = List( s, p, o ) map rendering
+    sbuffer append segments.reduceLeft{ (x: String, y: String) => x + " " + y } + " . "
+    this.asInstanceOf[T]
+  }
+
+  def appendTriplets( sbuffer: StringBuffer, triples: (Any, Any, Any)* ): T = {
+    for ( t <- triples ) appendTriplet( sbuffer, t._1, t._2, t._3 )
+    this.asInstanceOf[T]
+  }
+  
+  def rendering( o: Any ): String = o match {
+    case qa: QualifiedArguments => qa.modifier + " " + rendering( qa.exprs )
+    case seq: Seq[_] => seq map { x: Any => rendering( x ) } mkString " "
+    case n: Node => n.rendering
+    case s: String => "\"" + s + "\""
+    case s: Symbol => QVar.rendering( s.name )
+    case x => x.toString
+  }
+  
+  def optionalConditions = if ( optConditions.length == 0 ) "" 
+                           else " OPTIONAL { " + optConditions + "}"
+  def orderByClause = if ( orderBySeq.length == 0 ) "" else " ORDER BY " + orderBySeq
+  def limitClause = if ( upperBound == 0 ) "" else " LIMIT " + upperBound
+  def offsetClause = if ( offsetAmount == 0 ) "" else " OFFSET " + offsetAmount
+
+  def execution( rmodel: Model, query: String ) = {
+    log.info( "Executing query " + query )
+    try {
+      val q = QueryFactory.create( query )
+      QueryExecutionFactory.create( q, rmodel.jModel, new QuerySolutionMap )
+    }
+    catch {
+      case e: QueryParseException =>
+        throw new RuntimeException( "Failed parsing \"" + query + "\" because of" + e.getMessage, e)
+    }
+  }
+}
+
+class DescribeQ( v: QVar ) extends SparqlQ[DescribeQ] {
+  def from( rmodel: Model ) = descriptionFor( rmodel )
+
+  def descriptionFor( rmodel: Model ) = {
+    val query = "DESCRIBE " + v + " WHERE { " + conditions + "}"
+    Model( execution( rmodel, query ).execDescribe )
+  }
+}
+
+class DescribeResQ( r: Res ) extends SparqlQ[DescribeResQ] {
+  def in( rmodel: Model ) = Model( execution( rmodel, "DESCRIBE " + r ).execDescribe )
+}
+
+abstract class BaseSelectQ[ T <: BaseSelectQ[T] ] extends SparqlQ[T] {
+
+  var selectExprs: List[_] = List()
+  
+  def queryStr = "SELECT " + rendering( selectExprs ) + 
+    " WHERE { " + conditions + optionalConditions + "}" +
+    orderByClause + limitClause + offsetClause
+  
+  def executeOn( model: Model ) = new QResultsIterator( execution( model, queryStr ).execSelect )
+  
+  def option( solutions: QResultsIterator, v: QVar ): Option[Node] = {
+    if ( !solutions.hasNext ) return None
+    val result = Some( solutions.next.get( v ).get )
+    if ( solutions.hasNext ) throw new RuntimeException( "Multiple solutions to " + this )
+    result
+  }
+}
+
+class SelectQ( exprs: Any* ) extends BaseSelectQ[SelectQ] {
+  selectExprs = exprs.toList
+  
+  def this( selectExpr: Any, conds: StringBuffer ) = {
+    this( selectExpr )
+    conditions = conds
+  }
+  
+  def from( model: Model ) = executeOn( model )
+}
+
+class SelectIteratorQ[T]( converter: NodeConverter[T] ) 
+extends BaseSelectQ[SelectIteratorQ[T]] {
+  selectExprs = List( X )
+  def from( model: Model ): Iterator[T] = executeOn( model ) map { _( X )/converter }
+}
+
+class SelectOptionQ[T]( converter: NodeConverter[T] ) extends BaseSelectQ[SelectOptionQ[T]] {
+  selectExprs = List( X )
+  def from( model: Model ): Option[T] = option( executeOn( model ), X ) map { _/converter }
+}
+
+abstract class BaseExtractQ[T <: BaseExtractQ[T]]( val r: Res, replaces: Map[Res, QVar] )
+extends BaseSelectQ[T] {
+  selectExprs = List( distinct( X ) )
+  private var condStr = r.model.dumpedIn( "N-TRIPLE" ).replaceAll( "\\s+", " " )
+  condStr = replaceVar( condStr, r, X )
+  for ( pair <- replaces )
+    condStr = replaceVar( condStr, pair._1, pair._2 )
+  conditions = new StringBuffer( condStr )
+  
+  private def replaceVar( qs: String, r: Res, v: QVar ) =
+    qs.replace( NTripleHelper.ntRendering( r.jResource ), rendering( v ) )
+}
+
+class ExtractResQ( override val r: Res, replaces: Map[Res, QVar] )
+extends BaseExtractQ[ExtractResQ]( r, replaces ) {
+  def from( model: Model ): Option[Res] = option( executeOn( model ), X ) map { _.asRes }
+}
+
+class ExtractResListQ( override val r: Res, replaces: Map[Res, QVar] )
+extends BaseExtractQ[ExtractResListQ]( r, replaces ) {
+  def from( model: Model ): List[Res] = {
+    val l = executeOn( model ).toList
+    l map { _( X ).asRes }
+  }
+}
+
+class ConstructQ( triplets: (Any, Any, Any)* ) extends SparqlQ[ConstructQ] {
+  var constructions = new StringBuffer()
+  appendTriplets( constructions, triplets: _* )
+  
+  def from( rmodel: Model ) = constructionFrom( rmodel )
+  def constructionFrom( rmodel: Model ) = {
+    val query = "CONSTRUCT { " + constructions + "} WHERE { " + conditions + "}"
+    Model( execution( rmodel, query ).execConstruct )
+  }
+}
+
+class PTreeConstructQ( ptree: PredicateTree ) extends SparqlQ[PTreeConstructQ] {
+  def from( anchor: Res ) = {
+    var constructions, required, optionals = new StringBuffer()
+    val allTriplets = TripletFactory tripletsFrom ptree.growTemplateFrom( anchor ).model
+    appendTriplets( constructions, allTriplets: _* )
+    val query = "CONSTRUCT { " + constructions + "} WHERE { " + constructions + "}"
+    val markStart = System.currentTimeMillis
+    val result = Model( execution( anchor.model, query ).execConstruct )
+    log info "Span construction took " + (System.currentTimeMillis - markStart) + " ms"
+    result
+  }
+}
+
+class AskQ( triplets: (Any, Any, Any)* ) extends SparqlQ[AskQ] {
+  where( triplets: _* )
+  def in( model: Model ) = execution( model, "ASK { " + conditions + "}" ).execAsk
+}
+
+class ExtractQ( props: Prop* ) extends SparqlQ[ExtractQ] {
+  def from( focus: Res ) = {
+    val triplets = for ( p <- props ) yield (focus, p, QVar())
+    new ConstructQ( triplets: _* ) where( triplets: _* ) from focus.model
+  }
+}
+
+class TakeQ( exprs: Any* ) extends SparqlQ[TakeQ] with util.Logging {
+  private var putModel: Model = null
+  
+  def to( m: Model ) = { putModel = m; this }
+  
+  private def append( m: Model, subject: Res, predicate: Prop ) = {
+    val objects = subject/predicate
+    for ( o <- objects ) 
+      m add Stmt( subject, predicate, o )
+  }
+  
+  private def appendAll( m: Model, focus: Res, exprs: Iterable[Any] ): Unit =
+    for ( o <- exprs ) o match {
+      case set: Collection[_] => appendAll( m, focus, set.toSeq )
+      case p: Prop => append( m, focus, p )
+      case (predicate:Prop, list:List[Any]) => // generate a compilation warning because of erasure
+        append( m, focus, predicate )
+        for ( r <- focus/predicate ) 
+          appendAll( m, r.asRes, list )
+      case _ => throw new RuntimeException( "Unknown TAKE expression " + o )
+    }
+  
+  def from( focus: Res ) = {
+    log.debug("From " + exprs)
+    val result = if ( putModel == null ) new Model else putModel
+    appendAll( result, focus, exprs.toList )
+    result
+  }
+}
+
+class LocateSpanQ( focus: Res ) {
+  def span( ptree: PredicateTree ) = new SelectQ()
+}
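A sketch of two of the query classes above in action, on the same test data as the specs; TakeFetcher in build/Fetcher.scala drives TakeQ the same way:

  // copy john's Name statements, and his Spouse's Name statements, into a fresh model
  val partial = Sparql take( Name, Spouse -> List( Name ) ) from john
  // build a new graph of everyone who likes Swimming
  val swimmers = Sparql construct( (X, Likes, Swimming) ) where( (X, Likes, Swimming) ) from FamilyVocabulary.model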
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/main/scala/net/croz/scardf/query/TripletFactory.scala	Mon Mar 15 20:30:12 2010 -0400
@@ -0,0 +1,22 @@
+package net.croz.scardf.query
+
+import net.croz.scardf._
+
+object TripletFactory {
+
+  def tripletsFrom( m: Model ) = {
+    val tf = new TripletFactory
+    m.local.statements map{ tf toTriplet _ } toList
+  }
+}
+
+private class TripletFactory {
+  val varMap = scala.collection.mutable.Map[ Res, QVar ]()
+  
+  def replaced( o: Any ) = o match {
+    case n: Res if n.isBlank => varMap.getOrElseUpdate( n, QVar() )
+    case other => other
+  }
+  
+  def toTriplet( stmt: Stmt ) = ( replaced( stmt.s ), stmt.p, replaced( stmt.o ) )
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/main/scala/net/croz/scardf/util/Log.scala	Mon Mar 15 20:30:12 2010 -0400
@@ -0,0 +1,23 @@
+package net.croz.scardf.util
+
+trait Logging {
+  protected[this] val log = new Log( getClass.getName )
+}
+
+import org.apache.commons.logging.LogFactory
+
+class Log( name: String ) {
+  private[this] val jLog = LogFactory.getLog( name )
+  
+  def trace( msg: => Any ) = if ( jLog.isTraceEnabled ) jLog trace msg
+  
+  def debug( msg: => Any ) = if ( jLog.isDebugEnabled ) jLog debug msg
+  
+  def info( msg: => Any ) = if ( jLog.isInfoEnabled ) jLog info msg
+
+  def warn( msg: => Any ): Unit = warn( msg, null )
+  def warn( msg: => Any, t: Throwable ) = if ( jLog.isWarnEnabled ) jLog.warn( msg, t )
+  
+  def error( msg: => Any ): Unit = error( msg, null )
+  def error( msg: => Any, t: Throwable ) = if ( jLog.isErrorEnabled ) jLog.error( msg, t )
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/test/scala/net/croz/scardf/LitSpec.scala	Mon Mar 15 20:30:12 2010 -0400
@@ -0,0 +1,40 @@
+package net.croz.scardf
+
+import org.scalatest.WordSpec
+import org.scalatest.matchers.ShouldMatchers
+import org.joda.time.LocalDate
+
+class LitSpecTest extends WordSpec with ShouldMatchers {
+
+  "Literals" should {
+    "be created from String" in {
+      Lit( "example" ).rendering should equal ("\"example\"")
+    }
+    "be created from String with Lang" in {
+      Lit( "example", Lang.en ).rendering should equal ("\"example\"@en")
+    }
+    "be created from Boolean" in {
+      Lit( true ).rendering should equal ("true")
+    }
+    "be created from Int" in {
+      Lit( 1 ).rendering should equal ("1")
+      Lit( 1 ).asInt should equal (1)
+    }
+    "be created from Double" in {
+      Lit( 1.1D ).rendering should equal ("1.1")
+      Lit( 1.1D ).asDouble should equal (1.1D)
+    }
+    "be created from BigDecimal" in {
+      val digits = "-1.234567890123456789012345678901234567890"
+      Lit( BigDecimal( digits ) ).rendering should equal (digits)
+      Lit( BigDecimal( digits ) ).asBigDecimal should equal (BigDecimal( digits ))
+    }
+    "be created from LocalDate" in {
+      Lit( new LocalDate( 2007, 7, 7 ) ).rendering should equal ("2007-07-07")
+      Lit( new LocalDate( 2007, 7, 7 ) ).asLocalDate should equal (new LocalDate( 2007, 7, 7 ))
+    }
+    "do equals" in {
+      Lit( "a" ) should equal (Lit( "a" ))
+    }
+  }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/test/scala/net/croz/scardf/LocateSpanQuerySpec.scala	Mon Mar 15 20:30:12 2010 -0400
@@ -0,0 +1,66 @@
+package net.croz.scardf
+
+import org.scalatest.WordSpec
+import org.scalatest.matchers.ShouldMatchers
+import PeopleVocabulary._
+import FamilyVocabulary._
+
+class LocateSpanQuerySpecTest extends WordSpec with ShouldMatchers {
+
+  "query system" should {
+    val template = Blank( Likes -> Swimming ).root
+    val ptree = PredicateTree( Name~(Given, Family), Height )
+    
+    "select some" in {
+      val qLikes = QVar()
+      val selectQ = Sparql select( X, qLikes ) where( (X, IsMale, true), (X, Likes, qLikes) )
+      println( selectQ from FamilyVocabulary.model solutions )
+    }
+    
+//    "select using locate-span" in {
+//      val q = new LocateSpanQ( template, ptree, List( Height -> false ) )
+//      val result = q from FamilyVocabulary.model
+//      val rm = result.model
+//      result must_==( RdfList( john, jane, anna )( rm ) )
+//    }
+  }
+}
+
+import scala.collection.mutable.{ ListBuffer, Map => MMap }
+
+class LocateSpanQ(
+  template: Res, 
+  ptree: PredicateTree, 
+  orderCriteria: List[ Pair[PredicateChain, Boolean] ] 
+) 
+{
+  private val qvars = MMap[PredicateChain, QVar]( PropPath() -> X )
+  private val constraints = new ListBuffer[Tuple3[Any,Any,Any]]
+  private val comparators = new ListBuffer[OrderComparator]
+  private var lim = 0
+  private var off = 0
+  
+  def selectQuery = (
+    new query.SelectQ( qvars.values.toList: _* ) where( constraints: _* )
+    orderBy( comparators: _* ) limit lim offset off
+  )
+  
+  def toRdfList( solutionIt: Iterator[QSolution] ) = {
+    var m: Model = null
+    for ( qsolution <- solutionIt ) {
+      val solution = qsolution.toMap
+      val a = solution( X ).asRes
+      ptree.growFrom( a )
+      for ( chain <- qvars.keySet )
+        chain( a ) = solution( qvars( chain ) )
+    }
+    RdfList()(m)
+  }
+  
+  def from( dataModel: Model ) = toRdfList( selectQuery from dataModel ) 
+}
+
+//SELECT ?X ?H ?G ?F
+//WHERE { ?X Likes Swimming; Height ?H; Name [ Given ?G; Family ?F ]. }
+//ORDER BY ?H
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/test/scala/net/croz/scardf/NoVarSpec.scala	Mon Mar 15 20:30:12 2010 -0400
@@ -0,0 +1,30 @@
+package net.croz.scardf
+
+import org.scalatest.WordSpec
+import org.scalatest.matchers.ShouldMatchers
+import org.joda.time.LocalDate
+
+class NoVarSpec extends WordSpec with ShouldMatchers {
+
+  "j" should {
+
+    import PeopleVocabulary._
+
+    "1" in {
+      val family = new Vocabulary( "http://voc.eg#" )
+      Spouse( family\"John" ) = family~"Jane"
+      (family~"Mother")( family\"Jane" ) = family~"Vilma"
+      val MotherInLaw = Spouse ~ (family~"Mother")
+      family\"John"/MotherInLaw/asRes should equal (family~"Vilma")
+    }
+
+    "2" in {
+      val family = new Vocabulary( "http://voc.eg#" )
+      val List( john, jane, vilma, anna, bob ) = 
+        List( "John", "Jane", "Vilma", "Anna", "Bob" ) map{ family\_ }
+      john.uri should equal ("http://voc.eg#John")
+    }
+
+  }
+
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/test/scala/net/croz/scardf/PropPathSpec.scala	Mon Mar 15 20:30:12 2010 -0400
@@ -0,0 +1,113 @@
+package net.croz.scardf
+
+import org.scalatest.WordSpec
+import org.scalatest.matchers.ShouldMatchers
+import PeopleVocabulary._
+import FamilyVocabulary._
+
+class PropPathSpec extends WordSpec with ShouldMatchers {
+
+  "normal prop path" should {
+
+    val pp = PropPath( Name, Given )
+
+    "be constructed using dashes" in {
+      val longPath = Spouse~Likes~Name~Given
+      longPath.toList should equal (List( Spouse, Likes, Name, Given ))
+    }
+
+    "traverse graphs using slash operator" in {
+      anna/pp/asString should equal ("Anna")
+    }
+   
+    "traverse graphs using 'of'" in {
+      Spouse~Likes of john should equal (Swimming)
+    }
+  
+    "do equals" in {
+      Name~Given should equal (pp)
+    }
+
+  }
+
+  "empty prop path" should {
+    
+    val emptyPath = PropPath()
+    
+    "traverse graph" in {
+      assert(NodeBag( anna ) === anna/emptyPath)
+    }
+
+    "dead-end paths" should {
+      "yield empty node bags" in {
+        // the chain leads nowhere from john, so an empty bag is returned
+        assert(NodeBag() === john/( Spouse~Likes~Name~Given ))
+      }
+    }
+
+  }
+
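+  // PredicateTree operations exercised below: ~ forks branches, ++ merges trees, -- removes a subtree,
+  // and growFrom builds a new graph rooted at the given resource.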
+  "predicate tree" should {
+
+    import net.croz.scardf.{PredicateTree => pt}
+    
+    "be constructed with fork operators" in {
+      Spouse~( Name~Given, IsMale ) should equal (pt( Spouse -> pt( Name -> pt(Given), IsMale -> pt() ) ))
+    }
+
+    "grow graphs" in {
+      val ptree = Spouse~( Name~Given, IsMale )
+      val extRoot = ptree growFrom john
+      val m = extRoot.model
+      m should not equal (FamilyVocabulary.model)
+      val tm = new Model
+      val a = tm.getAnon
+      tm addAll List( 
+        john( Spouse -> jane ), 
+        jane( IsMale -> false ), 
+        jane( Name -> a ),
+        a( Given -> "Jane" )
+      )
+      m should not equal ( tm )
+    }
+    "grow a template graph" in {
+      val ptree = Spouse~( Name~Given, IsMale )
+      ptree.growTemplate.model should not equal ( 
+        Blank( Spouse -> Blank( IsMale -> Blank(), Name -> Blank( Given -> Blank() ) ) ).toModel
+      )
+    }
+    "merge trivial trees" in {
+      val t1 = pt( Spouse )
+      val t2 = pt( IsMale )
+      t1++t2 should equal (pt( Spouse, IsMale ))
+    }
+    "merge chains" in {
+      val t1: pt = Spouse~Name~Given
+      val t2 = Spouse~IsMale
+      t1++t2 should equal (Spouse~( Name~Given, IsMale ))
+    }
+    "merge a tree to itself" in {
+      val t = Spouse~( Name~Given, IsMale )
+      t++t should equal (t)
+    }
+    "merge three trees" in {
+      val t1 = Spouse~( Name~Given, IsMale )
+      val t2 = pt( Likes )
+      val t3 = pt( IsMale, Spouse~( IsMale, Name~Family ) )
+      t1++t2++t3 should equal (pt( IsMale, Likes, Spouse~( Name~(Given, Family), IsMale ) ))
+      t3++t1++t2 should equal (pt( IsMale, Likes, Spouse~( Name~(Given, Family), IsMale ) ))
+    }
+    "remove trivial tree from itself" in {
+      pt( IsMale ) -- pt( IsMale ) should equal (pt.empty)
+    }
+    "remove tree" in {
+      Spouse~( Name~Given, IsMale ) -- ( Spouse~Name~(Given, Family) ) should equal (Spouse~(Name, IsMale))
+      Spouse~( Name~Given, IsMale ) -- ( Spouse~Name ) should equal (Spouse~( Name~Given, IsMale ))
+    }
+    "subgraph from chain" in {
+      val t = Spouse~( Name~Given, IsMale )
+      val p = Spouse~Name
+      t subtree p should equal (pt( Given ))
+    }
+  }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/test/scala/net/croz/scardf/QuerySpecs.scala	Mon Mar 15 20:30:12 2010 -0400
@@ -0,0 +1,128 @@
+package net.croz.scardf
+
+import org.scalatest.FlatSpec
+import org.scalatest.matchers.ShouldMatchers
+import org.joda.time.LocalDate
+import PeopleVocabulary._
+import FamilyVocabulary._
+
+class QuerySpecs extends FlatSpec with ShouldMatchers {
+
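+  // All queries below run against the family graph defined in example_data.scala.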
+  val data = FamilyVocabulary.model
+
+  "Triplet factory" should "make correct number of triplets from graph" in {
+    implicit val tempGraph = new Model
+    Anon( Likes -> Anon(), Spouse -> Anon( Likes -> Anon() ) )
+    val ts = Set.empty ++ ( query.TripletFactory tripletsFrom tempGraph )
+    ts.size should equal (3)
+    for ( t <- ts ) assert(t.isInstanceOf[Tuple3[Any, Any, Any]])
+  }
+
+  "Query mechanism" should "select with number literal in where triple's object" in {
+    val person = QVar()
+    val personsHigh107 = 
+      Sparql select( person ) where( (person, Height, 107), (person, RDF.Type, Person) ) from data
+    personsHigh107.solutions should equal (List( Map( person -> anna ) ))
+  }
+  
+  it should "select, order, offset and limit" in {
+    val person, height = QVar()
+    val selectPersonsByHeight = ( Sparql 
+				 select( person, height ) 
+				 where( (person, RDF.Type, Person), (person, Height, height) )
+				 orderBy( asc( height ) )
+				 limit 2 offset 1
+			       )
+      ( selectPersonsByHeight from data ).solutions should equal (List(
+        Map( person -> anna, height -> Lit(107) ), Map( person -> jane, height -> Lit(150) )
+      ))
+  }
+  
+  it should "select using symbols" in {
+    val person, height = QVar()
+    val selectPersonsByHeight = ( Sparql 
+				 select( person, height ) 
+				 where( (person, RDF.Type, Person), (person, Height, height) )
+				 orderBy( asc( height ) )
+				 limit 2 offset 1
+			       )
+      ( selectPersonsByHeight from data ).solutions should equal (List(
+        Map( person -> anna, height -> Lit(107) ), Map( person -> jane, height -> Lit(150) )
+      ))
+  }
+  
+  it should "select with/without DISTINCT" in {
+    val person, hobby = QVar()
+    val distinctHobbies = Sparql select distinct( hobby ) where( (person, Likes, hobby) ) from data
+    distinctHobbies.solutions.map{ _(hobby) } should equal (List( Swimming, Science ))
+    val allHobbiesResult = Sparql select hobby where( (person, Likes, hobby) ) from data
+    val hobbies = allHobbiesResult.solutions.map{ _(hobby).asRes }
+    hobbies.size should be >= (2)
+    hobbies should (contain (Swimming) and contain (Science))
+  }
+
+  it should "select one X as option" in {
+    Sparql selectX asRes where( (X, Likes, Science) ) from data should equal (Some( john ))
+    Sparql selectX asInt where( (jane, Height, X) ) from data should equal (Some( 150 ))
+    Sparql selectX asInt where( (jane, Weight, X) ) from data should equal (None)
+  }
+
+  it should "select all X as iterator" in {
+    val iter = Sparql selectAllX asRes where( (X, Likes, Swimming) ) from data
+    Set.empty ++ iter.toList should equal (Set( anna, jane, john ))
+  }
+
+  it should "select with optional constraints" in {
+    val person, spouse = QVar()
+    val selectPersonsWithSpouses = ( Sparql 
+				    select( person, spouse ) 
+				    where( (person, RDF.Type, Person) )
+				    optional( (person, Spouse, spouse) )
+				  )
+      (selectPersonsWithSpouses from data).solutions == List(
+        Map( person -> anna ), 
+        Map( person -> bob ), 
+        Map( person -> jane, spouse -> john ),
+        Map( person -> john, spouse -> jane )
+      )   
+    }
+
+  it should "ask queries" in {
+    Sparql ask( (john, Likes, Science) ) in data should be (true)
+    Sparql ask( (X, IsMale, false), (X, Likes, Science) ) in data should be (false)
+  }
+
+  it should "construct graphs from template" in {
+    val template = Blank( Likes -> Blank(), Spouse -> Blank( Likes -> Blank() ) ).toModel
+    val constructedGraph = Sparql construct template from data
+    val expectedGraph = new Model
+    expectedGraph addAll List( 
+      john( Spouse -> jane ), john( Likes -> Swimming ), john( Likes -> Science ),
+      jane( Spouse -> john ), jane( Likes -> Swimming )
+    )
+    // TODO: introduce a new matcher for graph isomorphism
+    assert(constructedGraph.local =~ expectedGraph.local)
+  }
+
+  it should "construct graphs using predicate trees" in {
+    val ptree = PredicateTree( Likes, Spouse~Likes )
+    val constructedGraph = Sparql construct ptree from john
+    val expectedGraph = new Model
+    expectedGraph addAll List( 
+      john( Spouse -> jane ), john( Likes -> Swimming ), john( Likes -> Science ),
+      jane( Likes -> Swimming )
+    )
+    // TODO: introduce a new matcher for graph isomorphism
+    assert(constructedGraph.local =~ expectedGraph.local)
+  }
+//    "construct graphs using predicate trees, with missing nodes" in {
+//      val ptree = PredicateTree( Likes, Spouse-Likes )
+//      val constructedGraph = Sparql construct ptree from anna
+//      val expectedGraph = new Model
+//      expectedGraph addAll List( anna( Likes -> Swimming ) )
+//      constructedGraph.dump
+//      constructedGraph should not equal ( expectedGraph )
+//    }
+//  }
+
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/test/scala/net/croz/scardf/ScardfSpecs.scala	Mon Mar 15 20:30:12 2010 -0400
@@ -0,0 +1,118 @@
+package net.croz.scardf
+
+import org.scalatest.WordSpec
+import org.scalatest.matchers.ShouldMatchers
+import org.joda.time.LocalDate
+import PeopleVocabulary._
+
+class ScardfSpecs extends WordSpec with ShouldMatchers {
+
+  "NodeBag" should {
+    "throw exception on taking one node from empty bag" in {
+      intercept[NoSuchElementException] { NodeBag().oneNode }
+    }
+
+    "sort" in {
+      assert(NodeBag( Lit( "a" ), Lit( "b" ), Lit( "a" ) ).sorted === (NodeBag( Lit( "a" ), Lit( "a" ), Lit( "b" ) )))
+    }
+
+    "do equals" in {
+      assert(NodeBag( Lit( "a" ) ) === (NodeBag( Lit( "a" ) )))
+      assert(NodeBag( Lit( "a" ) ) != (NodeBag( Lit( "a" ), Lit( "a" ) )))
+      assert((NodeBag( Lit( "a" ), Lit( "b" ), Lit( "a" ) )) === (NodeBag( Lit( "a" ), Lit( "a" ), Lit( "b" ) )))
+    }
+
+  }
+
+  "Constructed graph" should {
+    implicit val model = new Model() withPrefix "example:"
+    import PeopleVocabulary._
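+    // The scardf DSL in use: "a" asserts rdf:type, "state" adds the property/value pairs, Anon() creates a blank node.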
+    val jdoe = Res( "jdoe" ) a Person state(
+      Name -> Anon(
+        Given -> "John",
+        Family -> "Doe"
+      ),
+      Birthday -> "1977-07-27",
+      Height -> 167,
+      IsMale -> true,
+      Likes -> All( Swimming, Science ),
+      Children -> RdfList( Res( "anna" ), Res( "bob" ) )
+    )
+
+    "extract property value" in {
+      Given( Name( jdoe ) ) should equal (Lit( "John" ))
+    }
+
+    "assign value to property" in {
+      Weight( jdoe ) = 88
+      Weight( jdoe ) should equal (Lit( 88 ))
+      //(Name-Family)( Res( "anna" ) ) = "Doe"
+    }
+
+    "read path" in {
+      jdoe/Name/Given/asString should equal ("John")
+      jdoe/Height/asInt should equal (167)
+    }
+
+    "handle multiple-node results" in {
+      assert(jdoe/Spouse isEmpty)
+      jdoe/Spouse/asRes.option should equal (None)
+      assert(jdoe/Spouse/Name/Family isEmpty)
+      jdoe/Spouse/Name/Family/asString.default( "(unknown)" ) should equal ("(unknown)")
+      jdoe/Likes/asRes.set should equal (Set( Swimming, Science ))
+    }
+
+    "test boolean value" in {
+      ( jdoe/IsMale? ) should equal (true)
+      jdoe has Height -> 167 should equal (true)
+      ( jdoe( Likes -> Science )? ) should equal (true)
+    }
+
+    "read date" in {
+      jdoe/Birthday/asLocalDate should equal (new LocalDate( 1977, 7, 27 ))
+    }
+
+    "read collections" in {
+      (jdoe/Children/asRdfList).toList should equal (List( Res( "anna" ), Res( "bob" ) ))
+    }
+
+    "sparql query heighest" in {
+      val selectHeighest = Sparql select 'person where( ('person, Height, 'h) ) orderBy desc( 'h ) limit 1
+      val results = selectHeighest from model
+      results.solutions should equal (List( Map( QVar( "person" ) -> jdoe ) ))
+    }
+
+    "sparql query select X" in {
+      Sparql selectX asRes where( (X, Height, 167) ) from model should equal (Some( jdoe ))
+    }
+
+  }
+  /*
+  "read graph" should {
+    val turtleSrc = """
+@prefix :        <person:> .
+<example:jdoe>
+      a :Person ;
+      :Birthday "1977-07-27";
+      :Children (<example:anna> <example:bob>) ;
+      :Height 167;
+      :IsMale true;
+      :Likes  :Swimming , :Science ;
+      :Name   [ :Family "Doe";
+                :Given  "John"
+              ] .
+<example:anna> a :Person; :Name [:Family "Doe"; :Given "Anna"].
+<example:bob> a :Person; :Name [:Family "Doe"; :Given "Bob"].
+"""
+    import PeopleVocabulary._
+    val m = new Model
+    m.jModel.read( new java.io.StringReader( turtleSrc ), null, "TURTLE" )
+    "" in {
+      val rlist = m.getRes( "example:jdoe" )/Children/asRdfList
+      rlist.toList should equal (List( m.getRes( "example:anna" ), m.getRes( "example:bob" ) ))
+    }
+    "" in {
+      println( m.getRes( "example:jdoe" )/Children/asRdfList/Name/Family )
+    }
+  }*/
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/test/scala/net/croz/scardf/example_data.scala	Mon Mar 15 20:30:12 2010 -0400
@@ -0,0 +1,51 @@
+package net.croz.scardf
+
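+// Shared test vocabulary: the classes and properties used throughout the specs; pRes and pProp mint
+// resources and properties in the http://person.eg# namespace.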
+object PeopleVocabulary extends Vocabulary( "http://person.eg#" ) {
+  val Person = pRes( "Person" )
+  val Name = pProp( "Name" )
+  val Given = pProp( "Given" )
+  val Family = pProp( "Family" )
+  val Birthday = pProp( "Birthday" ) withRange XSD.date
+  val IsMale = pProp( "IsMale" ) withRange XSD.boolean
+  val Height = pProp( "Height" ) withRange XSD.int
+  val Weight = pProp( "Weight" ) withRange XSD.int
+  val Hobby = pRes( "Hobby" )
+  val Likes = pProp( "Likes" ) withRange Hobby
+  val Swimming = pRes( "Swimming" ) a Hobby
+  val Science = pRes( "Science" ) a Hobby
+  val Spouse = pProp( "Spouse" ) withRange Person
+  val Children = pProp( "Children" )
+  val Father = pProp( "Father" )
+}
+
+import PeopleVocabulary._
+
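+// Example data set: the Doe family (John, Jane and their children Anna and Bob) used by the query specs.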
+object FamilyVocabulary extends Vocabulary( "http://family.eg#" ) {
+  private implicit val m = model
+  private val aMale = IsMale -> true
+  private val aFemale = IsMale -> false
+  
+  val anna = Res( "anna" ) a Person state(
+    Name -> Anon( Given -> "Anna" ),
+    aFemale, Birthday -> "2004-04-14", Height -> 107,
+    Likes -> Swimming
+  )
+  val bob = Res( "bob" ) a Person state(
+    Name -> Anon( Given -> "Bob" ),
+    aMale, Birthday -> "2007-05-18", Height -> 87
+  )
+  val john = Res( "jdoe" ) a Person state(
+    Name -> Anon( Given -> "John" ),
+    aMale, Birthday -> "1977-07-27", Height -> 167,
+    Likes -> All( Swimming, Science ),
+    Children -> RdfList( anna, bob ), Spouse -> Res( "jane" )
+  )
+  val jane = Res( "jane" ) a Person state(
+    Name -> Anon( Given -> "Jane" ),
+    aFemale, Birthday -> "1976-06-26", Height -> 150,
+    Likes -> Swimming,
+    Children -> RdfList( anna, bob ), Spouse -> john
+  )
+  List( anna, bob, jane, john ) foreach { r => (Name~Family)( r ) = "Doe" }
+  john/Children/asRdfList foreach { n => Father( n.asRes ) = john }
+}