Skip to content

Commit

Permalink
Merge pull request #817 from s1ck/specialize_return_types
Browse files Browse the repository at this point in the history
Use specialized types for CypherRecords and CypherResult
  • Loading branch information
s1ck authored Feb 22, 2019
2 parents b0148f7 + 1d1ffc2 commit d5c7ef5
Show file tree
Hide file tree
Showing 4 changed files with 18 additions and 6 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,8 @@ import org.opencypher.okapi.api.table.{CypherPrintable, CypherRecords}
// TODO: make graph and records non-optional
trait CypherResult extends CypherPrintable {

type Records <: CypherRecords

type Graph <: PropertyGraph

/**
Expand All @@ -59,14 +61,14 @@ trait CypherResult extends CypherPrintable {
*
* @return a table of records, `None` otherwise.
*/
def getRecords: Option[CypherRecords]
def getRecords: Option[Records]

/**
* The table of records if one was returned by the query, otherwise an exception is thrown.
*
* @return a table of records.
*/
def records: CypherRecords = getRecords.get
def records: Records = getRecords.get

/**
* API for printable plans. This is used for explaining the execution plan of a Cypher query.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,13 +26,15 @@
*/
package org.opencypher.okapi.relational.api.graph

import org.opencypher.okapi.api.graph.{PropertyGraph, QualifiedGraphName}
import org.opencypher.okapi.api.graph.{CypherResult, PropertyGraph, QualifiedGraphName}
import org.opencypher.okapi.api.schema.Schema
import org.opencypher.okapi.api.table.CypherRecords
import org.opencypher.okapi.api.types.{CTNode, CTRelationship, CypherType}
import org.opencypher.okapi.api.value.CypherValue
import org.opencypher.okapi.impl.exception.UnsupportedOperationException
import org.opencypher.okapi.ir.api.expr.PrefixId.GraphIdPrefix
import org.opencypher.okapi.relational.api.io.{EntityTable, NodeTable}
import org.opencypher.okapi.relational.api.planning.RelationalRuntimeContext
import org.opencypher.okapi.relational.api.planning.{RelationalCypherResult, RelationalRuntimeContext}
import org.opencypher.okapi.relational.api.table.{RelationalCypherRecords, Table}
import org.opencypher.okapi.relational.impl.graph.{EmptyGraph, PrefixedGraph, ScanGraph, UnionGraph}
import org.opencypher.okapi.relational.impl.operators.RelationalOperator
Expand Down Expand Up @@ -94,6 +96,13 @@ trait RelationalCypherGraph[T <: Table[T]] extends PropertyGraph {

def scanOperator(entityType: CypherType, exactLabelMatch: Boolean = false): RelationalOperator[T]

/**
 * Executes a Cypher query scoped to this graph by delegating to the owning
 * session's `cypherOnGraph`, passing this graph as the query's working graph.
 *
 * @param query        the Cypher query string to execute
 * @param parameters   query parameters bound by name
 * @param drivingTable optional records used to drive the query (e.g. as input rows)
 * @param queryCatalog additional graphs made addressable by qualified name for this query
 * @return the relational result produced by the session
 */
override def cypher(
query: String,
parameters: CypherValue.CypherMap,
drivingTable: Option[CypherRecords],
queryCatalog: Map[QualifiedGraphName, PropertyGraph]
): RelationalCypherResult[T] = session.cypherOnGraph(this, query, parameters, drivingTable, queryCatalog)

override def nodes(name: String, nodeCypherType: CTNode, exactLabelMatch: Boolean = false): RelationalCypherRecords[T] = {
val scan = scanOperator(nodeCypherType, exactLabelMatch)
val namedScan = scan.assignScanName(name)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -38,14 +38,16 @@ case class RelationalCypherResult[T <: Table[T]](
maybeRelational: Option[RelationalOperator[T]]
)(implicit session: RelationalCypherSession[T]) extends CypherResult {

override type Records = RelationalCypherRecords[T]

override type Graph = RelationalCypherGraph[T]

// A graph is only produced when the underlying relational plan's root operator
// is a ReturnGraph (i.e. the query ended in RETURN GRAPH); any other plan —
// or no plan at all — yields None.
override def getGraph: Option[Graph] = maybeRelational.flatMap {
case r: ReturnGraph[T] => Some(r.graph)
case _ => None
}

override def getRecords: Option[RelationalCypherRecords[T]] =
override def getRecords: Option[Records] =
maybeRelational.flatMap {
case _: ReturnGraph[T] => None
case other => Some(session.records.from(other.header, other.table, other.returnItems.map(_.map(_.name))))
Expand Down
1 change: 0 additions & 1 deletion spark-cypher-testing/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@ dependencies {

compile group: 'org.apache.spark', name: "spark-core".scala(), version: ver.spark
compile group: 'org.apache.spark', name: "spark-sql".scala(), version: ver.spark
compile group: 'org.apache.spark', name: "spark-catalyst".scala(), version: ver.spark
compile group: 'org.apache.spark', name: "spark-hive".scala(), version: ver.spark
compile group: 'org.apache.hadoop', name: 'hadoop-minicluster', version: ver.hadoop
compile group: 'ch.cern.sparkmeasure', name: "spark-measure".scala(), version: ver.sparkmeasure
Expand Down

0 comments on commit d5c7ef5

Please sign in to comment.