testing framework for comparison between our implemented rules and Spark's.

lessersparkoptimizer
Nick Brown 2023-07-18 22:35:30 -04:00
parent b912a41523
commit f4e85e3250
Signed by: bicknrown
GPG Key ID: 47AF495B3DCCE9C3
3 changed files with 61 additions and 1 deletion

View File

@@ -7,7 +7,9 @@ import com.astraldb.typecheck.TypecheckMatch.MatchTypecheckError
import com.astraldb.typecheck.TypecheckExpression.ExpressionTypecheckError
import com.astraldb.codegen.Render
import com.astraldb.spec.Definition
import com.astraldb.spec.Type
import com.astraldb.codegen._
object Generate
{
@@ -40,5 +42,20 @@ object Generate
of.close()
println(file)
}
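// Also emit LesserSparkOptimizer.scala: a second optimizer driven by Spark's
// Rule[LogicalPlan] objects, used to compare Spark's rules against the generated ones.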
val file = "LesserSparkOptimizer.scala"
val of = new BufferedWriter(new FileWriter(file))
of.write("""package com.astraldb.catalyst.lesserspark
|import org.apache.spark.sql.catalyst.rules.Rule
|import org.apache.spark.sql.catalyst.plans._
|import org.apache.spark.sql.catalyst.plans.logical._
|import org.apache.spark.sql.catalyst.expressions._
|import org.apache.spark.sql.catalyst.optimizer._
|import com.astraldb.catalyst.SparkMethods._
|""".stripMargin)
val content = LesserSparkOptimizer(Catalyst.definition)
of.write(content)
of.close()
println(file)
}
}
}
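For orientation, once Generate runs, the emitted LesserSparkOptimizer.scala should look roughly like the sketch below: the header string above followed by the rendered template added later in this commit. The two rule names are placeholders for illustration only, not the actual contents of ctx.rules.

package com.astraldb.catalyst.lesserspark
import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.spark.sql.catalyst.plans._
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.optimizer._
import com.astraldb.catalyst.SparkMethods._
object Optimizer
{
  // Placeholder rule names; the real list is rendered from ctx.rules.
  val rules = Seq[Rule[LogicalPlan]](
    PushDownPredicates,
    CollapseProject,
  )

  def MAX_ITERATIONS = 100

  def rewrite(plan: LogicalPlan): LogicalPlan =
  {
    var current = plan
    var last = plan
    for(i <- 0 until MAX_ITERATIONS)
    {
      last = current
      for(rule <- rules)
      {
        current = rule(current)
      }
      if(last.fastEquals(current))
      {
        return current
      }
    }
    return current
  }
}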

View File

@@ -0,0 +1,11 @@
package com.astraldb.codegen

import com.astraldb.spec.Definition

object LesserSparkOptimizer
{
  def apply(definition: Definition): String =
  {
    // Render the LesserSparkOptimizer template (in the sibling `scala` package)
    // against the given definition and return the generated Scala source.
    scala.LesserSparkOptimizer(definition).toString
  }
}

View File

@@ -0,0 +1,32 @@
@import com.astraldb.spec.Definition
@(ctx:Definition)
object Optimizer
{
  // One Spark Rule[LogicalPlan] object per distinct base rule name in the definition.
  val rules = Seq[Rule[LogicalPlan]](
    @for(rule <- ctx.rules.distinctBy(_.safeLabel.split("_")(0))){
      @rule.safeLabel.split("_")(0), }
  )

  def MAX_ITERATIONS = 100
  def rewrite(plan: LogicalPlan): LogicalPlan =
  {
    var current = plan
    var last = plan
    for(i <- 0 until MAX_ITERATIONS)
    {
      // Remember the result of the previous pass so a fixpoint can be detected.
      last = current
      for(rule <- rules)
      {
        // Apply each rule to the current plan, not the original input.
        current = rule(current)
      }
      if(last.fastEquals(current))
      {
        return current
      }
    }
    return current
  }
}
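Finally, a minimal sketch (not part of this commit) of the kind of comparison the commit message describes: run the generated lesser-Spark Optimizer and one of Spark's own rules over the same plan and check whether they agree. The test plan, the choice of ConstantFolding as the reference rule, and the CompareExample name are illustrative assumptions.

import org.apache.spark.sql.catalyst.expressions.{Add, AttributeReference, EqualTo, Literal}
import org.apache.spark.sql.catalyst.optimizer.ConstantFolding
import org.apache.spark.sql.catalyst.plans.logical.{Filter, LocalRelation, LogicalPlan}
import org.apache.spark.sql.types.IntegerType
import com.astraldb.catalyst.lesserspark.Optimizer

object CompareExample
{
  def main(args: Array[String]): Unit =
  {
    // A tiny plan with a constant-foldable predicate: Filter(a = 1 + 1, LocalRelation(a)).
    val a = AttributeReference("a", IntegerType)()
    val plan: LogicalPlan = Filter(EqualTo(a, Add(Literal(1), Literal(1))), LocalRelation(a))

    val viaGenerated = Optimizer.rewrite(plan) // fixpoint over the generated rule list
    val viaSpark = ConstantFolding(plan)       // a single pass of one Spark rule, as a reference
    println(viaGenerated.fastEquals(viaSpark)) // true when both produce the same plan
  }
}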