initial macro testing, maven is painful

macrotesting
Nick Brown 2023-06-11 23:07:51 -04:00
parent 9b6beb068b
commit e098e00c54
Signed by: bicknrown
GPG Key ID: 47AF495B3DCCE9C3
3 changed files with 40 additions and 0 deletions

View File

@@ -0,0 +1,35 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.macros
import scala.language.experimental.macros
import scala.reflect.macros.blackbox.Context
// scalastyle:off
/**
 * Compile-time debugging helper for dumping the raw AST of an expression.
 *
 * `extract(plan)` expands at the call site into code that prints
 * `showRaw(plan)` at runtime, i.e. the runtime-reflection tree of the
 * argument expression. Intended for exploratory/diagnostic use only
 * (scalastyle is disabled around this file for that reason).
 */
object ASTExtractor {
  /**
   * Macro implementation backing [[extract]].
   *
   * Fix: the public macro implementation now carries an explicit
   * `c.Expr[Unit]` return type instead of relying on inference, as
   * required by Scala macro-writing convention for impl methods.
   *
   * @param c    the blackbox macro context supplied by the compiler
   * @param plan the expression whose tree should be dumped
   * @return a `Unit`-typed expression that imports the runtime universe
   *         and prints `showRaw(plan)` when the expanded code runs
   */
  def extractImpl(c: Context)(plan: c.Expr[Any]): c.Expr[Unit] = {
    import c.universe._
    // Quasiquote: the generated code performs the runtime-reflection
    // import locally so call sites need no extra imports of their own.
    c.Expr[Unit](q"""
import scala.reflect.runtime.universe._
println(showRaw($plan))""")
  }

  /** Prints the raw AST of `plan`; expands via [[extractImpl]]. */
  def extract(plan: Any): Unit = macro extractImpl
}
// scalastyle:on

View File

@@ -19,6 +19,7 @@ package org.apache.spark.sql.catalyst.optimizer
import scala.collection.mutable
import org.apache.spark.macros.ASTExtractor
import org.apache.spark.sql.catalyst.CustomLogger
import org.apache.spark.sql.catalyst.analysis._
import org.apache.spark.sql.catalyst.catalog.{InMemoryCatalog, SessionCatalog}
@@ -1397,6 +1398,7 @@ object InferFiltersFromConstraints extends Rule[LogicalPlan]
{
plan.transformWithPruning(_.containsAnyPattern(FILTER, JOIN)) {
case filter @ Filter(condition, child) =>
ASTExtractor.extract(filter)
CustomLogger.logMatchTime("stopTheClock : Match 1 InferFiltersFromConstraints", false)
{
val newFilters = filter.constraints --
@@ -1409,6 +1411,7 @@ object InferFiltersFromConstraints extends Rule[LogicalPlan]
}
case join @ Join(left, right, joinType, conditionOpt, _) =>
ASTExtractor.extract(join)
CustomLogger.logMatchTime("stopTheClock : Match 2 InferFiltersFromConstraints", false)
{
joinType match {

View File

@@ -18,6 +18,7 @@
package org.apache.spark.sql.catalyst.rules
import org.apache.spark.internal.Logging
// import org.apache.spark.macros.ASTExtractor
import org.apache.spark.sql.catalyst.CustomLogger
import org.apache.spark.sql.catalyst.QueryPlanningTracker
import org.apache.spark.sql.catalyst.trees.TreeNode
@@ -210,6 +211,7 @@ abstract class RuleExecutor[TreeType <: TreeNode[_]] extends Logging {
while (continue) {
curPlan = batch.rules.foldLeft(curPlan) {
case (plan, rule) =>
// ASTExtractor.extract(rule)
val startTime = System.nanoTime()
CustomLogger.checkMatch("push")
val result =