#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import os
import re
from collections import namedtuple
from textwrap import dedent

# To avoid adding a new direct dependency, we import markdown from within mkdocs.
from mkdocs.structure.pages import markdown

from pyspark.java_gateway import launch_gateway

# One public SQL configuration option: its key, its default value (as a
# string, possibly a placeholder like "<undefined>"), and a markdown
# description.
SQLConfEntry = namedtuple(
    "SQLConfEntry", ["name", "default", "description"])


def get_public_sql_configs(jvm):
    """
    Return a list of `SQLConfEntry` records for every public SQL config
    exposed by the JVM.

    `jvm` is the py4j JVM view of a launched Spark gateway; each config
    comes back as a Scala tuple whose fields are read via the py4j
    accessors `_1()` (name), `_2()` (default), `_3()` (description).
    """
    sql_configs = [
        SQLConfEntry(
            name=_sql_config._1(),
            default=_sql_config._2(),
            description=_sql_config._3(),
        )
        for _sql_config in jvm.org.apache.spark.sql.api.python.PythonSQLUtils.listSQLConfigs()
    ]
    return sql_configs


def generate_sql_configs_table(sql_configs, path):
    """
    Generates an HTML table at `path` that lists all public SQL
    configuration options.

    The table will look something like this:

    ```html
    <table class="table">
    <tr><th>Property Name</th><th>Default</th><th>Meaning</th></tr>

    <tr>
        <td><code>spark.sql.adaptive.enabled</code></td>
        <td>false</td>
        <td><p>When true, enable adaptive query execution.</p></td>
    </tr>

    ...

    </table>
    ```
    """
    # NOTE(review): the body of this function was garbled (HTML-stripped) in
    # this copy of the file; the default-value handling below was restored to
    # be consistent with the surviving fragments (`default=default`,
    # `.format(name=config.name, ...)`) — confirm against the upstream
    # generator script.

    # Some defaults reference another config, e.g. "<value of spark.buffer.size>";
    # render those as "(value of <code>...</code>)" instead of raw angle-bracket
    # text that the browser would swallow as a tag.
    value_reference_pattern = re.compile(r"^<value of (\S*)>$")

    with open(path, 'w') as f:
        f.write(dedent(
            """
            <table class="table">
            <tr><th>Property Name</th><th>Default</th><th>Meaning</th></tr>
            """
        ))
        # Sort rows by config name so the generated table is deterministic.
        for config in sorted(sql_configs, key=lambda x: x.name):
            if config.default == "<undefined>":
                default = "(none)"
            elif config.default.startswith("<value of "):
                referenced_config_name = value_reference_pattern.match(config.default).group(1)
                default = "(value of <code>{}</code>)".format(referenced_config_name)
            else:
                default = config.default

            # Any remaining leading "<" would be emitted as broken HTML;
            # fail loudly rather than publish a corrupted docs table.
            if default.startswith("<"):
                raise Exception(
                    "Unhandled reference in SQL config docs. Config '{name}' "
                    "has default '{default}' that looks like an HTML tag."
                    .format(
                        name=config.name,
                        default=config.default,
                    )
                )

            f.write(dedent(
                """
                <tr>
                    <td><code>{name}</code></td>
                    <td>{default}</td>
                    <td>{description}</td>
                </tr>
                """
                .format(
                    name=config.name,
                    default=default,
                    # Descriptions are markdown; convert to HTML for the table cell.
                    description=markdown.markdown(config.description),
                )
            ))
        f.write("</table>\n")