From 0019005a2d0f150fd00ad926d054a8beca4bbd68 Mon Sep 17 00:00:00 2001
From: Nick Lavers
Date: Tue, 17 Jan 2017 12:14:38 +0000
Subject: [SPARK-19219][SQL] Fix Parquet log output defaults

## What changes were proposed in this pull request?

Changing the default parquet logging levels to reflect the changes made in PR [#15538](https://github.com/apache/spark/pull/15538), in order to prevent the flood of log messages by default.

## How was this patch tested?

Default log output when reading from parquet 1.6 files was compared with and without this change. The change eliminates the extraneous logging and makes the output readable.

Author: Nick Lavers

Closes #16580 from nicklavers/spark-19219-set_default_parquet_log_level.
---
 core/src/main/resources/org/apache/spark/log4j-defaults.properties | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/core/src/main/resources/org/apache/spark/log4j-defaults.properties b/core/src/main/resources/org/apache/spark/log4j-defaults.properties
index 89a7963a86..2770100150 100644
--- a/core/src/main/resources/org/apache/spark/log4j-defaults.properties
+++ b/core/src/main/resources/org/apache/spark/log4j-defaults.properties
@@ -36,3 +36,7 @@ log4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=INFO
 # SPARK-9183: Settings to avoid annoying messages when looking up nonexistent UDFs in SparkSQL with Hive support
 log4j.logger.org.apache.hadoop.hive.metastore.RetryingHMSHandler=FATAL
 log4j.logger.org.apache.hadoop.hive.ql.exec.FunctionRegistry=ERROR
+
+# Parquet related logging
+log4j.logger.org.apache.parquet.CorruptStatistics=ERROR
+log4j.logger.parquet.CorruptStatistics=ERROR
--
cgit v1.2.3
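
For illustration only, not part of the patch: an application that supplies its own log4j configuration (and therefore never loads Spark's bundled `log4j-defaults.properties`) could apply the same suppression programmatically through the log4j 1.x API that Spark shipped with at the time. This is a minimal sketch; the object and method names are hypothetical, and the two logger names are taken directly from the diff above.

```scala
import org.apache.log4j.{Level, Logger}

// Sketch: raise the threshold of Parquet's CorruptStatistics loggers to ERROR
// at runtime, mirroring the two lines this patch adds to log4j-defaults.properties.
// Both logger names from the diff are set, since older and newer Parquet releases
// log under different package prefixes.
object ParquetLogLevels {
  def quietCorruptStatistics(): Unit = {
    Logger.getLogger("org.apache.parquet.CorruptStatistics").setLevel(Level.ERROR)
    Logger.getLogger("parquet.CorruptStatistics").setLevel(Level.ERROR)
  }
}
```

Editing a `log4j.properties` file remains the usual route; the point of the patch itself is that the out-of-the-box defaults now cover this case without any user configuration.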