Skip to content

Commit fe73039

Browse files
williamhyun authored and dongjoon-hyun committed
[SPARK-37791][EXAMPLES] Use log4j2 in examples
### What changes were proposed in this pull request?

This PR aims to use log4j2 in examples.

### Why are the changes needed?

Since Spark is migrating to log4j2, we best use this in our examples.

### Does this PR introduce _any_ user-facing change?

No.

### How was this patch tested?

Manually review.

Closes #35074 from williamhyun/log4j2.

Authored-by: William Hyun <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
1 parent c4a9772 commit fe73039

File tree

8 files changed

+28
-18
lines changed

8 files changed

+28
-18
lines changed

examples/src/main/scala/org/apache/spark/examples/mllib/BinaryClassification.scala

+3-2
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,8 @@
1818
// scalastyle:off println
1919
package org.apache.spark.examples.mllib
2020

21-
import org.apache.log4j.{Level, Logger}
21+
import org.apache.logging.log4j.Level
22+
import org.apache.logging.log4j.core.config.Configurator
2223
import scopt.OptionParser
2324

2425
import org.apache.spark.{SparkConf, SparkContext}
@@ -105,7 +106,7 @@ object BinaryClassification {
105106
val conf = new SparkConf().setAppName(s"BinaryClassification with $params")
106107
val sc = new SparkContext(conf)
107108

108-
Logger.getRootLogger.setLevel(Level.WARN)
109+
Configurator.setRootLevel(Level.WARN)
109110

110111
val examples = MLUtils.loadLibSVMFile(sc, params.input).cache()
111112

examples/src/main/scala/org/apache/spark/examples/mllib/DenseKMeans.scala

+3-2
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,8 @@
1818
// scalastyle:off println
1919
package org.apache.spark.examples.mllib
2020

21-
import org.apache.log4j.{Level, Logger}
21+
import org.apache.logging.log4j.Level
22+
import org.apache.logging.log4j.core.config.Configurator
2223
import scopt.OptionParser
2324

2425
import org.apache.spark.{SparkConf, SparkContext}
@@ -79,7 +80,7 @@ object DenseKMeans {
7980
val conf = new SparkConf().setAppName(s"DenseKMeans with $params")
8081
val sc = new SparkContext(conf)
8182

82-
Logger.getRootLogger.setLevel(Level.WARN)
83+
Configurator.setRootLevel(Level.WARN)
8384

8485
val examples = sc.textFile(params.input).map { line =>
8586
Vectors.dense(line.split(' ').map(_.toDouble))

examples/src/main/scala/org/apache/spark/examples/mllib/LDAExample.scala

+3-2
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,8 @@ package org.apache.spark.examples.mllib
2020

2121
import java.util.Locale
2222

23-
import org.apache.log4j.{Level, Logger}
23+
import org.apache.logging.log4j.Level
24+
import org.apache.logging.log4j.core.config.Configurator
2425
import scopt.OptionParser
2526

2627
import org.apache.spark.{SparkConf, SparkContext}
@@ -111,7 +112,7 @@ object LDAExample {
111112
val conf = new SparkConf().setAppName(s"LDAExample with $params")
112113
val sc = new SparkContext(conf)
113114

114-
Logger.getRootLogger.setLevel(Level.WARN)
115+
Configurator.setRootLevel(Level.WARN)
115116

116117
// Load documents, and prepare them for LDA.
117118
val preprocessStart = System.nanoTime()

examples/src/main/scala/org/apache/spark/examples/mllib/MovieLensALS.scala

+3-2
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,8 @@ package org.apache.spark.examples.mllib
2020

2121
import scala.collection.mutable
2222

23-
import org.apache.log4j.{Level, Logger}
23+
import org.apache.logging.log4j.Level
24+
import org.apache.logging.log4j.core.config.Configurator
2425
import scopt.OptionParser
2526

2627
import org.apache.spark.{SparkConf, SparkContext}
@@ -103,7 +104,7 @@ object MovieLensALS {
103104
}
104105
val sc = new SparkContext(conf)
105106

106-
Logger.getRootLogger.setLevel(Level.WARN)
107+
Configurator.setRootLevel(Level.WARN)
107108

108109
val implicitPrefs = params.implicitPrefs
109110

examples/src/main/scala/org/apache/spark/examples/mllib/PowerIterationClusteringExample.scala

+3-2
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,8 @@
1818
// scalastyle:off println
1919
package org.apache.spark.examples.mllib
2020

21-
import org.apache.log4j.{Level, Logger}
21+
import org.apache.logging.log4j.Level
22+
import org.apache.logging.log4j.core.config.Configurator
2223
import scopt.OptionParser
2324

2425
import org.apache.spark.{SparkConf, SparkContext}
@@ -90,7 +91,7 @@ object PowerIterationClusteringExample {
9091
.setAppName(s"PowerIterationClustering with $params")
9192
val sc = new SparkContext(conf)
9293

93-
Logger.getRootLogger.setLevel(Level.WARN)
94+
Configurator.setRootLevel(Level.WARN)
9495

9596
// $example on$
9697
val circlesRdd = generateCirclesRdd(sc, params.k, params.numPoints)

examples/src/main/scala/org/apache/spark/examples/mllib/SparseNaiveBayes.scala

+3-2
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,8 @@
1818
// scalastyle:off println
1919
package org.apache.spark.examples.mllib
2020

21-
import org.apache.log4j.{Level, Logger}
21+
import org.apache.logging.log4j.Level
22+
import org.apache.logging.log4j.core.config.Configurator
2223
import scopt.OptionParser
2324

2425
import org.apache.spark.{SparkConf, SparkContext}
@@ -70,7 +71,7 @@ object SparseNaiveBayes {
7071
val conf = new SparkConf().setAppName(s"SparseNaiveBayes with $params")
7172
val sc = new SparkContext(conf)
7273

73-
Logger.getRootLogger.setLevel(Level.WARN)
74+
Configurator.setRootLevel(Level.WARN)
7475

7576
val minPartitions =
7677
if (params.minPartitions > 0) params.minPartitions else sc.defaultMinPartitions

examples/src/main/scala/org/apache/spark/examples/streaming/StreamingExamples.scala

+5-3
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,9 @@
1717

1818
package org.apache.spark.examples.streaming
1919

20-
import org.apache.log4j.{Level, Logger}
20+
import org.apache.logging.log4j.{Level, LogManager}
21+
import org.apache.logging.log4j.core.Logger
22+
import org.apache.logging.log4j.core.config.Configurator
2123

2224
import org.apache.spark.internal.Logging
2325

@@ -26,13 +28,13 @@ object StreamingExamples extends Logging {
2628

2729
/** Set reasonable logging levels for streaming if the user has not configured log4j. */
2830
def setStreamingLogLevels(): Unit = {
29-
val log4jInitialized = Logger.getRootLogger.getAllAppenders.hasMoreElements
31+
val log4jInitialized = !LogManager.getRootLogger.asInstanceOf[Logger].getAppenders.isEmpty
3032
if (!log4jInitialized) {
3133
// We first log something to initialize Spark's default logging, then we override the
3234
// logging level.
3335
logInfo("Setting log level to [WARN] for streaming example." +
3436
" To override add a custom log4j.properties to the classpath.")
35-
Logger.getRootLogger.setLevel(Level.WARN)
37+
Configurator.setRootLevel(Level.WARN)
3638
}
3739
}
3840
}

external/kinesis-asl/src/main/scala/org/apache/spark/examples/streaming/KinesisWordCountASL.scala

+5-3
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,9 @@ import scala.util.Random
2525
import com.amazonaws.auth.DefaultAWSCredentialsProviderChain
2626
import com.amazonaws.services.kinesis.AmazonKinesisClient
2727
import com.amazonaws.services.kinesis.model.PutRecordRequest
28-
import org.apache.log4j.{Level, Logger}
28+
import org.apache.logging.log4j.{Level, LogManager}
29+
import org.apache.logging.log4j.core.Logger
30+
import org.apache.logging.log4j.core.config.Configurator
2931

3032
import org.apache.spark.SparkConf
3133
import org.apache.spark.internal.Logging
@@ -270,13 +272,13 @@ object KinesisWordProducerASL {
270272
private[streaming] object StreamingExamples extends Logging {
271273
// Set reasonable logging levels for streaming if the user has not configured log4j.
272274
def setStreamingLogLevels(): Unit = {
273-
val log4jInitialized = Logger.getRootLogger.getAllAppenders.hasMoreElements
275+
val log4jInitialized = !LogManager.getRootLogger.asInstanceOf[Logger].getAppenders.isEmpty
274276
if (!log4jInitialized) {
275277
// We first log something to initialize Spark's default logging, then we override the
276278
// logging level.
277279
logInfo("Setting log level to [WARN] for streaming example." +
278280
" To override add a custom log4j.properties to the classpath.")
279-
Logger.getRootLogger.setLevel(Level.WARN)
281+
Configurator.setRootLevel(Level.WARN)
280282
}
281283
}
282284
}

0 commit comments

Comments
 (0)