From a252cbd5ca13fb7b758c839edc92b50336747d82 Mon Sep 17 00:00:00 2001 From: Gengliang Wang Date: Wed, 15 May 2024 16:43:45 -0700 Subject: [PATCH] [SPARK-48291][CORE] Rename Java Logger as SparkLogger ### What changes were proposed in this pull request? Two new classes `org.apache.spark.internal.Logger` and `org.apache.spark.internal.LoggerFactory` were introduced in https://github.com/apache/spark/pull/46301. Given that Logger is a widely recognized **interface** in Log4j, it may lead to confusion to have a class with the same name. To avoid this and clarify its purpose within the Spark framework, I propose renaming `org.apache.spark.internal.Logger` to `org.apache.spark.internal.SparkLogger`. Similarly, to maintain consistency, `org.apache.spark.internal.LoggerFactory` should be renamed to `org.apache.spark.internal.SparkLoggerFactory`. ### Why are the changes needed? To avoid naming confusion and to clarify the Java Spark logger's purpose within the logging framework. ### Does this PR introduce _any_ user-facing change? No ### How was this patch tested? GA tests ### Was this patch authored or co-authored using generative AI tooling? No Closes #46600 from gengliangwang/refactorLogger. 
Authored-by: Gengliang Wang Signed-off-by: Gengliang Wang --- .../org/apache/spark/network/TransportContext.java | 6 +++--- .../spark/network/client/TransportClient.java | 6 +++--- .../network/client/TransportClientFactory.java | 7 ++++--- .../network/client/TransportResponseHandler.java | 7 ++++--- .../spark/network/crypto/AuthClientBootstrap.java | 6 +++--- .../apache/spark/network/crypto/AuthRpcHandler.java | 6 +++--- .../spark/network/protocol/MessageDecoder.java | 6 +++--- .../spark/network/protocol/MessageEncoder.java | 6 +++--- .../spark/network/protocol/SslMessageEncoder.java | 6 +++--- .../spark/network/sasl/SaslClientBootstrap.java | 6 +++--- .../apache/spark/network/sasl/SaslRpcHandler.java | 6 +++--- .../apache/spark/network/sasl/SparkSaslClient.java | 6 +++--- .../apache/spark/network/sasl/SparkSaslServer.java | 6 +++--- .../network/server/ChunkFetchRequestHandler.java | 7 ++++--- .../network/server/OneForOneStreamManager.java | 7 ++++--- .../org/apache/spark/network/server/RpcHandler.java | 6 +++--- .../network/server/TransportChannelHandler.java | 7 ++++--- .../network/server/TransportRequestHandler.java | 7 ++++--- .../spark/network/server/TransportServer.java | 6 +++--- .../network/ssl/ReloadingX509TrustManager.java | 7 ++++--- .../org/apache/spark/network/ssl/SSLFactory.java | 6 +++--- .../org/apache/spark/network/util/DBProvider.java | 6 +++--- .../apache/spark/network/util/LevelDBProvider.java | 8 ++++---- .../org/apache/spark/network/util/NettyLogger.java | 6 +++--- .../apache/spark/network/util/RocksDBProvider.java | 8 ++++---- .../spark/network/sasl/ShuffleSecretManager.java | 7 ++++--- .../spark/network/shuffle/BlockStoreClient.java | 6 +++--- .../spark/network/shuffle/ExternalBlockHandler.java | 7 ++++--- .../shuffle/ExternalShuffleBlockResolver.java | 7 ++++--- .../network/shuffle/OneForOneBlockFetcher.java | 7 ++++--- .../spark/network/shuffle/OneForOneBlockPusher.java | 7 ++++--- .../network/shuffle/RemoteBlockPushResolver.java | 7 
++++--- .../network/shuffle/RetryingBlockTransferor.java | 7 ++++--- .../network/shuffle/ShuffleTransportContext.java | 9 +++++---- .../shuffle/checksum/ShuffleChecksumHelper.java | 8 ++++---- .../spark/network/yarn/YarnShuffleService.java | 13 +++++++------ .../internal/{Logger.java => SparkLogger.java} | 4 ++-- .../{LoggerFactory.java => SparkLoggerFactory.java} | 10 +++++----- .../org/apache/spark/network/util/JavaUtils.java | 6 +++--- .../java/org/apache/spark/util/LoggerSuiteBase.java | 4 ++-- .../org/apache/spark/util/PatternLoggerSuite.java | 8 ++++---- .../apache/spark/util/StructuredLoggerSuite.java | 9 +++++---- .../codahale/metrics/ganglia/GangliaReporter.java | 6 +++--- .../org/apache/spark/io/ReadAheadInputStream.java | 7 ++++--- .../org/apache/spark/memory/TaskMemoryManager.java | 6 +++--- .../shuffle/sort/BypassMergeSortShuffleWriter.java | 7 ++++--- .../spark/shuffle/sort/ShuffleExternalSorter.java | 7 ++++--- .../spark/shuffle/sort/UnsafeShuffleWriter.java | 6 +++--- .../sort/io/LocalDiskShuffleMapOutputWriter.java | 8 ++++---- .../apache/spark/unsafe/map/BytesToBytesMap.java | 6 +++--- .../unsafe/sort/UnsafeExternalSorter.java | 7 ++++--- .../unsafe/sort/UnsafeSorterSpillReader.java | 7 ++++--- .../catalyst/expressions/RowBasedKeyValueBatch.java | 7 ++++--- .../spark/sql/util/CaseInsensitiveStringMap.java | 7 ++++--- .../org/apache/hive/service/AbstractService.java | 6 +++--- .../org/apache/hive/service/CompositeService.java | 6 +++--- .../java/org/apache/hive/service/CookieSigner.java | 6 +++--- .../org/apache/hive/service/ServiceOperations.java | 6 +++--- .../java/org/apache/hive/service/ServiceUtils.java | 4 ++-- .../apache/hive/service/auth/HiveAuthFactory.java | 6 +++--- .../org/apache/hive/service/auth/HttpAuthUtils.java | 6 +++--- .../hive/service/auth/TSetIpAddressProcessor.java | 6 +++--- .../org/apache/hive/service/cli/CLIService.java | 6 +++--- .../org/apache/hive/service/cli/ColumnBasedSet.java | 6 +++--- 
.../cli/operation/ClassicTableTypeMapping.java | 6 +++--- .../hive/service/cli/operation/Operation.java | 6 +++--- .../service/cli/operation/OperationManager.java | 6 +++--- .../hive/service/cli/session/HiveSessionImpl.java | 6 +++--- .../hive/service/cli/session/SessionManager.java | 6 +++--- .../hive/service/cli/thrift/ThriftCLIService.java | 6 +++--- .../hive/service/cli/thrift/ThriftHttpServlet.java | 6 +++--- .../org/apache/hive/service/server/HiveServer2.java | 6 +++--- .../service/server/ThreadWithGarbageCleanup.java | 6 +++--- .../sql/hive/thriftserver/SparkSQLCLIService.scala | 6 +++--- 74 files changed, 255 insertions(+), 231 deletions(-) rename common/utils/src/main/java/org/apache/spark/internal/{Logger.java => SparkLogger.java} (98%) rename common/utils/src/main/java/org/apache/spark/internal/{LoggerFactory.java => SparkLoggerFactory.java} (81%) diff --git a/common/network-common/src/main/java/org/apache/spark/network/TransportContext.java b/common/network-common/src/main/java/org/apache/spark/network/TransportContext.java index 815f4dc6e6cd6..e8ce6840e3fc3 100644 --- a/common/network-common/src/main/java/org/apache/spark/network/TransportContext.java +++ b/common/network-common/src/main/java/org/apache/spark/network/TransportContext.java @@ -35,8 +35,8 @@ import io.netty.handler.timeout.IdleStateHandler; import io.netty.handler.codec.MessageToMessageEncoder; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.network.client.TransportClient; import org.apache.spark.network.client.TransportClientBootstrap; import org.apache.spark.network.client.TransportClientFactory; @@ -73,7 +73,7 @@ * processes to send messages back to the client on an existing channel. 
*/ public class TransportContext implements Closeable { - private static final Logger logger = LoggerFactory.getLogger(TransportContext.class); + private static final SparkLogger logger = SparkLoggerFactory.getLogger(TransportContext.class); private static final NettyLogger nettyLogger = new NettyLogger(); private final TransportConf conf; diff --git a/common/network-common/src/main/java/org/apache/spark/network/client/TransportClient.java b/common/network-common/src/main/java/org/apache/spark/network/client/TransportClient.java index 77ef6f09c1b5c..4c144a73a9299 100644 --- a/common/network-common/src/main/java/org/apache/spark/network/client/TransportClient.java +++ b/common/network-common/src/main/java/org/apache/spark/network/client/TransportClient.java @@ -36,8 +36,8 @@ import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.internal.LogKeys; import org.apache.spark.internal.MDC; import org.apache.spark.network.buffer.ManagedBuffer; @@ -73,7 +73,7 @@ * Concurrency: thread safe and can be called from multiple threads. 
*/ public class TransportClient implements Closeable { - private static final Logger logger = LoggerFactory.getLogger(TransportClient.class); + private static final SparkLogger logger = SparkLoggerFactory.getLogger(TransportClient.class); private final Channel channel; private final TransportResponseHandler handler; diff --git a/common/network-common/src/main/java/org/apache/spark/network/client/TransportClientFactory.java b/common/network-common/src/main/java/org/apache/spark/network/client/TransportClientFactory.java index f2dbfd92b854c..e1f19f956cc0a 100644 --- a/common/network-common/src/main/java/org/apache/spark/network/client/TransportClientFactory.java +++ b/common/network-common/src/main/java/org/apache/spark/network/client/TransportClientFactory.java @@ -43,8 +43,8 @@ import io.netty.util.concurrent.Future; import io.netty.util.concurrent.GenericFutureListener; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.internal.LogKeys; import org.apache.spark.internal.MDC; import org.apache.spark.network.TransportContext; @@ -79,7 +79,8 @@ private static class ClientPool { } } - private static final Logger logger = LoggerFactory.getLogger(TransportClientFactory.class); + private static final SparkLogger logger = + SparkLoggerFactory.getLogger(TransportClientFactory.class); private final TransportContext context; private final TransportConf conf; diff --git a/common/network-common/src/main/java/org/apache/spark/network/client/TransportResponseHandler.java b/common/network-common/src/main/java/org/apache/spark/network/client/TransportResponseHandler.java index 24ae570044e25..be4cf4a58abeb 100644 --- a/common/network-common/src/main/java/org/apache/spark/network/client/TransportResponseHandler.java +++ b/common/network-common/src/main/java/org/apache/spark/network/client/TransportResponseHandler.java @@ 
-29,8 +29,8 @@ import org.apache.commons.lang3.tuple.ImmutablePair; import org.apache.commons.lang3.tuple.Pair; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.internal.LogKeys; import org.apache.spark.internal.MDC; import org.apache.spark.network.protocol.ChunkFetchFailure; @@ -53,7 +53,8 @@ * Concurrency: thread safe and can be called from multiple threads. */ public class TransportResponseHandler extends MessageHandler { - private static final Logger logger = LoggerFactory.getLogger(TransportResponseHandler.class); + private static final SparkLogger logger = + SparkLoggerFactory.getLogger(TransportResponseHandler.class); private final Channel channel; diff --git a/common/network-common/src/main/java/org/apache/spark/network/crypto/AuthClientBootstrap.java b/common/network-common/src/main/java/org/apache/spark/network/crypto/AuthClientBootstrap.java index 0bfede415ba54..08e2c084fe67b 100644 --- a/common/network-common/src/main/java/org/apache/spark/network/crypto/AuthClientBootstrap.java +++ b/common/network-common/src/main/java/org/apache/spark/network/crypto/AuthClientBootstrap.java @@ -27,8 +27,8 @@ import io.netty.buffer.Unpooled; import io.netty.channel.Channel; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.network.client.TransportClient; import org.apache.spark.network.client.TransportClientBootstrap; import org.apache.spark.network.sasl.SaslClientBootstrap; @@ -47,7 +47,7 @@ */ public class AuthClientBootstrap implements TransportClientBootstrap { - private static final Logger LOG = LoggerFactory.getLogger(AuthClientBootstrap.class); + private static final SparkLogger LOG = 
SparkLoggerFactory.getLogger(AuthClientBootstrap.class); private final TransportConf conf; private final String appId; diff --git a/common/network-common/src/main/java/org/apache/spark/network/crypto/AuthRpcHandler.java b/common/network-common/src/main/java/org/apache/spark/network/crypto/AuthRpcHandler.java index 778cb9a120e17..65367743e24f9 100644 --- a/common/network-common/src/main/java/org/apache/spark/network/crypto/AuthRpcHandler.java +++ b/common/network-common/src/main/java/org/apache/spark/network/crypto/AuthRpcHandler.java @@ -26,8 +26,8 @@ import io.netty.buffer.Unpooled; import io.netty.channel.Channel; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.internal.LogKeys; import org.apache.spark.internal.MDC; import org.apache.spark.network.client.RpcResponseCallback; @@ -48,7 +48,7 @@ * authenticated. A connection may be authenticated at most once. */ class AuthRpcHandler extends AbstractAuthRpcHandler { - private static final Logger LOG = LoggerFactory.getLogger(AuthRpcHandler.class); + private static final SparkLogger LOG = SparkLoggerFactory.getLogger(AuthRpcHandler.class); /** Transport configuration. 
*/ private final TransportConf conf; diff --git a/common/network-common/src/main/java/org/apache/spark/network/protocol/MessageDecoder.java b/common/network-common/src/main/java/org/apache/spark/network/protocol/MessageDecoder.java index 4dbd968788d42..a9b700a7800e0 100644 --- a/common/network-common/src/main/java/org/apache/spark/network/protocol/MessageDecoder.java +++ b/common/network-common/src/main/java/org/apache/spark/network/protocol/MessageDecoder.java @@ -24,8 +24,8 @@ import io.netty.channel.ChannelHandlerContext; import io.netty.handler.codec.MessageToMessageDecoder; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; /** * Decoder used by the client side to encode server-to-client responses. @@ -34,7 +34,7 @@ @ChannelHandler.Sharable public final class MessageDecoder extends MessageToMessageDecoder { - private static final Logger logger = LoggerFactory.getLogger(MessageDecoder.class); + private static final SparkLogger logger = SparkLoggerFactory.getLogger(MessageDecoder.class); public static final MessageDecoder INSTANCE = new MessageDecoder(); diff --git a/common/network-common/src/main/java/org/apache/spark/network/protocol/MessageEncoder.java b/common/network-common/src/main/java/org/apache/spark/network/protocol/MessageEncoder.java index 081329c74aa2c..ab20fb908eb42 100644 --- a/common/network-common/src/main/java/org/apache/spark/network/protocol/MessageEncoder.java +++ b/common/network-common/src/main/java/org/apache/spark/network/protocol/MessageEncoder.java @@ -25,8 +25,8 @@ import io.netty.handler.codec.MessageToMessageEncoder; import org.apache.spark.internal.LogKeys; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.internal.MDC; /** @@ 
-36,7 +36,7 @@ @ChannelHandler.Sharable public final class MessageEncoder extends MessageToMessageEncoder { - private static final Logger logger = LoggerFactory.getLogger(MessageEncoder.class); + private static final SparkLogger logger = SparkLoggerFactory.getLogger(MessageEncoder.class); public static final MessageEncoder INSTANCE = new MessageEncoder(); diff --git a/common/network-common/src/main/java/org/apache/spark/network/protocol/SslMessageEncoder.java b/common/network-common/src/main/java/org/apache/spark/network/protocol/SslMessageEncoder.java index 94e4c1011cce2..abe6ccca7bfd6 100644 --- a/common/network-common/src/main/java/org/apache/spark/network/protocol/SslMessageEncoder.java +++ b/common/network-common/src/main/java/org/apache/spark/network/protocol/SslMessageEncoder.java @@ -26,8 +26,8 @@ import io.netty.handler.codec.MessageToMessageEncoder; import io.netty.handler.stream.ChunkedStream; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.internal.LogKeys; import org.apache.spark.internal.MDC; @@ -38,7 +38,7 @@ @ChannelHandler.Sharable public final class SslMessageEncoder extends MessageToMessageEncoder { - private static final Logger logger = LoggerFactory.getLogger(SslMessageEncoder.class); + private static final SparkLogger logger = SparkLoggerFactory.getLogger(SslMessageEncoder.class); private SslMessageEncoder() {} diff --git a/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslClientBootstrap.java b/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslClientBootstrap.java index fca46f6120e6b..0a355d28c3668 100644 --- a/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslClientBootstrap.java +++ b/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslClientBootstrap.java @@ -27,8 +27,8 @@ import 
io.netty.buffer.Unpooled; import io.netty.channel.Channel; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.network.client.TransportClient; import org.apache.spark.network.client.TransportClientBootstrap; import org.apache.spark.network.util.JavaUtils; @@ -39,7 +39,7 @@ * server should be setup with a {@link SaslRpcHandler} with matching keys for the given appId. */ public class SaslClientBootstrap implements TransportClientBootstrap { - private static final Logger logger = LoggerFactory.getLogger(SaslClientBootstrap.class); + private static final SparkLogger logger = SparkLoggerFactory.getLogger(SaslClientBootstrap.class); private final TransportConf conf; private final String appId; diff --git a/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslRpcHandler.java b/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslRpcHandler.java index 61a599fc6b9ec..b5fffe583ec63 100644 --- a/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslRpcHandler.java +++ b/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslRpcHandler.java @@ -25,8 +25,8 @@ import io.netty.buffer.Unpooled; import io.netty.channel.Channel; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.network.client.RpcResponseCallback; import org.apache.spark.network.client.TransportClient; import org.apache.spark.network.server.AbstractAuthRpcHandler; @@ -43,7 +43,7 @@ * which are individual RPCs. 
*/ public class SaslRpcHandler extends AbstractAuthRpcHandler { - private static final Logger logger = LoggerFactory.getLogger(SaslRpcHandler.class); + private static final SparkLogger logger = SparkLoggerFactory.getLogger(SaslRpcHandler.class); /** Transport configuration. */ private final TransportConf conf; diff --git a/common/network-common/src/main/java/org/apache/spark/network/sasl/SparkSaslClient.java b/common/network-common/src/main/java/org/apache/spark/network/sasl/SparkSaslClient.java index cf391b7049b6c..3600c1045dbf4 100644 --- a/common/network-common/src/main/java/org/apache/spark/network/sasl/SparkSaslClient.java +++ b/common/network-common/src/main/java/org/apache/spark/network/sasl/SparkSaslClient.java @@ -32,8 +32,8 @@ import com.google.common.base.Throwables; import com.google.common.collect.ImmutableMap; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import static org.apache.spark.network.sasl.SparkSaslServer.*; @@ -43,7 +43,7 @@ * firstToken, which is then followed by a set of challenges and responses. 
*/ public class SparkSaslClient implements SaslEncryptionBackend { - private static final Logger logger = LoggerFactory.getLogger(SparkSaslClient.class); + private static final SparkLogger logger = SparkLoggerFactory.getLogger(SparkSaslClient.class); private final String secretKeyId; private final SecretKeyHolder secretKeyHolder; diff --git a/common/network-common/src/main/java/org/apache/spark/network/sasl/SparkSaslServer.java b/common/network-common/src/main/java/org/apache/spark/network/sasl/SparkSaslServer.java index 0008b9ad284a5..b897650afe832 100644 --- a/common/network-common/src/main/java/org/apache/spark/network/sasl/SparkSaslServer.java +++ b/common/network-common/src/main/java/org/apache/spark/network/sasl/SparkSaslServer.java @@ -37,8 +37,8 @@ import io.netty.buffer.Unpooled; import io.netty.handler.codec.base64.Base64; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; /** * A SASL Server for Spark which simply keeps track of the state of a single SASL session, from the @@ -46,7 +46,7 @@ * connections on some socket.) */ public class SparkSaslServer implements SaslEncryptionBackend { - private static final Logger logger = LoggerFactory.getLogger(SparkSaslServer.class); + private static final SparkLogger logger = SparkLoggerFactory.getLogger(SparkSaslServer.class); /** * This is passed as the server name when creating the sasl client/server. 
diff --git a/common/network-common/src/main/java/org/apache/spark/network/server/ChunkFetchRequestHandler.java b/common/network-common/src/main/java/org/apache/spark/network/server/ChunkFetchRequestHandler.java index dc3b559d36022..cc0bed7ed5b6d 100644 --- a/common/network-common/src/main/java/org/apache/spark/network/server/ChunkFetchRequestHandler.java +++ b/common/network-common/src/main/java/org/apache/spark/network/server/ChunkFetchRequestHandler.java @@ -27,8 +27,8 @@ import io.netty.channel.SimpleChannelInboundHandler; import org.apache.spark.internal.LogKeys; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.internal.MDC; import org.apache.spark.network.buffer.ManagedBuffer; import org.apache.spark.network.client.TransportClient; @@ -51,7 +51,8 @@ * registering executors, or waiting for response for an OpenBlocks messages. 
*/ public class ChunkFetchRequestHandler extends SimpleChannelInboundHandler { - private static final Logger logger = LoggerFactory.getLogger(ChunkFetchRequestHandler.class); + private static final SparkLogger logger = + SparkLoggerFactory.getLogger(ChunkFetchRequestHandler.class); private final TransportClient client; private final StreamManager streamManager; diff --git a/common/network-common/src/main/java/org/apache/spark/network/server/OneForOneStreamManager.java b/common/network-common/src/main/java/org/apache/spark/network/server/OneForOneStreamManager.java index 3163424692870..f322293782dee 100644 --- a/common/network-common/src/main/java/org/apache/spark/network/server/OneForOneStreamManager.java +++ b/common/network-common/src/main/java/org/apache/spark/network/server/OneForOneStreamManager.java @@ -29,8 +29,8 @@ import org.apache.commons.lang3.tuple.ImmutablePair; import org.apache.commons.lang3.tuple.Pair; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.network.buffer.ManagedBuffer; import org.apache.spark.network.client.TransportClient; @@ -39,7 +39,8 @@ * individually fetched as chunks by the client. Each registered buffer is one chunk. 
*/ public class OneForOneStreamManager extends StreamManager { - private static final Logger logger = LoggerFactory.getLogger(OneForOneStreamManager.class); + private static final SparkLogger logger = + SparkLoggerFactory.getLogger(OneForOneStreamManager.class); private final AtomicLong nextStreamId; private final ConcurrentHashMap streams; diff --git a/common/network-common/src/main/java/org/apache/spark/network/server/RpcHandler.java b/common/network-common/src/main/java/org/apache/spark/network/server/RpcHandler.java index b91e14e6332a3..a7c38917d17f6 100644 --- a/common/network-common/src/main/java/org/apache/spark/network/server/RpcHandler.java +++ b/common/network-common/src/main/java/org/apache/spark/network/server/RpcHandler.java @@ -19,8 +19,8 @@ import java.nio.ByteBuffer; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.network.client.MergedBlockMetaResponseCallback; import org.apache.spark.network.client.RpcResponseCallback; import org.apache.spark.network.client.StreamCallbackWithID; @@ -122,7 +122,7 @@ public void exceptionCaught(Throwable cause, TransportClient client) { } private static class OneWayRpcCallback implements RpcResponseCallback { - private static final Logger logger = LoggerFactory.getLogger(OneWayRpcCallback.class); + private static final SparkLogger logger = SparkLoggerFactory.getLogger(OneWayRpcCallback.class); @Override public void onSuccess(ByteBuffer response) { diff --git a/common/network-common/src/main/java/org/apache/spark/network/server/TransportChannelHandler.java b/common/network-common/src/main/java/org/apache/spark/network/server/TransportChannelHandler.java index ad8b8d71bcc68..283f0f0a431fd 100644 --- a/common/network-common/src/main/java/org/apache/spark/network/server/TransportChannelHandler.java +++ 
b/common/network-common/src/main/java/org/apache/spark/network/server/TransportChannelHandler.java @@ -23,8 +23,8 @@ import io.netty.handler.timeout.IdleStateEvent; import org.apache.spark.network.TransportContext; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.internal.LogKeys; import org.apache.spark.internal.MDC; import org.apache.spark.network.client.TransportClient; @@ -53,7 +53,8 @@ * timeout if the client is continuously sending but getting no responses, for simplicity. */ public class TransportChannelHandler extends SimpleChannelInboundHandler { - private static final Logger logger = LoggerFactory.getLogger(TransportChannelHandler.class); + private static final SparkLogger logger = + SparkLoggerFactory.getLogger(TransportChannelHandler.class); private final TransportClient client; private final TransportResponseHandler responseHandler; diff --git a/common/network-common/src/main/java/org/apache/spark/network/server/TransportRequestHandler.java b/common/network-common/src/main/java/org/apache/spark/network/server/TransportRequestHandler.java index 9c581193d16fb..687c3040ed083 100644 --- a/common/network-common/src/main/java/org/apache/spark/network/server/TransportRequestHandler.java +++ b/common/network-common/src/main/java/org/apache/spark/network/server/TransportRequestHandler.java @@ -25,8 +25,8 @@ import io.netty.channel.Channel; import io.netty.channel.ChannelFuture; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.internal.LogKeys; import org.apache.spark.internal.MDC; import org.apache.spark.network.buffer.ManagedBuffer; @@ -46,7 +46,8 @@ */ public class TransportRequestHandler extends MessageHandler { - private static 
final Logger logger = LoggerFactory.getLogger(TransportRequestHandler.class); + private static final SparkLogger logger = + SparkLoggerFactory.getLogger(TransportRequestHandler.class); /** The Netty channel that this handler is associated with. */ private final Channel channel; diff --git a/common/network-common/src/main/java/org/apache/spark/network/server/TransportServer.java b/common/network-common/src/main/java/org/apache/spark/network/server/TransportServer.java index 4cbde59ed6e78..d1a19652f5649 100644 --- a/common/network-common/src/main/java/org/apache/spark/network/server/TransportServer.java +++ b/common/network-common/src/main/java/org/apache/spark/network/server/TransportServer.java @@ -35,8 +35,8 @@ import io.netty.channel.socket.SocketChannel; import org.apache.commons.lang3.SystemUtils; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.network.TransportContext; import org.apache.spark.network.util.*; @@ -44,7 +44,7 @@ * Server for the efficient, low-level streaming service. 
*/ public class TransportServer implements Closeable { - private static final Logger logger = LoggerFactory.getLogger(TransportServer.class); + private static final SparkLogger logger = SparkLoggerFactory.getLogger(TransportServer.class); private final TransportContext context; private final TransportConf conf; diff --git a/common/network-common/src/main/java/org/apache/spark/network/ssl/ReloadingX509TrustManager.java b/common/network-common/src/main/java/org/apache/spark/network/ssl/ReloadingX509TrustManager.java index 52e1c9a1fc6a5..09609d0ac8ad9 100644 --- a/common/network-common/src/main/java/org/apache/spark/network/ssl/ReloadingX509TrustManager.java +++ b/common/network-common/src/main/java/org/apache/spark/network/ssl/ReloadingX509TrustManager.java @@ -30,8 +30,8 @@ import com.google.common.annotations.VisibleForTesting; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; /** * A {@link TrustManager} implementation that reloads its configuration when @@ -45,7 +45,8 @@ public final class ReloadingX509TrustManager implements X509TrustManager, Runnable { - private static final Logger logger = LoggerFactory.getLogger(ReloadingX509TrustManager.class); + private static final SparkLogger logger = + SparkLoggerFactory.getLogger(ReloadingX509TrustManager.class); private final String type; private final File file; diff --git a/common/network-common/src/main/java/org/apache/spark/network/ssl/SSLFactory.java b/common/network-common/src/main/java/org/apache/spark/network/ssl/SSLFactory.java index 85589e4acceab..a2e42e3eb39f6 100644 --- a/common/network-common/src/main/java/org/apache/spark/network/ssl/SSLFactory.java +++ b/common/network-common/src/main/java/org/apache/spark/network/ssl/SSLFactory.java @@ -49,12 +49,12 @@ import io.netty.handler.ssl.SslContextBuilder; import io.netty.handler.ssl.SslProvider; -import 
org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.network.util.JavaUtils; public class SSLFactory { - private static final Logger logger = LoggerFactory.getLogger(SSLFactory.class); + private static final SparkLogger logger = SparkLoggerFactory.getLogger(SSLFactory.class); /** * For a configuration specifying keystore/truststore files diff --git a/common/network-common/src/main/java/org/apache/spark/network/util/DBProvider.java b/common/network-common/src/main/java/org/apache/spark/network/util/DBProvider.java index 950a5298fd6d5..94a64b3f4037c 100644 --- a/common/network-common/src/main/java/org/apache/spark/network/util/DBProvider.java +++ b/common/network-common/src/main/java/org/apache/spark/network/util/DBProvider.java @@ -22,8 +22,8 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.annotations.VisibleForTesting; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.network.shuffledb.DB; import org.apache.spark.network.shuffledb.DBBackend; import org.apache.spark.network.shuffledb.LevelDB; @@ -31,7 +31,7 @@ import org.apache.spark.network.shuffledb.StoreVersion; public class DBProvider { - private static final Logger logger = LoggerFactory.getLogger(DBProvider.class); + private static final SparkLogger logger = SparkLoggerFactory.getLogger(DBProvider.class); public static DB initDB( DBBackend dbBackend, File dbFile, diff --git a/common/network-common/src/main/java/org/apache/spark/network/util/LevelDBProvider.java b/common/network-common/src/main/java/org/apache/spark/network/util/LevelDBProvider.java index 184d83c67224d..391931961a474 100644 --- 
a/common/network-common/src/main/java/org/apache/spark/network/util/LevelDBProvider.java +++ b/common/network-common/src/main/java/org/apache/spark/network/util/LevelDBProvider.java @@ -27,8 +27,8 @@ import org.iq80.leveldb.DB; import org.iq80.leveldb.Options; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.internal.LogKeys; import org.apache.spark.internal.MDC; import org.apache.spark.network.shuffledb.StoreVersion; @@ -37,7 +37,7 @@ * LevelDB utility class available in the network package. */ public class LevelDBProvider { - private static final Logger logger = LoggerFactory.getLogger(LevelDBProvider.class); + private static final SparkLogger logger = SparkLoggerFactory.getLogger(LevelDBProvider.class); public static DB initLevelDB(File dbFile, StoreVersion version, ObjectMapper mapper) throws IOException { @@ -101,7 +101,7 @@ static DB initLevelDB(File file) throws IOException { } private static class LevelDBLogger implements org.iq80.leveldb.Logger { - private static final Logger LOG = LoggerFactory.getLogger(LevelDBLogger.class); + private static final SparkLogger LOG = SparkLoggerFactory.getLogger(LevelDBLogger.class); @Override public void log(String message) { diff --git a/common/network-common/src/main/java/org/apache/spark/network/util/NettyLogger.java b/common/network-common/src/main/java/org/apache/spark/network/util/NettyLogger.java index 2d6dcdbe0e831..a7063151fae89 100644 --- a/common/network-common/src/main/java/org/apache/spark/network/util/NettyLogger.java +++ b/common/network-common/src/main/java/org/apache/spark/network/util/NettyLogger.java @@ -26,11 +26,11 @@ import io.netty.handler.logging.LoggingHandler; import io.netty.handler.logging.LogLevel; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import 
org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; public class NettyLogger { - private static final Logger logger = LoggerFactory.getLogger(NettyLogger.class); + private static final SparkLogger logger = SparkLoggerFactory.getLogger(NettyLogger.class); /** A Netty LoggingHandler which does not dump the message contents. */ private static class NoContentLoggingHandler extends LoggingHandler { diff --git a/common/network-common/src/main/java/org/apache/spark/network/util/RocksDBProvider.java b/common/network-common/src/main/java/org/apache/spark/network/util/RocksDBProvider.java index 994e21eb439d5..1753c124c9935 100644 --- a/common/network-common/src/main/java/org/apache/spark/network/util/RocksDBProvider.java +++ b/common/network-common/src/main/java/org/apache/spark/network/util/RocksDBProvider.java @@ -25,8 +25,8 @@ import com.google.common.annotations.VisibleForTesting; import org.rocksdb.*; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.internal.LogKeys; import org.apache.spark.internal.MDC; import org.apache.spark.network.shuffledb.StoreVersion; @@ -40,7 +40,7 @@ public class RocksDBProvider { org.rocksdb.RocksDB.loadLibrary(); } - private static final Logger logger = LoggerFactory.getLogger(RocksDBProvider.class); + private static final SparkLogger logger = SparkLoggerFactory.getLogger(RocksDBProvider.class); public static RocksDB initRockDB(File dbFile, StoreVersion version, ObjectMapper mapper) throws IOException { @@ -135,7 +135,7 @@ static RocksDB initRocksDB(File file) throws IOException { } private static class RocksDBLogger extends org.rocksdb.Logger { - private static final Logger LOG = LoggerFactory.getLogger(RocksDBLogger.class); + private static final SparkLogger LOG = SparkLoggerFactory.getLogger(RocksDBLogger.class); RocksDBLogger(Options 
options) { super(options.infoLogLevel()); diff --git a/common/network-shuffle/src/main/java/org/apache/spark/network/sasl/ShuffleSecretManager.java b/common/network-shuffle/src/main/java/org/apache/spark/network/sasl/ShuffleSecretManager.java index 6a490cc7897e6..d67f2a3099d35 100644 --- a/common/network-shuffle/src/main/java/org/apache/spark/network/sasl/ShuffleSecretManager.java +++ b/common/network-shuffle/src/main/java/org/apache/spark/network/sasl/ShuffleSecretManager.java @@ -20,8 +20,8 @@ import java.nio.ByteBuffer; import java.util.concurrent.ConcurrentHashMap; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.internal.LogKeys; import org.apache.spark.internal.MDC; @@ -31,7 +31,8 @@ * A class that manages shuffle secret used by the external shuffle service. */ public class ShuffleSecretManager implements SecretKeyHolder { - private static final Logger logger = LoggerFactory.getLogger(ShuffleSecretManager.class); + private static final SparkLogger logger = + SparkLoggerFactory.getLogger(ShuffleSecretManager.class); private final ConcurrentHashMap shuffleSecretMap; diff --git a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/BlockStoreClient.java b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/BlockStoreClient.java index 695df81f89f3f..dcb0a52b0d66c 100644 --- a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/BlockStoreClient.java +++ b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/BlockStoreClient.java @@ -27,8 +27,8 @@ import com.codahale.metrics.MetricSet; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.internal.LogKeys; import 
org.apache.spark.internal.MDC; import org.apache.spark.network.buffer.ManagedBuffer; @@ -44,7 +44,7 @@ * or external service. */ public abstract class BlockStoreClient implements Closeable { - protected final Logger logger = LoggerFactory.getLogger(this.getClass()); + protected final SparkLogger logger = SparkLoggerFactory.getLogger(this.getClass()); protected volatile TransportClientFactory clientFactory; protected String appId; diff --git a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalBlockHandler.java b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalBlockHandler.java index a084fb4cc2139..5d33bfb345a9e 100644 --- a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalBlockHandler.java +++ b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalBlockHandler.java @@ -38,8 +38,8 @@ import com.google.common.base.Preconditions; import com.google.common.collect.Sets; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.internal.LogKeys; import org.apache.spark.internal.MDC; import org.apache.spark.network.buffer.ManagedBuffer; @@ -67,7 +67,8 @@ */ public class ExternalBlockHandler extends RpcHandler implements RpcHandler.MergedBlockMetaReqHandler { - private static final Logger logger = LoggerFactory.getLogger(ExternalBlockHandler.class); + private static final SparkLogger logger = + SparkLoggerFactory.getLogger(ExternalBlockHandler.class); private static final String SHUFFLE_MERGER_IDENTIFIER = "shuffle-push-merger"; private static final String SHUFFLE_BLOCK_ID = "shuffle"; private static final String SHUFFLE_CHUNK_ID = "shuffleChunk"; diff --git a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalShuffleBlockResolver.java 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalShuffleBlockResolver.java index 3e493327c36f2..e43eedd8b25eb 100644 --- a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalShuffleBlockResolver.java +++ b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalShuffleBlockResolver.java @@ -39,8 +39,8 @@ import com.google.common.cache.Weigher; import com.google.common.collect.Maps; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.internal.LogKeys; import org.apache.spark.internal.MDC; import org.apache.spark.network.buffer.FileSegmentManagedBuffer; @@ -64,7 +64,8 @@ * from Spark's IndexShuffleBlockResolver. */ public class ExternalShuffleBlockResolver { - private static final Logger logger = LoggerFactory.getLogger(ExternalShuffleBlockResolver.class); + private static final SparkLogger logger = + SparkLoggerFactory.getLogger(ExternalShuffleBlockResolver.class); private static final ObjectMapper mapper = new ObjectMapper(); diff --git a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/OneForOneBlockFetcher.java b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/OneForOneBlockFetcher.java index 7d20ac50b7374..c5c6ab313e193 100644 --- a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/OneForOneBlockFetcher.java +++ b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/OneForOneBlockFetcher.java @@ -27,8 +27,8 @@ import com.google.common.primitives.Ints; import com.google.common.primitives.Longs; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.network.buffer.ManagedBuffer; import 
org.apache.spark.network.client.ChunkReceivedCallback; import org.apache.spark.network.client.RpcResponseCallback; @@ -53,7 +53,8 @@ * {@link org.apache.spark.network.server.OneForOneStreamManager} on the server side. */ public class OneForOneBlockFetcher { - private static final Logger logger = LoggerFactory.getLogger(OneForOneBlockFetcher.class); + private static final SparkLogger logger = + SparkLoggerFactory.getLogger(OneForOneBlockFetcher.class); private static final String SHUFFLE_BLOCK_PREFIX = "shuffle_"; private static final String SHUFFLE_CHUNK_PREFIX = "shuffleChunk_"; private static final String SHUFFLE_BLOCK_SPLIT = "shuffle"; diff --git a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/OneForOneBlockPusher.java b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/OneForOneBlockPusher.java index bbb8661fb397f..d90ca1a88a267 100644 --- a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/OneForOneBlockPusher.java +++ b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/OneForOneBlockPusher.java @@ -23,8 +23,8 @@ import com.google.common.base.Preconditions; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.network.buffer.ManagedBuffer; import org.apache.spark.network.buffer.NioManagedBuffer; import org.apache.spark.network.client.RpcResponseCallback; @@ -44,7 +44,8 @@ * @since 3.1.0 */ public class OneForOneBlockPusher { - private static final Logger logger = LoggerFactory.getLogger(OneForOneBlockPusher.class); + private static final SparkLogger logger = + SparkLoggerFactory.getLogger(OneForOneBlockPusher.class); private static final ErrorHandler PUSH_ERROR_HANDLER = new ErrorHandler.BlockPushErrorHandler(); public static final String SHUFFLE_PUSH_BLOCK_PREFIX = "shufflePush"; diff --git 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/RemoteBlockPushResolver.java b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/RemoteBlockPushResolver.java index cf1e1cdb42b06..02a38eac5b409 100644 --- a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/RemoteBlockPushResolver.java +++ b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/RemoteBlockPushResolver.java @@ -65,8 +65,8 @@ import org.roaringbitmap.RoaringBitmap; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.internal.LogKeys; import org.apache.spark.internal.MDC; import org.apache.spark.network.buffer.FileSegmentManagedBuffer; @@ -98,7 +98,8 @@ public class RemoteBlockPushResolver implements MergedShuffleFileManager { private static final Cleaner CLEANER = Cleaner.create(); - private static final Logger logger = LoggerFactory.getLogger(RemoteBlockPushResolver.class); + private static final SparkLogger logger = + SparkLoggerFactory.getLogger(RemoteBlockPushResolver.class); public static final String MERGED_SHUFFLE_FILE_NAME_PREFIX = "shuffleMerged"; public static final String SHUFFLE_META_DELIMITER = ":"; diff --git a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/RetryingBlockTransferor.java b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/RetryingBlockTransferor.java index a5c26c6185bf1..ca2073af87c17 100644 --- a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/RetryingBlockTransferor.java +++ b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/RetryingBlockTransferor.java @@ -29,8 +29,8 @@ import com.google.common.collect.Sets; import com.google.common.util.concurrent.Uninterruptibles; -import org.apache.spark.internal.Logger; -import 
org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.internal.LogKeys; import org.apache.spark.internal.MDC; import org.apache.spark.network.buffer.ManagedBuffer; @@ -70,7 +70,8 @@ void createAndStart(String[] blockIds, BlockTransferListener listener) private static final ExecutorService executorService = Executors.newCachedThreadPool( NettyUtils.createThreadFactory("Block Transfer Retry")); - private static final Logger logger = LoggerFactory.getLogger(RetryingBlockTransferor.class); + private static final SparkLogger logger = + SparkLoggerFactory.getLogger(RetryingBlockTransferor.class); /** Used to initiate new Block transfer on our remaining blocks. */ private final BlockTransferStarter transferStarter; diff --git a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ShuffleTransportContext.java b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ShuffleTransportContext.java index 96651189aede1..705d47aab3b50 100644 --- a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ShuffleTransportContext.java +++ b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ShuffleTransportContext.java @@ -29,8 +29,8 @@ import io.netty.channel.socket.SocketChannel; import io.netty.handler.codec.MessageToMessageDecoder; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.network.TransportContext; import org.apache.spark.network.protocol.Message; import org.apache.spark.network.protocol.MessageDecoder; @@ -51,7 +51,8 @@ * are processed in the separate handlers. 
* */ public class ShuffleTransportContext extends TransportContext { - private static final Logger logger = LoggerFactory.getLogger(ShuffleTransportContext.class); + private static final SparkLogger logger = + SparkLoggerFactory.getLogger(ShuffleTransportContext.class); private static final ShuffleMessageDecoder SHUFFLE_DECODER = new ShuffleMessageDecoder(MessageDecoder.INSTANCE); private final EventLoopGroup finalizeWorkers; @@ -157,7 +158,7 @@ record RpcRequestInternal(BlockTransferMessage.Type messageType, RpcRequest rpcR } static class FinalizedHandler extends SimpleChannelInboundHandler { - private static final Logger logger = LoggerFactory.getLogger(FinalizedHandler.class); + private static final SparkLogger logger = SparkLoggerFactory.getLogger(FinalizedHandler.class); public static final String HANDLER_NAME = "finalizeHandler"; private final TransportRequestHandler transportRequestHandler; diff --git a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/checksum/ShuffleChecksumHelper.java b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/checksum/ShuffleChecksumHelper.java index 2665801a1d03c..f9c0c60c2f2c6 100644 --- a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/checksum/ShuffleChecksumHelper.java +++ b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/checksum/ShuffleChecksumHelper.java @@ -26,8 +26,8 @@ import com.google.common.io.ByteStreams; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.internal.LogKeys; import org.apache.spark.internal.MDC; import org.apache.spark.annotation.Private; @@ -38,8 +38,8 @@ */ @Private public class ShuffleChecksumHelper { - private static final Logger logger = - LoggerFactory.getLogger(ShuffleChecksumHelper.class); + private static final SparkLogger logger = + 
SparkLoggerFactory.getLogger(ShuffleChecksumHelper.class); public static final int CHECKSUM_CALCULATION_BUFFER = 8192; public static final Checksum[] EMPTY_CHECKSUM = new Checksum[0]; diff --git a/common/network-yarn/src/main/java/org/apache/spark/network/yarn/YarnShuffleService.java b/common/network-yarn/src/main/java/org/apache/spark/network/yarn/YarnShuffleService.java index 66a6429ba14d2..e0af3c5ae2468 100644 --- a/common/network-yarn/src/main/java/org/apache/spark/network/yarn/YarnShuffleService.java +++ b/common/network-yarn/src/main/java/org/apache/spark/network/yarn/YarnShuffleService.java @@ -54,8 +54,8 @@ import org.apache.spark.network.util.DBProvider; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.internal.LogKeys; import org.apache.spark.internal.MDC; import org.apache.spark.network.TransportContext; @@ -102,8 +102,9 @@ * This {@code classpath} configuration is only supported on YARN versions >= 2.9.0. */ public class YarnShuffleService extends AuxiliaryService { - private static final Logger defaultLogger = LoggerFactory.getLogger(YarnShuffleService.class); - private Logger logger = defaultLogger; + private static final SparkLogger defaultSparkLogger = + SparkLoggerFactory.getLogger(YarnShuffleService.class); + private SparkLogger logger = defaultSparkLogger; // Port on which the shuffle server listens for fetch requests private static final String SPARK_SHUFFLE_SERVICE_PORT_KEY = "spark.shuffle.service.port"; @@ -246,7 +247,7 @@ protected void serviceInit(Configuration externalConf) throws Exception { String logsNamespace = _conf.get(SPARK_SHUFFLE_SERVICE_LOGS_NAMESPACE_KEY, ""); if (!logsNamespace.isEmpty()) { String className = YarnShuffleService.class.getName(); - logger = LoggerFactory.getLogger(className + "." 
+ logsNamespace); + logger = SparkLoggerFactory.getLogger(className + "." + logsNamespace); } super.serviceInit(_conf); @@ -367,7 +368,7 @@ static MergedShuffleFileManager newMergedShuffleFileManagerInstance( return mergeManagerSubClazz.getConstructor(TransportConf.class, File.class) .newInstance(conf, mergeManagerFile); } catch (Exception e) { - defaultLogger.error("Unable to create an instance of {}", + defaultSparkLogger.error("Unable to create an instance of {}", MDC.of(LogKeys.CLASS_NAME$.MODULE$, mergeManagerImplClassName)); return new NoOpMergedShuffleFileManager(conf, mergeManagerFile); } diff --git a/common/utils/src/main/java/org/apache/spark/internal/Logger.java b/common/utils/src/main/java/org/apache/spark/internal/SparkLogger.java similarity index 98% rename from common/utils/src/main/java/org/apache/spark/internal/Logger.java rename to common/utils/src/main/java/org/apache/spark/internal/SparkLogger.java index 7c54e912b189a..20ad68641da05 100644 --- a/common/utils/src/main/java/org/apache/spark/internal/Logger.java +++ b/common/utils/src/main/java/org/apache/spark/internal/SparkLogger.java @@ -25,12 +25,12 @@ import org.apache.logging.log4j.message.MessageFactory; import org.apache.logging.log4j.message.ParameterizedMessageFactory; -public class Logger { +public class SparkLogger { private static final MessageFactory MESSAGE_FACTORY = ParameterizedMessageFactory.INSTANCE; private final org.slf4j.Logger slf4jLogger; - Logger(org.slf4j.Logger slf4jLogger) { + SparkLogger(org.slf4j.Logger slf4jLogger) { this.slf4jLogger = slf4jLogger; } diff --git a/common/utils/src/main/java/org/apache/spark/internal/LoggerFactory.java b/common/utils/src/main/java/org/apache/spark/internal/SparkLoggerFactory.java similarity index 81% rename from common/utils/src/main/java/org/apache/spark/internal/LoggerFactory.java rename to common/utils/src/main/java/org/apache/spark/internal/SparkLoggerFactory.java index 4595c9ad4b013..699f43d772839 100644 --- 
a/common/utils/src/main/java/org/apache/spark/internal/LoggerFactory.java +++ b/common/utils/src/main/java/org/apache/spark/internal/SparkLoggerFactory.java @@ -17,15 +17,15 @@ package org.apache.spark.internal; -public class LoggerFactory { +public class SparkLoggerFactory { - public static Logger getLogger(String name) { + public static SparkLogger getLogger(String name) { org.slf4j.Logger slf4jLogger = org.slf4j.LoggerFactory.getLogger(name); - return new Logger(slf4jLogger); + return new SparkLogger(slf4jLogger); } - public static Logger getLogger(Class clazz) { + public static SparkLogger getLogger(Class clazz) { org.slf4j.Logger slf4jLogger = org.slf4j.LoggerFactory.getLogger(clazz); - return new Logger(slf4jLogger); + return new SparkLogger(slf4jLogger); } } diff --git a/common/utils/src/main/java/org/apache/spark/network/util/JavaUtils.java b/common/utils/src/main/java/org/apache/spark/network/util/JavaUtils.java index d2120a997ba3b..65eef3833646f 100644 --- a/common/utils/src/main/java/org/apache/spark/network/util/JavaUtils.java +++ b/common/utils/src/main/java/org/apache/spark/network/util/JavaUtils.java @@ -30,8 +30,8 @@ import org.apache.commons.lang3.SystemUtils; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.internal.LogKeys; import org.apache.spark.internal.MDC; @@ -40,7 +40,7 @@ * own Utils, just accessible within this package. 
*/ public class JavaUtils { - private static final Logger logger = LoggerFactory.getLogger(JavaUtils.class); + private static final SparkLogger logger = SparkLoggerFactory.getLogger(JavaUtils.class); /** * Define a default value for driver memory here since this value is referenced across the code diff --git a/common/utils/src/test/java/org/apache/spark/util/LoggerSuiteBase.java b/common/utils/src/test/java/org/apache/spark/util/LoggerSuiteBase.java index 6c39304bece0c..ecc0a75070c7b 100644 --- a/common/utils/src/test/java/org/apache/spark/util/LoggerSuiteBase.java +++ b/common/utils/src/test/java/org/apache/spark/util/LoggerSuiteBase.java @@ -26,13 +26,13 @@ import org.apache.logging.log4j.Level; import org.junit.jupiter.api.Test; -import org.apache.spark.internal.Logger; +import org.apache.spark.internal.SparkLogger; import org.apache.spark.internal.LogKeys; import org.apache.spark.internal.MDC; public abstract class LoggerSuiteBase { - abstract Logger logger(); + abstract SparkLogger logger(); abstract String className(); abstract String logFilePath(); diff --git a/common/utils/src/test/java/org/apache/spark/util/PatternLoggerSuite.java b/common/utils/src/test/java/org/apache/spark/util/PatternLoggerSuite.java index 13b6a1d054706..33de91697efa5 100644 --- a/common/utils/src/test/java/org/apache/spark/util/PatternLoggerSuite.java +++ b/common/utils/src/test/java/org/apache/spark/util/PatternLoggerSuite.java @@ -19,12 +19,12 @@ import org.apache.logging.log4j.Level; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; public class PatternLoggerSuite extends LoggerSuiteBase { - private static final Logger LOGGER = LoggerFactory.getLogger(PatternLoggerSuite.class); + private static final SparkLogger LOGGER = SparkLoggerFactory.getLogger(PatternLoggerSuite.class); private String toRegexPattern(Level level, String msg) { return msg 
@@ -33,7 +33,7 @@ private String toRegexPattern(Level level, String msg) { } @Override - Logger logger() { + SparkLogger logger() { return LOGGER; } diff --git a/common/utils/src/test/java/org/apache/spark/util/StructuredLoggerSuite.java b/common/utils/src/test/java/org/apache/spark/util/StructuredLoggerSuite.java index c1b31bf68a7de..110e7cc7794ed 100644 --- a/common/utils/src/test/java/org/apache/spark/util/StructuredLoggerSuite.java +++ b/common/utils/src/test/java/org/apache/spark/util/StructuredLoggerSuite.java @@ -21,12 +21,13 @@ import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.logging.log4j.Level; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; public class StructuredLoggerSuite extends LoggerSuiteBase { - private static final Logger LOGGER = LoggerFactory.getLogger(StructuredLoggerSuite.class); + private static final SparkLogger LOGGER = + SparkLoggerFactory.getLogger(StructuredLoggerSuite.class); private static final ObjectMapper JSON_MAPPER = new ObjectMapper(); private String compactAndToRegexPattern(Level level, String json) { @@ -43,7 +44,7 @@ private String compactAndToRegexPattern(Level level, String json) { } @Override - Logger logger() { + SparkLogger logger() { return LOGGER; } diff --git a/connector/spark-ganglia-lgpl/src/main/java/com/codahale/metrics/ganglia/GangliaReporter.java b/connector/spark-ganglia-lgpl/src/main/java/com/codahale/metrics/ganglia/GangliaReporter.java index 37d133bcafd44..48c61e80d6655 100644 --- a/connector/spark-ganglia-lgpl/src/main/java/com/codahale/metrics/ganglia/GangliaReporter.java +++ b/connector/spark-ganglia-lgpl/src/main/java/com/codahale/metrics/ganglia/GangliaReporter.java @@ -19,8 +19,8 @@ import java.util.concurrent.TimeUnit; import java.util.regex.Pattern; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; 
+import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.internal.LogKeys; import org.apache.spark.internal.MDC; @@ -203,7 +203,7 @@ public GangliaReporter build(GMetric... gmetrics) { } } - private static final Logger LOGGER = LoggerFactory.getLogger(GangliaReporter.class); + private static final SparkLogger LOGGER = SparkLoggerFactory.getLogger(GangliaReporter.class); private final GMetric gmetric; private final GMetric[] gmetrics; diff --git a/core/src/main/java/org/apache/spark/io/ReadAheadInputStream.java b/core/src/main/java/org/apache/spark/io/ReadAheadInputStream.java index 4aab6d9edcaac..5e9f1b78273a5 100644 --- a/core/src/main/java/org/apache/spark/io/ReadAheadInputStream.java +++ b/core/src/main/java/org/apache/spark/io/ReadAheadInputStream.java @@ -31,8 +31,8 @@ import com.google.common.base.Preconditions; import com.google.common.base.Throwables; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.internal.LogKeys; import org.apache.spark.internal.MDC; import org.apache.spark.util.ThreadUtils; @@ -48,7 +48,8 @@ */ public class ReadAheadInputStream extends InputStream { - private static final Logger logger = LoggerFactory.getLogger(ReadAheadInputStream.class); + private static final SparkLogger logger = + SparkLoggerFactory.getLogger(ReadAheadInputStream.class); private ReentrantLock stateChangeLock = new ReentrantLock(); diff --git a/core/src/main/java/org/apache/spark/memory/TaskMemoryManager.java b/core/src/main/java/org/apache/spark/memory/TaskMemoryManager.java index aeabd358144f4..7e993c8a2a3a9 100644 --- a/core/src/main/java/org/apache/spark/memory/TaskMemoryManager.java +++ b/core/src/main/java/org/apache/spark/memory/TaskMemoryManager.java @@ -30,8 +30,8 @@ import com.google.common.annotations.VisibleForTesting; 
-import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.internal.LogKeys; import org.apache.spark.internal.MDC; import org.apache.spark.unsafe.memory.MemoryBlock; @@ -60,7 +60,7 @@ */ public class TaskMemoryManager { - private static final Logger logger = LoggerFactory.getLogger(TaskMemoryManager.class); + private static final SparkLogger logger = SparkLoggerFactory.getLogger(TaskMemoryManager.class); /** The number of bits used to address the page table. */ private static final int PAGE_NUMBER_BITS = 13; diff --git a/core/src/main/java/org/apache/spark/shuffle/sort/BypassMergeSortShuffleWriter.java b/core/src/main/java/org/apache/spark/shuffle/sort/BypassMergeSortShuffleWriter.java index 284d1dd036b45..86f7d5143eff5 100644 --- a/core/src/main/java/org/apache/spark/shuffle/sort/BypassMergeSortShuffleWriter.java +++ b/core/src/main/java/org/apache/spark/shuffle/sort/BypassMergeSortShuffleWriter.java @@ -34,8 +34,8 @@ import com.google.common.io.Closeables; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.internal.LogKeys; import org.apache.spark.internal.MDC; import org.apache.spark.Partitioner; @@ -83,7 +83,8 @@ final class BypassMergeSortShuffleWriter extends ShuffleWriter implements ShuffleChecksumSupport { - private static final Logger logger = LoggerFactory.getLogger(BypassMergeSortShuffleWriter.class); + private static final SparkLogger logger = + SparkLoggerFactory.getLogger(BypassMergeSortShuffleWriter.class); private final int fileBufferSize; private final boolean transferToEnabled; diff --git a/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleExternalSorter.java 
b/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleExternalSorter.java index 8fe432cfe239f..f96513f1b1097 100644 --- a/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleExternalSorter.java +++ b/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleExternalSorter.java @@ -32,8 +32,8 @@ import org.apache.spark.TaskContext; import org.apache.spark.executor.ShuffleWriteMetrics; import org.apache.spark.internal.config.package$; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.internal.LogKeys; import org.apache.spark.internal.MDC; import org.apache.spark.memory.MemoryConsumer; @@ -72,7 +72,8 @@ */ final class ShuffleExternalSorter extends MemoryConsumer implements ShuffleChecksumSupport { - private static final Logger logger = LoggerFactory.getLogger(ShuffleExternalSorter.class); + private static final SparkLogger logger = + SparkLoggerFactory.getLogger(ShuffleExternalSorter.class); @VisibleForTesting static final int DISK_WRITE_BUFFER_SIZE = 1024 * 1024; diff --git a/core/src/main/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriter.java b/core/src/main/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriter.java index 6da9d3def3f89..13fd18c0942b1 100644 --- a/core/src/main/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriter.java +++ b/core/src/main/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriter.java @@ -39,8 +39,8 @@ import org.apache.spark.*; import org.apache.spark.annotation.Private; import org.apache.spark.internal.config.package$; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.internal.LogKeys; import org.apache.spark.internal.MDC; import org.apache.spark.io.CompressionCodec; @@ -68,7 +68,7 @@ @Private 
public class UnsafeShuffleWriter extends ShuffleWriter { - private static final Logger logger = LoggerFactory.getLogger(UnsafeShuffleWriter.class); + private static final SparkLogger logger = SparkLoggerFactory.getLogger(UnsafeShuffleWriter.class); private static final ClassTag OBJECT_CLASS_TAG = ClassTag$.MODULE$.Object(); diff --git a/core/src/main/java/org/apache/spark/shuffle/sort/io/LocalDiskShuffleMapOutputWriter.java b/core/src/main/java/org/apache/spark/shuffle/sort/io/LocalDiskShuffleMapOutputWriter.java index fbf4abc160b66..606bb625f5b22 100644 --- a/core/src/main/java/org/apache/spark/shuffle/sort/io/LocalDiskShuffleMapOutputWriter.java +++ b/core/src/main/java/org/apache/spark/shuffle/sort/io/LocalDiskShuffleMapOutputWriter.java @@ -27,8 +27,8 @@ import java.util.Optional; import org.apache.spark.SparkConf; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.internal.LogKeys; import org.apache.spark.internal.MDC; import org.apache.spark.shuffle.api.ShuffleMapOutputWriter; @@ -45,8 +45,8 @@ */ public class LocalDiskShuffleMapOutputWriter implements ShuffleMapOutputWriter { - private static final Logger log = - LoggerFactory.getLogger(LocalDiskShuffleMapOutputWriter.class); + private static final SparkLogger log = + SparkLoggerFactory.getLogger(LocalDiskShuffleMapOutputWriter.class); private final int shuffleId; private final long mapId; diff --git a/core/src/main/java/org/apache/spark/unsafe/map/BytesToBytesMap.java b/core/src/main/java/org/apache/spark/unsafe/map/BytesToBytesMap.java index 3506e2a88864e..2a8e15cd09ccf 100644 --- a/core/src/main/java/org/apache/spark/unsafe/map/BytesToBytesMap.java +++ b/core/src/main/java/org/apache/spark/unsafe/map/BytesToBytesMap.java @@ -29,8 +29,8 @@ import org.apache.spark.SparkEnv; import org.apache.spark.executor.ShuffleWriteMetrics; import 
org.apache.spark.internal.LogKeys; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.internal.MDC; import org.apache.spark.memory.MemoryConsumer; import org.apache.spark.memory.SparkOutOfMemoryError; @@ -68,7 +68,7 @@ */ public final class BytesToBytesMap extends MemoryConsumer { - private static final Logger logger = LoggerFactory.getLogger(BytesToBytesMap.class); + private static final SparkLogger logger = SparkLoggerFactory.getLogger(BytesToBytesMap.class); private static final HashMapGrowthStrategy growthStrategy = HashMapGrowthStrategy.DOUBLING; diff --git a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java index 0be312d48a9de..af421e903ba3f 100644 --- a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java +++ b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java @@ -32,8 +32,8 @@ import org.apache.spark.TaskContext; import org.apache.spark.executor.ShuffleWriteMetrics; import org.apache.spark.internal.LogKeys; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.internal.MDC; import org.apache.spark.memory.MemoryConsumer; import org.apache.spark.memory.SparkOutOfMemoryError; @@ -52,7 +52,8 @@ */ public final class UnsafeExternalSorter extends MemoryConsumer { - private static final Logger logger = LoggerFactory.getLogger(UnsafeExternalSorter.class); + private static final SparkLogger logger = + SparkLoggerFactory.getLogger(UnsafeExternalSorter.class); @Nullable private final PrefixComparator prefixComparator; diff --git 
a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSorterSpillReader.java b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSorterSpillReader.java index 4eff6a70accad..0693f8cb1a808 100644 --- a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSorterSpillReader.java +++ b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSorterSpillReader.java @@ -23,8 +23,8 @@ import org.apache.spark.TaskContext; import org.apache.spark.internal.config.package$; import org.apache.spark.internal.config.ConfigEntry; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.io.NioBufferedFileInputStream; import org.apache.spark.io.ReadAheadInputStream; import org.apache.spark.serializer.SerializerManager; @@ -38,7 +38,8 @@ * of the file format). */ public final class UnsafeSorterSpillReader extends UnsafeSorterIterator implements Closeable { - private static final Logger logger = LoggerFactory.getLogger(UnsafeSorterSpillReader.class); + private static final SparkLogger logger = + SparkLoggerFactory.getLogger(UnsafeSorterSpillReader.class); public static final int MAX_BUFFER_SIZE_BYTES = 16777216; // 16 mb private InputStream in; diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/RowBasedKeyValueBatch.java b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/RowBasedKeyValueBatch.java index be7e682a3bdf5..c057c36ca8204 100644 --- a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/RowBasedKeyValueBatch.java +++ b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/RowBasedKeyValueBatch.java @@ -19,8 +19,8 @@ import java.io.Closeable; import java.io.IOException; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import 
org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.internal.LogKeys; import org.apache.spark.internal.MDC; import org.apache.spark.memory.MemoryConsumer; @@ -48,7 +48,8 @@ * */ public abstract class RowBasedKeyValueBatch extends MemoryConsumer implements Closeable { - protected static final Logger logger = LoggerFactory.getLogger(RowBasedKeyValueBatch.class); + protected static final SparkLogger logger = + SparkLoggerFactory.getLogger(RowBasedKeyValueBatch.class); private static final int DEFAULT_CAPACITY = 1 << 16; diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/util/CaseInsensitiveStringMap.java b/sql/catalyst/src/main/java/org/apache/spark/sql/util/CaseInsensitiveStringMap.java index d66524d841ca6..ec461f9740019 100644 --- a/sql/catalyst/src/main/java/org/apache/spark/sql/util/CaseInsensitiveStringMap.java +++ b/sql/catalyst/src/main/java/org/apache/spark/sql/util/CaseInsensitiveStringMap.java @@ -26,8 +26,8 @@ import java.util.Set; import org.apache.spark.annotation.Experimental; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.internal.LogKeys; import org.apache.spark.internal.MDC; import org.apache.spark.SparkIllegalArgumentException; @@ -45,7 +45,8 @@ */ @Experimental public class CaseInsensitiveStringMap implements Map { - private static final Logger logger = LoggerFactory.getLogger(CaseInsensitiveStringMap.class); + private static final SparkLogger logger = + SparkLoggerFactory.getLogger(CaseInsensitiveStringMap.class); public static CaseInsensitiveStringMap empty() { return new CaseInsensitiveStringMap(new HashMap<>(0)); diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/AbstractService.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/AbstractService.java index 
009b9f253ce0d..b31d024eeeeb9 100644 --- a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/AbstractService.java +++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/AbstractService.java @@ -22,8 +22,8 @@ import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.internal.LogKeys; import org.apache.spark.internal.MDC; @@ -33,7 +33,7 @@ */ public abstract class AbstractService implements Service { - private static final Logger LOG = LoggerFactory.getLogger(AbstractService.class); + private static final SparkLogger LOG = SparkLoggerFactory.getLogger(AbstractService.class); /** * Service state: initially {@link STATE#NOTINITED}. diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/CompositeService.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/CompositeService.java index ecd9de8154b31..663bcdb86f9f6 100644 --- a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/CompositeService.java +++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/CompositeService.java @@ -24,8 +24,8 @@ import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.internal.LogKeys; import org.apache.spark.internal.MDC; @@ -35,7 +35,7 @@ */ public class CompositeService extends AbstractService { - private static final Logger LOG = LoggerFactory.getLogger(CompositeService.class); + private static final SparkLogger LOG = SparkLoggerFactory.getLogger(CompositeService.class); private final List serviceList = new ArrayList(); diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/CookieSigner.java 
b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/CookieSigner.java index 25e0316d5e9c3..c315478939c8d 100644 --- a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/CookieSigner.java +++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/CookieSigner.java @@ -22,8 +22,8 @@ import org.apache.commons.codec.binary.Base64; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; /** * The cookie signer generates a signature based on SHA digest @@ -34,7 +34,7 @@ public class CookieSigner { private static final String SIGNATURE = "&s="; private static final String SHA_STRING = "SHA-256"; private byte[] secretBytes; - private static final Logger LOG = LoggerFactory.getLogger(CookieSigner.class); + private static final SparkLogger LOG = SparkLoggerFactory.getLogger(CookieSigner.class); /** * Constructor diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/ServiceOperations.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/ServiceOperations.java index d947f01681bea..92d733c563cab 100644 --- a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/ServiceOperations.java +++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/ServiceOperations.java @@ -19,8 +19,8 @@ import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.internal.LogKeys; import org.apache.spark.internal.MDC; @@ -29,7 +29,7 @@ * */ public final class ServiceOperations { - private static final Logger LOG = LoggerFactory.getLogger(ServiceOperations.class); + private static final SparkLogger LOG = SparkLoggerFactory.getLogger(ServiceOperations.class); private ServiceOperations() { } diff --git 
a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/ServiceUtils.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/ServiceUtils.java index 82ef4b9f9ce70..25db121207bbf 100644 --- a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/ServiceUtils.java +++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/ServiceUtils.java @@ -18,7 +18,7 @@ import java.io.IOException; -import org.apache.spark.internal.Logger; +import org.apache.spark.internal.SparkLogger; public class ServiceUtils { @@ -52,7 +52,7 @@ public static int indexOfDomainMatch(String userName) { * @param log the log to record problems to at debug level. Can be null. * @param closeables the objects to close */ - public static void cleanup(Logger log, java.io.Closeable... closeables) { + public static void cleanup(SparkLogger log, java.io.Closeable... closeables) { for (java.io.Closeable c : closeables) { if (c != null) { try { diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/HiveAuthFactory.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/HiveAuthFactory.java index b570e88e2bc5b..ecbda2661e960 100644 --- a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/HiveAuthFactory.java +++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/HiveAuthFactory.java @@ -43,8 +43,8 @@ import org.apache.thrift.transport.TTransportException; import org.apache.thrift.transport.TTransportFactory; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.internal.LogKeys; import org.apache.spark.internal.MDC; @@ -54,7 +54,7 @@ */ public class HiveAuthFactory { - private static final Logger LOG = LoggerFactory.getLogger(HiveAuthFactory.class); + private static final SparkLogger LOG = SparkLoggerFactory.getLogger(HiveAuthFactory.class); public enum 
AuthTypes { NOSASL("NOSASL"), diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/HttpAuthUtils.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/HttpAuthUtils.java index 0bfe361104dea..e307bdab04498 100644 --- a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/HttpAuthUtils.java +++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/HttpAuthUtils.java @@ -40,8 +40,8 @@ import org.ietf.jgss.GSSName; import org.ietf.jgss.Oid; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.internal.LogKeys; import org.apache.spark.internal.MDC; @@ -53,7 +53,7 @@ public final class HttpAuthUtils { public static final String AUTHORIZATION = "Authorization"; public static final String BASIC = "Basic"; public static final String NEGOTIATE = "Negotiate"; - private static final Logger LOG = LoggerFactory.getLogger(HttpAuthUtils.class); + private static final SparkLogger LOG = SparkLoggerFactory.getLogger(HttpAuthUtils.class); private static final String COOKIE_ATTR_SEPARATOR = "&"; private static final String COOKIE_CLIENT_USER_NAME = "cu"; private static final String COOKIE_CLIENT_RAND_NUMBER = "rn"; diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java index 3b24ad1ebe14f..e0091d6c04fe7 100644 --- a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java +++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java @@ -26,8 +26,8 @@ import org.apache.thrift.transport.TSocket; import org.apache.thrift.transport.TTransport; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import 
org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; /** * This class is responsible for setting the ipAddress for operations executed via HiveServer2. @@ -39,7 +39,7 @@ */ public class TSetIpAddressProcessor extends TCLIService.Processor { - private static final Logger LOGGER = LoggerFactory.getLogger(TSetIpAddressProcessor.class); + private static final SparkLogger LOGGER = SparkLoggerFactory.getLogger(TSetIpAddressProcessor.class); public TSetIpAddressProcessor(Iface iface) { super(iface); diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/CLIService.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/CLIService.java index e612b34d7bdf7..86fb725d3a3cc 100644 --- a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/CLIService.java +++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/CLIService.java @@ -50,8 +50,8 @@ import org.apache.hive.service.rpc.thrift.TTableSchema; import org.apache.hive.service.server.HiveServer2; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.internal.LogKeys; import org.apache.spark.internal.MDC; @@ -61,7 +61,7 @@ */ public class CLIService extends CompositeService implements ICLIService { - private static final Logger LOG = LoggerFactory.getLogger(CLIService.class); + private static final SparkLogger LOG = SparkLoggerFactory.getLogger(CLIService.class); public static final TProtocolVersion SERVER_VERSION; diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/ColumnBasedSet.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/ColumnBasedSet.java index f6a269e99251d..4331f6829fbf3 100644 --- a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/ColumnBasedSet.java +++ 
b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/ColumnBasedSet.java @@ -31,8 +31,8 @@ import org.apache.thrift.protocol.TProtocol; import org.apache.thrift.transport.TIOStreamTransport; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.internal.LogKeys; import org.apache.spark.internal.MDC; @@ -47,7 +47,7 @@ public class ColumnBasedSet implements RowSet { private final List columns; private byte[] blob; private boolean isBlobBased = false; - public static final Logger LOG = LoggerFactory.getLogger(ColumnBasedSet.class); + public static final SparkLogger LOG = SparkLoggerFactory.getLogger(ColumnBasedSet.class); public ColumnBasedSet(TableSchema schema) { descriptors = schema.toTypeDescriptors(); diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/operation/ClassicTableTypeMapping.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/operation/ClassicTableTypeMapping.java index 3876632211715..0b71b606b9d65 100644 --- a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/operation/ClassicTableTypeMapping.java +++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/operation/ClassicTableTypeMapping.java @@ -29,8 +29,8 @@ import com.google.common.collect.Multimap; import org.apache.hadoop.hive.metastore.TableType; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.internal.LogKeys; import org.apache.spark.internal.MDC; @@ -43,7 +43,7 @@ */ public class ClassicTableTypeMapping implements TableTypeMapping { - private static final Logger LOG = LoggerFactory.getLogger(ClassicTableTypeMapping.class); + private static final SparkLogger LOG = 
SparkLoggerFactory.getLogger(ClassicTableTypeMapping.class); public enum ClassicTableTypes { TABLE, diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/operation/Operation.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/operation/Operation.java index 135420508e21e..f0c1985ce58a3 100644 --- a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/operation/Operation.java +++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/operation/Operation.java @@ -39,8 +39,8 @@ import org.apache.hive.service.rpc.thrift.TRowSet; import org.apache.hive.service.rpc.thrift.TTableSchema; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.internal.LogKeys; import org.apache.spark.internal.MDC; @@ -49,7 +49,7 @@ public abstract class Operation { private OperationState state = OperationState.INITIALIZED; private final OperationHandle opHandle; private HiveConf configuration; - public static final Logger LOG = LoggerFactory.getLogger(Operation.class); + public static final SparkLogger LOG = SparkLoggerFactory.getLogger(Operation.class); public static final FetchOrientation DEFAULT_FETCH_ORIENTATION = FetchOrientation.FETCH_NEXT; public static final long DEFAULT_FETCH_MAX_ROWS = 100; protected boolean hasResultSet; diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/operation/OperationManager.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/operation/OperationManager.java index 1498cb4907f1f..fd8266d1a9acc 100644 --- a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/operation/OperationManager.java +++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/operation/OperationManager.java @@ -41,8 +41,8 @@ import org.apache.hive.service.rpc.thrift.TTableSchema; import 
org.apache.logging.log4j.core.Appender; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.internal.LogKeys; import org.apache.spark.internal.MDC; @@ -51,7 +51,7 @@ * */ public class OperationManager extends AbstractService { - private static final Logger LOG = LoggerFactory.getLogger(OperationManager.class); + private static final SparkLogger LOG = SparkLoggerFactory.getLogger(OperationManager.class); private final Map handleToOperation = new HashMap(); diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/session/HiveSessionImpl.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/session/HiveSessionImpl.java index e073fa4713bfb..410d010a79bdc 100644 --- a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/session/HiveSessionImpl.java +++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/session/HiveSessionImpl.java @@ -70,8 +70,8 @@ import org.apache.hive.service.rpc.thrift.TTableSchema; import org.apache.hive.service.server.ThreadWithGarbageCleanup; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.internal.LogKeys; import org.apache.spark.internal.MDC; @@ -94,7 +94,7 @@ public class HiveSessionImpl implements HiveSession { private String ipAddress; private static final String FETCH_WORK_SERDE_CLASS = "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"; - private static final Logger LOG = LoggerFactory.getLogger(HiveSessionImpl.class); + private static final SparkLogger LOG = SparkLoggerFactory.getLogger(HiveSessionImpl.class); private SessionManager sessionManager; private OperationManager operationManager; private final Set opHandleSet = new HashSet(); diff --git 
a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/session/SessionManager.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/session/SessionManager.java index 6c282b679ca8c..3f60fd00b82a7 100644 --- a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/session/SessionManager.java +++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/session/SessionManager.java @@ -39,8 +39,8 @@ import org.apache.hive.service.server.HiveServer2; import org.apache.hive.service.server.ThreadFactoryWithGarbageCleanup; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.internal.LogKeys; import org.apache.spark.internal.MDC; @@ -50,7 +50,7 @@ */ public class SessionManager extends CompositeService { - private static final Logger LOG = LoggerFactory.getLogger(SessionManager.class); + private static final SparkLogger LOG = SparkLoggerFactory.getLogger(SessionManager.class); public static final String HIVERCFILE = ".hiverc"; private HiveConf hiveConf; private final Map handleToSession = diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java index defe51bc97993..07af0013846ba 100644 --- a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java +++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java @@ -42,8 +42,8 @@ import org.apache.thrift.server.TServerEventHandler; import org.apache.thrift.transport.TTransport; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.internal.LogKeys; 
import org.apache.spark.internal.MDC; @@ -53,7 +53,7 @@ */ public abstract class ThriftCLIService extends AbstractService implements TCLIService.Iface, Runnable { - public static final Logger LOG = LoggerFactory.getLogger(ThriftCLIService.class); + public static final SparkLogger LOG = SparkLoggerFactory.getLogger(ThriftCLIService.class); protected CLIService cliService; private static final TStatus OK_STATUS = new TStatus(TStatusCode.SUCCESS_STATUS); diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java index b423038fe2b61..d9bf361fdef63 100644 --- a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java +++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java @@ -56,8 +56,8 @@ import org.ietf.jgss.GSSName; import org.ietf.jgss.Oid; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.internal.LogKeys; import org.apache.spark.internal.MDC; @@ -69,7 +69,7 @@ public class ThriftHttpServlet extends TServlet { private static final long serialVersionUID = 1L; - public static final Logger LOG = LoggerFactory.getLogger(ThriftHttpServlet.class); + public static final SparkLogger LOG = SparkLoggerFactory.getLogger(ThriftHttpServlet.class); private final String authType; private final UserGroupInformation serviceUGI; private final UserGroupInformation httpUGI; diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/server/HiveServer2.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/server/HiveServer2.java index b6c9b937c5f32..9345125a8279e 100644 --- a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/server/HiveServer2.java +++ 
b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/server/HiveServer2.java @@ -37,8 +37,8 @@ import org.apache.hive.service.cli.thrift.ThriftCLIService; import org.apache.hive.service.cli.thrift.ThriftHttpCLIService; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.internal.LogKeys; import org.apache.spark.internal.MDC; import org.apache.spark.util.ShutdownHookManager; @@ -49,7 +49,7 @@ * */ public class HiveServer2 extends CompositeService { - private static final Logger LOG = LoggerFactory.getLogger(HiveServer2.class); + private static final SparkLogger LOG = SparkLoggerFactory.getLogger(HiveServer2.class); private CLIService cliService; private ThriftCLIService thriftCLIService; diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/server/ThreadWithGarbageCleanup.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/server/ThreadWithGarbageCleanup.java index 23957e146ddf1..16d8540b40560 100644 --- a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/server/ThreadWithGarbageCleanup.java +++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/server/ThreadWithGarbageCleanup.java @@ -23,8 +23,8 @@ import org.apache.hadoop.hive.metastore.HiveMetaStore; import org.apache.hadoop.hive.metastore.RawStore; -import org.apache.spark.internal.Logger; -import org.apache.spark.internal.LoggerFactory; +import org.apache.spark.internal.SparkLogger; +import org.apache.spark.internal.SparkLoggerFactory; /** * A HiveServer2 thread used to construct new server threads. @@ -32,7 +32,7 @@ * when killed by its corresponding ExecutorService. 
*/ public class ThreadWithGarbageCleanup extends Thread { - private static final Logger LOG = LoggerFactory.getLogger(ThreadWithGarbageCleanup.class); + private static final SparkLogger LOG = SparkLoggerFactory.getLogger(ThreadWithGarbageCleanup.class); Map threadRawStoreMap = ThreadFactoryWithGarbageCleanup.getThreadRawStoreMap(); diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIService.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIService.scala index bf1c4978431b7..46537f75f1a11 100644 --- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIService.scala +++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIService.scala @@ -34,7 +34,7 @@ import org.apache.hive.service.auth.HiveAuthFactory import org.apache.hive.service.cli._ import org.apache.hive.service.server.HiveServer2 -import org.apache.spark.internal.Logger +import org.apache.spark.internal.SparkLogger import org.apache.spark.sql.SQLContext import org.apache.spark.sql.catalyst.util.SQLKeywordUtils import org.apache.spark.sql.errors.QueryExecutionErrors @@ -113,10 +113,10 @@ private[hive] class SparkSQLCLIService(hiveServer: HiveServer2, sqlContext: SQLC private[thriftserver] trait ReflectedCompositeService { this: AbstractService => - private val logInfo = (msg: String) => getAncestorField[Logger](this, 3, "LOG").info(msg) + private val logInfo = (msg: String) => getAncestorField[SparkLogger](this, 3, "LOG").info(msg) private val logError = (msg: String, e: Throwable) => - getAncestorField[Logger](this, 3, "LOG").error(msg, e) + getAncestorField[SparkLogger](this, 3, "LOG").error(msg, e) def initCompositeService(hiveConf: HiveConf): Unit = { // Emulating `CompositeService.init(hiveConf)`