diff --git a/src/it/scala/net/snowflake/spark/snowflake/IntegrationEnv.scala b/src/it/scala/net/snowflake/spark/snowflake/IntegrationEnv.scala
index 11bfe0bc..1e0ef73b 100644
--- a/src/it/scala/net/snowflake/spark/snowflake/IntegrationEnv.scala
+++ b/src/it/scala/net/snowflake/spark/snowflake/IntegrationEnv.scala
@@ -25,11 +25,6 @@ import java.util.TimeZone
 import net.snowflake.client.jdbc.internal.fasterxml.jackson.databind.{JsonNode, ObjectMapper}
 import net.snowflake.spark.snowflake.Parameters.MergedParameters
 import org.apache.log4j.PropertyConfigurator
-import org.apache.logging.log4j.{Level, LogManager}
-import org.apache.logging.log4j.core.{Appender, LoggerContext}
-import org.apache.logging.log4j.core.appender.FileAppender
-import org.apache.logging.log4j.core.config.AbstractConfiguration
-import org.apache.logging.log4j.core.layout.PatternLayout
 import org.apache.spark.sql._
 import org.apache.spark.{SparkConf, SparkContext}
 import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, FunSuite}
@@ -60,7 +55,7 @@ trait IntegrationEnv
   protected val DEFAULT_LOG4J_PROPERTY = "src/it/resources/log4j_default.properties"
 
   // From spark 3.3, log4j2 is used. For spark 3.2 and older versions, log4j is used.
-  protected val USE_LOG4J2_PROPERTIES = true
+  protected val USE_LOG4J2_PROPERTIES = false
 
   // Reconfigure log4j logging for the test of spark 3.2 and older versions
   protected def reconfigureLogFile(propertyFileName: String): Unit = {
@@ -69,46 +64,6 @@ trait IntegrationEnv
     PropertyConfigurator.configure(log4jfile.getAbsolutePath)
   }
 
-  // Reconfigure log4j2 log level for the test of spark 3.3 and newer versions
-  protected def reconfigureLog4j2LogLevel(logLevel: Level): Unit = {
-    import org.apache.logging.log4j.LogManager
-    val ctx = LogManager.getContext(false).asInstanceOf[LoggerContext]
-    val config = ctx.getConfiguration
-    val loggerConfig = config.getLoggerConfig(LogManager.ROOT_LOGGER_NAME)
-    log.warn(s"reconfigure log level as $logLevel")
-    loggerConfig.setLevel(logLevel)
-    ctx.updateLoggers()
-  }
-
-  // Add a log4j2 FileAppender for the test of spark 3.3 and newer versions
-  protected def addLog4j2FileAppender(filePath: String, appenderName: String): Unit = {
-    val ctx = LogManager.getContext(false).asInstanceOf[LoggerContext]
-    val config = ctx.getConfiguration
-    val loggerConfig = config.getLoggerConfig(LogManager.ROOT_LOGGER_NAME)
-
-    val layout = PatternLayout.createDefaultLayout(config)
-
-    val appender: Appender = FileAppender.createAppender(
-      filePath, "false", "false", appenderName,
-      "true", "false", "false", "4000",
-      layout, null, "false", null, config)
-
-    appender.start()
-    config.addAppender(appender)
-    loggerConfig.addAppender(appender, null, null)
-    config.addLogger("org.apache.logging.log4j", loggerConfig)
-    ctx.updateLoggers()
-  }
-
-  // Drop a log4j2 FileAppender for the test of spark 3.3 and newer versions
-  protected def dropLog4j2FileAppender(appenderName: String): Unit = {
-    val ctx = LogManager.getContext(false).asInstanceOf[org.apache.logging.log4j.core.LoggerContext]
-    val config = ctx.getConfiguration.asInstanceOf[AbstractConfiguration]
-    val appender = config.getAppender(appenderName).asInstanceOf[FileAppender]
-    config.removeAppender(appenderName: String)
-    appender.stop()
-    ctx.updateLoggers()
-  }
 
   // Some integration tests are for large Data, it needs long time to run.
   // But when the test suite is run on travis, there are job time limitation.
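Note: with `USE_LOG4J2_PROPERTIES` now `false`, the suites fall back to the log4j 1.x path that this diff keeps in `reconfigureLogFile`. A minimal standalone sketch of that fallback, assuming a properties file path; the wrapper object and `main` method are illustrative, not part of the repository:

```scala
import java.io.File
import org.apache.log4j.PropertyConfigurator

object Log4j1ReconfigureSketch {
  // Mirrors the retained reconfigureLogFile(): re-read a log4j 1.x
  // properties file and swap the active logging configuration.
  def reconfigure(propertyFileName: String): Unit = {
    val log4jfile = new File(propertyFileName)
    // configure() resets appenders and log levels from the given file.
    PropertyConfigurator.configure(log4jfile.getAbsolutePath)
  }

  def main(args: Array[String]): Unit = {
    // Path taken from DEFAULT_LOG4J_PROPERTY in IntegrationEnv.
    reconfigure("src/it/resources/log4j_default.properties")
  }
}
```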
diff --git a/src/it/scala/net/snowflake/spark/snowflake/SecuritySuite.scala b/src/it/scala/net/snowflake/spark/snowflake/SecuritySuite.scala
index e0746dca..dc0d6761 100644
--- a/src/it/scala/net/snowflake/spark/snowflake/SecuritySuite.scala
+++ b/src/it/scala/net/snowflake/spark/snowflake/SecuritySuite.scala
@@ -69,55 +69,6 @@ class SecuritySuite extends IntegrationSuiteBase {
     FileUtils.deleteQuietly(new File(TEST_LOG_FILE_NAME))
   }
 
-  ignore("manual test for addLog4j2FileAppender()/dropLog4j2FileAppender()") {
-    logger.info("Before adding file appender")
-    addLog4j2FileAppender(loggingFilePath, fileAppenderName)
-    logger.info("After adding file appender")
-    dropLog4j2FileAppender(fileAppenderName)
-    logger.info("After dropping file appender")
-  }
-
-  // in JDBC starts to log masked pre-signed url in 3.17.0
-  ignore("verify pre-signed URL are not logged for read & write") {
-    logger.info("Reconfigure to log into file")
-    // Reconfigure log file to output all logging entries.
-    if (USE_LOG4J2_PROPERTIES) {
-      addLog4j2FileAppender(loggingFilePath, fileAppenderName)
-    } else {
-      reconfigureLogFile(TEST_LOG4J_PROPERTY)
-    }
-
-    try {
-      // Read from one snowflake table and write to another snowflake table
-      sparkSession
-        .sql("select * from test_table_large_result order by int_c")
-        .write
-        .format(SNOWFLAKE_SOURCE_NAME)
-        .options(thisConnectorOptionsNoTable)
-        .option("dbtable", test_table_write)
-        .mode(SaveMode.Overwrite)
-        .save()
-
-      // Check pre-signed is used for the test
-      assert(searchInLogFile(".*Spark Connector.*"))
-
-      // Check pre-signed URL are NOT printed in the log
-      // by searching the pre-signed URL domain name.
-      assert(!searchInLogFile(".*https?://.*amazonaws.com.*"))
-      assert(!searchInLogFile(".*https?://.*core.windows.net.*"))
-      assert(!searchInLogFile(".*https?://.*googleapis.com.*"))
-    } finally {
-      // Reconfigure back to the default log file.
-      if (USE_LOG4J2_PROPERTIES) {
-        dropLog4j2FileAppender(fileAppenderName)
-      } else {
-        reconfigureLogFile(DEFAULT_LOG4J_PROPERTY)
-      }
-    }
-
-    logger.info("Restore back to log into STDOUT")
-  }
-
   override def beforeEach(): Unit = {
     super.beforeEach()
   }
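The removed test relied on a `searchInLogFile` helper whose implementation is not part of this diff. A minimal sketch of the kind of line-by-line regex scan it presumably performs, with an assumed signature and a placeholder log path (the suite's real helper may differ):

```scala
import scala.io.Source

object LogScanSketch {
  // Assumed behavior: true if any line of the log file matches the pattern.
  def searchInLogFile(logFilePath: String, pattern: String): Boolean = {
    val source = Source.fromFile(logFilePath)
    try source.getLines().exists(_.matches(pattern))
    finally source.close()
  }

  def main(args: Array[String]): Unit = {
    val logFile = "target/test.log" // placeholder path, not the suite's file
    // The removed assertions: connector activity was logged ...
    println(searchInLogFile(logFile, ".*Spark Connector.*"))
    // ... but no cloud-storage pre-signed URL hosts appeared.
    println(!searchInLogFile(logFile, ".*https?://.*amazonaws.com.*"))
  }
}
```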