
Commit

update test
sfc-gh-bli committed Oct 4, 2024
1 parent ce96d23 commit d4835a5
Showing 2 changed files with 1 addition and 95 deletions.
47 changes: 1 addition & 46 deletions src/it/scala/net/snowflake/spark/snowflake/IntegrationEnv.scala
@@ -25,11 +25,6 @@ import java.util.TimeZone
import net.snowflake.client.jdbc.internal.fasterxml.jackson.databind.{JsonNode, ObjectMapper}
import net.snowflake.spark.snowflake.Parameters.MergedParameters
import org.apache.log4j.PropertyConfigurator
-import org.apache.logging.log4j.{Level, LogManager}
-import org.apache.logging.log4j.core.{Appender, LoggerContext}
-import org.apache.logging.log4j.core.appender.FileAppender
-import org.apache.logging.log4j.core.config.AbstractConfiguration
-import org.apache.logging.log4j.core.layout.PatternLayout
import org.apache.spark.sql._
import org.apache.spark.{SparkConf, SparkContext}
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, FunSuite}
@@ -60,7 +55,7 @@ trait IntegrationEnv
  protected val DEFAULT_LOG4J_PROPERTY = "src/it/resources/log4j_default.properties"

  // From Spark 3.3 on, log4j2 is used; for Spark 3.2 and older versions, log4j is used.
-  protected val USE_LOG4J2_PROPERTIES = true
+  protected val USE_LOG4J2_PROPERTIES = false

  // Reconfigure log4j logging for tests on Spark 3.2 and older versions
  protected def reconfigureLogFile(propertyFileName: String): Unit = {
@@ -69,46 +64,6 @@ trait IntegrationEnv
    PropertyConfigurator.configure(log4jfile.getAbsolutePath)
  }
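For the pre-3.3 path, reconfigureLogFile simply points log4j 1.x at a properties file. A sketch of what such a file can contain, for readers unfamiliar with log4j 1.x syntax (illustrative only — the repository's actual src/it/resources files are not shown in this diff):

```properties
# Illustrative log4j 1.x configuration; file name and layout are assumptions,
# not the repository's actual log4j_default.properties.
log4j.rootLogger=INFO, file
log4j.appender.file=org.apache.log4j.FileAppender
log4j.appender.file.File=test_output.log
log4j.appender.file.Append=false
log4j.appender.file.layout=org.apache.log4j.PatternLayout
log4j.appender.file.layout.ConversionPattern=%d %-5p %c - %m%n
```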

-  // Reconfigure log4j2 log level for the test of spark 3.3 and newer versions
-  protected def reconfigureLog4j2LogLevel(logLevel: Level): Unit = {
-    import org.apache.logging.log4j.LogManager
-    val ctx = LogManager.getContext(false).asInstanceOf[LoggerContext]
-    val config = ctx.getConfiguration
-    val loggerConfig = config.getLoggerConfig(LogManager.ROOT_LOGGER_NAME)
-    log.warn(s"reconfigure log level as $logLevel")
-    loggerConfig.setLevel(logLevel)
-    ctx.updateLoggers()
-  }
-
-  // Add a log4j2 FileAppender for the test of spark 3.3 and newer versions
-  protected def addLog4j2FileAppender(filePath: String, appenderName: String): Unit = {
-    val ctx = LogManager.getContext(false).asInstanceOf[LoggerContext]
-    val config = ctx.getConfiguration
-    val loggerConfig = config.getLoggerConfig(LogManager.ROOT_LOGGER_NAME)
-
-    val layout = PatternLayout.createDefaultLayout(config)
-
-    val appender: Appender = FileAppender.createAppender(
-      filePath, "false", "false", appenderName,
-      "true", "false", "false", "4000",
-      layout, null, "false", null, config)
-
-    appender.start()
-    config.addAppender(appender)
-    loggerConfig.addAppender(appender, null, null)
-    config.addLogger("org.apache.logging.log4j", loggerConfig)
-    ctx.updateLoggers()
-  }
-
-  // Drop a log4j2 FileAppender for the test of spark 3.3 and newer versions
-  protected def dropLog4j2FileAppender(appenderName: String): Unit = {
-    val ctx = LogManager.getContext(false).asInstanceOf[org.apache.logging.log4j.core.LoggerContext]
-    val config = ctx.getConfiguration.asInstanceOf[AbstractConfiguration]
-    val appender = config.getAppender(appenderName).asInstanceOf[FileAppender]
-    config.removeAppender(appenderName: String)
-    appender.stop()
-    ctx.updateLoggers()
-  }

  // Some integration tests are for large data and need a long time to run.
  // But when the test suite is run on Travis, there is a job time limitation.
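For reference outside the diff view: the three helpers deleted above drive log4j2 programmatically, and the FileAppender.createAppender call they use is the deprecated positional-string factory, which is hard to read. Below is a minimal standalone sketch of the same add/drop pattern with each argument labeled — assuming log4j-core 2.x on the classpath; the object name Log4j2TestLogging is illustrative, not part of the suite:

```scala
import org.apache.logging.log4j.LogManager
import org.apache.logging.log4j.core.{Appender, LoggerContext}
import org.apache.logging.log4j.core.appender.FileAppender
import org.apache.logging.log4j.core.config.AbstractConfiguration
import org.apache.logging.log4j.core.layout.PatternLayout

// Hypothetical helper object mirroring the removed IntegrationEnv methods.
object Log4j2TestLogging {

  // Attach a FileAppender to the root logger at runtime.
  def addFileAppender(filePath: String, appenderName: String): Unit = {
    val ctx = LogManager.getContext(false).asInstanceOf[LoggerContext]
    val config = ctx.getConfiguration
    val loggerConfig = config.getLoggerConfig(LogManager.ROOT_LOGGER_NAME)
    val layout = PatternLayout.createDefaultLayout(config)

    // Deprecated factory: every flag is a String; labeled for readability.
    val appender: Appender = FileAppender.createAppender(
      filePath,     // fileName
      "false",      // append: start from an empty file
      "false",      // locking
      appenderName, // appender name
      "true",       // immediateFlush
      "false",      // ignoreExceptions
      "false",      // bufferedIo
      "4000",       // bufferSizeStr
      layout,       // layout
      null,         // filter
      "false",      // advertise
      null,         // advertiseUri
      config)       // configuration

    appender.start()
    config.addAppender(appender)
    loggerConfig.addAppender(appender, null, null) // null level/filter: inherit
    ctx.updateLoggers()
  }

  // Detach and stop the appender again.
  def dropFileAppender(appenderName: String): Unit = {
    val ctx = LogManager.getContext(false).asInstanceOf[LoggerContext]
    val config = ctx.getConfiguration.asInstanceOf[AbstractConfiguration]
    val appender = config.getAppender(appenderName).asInstanceOf[FileAppender]
    config.removeAppender(appenderName)
    appender.stop()
    ctx.updateLoggers()
  }
}
```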
49 changes: 0 additions & 49 deletions src/it/scala/net/snowflake/spark/snowflake/SecuritySuite.scala
@@ -69,55 +69,6 @@ class SecuritySuite extends IntegrationSuiteBase {
    FileUtils.deleteQuietly(new File(TEST_LOG_FILE_NAME))
  }

ignore("manual test for addLog4j2FileAppender()/dropLog4j2FileAppender()") {
logger.info("Before adding file appender")
addLog4j2FileAppender(loggingFilePath, fileAppenderName)
logger.info("After adding file appender")
dropLog4j2FileAppender(fileAppenderName)
logger.info("After dropping file appender")
}

// in JDBC starts to log masked pre-signed url in 3.17.0
ignore("verify pre-signed URL are not logged for read & write") {
logger.info("Reconfigure to log into file")
// Reconfigure log file to output all logging entries.
if (USE_LOG4J2_PROPERTIES) {
addLog4j2FileAppender(loggingFilePath, fileAppenderName)
} else {
reconfigureLogFile(TEST_LOG4J_PROPERTY)
}

try {
// Read from one snowflake table and write to another snowflake table
sparkSession
.sql("select * from test_table_large_result order by int_c")
.write
.format(SNOWFLAKE_SOURCE_NAME)
.options(thisConnectorOptionsNoTable)
.option("dbtable", test_table_write)
.mode(SaveMode.Overwrite)
.save()

// Check pre-signed is used for the test
assert(searchInLogFile(".*Spark Connector.*"))

// Check pre-signed URL are NOT printed in the log
// by searching the pre-signed URL domain name.
assert(!searchInLogFile(".*https?://.*amazonaws.com.*"))
assert(!searchInLogFile(".*https?://.*core.windows.net.*"))
assert(!searchInLogFile(".*https?://.*googleapis.com.*"))
} finally {
// Reconfigure back to the default log file.
if (USE_LOG4J2_PROPERTIES) {
dropLog4j2FileAppender(fileAppenderName)
} else {
reconfigureLogFile(DEFAULT_LOG4J_PROPERTY)
}
}

logger.info("Restore back to log into STDOUT")
}

  override def beforeEach(): Unit = {
    super.beforeEach()
  }
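The removed test depends on a searchInLogFile helper defined elsewhere in the suite (not shown in this diff). A minimal sketch of the contract it appears to implement — treating the log-file argument and full-line regex matching as assumptions, not the suite's actual code:

```scala
import scala.io.Source

// Sketch only: true if any line of the captured log file matches the regex.
// String.matches() anchors the whole line, hence the ".*foo.*" patterns above.
def searchInLogFile(pattern: String, logFilePath: String): Boolean = {
  val source = Source.fromFile(logFilePath)
  try source.getLines().exists(_.matches(pattern))
  finally source.close()
}
```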
