Adding Delta Lake 0.6.0 support #302

Open · wants to merge 4 commits into master
21 changes: 15 additions & 6 deletions pom.xml
@@ -21,19 +21,19 @@
<modelVersion>4.0.0</modelVersion>

<groupId>com.hortonworks.spark</groupId>
-<artifactId>spark-atlas-connector-main_2.11</artifactId>
+<artifactId>spark-atlas-connector-main</artifactId>
<version>0.1.0-SNAPSHOT</version>
<packaging>pom</packaging>

<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<java.version>1.8</java.version>
-<spark.version>2.4.0</spark.version>
+<spark.version>2.4.5</spark.version>
<atlas.version>2.0.0</atlas.version>
<maven.version>3.5.4</maven.version>
-<scala.version>2.11.12</scala.version>
-<scala.binary.version>2.11</scala.binary.version>
+<scala.version>2.12.11</scala.version>
+<scala.binary.version>2.12</scala.binary.version>
<kafka.version>2.0.1</kafka.version>
<MaxPermGen>512m</MaxPermGen>
<CodeCacheSize>512m</CodeCacheSize>
@@ -65,6 +65,15 @@
</repositories>

<dependencies>

+<dependency>
+<groupId>io.delta</groupId>
+<artifactId>delta-core_${scala.binary.version}</artifactId>
+<version>0.6.0</version>
+<scope>provided</scope>
+</dependency>


<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-library</artifactId>
@@ -123,7 +132,7 @@
-->
<dependency>
<groupId>com.fasterxml.jackson.module</groupId>
-<artifactId>jackson-module-scala_2.11</artifactId>
+<artifactId>jackson-module-scala_${scala.binary.version}</artifactId>
<version>${jackson.version}</version>
<scope>test</scope>
</dependency>
@@ -240,7 +249,7 @@
<dependency>
<groupId>org.scalacheck</groupId>
<artifactId>scalacheck_${scala.binary.version}</artifactId>
-<version>1.12.5</version>
+<version>1.14.3</version>
</dependency>

<dependency>
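Note that delta-core is declared with provided scope: the connector compiles against Delta 0.6.0 but expects the running Spark application to supply it. A minimal smoke test for the new lineage path might look like the sketch below, assuming the jar is attached via --packages io.delta:delta-core_2.12:0.6.0 (matching the Scala 2.12 binary version set above); the app name and output path are invented for the example.

// Sketch only: assumes delta-core_2.12:0.6.0 is on the application classpath,
// e.g. spark-submit --packages io.delta:delta-core_2.12:0.6.0 <app jar>
import org.apache.spark.sql.SparkSession

object DeltaLineageSmoke {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().appName("delta-lineage-smoke").getOrCreate()
    // A Delta batch write like this produces the SaveIntoDataSourceCommand
    // handled by the harvester change further down.
    spark.range(100).write.format("delta").mode("overwrite").save("/tmp/delta/events")
    spark.stop()
  }
}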
4 changes: 2 additions & 2 deletions spark-atlas-connector-assembly/pom.xml
@@ -22,7 +22,7 @@
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>com.hortonworks.spark</groupId>
-<artifactId>spark-atlas-connector-main_2.11</artifactId>
+<artifactId>spark-atlas-connector-main</artifactId>
<version>0.1.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
@@ -33,7 +33,7 @@
<dependencies>
<dependency>
<groupId>com.hortonworks.spark</groupId>
-<artifactId>spark-atlas-connector_${scala.binary.version}</artifactId>
+<artifactId>spark-atlas-connector</artifactId>
<version>${project.version}</version>
</dependency>
</dependencies>
4 changes: 2 additions & 2 deletions spark-atlas-connector/pom.xml
@@ -22,11 +22,11 @@

<parent>
<groupId>com.hortonworks.spark</groupId>
-<artifactId>spark-atlas-connector-main_2.11</artifactId>
+<artifactId>spark-atlas-connector-main</artifactId>
<version>0.1.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

-<artifactId>spark-atlas-connector_2.11</artifactId>
+<artifactId>spark-atlas-connector</artifactId>
<packaging>jar</packaging>
</project>
spark-atlas-connector/src/main/scala/com/hortonworks/spark/atlas/sql/CommandsHarvester.scala
@@ -39,6 +39,9 @@ import com.hortonworks.spark.atlas.sql.SparkExecutionPlanProcessor.SinkDataSource
import com.hortonworks.spark.atlas.types.{AtlasEntityUtils, external, internal}
import com.hortonworks.spark.atlas.utils.SparkUtils.sparkSession
import com.hortonworks.spark.atlas.utils.{Logging, SparkUtils}
+import org.apache.hadoop.fs.Path
+import org.apache.spark.sql.delta.DeltaLog
+import org.apache.spark.sql.delta.sources.DeltaDataSource
import org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions
import org.apache.spark.sql.streaming.SinkProgress

@@ -165,6 +168,7 @@ object CommandsHarvester extends AtlasEntityUtils with Logging {
}
}

+// TODO: add support for Delta table output sources
object SaveIntoDataSourceHarvester extends Harvester[SaveIntoDataSourceCommand] {
override def harvest(
node: SaveIntoDataSourceCommand,
@@ -175,6 +179,10 @@
case SHCEntities(shcEntities) => Seq(shcEntities)
case JDBCEntities(jdbcEntities) => Seq(jdbcEntities)
case KafkaEntities(kafkaEntities) => kafkaEntities
+case e if e.dataSource.isInstanceOf[DeltaDataSource] =>
+  val path = node.options.getOrElse("path", "none")
+  val entity = external.pathToEntity(path)
+  Seq(entity)
case e =>
logWarn(s"Missing output entities: $e")
Seq.empty
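For context (not part of the diff): the new case matches when the command's dataSource is Delta's DeltaDataSource and falls back to the "path" option for the output entity. Written as an extractor in the style of the existing SHCEntities and JDBCEntities helpers, the same logic would look like the hypothetical sketch below; returning None when no path option is present would also avoid registering an entity for the literal path "none".

// Hypothetical extractor mirroring SHCEntities/JDBCEntities; not part of this PR.
object DeltaEntities {
  def unapply(cmd: SaveIntoDataSourceCommand): Option[SACAtlasEntityWithDependencies] =
    cmd.dataSource match {
      case _: DeltaDataSource => cmd.options.get("path").map(external.pathToEntity)
      case _ => None
    }
}

With that in place, the match arm above would reduce to case DeltaEntities(entity) => Seq(entity).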
@@ -239,6 +247,12 @@ object CommandsHarvester extends AtlasEntityUtils with Logging {
tChildren.flatMap {
case r: HiveTableRelation => Seq(tableToEntity(r.tableMeta))
case v: View => Seq(tableToEntity(v.desc))
+case LogicalRelation(fileRelation: FileRelation, _, catalogTable, _)
+    if fileRelation.getClass.getName.contains("org.apache.spark.sql.delta.DeltaLog") =>
+  if (fileRelation.inputFiles.nonEmpty) {
+    val path = new Path(fileRelation.inputFiles.head).getParent.toString
+    Seq(external.pathToEntity(path))
+  } else Seq.empty
case LogicalRelation(fileRelation: FileRelation, _, catalogTable, _) =>
catalogTable.map(tbl => Seq(tableToEntity(tbl))).getOrElse(
fileRelation.inputFiles.flatMap(file => Seq(external.pathToEntity(file))).toSeq)
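For context (not part of the diff): reading a Delta table yields a LogicalRelation whose FileRelation is backed by Delta's log, which is why the case above tests the implementation class name rather than a public type. A sketch of the read path it covers, reusing the assumed table from the earlier write example:

// Sketch only: assumes the Delta table written in the earlier example exists.
val df = spark.read.format("delta").load("/tmp/delta/events")
// The analyzed plan contains a LogicalRelation over Delta's file index; its
// inputFiles are the table's data files, so the harvested lineage path is the
// parent directory of the first file, i.e. /tmp/delta/events.
df.count()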
spark-atlas-connector/src/main/scala/com/hortonworks/spark/atlas/types/external.scala
@@ -41,9 +41,14 @@ object external {
val S3_OBJECT_TYPE_STRING = "aws_s3_object"
val S3_PSEUDO_DIR_TYPE_STRING = "aws_s3_pseudo_dir"
val S3_BUCKET_TYPE_STRING = "aws_s3_bucket"
+val GS_OBJECT_TYPE_STRING = "gcp_gs_object"
+val GS_PSEUDO_DIR_TYPE_STRING = "gcp_gs_pseudo_dir"
+val GS_BUCKET_TYPE_STRING = "gcp_gs_bucket"

private def isS3Schema(schema: String): Boolean = schema.matches("s3[an]?")

+private def isGSSchema(schema: String): Boolean = schema.matches("gs")

private def extractS3Entity(uri: URI, fsPath: Path): SACAtlasEntityWithDependencies = {
val path = Path.getPathWithoutSchemeAndAuthority(fsPath).toString

@@ -81,6 +86,43 @@
new SACAtlasEntityWithDependencies(objectEntity, Seq(dirEntityWithDeps))
}

+private def extractGSEntity(uri: URI, fsPath: Path): SACAtlasEntityWithDependencies = {
+  val path = Path.getPathWithoutSchemeAndAuthority(fsPath).toString
+
+  val bucketName = uri.getAuthority
+  val bucketQualifiedName = s"gs://${bucketName}"
+  val dirName = path.replaceFirst("[^/]*$", "")
+  val dirQualifiedName = bucketQualifiedName + dirName
+  val objectName = path.replaceFirst("^.*/", "")
+  val objectQualifiedName = dirQualifiedName + objectName
+
+  // bucket
+  val bucketEntity = new AtlasEntity(GS_BUCKET_TYPE_STRING)
+  bucketEntity.setAttribute("name", bucketName)
+  bucketEntity.setAttribute("qualifiedName", bucketQualifiedName)
+
+  // pseudo dir
+  val dirEntity = new AtlasEntity(GS_PSEUDO_DIR_TYPE_STRING)
+  dirEntity.setAttribute("name", dirName)
+  dirEntity.setAttribute("qualifiedName", dirQualifiedName)
+  dirEntity.setAttribute("objectPrefix", dirQualifiedName)
+  dirEntity.setAttribute("bucket", AtlasUtils.entityToReference(bucketEntity))
+
+  // object
+  val objectEntity = new AtlasEntity(GS_OBJECT_TYPE_STRING)
+  objectEntity.setAttribute("name", objectName)
+  objectEntity.setAttribute("path", path)
+  objectEntity.setAttribute("qualifiedName", objectQualifiedName)
+  objectEntity.setAttribute("pseudoDirectory", AtlasUtils.entityToReference(dirEntity))
+
+  // dir entity depends on bucket entity
+  val dirEntityWithDeps = new SACAtlasEntityWithDependencies(dirEntity,
+    Seq(SACAtlasEntityWithDependencies(bucketEntity)))
+
+  // object entity depends on dir entity
+  new SACAtlasEntityWithDependencies(objectEntity, Seq(dirEntityWithDeps))
+}

def pathToEntity(path: String): SACAtlasEntityWithDependencies = {
val uri = resolveURI(path)
val fsPath = new Path(uri)
@@ -96,6 +138,8 @@
SACAtlasEntityWithDependencies(entity)
} else if (isS3Schema(uri.getScheme)) {
extractS3Entity(uri, fsPath)
+} else if (isGSSchema(uri.getScheme)) {
+  extractGSEntity(uri, fsPath)
} else {
val entity = new AtlasEntity(FS_PATH_TYPE_STRING)
entity.setAttribute("name",
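As a worked example of the new gs:// handling (bucket and object names invented): pathToEntity routes a gs URI to extractGSEntity, which decomposes it into bucket, pseudo-directory, and object entities as traced below.

// Hypothetical input: gs://my-bucket/warehouse/events/part-00000.parquet
//   path                -> /warehouse/events/part-00000.parquet (scheme/authority stripped)
//   bucketQualifiedName -> gs://my-bucket
//   dirName             -> /warehouse/events/  ("[^/]*$" strips the object name)
//   dirQualifiedName    -> gs://my-bucket/warehouse/events/
//   objectName          -> part-00000.parquet  ("^.*/" strips the directories)
//   objectQualifiedName -> gs://my-bucket/warehouse/events/part-00000.parquet
val entity = external.pathToEntity("gs://my-bucket/warehouse/events/part-00000.parquet")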