diff --git a/README.md b/README.md
index 05037c7..d665bc5 100644
--- a/README.md
+++ b/README.md
@@ -47,7 +47,7 @@ See [Our Features](https://dataflint.gitbook.io/dataflint-for-spark/overview/our
 
 Install DataFlint via sbt:
 ```sbt
-libraryDependencies += "io.dataflint" %% "spark" % "0.1.6"
+libraryDependencies += "io.dataflint" %% "spark" % "0.1.7"
 ```
 
 Then instruct spark to load the DataFlint plugin:
@@ -65,7 +65,7 @@ Add these 2 configs to your pyspark session builder:
 ```python
 builder = pyspark.sql.SparkSession.builder
 ...
-    .config("spark.jars.packages", "io.dataflint:spark_2.12:0.1.6") \
+    .config("spark.jars.packages", "io.dataflint:spark_2.12:0.1.7") \
     .config("spark.plugins", "io.dataflint.spark.SparkDataflintPlugin") \
 ...
 ```
@@ -76,7 +76,7 @@
 Alternatively, install DataFlint with **no code change** as a spark ivy package on runtime:
 ```bash
 spark-submit
---packages io.dataflint:spark_2.12:0.1.6 \
+--packages io.dataflint:spark_2.12:0.1.7 \
 --conf spark.plugins=io.dataflint.spark.SparkDataflintPlugin \
 ...
 ```
@@ -89,7 +89,7 @@ After the installations you will see a "DataFlint" button in Spark UI, click on
 
 ### Additional installation options
 
-* There is also support for scala 2.13, if your spark cluster is using scala 2.13 change package name to io.dataflint:spark_**2.13**:0.1.6
+* There is also support for scala 2.13, if your spark cluster is using scala 2.13 change package name to io.dataflint:spark_**2.13**:0.1.7
 * For more installation options, including for **python** and **k8s spark-operator**, see [Install on Spark docs](https://dataflint.gitbook.io/dataflint-for-spark/getting-started/install-on-spark)
 * For installing DataFlint in **spark history server** for observability on completed runs see [install on spark history server docs](https://dataflint.gitbook.io/dataflint-for-spark/getting-started/install-on-spark-history-server)
 * For installing DataFlint on **DataBricks** see [install on databricks docs](https://dataflint.gitbook.io/dataflint-for-spark/getting-started/install-on-databricks)
diff --git a/spark-plugin/build.sbt b/spark-plugin/build.sbt
index 09dad1d..7bc614c 100644
--- a/spark-plugin/build.sbt
+++ b/spark-plugin/build.sbt
@@ -1,6 +1,6 @@
 import xerial.sbt.Sonatype._
 
-lazy val versionNum: String = "0.1.6"
+lazy val versionNum: String = "0.1.7"
 lazy val scala212 = "2.12.18"
 lazy val scala213 = "2.13.12"
 lazy val supportedScalaVersions = List(scala212, scala213)
diff --git a/spark-ui/package-lock.json b/spark-ui/package-lock.json
index 7b094e5..3084633 100644
--- a/spark-ui/package-lock.json
+++ b/spark-ui/package-lock.json
@@ -1,12 +1,12 @@
 {
   "name": "dataflint-ui",
-  "version": "0.1.6",
+  "version": "0.1.7",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "dataflint-ui",
-      "version": "0.1.6",
+      "version": "0.1.7",
       "dependencies": {
         "@chatscope/chat-ui-kit-react": "^1.10.1",
         "@chatscope/chat-ui-kit-styles": "^1.4.0",
diff --git a/spark-ui/package.json b/spark-ui/package.json
index 4b867da..351d9ee 100644
--- a/spark-ui/package.json
+++ b/spark-ui/package.json
@@ -1,6 +1,6 @@
 {
   "name": "dataflint-ui",
-  "version": "0.1.6",
+  "version": "0.1.7",
   "homepage": "./",
   "private": true,
   "dependencies": {