From 88e7495b5a6edde359d0093a9a488e4f99e343bd Mon Sep 17 00:00:00 2001
From: menishmueli
Date: Sun, 27 Oct 2024 15:32:02 +0200
Subject: [PATCH] version bump to 0.2.5

---
 README.md                  | 8 ++++----
 spark-plugin/build.sbt     | 2 +-
 spark-ui/package-lock.json | 4 ++--
 spark-ui/package.json      | 2 +-
 4 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/README.md b/README.md
index 274ca3a..04c8d10 100644
--- a/README.md
+++ b/README.md
@@ -59,7 +59,7 @@ See [Our Features](https://dataflint.gitbook.io/dataflint-for-spark/overview/our
 Install DataFlint via sbt:
 
 ```sbt
-libraryDependencies += "io.dataflint" %% "spark" % "0.2.4"
+libraryDependencies += "io.dataflint" %% "spark" % "0.2.5"
 ```
 
 Then instruct spark to load the DataFlint plugin:
@@ -77,7 +77,7 @@ Add these 2 configs to your pyspark session builder:
 ```python
 builder = pyspark.sql.SparkSession.builder
 ...
-    .config("spark.jars.packages", "io.dataflint:spark_2.12:0.2.4") \
+    .config("spark.jars.packages", "io.dataflint:spark_2.12:0.2.5") \
     .config("spark.plugins", "io.dataflint.spark.SparkDataflintPlugin") \
 ...
 ```
@@ -88,14 +88,14 @@ Alternatively, install DataFlint with **no code change** as a spark ivy package
 
 ```bash
 spark-submit
---packages io.dataflint:spark_2.12:0.2.4 \
+--packages io.dataflint:spark_2.12:0.2.5 \
 --conf spark.plugins=io.dataflint.spark.SparkDataflintPlugin \
 ...
 ```
 
 ### Additional installation options
 
-* There is also support for scala 2.13, if your spark cluster is using scala 2.13 change package name to io.dataflint:spark_**2.13**:0.2.4
+* There is also support for scala 2.13, if your spark cluster is using scala 2.13 change package name to io.dataflint:spark_**2.13**:0.2.5
 * For more installation options, including for **python** and **k8s spark-operator**, see [Install on Spark docs](https://dataflint.gitbook.io/dataflint-for-spark/getting-started/install-on-spark)
 * For installing DataFlint in **spark history server** for observability on completed runs see [install on spark history server docs](https://dataflint.gitbook.io/dataflint-for-spark/getting-started/install-on-spark-history-server)
 * For installing DataFlint on **DataBricks** see [install on databricks docs](https://dataflint.gitbook.io/dataflint-for-spark/getting-started/install-on-databricks)
diff --git a/spark-plugin/build.sbt b/spark-plugin/build.sbt
index c713d83..a23297c 100644
--- a/spark-plugin/build.sbt
+++ b/spark-plugin/build.sbt
@@ -1,6 +1,6 @@
 import xerial.sbt.Sonatype._
 
-lazy val versionNum: String = "0.2.4"
+lazy val versionNum: String = "0.2.5"
 lazy val scala212 = "2.12.18"
 lazy val scala213 = "2.13.12"
 lazy val supportedScalaVersions = List(scala212, scala213)
diff --git a/spark-ui/package-lock.json b/spark-ui/package-lock.json
index 2989d88..f9151c6 100644
--- a/spark-ui/package-lock.json
+++ b/spark-ui/package-lock.json
@@ -1,12 +1,12 @@
 {
   "name": "dataflint-ui",
-  "version": "0.2.4",
+  "version": "0.2.5",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "dataflint-ui",
-      "version": "0.2.4",
+      "version": "0.2.5",
       "dependencies": {
         "@chatscope/chat-ui-kit-react": "^1.10.1",
         "@chatscope/chat-ui-kit-styles": "^1.4.0",
diff --git a/spark-ui/package.json b/spark-ui/package.json
index a899faf..06d9199 100644
--- a/spark-ui/package.json
+++ b/spark-ui/package.json
@@ -1,6 +1,6 @@
 {
   "name": "dataflint-ui",
-  "version": "0.2.4",
+  "version": "0.2.5",
   "homepage": "./",
   "private": true,
   "dependencies": {