4 changes: 4 additions & 0 deletions README.md
@@ -349,6 +349,10 @@ Even more queries can be found [here](https://colab.research.google.com/github/R

 # Latest updates
 
+## Version 0.2.0 alpha 6
+- Fix a bug in the builder's config() call.
+- Add withDelta() to configure Delta Lake tables and files, for use with the JSONiq Update Facility.
+
 ## Version 0.2.0 alpha 5
 - If initialization of the Spark session fails, we now check whether SPARK_HOME is set and whether it is invalid or points to a Spark version other than 4.0, and output a more informative error message.
 
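The withDelta() addition is a one-call opt-in for Delta Lake. A minimal usage sketch, assuming the RumbleSession builder entry point that src/jsoniq/session.py suggests (the master URL is chosen for illustration, not taken from the project's documentation):

```python
from jsoniq import RumbleSession

# Sketch: enable Delta Lake support before the session is created.
rumble = (RumbleSession.builder
          .master("local[*]")
          .withDelta()
          .getOrCreate())
```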
5 changes: 3 additions & 2 deletions pyproject.toml
@@ -4,12 +4,13 @@ build-backend = "setuptools.build_meta"

 [project]
 name = "jsoniq"
-version = "0.2.0a5"
+version = "0.2.0a6"
 description = "Python edition of RumbleDB, a JSONiq engine"
 requires-python = ">=3.11"
 dependencies = [
     "pyspark==4.0",
-    "pandas>=2.2"
+    "pandas>=2.2",
+    "delta-spark==4.0"
 ]
 authors = [
     {name = "Ghislain Fourny", email = "[email protected]"},
Binary file modified src/jsoniq/jars/rumbledb-1.24.0.jar
Binary file not shown.
11 changes: 7 additions & 4 deletions src/jsoniq/session.py
@@ -111,12 +111,15 @@ def master(self, url):
         self._sparkbuilder = self._sparkbuilder.master(url);
         return self;
 
-    def config(self, key, value):
-        self._sparkbuilder = self._sparkbuilder.config(key, value);
+    def config(self, key=None, value=None, conf=None, *, map=None):
+        self._sparkbuilder = self._sparkbuilder.config(key=key, value=value, conf=conf, map=map)
         return self;
 
-    def config(self, conf):
-        self._sparkbuilder = self._sparkbuilder.config(conf);
+    def withDelta(self):
+        self._sparkbuilder = self._sparkbuilder \
+            .config("spark.sql.extensions", "io.delta.sql.DeltaSparkSessionExtension") \
+            .config("spark.sql.catalog.spark_catalog", "org.apache.spark.sql.delta.catalog.DeltaCatalog") \
+            .config("spark.jars.packages", "io.delta:delta-spark_2.13:4.0.0")
         return self;
 
     def __getattr__(self, name):
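For context on the config() fix: Python does not overload methods, so the second def config(self, conf) silently replaced the first, leaving the key/value form unreachable. The merged signature mirrors PySpark's SparkSession.Builder.config(key=None, value=None, conf=None, *, map=None) and forwards every call style. A hedged sketch of the three styles this should now accept (the RumbleSession entry point is assumed from the package layout, and the config keys are arbitrary examples):

```python
from pyspark import SparkConf
from jsoniq import RumbleSession  # assumed entry point

builder = RumbleSession.builder
builder = builder.config("spark.executor.memory", "2g")               # key/value pair
builder = builder.config(conf=SparkConf().setAppName("rumble"))       # SparkConf object
builder = builder.config(map={"spark.sql.shuffle.partitions": "8"})   # keyword-only dict
rumble = builder.withDelta().getOrCreate()
```

Note that the io.delta:delta-spark_2.13:4.0.0 coordinate wired in by withDelta() matches the delta-spark==4.0 pin added to pyproject.toml in this same PR.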