Skip to content

Commit e8ed10e

Browse files
authored
Merge branch 'master' into stack/snapshotmanager-wireframe
2 parents 27edff4 + 2306be9 commit e8ed10e

File tree

1,357 files changed

+4522
-269997
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

1,357 files changed

+4522
-269997
lines changed

.github/workflows/connectors_test.yaml

Lines changed: 0 additions & 38 deletions
This file was deleted.

.github/workflows/iceberg_test.yaml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
name: "Delta Iceberg Latest"
2-
on: [push, pull_request]
2+
on: [] # [push, pull_request]
33
jobs:
44
test:
55
name: "DIL: Scala ${{ matrix.scala }}"
@@ -25,7 +25,7 @@ jobs:
2525
uses: actions/setup-java@v3
2626
with:
2727
distribution: "zulu"
28-
java-version: "11"
28+
java-version: "17"
2929
- name: Cache Scala, SBT
3030
uses: actions/cache@v3
3131
with:

.github/workflows/kernel_test.yaml

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -37,11 +37,12 @@ jobs:
3737
echo "Runner arch: ${{ runner.arch }}"
3838
- name: Checkout code
3939
uses: actions/checkout@v4
40+
# Run unit tests with JDK 17. These unit tests depend on Spark, and Spark 4.0+ is JDK 17.
4041
- name: install java
4142
uses: actions/setup-java@v4
4243
with:
4344
distribution: "zulu"
44-
java-version: "11"
45+
java-version: "17"
4546
- name: Cache SBT and dependencies
4647
id: cache-sbt
4748
uses: actions/cache@v4
@@ -59,7 +60,7 @@ jobs:
5960
else
6061
echo "❌ Cache MISS - will download dependencies"
6162
fi
62-
- name: Run tests
63+
- name: Run unit tests
6364
run: |
6465
python run-tests.py --group kernel --coverage --shard ${{ matrix.shard }}
6566
@@ -68,6 +69,7 @@ jobs:
6869
runs-on: ubuntu-24.04
6970
steps:
7071
- uses: actions/checkout@v3
72+
# Run integration tests with JDK 11, as they have no Spark dependency
7173
- name: install java
7274
uses: actions/setup-java@v3
7375
with:

.github/workflows/kernel_unitycatalog_test.yaml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,7 @@ jobs:
2222
uses: actions/setup-java@v3
2323
with:
2424
distribution: "zulu"
25-
java-version: "11"
25+
java-version: "17"
2626
if: steps.git-diff.outputs.diff
2727
- name: Run Unity tests with coverage
2828
run: |

.github/workflows/spark_examples_test.yaml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ jobs:
2424
uses: actions/setup-java@v3
2525
with:
2626
distribution: "zulu"
27-
java-version: "11"
27+
java-version: "17"
2828
- name: Cache Scala, SBT
2929
uses: actions/cache@v3
3030
with:

.github/workflows/spark_master_test.yaml

Lines changed: 0 additions & 59 deletions
This file was deleted.

.github/workflows/spark_python_test.yaml

Lines changed: 17 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@ jobs:
2525
uses: actions/setup-java@v3
2626
with:
2727
distribution: "zulu"
28-
java-version: "11"
28+
java-version: "17"
2929
- name: Cache Scala, SBT
3030
uses: actions/cache@v3
3131
with:
@@ -53,33 +53,36 @@ jobs:
5353
export PATH="~/.pyenv/bin:$PATH"
5454
eval "$(pyenv init -)"
5555
eval "$(pyenv virtualenv-init -)"
56-
pyenv install 3.8.18
57-
pyenv global system 3.8.18
58-
pipenv --python 3.8 install
56+
pyenv install 3.9
57+
pyenv global system 3.9
58+
pipenv --python 3.9 install
5959
# Update the pip version to 24.0. By default `pyenv.run` installs the latest pip version
6060
# available. From version 24.1, `pip` doesn't allow installing python packages
6161
# with version string containing `-`. In Delta-Spark case, the pypi package generated has
6262
# `-SNAPSHOT` in version (e.g. `3.3.0-SNAPSHOT`) as the version is picked up from
6363
# the `version.sbt` file.
6464
pipenv run pip install pip==24.0 setuptools==69.5.1 wheel==0.43.0
65-
# Install PySpark without bundled Scala 2.12 JARs - read more in the future note below
66-
pipenv run pip install pyspark==3.5.3 --no-deps
67-
pipenv run pip install py4j==0.10.9.7
68-
pipenv run pip install flake8==3.5.0 pypandoc==1.3.3
69-
pipenv run pip install black==23.9.1
65+
pipenv run pip install pyspark==4.0.1
66+
pipenv run pip install flake8==3.9.0
67+
pipenv run pip install black==23.12.1
7068
pipenv run pip install importlib_metadata==3.10.0
71-
# The mypy versions 0.982 and 1.8.0 have conflicting rules (cannot get style checks to
72-
# pass for both versions on the same file) so we upgrade this to match Spark 4.0
7369
pipenv run pip install mypy==1.8.0
7470
pipenv run pip install mypy-protobuf==3.3.0
7571
pipenv run pip install cryptography==37.0.4
7672
pipenv run pip install twine==4.0.1
7773
pipenv run pip install wheel==0.33.4
7874
pipenv run pip install setuptools==41.1.0
7975
pipenv run pip install pydocstyle==3.0.0
80-
pipenv run pip install pandas==1.1.3
81-
pipenv run pip install pyarrow==8.0.0
82-
pipenv run pip install numpy==1.20.3
76+
pipenv run pip install pandas==2.2.0
77+
pipenv run pip install pyarrow==11.0.0
78+
pipenv run pip install pypandoc==1.3.3
79+
pipenv run pip install numpy==1.22.4
80+
pipenv run pip install grpcio==1.67.0
81+
pipenv run pip install grpcio-status==1.67.0
82+
pipenv run pip install googleapis-common-protos==1.65.0
83+
pipenv run pip install protobuf==5.29.1
84+
pipenv run pip install googleapis-common-protos-stubs==2.2.0
85+
pipenv run pip install grpc-stubs==1.24.11
8386
if: steps.git-diff.outputs.diff
8487
- name: Run Python tests
8588
# when changing TEST_PARALLELISM_COUNT make sure to also change it in spark_master_test.yaml

.github/workflows/spark_test.yaml

Lines changed: 18 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@ jobs:
2929
uses: actions/setup-java@v3
3030
with:
3131
distribution: "zulu"
32-
java-version: "11"
32+
java-version: "17"
3333
- name: Cache Scala, SBT
3434
uses: actions/cache@v3
3535
with:
@@ -57,29 +57,36 @@ jobs:
5757
export PATH="~/.pyenv/bin:$PATH"
5858
eval "$(pyenv init -)"
5959
eval "$(pyenv virtualenv-init -)"
60-
pyenv install 3.8.18
61-
pyenv global system 3.8.18
62-
pipenv --python 3.8 install
60+
pyenv install 3.9
61+
pyenv global system 3.9
62+
pipenv --python 3.9 install
6363
# Update the pip version to 24.0. By default `pyenv.run` installs the latest pip version
6464
# available. From version 24.1, `pip` doesn't allow installing python packages
6565
# with version string containing `-`. In Delta-Spark case, the pypi package generated has
6666
# `-SNAPSHOT` in version (e.g. `3.3.0-SNAPSHOT`) as the version is picked up from
6767
# the `version.sbt` file.
6868
pipenv run pip install pip==24.0 setuptools==69.5.1 wheel==0.43.0
69-
pipenv run pip install pyspark==3.5.3
70-
pipenv run pip install flake8==3.5.0 pypandoc==1.3.3
71-
pipenv run pip install black==23.9.1
69+
pipenv run pip install pyspark==4.0.1
70+
pipenv run pip install flake8==3.9.0
71+
pipenv run pip install black==23.12.1
7272
pipenv run pip install importlib_metadata==3.10.0
73-
pipenv run pip install mypy==0.982
73+
pipenv run pip install mypy==1.8.0
7474
pipenv run pip install mypy-protobuf==3.3.0
7575
pipenv run pip install cryptography==37.0.4
7676
pipenv run pip install twine==4.0.1
7777
pipenv run pip install wheel==0.33.4
7878
pipenv run pip install setuptools==41.1.0
7979
pipenv run pip install pydocstyle==3.0.0
80-
pipenv run pip install pandas==1.1.3
81-
pipenv run pip install pyarrow==8.0.0
82-
pipenv run pip install numpy==1.20.3
80+
pipenv run pip install pandas==2.2.0
81+
pipenv run pip install pyarrow==11.0.0
82+
pipenv run pip install pypandoc==1.3.3
83+
pipenv run pip install numpy==1.22.4
84+
pipenv run pip install grpcio==1.67.0
85+
pipenv run pip install grpcio-status==1.67.0
86+
pipenv run pip install googleapis-common-protos==1.65.0
87+
pipenv run pip install protobuf==5.29.1
88+
pipenv run pip install googleapis-common-protos-stubs==2.2.0
89+
pipenv run pip install grpc-stubs==1.24.11
8390
if: steps.git-diff.outputs.diff
8491
- name: Scala structured logging style check
8592
run: |

.github/workflows/unidoc.yaml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@
1313
uses: actions/setup-java@v3
1414
with:
1515
distribution: "zulu"
16-
java-version: "11"
16+
java-version: "17"
1717
- uses: actions/checkout@v3
1818
- name: generate unidoc
1919
run: build/sbt "++ ${{ matrix.scala }}" unidoc

0 commit comments

Comments (0)