-
Notifications
You must be signed in to change notification settings - Fork 15
Expand file tree
/
Copy pathpyproject.toml
More file actions
261 lines (234 loc) · 9.21 KB
/
pyproject.toml
File metadata and controls
261 lines (234 loc) · 9.21 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
# PEP 517/518 build configuration. setuptools_scm (configured in
# [tool.setuptools_scm] near the bottom of this file) derives the package
# version from git tags at build time.
[build-system]
requires = ["setuptools>=64.0", "setuptools_scm[toml]>=6.2"]
build-backend = "setuptools.build_meta"
# ----------------------------------------------------------------------------
# Installation
#
# Recommended: pixi (https://pixi.sh). The pixi config below is the canonical
# environment definition; it pins the conda-forge versions of compass, isce3,
# gdal etc. that cannot ship on PyPI, and points s1-reader at the maintained
# fork at scottstanie/s1-reader@develop-scott.
#
# pixi install # solve + create the default env
# pixi run sweets --help # run inside the env
#
# For users who prefer plain conda/mamba, an environment.yml synced from this
# pyproject is provided at the repo root.
# ----------------------------------------------------------------------------
# Core PEP 621 project metadata.
[project]
name = "sweets"
description = "Workflows for generating surface displacement maps using InSAR"
readme = { file = "README.md", content-type = "text/markdown" }
requires-python = ">=3.11"
# PEP 621 license table pointing at the repo's LICENSE file. This replaces
# the former "License = file : LICENSE" entry in `classifiers`, which is not
# a valid trove classifier and would be rejected by metadata validators
# (twine check / validate-pyproject).
license = { file = "LICENSE" }
classifiers = [
    "Development Status :: 3 - Alpha",
    "Intended Audience :: Science/Research",
    "Programming Language :: Python :: 3",
    "Operating System :: OS Independent",
]
# The version is written into _version.py at install time by setuptools_scm.
dynamic = ["version"]
# These are the pip-installable runtime deps. Several heavy native deps
# (compass, isce3, gdal, libgdal-netcdf) are conda-only and live in
# [tool.pixi.dependencies] below.
dependencies = [
    "asf_search",
    "burst2safe",
    "dolphin",
    "h5py",
    "loguru",
    "numpy",
    # opera-utils comes from scottstanie/opera-utils@develop-scott via the
    # pixi pypi-dependencies block (carries the tropo workflow); a permissive
    # version pin here is fine for non-pixi installs.
    "opera-utils",
    "pandas",
    "pydantic>=2.1",
    "pyproj",
    "python-dateutil",
    "rasterio",
    "requests",
    "rich",
    "rioxarray",
    "sardem",
    "sentineleof",
    "shapely",
    "tyro",
]
# Optional extras (PEP 621). `pip install "sweets[web]"` adds the FastAPI
# stack (ASGI server, websockets, upload parsing, DB models, tile serving).
[project.optional-dependencies]
web = [
"fastapi>=0.100",
"uvicorn[standard]",
"sqlmodel",
"python-multipart",
"websockets",
"titiler.core",
]
# Project links shown on the PyPI project page.
[project.urls]
Homepage = "https://github.com/isce-framework/sweets"
"Bug Tracker" = "https://github.com/isce-framework/sweets/issues"
# Entry points for the command line interface
[project.scripts]
sweets = "sweets.cli:main"
# ----------------------------------------------------------------------------
# pixi: canonical environment definition
# ----------------------------------------------------------------------------
[tool.pixi.workspace]
channels = ["conda-forge"]
# Only these two platforms are solved for; other platforms are unsupported.
platforms = ["osx-arm64", "linux-64"]
# PyPI-side (uv-installed) packages. sweets itself is installed editable from
# the working tree; the git pins below take precedence over the matching
# names in [project.dependencies] for pixi users.
[tool.pixi.pypi-dependencies]
sweets = { path = ".", editable = true }
# Use the recently-updated forks. Upstream isce-framework/s1-reader has a numpy 2
# incompat (polyfit scalar regression, see sweets#132); upstream
# opera-adt/COMPASS still uses np.string_ / np.unicode_ which were removed
# in numpy 2. Both forks restore numpy-2 compatibility on develop-scott.
s1reader = { git = "https://github.com/scottstanie/s1-reader.git", branch = "develop-scott" }
compass = { git = "https://github.com/scottstanie/COMPASS.git", branch = "develop-scott" }
# scottstanie/opera-utils@develop-scott carries the high-level
# `create_tropo_corrections_for_stack` workflow + `search_tropo` CMR client
# that the sweets tropo step builds on, plus the OPERA CSLC download
# helpers used by the OperaCslcSearch source and the NISAR GSLC helpers
# used by NisarGslcSearch.
# opera-utils extras are pinned explicitly rather than `[all]` because
# `[all]` folds in the `geopandas` extra, which declares `pyogrio` and
# `geopandas` as PyPI requirements. uv will then install those as wheels
# on top of the conda-forge versions pixi has already provided, and the
# pyogrio wheel crashes on `import pyogrio` with "Could not correctly
# detect PROJ data files installed by pyogrio wheel" because it picks
# up conda's PROJ_DATA dir, which doesn't match the wheel's bundled
# PROJ layout. Listing only the extras sweets actually needs keeps
# geopandas/pyogrio on the conda-forge side.
opera-utils = { git = "https://github.com/scottstanie/opera-utils.git", branch = "develop-scott", extras = [
"asf",
"disp",
"nisar",
"tropo",
] }
# scottstanie/dolphin@develop-scott carries the YamlModel commented-yaml
# fix for Union-of-submodels JSON schema, which unblocks the
# `Workflow.search: BurstSearch | OperaCslcSearch | NisarGslcSearch`
# discriminated union.
dolphin = { git = "https://github.com/scottstanie/dolphin.git", branch = "develop-scott" }
# scottstanie/spurt@develop-scott is significantly ahead of upstream
# isce-framework/spurt for temporal phase unwrapping - performance
# fixes, macOS multiprocessing memory fix, richer logging, reader-
# interface cleanups. dolphin only pulls `spurt` transitively, so
# pinning the fork explicitly here gets everyone who sets
# `dolphin.unwrap_method: spurt` in their sweets config onto the
# supported version.
spurt = { git = "https://github.com/scottstanie/spurt.git", branch = "develop-scott" }
# Conda-forge dependencies shared by every pixi environment defined below.
[tool.pixi.dependencies]
python = ">=3.11"
pip = ">=21.3"
# Heavy native / conda-only deps.
# compass and opera-utils are intentionally not here - they come from scottstanie/<repo>
# via [tool.pixi.pypi-dependencies]
# isce3 is NOT shared - it lives in the `cpu` / `gpu` features below so
# users on CUDA Linux boxes can select the `gpu` environment and get
# isce3-cuda (GPU-accelerated geocoding / crossmul / resampling). Every
# existing environment folds the `cpu` feature in by default.
gdal = "*"
libgdal-netcdf = "*"
# Pure-python deps mirrored from [project.dependencies] so the env solves
# fully through conda when possible. (dolphin is intentionally NOT here -
# it comes from scottstanie/dolphin@develop-scott via the pypi-dependencies
# block above.)
asf_search = "*"
burst2safe = "*"
h5py = ">=3.6"
loguru = "*"
numpy = ">=1.25"
pandas = "*"
pydantic = ">=2.1"
pyproj = ">=3.2"
python-dateutil = "*"
rasterio = "*"
requests = ">=2.10"
rich = ">=12.0"
rioxarray = "*"
sardem = "*"
sentineleof = "*"
shapely = ">=2.0"
tyro = "*"
# compass runtime deps. compass's pyproject.toml has no [project.dependencies],
# so pip/uv-installed compass (from scottstanie/COMPASS@develop-scott) comes
# with zero runtime deps - they live only in compass's environment.yml and the
# conda-forge compass-feedstock meta.yaml. We mirror the subset sweets exercises
# here so the conda solver pulls them in. dem_stitcher / progressbar are
# RAiDER-path only and deliberately omitted.
scikit-image = "*"
pillow = "*"
pysolid = "*"
# Quoted key: `ruamel.yaml` contains a dot, so bare-key syntax would be
# parsed as a nested table instead of the literal package name.
"ruamel.yaml" = "*"
yamale = "*"
lxml = "*"
scipy = "*"
# geopandas + pyogrio are hoisted here (rather than under the plotting
# feature) so pixi's pypi-to-conda mapping marks them as satisfied for
# *every* environment. Without this, any PyPI dep that declares a
# `pyogrio` requirement (e.g. `opera-utils[geopandas]` pulled via an
# `[all]` extra) causes uv to install a pyogrio wheel on top of conda's,
# and the wheel crashes with a PROJ-data-dir mismatch on import.
geopandas = "*"
pyogrio = "*"
# Task runner entries, invoked as `pixi run <task>`.
[tool.pixi.tasks]
test = "pytest"
# `pixi run install` sets up the git hooks (delegates to pre-commit_install).
install = { depends-on = ["pre-commit_install"] }
pre-commit_install = "pre-commit install"
# Regenerate environment.yml from the pixi solve so non-pixi users have a
# current conda env spec to fall back on.
export-env = "pixi project export conda-environment > environment.yml"
# Dev/test-only tools, pulled in via the `test` feature (folded into the
# `default` and `test` environments below).
[tool.pixi.feature.test.dependencies]
pytest = ">=8.3.5,<9"
pytest-cov = "*"
pytest-randomly = "*"
pytest-recording = "*"
pytest-xdist = "*"
pre-commit = "*"
mypy = "*"
# Interactive plotting / notebook extras, via the `plotting` feature.
[tool.pixi.feature.plotting.dependencies]
cartopy = "*"
matplotlib = "*"
ipywidgets = "*"
# CPU (default) isce3 build - works on every platform sweets supports.
[tool.pixi.feature.cpu.dependencies]
isce3 = ">=0.24"
# CUDA-accelerated isce3 for geometry / crossmul / resampling. Only
# available on linux-64; conda-forge doesn't ship osx-arm64 builds.
# Requires CUDA 12+ on the host. Activate with `pixi shell -e gpu`.
[tool.pixi.feature.gpu.target.linux-64.dependencies]
isce3-cuda = ">=0.24"
# Declares the host-provided CUDA version the solver may assume.
[tool.pixi.feature.gpu.system-requirements]
cuda = "12"
[tool.pixi.environments]
# `cpu` is folded into every CPU environment so isce3 is always present.
# The shared solve-group keeps all CPU environments on one consistent solve.
default = { features = ["test", "plotting", "cpu"], solve-group = "default" }
test = { features = ["test", "plotting", "cpu"], solve-group = "default" }
plotting = { features = ["plotting", "cpu"], solve-group = "default" }
minimal = { features = ["cpu"], solve-group = "default" }
# GPU environment: same extras as `default` but swaps isce3 -> isce3-cuda.
# `solve-group` is deliberately separate so pixi doesn't try to reconcile
# the CPU and GPU isce3 builds in a single resolution.
gpu = { features = ["test", "plotting", "gpu"] }
[tool.setuptools_scm]
# https://github.com/pypa/setuptools_scm#configuration-parameters
write_to = "src/sweets/_version.py"
# https://github.com/pypa/setuptools_scm#version-number-construction
version_scheme = "no-guess-dev" # Will not guess the next version
# --- Formatter / linter / type-checker configuration -----------------------
[tool.black]
target-version = ["py311", "py312", "py313", "py314"]
preview = true
[tool.isort]
profile = "black"
known_first_party = ["sweets"]
[tool.mypy]
# NOTE(review): mypy targets 3.12 while requires-python is >=3.11 - confirm
# this gap is intentional.
python_version = "3.12"
ignore_missing_imports = true
plugins = ["pydantic.mypy"]
[tool.pydocstyle]
ignore = "D100,D102,D104,D105,D106,D107,D203,D204,D213,D413"
[tool.pytest.ini_options]
filterwarnings = [
# Uncomment to promote warnings to test failures.
# "error",
]