Fix widget with Python 3.9. Fix flipY for lat-reversed data sets. Bump to 0.4.14.
msoechting committed Feb 6, 2024
1 parent a2b5fc1 commit 349144a
Showing 17 changed files with 75 additions and 46 deletions.
4 changes: 2 additions & 2 deletions README.md
@@ -29,7 +29,7 @@ Example notebooks can be found in the [examples](https://github.com/msoechting/l

## Attribution

When using Lexcube in your research, please cite:
When using Lexcube and/or generated images, please acknowledge/cite:
```bibtex
@ARTICLE{soechting2024lexcube,
author={Söchting, Maximilian and Mahecha, Miguel D. and Montero, David and Scheuermann, Gerik},
@@ -101,7 +101,7 @@ If you are using Jupyter within VSCode, you may have to add the following to you
"unpkg.com"
],
```
This allows the Lexcube JavaScript front-end files to be downloaded from these sources ([read more](https://github.com/microsoft/vscode-jupyter/wiki/IPyWidget-Support-in-VS-Code-Python)).
If you are working on a remote server in VSCode, do not forget to set this setting also there! This allows the Lexcube JavaScript front-end files to be downloaded from these sources ([read more](https://github.com/microsoft/vscode-jupyter/wiki/IPyWidget-Support-in-VS-Code-Python)).

## Installation

4 changes: 2 additions & 2 deletions examples/1_introduction.ipynb
@@ -40,7 +40,7 @@
"outputs": [],
"source": [
"# ** Option 1: Use a numpy data set\n",
"# ds = np.sum(np.mgrid[0:256,0:256,0:256], axis=0)"
"# data_source = np.sum(np.mgrid[0:256,0:256,0:256], axis=0)"
]
},
{
@@ -50,7 +50,7 @@
"outputs": [],
"source": [
"# ** Option 2: Load a local xarray data set\n",
"# ds = xr.open_dataset(\"/data/my_data_set.zarr\", chunks={}, engine=\"zarr\")"
"# data_source = xr.open_dataset(\"/data/my_data_set.zarr\", chunks={}, engine=\"zarr\")"
]
},
{
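Taken together, the renamed `data_source` from either option is what gets passed to the widget. A minimal usage sketch, assuming `Cube3DWidget` is exposed at the package top level as in the README examples (the Zarr path is the notebook's placeholder):

```python
import numpy as np
import xarray as xr
import lexcube

# Option 1: synthetic numpy cube (3D array of shape 256x256x256)
data_source = np.sum(np.mgrid[0:256, 0:256, 0:256], axis=0)

# Option 2: local xarray data set; select one variable to get a 3D DataArray
# data_source = xr.open_dataset("/data/my_data_set.zarr", chunks={}, engine="zarr")["my_variable"]

w = lexcube.Cube3DWidget(data_source)
w  # displaying the widget renders the interactive cube in the notebook output
```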
2 changes: 1 addition & 1 deletion lexcube/_frontend.py
@@ -22,4 +22,4 @@
"""

module_name = "lexcube"
module_version = "^0.4.13"
module_version = "^0.4.14"
2 changes: 1 addition & 1 deletion lexcube/_version.py
@@ -17,4 +17,4 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

__version__ = "0.4.13"
__version__ = "0.4.14"
9 changes: 6 additions & 3 deletions lexcube/cube3d.py
@@ -21,7 +21,10 @@
import datetime
import math
import asyncio
from traitlets import Unicode, Dict, Float, Union, List, Int, validate, TraitError, Bool, Tuple
from traitlets import Unicode, Dict, Float, List, Int, validate, TraitError, Bool, Tuple
import traitlets
from typing import Union

from ._frontend import module_name, module_version
import ipywidgets as widgets
from lexcube.lexcube_server.src.lexcube_widget import start_tile_server_in_widget_mode
@@ -56,7 +59,7 @@ class Cube3DWidget(widgets.DOMWidget):
request_progress = Dict().tag(sync=True)
vmin = Float(allow_none=True).tag(sync=True)
vmax = Float(allow_none=True).tag(sync=True)
cmap = Union([Unicode(), List()], allow_none=True).tag(sync=True)
cmap = traitlets.Union([Unicode(), List()], allow_none=True).tag(sync=True)
xlim = Tuple(Int(), Int(), default_value=(-1, -1)).tag(sync=True)
ylim = Tuple(Int(), Int(), default_value=(-1, -1)).tag(sync=True)
zlim = Tuple(Int(), Int(), default_value=(-1, -1)).tag(sync=True)
@@ -67,7 +70,7 @@ class Cube3DWidget(widgets.DOMWidget):

isometric_mode = Bool(False).tag(sync=True)

def __init__(self, data_source, cmap: str | list | None = None, vmin: float | None = None, vmax: float | None = None, isometric_mode: bool = False, use_lexcube_chunk_caching: bool = True, **kwargs):
def __init__(self, data_source, cmap: Union[str, list, None] = None, vmin: Union[float, None] = None, vmax: Union[float, None] = None, isometric_mode: bool = False, use_lexcube_chunk_caching: bool = True, **kwargs):
super().__init__(**kwargs)
self.cmap = cmap
self.vmin = vmin
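The key fix here: PEP 604 unions such as `str | list | None` are evaluated when the signature is defined, so they raise a `TypeError` on Python 3.9 (cube3d.py does not use `from __future__ import annotations`), hence the switch to `typing.Union`. Since importing `Union` from `typing` would otherwise shadow the `Union` trait previously imported from traitlets, the `cmap` trait now references `traitlets.Union` explicitly. A short sketch of both points, with a hypothetical function name:

```python
from typing import Union

import traitlets
from traitlets import List, Unicode

# Fine on Python 3.9 and later:
def set_cmap(cmap: Union[str, list, None] = None) -> None:
    ...

# Would raise "TypeError: unsupported operand type(s) for |" at definition time on 3.9:
# def set_cmap(cmap: str | list | None = None) -> None:
#     ...

# Trait declarations must use traitlets.Union, not typing.Union:
cmap = traitlets.Union([Unicode(), List()], allow_none=True)
```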
2 changes: 2 additions & 0 deletions lexcube/lexcube_server/requirements-core.txt
@@ -1,5 +1,7 @@
s3fs==2023.12.2
bottleneck==1.3.7
cachey==0.2.1
dask==2024.1.1
netCDF4==1.6.5
opencv-python-headless==4.8.1.78
psutil==5.9.6
3 changes: 2 additions & 1 deletion lexcube/lexcube_server/src/lexcube_widget.py
@@ -16,12 +16,13 @@

from __future__ import annotations
from .tile_server import TileServer, calculate_max_lod, API_VERSION, get_dimension_labels
from typing import Union
import ipywidgets as widgets
import numpy as np
import xarray as xr


def start_tile_server_in_widget_mode(widget: widgets.DOMWidget, data_source: xr.DataArray | np.ndarray, use_lexcube_chunk_caching: bool):
def start_tile_server_in_widget_mode(widget: widgets.DOMWidget, data_source: Union[xr.DataArray, np.ndarray], use_lexcube_chunk_caching: bool):
if type(data_source) not in [xr.DataArray, np.ndarray]:
print("Error: Input data is not xarray.DataArray or numpy.ndarray")
raise Exception("Error: Input data is not xarray.DataArray or numpy.ndarray")
35 changes: 15 additions & 20 deletions lexcube/lexcube_server/src/tile_server.py
@@ -42,6 +42,7 @@
import xarray as xr
import zfpy
from dask.cache import Cache
from typing import Union

UNCOMPRESSED_SUFFIX = "_uncompressed"
ANOMALY_PARAMETER_ID_SUFFIX = "_lxc_anomaly"
@@ -56,7 +57,7 @@
TILE_FORMAT_MAGIC_BYTES = "lexc".encode("utf-8") # 6c 65 78 63, magic bytes to recognize lexcube tiles

class DataSourceProxy:
def __init__(self, data_source: np.ndarray | xr.DataArray) -> None:
def __init__(self, data_source: Union[xr.DataArray, np.ndarray]) -> None:
self.data_source = data_source
self.cache_chunks = type(data_source) == xr.DataArray and data_source.chunks and len(data_source.chunks) > 0
self.shape = self.data_source.shape
@@ -109,7 +110,7 @@ def get_chunk(self, iz: int, iy: int, ix: int):
self.chunk_cache[chunk_key] = self.data_source[slices].values
return self.chunk_cache[chunk_key]

def validate_slice(self, s: slice | int, dimension: int):
def validate_slice(self, s: Union[slice, int], dimension: int):
if type(s) == int:
s = slice(s, s + 1)
return slice(max(s.start, 0), min(s.stop, self.shape[dimension]))
@@ -152,7 +153,7 @@ def compress_nan_mask(self, nan_mask: bytes) -> bytes:
def decompress_nan_mask(self, data: bytes) -> bytes:
return self.nan_mask_compressor.decode(data)

def get_tile_data_compressor(self, use_lossless_override: (None | bool) = None):
def get_tile_data_compressor(self, use_lossless_override: Union[bool, None] = None):
lossless = self.compress_lossless if use_lossless_override == None else use_lossless_override
if lossless:
return self.tile_data_compressor_lossless
@@ -164,7 +165,7 @@ def compress_tile_data(self, tile_data: bytes, is_anomaly_tile: bool = False) ->
self.tile_data_compressor_default.tolerance = self.anomaly_compression_tolerance if is_anomaly_tile else self.default_compression_tolerance
return self.get_tile_data_compressor().encode(tile_data)

def decompress_tile_data(self, tile_data: bytes, use_lossless_override: (None | bool) = None) -> bytes:
def decompress_tile_data(self, tile_data: bytes, use_lossless_override: Union[bool, None] = None) -> bytes:
return self.get_tile_data_compressor(use_lossless_override).decode(tile_data)


@@ -241,21 +242,20 @@ def patch_data(data: np.ndarray, dataset_id: str, parameter: str, dataset_config
if parameter == "snow_water_equivalent":
data = np.where(data==-1, np.nan, data) # -1 = Oceans = NaN
data = np.where(data==-2, 0, data) # -2 = mountains or something...
if dataset_config and dataset_config.flipped_y:
data = np.flip(data, axis=1)
return data

def patch_dataset(ds: xr.DataArray | np.ndarray):
def patch_dataset(ds: Union[xr.DataArray, xr.Dataset, np.ndarray]):
if type(ds) == np.ndarray:
return ds
# Some datasets from xee (Google Earth Engine) have (time, lon, lat) dimension order, fix that here:
if ds.dims[1] in LONGITUDE_DIMENSION_NAMES and ds.dims[2] in LATITUDE_DIMENSION_NAMES:
ds = ds.transpose(ds.dims[0], ds.dims[2], ds.dims[1])
dims = list(ds.dims)
if dims[1] in LONGITUDE_DIMENSION_NAMES and dims[2] in LATITUDE_DIMENSION_NAMES:
ds = ds.transpose(dims[0], dims[2], dims[1])
# For data sets where the latitude is sorted by descending values (turning the world upside down), flip that:
if ds.dims[1] in LATITUDE_DIMENSION_NAMES:
lat_values = ds[ds.dims[1]]
if dims[1] in LATITUDE_DIMENSION_NAMES:
lat_values = ds[dims[1]]
if lat_values[0] < lat_values[len(lat_values) - 1]:
ds = np.flip(ds, axis=1)
ds = ds.sortby(dims[1], ascending=False)
return ds

def open_dataset(config: ServerConfig, path: str):
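The latitude fix above replaces the axis flip with a coordinate-aware sort: data sets whose latitude values ascend (which renders the world upside down) are reordered with `sortby`, so the coordinate labels stay consistent with the data. A self-contained sketch with made-up coordinates:

```python
import numpy as np
import xarray as xr

da = xr.DataArray(
    np.arange(2 * 3 * 4).reshape(2, 3, 4),
    dims=("time", "lat", "lon"),
    coords={"lat": [-30.0, 0.0, 30.0]},  # ascending latitude
)

lat_values = da[da.dims[1]]
if lat_values[0] < lat_values[len(lat_values) - 1]:
    da = da.sortby(da.dims[1], ascending=False)  # reorder data and labels together

print(da["lat"].values)  # [ 30.   0. -30.]
```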
@@ -288,7 +288,6 @@ def __init__(self, dataset_config: dict) -> None:
self.calculate_anomalies = bool(dataset_config.get("calculateYearlyAnomalies") or False)
self.force_tile_generation = bool(dataset_config.get("forceTileGeneration") or False)
self.max_lod = int(dataset_config.get("overrideMaxLod") or -1)
self.flipped_y = bool(dataset_config.get("flippedY") or False)
self.use_offline_metadata = bool(dataset_config.get("useOfflineMetadata") or False)
self.min_max_values_approximate_only = bool(dataset_config.get("approximateMinMaxValues") or True)

@@ -382,9 +381,6 @@ def load_from_dataset(self, dataset: Dataset, data: xr.Dataset):
"z": get_dimension_labels(data, self.z_dimension_name, self.z_dimension_type)
}

if dataset.dataset_config.flipped_y:
self.axis_labels["y"] = self.axis_labels["y"][::-1]

class ParameterMetadataParser:
def __init__(self, config: ServerConfig, min_max_values_approximate_only: bool, dataset_path: str, dataset_id: str) -> None:
self.min_max_values_approximate_only = min_max_values_approximate_only
@@ -557,7 +553,6 @@ def __init__(self, server_config: ServerConfig, dataset_config: dict, base_dir:
self.x_max = -1
self.y_max = -1
self.z_max = -1
self.flipped_y = self.dataset_config.flipped_y
self.tile_size = tile_size
self.use_offline_metadata = self.dataset_config.use_offline_metadata
self.meta_data = DatasetMetadata()
@@ -817,7 +812,7 @@ def get_anomaly_tile(self):
def get_hash_key(self):
return "-".join([self.dataset_id, self.parameter, str(self.index_dimension.value), str(self.index_value), str(self.lod), str(self.x), str(self.y)])

def generate_from_data(self, source_data: np.ndarray | xr.DataArray | DataSourceProxy, tile_compressor: TileCompressor, z_offset: int = 0, added_compression_error: float = 0.0, resample_resolution: int = 1, compress_lossless: bool = False):
def generate_from_data(self, source_data: Union[xr.DataArray, np.ndarray, DataSourceProxy], tile_compressor: TileCompressor, z_offset: int = 0, added_compression_error: float = 0.0, resample_resolution: int = 1, compress_lossless: bool = False):
lod_factor = pow(2, self.lod)
inverse_lod_factor = 1 / lod_factor
lod_tile_size = lod_factor * self.tile_size
@@ -887,7 +882,7 @@ def write_to_intermediate_single_file(self, path: str, compressed_data: bytes, s
def get_tile_metadata_bytes(self, resample_resolution: int, nan_mask_length: int, max_error_or_magic_number: float):
return TILE_FORMAT_MAGIC_BYTES + struct.pack("<I", TILE_VERSION) + struct.pack("<I", resample_resolution) + struct.pack("<I", nan_mask_length) + struct.pack("<d", max_error_or_magic_number)

def compress_data(self, source_values: np.ndarray | xr.DataArray, tile_compressor: TileCompressor, resample_resolution: int = 1, added_compression_error: float = 0.0):
def compress_data(self, source_values: Union[xr.DataArray, np.ndarray], tile_compressor: TileCompressor, resample_resolution: int = 1, added_compression_error: float = 0.0):
if np.all(np.isnan(source_values)):
return self.get_tile_metadata_bytes(0, 0, NAN_TILE_MAGIC_NUMBER)
# if np.any(np.isnan(source_values)):
@@ -1027,7 +1022,7 @@ def update_progress(self, request_group_id: int, request_id: int, done: int, tot
total = sum(c[1] for c in current.values())
self.widget_update_progress([done, total])

def startup_widget(self, data_source: xr.DataArray | np.ndarray, use_lexcube_chunk_caching: bool):
def startup_widget(self, data_source: Union[xr.DataArray, np.ndarray], use_lexcube_chunk_caching: bool):
if type(data_source) == xr.DataArray and not data_source.chunks:
print("Xarray input object does not have chunks. You can re-open with 'chunks={}' to enable dask for caching and progress reporting functionality - but may be overall slower for small data sets.")
dask_cache = Cache(2e9) # Leverage two gigabytes of memory
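For context on the `Cache(2e9)` line in `startup_widget` and the chunking hint printed above it: dask's opportunistic cache only helps when the input is dask-backed (e.g. opened with `chunks={}`), and it has to be registered to take effect. A minimal sketch, assuming the registration call and reusing the placeholder Zarr path from the examples:

```python
import xarray as xr
from dask.cache import Cache

dask_cache = Cache(2e9)  # keep up to ~2 GB of intermediate chunk results in memory
dask_cache.register()    # activate opportunistic caching for subsequent dask work

# chunks={} makes the variables dask-backed, so repeated tile requests over
# overlapping regions can reuse cached chunks instead of re-reading from disk:
ds = xr.open_dataset("/data/my_data_set.zarr", chunks={}, engine="zarr")
```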
4 changes: 2 additions & 2 deletions package-lock.json

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion package.json
@@ -1,6 +1,6 @@
{
"name": "lexcube",
"version": "0.4.13",
"version": "0.4.14",
"description": "Lexcube: 3D Data Cube Visualization in Jupyter Notebooks",
"keywords": [
"jupyter",
4 changes: 2 additions & 2 deletions pyproject.toml
@@ -45,7 +45,7 @@ dependencies = [
"zarr>=2.14.2",
"zfpy>=1.0.0",
]
version = "0.4.13"
version = "0.4.14"

[project.optional-dependencies]
docs = [
@@ -109,7 +109,7 @@ build_cmd = "build:prod"
github_url = "https://github.com/msoechting/lexcube/"

[tool.tbump.version]
current = "0.4.13"
current = "0.4.14"

# Example of a semver regexp.
# Make sure this matches current_version before
3 changes: 2 additions & 1 deletion src/lexcube-client/src/client/client.ts
@@ -129,9 +129,10 @@ class CubeClientContext {
}

async startup() {
this.networking.connect();
await this.interaction.startup();
this.rendering.startup();
await this.networking.connect();
this.networking.postStartup();
this.postStartup();
}

2 changes: 1 addition & 1 deletion src/lexcube-client/src/client/constants.ts
@@ -78,6 +78,6 @@ const DEFAULT_WIDGET_HEIGHT = 768;
const API_VERSION = 5;
const TILE_VERSION = 2;

const PACKAGE_VERSION = "0.4.13";
const PACKAGE_VERSION = "0.4.14";

export { DeviceOrientation, PACKAGE_VERSION, positiveModulo, range, getIndexDimensionOfFace, getAddressedFacesOfDimension, getFacesOfIndexDimension, capitalizeString, DEFAULT_WIDGET_WIDTH, DEFAULT_WIDGET_HEIGHT, DEFAULT_COLORMAP, ANOMALY_PARAMETER_ID_SUFFIX, TILE_FORMAT_MAGIC_BYTES, TILE_VERSION, TILE_SIZE, MAX_ZOOM_FACTOR, NAN_TILE_MAGIC_NUMBER, LOSSLESS_TILE_MAGIC_NUMBER, NAN_REPLACEMENT_VALUE, COLORMAP_STEPS, NOT_LOADED_REPLACEMENT_VALUE, API_VERSION, Dimension, CubeFace }
24 changes: 22 additions & 2 deletions src/lexcube-client/src/client/index.html
@@ -79,6 +79,17 @@
</div>
</div>

<div class="fullscreen-wrapper nopointer noselect" style="top: 15px; left: 15px;">
<img src="lexcube-logo.png" style="width: calc(max(300px, 17vw)); opacity: 0.9;"/>
</div>

<div class="noselect top-left-ui ui-normal jupyter-notification" onclick="this.style.display = 'none';localStorage.setItem('dismiss-jupyter', `${Date.now()}`);" style="padding: 1px;">
<a href='https://github.com/msoechting/lexcube' target='_blank'><div style="background-image: url('lexcube-jupyter.png'); width: 337px;;
height: 140px;background-size: contain; cursor:pointer"></div></a>
<div class="close-button" onclick="this.parentNode.onclick()"></div>
</div>
<!-- <a href="https://github.com/msoechting/lexcube" target="_blank" style="width: 100%; height: 100%;"></a> -->

<div class="ui-normal noselect options-ui" style="display: none;">
<button type="button" class="collapsible-button" onclick="this.parentNode.style.display = 'none';">Close</button>
<hr>
@@ -151,12 +162,12 @@ <h3 class="noselect expert-mode" style="cursor: pointer;" onclick="let g = this.
<div class="ui-normal flex-col-center dataset-info-window">
<div class="dataset-info" style="max-width: 100%; overflow-y: auto; overflow-x: hidden;"></div>

<div style="width: 100%; text-align: center;">
<div style="width: 100%; text-align: center;">
<hr>
<div class="expert-mode">
LexCube is an interactive visualization of large-scale earth data sets. Created at Leipzig University by Maximilian Söchting.
</div>
<p xmlns:cc="http://creativecommons.org/ns#" xmlns:dct="http://purl.org/dc/terms/">Images generated by <a property="dct:title" rel="cc:attributionURL" target="_blank" href="https://www.lexcube.org">LexCube - Leipzig Explorer of Earth Data Cubes</a> by <a rel="cc:attributionURL dct:creator" property="cc:attributionName" target="_blank" href="https://rsc4earth.de/authors/msoechting/">Maximilian Söchting</a> are licensed under <a href="http://creativecommons.org/licenses/by/4.0/?ref=chooser-v1" target="_blank" rel="license noopener noreferrer" style="display:inline-block;">CC BY 4.0<img style="height:22px!important;margin-left:3px;vertical-align:text-bottom;" src="https://mirrors.creativecommons.org/presskit/icons/cc.svg?ref=chooser-v1"><img style="height:22px!important;margin-left:3px;vertical-align:text-bottom;" src="https://mirrors.creativecommons.org/presskit/icons/by.svg?ref=chooser-v1"></a></p>
<div><b>When using Lexcube and generated images acknowledge/cite</b>: M. Söchting, M. D. Mahecha, D. Montero and G. Scheuermann, "Lexcube: Interactive Visualization of Large Earth System Data Cubes," in IEEE Computer Graphics and Applications, vol. 44, no. 1, pp. 25-37, Jan.-Feb. 2024, doi: <a href="https://www.doi.org/10.1109/MCG.2023.3321989" target="_blank">10.1109/MCG.2023.3321989</a>.</div>

<div class="expert-mode">
Client Version: <%= htmlWebpackPlugin.options.version %> (Commit: <%= htmlWebpackPlugin.options.commitDate %>, Build: <%= htmlWebpackPlugin.options.buildDate %>)
@@ -254,6 +265,15 @@ <h1>Craft Your Data Cube!</h1>
const b = document.body;
if (document.URL.indexOf("localhost") != -1) {
b.getElementsByClassName("tutorial-wrapper")[0].style.display = "none";
try {
const c = localStorage.getItem('dismiss-jupyter');
const v = parseInt(c);
const notificationInterval = 1000 * 60 * 60 * 24 * 2; // 2 days
if (!isNaN(v) && Date.now() - v < notificationInterval) {
b.getElementsByClassName("jupyter-notification")[0].style.display = "none";
}
} catch (e) {
}
}
var touchDevice = ('ontouchstart' in window) || (navigator.maxTouchPoints > 0);
b.getElementsByClassName("tutorial-mouse")[0].style.display = touchDevice ? 'none' : 'block';