Compare revisions: QIM/tools/qim3d

Changes are shown as if the source revision was being merged into the target revision.
from . import doi
from .progress_bar import ProgressBar
from .system import Memory
from .misc import (
......@@ -13,3 +12,5 @@ from .misc import (
downscale_img,
scale_to_float16,
)
from .server import start_http_server
\ No newline at end of file
from zarr.util import normalize_chunks, normalize_dtype, normalize_shape
import numpy as np
def get_chunk_size(shape:tuple, dtype):
"""
How the chunk size is computed in zarr.storage.init_array_metadata, which is run in the chain of functions used
by the qim3d.io.export_ome_zarr function
Parameters
----------
- shape: shape of the data
- dtype: dtype of the data
"""
object_codec = None
dtype, object_codec = normalize_dtype(dtype, object_codec)
shape = normalize_shape(shape) + dtype.shape
dtype = dtype.base
chunks = None
chunks = normalize_chunks(chunks, shape, dtype.itemsize)
return chunks
def get_n_chunks(shapes:tuple, dtypes:tuple):
"""
Estimates in advance how many chunks will be used, so the number can be passed to a progress bar to track how many
have already been written to disk
Parameters
----------
- shapes: list of shapes of the data for each scale
- dtypes: list of dtypes of the data for each scale
"""
n_chunks = 0
for shape, dtype in zip(shapes, dtypes):
chunk_size = np.array(get_chunk_size(shape, dtype))
shape = np.array(shape)
ratio = shape/chunk_size
n_chunks += np.prod(ratio)
return int(n_chunks)
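As a quick illustration of how these two helpers fit together, the sketch below estimates the chunk layout and total chunk count for a hypothetical three-level pyramid; the shapes and dtypes are made up for the example and get_chunk_size/get_n_chunks are assumed to be in scope.

```python
import numpy as np

# Hypothetical multi-scale pyramid: each level roughly halves the previous shape
shapes = [(512, 512, 512), (256, 256, 256), (128, 128, 128)]
dtypes = [np.uint16] * len(shapes)

# Chunk layout zarr would pick for the full-resolution level
print(get_chunk_size(shapes[0], dtypes[0]))

# Total number of chunks over all levels, e.g. to use as the total of a progress bar
print(get_n_chunks(shapes, dtypes))
```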
from threading import Timer
import psutil
import sys
import os
from abc import ABC, abstractmethod
from tqdm.auto import tqdm
......@@ -21,21 +23,69 @@ class RepeatTimer(Timer):
while not self.finished.wait(self.interval):
self.function(*self.args, **self.kwargs)
class ProgressBar(ABC):
def __init__(self,tqdm_kwargs:dict, repeat_time: float, *args, **kwargs):
"""
Context manager ('with' statement) to track progress during a long process over
which we have no control (such as loading a file) and thus cannot insert tqdm
updates into a loop.
Instead, a parallel thread is run that periodically checks the state.
Parameters:
------------
- tqdm_kwargs (dict): Passed directly to tqdm constructor
- repeat_time (float): How often the timer runs the function (in seconds)
"""
self.timer = RepeatTimer(repeat_time, self.update_pbar)
self.pbar = tqdm(**tqdm_kwargs)
self.last_update = 0
def update_pbar(self):
new_update = self.get_new_update()
update = new_update - self.last_update
try:
self.pbar.update(update)
except (
AttributeError
): # When we leave the context manager, we delete the pbar so it cannot be updated anymore
# It's because the timer takes a while to end and might update the pbar
# one more time before ending, which would corrupt the display
pass
self.last_update = new_update
@abstractmethod
def get_new_update(self):
pass
def __enter__(self):
self.timer.start()
def __exit__(self, exception_type, exception_value, exception_traceback):
self.timer.cancel()
self.pbar.clear()
self.pbar.n = self.pbar.total
self.pbar.display()
del self.pbar # So the update process cannot update it anymore
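To make the contract of this abstract base class concrete, here is a minimal hypothetical subclass; the class name and the seconds-based "progress" are invented purely for illustration, the only real requirement being that get_new_update returns a monotonically growing value.

```python
import time

class SecondsProgressBar(ProgressBar):
    # Hypothetical subclass: reports elapsed wall-clock seconds while a blocking call runs
    def __init__(self, seconds: int):
        self.start = time.time()
        super().__init__(dict(total=seconds, desc="Waiting", unit="s"), repeat_time=0.5)

    def get_new_update(self):
        # The base class turns the difference from the previous value into pbar.update()
        return int(time.time() - self.start)

with SecondsProgressBar(3):
    time.sleep(3)  # stands in for a long call we cannot instrument from the inside
```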
class ProgressBar:
class FileLoadingProgressBar(ProgressBar):
def __init__(self, filename: str, repeat_time: float = 0.5, *args, **kwargs):
"""
Creates class for 'with' statement to track progress during loading a file into memory
Context manager ('with' statement) to track progress during loading a file into memory
Parameters:
------------
- filename (str): used to determine the size of the file
- repeat_time (float, optional): How often the timer checks how many bytes were loaded. Even if set very small,
it doesn't make the progress bar smoother, as there are only a few visible changes in the number of read_chars.
Defaults to 0.25
Defaults to 0.5
"""
self.timer = RepeatTimer(repeat_time, self.memory_check)
self.pbar = tqdm(
tqdm_kwargs = dict(
total=get_file_size(filename),
desc="Loading: ",
unit="B",
......@@ -45,33 +95,77 @@ class ProgressBar:
bar_format="{l_bar}{bar}| {n_fmt}{unit}/{total_fmt}{unit} [{elapsed}<{remaining}, "
"{rate_fmt}{postfix}]",
)
self.last_memory = 0
super().__init__( tqdm_kwargs, repeat_time)
self.process = psutil.Process()
def memory_check(self):
def get_new_update(self):
counters = self.process.io_counters()
try:
memory = counters.read_chars
except AttributeError:
memory = counters.read_bytes + counters.other_bytes
return memory
try:
self.pbar.update(memory - self.last_memory)
except (
AttributeError
): # When we leave the context manager, we delete the pbar so it can not be updated anymore
# It's because it takes quite a long time for the timer to end and might update the pbar
# one more time before ending which messes up the whole thing
pass
class OmeZarrExportProgressBar(ProgressBar):
def __init__(self, path: str, n_chunks: int, repeat_time="auto"):
"""
Context manager to track the exporting of OmeZarr files.
self.last_memory = memory
Parameters
----------
path : str
The folder path where the files will be saved.
n_chunks : int
The total number of chunks to track.
repeat_time : int or float, optional
The interval (in seconds) for updating the progress bar. Defaults to "auto", which
sets the update frequency based on the number of chunks.
"""
def __enter__(self):
self.timer.start()
def __exit__(self, exception_type, exception_value, exception_traceback):
self.timer.cancel()
self.pbar.clear()
self.pbar.n = self.pbar.total
self.pbar.display()
del self.pbar # So the update process can not update it anymore
# Calculate the repeat time for the progress bar
if repeat_time == "auto":
# Approximate the repeat time based on the number of chunks
# This ratio is based on reading the HOA dataset over the network:
# 620,000 files took 300 seconds to read
# The ratio is a little smaller than needed to avoid disk stress
repeat_time = n_chunks / 1500
else:
repeat_time = float(repeat_time)
# We don't want to update the progress bar too often anyway
if repeat_time < 0.5:
repeat_time = 0.5
self.path = path
tqdm_kwargs = dict(
total = n_chunks,
unit = "Chunks",
desc = "Saving",
unit_scale = True
)
super().__init__(tqdm_kwargs, repeat_time)
self.last_update = 0
def get_new_update(self):
def file_count(folder_path:str):
"""
Recursively goes through the folders and counts how many files there are.
Doesn't count hidden metadata JSON files (names starting with '.')
"""
count = 0
for path in os.listdir(folder_path):
new_path = os.path.join(folder_path, path)
if os.path.isfile(new_path):
filename = os.path.basename(os.path.normpath(new_path))
if not filename.startswith("."):
count += 1
else:
count += file_count(new_path)
return count
return file_count(self.path)
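For reference, a typical hedged usage of the file-loading variant could look like the following; the path is a placeholder and tifffile is just one possible reader.

```python
import tifffile

# "large_volume.tif" is a placeholder path; while the blocking read runs,
# the background timer polls the process I/O counters and advances the bar
with FileLoadingProgressBar("large_volume.tif"):
    vol = tifffile.imread("large_volume.tif")
```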
import os
from http.server import SimpleHTTPRequestHandler, HTTPServer
import threading
from qim3d.utils.logger import log
class CustomHTTPRequestHandler(SimpleHTTPRequestHandler):
def end_headers(self):
"""Add CORS headers to each response."""
# Allow requests from any origin, or restrict to specific domains by specifying the origin
self.send_header("Access-Control-Allow-Origin", "*")
# Allow specific methods
self.send_header("Access-Control-Allow-Methods", "GET, POST, OPTIONS")
# Allow specific headers (if needed)
self.send_header("Access-Control-Allow-Headers", "X-Requested-With, Content-Type")
super().end_headers()
def list_directory(self, path):
"""Helper to produce a directory listing, includes hidden files."""
try:
file_list = os.listdir(path)
except OSError:
self.send_error(404, "No permission to list directory")
return None
# Sort the file list
file_list.sort(key=lambda a: a.lower())
# Format the list with hidden files included
displaypath = os.path.basename(path)
r = ['<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">']
r.append(f"<html>\n<title>Directory listing for {displaypath}</title>\n")
r.append(f"<body>\n<h2>Directory listing for {displaypath}</h2>\n")
r.append("<hr>\n<ul>")
for name in file_list:
fullname = os.path.join(path, name)
displayname = linkname = name
# Append the files and directories to the HTML list
if os.path.isdir(fullname):
displayname = name + "/"
linkname = name + "/"
r.append(f'<li><a href="{linkname}">{displayname}</a></li>')
r.append("</ul>\n<hr>\n</body>\n</html>\n")
encoded = "\n".join(r).encode('utf-8', 'surrogateescape')
self.send_response(200)
self.send_header("Content-Type", "text/html; charset=utf-8")
self.send_header("Content-Length", str(len(encoded)))
self.end_headers()
# Write the encoded HTML directly to the response
self.wfile.write(encoded)
def start_http_server(directory, port=8000):
"""
Starts an HTTP server serving the specified directory on the given port with CORS enabled.
Parameters:
directory (str): The directory to serve.
port (int): The port number to use (default is 8000).
"""
# Change the working directory to the specified directory
os.chdir(directory)
# Create the server
server = HTTPServer(("", port), CustomHTTPRequestHandler)
# Run the server in a separate thread so it doesn't block execution
thread = threading.Thread(target=server.serve_forever)
thread.daemon = True
thread.start()
log.info(f"Serving directory '{directory}'\nhttp://localhost:{port}/")
return server
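A small sketch of how the server might be used and how the CORS header added in end_headers can be checked; the directory and port are placeholders.

```python
import urllib.request

# Serve a placeholder directory (e.g. one containing an OME-Zarr store)
server = start_http_server("/tmp/zarr_data", port=8000)

# Every response should now carry the permissive CORS header
with urllib.request.urlopen("http://localhost:8000/") as resp:
    print(resp.headers.get("Access-Control-Allow-Origin"))  # expected: *

server.shutdown()  # stop the background serving thread when done
```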
......@@ -7,6 +7,7 @@ from .explore import (
slicer,
slices,
)
from .itk_vtk_viewer import itk_vtk, Installer, NotInstalledError
from .k3d import vol, mesh
from .local_thickness_ import local_thickness
from .structure_tensor import vectors
......
......@@ -12,6 +12,9 @@ def plot_cc(
overlay=None,
crop=False,
show=True,
cmap:str = 'viridis',
vmin:float = None,
vmax:float = None,
**kwargs,
) -> list[plt.Figure]:
"""
......@@ -24,6 +27,9 @@ def plot_cc(
overlay (optional): Overlay image. Defaults to None.
crop (bool, optional): Whether to crop the image to the cc. Defaults to False.
show (bool, optional): Whether to show the figure. Defaults to True.
cmap (str, optional): Specifies the color map for the image. Defaults to "viridis".
vmin (float, optional): Together with vmax, defines the data range the colormap covers. By default, the colormap covers the full range. Defaults to None.
vmax (float, optional): Together with vmin, defines the data range the colormap covers. By default, the colormap covers the full range. Defaults to None.
**kwargs: Additional keyword arguments to pass to `qim3d.viz.slices`.
Returns:
......@@ -66,11 +72,10 @@ def plot_cc(
overlay_crop = overlay[bb]
# use cc as a mask for overlay_crop: positions where cc is 0 are masked out (cc contains integer labels)
overlay_crop = np.where(cc == 0, 0, overlay_crop)
fig = qim3d.viz.slices(overlay_crop, show=show, **kwargs)
else:
cc = connected_components.get_cc(component, crop=False)
overlay_crop = np.where(cc == 0, 0, overlay)
fig = qim3d.viz.slices(overlay_crop, show=show, **kwargs)
fig = qim3d.viz.slices(overlay_crop, show=show, cmap = cmap, vmin = vmin, vmax = vmax, **kwargs)
else:
# assigns discrete color map to each connected component if not given
if "cmap" not in kwargs:
......
......@@ -78,7 +78,7 @@ def objects(
import qim3d
vol = qim3d.examples.cement_128x128x128
binary = qim3d.processing.filters.gaussian(vol, 2) < 60
binary = qim3d.processing.filters.gaussian(vol, sigma = 2) < 60
labeled_volume, num_labels = qim3d.processing.operations.watershed(binary)
cmap = qim3d.viz.colormaps.objects(num_labels, style = 'bright')
......
......@@ -20,6 +20,8 @@ def slices(
n_slices: int = 5,
max_cols: int = 5,
cmap: str = "viridis",
vmin:float = None,
vmax:float = None,
img_height: int = 2,
img_width: int = 2,
show: bool = False,
......@@ -41,6 +43,8 @@ def slices(
n_slices (int, optional): Defines how many slices the user wants to be displayed. Defaults to 5.
max_cols (int, optional): The maximum number of columns to be plotted. Defaults to 5.
cmap (str, optional): Specifies the color map for the image. Defaults to "viridis".
vmin (float, optional): Together with vmax, defines the data range the colormap covers. By default, the colormap covers the full range. Defaults to None.
vmax (float, optional): Together with vmin, defines the data range the colormap covers. By default, the colormap covers the full range. Defaults to None.
img_height (int, optional): Height of the figure.
img_width (int, optional): Width of the figure.
show (bool, optional): If True, displays the plot (i.e. calls plt.show()). Defaults to False.
......@@ -136,8 +140,13 @@ def slices(
slice_idx = i * max_cols + j
try:
slice_img = vol.take(slice_idxs[slice_idx], axis=axis)
# If vmin is higher than the highest value in the image, a ValueError is raised
# We don't want to override the passed values, because the next slices might be fine
new_vmin = None if (isinstance(vmin, (float, int)) and vmin > np.max(slice_img)) else vmin
new_vmax = None if (isinstance(vmax, (float, int)) and vmax < np.min(slice_img)) else vmax
ax.imshow(
slice_img, cmap=cmap, interpolation=interpolation, **imshow_kwargs
slice_img, cmap=cmap, interpolation=interpolation, vmin=new_vmin, vmax=new_vmax, **imshow_kwargs
)
if show_position:
......@@ -200,6 +209,8 @@ def slicer(
vol: np.ndarray,
axis: int = 0,
cmap: str = "viridis",
vmin:float = None,
vmax:float = None,
img_height: int = 3,
img_width: int = 3,
show_position: bool = False,
......@@ -213,6 +224,8 @@ def slicer(
vol (np.ndarray): The 3D volume to be sliced.
axis (int, optional): Specifies the axis, or dimension, along which to slice. Defaults to 0.
cmap (str, optional): Specifies the color map for the image. Defaults to "viridis".
vmin (float, optional): Together with vmax, defines the data range the colormap covers. By default, the colormap covers the full range. Defaults to None.
vmax (float, optional): Together with vmin, defines the data range the colormap covers. By default, the colormap covers the full range. Defaults to None.
img_height (int, optional): Height of the figure. Defaults to 3.
img_width (int, optional): Width of the figure. Defaults to 3.
show_position (bool, optional): If True, displays the position of the slices. Defaults to False.
......@@ -241,6 +254,8 @@ def slicer(
vol,
axis=axis,
cmap=cmap,
vmin = vmin,
vmax = vmax,
img_height=img_height,
img_width=img_width,
show_position=show_position,
......@@ -268,6 +283,8 @@ def slicer(
def orthogonal(
vol: np.ndarray,
cmap: str = "viridis",
vmin:float = None,
vmax:float = None,
img_height: int = 3,
img_width: int = 3,
show_position: bool = False,
......@@ -279,6 +296,8 @@ def orthogonal(
Args:
vol (np.ndarray): The 3D volume to be sliced.
cmap (str, optional): Specifies the color map for the image. Defaults to "viridis".
vmin (float, optional): Together with vmax, defines the data range the colormap covers. By default, the colormap covers the full range. Defaults to None.
vmax (float, optional): Together with vmin, defines the data range the colormap covers. By default, the colormap covers the full range. Defaults to None.
img_height (int, optional): Height of the figure.
img_width (int, optional): Width of the figure.
show_position (bool, optional): If True, displays the position of the slices. Defaults to False.
......@@ -301,34 +320,22 @@ def orthogonal(
img_height = img_size
img_width = img_size
z_slicer = slicer(
vol,
axis=0,
cmap=cmap,
img_height=img_height,
img_width=img_width,
show_position=show_position,
interpolation=interpolation,
)
y_slicer = slicer(
get_slicer_for_axis = lambda axis: slicer(
vol,
axis=1,
cmap=cmap,
img_height=img_height,
img_width=img_width,
show_position=show_position,
interpolation=interpolation,
)
x_slicer = slicer(
vol,
axis=2,
axis = axis,
cmap = cmap,
vmin = vmin,
vmax = vmax,
img_height = img_height,
img_width = img_width,
show_position = show_position,
interpolation = interpolation,
)
z_slicer = get_slicer_for_axis(axis = 0)
y_slicer = get_slicer_for_axis(axis = 1)
x_slicer = get_slicer_for_axis(axis = 2)
z_slicer.children[0].description = "Z"
y_slicer.children[0].description = "Y"
x_slicer.children[0].description = "X"
......@@ -336,7 +343,7 @@ def orthogonal(
return widgets.HBox([z_slicer, y_slicer, x_slicer])
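A brief usage sketch of the new vmin/vmax pass-through, assuming orthogonal is exported from qim3d.viz like the other explore functions; the intensity limits are illustrative values.

```python
import qim3d

vol = qim3d.examples.cement_128x128x128
# vmin/vmax now propagate to all three slicers through the shared slicer factory
qim3d.viz.orthogonal(vol, cmap="viridis", vmin=60, vmax=200)
```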
def interactive_fade_mask(vol: np.ndarray, axis: int = 0):
def interactive_fade_mask(vol: np.ndarray, axis: int = 0, cmap: str = 'viridis', vmin: float = None, vmax: float = None):
"""Interactive widget for visualizing the effect of edge fading on a 3D volume.
This can be used to select the best parameters before applying the mask.
......@@ -344,6 +351,9 @@ def interactive_fade_mask(vol: np.ndarray, axis: int = 0):
Args:
vol (np.ndarray): The volume to apply edge fading to.
axis (int, optional): The axis along which to apply the fading. Defaults to 0.
cmap (str, optional): Specifies the color map for the image. Defaults to "viridis".
vmin (float, optional): Together with vmax, defines the data range the colormap covers. By default, the colormap covers the full range. Defaults to None.
vmax (float, optional): Together with vmin, defines the data range the colormap covers. By default, the colormap covers the full range. Defaults to None.
Example:
```python
......@@ -359,7 +369,13 @@ def interactive_fade_mask(vol: np.ndarray, axis: int = 0):
def _slicer(position, decay_rate, ratio, geometry, invert):
fig, axes = plt.subplots(1, 3, figsize=(9, 3))
axes[0].imshow(vol[position, :, :], cmap="viridis")
slice_img = vol[position, :, :]
# If vmin is higher than the highest value in the image, a ValueError is raised
# We don't want to override the passed values, because the next slices might be fine
new_vmin = None if (isinstance(vmin, (float, int)) and vmin > np.max(slice_img)) else vmin
new_vmax = None if (isinstance(vmax, (float, int)) and vmax < np.min(slice_img)) else vmax
axes[0].imshow(slice_img, cmap=cmap, vmin = new_vmin, vmax = new_vmax)
axes[0].set_title("Original")
axes[0].axis("off")
......@@ -371,7 +387,7 @@ def interactive_fade_mask(vol: np.ndarray, axis: int = 0):
axis=axis,
invert=invert,
)
axes[1].imshow(mask[position, :, :], cmap="viridis")
axes[1].imshow(mask[position, :, :], cmap=cmap)
axes[1].set_title("Mask")
axes[1].axis("off")
......@@ -383,15 +399,20 @@ def interactive_fade_mask(vol: np.ndarray, axis: int = 0):
axis=axis,
invert=invert,
)
axes[2].imshow(masked_vol[position, :, :], cmap="viridis")
# If vmin is higher than the highest value in the image, a ValueError is raised
# We don't want to override the passed values, because the next slices might be fine
slice_img = masked_vol[position, :, :]
new_vmin = None if (isinstance(vmin, (float, int)) and vmin > np.max(slice_img)) else vmin
new_vmax = None if (isinstance(vmax, (float, int)) and vmax < np.min(slice_img)) else vmax
axes[2].imshow(slice_img, cmap=cmap, vmin = new_vmin, vmax = new_vmax)
axes[2].set_title("Masked")
axes[2].axis("off")
return fig
shape_dropdown = widgets.Dropdown(
options=["sphere", "cilinder"],
value="sphere", # default value
options=["spherical", "cylindrical"],
value="spherical", # default value
description="Geometry",
)
......
from .installation import Installer
from .run import itk_vtk
from .helpers import NotInstalledError
\ No newline at end of file
from pathlib import Path
import os
import platform
from typing import Callable
import qim3d
class NotInstalledError(Exception): pass
SOURCE_FNM = "fnm env --use-on-cd | Out-String | Invoke-Expression;"
LINUX = 'Linux'
WINDOWS = 'Windows'
MAC = 'Darwin'
def get_itk_dir() -> Path:
qim_dir = Path(qim3d.__file__).parents[0] #points to .../qim3d/qim3d/
dir = qim_dir.joinpath("viz/itk_vtk_viewer")
return dir
def get_nvm_dir(dir:Path = None) -> Path:
if platform.system() in [LINUX, MAC]:
following_folder = ".nvm"
elif platform.system() == WINDOWS:
following_folder = ''
return dir.joinpath(following_folder) if dir is not None else get_itk_dir().joinpath(following_folder)
def get_node_binaries_dir(nvm_dir:Path = None) -> Path:
"""
Node versions can change over time; this makes sure we use the newest installed one.
For Windows, the nvm_dir argument has to be passed, and it is the itk-vtk dir
"""
if platform.system() in [LINUX, MAC]:
following_folder = "versions/node"
binaries_folder = 'bin'
elif platform.system() == WINDOWS:
following_folder = 'node-versions'
binaries_folder = 'installation'
node_folder = nvm_dir.joinpath(following_folder) if nvm_dir is not None else get_nvm_dir().joinpath(following_folder)
# We don't want to throw an error here
# Instead we return None and check the returned value in run.py
if not os.path.isdir(node_folder):
return None
l = sorted(os.listdir(node_folder))
for name in l[::-1]:
path = node_folder.joinpath(name)
if os.path.isdir(path):
return path.joinpath(binaries_folder)
def get_viewer_dir(dir:Path = None) -> Path:
following_folder = "viewer_app"
return dir.joinpath(following_folder) if dir is not None else get_itk_dir().joinpath(following_folder)
def get_viewer_binaries(viewer_dir:Path = None) -> Path:
following_folder1 = 'node_modules'
following_folder2 = '.bin'
if viewer_dir is None:
viewer_dir = get_viewer_dir()
return viewer_dir.joinpath(following_folder1).joinpath(following_folder2)
def run_for_platform(linux_func:Callable, windows_func:Callable, macos_func:Callable):
this_platform = platform.system()
if this_platform == LINUX:
return linux_func()
elif this_platform == WINDOWS:
return windows_func()
elif this_platform == MAC:
return macos_func()
def lambda_raise(err):
raise err
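To illustrate the dispatch helper and why lambda_raise exists (a plain lambda body cannot contain a raise statement), a small hedged sketch using the names defined above:

```python
# Hypothetical dispatch: each callable runs only on its own platform
run_for_platform(
    linux_func=lambda: print("nvm-based install path"),
    macos_func=lambda: print("nvm-based install path"),
    windows_func=lambda: lambda_raise(NotInstalledError("fnm-based path not set up")),
)
```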
#!/usr/bin/env bash
{ # this ensures the entire script is downloaded #
nvm_has() {
type "$1" > /dev/null 2>&1
}
nvm_echo() {
command printf %s\\n "$*" 2>/dev/null
}
if [ -z "${BASH_VERSION}" ] || [ -n "${ZSH_VERSION}" ]; then
# shellcheck disable=SC2016
nvm_echo >&2 'Error: the install instructions explicitly say to pipe the install script to `bash`; please follow them'
exit 1
fi
nvm_grep() {
GREP_OPTIONS='' command grep "$@"
}
nvm_default_install_dir() {
[ -z "${XDG_CONFIG_HOME-}" ] && printf %s "${HOME}/.nvm" || printf %s "${XDG_CONFIG_HOME}/nvm"
}
nvm_install_dir() {
if [ -n "$NVM_DIR" ]; then
printf %s "${NVM_DIR}"
else
nvm_default_install_dir
fi
}
nvm_latest_version() {
nvm_echo "v0.39.7"
}
nvm_profile_is_bash_or_zsh() {
local TEST_PROFILE
TEST_PROFILE="${1-}"
case "${TEST_PROFILE-}" in
*"/.bashrc" | *"/.bash_profile" | *"/.zshrc" | *"/.zprofile")
return
;;
*)
return 1
;;
esac
}
#
# Outputs the location to NVM depending on:
# * The availability of $NVM_SOURCE
# * The presence of $NVM_INSTALL_GITHUB_REPO
# * The method used ("script" or "git" in the script, defaults to "git")
# NVM_SOURCE always takes precedence unless the method is "script-nvm-exec"
#
nvm_source() {
local NVM_GITHUB_REPO
NVM_GITHUB_REPO="${NVM_INSTALL_GITHUB_REPO:-nvm-sh/nvm}"
if [ "${NVM_GITHUB_REPO}" != 'nvm-sh/nvm' ]; then
{ nvm_echo >&2 "$(cat)" ; } << EOF
@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
@ WARNING: REMOTE REPO IDENTIFICATION HAS CHANGED! @
@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
IT IS POSSIBLE THAT SOMEONE IS DOING SOMETHING NASTY!
The default repository for this install is \`nvm-sh/nvm\`,
but the environment variables \`\$NVM_INSTALL_GITHUB_REPO\` is
currently set to \`${NVM_GITHUB_REPO}\`.
If this is not intentional, interrupt this installation and
verify your environment variables.
EOF
fi
local NVM_VERSION
NVM_VERSION="${NVM_INSTALL_VERSION:-$(nvm_latest_version)}"
local NVM_METHOD
NVM_METHOD="$1"
local NVM_SOURCE_URL
NVM_SOURCE_URL="$NVM_SOURCE"
if [ "_$NVM_METHOD" = "_script-nvm-exec" ]; then
NVM_SOURCE_URL="https://raw.githubusercontent.com/${NVM_GITHUB_REPO}/${NVM_VERSION}/nvm-exec"
elif [ "_$NVM_METHOD" = "_script-nvm-bash-completion" ]; then
NVM_SOURCE_URL="https://raw.githubusercontent.com/${NVM_GITHUB_REPO}/${NVM_VERSION}/bash_completion"
elif [ -z "$NVM_SOURCE_URL" ]; then
if [ "_$NVM_METHOD" = "_script" ]; then
NVM_SOURCE_URL="https://raw.githubusercontent.com/${NVM_GITHUB_REPO}/${NVM_VERSION}/nvm.sh"
elif [ "_$NVM_METHOD" = "_git" ] || [ -z "$NVM_METHOD" ]; then
NVM_SOURCE_URL="https://github.com/${NVM_GITHUB_REPO}.git"
else
nvm_echo >&2 "Unexpected value \"$NVM_METHOD\" for \$NVM_METHOD"
return 1
fi
fi
nvm_echo "$NVM_SOURCE_URL"
}
#
# Node.js version to install
#
nvm_node_version() {
nvm_echo "$NODE_VERSION"
}
nvm_download() {
if nvm_has "curl"; then
curl --fail --compressed -q "$@"
elif nvm_has "wget"; then
# Emulate curl with wget
ARGS=$(nvm_echo "$@" | command sed -e 's/--progress-bar /--progress=bar /' \
-e 's/--compressed //' \
-e 's/--fail //' \
-e 's/-L //' \
-e 's/-I /--server-response /' \
-e 's/-s /-q /' \
-e 's/-sS /-nv /' \
-e 's/-o /-O /' \
-e 's/-C - /-c /')
# shellcheck disable=SC2086
eval wget $ARGS
fi
}
install_nvm_from_git() {
local INSTALL_DIR
INSTALL_DIR="$(nvm_install_dir)"
local NVM_VERSION
NVM_VERSION="${NVM_INSTALL_VERSION:-$(nvm_latest_version)}"
if [ -n "${NVM_INSTALL_VERSION:-}" ]; then
# Check if version is an existing ref
if command git ls-remote "$(nvm_source "git")" "$NVM_VERSION" | nvm_grep -q "$NVM_VERSION" ; then
:
# Check if version is an existing changeset
elif ! nvm_download -o /dev/null "$(nvm_source "script-nvm-exec")"; then
nvm_echo >&2 "Failed to find '$NVM_VERSION' version."
exit 1
fi
fi
local fetch_error
if [ -d "$INSTALL_DIR/.git" ]; then
# Updating repo
nvm_echo "=> nvm is already installed in $INSTALL_DIR, trying to update using git"
command printf '\r=> '
fetch_error="Failed to update nvm with $NVM_VERSION, run 'git fetch' in $INSTALL_DIR yourself."
else
fetch_error="Failed to fetch origin with $NVM_VERSION. Please report this!"
nvm_echo "=> Downloading nvm from git to '$INSTALL_DIR'"
command printf '\r=> '
mkdir -p "${INSTALL_DIR}"
if [ "$(ls -A "${INSTALL_DIR}")" ]; then
# Initializing repo
command git init "${INSTALL_DIR}" || {
nvm_echo >&2 'Failed to initialize nvm repo. Please report this!'
exit 2
}
command git --git-dir="${INSTALL_DIR}/.git" remote add origin "$(nvm_source)" 2> /dev/null \
|| command git --git-dir="${INSTALL_DIR}/.git" remote set-url origin "$(nvm_source)" || {
nvm_echo >&2 'Failed to add remote "origin" (or set the URL). Please report this!'
exit 2
}
else
# Cloning repo
command git clone "$(nvm_source)" --depth=1 "${INSTALL_DIR}" || {
nvm_echo >&2 'Failed to clone nvm repo. Please report this!'
exit 2
}
fi
fi
# Try to fetch tag
if command git --git-dir="$INSTALL_DIR"/.git --work-tree="$INSTALL_DIR" fetch origin tag "$NVM_VERSION" --depth=1 2>/dev/null; then
:
# Fetch given version
elif ! command git --git-dir="$INSTALL_DIR"/.git --work-tree="$INSTALL_DIR" fetch origin "$NVM_VERSION" --depth=1; then
nvm_echo >&2 "$fetch_error"
exit 1
fi
command git -c advice.detachedHead=false --git-dir="$INSTALL_DIR"/.git --work-tree="$INSTALL_DIR" checkout -f --quiet FETCH_HEAD || {
nvm_echo >&2 "Failed to checkout the given version $NVM_VERSION. Please report this!"
exit 2
}
if [ -n "$(command git --git-dir="$INSTALL_DIR"/.git --work-tree="$INSTALL_DIR" show-ref refs/heads/master)" ]; then
if command git --no-pager --git-dir="$INSTALL_DIR"/.git --work-tree="$INSTALL_DIR" branch --quiet 2>/dev/null; then
command git --no-pager --git-dir="$INSTALL_DIR"/.git --work-tree="$INSTALL_DIR" branch --quiet -D master >/dev/null 2>&1
else
nvm_echo >&2 "Your version of git is out of date. Please update it!"
command git --no-pager --git-dir="$INSTALL_DIR"/.git --work-tree="$INSTALL_DIR" branch -D master >/dev/null 2>&1
fi
fi
nvm_echo "=> Compressing and cleaning up git repository"
if ! command git --git-dir="$INSTALL_DIR"/.git --work-tree="$INSTALL_DIR" reflog expire --expire=now --all; then
nvm_echo >&2 "Your version of git is out of date. Please update it!"
fi
if ! command git --git-dir="$INSTALL_DIR"/.git --work-tree="$INSTALL_DIR" gc --auto --aggressive --prune=now ; then
nvm_echo >&2 "Your version of git is out of date. Please update it!"
fi
return
}
#
# Automatically install Node.js
#
nvm_install_node() {
local NODE_VERSION_LOCAL
NODE_VERSION_LOCAL="$(nvm_node_version)"
if [ -z "$NODE_VERSION_LOCAL" ]; then
return 0
fi
nvm_echo "=> Installing Node.js version $NODE_VERSION_LOCAL"
nvm install "$NODE_VERSION_LOCAL"
local CURRENT_NVM_NODE
CURRENT_NVM_NODE="$(nvm_version current)"
if [ "$(nvm_version "$NODE_VERSION_LOCAL")" == "$CURRENT_NVM_NODE" ]; then
nvm_echo "=> Node.js version $NODE_VERSION_LOCAL has been successfully installed"
else
nvm_echo >&2 "Failed to install Node.js $NODE_VERSION_LOCAL"
fi
}
install_nvm_as_script() {
local INSTALL_DIR
INSTALL_DIR="$(nvm_install_dir)"
local NVM_SOURCE_LOCAL
NVM_SOURCE_LOCAL="$(nvm_source script)"
local NVM_EXEC_SOURCE
NVM_EXEC_SOURCE="$(nvm_source script-nvm-exec)"
local NVM_BASH_COMPLETION_SOURCE
NVM_BASH_COMPLETION_SOURCE="$(nvm_source script-nvm-bash-completion)"
# Downloading to $INSTALL_DIR
mkdir -p "$INSTALL_DIR"
if [ -f "$INSTALL_DIR/nvm.sh" ]; then
nvm_echo "=> nvm is already installed in $INSTALL_DIR, trying to update the script"
else
nvm_echo "=> Downloading nvm as script to '$INSTALL_DIR'"
fi
nvm_download -s "$NVM_SOURCE_LOCAL" -o "$INSTALL_DIR/nvm.sh" || {
nvm_echo >&2 "Failed to download '$NVM_SOURCE_LOCAL'"
return 1
} &
nvm_download -s "$NVM_EXEC_SOURCE" -o "$INSTALL_DIR/nvm-exec" || {
nvm_echo >&2 "Failed to download '$NVM_EXEC_SOURCE'"
return 2
} &
nvm_download -s "$NVM_BASH_COMPLETION_SOURCE" -o "$INSTALL_DIR/bash_completion" || {
nvm_echo >&2 "Failed to download '$NVM_BASH_COMPLETION_SOURCE'"
return 2
} &
for job in $(jobs -p | command sort)
do
wait "$job" || return $?
done
chmod a+x "$INSTALL_DIR/nvm-exec" || {
nvm_echo >&2 "Failed to mark '$INSTALL_DIR/nvm-exec' as executable"
return 3
}
}
nvm_try_profile() {
if [ -z "${1-}" ] || [ ! -f "${1}" ]; then
return 1
fi
nvm_echo "${1}"
}
#
# Detect profile file if not specified as environment variable
# (eg: PROFILE=~/.myprofile)
# The echo'ed path is guaranteed to be an existing file
# Otherwise, an empty string is returned
#
nvm_detect_profile() {
if [ "${PROFILE-}" = '/dev/null' ]; then
# the user has specifically requested NOT to have nvm touch their profile
return
fi
if [ -n "${PROFILE}" ] && [ -f "${PROFILE}" ]; then
nvm_echo "${PROFILE}"
return
fi
local DETECTED_PROFILE
DETECTED_PROFILE=''
if [ "${SHELL#*bash}" != "$SHELL" ]; then
if [ -f "$HOME/.bashrc" ]; then
DETECTED_PROFILE="$HOME/.bashrc"
elif [ -f "$HOME/.bash_profile" ]; then
DETECTED_PROFILE="$HOME/.bash_profile"
fi
elif [ "${SHELL#*zsh}" != "$SHELL" ]; then
if [ -f "$HOME/.zshrc" ]; then
DETECTED_PROFILE="$HOME/.zshrc"
elif [ -f "$HOME/.zprofile" ]; then
DETECTED_PROFILE="$HOME/.zprofile"
fi
fi
if [ -z "$DETECTED_PROFILE" ]; then
for EACH_PROFILE in ".profile" ".bashrc" ".bash_profile" ".zprofile" ".zshrc"
do
if DETECTED_PROFILE="$(nvm_try_profile "${HOME}/${EACH_PROFILE}")"; then
break
fi
done
fi
if [ -n "$DETECTED_PROFILE" ]; then
nvm_echo "$DETECTED_PROFILE"
fi
}
#
# Check whether the user has any globally-installed npm modules in their system
# Node, and warn them if so.
#
nvm_check_global_modules() {
local NPM_COMMAND
NPM_COMMAND="$(command -v npm 2>/dev/null)" || return 0
[ -n "${NVM_DIR}" ] && [ -z "${NPM_COMMAND%%"$NVM_DIR"/*}" ] && return 0
local NPM_VERSION
NPM_VERSION="$(npm --version)"
NPM_VERSION="${NPM_VERSION:--1}"
[ "${NPM_VERSION%%[!-0-9]*}" -gt 0 ] || return 0
local NPM_GLOBAL_MODULES
NPM_GLOBAL_MODULES="$(
npm list -g --depth=0 |
command sed -e '/ npm@/d' -e '/ (empty)$/d'
)"
local MODULE_COUNT
MODULE_COUNT="$(
command printf %s\\n "$NPM_GLOBAL_MODULES" |
command sed -ne '1!p' | # Remove the first line
wc -l | command tr -d ' ' # Count entries
)"
if [ "${MODULE_COUNT}" != '0' ]; then
# shellcheck disable=SC2016
nvm_echo '=> You currently have modules installed globally with `npm`. These will no'
# shellcheck disable=SC2016
nvm_echo '=> longer be linked to the active version of Node when you install a new node'
# shellcheck disable=SC2016
nvm_echo '=> with `nvm`; and they may (depending on how you construct your `$PATH`)'
# shellcheck disable=SC2016
nvm_echo '=> override the binaries of modules installed with `nvm`:'
nvm_echo
command printf %s\\n "$NPM_GLOBAL_MODULES"
nvm_echo '=> If you wish to uninstall them at a later point (or re-install them under your'
# shellcheck disable=SC2016
nvm_echo '=> `nvm` Nodes), you can remove them from the system Node as follows:'
nvm_echo
nvm_echo ' $ nvm use system'
nvm_echo ' $ npm uninstall -g a_module'
nvm_echo
fi
}
nvm_do_install() {
if [ -n "${NVM_DIR-}" ] && ! [ -d "${NVM_DIR}" ]; then
if [ -e "${NVM_DIR}" ]; then
nvm_echo >&2 "File \"${NVM_DIR}\" has the same name as installation directory."
exit 1
fi
if [ "${NVM_DIR}" = "$(nvm_default_install_dir)" ]; then
mkdir "${NVM_DIR}"
else
nvm_echo >&2 "You have \$NVM_DIR set to \"${NVM_DIR}\", but that directory does not exist. Check your profile files and environment."
exit 1
fi
fi
# Disable the optional which check, https://www.shellcheck.net/wiki/SC2230
# shellcheck disable=SC2230
if nvm_has xcode-select && [ "$(xcode-select -p >/dev/null 2>/dev/null ; echo $?)" = '2' ] && [ "$(which git)" = '/usr/bin/git' ] && [ "$(which curl)" = '/usr/bin/curl' ]; then
nvm_echo >&2 'You may be on a Mac, and need to install the Xcode Command Line Developer Tools.'
# shellcheck disable=SC2016
nvm_echo >&2 'If so, run `xcode-select --install` and try again. If not, please report this!'
exit 1
fi
if [ -z "${METHOD}" ]; then
# Autodetect install method
if nvm_has git; then
install_nvm_from_git
elif nvm_has curl || nvm_has wget; then
install_nvm_as_script
else
nvm_echo >&2 'You need git, curl, or wget to install nvm'
exit 1
fi
elif [ "${METHOD}" = 'git' ]; then
if ! nvm_has git; then
nvm_echo >&2 "You need git to install nvm"
exit 1
fi
install_nvm_from_git
elif [ "${METHOD}" = 'script' ]; then
if ! nvm_has curl && ! nvm_has wget; then
nvm_echo >&2 "You need curl or wget to install nvm"
exit 1
fi
install_nvm_as_script
else
nvm_echo >&2 "The environment variable \$METHOD is set to \"${METHOD}\", which is not recognized as a valid installation method."
exit 1
fi
nvm_echo
local NVM_PROFILE
NVM_PROFILE="$(nvm_detect_profile)"
local PROFILE_INSTALL_DIR
PROFILE_INSTALL_DIR="$(nvm_install_dir | command sed "s:^$HOME:\$HOME:")"
SOURCE_STR="\\nexport NVM_DIR=\"${PROFILE_INSTALL_DIR}\"\\n[ -s \"\$NVM_DIR/nvm.sh\" ] && \\. \"\$NVM_DIR/nvm.sh\" # This loads nvm\\n"
# shellcheck disable=SC2016
COMPLETION_STR='[ -s "$NVM_DIR/bash_completion" ] && \. "$NVM_DIR/bash_completion" # This loads nvm bash_completion\n'
BASH_OR_ZSH=false
if [ -z "${NVM_PROFILE-}" ] ; then
local TRIED_PROFILE
if [ -n "${PROFILE}" ]; then
TRIED_PROFILE="${NVM_PROFILE} (as defined in \$PROFILE), "
fi
nvm_echo "=> Profile not found. Tried ${TRIED_PROFILE-}~/.bashrc, ~/.bash_profile, ~/.zprofile, ~/.zshrc, and ~/.profile."
nvm_echo "=> Create one of them and run this script again"
nvm_echo " OR"
nvm_echo "=> Append the following lines to the correct file yourself:"
command printf "${SOURCE_STR}"
nvm_echo
else
if nvm_profile_is_bash_or_zsh "${NVM_PROFILE-}"; then
BASH_OR_ZSH=true
fi
if ! command grep -qc '/nvm.sh' "$NVM_PROFILE"; then
nvm_echo "=> Appending nvm source string to $NVM_PROFILE"
command printf "${SOURCE_STR}" >> "$NVM_PROFILE"
else
nvm_echo "=> nvm source string already in ${NVM_PROFILE}"
fi
# shellcheck disable=SC2016
if ${BASH_OR_ZSH} && ! command grep -qc '$NVM_DIR/bash_completion' "$NVM_PROFILE"; then
nvm_echo "=> Appending bash_completion source string to $NVM_PROFILE"
command printf "$COMPLETION_STR" >> "$NVM_PROFILE"
else
nvm_echo "=> bash_completion source string already in ${NVM_PROFILE}"
fi
fi
if ${BASH_OR_ZSH} && [ -z "${NVM_PROFILE-}" ] ; then
nvm_echo "=> Please also append the following lines to the if you are using bash/zsh shell:"
command printf "${COMPLETION_STR}"
fi
# Source nvm
# shellcheck source=/dev/null
\. "$(nvm_install_dir)/nvm.sh"
nvm_check_global_modules
nvm_install_node
nvm_reset
nvm_echo "=> Close and reopen your terminal to start using nvm or run the following to use it now:"
command printf "${SOURCE_STR}"
if ${BASH_OR_ZSH} ; then
command printf "${COMPLETION_STR}"
fi
}
#
# Unsets the various functions defined
# during the execution of the install script
#
nvm_reset() {
unset -f nvm_has nvm_install_dir nvm_latest_version nvm_profile_is_bash_or_zsh \
nvm_source nvm_node_version nvm_download install_nvm_from_git nvm_install_node \
install_nvm_as_script nvm_try_profile nvm_detect_profile nvm_check_global_modules \
nvm_do_install nvm_reset nvm_default_install_dir nvm_grep
}
[ "_$NVM_ENV" = "_testing" ] || nvm_do_install
} # this ensures the entire script is downloaded #
from pathlib import Path
import subprocess
import os
import platform
from .helpers import get_itk_dir, get_nvm_dir, get_node_binaries_dir, get_viewer_dir, SOURCE_FNM, NotInstalledError, run_for_platform
class Installer:
"""
Implements installation procedure of itk-vtk-viewer for each OS.
Also goes for minimal installation: checking if the necessary binaries aren't already installed
"""
def __init__(self):
self.platform = platform.system()
self.install_functions = (self.install_node_manager, self.install_node, self.install_viewer)
self.dir = get_itk_dir() # itk_vtk_viewer folder within qim3d.viz
# If nvm was already installed, this environment variable should be set
# However, it could also have been installed via our process, or the user deleted the folder but didn't adjust the bashrc, so we check again
self.os_nvm_dir = os.getenv('NVM_DIR')
if self.os_nvm_dir is not None:
self.os_nvm_dir = Path(self.os_nvm_dir)
self.qim_nvm_dir = get_nvm_dir(self.dir)
if not os.path.isdir(self.qim_nvm_dir):
os.mkdir(self.qim_nvm_dir)
@property
def is_node_manager_already_installed(self) -> bool:
"""
Checks for global and local installation of nvm (Node Version Manager)
"""
def _linux() -> bool:
command_f = lambda nvmsh: F'/bin/bash -c "source {nvmsh} && nvm"'
if self.os_nvm_dir is not None:
nvmsh = self.os_nvm_dir.joinpath('nvm.sh')
output = subprocess.run(command_f(nvmsh), shell = True, capture_output = True)
if not output.stderr:
self.nvm_dir = self.os_nvm_dir
return True
nvmsh = self.qim_nvm_dir.joinpath('nvm.sh')
output = subprocess.run(command_f(nvmsh), shell = True, capture_output = True)
self.nvm_dir = self.qim_nvm_dir
return not bool(output.stderr) # If there is an error running the above command then it is not installed (not in expected location)
def _windows() -> bool:
output = subprocess.run(['powershell.exe', 'fnm --version'], capture_output=True)
return not bool(output.stderr)
return run_for_platform(linux_func=_linux, windows_func=_windows,macos_func= _linux)
@property
def is_node_already_installed(self) -> bool:
"""
Checks for global and local installation of Node.js and npm (Node Package Manager)
"""
def _linux() -> bool:
# get_node_binaries_dir might return None if the folder is not there
# In that case 'None' is added to the PATH, which is not a problem:
# the command will write an error to the output and it will be evaluated as not installed
command = F'export PATH="$PATH:{get_node_binaries_dir(self.nvm_dir)}" && npm version'
output = subprocess.run(command, shell = True, capture_output = True)
return not bool(output.stderr)
def _windows() -> bool:
# Didn't figure out how to install the viewer and run it properly when using global npm
return False
return run_for_platform(linux_func=_linux,windows_func= _windows,macos_func= _linux)
def install(self):
"""
First checks whether some of the binaries are already installed.
If Node.js is already installed (npm could be called without raising an error),
only the viewer has to be installed and the rest of the process is skipped
"""
if self.is_node_manager_already_installed:
self.install_status = 1
print("Node manager already installed")
if self.is_node_already_installed:
self.install_status = 2
print("Node.js already installed")
else:
self.install_status = 0
for install_function in self.install_functions[self.install_status:]:
install_function()
def install_node_manager(self):
def _linux():
print(F'Installing Node manager into {self.nvm_dir}...')
_ = subprocess.run([F'export NVM_DIR={self.nvm_dir} && bash {self.dir.joinpath("install_nvm.sh")}'], shell = True, capture_output=True)
def _windows():
print("Installing node manager...")
subprocess.run(["powershell.exe", F'$env:XDG_DATA_HOME = "{self.dir}";', "winget install Schniz.fnm"])
# self._run_for_platform(_linux, None, _windows)
run_for_platform(linux_func=_linux,windows_func= _windows,macos_func= _linux)
print("Node manager installed")
def install_node(self):
def _linux():
"""
If nvm was already installed, the terminal should have the environment variable 'NVM_DIR' pointing to where nvm.sh is.
We have to source that file either way, to be able to call the nvm function.
If it wasn't installed before, we need to export NVM_DIR in order to install npm to the correct location
"""
print(F'Installing node.js into {self.nvm_dir}...')
if self.install_status == 0:
nvm_dir = self.nvm_dir
prefix = F'export NVM_DIR={nvm_dir} && '
elif self.install_status == 1:
nvm_dir = self.os_nvm_dir
prefix = ''
nvmsh = Path(nvm_dir).joinpath('nvm.sh')
command = f'{prefix}/bin/bash -c "source {nvmsh} && nvm install 22"'
output = subprocess.run(command, shell = True, capture_output=True)
def _windows():
subprocess.run(["powershell.exe", SOURCE_FNM, F"fnm use --fnm-dir {self.dir} --install-if-missing 22"])
print(F'Installing node.js...')
run_for_platform(linux_func = _linux, windows_func=_windows, macos_func=_linux)
print("Node.js installed")
def install_viewer(self):
def _linux():
# Adds local binaries to the path in case we had to install node first (locally into the qim folder), but shouldn't interfere even if
# npm is installed globally
command = F'export PATH="$PATH:{get_node_binaries_dir(self.nvm_dir)}" && npm install --prefix {self.viewer_dir} itk-vtk-viewer'
output = subprocess.run([command], shell=True, capture_output=True)
# print(output.stderr)
def _windows():
try:
node_bin = get_node_binaries_dir(self.dir)
print(F'Installing into {self.viewer_dir}')
subprocess.run(["powershell.exe", F'$env:PATH=$env:PATH + \';{node_bin}\';', F"npm install --prefix {self.viewer_dir} itk-vtk-viewer"], capture_output=True)
except NotInstalledError: # Not installed in qim
subprocess.run(["powershell.exe", SOURCE_FNM, F"npm install itk-vtk-viewer"], capture_output=True)
self.viewer_dir = get_viewer_dir(self.dir)
if not os.path.isdir(self.viewer_dir):
os.mkdir(self.viewer_dir)
print(F"Installing itk-vtk-viewer...")
run_for_platform(linux_func=_linux, windows_func=_windows, macos_func=_linux)
print("Itk-vtk-viewer installed")
\ No newline at end of file
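A hedged sketch of how this installer is meant to be driven, mirroring the flow described in run.py where a NotInstalledError from the viewer triggers an installation; the .zarr path is a placeholder.

```python
from qim3d.viz import itk_vtk, Installer, NotInstalledError

try:
    itk_vtk("sample.zarr")  # placeholder path to an existing OME-Zarr store
except NotInstalledError:
    # Viewer not found globally or inside the qim3d package: install it locally, then retry
    Installer().install()
    itk_vtk("sample.zarr")
```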
import subprocess
import platform
from pathlib import Path
import os
import qim3d.utils
from qim3d.utils.logger import log
# from .helpers import get_qim_dir, get_nvm_dir, get_viewer_binaries, get_viewer_dir, get_node_binaries_dir, NotInstalledError, SOURCE_FNM
from .helpers import *
import webbrowser
import threading
import time
# Start viewer
START_COMMAND = "itk-vtk-viewer -s"
# Lock, so two threads can safely read and write to is_installed
c = threading.Condition()
is_installed = True
def run_global(port=3000):
linux_func = lambda: subprocess.run(
START_COMMAND+f" -p {port}", shell=True, stderr=subprocess.DEVNULL
)
# First source node.js via fnm; if that doesn't help and the user would have to source it some other way, an error is thrown and installing the viewer into the qim library is suggested
windows_func = lambda: subprocess.run(
["powershell.exe", SOURCE_FNM, START_COMMAND+f" -p {port}"],
shell=True,
stderr=subprocess.DEVNULL,
)
run_for_platform(
linux_func=linux_func, windows_func=windows_func, macos_func=linux_func
)
def run_within_qim_dir(port=3000):
dir = get_itk_dir()
viewer_dir = get_viewer_dir(dir)
viewer_bin = get_viewer_binaries(viewer_dir)
def linux_func():
# Looks for node binaries installed in qim3d/viz/itk_vtk_viewer/.nvm
node_bin = get_node_binaries_dir(get_nvm_dir(dir))
if node_bin is None:
# Didn't find node binaries there, so look for the environment variable that says where the nvm folder is
node_bin = get_node_binaries_dir(Path(str(os.getenv("NVM_DIR"))))
if node_bin is not None:
subprocess.run(
f'export PATH="$PATH:{viewer_bin}:{node_bin}" && {START_COMMAND+f" -p {port}"}',
shell=True,
stderr=subprocess.DEVNULL,
)
def windows_func():
node_bin = get_node_binaries_dir(dir)
if node_bin is not None:
subprocess.run(
[
"powershell.exe",
f"$env:PATH = $env:PATH + ';{viewer_bin};{node_bin}';",
START_COMMAND+f" -p {port}",
],
stderr=subprocess.DEVNULL,
)
run_for_platform(
linux_func=linux_func, windows_func=windows_func, macos_func=linux_func
)
def itk_vtk(
filename: str = None,
open_browser: bool = True,
file_server_port: int = 8042,
viewer_port: int = 3000,
):
"""
Opens a visualization window using the itk-vtk-viewer. Works both for common file types (Tiff, Nifti, etc.) and for **OME-Zarr stores**.
This function starts the itk-vtk-viewer, either using a global
installation or a local installation within the QIM package. It also starts
an HTTP server to serve the file to the viewer. Optionally, it can
automatically open a browser window to display the viewer. If the viewer
is not installed, it raises a NotInstalledError.
Args:
filename (str, optional): Path to the file or OME-Zarr store to be visualized. Trailing slashes in
the path are normalized. Defaults to None.
open_browser (bool, optional): If True, opens the visualization in a new browser tab.
Defaults to True.
file_server_port (int, optional): The port number for the local file server that hosts
the store. Defaults to 8042.
viewer_port (int, optional): The port number for the itk-vtk-viewer server. Defaults to 3000.
Raises:
NotInstalledError: Raised if the itk-vtk-viewer is not installed in the expected location.
Example:
```python
import qim3d
# Download data
downloader = qim3d.io.Downloader()
data = downloader.Okinawa_Forams.Okinawa_Foram_1(load_file=True, virtual_stack=True)
# Export to OME-Zarr
qim3d.io.export_ome_zarr("Okinawa_Foram_1.zarr", data)
# Start visualization
qim3d.viz.itk_vtk("Okinawa_Foram_1.zarr")
```
<pre style="margin-left: 12px; margin-right: 12px; color:#454545">
Downloading Okinawa_Foram_1.tif
https://archive.compute.dtu.dk/download/public/projects/viscomp_data_repository/Okinawa_Forams/Okinawa_Foram_1.tif
1.85GB [00:17, 111MB/s]
Loading Okinawa_Foram_1.tif
Loading: 100%
1.85GB/1.85GB [00:02<00:00, 762MB/s]
Loaded shape: (995, 1014, 984)
Using virtual stack
Exporting data to OME-Zarr format at Okinawa_Foram_1.zarr
Number of scales: 5
Creating a multi-scale pyramid
- Scale 0: (995, 1014, 984)
- Scale 1: (498, 507, 492)
- Scale 2: (249, 254, 246)
- Scale 3: (124, 127, 123)
- Scale 4: (62, 63, 62)
Writing data to disk
All done!
itk-vtk-viewer
=> Serving /home/fima/Notebooks/Qim3d on port 3000
enp0s31f6 => http://10.52.0.158:3000/
wlp0s20f3 => http://10.197.104.229:3000/
Serving directory '/home/fima/Notebooks/Qim3d'
http://localhost:8042/
Visualization url:
http://localhost:3000/?rotate=false&fileToLoad=http://localhost:8042/Okinawa_Foram_1.zarr
</pre>
![itk-vtk-viewer](assets/screenshots/itk-vtk-viewer.gif)
"""
global is_installed
# This might seem redundant, but it is here in case we have to go through the installation first
# If we have to install first, this variable is set to False and doesn't disappear
# So when we run the newly installed viewer, it would still be False and the webbrowser wouldn't open
c.acquire()
is_installed = True
c.release()
# We do a delayed open of the browser, just so that the itk-vtk-viewer has time to start.
# Timing is not critical, this is just so that the user does not see the "server cannot be reached" page
def delayed_open():
time.sleep(3)
global is_installed
c.acquire()
if is_installed:
# Normalize the filename. This is necessary for trailing slashes by the end of the path
filename_norm = os.path.normpath(os.path.abspath(filename))
# Start the http server
qim3d.utils.start_http_server(
os.path.dirname(filename_norm), port=file_server_port
)
viz_url = f"http://localhost:{viewer_port}/?rotate=false&fileToLoad=http://localhost:{file_server_port}/{os.path.basename(filename_norm)}"
if open_browser:
webbrowser.open_new_tab(viz_url)
log.info(f"\nVisualization url:\n{viz_url}\n")
c.release()
# Start the delayed open in a separate thread
delayed_window = threading.Thread(target=delayed_open)
delayed_window.start()
# First try if the user doesn't have it globally
run_global(port=viewer_port)
# Then try to also find node.js installed in qim package
run_within_qim_dir(port=viewer_port)
# If we got to this part, it means that the viewer is not installed and we don't want to
# open the browser with a non-working window
# We set the flag is_installed to False, which will be read in the other thread to let it know not to open the browser
c.acquire()
is_installed = False
c.release()
delayed_window.join()
# If we still get an error, the viewer is not installed in the location we expect it, and we have to raise an error
# which will be caught in the command line interface, which will then ask for installation
raise NotInstalledError
......@@ -14,13 +14,13 @@ from qim3d.utils.misc import downscale_img, scale_to_float16
def vol(
img,
vmin=None,
vmax=None,
aspectmode="data",
show=True,
save=False,
grid_visible=False,
cmap=None,
vmin=None,
vmax=None,
samples="auto",
max_voxels=512**3,
data_type="scaled_float16",
......@@ -41,8 +41,12 @@ def vol(
file will be saved. Defaults to False.
grid_visible (bool, optional): If True, the grid is visible in the plot. Defaults to False.
cmap (list, optional): The color map to be used for the volume rendering. Defaults to None.
vmin (float, optional): Together with vmax, defines the data range the colormap covers. By default, the colormap covers the full range. Defaults to None.
vmax (float, optional): Together with vmin, defines the data range the colormap covers. By default, the colormap covers the full range. Defaults to None.
samples (int, optional): The number of samples to be used for the volume rendering in k3d. Defaults to 512.
Lower values will render faster but with lower quality.
max_voxels (int, optional): Defaults to 512^3.
data_type (str, optional): Defaults to 'scaled_float16'.
**kwargs: Additional keyword arguments to be passed to the `k3d.plot` function.
Returns:
......
......@@ -18,6 +18,9 @@ def vectors(
volume: np.ndarray,
vec: np.ndarray,
axis: int = 0,
volume_cmap:str = 'grey',
vmin:float = None,
vmax:float = None,
slice_idx: Optional[Union[int, float]] = None,
grid_size: int = 10,
interactive: bool = True,
......@@ -31,6 +34,9 @@ def vectors(
volume (np.ndarray): The 3D volume to be sliced.
vec (np.ndarray): The eigenvectors of the structure tensor.
axis (int, optional): The axis along which to visualize the orientation. Defaults to 0.
volume_cmap (str, optional): Specifies the colormap used to display the volume. Defaults to 'grey'.
vmin (float, optional): Together with vmax, defines the data range the colormap covers. By default, the colormap covers the full range. Defaults to None.
vmax (float, optional): Together with vmin, defines the data range the colormap covers. By default, the colormap covers the full range. Defaults to None.
slice_idx (int or float, optional): The initial slice to be visualized. The slice index
can afterwards be changed. If value is an integer, it will be the index of the slice
to be visualized. If value is a float between 0 and 1, it will be multiplied by the
......@@ -169,7 +175,7 @@ def vectors(
angles="xy",
)
ax[0].imshow(data_slice, cmap=plt.cm.gray)
ax[0].imshow(data_slice, cmap = volume_cmap, vmin = vmin, vmax = vmax)
ax[0].set_title(
f"Orientation vectors (slice {slice_idx})"
if not interactive
......