diff --git a/poetry.lock b/poetry.lock
index d5f2fc40..0c2fc92d 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -183,18 +183,6 @@ yfinance = ["yfinance"]
youtube = ["youtube_transcript_api"]
zep = ["zep-cloud"]
-[[package]]
-name = "aiofiles"
-version = "25.1.0"
-description = "File support for asyncio."
-optional = false
-python-versions = ">=3.9"
-groups = ["main"]
-files = [
- {file = "aiofiles-25.1.0-py3-none-any.whl", hash = "sha256:abe311e527c862958650f9438e859c1fa7568a141b22abcd015e120e86a85695"},
- {file = "aiofiles-25.1.0.tar.gz", hash = "sha256:a8d728f0a29de45dc521f18f07297428d56992a742f0cd2701ba86e44d23d5b2"},
-]
-
[[package]]
name = "aiohappyeyeballs"
version = "2.6.1"
@@ -721,7 +709,7 @@ pycparser = {version = "*", markers = "implementation_name != \"PyPy\""}
name = "charset-normalizer"
version = "3.4.4"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
-optional = false
+optional = true
python-versions = ">=3.7"
groups = ["main"]
files = [
@@ -1056,18 +1044,6 @@ markers = {main = "extra == \"dev\""}
[package.extras]
toml = ["tomli ; python_full_version <= \"3.11.0a6\""]
-[[package]]
-name = "decorator"
-version = "5.2.1"
-description = "Decorators for Humans"
-optional = false
-python-versions = ">=3.8"
-groups = ["main"]
-files = [
- {file = "decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a"},
- {file = "decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360"},
-]
-
[[package]]
name = "discord-py"
version = "2.7.0"
@@ -1165,9 +1141,10 @@ standard-no-fastapi-cloud-cli = ["email-validator (>=2.0.0)", "fastapi-cli[stand
name = "filelock"
version = "3.24.3"
description = "A platform independent file lock."
-optional = false
+optional = true
python-versions = ">=3.10"
groups = ["main"]
+markers = "extra == \"bigbrain\" or extra == \"embeddings\""
files = [
{file = "filelock-3.24.3-py3-none-any.whl", hash = "sha256:426e9a4660391f7f8a810d71b0555bce9008b0a1cc342ab1f6947d37639e002d"},
{file = "filelock-3.24.3.tar.gz", hash = "sha256:011a5644dc937c22699943ebbfc46e969cdde3e171470a6e40b9533e5a72affa"},
@@ -1318,9 +1295,10 @@ files = [
name = "fsspec"
version = "2026.2.0"
description = "File-system specification"
-optional = false
+optional = true
python-versions = ">=3.10"
groups = ["main"]
+markers = "extra == \"bigbrain\" or extra == \"embeddings\""
files = [
{file = "fsspec-2026.2.0-py3-none-any.whl", hash = "sha256:98de475b5cb3bd66bedd5c4679e87b4fdfe1a3bf4d707b151b3c07e58c9a2437"},
{file = "fsspec-2026.2.0.tar.gz", hash = "sha256:6544e34b16869f5aacd5b90bdf1a71acb37792ea3ddf6125ee69a22a53fb8bff"},
@@ -1488,10 +1466,10 @@ hyperframe = ">=6.1,<7"
name = "hf-xet"
version = "1.3.2"
description = "Fast transfer of large files with the Hugging Face Hub."
-optional = false
+optional = true
python-versions = ">=3.8"
groups = ["main"]
-markers = "platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"arm64\" or platform_machine == \"aarch64\""
+markers = "(extra == \"bigbrain\" or extra == \"embeddings\") and (platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"arm64\" or platform_machine == \"aarch64\")"
files = [
{file = "hf_xet-1.3.2-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:335a8f36c55fd35a92d0062f4e9201b4015057e62747b7e7001ffb203c0ee1d2"},
{file = "hf_xet-1.3.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:c1ae4d3a716afc774e66922f3cac8206bfa707db13f6a7e62dfff74bfc95c9a8"},
@@ -1640,9 +1618,10 @@ zstd = ["zstandard (>=0.18.0)"]
name = "huggingface-hub"
version = "1.5.0"
description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub"
-optional = false
+optional = true
python-versions = ">=3.9.0"
groups = ["main"]
+markers = "extra == \"bigbrain\" or extra == \"embeddings\""
files = [
{file = "huggingface_hub-1.5.0-py3-none-any.whl", hash = "sha256:c9c0b3ab95a777fc91666111f3b3ede71c0cdced3614c553a64e98920585c4ee"},
{file = "huggingface_hub-1.5.0.tar.gz", hash = "sha256:f281838db29265880fb543de7a23b0f81d3504675de82044307ea3c6c62f799d"},
@@ -1698,57 +1677,6 @@ files = [
[package.extras]
all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"]
-[[package]]
-name = "imageio"
-version = "2.37.2"
-description = "Read and write images and video across all major formats. Supports scientific and volumetric data."
-optional = false
-python-versions = ">=3.9"
-groups = ["main"]
-files = [
- {file = "imageio-2.37.2-py3-none-any.whl", hash = "sha256:ad9adfb20335d718c03de457358ed69f141021a333c40a53e57273d8a5bd0b9b"},
- {file = "imageio-2.37.2.tar.gz", hash = "sha256:0212ef2727ac9caa5ca4b2c75ae89454312f440a756fcfc8ef1993e718f50f8a"},
-]
-
-[package.dependencies]
-numpy = "*"
-pillow = ">=8.3.2"
-
-[package.extras]
-all-plugins = ["astropy", "av", "fsspec[http]", "imageio-ffmpeg", "numpy (>2)", "pillow-heif", "psutil", "rawpy", "tifffile"]
-all-plugins-pypy = ["fsspec[http]", "imageio-ffmpeg", "pillow-heif", "psutil", "tifffile"]
-dev = ["black", "flake8", "fsspec[github]", "pytest", "pytest-cov"]
-docs = ["numpydoc", "pydata-sphinx-theme", "sphinx (<6)"]
-ffmpeg = ["imageio-ffmpeg", "psutil"]
-fits = ["astropy"]
-freeimage = ["fsspec[http]"]
-full = ["astropy", "av", "black", "flake8", "fsspec[github,http]", "imageio-ffmpeg", "numpy (>2)", "numpydoc", "pillow-heif", "psutil", "pydata-sphinx-theme", "pytest", "pytest-cov", "rawpy", "sphinx (<6)", "tifffile"]
-gdal = ["gdal"]
-itk = ["itk"]
-linting = ["black", "flake8"]
-pillow-heif = ["pillow-heif"]
-pyav = ["av"]
-rawpy = ["numpy (>2)", "rawpy"]
-test = ["fsspec[github]", "pytest", "pytest-cov"]
-tifffile = ["tifffile"]
-
-[[package]]
-name = "imageio-ffmpeg"
-version = "0.6.0"
-description = "FFMPEG wrapper for Python"
-optional = false
-python-versions = ">=3.9"
-groups = ["main"]
-files = [
- {file = "imageio_ffmpeg-0.6.0-py3-none-macosx_10_9_intel.macosx_10_9_x86_64.whl", hash = "sha256:9d2baaf867088508d4a3458e61eeb30e945c4ad8016025545f66c4b5aaef0a61"},
- {file = "imageio_ffmpeg-0.6.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:b1ae3173414b5fc5f538a726c4e48ea97edc0d2cdc11f103afee655c463fa742"},
- {file = "imageio_ffmpeg-0.6.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:1d47bebd83d2c5fc770720d211855f208af8a596c82d17730aa51e815cdee6dc"},
- {file = "imageio_ffmpeg-0.6.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:c7e46fcec401dd990405049d2e2f475e2b397779df2519b544b8aab515195282"},
- {file = "imageio_ffmpeg-0.6.0-py3-none-win32.whl", hash = "sha256:196faa79366b4a82f95c0f4053191d2013f4714a715780f0ad2a68ff37483cc2"},
- {file = "imageio_ffmpeg-0.6.0-py3-none-win_amd64.whl", hash = "sha256:02fa47c83703c37df6bfe4896aab339013f62bf02c5ebf2dce6da56af04ffc0a"},
- {file = "imageio_ffmpeg-0.6.0.tar.gz", hash = "sha256:e2556bed8e005564a9f925bb7afa4002d82770d6b08825078b7697ab88ba1755"},
-]
-
[[package]]
name = "iniconfig"
version = "2.3.0"
@@ -1896,9 +1824,10 @@ files = [
name = "joblib"
version = "1.5.3"
description = "Lightweight pipelining with Python functions"
-optional = false
+optional = true
python-versions = ">=3.9"
groups = ["main"]
+markers = "extra == \"embeddings\""
files = [
{file = "joblib-1.5.3-py3-none-any.whl", hash = "sha256:5fc3c5039fc5ca8c0276333a188bbd59d6b7ab37fe6632daa76bc7f9ec18e713"},
{file = "joblib-1.5.3.tar.gz", hash = "sha256:8561a3269e6801106863fd0d6d84bb737be9e7631e33aaed3fb9ce5953688da3"},
@@ -2076,39 +2005,14 @@ files = [
{file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"},
]
-[[package]]
-name = "moviepy"
-version = "2.2.1"
-description = "Video editing with Python"
-optional = false
-python-versions = "*"
-groups = ["main"]
-files = [
- {file = "moviepy-2.2.1-py3-none-any.whl", hash = "sha256:6b56803fec2ac54b557404126ac1160e65448e03798fa282bd23e8fab3795060"},
- {file = "moviepy-2.2.1.tar.gz", hash = "sha256:c80cb56815ece94e5e3e2d361aa40070eeb30a09d23a24c4e684d03e16deacb1"},
-]
-
-[package.dependencies]
-decorator = ">=4.0.2,<6.0"
-imageio = ">=2.5,<3.0"
-imageio_ffmpeg = ">=0.2.0"
-numpy = ">=1.25.0"
-pillow = ">=9.2.0,<12.0"
-proglog = "<=1.0.0"
-python-dotenv = ">=0.10"
-
-[package.extras]
-doc = ["Sphinx (==6.*)", "numpydoc (<2.0)", "pydata-sphinx-theme (==0.13)", "sphinx_design"]
-lint = ["black (>=23.7.0)", "flake8 (>=6.0.0)", "flake8-absolute-import (>=1.0)", "flake8-docstrings (>=1.7.0)", "flake8-implicit-str-concat (==0.4.0)", "flake8-rst-docstrings (>=0.3)", "isort (>=5.12)", "pre-commit (>=3.3)"]
-test = ["coveralls (>=3.0,<4.0)", "pytest (>=3.0.0,<7.0.0)", "pytest-cov (>=2.5.1,<3.0)"]
-
[[package]]
name = "mpmath"
version = "1.3.0"
description = "Python library for arbitrary-precision floating-point arithmetic"
-optional = false
+optional = true
python-versions = "*"
groups = ["main"]
+markers = "extra == \"bigbrain\" or extra == \"embeddings\""
files = [
{file = "mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c"},
{file = "mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f"},
@@ -2281,9 +2185,10 @@ files = [
name = "networkx"
version = "3.6"
description = "Python package for creating and manipulating graphs and networks"
-optional = false
+optional = true
python-versions = ">=3.11"
groups = ["main"]
+markers = "extra == \"bigbrain\" or extra == \"embeddings\""
files = [
{file = "networkx-3.6-py3-none-any.whl", hash = "sha256:cdb395b105806062473d3be36458d8f1459a4e4b98e236a66c3a48996e07684f"},
{file = "networkx-3.6.tar.gz", hash = "sha256:285276002ad1f7f7da0f7b42f004bcba70d381e936559166363707fdad3d72ad"},
@@ -2304,9 +2209,10 @@ test-extras = ["pytest-mpl", "pytest-randomly"]
name = "numpy"
version = "2.4.2"
description = "Fundamental package for array computing in Python"
-optional = false
+optional = true
python-versions = ">=3.11"
groups = ["main"]
+markers = "extra == \"bigbrain\" or extra == \"embeddings\""
files = [
{file = "numpy-2.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e7e88598032542bd49af7c4747541422884219056c268823ef6e5e89851c8825"},
{file = "numpy-2.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7edc794af8b36ca37ef5fcb5e0d128c7e0595c7b96a2318d1badb6fcd8ee86b1"},
@@ -2386,10 +2292,10 @@ files = [
name = "nvidia-cublas-cu12"
version = "12.6.4.1"
description = "CUBLAS native runtime libraries"
-optional = false
+optional = true
python-versions = ">=3"
groups = ["main"]
-markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""
+markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and (extra == \"bigbrain\" or extra == \"embeddings\")"
files = [
{file = "nvidia_cublas_cu12-12.6.4.1-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:08ed2686e9875d01b58e3cb379c6896df8e76c75e0d4a7f7dace3d7b6d9ef8eb"},
{file = "nvidia_cublas_cu12-12.6.4.1-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:235f728d6e2a409eddf1df58d5b0921cf80cfa9e72b9f2775ccb7b4a87984668"},
@@ -2400,10 +2306,10 @@ files = [
name = "nvidia-cuda-cupti-cu12"
version = "12.6.80"
description = "CUDA profiling tools runtime libs."
-optional = false
+optional = true
python-versions = ">=3"
groups = ["main"]
-markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""
+markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and (extra == \"bigbrain\" or extra == \"embeddings\")"
files = [
{file = "nvidia_cuda_cupti_cu12-12.6.80-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:166ee35a3ff1587f2490364f90eeeb8da06cd867bd5b701bf7f9a02b78bc63fc"},
{file = "nvidia_cuda_cupti_cu12-12.6.80-py3-none-manylinux2014_aarch64.whl", hash = "sha256:358b4a1d35370353d52e12f0a7d1769fc01ff74a191689d3870b2123156184c4"},
@@ -2416,10 +2322,10 @@ files = [
name = "nvidia-cuda-nvrtc-cu12"
version = "12.6.77"
description = "NVRTC native runtime libraries"
-optional = false
+optional = true
python-versions = ">=3"
groups = ["main"]
-markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""
+markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and (extra == \"bigbrain\" or extra == \"embeddings\")"
files = [
{file = "nvidia_cuda_nvrtc_cu12-12.6.77-py3-none-manylinux2014_aarch64.whl", hash = "sha256:5847f1d6e5b757f1d2b3991a01082a44aad6f10ab3c5c0213fa3e25bddc25a13"},
{file = "nvidia_cuda_nvrtc_cu12-12.6.77-py3-none-manylinux2014_x86_64.whl", hash = "sha256:35b0cc6ee3a9636d5409133e79273ce1f3fd087abb0532d2d2e8fff1fe9efc53"},
@@ -2430,10 +2336,10 @@ files = [
name = "nvidia-cuda-runtime-cu12"
version = "12.6.77"
description = "CUDA Runtime native Libraries"
-optional = false
+optional = true
python-versions = ">=3"
groups = ["main"]
-markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""
+markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and (extra == \"bigbrain\" or extra == \"embeddings\")"
files = [
{file = "nvidia_cuda_runtime_cu12-12.6.77-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:6116fad3e049e04791c0256a9778c16237837c08b27ed8c8401e2e45de8d60cd"},
{file = "nvidia_cuda_runtime_cu12-12.6.77-py3-none-manylinux2014_aarch64.whl", hash = "sha256:d461264ecb429c84c8879a7153499ddc7b19b5f8d84c204307491989a365588e"},
@@ -2446,10 +2352,10 @@ files = [
name = "nvidia-cudnn-cu12"
version = "9.5.1.17"
description = "cuDNN runtime libraries"
-optional = false
+optional = true
python-versions = ">=3"
groups = ["main"]
-markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""
+markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and (extra == \"bigbrain\" or extra == \"embeddings\")"
files = [
{file = "nvidia_cudnn_cu12-9.5.1.17-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:9fd4584468533c61873e5fda8ca41bac3a38bcb2d12350830c69b0a96a7e4def"},
{file = "nvidia_cudnn_cu12-9.5.1.17-py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:30ac3869f6db17d170e0e556dd6cc5eee02647abc31ca856634d5a40f82c15b2"},
@@ -2463,10 +2369,10 @@ nvidia-cublas-cu12 = "*"
name = "nvidia-cufft-cu12"
version = "11.3.0.4"
description = "CUFFT native runtime libraries"
-optional = false
+optional = true
python-versions = ">=3"
groups = ["main"]
-markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""
+markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and (extra == \"bigbrain\" or extra == \"embeddings\")"
files = [
{file = "nvidia_cufft_cu12-11.3.0.4-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d16079550df460376455cba121db6564089176d9bac9e4f360493ca4741b22a6"},
{file = "nvidia_cufft_cu12-11.3.0.4-py3-none-manylinux2014_aarch64.whl", hash = "sha256:8510990de9f96c803a051822618d42bf6cb8f069ff3f48d93a8486efdacb48fb"},
@@ -2482,10 +2388,10 @@ nvidia-nvjitlink-cu12 = "*"
name = "nvidia-cufile-cu12"
version = "1.11.1.6"
description = "cuFile GPUDirect libraries"
-optional = false
+optional = true
python-versions = ">=3"
groups = ["main"]
-markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""
+markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and (extra == \"bigbrain\" or extra == \"embeddings\")"
files = [
{file = "nvidia_cufile_cu12-1.11.1.6-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cc23469d1c7e52ce6c1d55253273d32c565dd22068647f3aa59b3c6b005bf159"},
{file = "nvidia_cufile_cu12-1.11.1.6-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:8f57a0051dcf2543f6dc2b98a98cb2719c37d3cee1baba8965d57f3bbc90d4db"},
@@ -2495,10 +2401,10 @@ files = [
name = "nvidia-curand-cu12"
version = "10.3.7.77"
description = "CURAND native runtime libraries"
-optional = false
+optional = true
python-versions = ">=3"
groups = ["main"]
-markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""
+markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and (extra == \"bigbrain\" or extra == \"embeddings\")"
files = [
{file = "nvidia_curand_cu12-10.3.7.77-py3-none-manylinux2014_aarch64.whl", hash = "sha256:6e82df077060ea28e37f48a3ec442a8f47690c7499bff392a5938614b56c98d8"},
{file = "nvidia_curand_cu12-10.3.7.77-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a42cd1344297f70b9e39a1e4f467a4e1c10f1da54ff7a85c12197f6c652c8bdf"},
@@ -2511,10 +2417,10 @@ files = [
name = "nvidia-cusolver-cu12"
version = "11.7.1.2"
description = "CUDA solver native runtime libraries"
-optional = false
+optional = true
python-versions = ">=3"
groups = ["main"]
-markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""
+markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and (extra == \"bigbrain\" or extra == \"embeddings\")"
files = [
{file = "nvidia_cusolver_cu12-11.7.1.2-py3-none-manylinux2014_aarch64.whl", hash = "sha256:0ce237ef60acde1efc457335a2ddadfd7610b892d94efee7b776c64bb1cac9e0"},
{file = "nvidia_cusolver_cu12-11.7.1.2-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e9e49843a7707e42022babb9bcfa33c29857a93b88020c4e4434656a655b698c"},
@@ -2532,10 +2438,10 @@ nvidia-nvjitlink-cu12 = "*"
name = "nvidia-cusparse-cu12"
version = "12.5.4.2"
description = "CUSPARSE native runtime libraries"
-optional = false
+optional = true
python-versions = ">=3"
groups = ["main"]
-markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""
+markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and (extra == \"bigbrain\" or extra == \"embeddings\")"
files = [
{file = "nvidia_cusparse_cu12-12.5.4.2-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d25b62fb18751758fe3c93a4a08eff08effedfe4edf1c6bb5afd0890fe88f887"},
{file = "nvidia_cusparse_cu12-12.5.4.2-py3-none-manylinux2014_aarch64.whl", hash = "sha256:7aa32fa5470cf754f72d1116c7cbc300b4e638d3ae5304cfa4a638a5b87161b1"},
@@ -2551,10 +2457,10 @@ nvidia-nvjitlink-cu12 = "*"
name = "nvidia-cusparselt-cu12"
version = "0.6.3"
description = "NVIDIA cuSPARSELt"
-optional = false
+optional = true
python-versions = "*"
groups = ["main"]
-markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""
+markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and (extra == \"bigbrain\" or extra == \"embeddings\")"
files = [
{file = "nvidia_cusparselt_cu12-0.6.3-py3-none-manylinux2014_aarch64.whl", hash = "sha256:8371549623ba601a06322af2133c4a44350575f5a3108fb75f3ef20b822ad5f1"},
{file = "nvidia_cusparselt_cu12-0.6.3-py3-none-manylinux2014_x86_64.whl", hash = "sha256:e5c8a26c36445dd2e6812f1177978a24e2d37cacce7e090f297a688d1ec44f46"},
@@ -2565,10 +2471,10 @@ files = [
name = "nvidia-nccl-cu12"
version = "2.26.2"
description = "NVIDIA Collective Communication Library (NCCL) Runtime"
-optional = false
+optional = true
python-versions = ">=3"
groups = ["main"]
-markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""
+markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and (extra == \"bigbrain\" or extra == \"embeddings\")"
files = [
{file = "nvidia_nccl_cu12-2.26.2-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5c196e95e832ad30fbbb50381eb3cbd1fadd5675e587a548563993609af19522"},
{file = "nvidia_nccl_cu12-2.26.2-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:694cf3879a206553cc9d7dbda76b13efaf610fdb70a50cba303de1b0d1530ac6"},
@@ -2578,10 +2484,10 @@ files = [
name = "nvidia-nvjitlink-cu12"
version = "12.6.85"
description = "Nvidia JIT LTO Library"
-optional = false
+optional = true
python-versions = ">=3"
groups = ["main"]
-markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""
+markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and (extra == \"bigbrain\" or extra == \"embeddings\")"
files = [
{file = "nvidia_nvjitlink_cu12-12.6.85-py3-none-manylinux2010_x86_64.manylinux_2_12_x86_64.whl", hash = "sha256:eedc36df9e88b682efe4309aa16b5b4e78c2407eac59e8c10a6a47535164369a"},
{file = "nvidia_nvjitlink_cu12-12.6.85-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cf4eaa7d4b6b543ffd69d6abfb11efdeb2db48270d94dfd3a452c24150829e41"},
@@ -2592,10 +2498,10 @@ files = [
name = "nvidia-nvtx-cu12"
version = "12.6.77"
description = "NVIDIA Tools Extension"
-optional = false
+optional = true
python-versions = ">=3"
groups = ["main"]
-markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""
+markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and (extra == \"bigbrain\" or extra == \"embeddings\")"
files = [
{file = "nvidia_nvtx_cu12-12.6.77-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f44f8d86bb7d5629988d61c8d3ae61dddb2015dee142740536bc7481b022fe4b"},
{file = "nvidia_nvtx_cu12-12.6.77-py3-none-manylinux2014_aarch64.whl", hash = "sha256:adcaabb9d436c9761fca2b13959a2d237c5f9fd406c8e4b723c695409ff88059"},
@@ -2716,131 +2622,6 @@ files = [
{file = "packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4"},
]
-[[package]]
-name = "pillow"
-version = "11.3.0"
-description = "Python Imaging Library (Fork)"
-optional = false
-python-versions = ">=3.9"
-groups = ["main"]
-files = [
- {file = "pillow-11.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1b9c17fd4ace828b3003dfd1e30bff24863e0eb59b535e8f80194d9cc7ecf860"},
- {file = "pillow-11.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:65dc69160114cdd0ca0f35cb434633c75e8e7fad4cf855177a05bf38678f73ad"},
- {file = "pillow-11.3.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7107195ddc914f656c7fc8e4a5e1c25f32e9236ea3ea860f257b0436011fddd0"},
- {file = "pillow-11.3.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cc3e831b563b3114baac7ec2ee86819eb03caa1a2cef0b481a5675b59c4fe23b"},
- {file = "pillow-11.3.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f1f182ebd2303acf8c380a54f615ec883322593320a9b00438eb842c1f37ae50"},
- {file = "pillow-11.3.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4445fa62e15936a028672fd48c4c11a66d641d2c05726c7ec1f8ba6a572036ae"},
- {file = "pillow-11.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:71f511f6b3b91dd543282477be45a033e4845a40278fa8dcdbfdb07109bf18f9"},
- {file = "pillow-11.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:040a5b691b0713e1f6cbe222e0f4f74cd233421e105850ae3b3c0ceda520f42e"},
- {file = "pillow-11.3.0-cp310-cp310-win32.whl", hash = "sha256:89bd777bc6624fe4115e9fac3352c79ed60f3bb18651420635f26e643e3dd1f6"},
- {file = "pillow-11.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:19d2ff547c75b8e3ff46f4d9ef969a06c30ab2d4263a9e287733aa8b2429ce8f"},
- {file = "pillow-11.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:819931d25e57b513242859ce1876c58c59dc31587847bf74cfe06b2e0cb22d2f"},
- {file = "pillow-11.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:1cd110edf822773368b396281a2293aeb91c90a2db00d78ea43e7e861631b722"},
- {file = "pillow-11.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9c412fddd1b77a75aa904615ebaa6001f169b26fd467b4be93aded278266b288"},
- {file = "pillow-11.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1aa4de119a0ecac0a34a9c8bde33f34022e2e8f99104e47a3ca392fd60e37d"},
- {file = "pillow-11.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:91da1d88226663594e3f6b4b8c3c8d85bd504117d043740a8e0ec449087cc494"},
- {file = "pillow-11.3.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:643f189248837533073c405ec2f0bb250ba54598cf80e8c1e043381a60632f58"},
- {file = "pillow-11.3.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:106064daa23a745510dabce1d84f29137a37224831d88eb4ce94bb187b1d7e5f"},
- {file = "pillow-11.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cd8ff254faf15591e724dc7c4ddb6bf4793efcbe13802a4ae3e863cd300b493e"},
- {file = "pillow-11.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:932c754c2d51ad2b2271fd01c3d121daaa35e27efae2a616f77bf164bc0b3e94"},
- {file = "pillow-11.3.0-cp311-cp311-win32.whl", hash = "sha256:b4b8f3efc8d530a1544e5962bd6b403d5f7fe8b9e08227c6b255f98ad82b4ba0"},
- {file = "pillow-11.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:1a992e86b0dd7aeb1f053cd506508c0999d710a8f07b4c791c63843fc6a807ac"},
- {file = "pillow-11.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:30807c931ff7c095620fe04448e2c2fc673fcbb1ffe2a7da3fb39613489b1ddd"},
- {file = "pillow-11.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdae223722da47b024b867c1ea0be64e0df702c5e0a60e27daad39bf960dd1e4"},
- {file = "pillow-11.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:921bd305b10e82b4d1f5e802b6850677f965d8394203d182f078873851dada69"},
- {file = "pillow-11.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:eb76541cba2f958032d79d143b98a3a6b3ea87f0959bbe256c0b5e416599fd5d"},
- {file = "pillow-11.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:67172f2944ebba3d4a7b54f2e95c786a3a50c21b88456329314caaa28cda70f6"},
- {file = "pillow-11.3.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97f07ed9f56a3b9b5f49d3661dc9607484e85c67e27f3e8be2c7d28ca032fec7"},
- {file = "pillow-11.3.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:676b2815362456b5b3216b4fd5bd89d362100dc6f4945154ff172e206a22c024"},
- {file = "pillow-11.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3e184b2f26ff146363dd07bde8b711833d7b0202e27d13540bfe2e35a323a809"},
- {file = "pillow-11.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6be31e3fc9a621e071bc17bb7de63b85cbe0bfae91bb0363c893cbe67247780d"},
- {file = "pillow-11.3.0-cp312-cp312-win32.whl", hash = "sha256:7b161756381f0918e05e7cb8a371fff367e807770f8fe92ecb20d905d0e1c149"},
- {file = "pillow-11.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:a6444696fce635783440b7f7a9fc24b3ad10a9ea3f0ab66c5905be1c19ccf17d"},
- {file = "pillow-11.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:2aceea54f957dd4448264f9bf40875da0415c83eb85f55069d89c0ed436e3542"},
- {file = "pillow-11.3.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:1c627742b539bba4309df89171356fcb3cc5a9178355b2727d1b74a6cf155fbd"},
- {file = "pillow-11.3.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:30b7c02f3899d10f13d7a48163c8969e4e653f8b43416d23d13d1bbfdc93b9f8"},
- {file = "pillow-11.3.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:7859a4cc7c9295f5838015d8cc0a9c215b77e43d07a25e460f35cf516df8626f"},
- {file = "pillow-11.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ec1ee50470b0d050984394423d96325b744d55c701a439d2bd66089bff963d3c"},
- {file = "pillow-11.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7db51d222548ccfd274e4572fdbf3e810a5e66b00608862f947b163e613b67dd"},
- {file = "pillow-11.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2d6fcc902a24ac74495df63faad1884282239265c6839a0a6416d33faedfae7e"},
- {file = "pillow-11.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f0f5d8f4a08090c6d6d578351a2b91acf519a54986c055af27e7a93feae6d3f1"},
- {file = "pillow-11.3.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c37d8ba9411d6003bba9e518db0db0c58a680ab9fe5179f040b0463644bc9805"},
- {file = "pillow-11.3.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13f87d581e71d9189ab21fe0efb5a23e9f28552d5be6979e84001d3b8505abe8"},
- {file = "pillow-11.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:023f6d2d11784a465f09fd09a34b150ea4672e85fb3d05931d89f373ab14abb2"},
- {file = "pillow-11.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:45dfc51ac5975b938e9809451c51734124e73b04d0f0ac621649821a63852e7b"},
- {file = "pillow-11.3.0-cp313-cp313-win32.whl", hash = "sha256:a4d336baed65d50d37b88ca5b60c0fa9d81e3a87d4a7930d3880d1624d5b31f3"},
- {file = "pillow-11.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0bce5c4fd0921f99d2e858dc4d4d64193407e1b99478bc5cacecba2311abde51"},
- {file = "pillow-11.3.0-cp313-cp313-win_arm64.whl", hash = "sha256:1904e1264881f682f02b7f8167935cce37bc97db457f8e7849dc3a6a52b99580"},
- {file = "pillow-11.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4c834a3921375c48ee6b9624061076bc0a32a60b5532b322cc0ea64e639dd50e"},
- {file = "pillow-11.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5e05688ccef30ea69b9317a9ead994b93975104a677a36a8ed8106be9260aa6d"},
- {file = "pillow-11.3.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1019b04af07fc0163e2810167918cb5add8d74674b6267616021ab558dc98ced"},
- {file = "pillow-11.3.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f944255db153ebb2b19c51fe85dd99ef0ce494123f21b9db4877ffdfc5590c7c"},
- {file = "pillow-11.3.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1f85acb69adf2aaee8b7da124efebbdb959a104db34d3a2cb0f3793dbae422a8"},
- {file = "pillow-11.3.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:05f6ecbeff5005399bb48d198f098a9b4b6bdf27b8487c7f38ca16eeb070cd59"},
- {file = "pillow-11.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a7bc6e6fd0395bc052f16b1a8670859964dbd7003bd0af2ff08342eb6e442cfe"},
- {file = "pillow-11.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:83e1b0161c9d148125083a35c1c5a89db5b7054834fd4387499e06552035236c"},
- {file = "pillow-11.3.0-cp313-cp313t-win32.whl", hash = "sha256:2a3117c06b8fb646639dce83694f2f9eac405472713fcb1ae887469c0d4f6788"},
- {file = "pillow-11.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:857844335c95bea93fb39e0fa2726b4d9d758850b34075a7e3ff4f4fa3aa3b31"},
- {file = "pillow-11.3.0-cp313-cp313t-win_arm64.whl", hash = "sha256:8797edc41f3e8536ae4b10897ee2f637235c94f27404cac7297f7b607dd0716e"},
- {file = "pillow-11.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:d9da3df5f9ea2a89b81bb6087177fb1f4d1c7146d583a3fe5c672c0d94e55e12"},
- {file = "pillow-11.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0b275ff9b04df7b640c59ec5a3cb113eefd3795a8df80bac69646ef699c6981a"},
- {file = "pillow-11.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0743841cabd3dba6a83f38a92672cccbd69af56e3e91777b0ee7f4dba4385632"},
- {file = "pillow-11.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2465a69cf967b8b49ee1b96d76718cd98c4e925414ead59fdf75cf0fd07df673"},
- {file = "pillow-11.3.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41742638139424703b4d01665b807c6468e23e699e8e90cffefe291c5832b027"},
- {file = "pillow-11.3.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:93efb0b4de7e340d99057415c749175e24c8864302369e05914682ba642e5d77"},
- {file = "pillow-11.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7966e38dcd0fa11ca390aed7c6f20454443581d758242023cf36fcb319b1a874"},
- {file = "pillow-11.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:98a9afa7b9007c67ed84c57c9e0ad86a6000da96eaa638e4f8abe5b65ff83f0a"},
- {file = "pillow-11.3.0-cp314-cp314-win32.whl", hash = "sha256:02a723e6bf909e7cea0dac1b0e0310be9d7650cd66222a5f1c571455c0a45214"},
- {file = "pillow-11.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:a418486160228f64dd9e9efcd132679b7a02a5f22c982c78b6fc7dab3fefb635"},
- {file = "pillow-11.3.0-cp314-cp314-win_arm64.whl", hash = "sha256:155658efb5e044669c08896c0c44231c5e9abcaadbc5cd3648df2f7c0b96b9a6"},
- {file = "pillow-11.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:59a03cdf019efbfeeed910bf79c7c93255c3d54bc45898ac2a4140071b02b4ae"},
- {file = "pillow-11.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f8a5827f84d973d8636e9dc5764af4f0cf2318d26744b3d902931701b0d46653"},
- {file = "pillow-11.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ee92f2fd10f4adc4b43d07ec5e779932b4eb3dbfbc34790ada5a6669bc095aa6"},
- {file = "pillow-11.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c96d333dcf42d01f47b37e0979b6bd73ec91eae18614864622d9b87bbd5bbf36"},
- {file = "pillow-11.3.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4c96f993ab8c98460cd0c001447bff6194403e8b1d7e149ade5f00594918128b"},
- {file = "pillow-11.3.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41342b64afeba938edb034d122b2dda5db2139b9a4af999729ba8818e0056477"},
- {file = "pillow-11.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:068d9c39a2d1b358eb9f245ce7ab1b5c3246c7c8c7d9ba58cfa5b43146c06e50"},
- {file = "pillow-11.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:a1bc6ba083b145187f648b667e05a2534ecc4b9f2784c2cbe3089e44868f2b9b"},
- {file = "pillow-11.3.0-cp314-cp314t-win32.whl", hash = "sha256:118ca10c0d60b06d006be10a501fd6bbdfef559251ed31b794668ed569c87e12"},
- {file = "pillow-11.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:8924748b688aa210d79883357d102cd64690e56b923a186f35a82cbc10f997db"},
- {file = "pillow-11.3.0-cp314-cp314t-win_arm64.whl", hash = "sha256:79ea0d14d3ebad43ec77ad5272e6ff9bba5b679ef73375ea760261207fa8e0aa"},
- {file = "pillow-11.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:48d254f8a4c776de343051023eb61ffe818299eeac478da55227d96e241de53f"},
- {file = "pillow-11.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7aee118e30a4cf54fdd873bd3a29de51e29105ab11f9aad8c32123f58c8f8081"},
- {file = "pillow-11.3.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:23cff760a9049c502721bdb743a7cb3e03365fafcdfc2ef9784610714166e5a4"},
- {file = "pillow-11.3.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6359a3bc43f57d5b375d1ad54a0074318a0844d11b76abccf478c37c986d3cfc"},
- {file = "pillow-11.3.0-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:092c80c76635f5ecb10f3f83d76716165c96f5229addbd1ec2bdbbda7d496e06"},
- {file = "pillow-11.3.0-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cadc9e0ea0a2431124cde7e1697106471fc4c1da01530e679b2391c37d3fbb3a"},
- {file = "pillow-11.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6a418691000f2a418c9135a7cf0d797c1bb7d9a485e61fe8e7722845b95ef978"},
- {file = "pillow-11.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:97afb3a00b65cc0804d1c7abddbf090a81eaac02768af58cbdcaaa0a931e0b6d"},
- {file = "pillow-11.3.0-cp39-cp39-win32.whl", hash = "sha256:ea944117a7974ae78059fcc1800e5d3295172bb97035c0c1d9345fca1419da71"},
- {file = "pillow-11.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:e5c5858ad8ec655450a7c7df532e9842cf8df7cc349df7225c60d5d348c8aada"},
- {file = "pillow-11.3.0-cp39-cp39-win_arm64.whl", hash = "sha256:6abdbfd3aea42be05702a8dd98832329c167ee84400a1d1f61ab11437f1717eb"},
- {file = "pillow-11.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:3cee80663f29e3843b68199b9d6f4f54bd1d4a6b59bdd91bceefc51238bcb967"},
- {file = "pillow-11.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b5f56c3f344f2ccaf0dd875d3e180f631dc60a51b314295a3e681fe8cf851fbe"},
- {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e67d793d180c9df62f1f40aee3accca4829d3794c95098887edc18af4b8b780c"},
- {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d000f46e2917c705e9fb93a3606ee4a819d1e3aa7a9b442f6444f07e77cf5e25"},
- {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:527b37216b6ac3a12d7838dc3bd75208ec57c1c6d11ef01902266a5a0c14fc27"},
- {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:be5463ac478b623b9dd3937afd7fb7ab3d79dd290a28e2b6df292dc75063eb8a"},
- {file = "pillow-11.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:8dc70ca24c110503e16918a658b869019126ecfe03109b754c402daff12b3d9f"},
- {file = "pillow-11.3.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7c8ec7a017ad1bd562f93dbd8505763e688d388cde6e4a010ae1486916e713e6"},
- {file = "pillow-11.3.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:9ab6ae226de48019caa8074894544af5b53a117ccb9d3b3dcb2871464c829438"},
- {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fe27fb049cdcca11f11a7bfda64043c37b30e6b91f10cb5bab275806c32f6ab3"},
- {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:465b9e8844e3c3519a983d58b80be3f668e2a7a5db97f2784e7079fbc9f9822c"},
- {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5418b53c0d59b3824d05e029669efa023bbef0f3e92e75ec8428f3799487f361"},
- {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:504b6f59505f08ae014f724b6207ff6222662aab5cc9542577fb084ed0676ac7"},
- {file = "pillow-11.3.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c84d689db21a1c397d001aa08241044aa2069e7587b398c8cc63020390b1c1b8"},
- {file = "pillow-11.3.0.tar.gz", hash = "sha256:3828ee7586cd0b2091b6209e5ad53e20d0649bbe87164a459d0676e035e8f523"},
-]
-
-[package.extras]
-docs = ["furo", "olefile", "sphinx (>=8.2)", "sphinx-autobuild", "sphinx-copybutton", "sphinx-inline-tabs", "sphinxext-opengraph"]
-fpx = ["olefile"]
-mic = ["olefile"]
-test-arrow = ["pyarrow"]
-tests = ["check-manifest", "coverage (>=7.4.2)", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "trove-classifiers (>=2024.10.12)"]
-typing = ["typing-extensions ; python_version < \"3.10\""]
-xmp = ["defusedxml"]
-
[[package]]
name = "pluggy"
version = "1.6.0"
@@ -2858,21 +2639,6 @@ markers = {main = "extra == \"dev\""}
dev = ["pre-commit", "tox"]
testing = ["coverage", "pytest", "pytest-benchmark"]
-[[package]]
-name = "proglog"
-version = "0.1.12"
-description = "Log and progress bar manager for console, notebooks, web..."
-optional = false
-python-versions = "*"
-groups = ["main"]
-files = [
- {file = "proglog-0.1.12-py3-none-any.whl", hash = "sha256:ccaafce51e80a81c65dc907a460c07ccb8ec1f78dc660cfd8f9ec3a22f01b84c"},
- {file = "proglog-0.1.12.tar.gz", hash = "sha256:361ee074721c277b89b75c061336cb8c5f287c92b043efa562ccf7866cda931c"},
-]
-
-[package.dependencies]
-tqdm = "*"
-
[[package]]
name = "prompt-toolkit"
version = "3.0.52"
@@ -7197,9 +6963,10 @@ xxhash = ["xxhash (>=3.6.0,<3.7.0)"]
name = "regex"
version = "2026.2.28"
description = "Alternative regular expression module, to replace re."
-optional = false
+optional = true
python-versions = ">=3.10"
groups = ["main"]
+markers = "extra == \"bigbrain\" or extra == \"embeddings\""
files = [
{file = "regex-2026.2.28-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fc48c500838be6882b32748f60a15229d2dea96e59ef341eaa96ec83538f498d"},
{file = "regex-2026.2.28-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2afa673660928d0b63d84353c6c08a8a476ddfc4a47e11742949d182e6863ce8"},
@@ -7321,7 +7088,7 @@ files = [
name = "requests"
version = "2.32.5"
description = "Python HTTP for Humans."
-optional = false
+optional = true
python-versions = ">=3.9"
groups = ["main"]
files = [
@@ -7362,9 +7129,10 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"]
name = "safetensors"
version = "0.7.0"
description = ""
-optional = false
+optional = true
python-versions = ">=3.9"
groups = ["main"]
+markers = "extra == \"bigbrain\" or extra == \"embeddings\""
files = [
{file = "safetensors-0.7.0-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:c82f4d474cf725255d9e6acf17252991c3c8aac038d6ef363a4bf8be2f6db517"},
{file = "safetensors-0.7.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:94fd4858284736bb67a897a41608b5b0c2496c9bdb3bf2af1fa3409127f20d57"},
@@ -7409,9 +7177,10 @@ torch = ["packaging", "safetensors[numpy]", "torch (>=1.10)"]
name = "scikit-learn"
version = "1.8.0"
description = "A set of python modules for machine learning and data mining"
-optional = false
+optional = true
python-versions = ">=3.11"
groups = ["main"]
+markers = "extra == \"embeddings\""
files = [
{file = "scikit_learn-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:146b4d36f800c013d267b29168813f7a03a43ecd2895d04861f1240b564421da"},
{file = "scikit_learn-1.8.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:f984ca4b14914e6b4094c5d52a32ea16b49832c03bd17a110f004db3c223e8e1"},
@@ -7471,9 +7240,10 @@ tests = ["matplotlib (>=3.6.1)", "mypy (>=1.15)", "numpydoc (>=1.2.0)", "pandas
name = "scipy"
version = "1.17.1"
description = "Fundamental algorithms for scientific computing in Python"
-optional = false
+optional = true
python-versions = ">=3.11"
groups = ["main"]
+markers = "extra == \"bigbrain\" or extra == \"embeddings\""
files = [
{file = "scipy-1.17.1-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:1f95b894f13729334fb990162e911c9e5dc1ab390c58aa6cbecb389c5b5e28ec"},
{file = "scipy-1.17.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:e18f12c6b0bc5a592ed23d3f7b891f68fd7f8241d69b7883769eb5d5dfb52696"},
@@ -7571,9 +7341,10 @@ websocket-client = ">=1.8.0,<2.0"
name = "sentence-transformers"
version = "5.2.3"
description = "Embeddings, Retrieval, and Reranking"
-optional = false
+optional = true
python-versions = ">=3.10"
groups = ["main"]
+markers = "extra == \"embeddings\""
files = [
{file = "sentence_transformers-5.2.3-py3-none-any.whl", hash = "sha256:6437c62d4112b615ddebda362dfc16a4308d604c5b68125ed586e3e95d5b2e30"},
{file = "sentence_transformers-5.2.3.tar.gz", hash = "sha256:3cd3044e1f3fe859b6a1b66336aac502eaae5d3dd7d5c8fc237f37fbf58137c7"},
@@ -7601,10 +7372,10 @@ train = ["accelerate (>=0.20.3)", "datasets"]
name = "setuptools"
version = "82.0.0"
description = "Easily download, build, install, upgrade, and uninstall Python packages"
-optional = false
+optional = true
python-versions = ">=3.9"
groups = ["main"]
-markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" or python_version >= \"3.12\""
+markers = "(extra == \"bigbrain\" or extra == \"embeddings\") and (python_version >= \"3.12\" or platform_system == \"Linux\") and (python_version >= \"3.12\" or platform_machine == \"x86_64\")"
files = [
{file = "setuptools-82.0.0-py3-none-any.whl", hash = "sha256:70b18734b607bd1da571d097d236cfcfacaf01de45717d59e6e04b96877532e0"},
{file = "setuptools-82.0.0.tar.gz", hash = "sha256:22e0a2d69474c6ae4feb01951cb69d515ed23728cf96d05513d36e42b62b37cb"},
@@ -7806,9 +7577,10 @@ full = ["httpx (>=0.27.0,<0.29.0)", "itsdangerous", "jinja2", "python-multipart
name = "sympy"
version = "1.14.0"
description = "Computer algebra system (CAS) in Python"
-optional = false
+optional = true
python-versions = ">=3.9"
groups = ["main"]
+markers = "extra == \"bigbrain\" or extra == \"embeddings\""
files = [
{file = "sympy-1.14.0-py3-none-any.whl", hash = "sha256:e091cc3e99d2141a0ba2847328f5479b05d94a6635cb96148ccb3f34671bd8f5"},
{file = "sympy-1.14.0.tar.gz", hash = "sha256:d3d3fe8df1e5a0b42f0e7bdf50541697dbe7d23746e894990c030e2b05e72517"},
@@ -7824,9 +7596,10 @@ dev = ["hypothesis (>=6.70.0)", "pytest (>=7.1.0)"]
name = "threadpoolctl"
version = "3.6.0"
description = "threadpoolctl"
-optional = false
+optional = true
python-versions = ">=3.9"
groups = ["main"]
+markers = "extra == \"embeddings\""
files = [
{file = "threadpoolctl-3.6.0-py3-none-any.whl", hash = "sha256:43a0b8fd5a2928500110039e43a5eed8480b918967083ea48dc3ab9f13c4a7fb"},
{file = "threadpoolctl-3.6.0.tar.gz", hash = "sha256:8ab8b4aa3491d812b623328249fab5302a68d2d71745c8a4c719a2fcaba9f44e"},
@@ -7836,9 +7609,10 @@ files = [
name = "tokenizers"
version = "0.22.2"
description = ""
-optional = false
+optional = true
python-versions = ">=3.9"
groups = ["main"]
+markers = "extra == \"bigbrain\" or extra == \"embeddings\""
files = [
{file = "tokenizers-0.22.2-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:544dd704ae7238755d790de45ba8da072e9af3eea688f698b137915ae959281c"},
{file = "tokenizers-0.22.2-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:1e418a55456beedca4621dbab65a318981467a2b188e982a23e117f115ce5001"},
@@ -7878,9 +7652,10 @@ testing = ["datasets", "numpy", "pytest", "pytest-asyncio", "requests", "ruff",
name = "torch"
version = "2.7.1"
description = "Tensors and Dynamic neural networks in Python with strong GPU acceleration"
-optional = false
+optional = true
python-versions = ">=3.9.0"
groups = ["main"]
+markers = "extra == \"bigbrain\" or extra == \"embeddings\""
files = [
{file = "torch-2.7.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:a103b5d782af5bd119b81dbcc7ffc6fa09904c423ff8db397a1e6ea8fd71508f"},
{file = "torch-2.7.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:fe955951bdf32d182ee8ead6c3186ad54781492bf03d547d31771a01b3d6fb7d"},
@@ -7962,9 +7737,10 @@ telegram = ["requests"]
name = "transformers"
version = "5.2.0"
description = "Transformers: the model-definition framework for state-of-the-art machine learning models in text, vision, audio, and multimodal models, for both inference and training."
-optional = false
+optional = true
python-versions = ">=3.10.0"
groups = ["main"]
+markers = "extra == \"bigbrain\" or extra == \"embeddings\""
files = [
{file = "transformers-5.2.0-py3-none-any.whl", hash = "sha256:9ecaf243dc45bee11a7d93f8caf03746accc0cb069181bbf4ad8566c53e854b4"},
{file = "transformers-5.2.0.tar.gz", hash = "sha256:0088b8b46ccc9eff1a1dca72b5d618a5ee3b1befc3e418c9512b35dea9f9a650"},
@@ -8055,10 +7831,10 @@ wsproto = ">=0.14"
name = "triton"
version = "3.3.1"
description = "A language and compiler for custom Deep Learning operations"
-optional = false
+optional = true
python-versions = "*"
groups = ["main"]
-markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""
+markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and (extra == \"bigbrain\" or extra == \"embeddings\")"
files = [
{file = "triton-3.3.1-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b74db445b1c562844d3cfad6e9679c72e93fdfb1a90a24052b03bb5c49d1242e"},
{file = "triton-3.3.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b31e3aa26f8cb3cc5bf4e187bf737cbacf17311e1112b781d4a059353dfd731b"},
@@ -8098,9 +7874,10 @@ shellingham = ">=1.3.0"
name = "typer-slim"
version = "0.24.0"
description = "Typer, build great CLIs. Easy to code. Based on Python type hints."
-optional = false
+optional = true
python-versions = ">=3.10"
groups = ["main"]
+markers = "extra == \"bigbrain\" or extra == \"embeddings\""
files = [
{file = "typer_slim-0.24.0-py3-none-any.whl", hash = "sha256:d5d7ee1ee2834d5020c7c616ed5e0d0f29b9a4b1dd283bdebae198ec09778d0e"},
{file = "typer_slim-0.24.0.tar.gz", hash = "sha256:f0ed36127183f52ae6ced2ecb2521789995992c521a46083bfcdbb652d22ad34"},
@@ -8665,10 +8442,12 @@ bigbrain = ["airllm"]
celery = ["celery"]
dev = ["pytest", "pytest-asyncio", "pytest-cov", "pytest-randomly", "pytest-timeout", "pytest-xdist", "selenium"]
discord = ["discord.py"]
+embeddings = ["numpy", "sentence-transformers"]
+git = ["GitPython"]
telegram = ["python-telegram-bot"]
voice = ["pyttsx3"]
[metadata]
lock-version = "2.1"
python-versions = ">=3.11,<4"
-content-hash = "337367c3d31512dfd2600ed1994b4c42a8c961d4eea2ced02a5492dbddd70faf"
+content-hash = "47fabca0120dac4d6eab84d09b6b1556b0ffb029c06226870a3ee096e32e868f"
diff --git a/src/brain/embeddings.py b/src/brain/embeddings.py
index 4ce527b8..1a09ed98 100644
--- a/src/brain/embeddings.py
+++ b/src/brain/embeddings.py
@@ -5,7 +5,6 @@ No OpenAI dependency. Runs 100% locally on CPU.
from __future__ import annotations
-import json
import logging
from typing import List, Union
diff --git a/src/brain/worker.py b/src/brain/worker.py
index 4ffe1ab5..10db77ea 100644
--- a/src/brain/worker.py
+++ b/src/brain/worker.py
@@ -77,9 +77,9 @@ class DistributedWorker:
)
if result.returncode == 0:
return True
- except:
+ except (OSError, subprocess.SubprocessError):
pass
-
+
# Check for ROCm
if os.path.exists("/opt/rocm"):
return True
@@ -93,11 +93,11 @@ class DistributedWorker:
)
if "Metal" in result.stdout:
return True
- except:
+ except (OSError, subprocess.SubprocessError):
pass
-
+
return False
-
+
def _has_internet(self) -> bool:
"""Check if we have internet connectivity."""
try:
@@ -106,9 +106,9 @@ class DistributedWorker:
capture_output=True, timeout=5
)
return result.returncode == 0
- except:
+ except (OSError, subprocess.SubprocessError):
return False
-
+
def _get_memory_gb(self) -> float:
"""Get total system memory in GB."""
try:
@@ -125,7 +125,7 @@ class DistributedWorker:
if line.startswith("MemTotal:"):
kb = int(line.split()[1])
return kb / (1024**2)
- except:
+ except (OSError, ValueError):
pass
return 8.0 # Assume 8GB if we can't detect
@@ -136,9 +136,9 @@ class DistributedWorker:
["which", cmd], capture_output=True, timeout=5
)
return result.returncode == 0
- except:
+ except (OSError, subprocess.SubprocessError):
return False
-
+
def _register_default_handlers(self):
"""Register built-in task handlers."""
self._handlers = {
diff --git a/src/config.py b/src/config.py
index 573edd8e..cd601758 100644
--- a/src/config.py
+++ b/src/config.py
@@ -215,6 +215,7 @@ OLLAMA_MODEL_FALLBACK: str = "qwen2.5:14b"
def check_ollama_model_available(model_name: str) -> bool:
"""Check if a specific Ollama model is available locally."""
try:
+ import json
import urllib.request
url = settings.ollama_url.replace("localhost", "127.0.0.1")
@@ -224,12 +225,12 @@ def check_ollama_model_available(model_name: str) -> bool:
headers={"Accept": "application/json"},
)
with urllib.request.urlopen(req, timeout=5) as response:
- import json
-
data = json.loads(response.read().decode())
- models = [m.get("name", "").split(":")[0] for m in data.get("models", [])]
- # Check for exact match or model name without tag
- return any(model_name in m or m in model_name for m in models)
+ models = [m.get("name", "") for m in data.get("models", [])]
+ return any(
+ model_name == m or model_name == m.split(":")[0] or m.startswith(model_name)
+ for m in models
+ )
except Exception:
return False
diff --git a/src/dashboard/app.py b/src/dashboard/app.py
index 6fd6894b..2f03a9fe 100644
--- a/src/dashboard/app.py
+++ b/src/dashboard/app.py
@@ -18,8 +18,6 @@ from fastapi.middleware.cors import CORSMiddleware
from fastapi.middleware.trustedhost import TrustedHostMiddleware
from fastapi.responses import HTMLResponse
from fastapi.staticfiles import StaticFiles
-from fastapi.templating import Jinja2Templates
-
from config import settings
from dashboard.routes.agents import router as agents_router
from dashboard.routes.health import router as health_router
@@ -282,8 +280,8 @@ static_dir = PROJECT_ROOT / "static"
if static_dir.exists():
app.mount("/static", StaticFiles(directory=str(static_dir)), name="static")
-# Global templates instance
-templates = Jinja2Templates(directory=str(BASE_DIR / "templates"))
+# Shared templates instance
+from dashboard.templating import templates # noqa: E402
# Include routers
diff --git a/src/dashboard/middleware/csrf.py b/src/dashboard/middleware/csrf.py
index 6a3b5e3b..c565f5b1 100644
--- a/src/dashboard/middleware/csrf.py
+++ b/src/dashboard/middleware/csrf.py
@@ -187,7 +187,7 @@ class CSRFMiddleware(BaseHTTPMiddleware):
"/lightning/webhook",
"/_internal/",
]
- return any(pattern in path for pattern in exempt_patterns)
+ return any(path.startswith(pattern) for pattern in exempt_patterns)
async def _validate_request(self, request: Request, csrf_cookie: Optional[str]) -> bool:
"""Validate the CSRF token in the request.
diff --git a/src/dashboard/routes/agents.py b/src/dashboard/routes/agents.py
index 44aec5a7..38361afa 100644
--- a/src/dashboard/routes/agents.py
+++ b/src/dashboard/routes/agents.py
@@ -1,18 +1,16 @@
import logging
from datetime import datetime
-from pathlib import Path
from fastapi import APIRouter, Form, Request
from fastapi.responses import HTMLResponse
-from fastapi.templating import Jinja2Templates
from timmy.session import chat as timmy_chat
from dashboard.store import message_log
+from dashboard.templating import templates
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/agents", tags=["agents"])
-templates = Jinja2Templates(directory=str(Path(__file__).parent.parent / "templates"))
@router.get("")
diff --git a/src/dashboard/routes/briefing.py b/src/dashboard/routes/briefing.py
index 53d0f710..029751c1 100644
--- a/src/dashboard/routes/briefing.py
+++ b/src/dashboard/routes/briefing.py
@@ -7,19 +7,17 @@ POST /briefing/approvals/{id}/reject — reject an item (HTMX)
"""
import logging
-from pathlib import Path
from fastapi import APIRouter, Request
from fastapi.responses import HTMLResponse, JSONResponse
-from fastapi.templating import Jinja2Templates
from timmy.briefing import engine as briefing_engine
from timmy import approvals as approval_store
+from dashboard.templating import templates
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/briefing", tags=["briefing"])
-templates = Jinja2Templates(directory=str(Path(__file__).parent.parent / "templates"))
@router.get("", response_class=HTMLResponse)
diff --git a/src/dashboard/routes/calm.py b/src/dashboard/routes/calm.py
index 4da20a70..ff842e5c 100644
--- a/src/dashboard/routes/calm.py
+++ b/src/dashboard/routes/calm.py
@@ -5,21 +5,15 @@ from typing import List, Optional
from fastapi import APIRouter, Depends, Form, HTTPException, Request
from fastapi.responses import HTMLResponse
-from fastapi.templating import Jinja2Templates
from sqlalchemy.orm import Session
from dashboard.models.calm import JournalEntry, Task, TaskCertainty, TaskState
from dashboard.models.database import SessionLocal, engine, get_db
-
-# Create database tables (if not already created by Alembic)
-# This is typically handled by Alembic migrations in a production environment
-# from dashboard.models.database import Base
-# Base.metadata.create_all(bind=engine)
+from dashboard.templating import templates
logger = logging.getLogger(__name__)
router = APIRouter(tags=["calm"])
-templates = Jinja2Templates(directory="src/dashboard/templates")
# Helper functions for state machine logic
diff --git a/src/dashboard/routes/chat_api.py b/src/dashboard/routes/chat_api.py
index 9aa81c4e..a189e69d 100644
--- a/src/dashboard/routes/chat_api.py
+++ b/src/dashboard/routes/chat_api.py
@@ -15,7 +15,7 @@ import os
import uuid
from datetime import datetime
-from fastapi import APIRouter, File, Request, UploadFile
+from fastapi import APIRouter, File, HTTPException, Request, UploadFile
from fastapi.responses import JSONResponse
from config import settings
@@ -27,6 +27,7 @@ logger = logging.getLogger(__name__)
router = APIRouter(prefix="/api", tags=["chat-api"])
_UPLOAD_DIR = os.path.join("data", "chat-uploads")
+_MAX_UPLOAD_SIZE = 50 * 1024 * 1024 # 50 MB
# ── POST /api/chat ────────────────────────────────────────────────────────────
@@ -112,11 +113,13 @@ async def api_upload(file: UploadFile = File(...)):
os.makedirs(_UPLOAD_DIR, exist_ok=True)
suffix = uuid.uuid4().hex[:12]
- safe_name = (file.filename or "upload").replace("/", "_").replace("\\", "_")
+ safe_name = os.path.basename(file.filename or "upload")
stored_name = f"{suffix}-{safe_name}"
file_path = os.path.join(_UPLOAD_DIR, stored_name)
contents = await file.read()
+ if len(contents) > _MAX_UPLOAD_SIZE:
+ raise HTTPException(status_code=413, detail="File too large (max 50 MB)")
with open(file_path, "wb") as f:
f.write(contents)
diff --git a/src/dashboard/routes/grok.py b/src/dashboard/routes/grok.py
index c135d101..2115589c 100644
--- a/src/dashboard/routes/grok.py
+++ b/src/dashboard/routes/grok.py
@@ -9,18 +9,16 @@ GET /grok/stats — Usage statistics (JSON)
"""
import logging
-from pathlib import Path
from fastapi import APIRouter, Form, Request
from fastapi.responses import HTMLResponse, JSONResponse
-from fastapi.templating import Jinja2Templates
from config import settings
+from dashboard.templating import templates
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/grok", tags=["grok"])
-templates = Jinja2Templates(directory=str(Path(__file__).parent.parent / "templates"))
# In-memory toggle state (persists per process lifetime)
_grok_mode_active: bool = False
diff --git a/src/dashboard/routes/marketplace.py b/src/dashboard/routes/marketplace.py
index b6c527eb..64e50d7b 100644
--- a/src/dashboard/routes/marketplace.py
+++ b/src/dashboard/routes/marketplace.py
@@ -4,16 +4,13 @@ DEPRECATED: Personas replaced by brain task queue.
This module is kept for UI compatibility.
"""
-from pathlib import Path
-
from fastapi import APIRouter, Request
from fastapi.responses import HTMLResponse, JSONResponse
-from fastapi.templating import Jinja2Templates
from brain.client import BrainClient
+from dashboard.templating import templates
router = APIRouter(tags=["marketplace"])
-templates = Jinja2Templates(directory=str(Path(__file__).parent.parent / "templates"))
# Orchestrator only — personas deprecated
AGENT_CATALOG = [
diff --git a/src/dashboard/routes/memory.py b/src/dashboard/routes/memory.py
index 067b970b..26dd13f6 100644
--- a/src/dashboard/routes/memory.py
+++ b/src/dashboard/routes/memory.py
@@ -1,11 +1,9 @@
"""Memory (vector store) routes for browsing and searching memories."""
-from pathlib import Path
from typing import Optional
from fastapi import APIRouter, Form, HTTPException, Request
from fastapi.responses import HTMLResponse, JSONResponse
-from fastapi.templating import Jinja2Templates
from timmy.memory.vector_store import (
store_memory,
@@ -17,9 +15,9 @@ from timmy.memory.vector_store import (
update_personal_fact,
delete_memory,
)
+from dashboard.templating import templates
router = APIRouter(prefix="/memory", tags=["memory"])
-templates = Jinja2Templates(directory=str(Path(__file__).parent.parent / "templates"))
@router.get("", response_class=HTMLResponse)
diff --git a/src/dashboard/routes/mobile.py b/src/dashboard/routes/mobile.py
index 33a17f0a..653282b3 100644
--- a/src/dashboard/routes/mobile.py
+++ b/src/dashboard/routes/mobile.py
@@ -8,16 +8,13 @@ The /mobile/local endpoint loads a small LLM directly into the
browser via WebLLM so Timmy can run on an iPhone with no server.
"""
-from pathlib import Path
-
from fastapi import APIRouter, Request
from fastapi.responses import HTMLResponse
-from fastapi.templating import Jinja2Templates
from config import settings
+from dashboard.templating import templates
router = APIRouter(tags=["mobile"])
-templates = Jinja2Templates(directory=str(Path(__file__).parent.parent / "templates"))
@router.get("/mobile", response_class=HTMLResponse)
diff --git a/src/dashboard/routes/models.py b/src/dashboard/routes/models.py
index 77c566e9..5b63a9fd 100644
--- a/src/dashboard/routes/models.py
+++ b/src/dashboard/routes/models.py
@@ -10,7 +10,6 @@ from typing import Any, Optional
from fastapi import APIRouter, HTTPException, Request
from fastapi.responses import HTMLResponse
-from fastapi.templating import Jinja2Templates
from pydantic import BaseModel
from config import settings
@@ -21,12 +20,12 @@ from infrastructure.models.registry import (
ModelRole,
model_registry,
)
+from dashboard.templating import templates
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/models", tags=["models"])
api_router = APIRouter(prefix="/api/v1/models", tags=["models-api"])
-templates = Jinja2Templates(directory=str(Path(__file__).parent.parent / "templates"))
# ── Pydantic schemas ──────────────────────────────────────────────────────────
diff --git a/src/dashboard/routes/router.py b/src/dashboard/routes/router.py
index 773dafb3..4a833fc8 100644
--- a/src/dashboard/routes/router.py
+++ b/src/dashboard/routes/router.py
@@ -1,15 +1,12 @@
"""Cascade Router status routes."""
-from pathlib import Path
-
from fastapi import APIRouter, Request
from fastapi.responses import HTMLResponse
-from fastapi.templating import Jinja2Templates
from timmy.cascade_adapter import get_cascade_adapter
+from dashboard.templating import templates
router = APIRouter(prefix="/router", tags=["router"])
-templates = Jinja2Templates(directory=str(Path(__file__).parent.parent / "templates"))
@router.get("/status", response_class=HTMLResponse)
diff --git a/src/dashboard/routes/spark.py b/src/dashboard/routes/spark.py
index f9980509..f5424174 100644
--- a/src/dashboard/routes/spark.py
+++ b/src/dashboard/routes/spark.py
@@ -9,18 +9,16 @@ GET /spark/predictions — HTMX partial: EIDOS predictions
import json
import logging
-from pathlib import Path
from fastapi import APIRouter, Request
from fastapi.responses import HTMLResponse
-from fastapi.templating import Jinja2Templates
from spark.engine import spark_engine
+from dashboard.templating import templates
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/spark", tags=["spark"])
-templates = Jinja2Templates(directory=str(Path(__file__).parent.parent / "templates"))
@router.get("/ui", response_class=HTMLResponse)
diff --git a/src/dashboard/routes/swarm.py b/src/dashboard/routes/swarm.py
index 762b51e3..21b0e607 100644
--- a/src/dashboard/routes/swarm.py
+++ b/src/dashboard/routes/swarm.py
@@ -2,19 +2,17 @@
import json
import logging
-from pathlib import Path
from typing import Optional
from fastapi import APIRouter, Request
from fastapi.responses import HTMLResponse
-from fastapi.templating import Jinja2Templates
from spark.engine import spark_engine
+from dashboard.templating import templates
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/swarm", tags=["swarm"])
-templates = Jinja2Templates(directory=str(Path(__file__).parent.parent / "templates"))
@router.get("/events", response_class=HTMLResponse)
diff --git a/src/dashboard/routes/system.py b/src/dashboard/routes/system.py
index 8ff39766..84e49bd5 100644
--- a/src/dashboard/routes/system.py
+++ b/src/dashboard/routes/system.py
@@ -1,16 +1,15 @@
"""System-level dashboard routes (ledger, upgrades, etc.)."""
import logging
-from pathlib import Path
from fastapi import APIRouter, Request
from fastapi.responses import HTMLResponse
-from fastapi.templating import Jinja2Templates
+
+from dashboard.templating import templates
logger = logging.getLogger(__name__)
router = APIRouter(tags=["system"])
-templates = Jinja2Templates(directory=str(Path(__file__).parent.parent / "templates"))
@router.get("/lightning/ledger", response_class=HTMLResponse)
diff --git a/src/dashboard/routes/tasks_celery.py b/src/dashboard/routes/tasks_celery.py
index c2b6c390..5fbc9f77 100644
--- a/src/dashboard/routes/tasks_celery.py
+++ b/src/dashboard/routes/tasks_celery.py
@@ -8,16 +8,15 @@ POST /celery/api/{id}/revoke — cancel a running task
"""
import logging
-from pathlib import Path
from fastapi import APIRouter, Request
from fastapi.responses import HTMLResponse, JSONResponse
-from fastapi.templating import Jinja2Templates
+
+from dashboard.templating import templates
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/celery", tags=["celery"])
-templates = Jinja2Templates(directory=str(Path(__file__).parent.parent / "templates"))
# In-memory record of submitted task IDs for the dashboard display.
# In production this would use the Celery result backend directly,
diff --git a/src/dashboard/routes/thinking.py b/src/dashboard/routes/thinking.py
index 791abedf..563c2d71 100644
--- a/src/dashboard/routes/thinking.py
+++ b/src/dashboard/routes/thinking.py
@@ -6,18 +6,16 @@ GET /thinking/api/{id}/chain — follow a thought chain
"""
import logging
-from pathlib import Path
from fastapi import APIRouter, Request
from fastapi.responses import HTMLResponse, JSONResponse
-from fastapi.templating import Jinja2Templates
from timmy.thinking import thinking_engine
+from dashboard.templating import templates
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/thinking", tags=["thinking"])
-templates = Jinja2Templates(directory=str(Path(__file__).parent.parent / "templates"))
@router.get("", response_class=HTMLResponse)
diff --git a/src/dashboard/routes/tools.py b/src/dashboard/routes/tools.py
index ffc7a872..8bfb9e76 100644
--- a/src/dashboard/routes/tools.py
+++ b/src/dashboard/routes/tools.py
@@ -3,16 +3,13 @@
Shows available tools and usage statistics.
"""
-from pathlib import Path
-
from fastapi import APIRouter, Request
from fastapi.responses import HTMLResponse, JSONResponse
-from fastapi.templating import Jinja2Templates
from timmy.tools import get_all_available_tools
+from dashboard.templating import templates
router = APIRouter(tags=["tools"])
-templates = Jinja2Templates(directory=str(Path(__file__).parent.parent / "templates"))
@router.get("/tools", response_class=HTMLResponse)
diff --git a/src/dashboard/routes/voice.py b/src/dashboard/routes/voice.py
index c36caa90..cea86047 100644
--- a/src/dashboard/routes/voice.py
+++ b/src/dashboard/routes/voice.py
@@ -9,16 +9,14 @@ import logging
from fastapi import APIRouter, Form, Request
from fastapi.responses import HTMLResponse
-from fastapi.templating import Jinja2Templates
-from pathlib import Path
from integrations.voice.nlu import detect_intent, extract_command
from timmy.agent import create_timmy
+from dashboard.templating import templates
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/voice", tags=["voice"])
-templates = Jinja2Templates(directory=str(Path(__file__).parent.parent / "templates"))
@router.post("/nlu")
diff --git a/src/dashboard/templating.py b/src/dashboard/templating.py
new file mode 100644
index 00000000..46d60527
--- /dev/null
+++ b/src/dashboard/templating.py
@@ -0,0 +1,7 @@
+"""Shared Jinja2Templates instance for all dashboard routes."""
+
+from pathlib import Path
+
+from fastapi.templating import Jinja2Templates
+
+templates = Jinja2Templates(directory=str(Path(__file__).parent / "templates"))
diff --git a/src/infrastructure/router/cascade.py b/src/infrastructure/router/cascade.py
index 81d72b26..a747530f 100644
--- a/src/infrastructure/router/cascade.py
+++ b/src/infrastructure/router/cascade.py
@@ -18,8 +18,6 @@ from enum import Enum
from pathlib import Path
from typing import Any, Optional
-from pathlib import Path
-
try:
import yaml
except ImportError:
diff --git a/src/infrastructure/ws_manager/handler.py b/src/infrastructure/ws_manager/handler.py
index 5435b0f2..fff894af 100644
--- a/src/infrastructure/ws_manager/handler.py
+++ b/src/infrastructure/ws_manager/handler.py
@@ -7,6 +7,7 @@ system events.
"""
import asyncio
+import collections
import json
import logging
from dataclasses import asdict, dataclass
@@ -34,8 +35,7 @@ class WebSocketManager:
def __init__(self) -> None:
self._connections: list[WebSocket] = []
- self._event_history: list[WSEvent] = []
- self._max_history = 100
+ self._event_history: collections.deque[WSEvent] = collections.deque(maxlen=100)
async def connect(self, websocket: WebSocket) -> None:
"""Accept a new WebSocket connection."""
@@ -46,7 +46,7 @@ class WebSocketManager:
len(self._connections),
)
# Send recent history to the new client
- for event in self._event_history[-20:]:
+ for event in list(self._event_history)[-20:]:
try:
await websocket.send_text(event.to_json())
except Exception:
@@ -69,8 +69,6 @@ class WebSocketManager:
timestamp=datetime.now(timezone.utc).isoformat(),
)
self._event_history.append(ws_event)
- if len(self._event_history) > self._max_history:
- self._event_history = self._event_history[-self._max_history:]
message = ws_event.to_json()
disconnected = []
@@ -78,7 +76,10 @@ class WebSocketManager:
for ws in self._connections:
try:
await ws.send_text(message)
+ except ConnectionError:
+ disconnected.append(ws)
except Exception:
+ logger.warning("Unexpected WebSocket send error", exc_info=True)
disconnected.append(ws)
# Clean up dead connections
@@ -128,8 +129,6 @@ class WebSocketManager:
Returns:
Number of clients notified
"""
- import json
-
message = json.dumps(data)
disconnected = []
count = 0
diff --git a/src/timmy/agent.py b/src/timmy/agent.py
index 1ded27f7..a1cd8620 100644
--- a/src/timmy/agent.py
+++ b/src/timmy/agent.py
@@ -20,7 +20,7 @@ from agno.agent import Agent
from agno.db.sqlite import SqliteDb
from agno.models.ollama import Ollama
-from config import settings
+from config import check_ollama_model_available, settings
from timmy.prompts import get_system_prompt
from timmy.tools import create_full_toolkit
@@ -64,27 +64,7 @@ _SMALL_MODEL_PATTERNS = (
def _check_model_available(model_name: str) -> bool:
"""Check if an Ollama model is available locally."""
- try:
- import urllib.request
- import json
-
- url = settings.ollama_url.replace("localhost", "127.0.0.1")
- req = urllib.request.Request(
- f"{url}/api/tags",
- method="GET",
- headers={"Accept": "application/json"},
- )
- with urllib.request.urlopen(req, timeout=5) as response:
- data = json.loads(response.read().decode())
- models = [m.get("name", "") for m in data.get("models", [])]
- # Check for exact match or model name without tag
- return any(
- model_name == m or model_name == m.split(":")[0] or m.startswith(model_name)
- for m in models
- )
- except Exception as exc:
- logger.debug("Could not check model availability: %s", exc)
- return False
+ return check_ollama_model_available(model_name)
def _pull_model(model_name: str) -> bool:
diff --git a/src/timmy/cli.py b/src/timmy/cli.py
index 1b957c8c..40cc58f2 100644
--- a/src/timmy/cli.py
+++ b/src/timmy/cli.py
@@ -121,67 +121,5 @@ def down():
subprocess.run(["docker", "compose", "down"], check=True)
-@app.command(name="ingest-report")
-def ingest_report(
- file: Optional[str] = typer.Argument(
- None, help="Path to JSON report file (reads stdin if omitted)",
- ),
- dry_run: bool = typer.Option(
- False, "--dry-run", help="Validate report and show what would be created",
- ),
-):
- """Ingest a structured test report and create bug_report tasks.
-
- Reads a JSON report with an array of bugs and creates one task per bug
- in the internal task queue. The task processor will then attempt to
- create GitHub Issues for each.
-
- Examples:
- timmy ingest-report report.json
- timmy ingest-report --dry-run report.json
- cat report.json | timmy ingest-report
- """
- import json
- import sys
-
- # Read input
- if file:
- try:
- with open(file) as f:
- raw = f.read()
- except FileNotFoundError:
- typer.echo(f"File not found: {file}", err=True)
- raise typer.Exit(1)
- else:
- if sys.stdin.isatty():
- typer.echo("Reading from stdin (paste JSON, then Ctrl+D)...")
- raw = sys.stdin.read()
-
- # Parse JSON
- try:
- data = json.loads(raw)
- except json.JSONDecodeError as exc:
- typer.echo(f"Invalid JSON: {exc}", err=True)
- raise typer.Exit(1)
-
- reporter = data.get("reporter", "unknown")
- bugs = data.get("bugs", [])
-
- if not bugs:
- typer.echo("No bugs in report.", err=True)
- raise typer.Exit(1)
-
- typer.echo(f"Report: {len(bugs)} bug(s) from {reporter}")
-
- if dry_run:
- for bug in bugs:
- typer.echo(f" [{bug.get('severity', '?')}] {bug.get('title', '(no title)')}")
- typer.echo("(dry run — no tasks created)")
- return
-
- typer.echo("Task queue not available (swarm module removed).", err=True)
- raise typer.Exit(1)
-
-
def main():
app()
diff --git a/tests/conftest.py b/tests/conftest.py
index a64ec5cd..97fcb6ae 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -35,6 +35,9 @@ for _mod in [
"celery",
"celery.app",
"celery.result",
+ "pyttsx3",
+ "sentence_transformers",
+ "redis",
]:
sys.modules.setdefault(_mod, MagicMock())
@@ -190,87 +193,3 @@ def mock_timmy_agent():
return agent
-@pytest.fixture
-def mock_memory_system():
- """Provide a mock memory system."""
- memory = MagicMock()
- memory.get_system_context = MagicMock(return_value="Test memory context")
- memory.add_memory = MagicMock()
- memory.search = MagicMock(return_value=[])
- return memory
-
-
-@pytest.fixture
-def mock_event_log():
- """Provide a mock event logger."""
- logger = MagicMock()
- logger.log_event = MagicMock()
- logger.get_events = MagicMock(return_value=[])
- return logger
-
-
-@pytest.fixture
-def mock_ws_manager():
- """Provide a mock WebSocket manager."""
- manager = MagicMock()
- manager.broadcast = MagicMock()
- manager.broadcast_json = MagicMock()
- manager.send = MagicMock()
- return manager
-
-
-@pytest.fixture
-def mock_settings():
- """Provide mock settings."""
- settings = MagicMock()
- settings.ollama_url = "http://localhost:11434"
- settings.ollama_model = "llama3.2"
- settings.thinking_enabled = True
- settings.thinking_interval_seconds = 300
- settings.error_log_enabled = False
- settings.repo_root = str(Path(__file__).parent.parent)
- return settings
-
-
-@pytest.fixture
-def sample_interview_data():
- """Provide sample interview data for testing."""
- return {
- "questions": [
- {
- "category": "Identity",
- "question": "Who are you?",
- "expected_keywords": ["Timmy", "agent"],
- },
- {
- "category": "Capabilities",
- "question": "What can you do?",
- "expected_keywords": ["agent", "brain"],
- },
- ],
- "expected_response_format": "string",
- }
-
-
-@pytest.fixture
-def sample_task_data():
- """Provide sample task data for testing."""
- return {
- "id": "task-1",
- "title": "Test Task",
- "description": "This is a test task",
- "assigned_to": "timmy",
- "status": "pending",
- "priority": "normal",
- }
-
-
-@pytest.fixture
-def sample_agent_data():
- """Provide sample agent data for testing."""
- return {
- "id": "agent-1",
- "name": "Test Agent",
- "capabilities": ["chat", "reasoning"],
- "status": "active",
- }
diff --git a/tests/conftest_markers.py b/tests/conftest_markers.py
index c4bd60e3..30d12ebb 100644
--- a/tests/conftest_markers.py
+++ b/tests/conftest_markers.py
@@ -37,7 +37,7 @@ def pytest_collection_modifyitems(config, items):
item.add_marker(pytest.mark.slow)
elif "functional" in test_path:
item.add_marker(pytest.mark.functional)
- elif "integration" in test_path:
+ elif "infrastructure" in test_path or "integration" in test_path:
item.add_marker(pytest.mark.integration)
else:
item.add_marker(pytest.mark.unit)
diff --git a/tests/integrations/test_websocket.py b/tests/integrations/test_websocket.py
index 477e45f6..ed6428cb 100644
--- a/tests/integrations/test_websocket.py
+++ b/tests/integrations/test_websocket.py
@@ -22,9 +22,10 @@ def test_ws_manager_initial_state():
@pytest.mark.asyncio
async def test_ws_manager_event_history_limit():
- """History is trimmed to max_history after broadcasts."""
+ """History is trimmed to maxlen after broadcasts."""
+ import collections
mgr = WebSocketManager()
- mgr._max_history = 5
+ mgr._event_history = collections.deque(maxlen=5)
for i in range(10):
await mgr.broadcast(f"e{i}", {})
assert len(mgr.event_history) == 5
diff --git a/tests/integrations/test_websocket_extended.py b/tests/integrations/test_websocket_extended.py
index 26448b10..37d4f379 100644
--- a/tests/integrations/test_websocket_extended.py
+++ b/tests/integrations/test_websocket_extended.py
@@ -67,8 +67,9 @@ class TestWebSocketManagerBroadcast:
@pytest.mark.asyncio
async def test_broadcast_trims_history(self):
+ import collections
mgr = WebSocketManager()
- mgr._max_history = 3
+ mgr._event_history = collections.deque(maxlen=3)
for i in range(5):
await mgr.broadcast(f"e{i}", {})
assert len(mgr.event_history) == 3
diff --git a/tests/security/test_xss_vulnerabilities.py b/tests/security/test_xss_vulnerabilities.py
index 6d47df17..fc24234b 100644
--- a/tests/security/test_xss_vulnerabilities.py
+++ b/tests/security/test_xss_vulnerabilities.py
@@ -1,50 +1,33 @@
import pytest
from fastapi.testclient import TestClient
from dashboard.app import app
-from config import settings
import html
@pytest.fixture
def client():
return TestClient(app)
-def test_health_status_xss_vulnerability(client):
+def test_health_status_xss_vulnerability(client, monkeypatch):
"""Verify that the health status page escapes the model name."""
- original_model = settings.ollama_model
    malicious_model = '"><script>alert("xss")</script>'
-
- try:
- # Inject malicious model name into settings
- settings.ollama_model = malicious_model
-
- response = client.get("/health/status")
- assert response.status_code == 200
-
- # The malicious script should be escaped
- escaped_model = html.escape(malicious_model)
- assert escaped_model in response.text
- assert malicious_model not in response.text
- finally:
- settings.ollama_model = original_model
+ monkeypatch.setattr("config.settings.ollama_model", malicious_model)
-def test_grok_toggle_xss_vulnerability(client):
+ response = client.get("/health/status")
+ assert response.status_code == 200
+
+ escaped_model = html.escape(malicious_model)
+ assert escaped_model in response.text
+ assert malicious_model not in response.text
+
+def test_grok_toggle_xss_vulnerability(client, monkeypatch):
"""Verify that the grok toggle card escapes the model name."""
- original_model = settings.grok_default_model
    malicious_model = '"><img src=x onerror="alert(1)">'
-
- try:
- # Inject malicious model name into settings
- settings.grok_default_model = malicious_model
-
- # We need to make grok available to trigger the render_toggle_card
- # Since we're in test mode, we might need to mock this or just call the function
- from dashboard.routes.grok import _render_toggle_card
-
- html_output = _render_toggle_card(active=True)
-
- # The malicious script should be escaped
- escaped_model = html.escape(malicious_model)
- assert escaped_model in html_output
- assert malicious_model not in html_output
- finally:
- settings.grok_default_model = original_model
+ monkeypatch.setattr("config.settings.grok_default_model", malicious_model)
+
+ from dashboard.routes.grok import _render_toggle_card
+
+ html_output = _render_toggle_card(active=True)
+
+ escaped_model = html.escape(malicious_model)
+ assert escaped_model in html_output
+ assert malicious_model not in html_output