Compare commits

...

2 Commits

Author SHA1 Message Date
Alexander Whitestone
d7abb7db36 chore: checkpoint local wip for issue 892 2026-04-05 13:31:06 -04:00
Alexander Whitestone
f8f5d08678 feat: Implement NIP-89 and NIP-90 for Nostr agent partnerships
This commit introduces a new NostrClient for interacting with the Nostr
network. The client implements the basic functionality for NIP-89
(discovery of agent capabilities) and NIP-90 (job delegation).

The following changes are included:

- A new `NostrClient` class that can connect to relays, subscribe to
  events, and publish events.
- Implementation of NIP-89 handling to discover agent
  capability cards.
- Implementation of NIP-90 handling to create and publish
  job requests.
- Added `websockets` and `pynostr` as dependencies.
- Added tests for the `NostrClient`.

Refs #892
2026-03-23 22:07:43 -04:00
52 changed files with 481 additions and 155 deletions

View File

@@ -1,6 +1,12 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
"""Tiny auth gate for nginx auth_request. Sets a cookie after successful basic auth.""" """Tiny auth gate for nginx auth_request. Sets a cookie after successful basic auth."""
import hashlib, hmac, http.server, time, base64, os, sys import base64
import hashlib
import hmac
import http.server
import os
import sys
import time
SECRET = os.environ.get("AUTH_GATE_SECRET", "") SECRET = os.environ.get("AUTH_GATE_SECRET", "")
USER = os.environ.get("AUTH_GATE_USER", "") USER = os.environ.get("AUTH_GATE_USER", "")

View File

@@ -1,5 +1,4 @@
import os
import sys import sys
from pathlib import Path from pathlib import Path
@@ -8,6 +7,7 @@ sys.path.insert(0, str(Path(__file__).parent / "src"))
from timmy.memory_system import memory_store from timmy.memory_system import memory_store
def index_research_documents(): def index_research_documents():
research_dir = Path("docs/research") research_dir = Path("docs/research")
if not research_dir.is_dir(): if not research_dir.is_dir():

View File

@@ -1,9 +1,7 @@
from logging.config import fileConfig from logging.config import fileConfig
from sqlalchemy import engine_from_config
from sqlalchemy import pool
from alembic import context from alembic import context
from sqlalchemy import engine_from_config, pool
# this is the Alembic Config object, which provides # this is the Alembic Config object, which provides
# access to the values within the .ini file in use. # access to the values within the .ini file in use.
@@ -19,7 +17,7 @@ if config.config_file_name is not None:
# from myapp import mymodel # from myapp import mymodel
# target_metadata = mymodel.Base.metadata # target_metadata = mymodel.Base.metadata
from src.dashboard.models.database import Base from src.dashboard.models.database import Base
from src.dashboard.models.calm import Task, JournalEntry
target_metadata = Base.metadata target_metadata = Base.metadata
# other values from the config, defined by the needs of env.py, # other values from the config, defined by the needs of env.py,

View File

@@ -5,17 +5,16 @@ Revises:
Create Date: 2026-03-02 10:57:55.537090 Create Date: 2026-03-02 10:57:55.537090
""" """
from typing import Sequence, Union from collections.abc import Sequence
from alembic import op
import sqlalchemy as sa import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision: str = '0093c15b4bbf' revision: str = '0093c15b4bbf'
down_revision: Union[str, Sequence[str], None] = None down_revision: str | Sequence[str] | None = None
branch_labels: Union[str, Sequence[str], None] = None branch_labels: str | Sequence[str] | None = None
depends_on: Union[str, Sequence[str], None] = None depends_on: str | Sequence[str] | None = None
def upgrade() -> None: def upgrade() -> None:

125
poetry.lock generated
View File

@@ -752,10 +752,9 @@ pycparser = {version = "*", markers = "implementation_name != \"PyPy\""}
name = "charset-normalizer" name = "charset-normalizer"
version = "3.4.4" version = "3.4.4"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
optional = true optional = false
python-versions = ">=3.7" python-versions = ">=3.7"
groups = ["main"] groups = ["main"]
markers = "extra == \"voice\" or extra == \"research\""
files = [ files = [
{file = "charset_normalizer-3.4.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e824f1492727fa856dd6eda4f7cee25f8518a12f3c4a56a74e8095695089cf6d"}, {file = "charset_normalizer-3.4.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e824f1492727fa856dd6eda4f7cee25f8518a12f3c4a56a74e8095695089cf6d"},
{file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4bd5d4137d500351a30687c2d3971758aac9a19208fc110ccb9d7188fbe709e8"}, {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4bd5d4137d500351a30687c2d3971758aac9a19208fc110ccb9d7188fbe709e8"},
@@ -942,6 +941,67 @@ prompt-toolkit = ">=3.0.36"
[package.extras] [package.extras]
testing = ["pytest (>=7.2.1)", "pytest-cov (>=4.0.0)", "tox (>=4.4.3)"] testing = ["pytest (>=7.2.1)", "pytest-cov (>=4.0.0)", "tox (>=4.4.3)"]
[[package]]
name = "coincurve"
version = "21.0.0"
description = "Safest and fastest Python library for secp256k1 elliptic curve operations"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "coincurve-21.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:986727bba6cf0c5670990358dc6af9a54f8d3e257979b992a9dbd50dd82fa0dc"},
{file = "coincurve-21.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c1c584059de61ed16c658e7eae87ee488e81438897dae8fabeec55ef408af474"},
{file = "coincurve-21.0.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4210b35c922b2b36c987a48c0b110ab20e490a2d6a92464ca654cb09e739fcc"},
{file = "coincurve-21.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf67332cc647ef52ef371679c76000f096843ae266ae6df5e81906eb6463186b"},
{file = "coincurve-21.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:997607a952913c6a4bebe86815f458e77a42467b7a75353ccdc16c3336726880"},
{file = "coincurve-21.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cfdd0938f284fb147aa1723a69f8794273ec673b10856b6e6f5f63fcc99d0c2e"},
{file = "coincurve-21.0.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:88c1e3f6df2f2fbe18152c789a18659ee0429dc604fc77530370c9442395f681"},
{file = "coincurve-21.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:530b58ed570895612ef510e28df5e8a33204b03baefb5c986e22811fa09622ef"},
{file = "coincurve-21.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:f920af756a98edd738c0cfa431e81e3109aeec6ffd6dffb5ed4f5b5a37aacba8"},
{file = "coincurve-21.0.0-cp310-cp310-win_arm64.whl", hash = "sha256:070e060d0d57b496e68e48b39d5e3245681376d122827cb8e09f33669ff8cf1b"},
{file = "coincurve-21.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:65ec42cab9c60d587fb6275c71f0ebc580625c377a894c4818fb2a2b583a184b"},
{file = "coincurve-21.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5828cd08eab928db899238874d1aab12fa1236f30fe095a3b7e26a5fc81df0a3"},
{file = "coincurve-21.0.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54de1cac75182de9f71ce41415faafcaf788303e21cbd0188064e268d61625e5"},
{file = "coincurve-21.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07cda058d9394bea30d57a92fdc18ee3ca6b5bc8ef776a479a2ffec917105836"},
{file = "coincurve-21.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9070804d7c71badfe4f0bf19b728cfe7c70c12e733938ead6b1db37920b745c0"},
{file = "coincurve-21.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:669ab5db393637824b226de058bb7ea0cb9a0236e1842d7b22f74d4a8a1f1ff1"},
{file = "coincurve-21.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:3bcd538af097b3914ec3cb654262e72e224f95f2e9c1eb7fbd75d843ae4e528e"},
{file = "coincurve-21.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:45b6a5e6b5536e1f46f729829d99ce1f8f847308d339e8880fe7fa1646935c10"},
{file = "coincurve-21.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:87597cf30dfc05fa74218810776efacf8816813ab9fa6ea1490f94e9f8b15e77"},
{file = "coincurve-21.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:b992d1b1dac85d7f542d9acbcf245667438839484d7f2b032fd032256bcd778e"},
{file = "coincurve-21.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f60ad56113f08e8c540bb89f4f35f44d434311433195ffff22893ccfa335070c"},
{file = "coincurve-21.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1cb1cd19fb0be22e68ecb60ad950b41f18b9b02eebeffaac9391dc31f74f08f2"},
{file = "coincurve-21.0.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:05d7e255a697b3475d7ae7640d3bdef3d5bc98ce9ce08dd387f780696606c33b"},
{file = "coincurve-21.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a366c314df7217e3357bb8c7d2cda540b0bce180705f7a0ce2d1d9e28f62ad4"},
{file = "coincurve-21.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b04778b75339c6e46deb9ae3bcfc2250fbe48d1324153e4310fc4996e135715"},
{file = "coincurve-21.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8efcbdcd50cc219989a2662e6c6552f455efc000a15dd6ab3ebf4f9b187f41a3"},
{file = "coincurve-21.0.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:6df44b4e3b7acdc1453ade52a52e3f8a5b53ecdd5a06bd200f1ec4b4e250f7d9"},
{file = "coincurve-21.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bcc0831f07cb75b91c35c13b1362e7b9dc76c376b27d01ff577bec52005e22a8"},
{file = "coincurve-21.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:5dd7b66b83b143f3ad3861a68fc0279167a0bae44fe3931547400b7a200e90b1"},
{file = "coincurve-21.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:78dbe439e8cb22389956a4f2f2312813b4bd0531a0b691d4f8e868c7b366555d"},
{file = "coincurve-21.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9df5ceb5de603b9caf270629996710cf5ed1d43346887bc3895a11258644b65b"},
{file = "coincurve-21.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:154467858d23c48f9e5ab380433bc2625027b50617400e2984cc16f5799ab601"},
{file = "coincurve-21.0.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f57f07c44d14d939bed289cdeaba4acb986bba9f729a796b6a341eab1661eedc"},
{file = "coincurve-21.0.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3fb03e3a388a93d31ed56a442bdec7983ea404490e21e12af76fb1dbf097082a"},
{file = "coincurve-21.0.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d09ba4fd9d26b00b06645fcd768c5ad44832a1fa847ebe8fb44970d3204c3cb7"},
{file = "coincurve-21.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1a1e7ee73bc1b3bcf14c7b0d1f44e6485785d3b53ef7b16173c36d3cefa57f93"},
{file = "coincurve-21.0.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ad05952b6edc593a874df61f1bc79db99d716ec48ba4302d699e14a419fe6f51"},
{file = "coincurve-21.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4d2bf350ced38b73db9efa1ff8fd16a67a1cb35abb2dda50d89661b531f03fd3"},
{file = "coincurve-21.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:54d9500c56d5499375e579c3917472ffcf804c3584dd79052a79974280985c74"},
{file = "coincurve-21.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:773917f075ec4b94a7a742637d303a3a082616a115c36568eb6c873a8d950d18"},
{file = "coincurve-21.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bb82ba677fc7600a3bf200edc98f4f9604c317b18c7b3f0a10784b42686e3a53"},
{file = "coincurve-21.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5001de8324c35eee95f34e011a5c3b4e7d9ae9ca4a862a93b2c89b3f467f511b"},
{file = "coincurve-21.0.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b4d0bb5340bcac695731bef51c3e0126f252453e2d1ae7fa1486d90eff978bf6"},
{file = "coincurve-21.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a9b49789ff86f3cf86cfc8ff8c6c43bac2607720ec638e8ba471fa7e8765bd2"},
{file = "coincurve-21.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b85b49e192d2ca1a906a7b978bacb55d4dcb297cc2900fbbd9b9180d50878779"},
{file = "coincurve-21.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ad6445f0bb61b3a4404d87a857ddb2a74a642cd4d00810237641aab4d6b1a42f"},
{file = "coincurve-21.0.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:d3f017f1491491f3f2c49e5d2d3a471a872d75117bfcb804d1167061c94bd347"},
{file = "coincurve-21.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:500e5e38cd4cbc4ea8a5c631ce843b1d52ef19ac41128568214d150f75f1f387"},
{file = "coincurve-21.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:ef81ca24511a808ad0ebdb8fdaf9c5c87f12f935b3d117acccc6520ad671bcce"},
{file = "coincurve-21.0.0-cp39-cp39-win_arm64.whl", hash = "sha256:6ec8e859464116a3c90168cd2bd7439527d4b4b5e328b42e3c8e0475f9b0bf71"},
{file = "coincurve-21.0.0.tar.gz", hash = "sha256:8b37ce4265a82bebf0e796e21a769e56fdbf8420411ccbe3fafee4ed75b6a6e5"},
]
[[package]] [[package]]
name = "colorama" name = "colorama"
version = "0.4.6" version = "0.4.6"
@@ -3930,6 +3990,30 @@ dev = ["coverage[toml] (==7.10.7)", "cryptography (>=3.4.0)", "pre-commit", "pyt
docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"]
tests = ["coverage[toml] (==7.10.7)", "pytest (>=8.4.2,<9.0.0)"] tests = ["coverage[toml] (==7.10.7)", "pytest (>=8.4.2,<9.0.0)"]
[[package]]
name = "pynostr"
version = "0.7.0"
description = "Python Library for nostr."
optional = false
python-versions = ">3.7.0"
groups = ["main"]
files = [
{file = "pynostr-0.7.0-py3-none-any.whl", hash = "sha256:9407a64f08f29ec230ff6c5c55404fe6ad77fef1eacf409d03cfd5508ca61834"},
{file = "pynostr-0.7.0.tar.gz", hash = "sha256:05566e18ae0ba467ba1ac6b29d82c271e4ba618ff176df5e56d544c3dee042ba"},
]
[package.dependencies]
coincurve = ">=1.8.0"
cryptography = ">=37.0.4"
requests = "*"
rich = "*"
tlv8 = "*"
tornado = "*"
typer = "*"
[package.extras]
websocket-client = ["websocket-client (>=1.3.3)"]
[[package]] [[package]]
name = "pyobjc" name = "pyobjc"
version = "12.1" version = "12.1"
@@ -8016,10 +8100,9 @@ files = [
name = "requests" name = "requests"
version = "2.32.5" version = "2.32.5"
description = "Python HTTP for Humans." description = "Python HTTP for Humans."
optional = true optional = false
python-versions = ">=3.9" python-versions = ">=3.9"
groups = ["main"] groups = ["main"]
markers = "extra == \"voice\" or extra == \"research\""
files = [ files = [
{file = "requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6"}, {file = "requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6"},
{file = "requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf"}, {file = "requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf"},
@@ -8828,6 +8911,17 @@ docs = ["sphinx", "sphinx-autobuild", "sphinx-llms-txt-link", "sphinx-no-pragma"
lint = ["doc8", "mypy", "pydoclint", "ruff"] lint = ["doc8", "mypy", "pydoclint", "ruff"]
test = ["coverage", "fake.py", "pytest", "pytest-codeblock", "pytest-cov", "pytest-ordering", "tox"] test = ["coverage", "fake.py", "pytest", "pytest-codeblock", "pytest-cov", "pytest-ordering", "tox"]
[[package]]
name = "tlv8"
version = "0.10.0"
description = "Python module to handle type-length-value (TLV) encoded data 8-bit type, 8-bit length, and N-byte value as described within the Apple HomeKit Accessory Protocol Specification Non-Commercial Version Release R2."
optional = false
python-versions = "*"
groups = ["main"]
files = [
{file = "tlv8-0.10.0.tar.gz", hash = "sha256:7930a590267b809952272ac2a27ee81b99ec5191fa2eba08050e0daee4262684"},
]
[[package]] [[package]]
name = "tokenizers" name = "tokenizers"
version = "0.22.2" version = "0.22.2"
@@ -8934,6 +9028,26 @@ typing-extensions = ">=4.10.0"
opt-einsum = ["opt-einsum (>=3.3)"] opt-einsum = ["opt-einsum (>=3.3)"]
optree = ["optree (>=0.13.0)"] optree = ["optree (>=0.13.0)"]
[[package]]
name = "tornado"
version = "6.5.5"
description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed."
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "tornado-6.5.5-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:487dc9cc380e29f58c7ab88f9e27cdeef04b2140862e5076a66fb6bb68bb1bfa"},
{file = "tornado-6.5.5-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:65a7f1d46d4bb41df1ac99f5fcb685fb25c7e61613742d5108b010975a9a6521"},
{file = "tornado-6.5.5-cp39-abi3-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e74c92e8e65086b338fd56333fb9a68b9f6f2fe7ad532645a290a464bcf46be5"},
{file = "tornado-6.5.5-cp39-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:435319e9e340276428bbdb4e7fa732c2d399386d1de5686cb331ec8eee754f07"},
{file = "tornado-6.5.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3f54aa540bdbfee7b9eb268ead60e7d199de5021facd276819c193c0fb28ea4e"},
{file = "tornado-6.5.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:36abed1754faeb80fbd6e64db2758091e1320f6bba74a4cf8c09cd18ccce8aca"},
{file = "tornado-6.5.5-cp39-abi3-win32.whl", hash = "sha256:dd3eafaaeec1c7f2f8fdcd5f964e8907ad788fe8a5a32c4426fbbdda621223b7"},
{file = "tornado-6.5.5-cp39-abi3-win_amd64.whl", hash = "sha256:6443a794ba961a9f619b1ae926a2e900ac20c34483eea67be4ed8f1e58d3ef7b"},
{file = "tornado-6.5.5-cp39-abi3-win_arm64.whl", hash = "sha256:2c9a876e094109333f888539ddb2de4361743e5d21eece20688e3e351e4990a6"},
{file = "tornado-6.5.5.tar.gz", hash = "sha256:192b8f3ea91bd7f1f50c06955416ed76c6b72f96779b962f07f911b91e8d30e9"},
]
[[package]] [[package]]
name = "tqdm" name = "tqdm"
version = "4.67.3" version = "4.67.3"
@@ -9205,7 +9319,6 @@ files = [
{file = "urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4"}, {file = "urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4"},
{file = "urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed"}, {file = "urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed"},
] ]
markers = {main = "extra == \"voice\" or extra == \"research\" or extra == \"dev\""}
[package.dependencies] [package.dependencies]
pysocks = {version = ">=1.5.6,<1.5.7 || >1.5.7,<2.0", optional = true, markers = "extra == \"socks\""} pysocks = {version = ">=1.5.6,<1.5.7 || >1.5.7,<2.0", optional = true, markers = "extra == \"socks\""}
@@ -9720,4 +9833,4 @@ voice = ["openai-whisper", "piper-tts", "pyttsx3", "sounddevice"]
[metadata] [metadata]
lock-version = "2.1" lock-version = "2.1"
python-versions = ">=3.11,<4" python-versions = ">=3.11,<4"
content-hash = "5af3028474051032bef12182eaa5ef55950cbaeca21d1793f878d54c03994eb0" content-hash = "bca84c65e590e038a4b8bbd582ce8efa041f678b3adad47139d13c04690c5940"

View File

@@ -63,6 +63,8 @@ pytest-randomly = { version = ">=3.16.0", optional = true }
pytest-xdist = { version = ">=3.5.0", optional = true } pytest-xdist = { version = ">=3.5.0", optional = true }
anthropic = "^0.86.0" anthropic = "^0.86.0"
opencv-python = "^4.13.0.92" opencv-python = "^4.13.0.92"
websockets = ">=12.0"
pynostr = "*"
[tool.poetry.extras] [tool.poetry.extras]
telegram = ["python-telegram-bot"] telegram = ["python-telegram-bot"]

View File

@@ -5,7 +5,6 @@ Usage:
python scripts/add_pytest_markers.py python scripts/add_pytest_markers.py
""" """
import re
from pathlib import Path from pathlib import Path
@@ -93,7 +92,7 @@ def main():
print(f"⏭️ {rel_path:<50} (already marked)") print(f"⏭️ {rel_path:<50} (already marked)")
print(f"\n📊 Total files marked: {marked_count}") print(f"\n📊 Total files marked: {marked_count}")
print(f"\n✨ Pytest markers configured. Run 'pytest -m unit' to test specific categories.") print("\n✨ Pytest markers configured. Run 'pytest -m unit' to test specific categories.")
if __name__ == "__main__": if __name__ == "__main__":

View File

@@ -1,8 +1,7 @@
import os
def fix_l402_proxy(): def fix_l402_proxy():
path = "src/timmy_serve/l402_proxy.py" path = "src/timmy_serve/l402_proxy.py"
with open(path, "r") as f: with open(path) as f:
content = f.read() content = f.read()
# 1. Add hmac_secret to Macaroon dataclass # 1. Add hmac_secret to Macaroon dataclass
@@ -132,7 +131,7 @@ if _MACAROON_SECRET_RAW == _MACAROON_SECRET_DEFAULT or _HMAC_SECRET_RAW == _HMAC
def fix_xss(): def fix_xss():
# Fix chat_message.html # Fix chat_message.html
path = "src/dashboard/templates/partials/chat_message.html" path = "src/dashboard/templates/partials/chat_message.html"
with open(path, "r") as f: with open(path) as f:
content = f.read() content = f.read()
content = content.replace("{{ user_message }}", "{{ user_message | e }}") content = content.replace("{{ user_message }}", "{{ user_message | e }}")
content = content.replace("{{ response }}", "{{ response | e }}") content = content.replace("{{ response }}", "{{ response | e }}")
@@ -142,7 +141,7 @@ def fix_xss():
# Fix history.html # Fix history.html
path = "src/dashboard/templates/partials/history.html" path = "src/dashboard/templates/partials/history.html"
with open(path, "r") as f: with open(path) as f:
content = f.read() content = f.read()
content = content.replace("{{ msg.content }}", "{{ msg.content | e }}") content = content.replace("{{ msg.content }}", "{{ msg.content | e }}")
with open(path, "w") as f: with open(path, "w") as f:
@@ -150,7 +149,7 @@ def fix_xss():
# Fix briefing.html # Fix briefing.html
path = "src/dashboard/templates/briefing.html" path = "src/dashboard/templates/briefing.html"
with open(path, "r") as f: with open(path) as f:
content = f.read() content = f.read()
content = content.replace("{{ briefing.summary }}", "{{ briefing.summary | e }}") content = content.replace("{{ briefing.summary }}", "{{ briefing.summary | e }}")
with open(path, "w") as f: with open(path, "w") as f:
@@ -158,7 +157,7 @@ def fix_xss():
# Fix approval_card_single.html # Fix approval_card_single.html
path = "src/dashboard/templates/partials/approval_card_single.html" path = "src/dashboard/templates/partials/approval_card_single.html"
with open(path, "r") as f: with open(path) as f:
content = f.read() content = f.read()
content = content.replace("{{ item.title }}", "{{ item.title | e }}") content = content.replace("{{ item.title }}", "{{ item.title | e }}")
content = content.replace("{{ item.description }}", "{{ item.description | e }}") content = content.replace("{{ item.description }}", "{{ item.description | e }}")
@@ -168,7 +167,7 @@ def fix_xss():
# Fix marketplace.html # Fix marketplace.html
path = "src/dashboard/templates/marketplace.html" path = "src/dashboard/templates/marketplace.html"
with open(path, "r") as f: with open(path) as f:
content = f.read() content = f.read()
content = content.replace("{{ agent.name }}", "{{ agent.name | e }}") content = content.replace("{{ agent.name }}", "{{ agent.name | e }}")
content = content.replace("{{ agent.role }}", "{{ agent.role | e }}") content = content.replace("{{ agent.role }}", "{{ agent.role | e }}")

View File

@@ -8,8 +8,7 @@ from existing history so the LOOPSTAT panel isn't empty.
import json import json
import os import os
import re import re
import subprocess from datetime import UTC, datetime
from datetime import datetime, timezone
from pathlib import Path from pathlib import Path
from urllib.request import Request, urlopen from urllib.request import Request, urlopen
@@ -227,7 +226,7 @@ def generate_summary(entries: list[dict]):
stats["avg_duration"] = round(stats["total_duration"] / stats["count"]) stats["avg_duration"] = round(stats["total_duration"] / stats["count"])
summary = { summary = {
"updated_at": datetime.now(timezone.utc).isoformat(), "updated_at": datetime.now(UTC).isoformat(),
"window": len(recent), "window": len(recent),
"total_cycles": len(entries), "total_cycles": len(entries),
"success_rate": round(len(successes) / len(recent), 2) if recent else 0, "success_rate": round(len(successes) / len(recent), 2) if recent else 0,

View File

@@ -17,7 +17,7 @@ import importlib.util
import json import json
import sys import sys
import time import time
from datetime import datetime, timezone from datetime import UTC, datetime
from pathlib import Path from pathlib import Path
import requests import requests
@@ -216,7 +216,7 @@ def generate_markdown(all_results: dict, run_date: str) -> str:
lines.append(f"- **Result:** {bres.get('detail', bres.get('error', 'n/a'))}") lines.append(f"- **Result:** {bres.get('detail', bres.get('error', 'n/a'))}")
snippet = bres.get("code_snippet", "") snippet = bres.get("code_snippet", "")
if snippet: if snippet:
lines.append(f"- **Generated code snippet:**") lines.append("- **Generated code snippet:**")
lines.append(" ```python") lines.append(" ```python")
for ln in snippet.splitlines()[:8]: for ln in snippet.splitlines()[:8]:
lines.append(f" {ln}") lines.append(f" {ln}")
@@ -287,7 +287,7 @@ def parse_args() -> argparse.Namespace:
def main() -> int: def main() -> int:
args = parse_args() args = parse_args()
run_date = datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M UTC") run_date = datetime.now(UTC).strftime("%Y-%m-%d %H:%M UTC")
print(f"Model Benchmark Suite — {run_date}") print(f"Model Benchmark Suite — {run_date}")
print(f"Testing {len(args.models)} model(s): {', '.join(args.models)}") print(f"Testing {len(args.models)} model(s): {', '.join(args.models)}")

View File

@@ -46,8 +46,7 @@ import argparse
import json import json
import re import re
import subprocess import subprocess
import sys from datetime import UTC, datetime
from datetime import datetime, timezone
from pathlib import Path from pathlib import Path
REPO_ROOT = Path(__file__).resolve().parent.parent REPO_ROOT = Path(__file__).resolve().parent.parent
@@ -91,7 +90,7 @@ def _epoch_tag(now: datetime | None = None) -> tuple[str, dict]:
When the date rolls over, the counter resets to 1. When the date rolls over, the counter resets to 1.
""" """
if now is None: if now is None:
now = datetime.now(timezone.utc) now = datetime.now(UTC)
iso_cal = now.isocalendar() # (year, week, weekday) iso_cal = now.isocalendar() # (year, week, weekday)
week = iso_cal[1] week = iso_cal[1]
@@ -221,7 +220,7 @@ def update_summary() -> None:
for k, v in sorted(by_weekday.items())} for k, v in sorted(by_weekday.items())}
summary = { summary = {
"updated_at": datetime.now(timezone.utc).isoformat(), "updated_at": datetime.now(UTC).isoformat(),
"current_epoch": current_epoch, "current_epoch": current_epoch,
"window": len(recent), "window": len(recent),
"measured_cycles": len(measured), "measured_cycles": len(measured),
@@ -293,7 +292,7 @@ def main() -> None:
truly_success = args.success and args.main_green truly_success = args.success and args.main_green
# Generate epoch turnover tag # Generate epoch turnover tag
now = datetime.now(timezone.utc) now = datetime.now(UTC)
epoch_tag, epoch_parts = _epoch_tag(now) epoch_tag, epoch_parts = _epoch_tag(now)
entry = { entry = {

View File

@@ -11,7 +11,6 @@ Usage: python scripts/dev_server.py [--port PORT]
""" """
import argparse import argparse
import datetime
import os import os
import socket import socket
import subprocess import subprocess
@@ -81,8 +80,8 @@ def _ollama_url() -> str:
def _smoke_ollama(url: str) -> str: def _smoke_ollama(url: str) -> str:
"""Quick connectivity check against Ollama.""" """Quick connectivity check against Ollama."""
import urllib.request
import urllib.error import urllib.error
import urllib.request
try: try:
req = urllib.request.Request(url, method="GET") req = urllib.request.Request(url, method="GET")
@@ -101,14 +100,14 @@ def _print_banner(port: int) -> None:
hr = "" * 62 hr = "" * 62
print(flush=True) print(flush=True)
print(f" {hr}") print(f" {hr}")
print(f" ┃ Timmy Time — Development Server") print(" ┃ Timmy Time — Development Server")
print(f" {hr}") print(f" {hr}")
print() print()
print(f" Dashboard: http://localhost:{port}") print(f" Dashboard: http://localhost:{port}")
print(f" API docs: http://localhost:{port}/docs") print(f" API docs: http://localhost:{port}/docs")
print(f" Health: http://localhost:{port}/health") print(f" Health: http://localhost:{port}/health")
print() print()
print(f" ── Status ──────────────────────────────────────────────") print(" ── Status ──────────────────────────────────────────────")
print(f" Backend: {ollama_url} [{ollama_status}]") print(f" Backend: {ollama_url} [{ollama_status}]")
print(f" Version: {version}") print(f" Version: {version}")
print(f" Git commit: {git}") print(f" Git commit: {git}")

View File

@@ -319,9 +319,9 @@ def main(argv: list[str] | None = None) -> int:
print(f"Exported {count} training examples to: {args.output}") print(f"Exported {count} training examples to: {args.output}")
print() print()
print("Next steps:") print("Next steps:")
print(f" mkdir -p ~/timmy-lora-training") print(" mkdir -p ~/timmy-lora-training")
print(f" cp {args.output} ~/timmy-lora-training/train.jsonl") print(f" cp {args.output} ~/timmy-lora-training/train.jsonl")
print(f" python scripts/lora_finetune.py --data ~/timmy-lora-training") print(" python scripts/lora_finetune.py --data ~/timmy-lora-training")
else: else:
print("No training examples exported.") print("No training examples exported.")
return 1 return 1

View File

@@ -18,9 +18,8 @@ Called by: deep_triage.sh (before the LLM triage), timmy-loop.sh (every 50 cycle
from __future__ import annotations from __future__ import annotations
import json import json
import sys
from collections import defaultdict from collections import defaultdict
from datetime import datetime, timezone, timedelta from datetime import UTC, datetime, timedelta
from pathlib import Path from pathlib import Path
REPO_ROOT = Path(__file__).resolve().parent.parent REPO_ROOT = Path(__file__).resolve().parent.parent
@@ -52,7 +51,7 @@ def parse_ts(ts_str: str) -> datetime | None:
try: try:
dt = datetime.fromisoformat(ts_str.replace("Z", "+00:00")) dt = datetime.fromisoformat(ts_str.replace("Z", "+00:00"))
if dt.tzinfo is None: if dt.tzinfo is None:
dt = dt.replace(tzinfo=timezone.utc) dt = dt.replace(tzinfo=UTC)
return dt return dt
except (ValueError, TypeError): except (ValueError, TypeError):
return None return None
@@ -60,7 +59,7 @@ def parse_ts(ts_str: str) -> datetime | None:
def window(entries: list[dict], days: int) -> list[dict]: def window(entries: list[dict], days: int) -> list[dict]:
"""Filter entries to the last N days.""" """Filter entries to the last N days."""
cutoff = datetime.now(timezone.utc) - timedelta(days=days) cutoff = datetime.now(UTC) - timedelta(days=days)
result = [] result = []
for e in entries: for e in entries:
ts = parse_ts(e.get("timestamp", "")) ts = parse_ts(e.get("timestamp", ""))
@@ -344,7 +343,7 @@ def main() -> None:
recommendations = generate_recommendations(trends, types, repeats, outliers, triage_eff) recommendations = generate_recommendations(trends, types, repeats, outliers, triage_eff)
insights = { insights = {
"generated_at": datetime.now(timezone.utc).isoformat(), "generated_at": datetime.now(UTC).isoformat(),
"total_cycles_analyzed": len(cycles), "total_cycles_analyzed": len(cycles),
"trends": trends, "trends": trends,
"by_type": types, "by_type": types,
@@ -371,7 +370,7 @@ def main() -> None:
header += f" · current epoch: {latest_epoch}" header += f" · current epoch: {latest_epoch}"
print(header) print(header)
print(f"\n TRENDS (7d vs previous 7d):") print("\n TRENDS (7d vs previous 7d):")
r7 = trends["recent_7d"] r7 = trends["recent_7d"]
p7 = trends["previous_7d"] p7 = trends["previous_7d"]
print(f" Cycles: {r7['count']:>3d} (was {p7['count']})") print(f" Cycles: {r7['count']:>3d} (was {p7['count']})")
@@ -383,14 +382,14 @@ def main() -> None:
print(f" PRs merged: {r7['prs_merged']:>3d} (was {p7['prs_merged']})") print(f" PRs merged: {r7['prs_merged']:>3d} (was {p7['prs_merged']})")
print(f" Lines net: {r7['lines_net']:>+5d}") print(f" Lines net: {r7['lines_net']:>+5d}")
print(f"\n BY TYPE:") print("\n BY TYPE:")
for t, info in sorted(types.items(), key=lambda x: -x[1]["count"]): for t, info in sorted(types.items(), key=lambda x: -x[1]["count"]):
print(f" {t:12s} n={info['count']:>2d} " print(f" {t:12s} n={info['count']:>2d} "
f"ok={info['success_rate']*100:>3.0f}% " f"ok={info['success_rate']*100:>3.0f}% "
f"avg={info['avg_duration']//60}m{info['avg_duration']%60:02d}s") f"avg={info['avg_duration']//60}m{info['avg_duration']%60:02d}s")
if repeats: if repeats:
print(f"\n REPEAT FAILURES:") print("\n REPEAT FAILURES:")
for rf in repeats[:3]: for rf in repeats[:3]:
print(f" #{rf['issue']} failed {rf['failure_count']}x") print(f" #{rf['issue']} failed {rf['failure_count']}x")

View File

@@ -360,7 +360,7 @@ def main(argv: list[str] | None = None) -> int:
return rc return rc
# Default: train # Default: train
print(f"Starting LoRA fine-tuning") print("Starting LoRA fine-tuning")
print(f" Model: {model_path}") print(f" Model: {model_path}")
print(f" Data: {args.data}") print(f" Data: {args.data}")
print(f" Adapter path: {args.adapter_path}") print(f" Adapter path: {args.adapter_path}")

View File

@@ -9,11 +9,10 @@ This script runs before commits to catch issues early:
- Syntax errors in test files - Syntax errors in test files
""" """
import sys
import subprocess
from pathlib import Path
import ast import ast
import re import subprocess
import sys
from pathlib import Path
def check_imports(): def check_imports():
@@ -70,7 +69,7 @@ def check_test_syntax():
for test_file in tests_dir.rglob("test_*.py"): for test_file in tests_dir.rglob("test_*.py"):
try: try:
with open(test_file, "r") as f: with open(test_file) as f:
ast.parse(f.read()) ast.parse(f.read())
print(f"{test_file.relative_to(tests_dir.parent)} has valid syntax") print(f"{test_file.relative_to(tests_dir.parent)} has valid syntax")
except SyntaxError as e: except SyntaxError as e:
@@ -86,7 +85,7 @@ def check_platform_specific_tests():
# Check for hardcoded /Users/ paths in tests # Check for hardcoded /Users/ paths in tests
tests_dir = Path("tests").resolve() tests_dir = Path("tests").resolve()
for test_file in tests_dir.rglob("test_*.py"): for test_file in tests_dir.rglob("test_*.py"):
with open(test_file, "r") as f: with open(test_file) as f:
content = f.read() content = f.read()
if 'startswith("/Users/")' in content: if 'startswith("/Users/")' in content:
issues.append( issues.append(
@@ -110,7 +109,7 @@ def check_docker_availability():
if docker_test_files: if docker_test_files:
for test_file in docker_test_files: for test_file in docker_test_files:
with open(test_file, "r") as f: with open(test_file) as f:
content = f.read() content = f.read()
has_skipif = "@pytest.mark.skipif" in content or "pytestmark = pytest.mark.skipif" in content has_skipif = "@pytest.mark.skipif" in content or "pytestmark = pytest.mark.skipif" in content
if not has_skipif and "docker" in content.lower(): if not has_skipif and "docker" in content.lower():

View File

@@ -83,8 +83,8 @@ def test_tcp_connection(host: str, port: int, timeout: float) -> tuple[bool, soc
return True, sock return True, sock
except OSError as exc: except OSError as exc:
print(f" ✗ Connection failed: {exc}") print(f" ✗ Connection failed: {exc}")
print(f" Checklist:") print(" Checklist:")
print(f" - Is Bannerlord running with GABS mod enabled?") print(" - Is Bannerlord running with GABS mod enabled?")
print(f" - Is port {port} open in Windows Firewall?") print(f" - Is port {port} open in Windows Firewall?")
print(f" - Is the VM IP correct? (got: {host})") print(f" - Is the VM IP correct? (got: {host})")
return False, None return False, None
@@ -92,7 +92,7 @@ def test_tcp_connection(host: str, port: int, timeout: float) -> tuple[bool, soc
def test_ping(sock: socket.socket) -> bool: def test_ping(sock: socket.socket) -> bool:
"""PASS: JSON-RPC ping returns a 2.0 response.""" """PASS: JSON-RPC ping returns a 2.0 response."""
print(f"\n[2/4] JSON-RPC ping") print("\n[2/4] JSON-RPC ping")
try: try:
t0 = time.monotonic() t0 = time.monotonic()
resp = _rpc(sock, "ping", req_id=1) resp = _rpc(sock, "ping", req_id=1)
@@ -109,7 +109,7 @@ def test_ping(sock: socket.socket) -> bool:
def test_game_state(sock: socket.socket) -> bool: def test_game_state(sock: socket.socket) -> bool:
"""PASS: get_game_state returns a result (game must be in a campaign).""" """PASS: get_game_state returns a result (game must be in a campaign)."""
print(f"\n[3/4] get_game_state call") print("\n[3/4] get_game_state call")
try: try:
t0 = time.monotonic() t0 = time.monotonic()
resp = _rpc(sock, "get_game_state", req_id=2) resp = _rpc(sock, "get_game_state", req_id=2)
@@ -120,7 +120,7 @@ def test_game_state(sock: socket.socket) -> bool:
if code == -32601: if code == -32601:
# Method not found — GABS version may not expose this method # Method not found — GABS version may not expose this method
print(f" ~ Method not available ({elapsed_ms:.1f} ms): {msg}") print(f" ~ Method not available ({elapsed_ms:.1f} ms): {msg}")
print(f" This is acceptable if game is not yet in a campaign.") print(" This is acceptable if game is not yet in a campaign.")
return True return True
print(f" ✗ RPC error ({elapsed_ms:.1f} ms) [{code}]: {msg}") print(f" ✗ RPC error ({elapsed_ms:.1f} ms) [{code}]: {msg}")
return False return False
@@ -191,7 +191,7 @@ def main() -> int:
args = parser.parse_args() args = parser.parse_args()
print("=" * 60) print("=" * 60)
print(f"GABS Connectivity Test Suite") print("GABS Connectivity Test Suite")
print(f"Target: {args.host}:{args.port}") print(f"Target: {args.host}:{args.port}")
print(f"Timeout: {args.timeout}s") print(f"Timeout: {args.timeout}s")
print("=" * 60) print("=" * 60)

View File

@@ -150,7 +150,7 @@ def test_model_available(model: str) -> bool:
def test_basic_response(model: str) -> bool: def test_basic_response(model: str) -> bool:
"""PASS: model responds coherently to a simple prompt.""" """PASS: model responds coherently to a simple prompt."""
print(f"\n[2/5] Basic response test") print("\n[2/5] Basic response test")
messages = [ messages = [
{"role": "user", "content": "Reply with exactly: HERMES_OK"}, {"role": "user", "content": "Reply with exactly: HERMES_OK"},
] ]
@@ -188,7 +188,7 @@ def test_memory_usage() -> bool:
def test_tool_calling(model: str) -> bool: def test_tool_calling(model: str) -> bool:
"""PASS: model produces a tool_calls response (not raw text) for a tool-use prompt.""" """PASS: model produces a tool_calls response (not raw text) for a tool-use prompt."""
print(f"\n[4/5] Tool-calling test") print("\n[4/5] Tool-calling test")
messages = [ messages = [
{ {
"role": "user", "role": "user",
@@ -236,7 +236,7 @@ def test_tool_calling(model: str) -> bool:
def test_timmy_persona(model: str) -> bool: def test_timmy_persona(model: str) -> bool:
"""PASS: model accepts a Timmy persona system prompt and responds in-character.""" """PASS: model accepts a Timmy persona system prompt and responds in-character."""
print(f"\n[5/5] Timmy-persona smoke test") print("\n[5/5] Timmy-persona smoke test")
messages = [ messages = [
{ {
"role": "system", "role": "system",

View File

@@ -26,7 +26,7 @@ import argparse
import json import json
import sys import sys
import time import time
from dataclasses import dataclass, field from dataclasses import dataclass
from typing import Any from typing import Any
try: try:

View File

@@ -16,7 +16,7 @@ import json
import os import os
import re import re
import sys import sys
from datetime import datetime, timezone from datetime import UTC, datetime
from pathlib import Path from pathlib import Path
# ── Config ────────────────────────────────────────────────────────────── # ── Config ──────────────────────────────────────────────────────────────
@@ -277,7 +277,7 @@ def update_quarantine(scored: list[dict]) -> list[dict]:
"""Auto-quarantine issues that have failed >= 2 times. Returns filtered list.""" """Auto-quarantine issues that have failed >= 2 times. Returns filtered list."""
failures = load_cycle_failures() failures = load_cycle_failures()
quarantine = load_quarantine() quarantine = load_quarantine()
now = datetime.now(timezone.utc).isoformat() now = datetime.now(UTC).isoformat()
filtered = [] filtered = []
for item in scored: for item in scored:
@@ -366,7 +366,7 @@ def run_triage() -> list[dict]:
backup_data = QUEUE_BACKUP_FILE.read_text() backup_data = QUEUE_BACKUP_FILE.read_text()
json.loads(backup_data) # Validate backup json.loads(backup_data) # Validate backup
QUEUE_FILE.write_text(backup_data) QUEUE_FILE.write_text(backup_data)
print(f"[triage] Restored queue.json from backup") print("[triage] Restored queue.json from backup")
except (json.JSONDecodeError, OSError) as restore_exc: except (json.JSONDecodeError, OSError) as restore_exc:
print(f"[triage] ERROR: Backup restore failed: {restore_exc}", file=sys.stderr) print(f"[triage] ERROR: Backup restore failed: {restore_exc}", file=sys.stderr)
# Write empty list as last resort # Write empty list as last resort
@@ -377,7 +377,7 @@ def run_triage() -> list[dict]:
# Write retro entry # Write retro entry
retro_entry = { retro_entry = {
"timestamp": datetime.now(timezone.utc).isoformat(), "timestamp": datetime.now(UTC).isoformat(),
"total_open": len(all_issues), "total_open": len(all_issues),
"scored": len(scored), "scored": len(scored),
"ready": len(ready), "ready": len(ready),

View File

@@ -35,9 +35,9 @@ from dashboard.routes.chat_api_v1 import router as chat_api_v1_router
from dashboard.routes.daily_run import router as daily_run_router from dashboard.routes.daily_run import router as daily_run_router
from dashboard.routes.db_explorer import router as db_explorer_router from dashboard.routes.db_explorer import router as db_explorer_router
from dashboard.routes.discord import router as discord_router from dashboard.routes.discord import router as discord_router
from dashboard.routes.energy import router as energy_router
from dashboard.routes.experiments import router as experiments_router from dashboard.routes.experiments import router as experiments_router
from dashboard.routes.grok import router as grok_router from dashboard.routes.grok import router as grok_router
from dashboard.routes.energy import router as energy_router
from dashboard.routes.health import router as health_router from dashboard.routes.health import router as health_router
from dashboard.routes.hermes import router as hermes_router from dashboard.routes.hermes import router as hermes_router
from dashboard.routes.loop_qa import router as loop_qa_router from dashboard.routes.loop_qa import router as loop_qa_router
@@ -48,6 +48,7 @@ from dashboard.routes.models import router as models_router
from dashboard.routes.nexus import router as nexus_router from dashboard.routes.nexus import router as nexus_router
from dashboard.routes.quests import router as quests_router from dashboard.routes.quests import router as quests_router
from dashboard.routes.scorecards import router as scorecards_router from dashboard.routes.scorecards import router as scorecards_router
from dashboard.routes.self_correction import router as self_correction_router
from dashboard.routes.sovereignty_metrics import router as sovereignty_metrics_router from dashboard.routes.sovereignty_metrics import router as sovereignty_metrics_router
from dashboard.routes.sovereignty_ws import router as sovereignty_ws_router from dashboard.routes.sovereignty_ws import router as sovereignty_ws_router
from dashboard.routes.spark import router as spark_router from dashboard.routes.spark import router as spark_router
@@ -55,7 +56,6 @@ from dashboard.routes.system import router as system_router
from dashboard.routes.tasks import router as tasks_router from dashboard.routes.tasks import router as tasks_router
from dashboard.routes.telegram import router as telegram_router from dashboard.routes.telegram import router as telegram_router
from dashboard.routes.thinking import router as thinking_router from dashboard.routes.thinking import router as thinking_router
from dashboard.routes.self_correction import router as self_correction_router
from dashboard.routes.three_strike import router as three_strike_router from dashboard.routes.three_strike import router as three_strike_router
from dashboard.routes.tools import router as tools_router from dashboard.routes.tools import router as tools_router
from dashboard.routes.tower import router as tower_router from dashboard.routes.tower import router as tower_router

View File

View File

@@ -0,0 +1,154 @@
# TODO: This code should be moved to the timmy-nostr repository once it's available.
# See ADR-024 for more details.
import json
import logging
from typing import Any
import websockets
from pynostr.event import Event
from pynostr.key import PrivateKey
logger = logging.getLogger(__name__)
class NostrClient:
    """Async client for a set of Nostr relays.

    Implements the pieces of the protocol needed for agent partnerships:

    * NIP-89 — discovery of agent "capability cards" (kind 31990 events)
      via :meth:`find_capability_cards`.
    * NIP-90 — data-vending-machine job requests (kinds 5000-5999) via
      :meth:`create_job_request`.

    A client constructed without a private key is read-only: it can
    connect and subscribe, but cannot sign or publish job requests.
    """

    def __init__(self, relays: list[str], private_key_hex: str | None = None):
        """Initialise the client.

        Args:
            relays: WebSocket URLs of the relays to talk to.
            private_key_hex: Optional hex-encoded Nostr private key used to
                sign outgoing events. When omitted, ``private_key`` and
                ``public_key`` are ``None``.
        """
        self.relays = relays
        # Relay URL -> open websocket connection (populated by connect()).
        self._connections: dict[str, websockets.WebSocketClientProtocol] = {}
        if private_key_hex:
            self.private_key = PrivateKey.from_hex(private_key_hex)
            self.public_key = self.private_key.public_key
        else:
            self.private_key = None
            self.public_key = None

    async def connect(self):
        """Open a websocket connection to every configured relay.

        Best effort: a relay that cannot be reached is logged and skipped,
        so one dead relay does not prevent the others from connecting.
        """
        for relay in self.relays:
            try:
                conn = await websockets.connect(relay)
                self._connections[relay] = conn
                logger.info("Connected to Nostr relay: %s", relay)
            except Exception as e:
                logger.error("Failed to connect to Nostr relay %s: %s", relay, e)

    async def disconnect(self):
        """Close all open relay connections and forget them."""
        for relay, conn in self._connections.items():
            try:
                await conn.close()
                logger.info("Disconnected from Nostr relay: %s", relay)
            except Exception as e:
                logger.error("Failed to disconnect from Nostr relay %s: %s", relay, e)
        self._connections = {}

    async def subscribe_for_events(
        self,
        subscription_id: str,
        filters: list[dict[str, Any]],
        unsubscribe_on_eose: bool = True,
    ):
        """Subscribe to events matching *filters* and yield them as dicts.

        Relays are drained sequentially: events from the first relay are
        yielded before the next relay is queried. The same event may be
        yielded more than once if several relays hold it — no deduplication
        is performed here.

        Args:
            subscription_id: Identifier sent with the ``REQ`` message.
            filters: Nostr (NIP-01) filter objects.
            unsubscribe_on_eose: When True (default), send ``CLOSE`` and stop
                reading from a relay once it signals end-of-stored-events;
                when False, keep streaming live events from that relay.
        """
        for relay, conn in self._connections.items():
            try:
                request = ["REQ", subscription_id]
                request.extend(filters)
                await conn.send(json.dumps(request))
                logger.info("Subscribed to events on %s with sub_id: %s", relay, subscription_id)
                async for message in conn:
                    message_json = json.loads(message)
                    message_type = message_json[0]
                    if message_type == "EVENT":
                        # Relay messages are ["EVENT", <sub_id>, <event dict>].
                        yield message_json[2]
                    elif message_type == "EOSE":
                        logger.info("End of stored events for sub_id: %s on %s", subscription_id, relay)
                        if unsubscribe_on_eose:
                            await self.unsubscribe(subscription_id, relay)
                            # Move on to the next relay; live events are not wanted.
                            break
            except Exception as e:
                logger.error("Failed to subscribe to events on %s: %s", relay, e)

    async def unsubscribe(self, subscription_id: str, relay: str):
        """Send a ``CLOSE`` for *subscription_id* to a single relay."""
        if relay not in self._connections:
            logger.warning(f"Not connected to relay: {relay}")
            return
        conn = self._connections[relay]
        try:
            request = ["CLOSE", subscription_id]
            await conn.send(json.dumps(request))
            logger.info("Unsubscribed from sub_id: %s on %s", subscription_id, relay)
        except Exception as e:
            logger.error("Failed to unsubscribe from %s: %s", relay, e)

    async def publish_event(self, event: Event):
        """Publish a signed event to every connected relay (best effort).

        NOTE(review): relies on pynostr ``Event.to_dict()`` producing the
        NIP-01 wire representation — confirm against the pynostr API.
        """
        for relay, conn in self._connections.items():
            try:
                request = ["EVENT", event.to_dict()]
                await conn.send(json.dumps(request))
                logger.info("Published event %s to %s", event.id, relay)
            except Exception as e:
                logger.error("Failed to publish event to %s: %s", relay, e)

    # ── NIP-89 ──────────────────────────────────────────────────────────
    async def find_capability_cards(self, kinds: list[int] | None = None):
        """Yield capability-card events (kind 31990) from the relays.

        Kind 31990 is the NIP-89 "handler information" kind that agents use
        to advertise which job kinds they can handle.

        Args:
            kinds: Optional job kinds; when given, only cards whose ``k``
                tag matches one of them are requested (``#k`` tag filter).
        """
        filters: list[dict[str, Any]] = [{"kinds": [31990]}]
        if kinds:
            filters[0]["#k"] = [str(k) for k in kinds]
        sub_id = "capability-card-finder"
        async for event in self.subscribe_for_events(sub_id, filters):
            yield event

    # ── NIP-90 ──────────────────────────────────────────────────────────
    async def create_job_request(
        self,
        kind: int,
        content: str,
        tags: list[list[str]] | None = None,
    ) -> Event:
        """Create, sign and publish a NIP-90 job request.

        Args:
            kind: Job-request kind; must be in the NIP-90 range 5000-5999.
            content: Request payload.
            tags: Optional Nostr tags for the event.

        Returns:
            The signed event that was published.

        Raises:
            RuntimeError: If the client has no private key to sign with.
            ValueError: If *kind* is outside the NIP-90 job-request range.
        """
        if not self.private_key:
            # Narrower than the bare Exception raised previously; still
            # caught by any caller handling Exception.
            raise RuntimeError("Cannot create job request without a private key.")
        if not 5000 <= kind <= 5999:
            raise ValueError("Job request kind must be between 5000 and 5999.")
        event = Event(
            pubkey=self.public_key.hex(),
            kind=kind,
            content=content,
            tags=tags or [],
        )
        event.sign(self.private_key.hex())
        await self.publish_event(event)
        return event

View File

@@ -19,7 +19,6 @@ Refs: #1009
""" """
import asyncio import asyncio
import json
import logging import logging
import subprocess import subprocess
import time import time

View File

@@ -24,8 +24,8 @@ from infrastructure.models.registry import (
model_registry, model_registry,
) )
from infrastructure.models.router import ( from infrastructure.models.router import (
TierLabel,
TieredModelRouter, TieredModelRouter,
TierLabel,
classify_tier, classify_tier,
get_tiered_router, get_tiered_router,
) )

View File

@@ -27,7 +27,6 @@ References:
- Issue #882 — Model Tiering Router: Local 8B / Hermes 70B / Cloud API Cascade - Issue #882 — Model Tiering Router: Local 8B / Hermes 70B / Cloud API Cascade
""" """
import asyncio
import logging import logging
import re import re
import time import time

View File

@@ -20,13 +20,11 @@ Usage::
from __future__ import annotations from __future__ import annotations
import json
import logging import logging
import sqlite3 import sqlite3
import uuid import uuid
from collections.abc import Generator from collections.abc import Generator
from contextlib import closing, contextmanager from contextlib import closing, contextmanager
from datetime import UTC, datetime
from pathlib import Path from pathlib import Path
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)

View File

@@ -21,7 +21,6 @@ import base64
import json import json
import logging import logging
from datetime import UTC, datetime from datetime import UTC, datetime
from pathlib import Path
from typing import Any from typing import Any
import httpx import httpx

View File

@@ -22,21 +22,20 @@ import sqlite3
from datetime import datetime from datetime import datetime
from pathlib import Path from pathlib import Path
from timmy.thinking._db import Thought, _get_conn
from timmy.thinking.engine import ThinkingEngine
from timmy.thinking.seeds import (
SEED_TYPES,
_SENSITIVE_PATTERNS,
_META_OBSERVATION_PHRASES,
_THINK_TAG_RE,
_THINKING_PROMPT,
)
# Re-export HOT_MEMORY_PATH and SOUL_PATH so existing patch targets continue to work. # Re-export HOT_MEMORY_PATH and SOUL_PATH so existing patch targets continue to work.
# Tests that patch "timmy.thinking.HOT_MEMORY_PATH" or "timmy.thinking.SOUL_PATH" # Tests that patch "timmy.thinking.HOT_MEMORY_PATH" or "timmy.thinking.SOUL_PATH"
# should instead patch "timmy.thinking._snapshot.HOT_MEMORY_PATH" etc., but these # should instead patch "timmy.thinking._snapshot.HOT_MEMORY_PATH" etc., but these
# re-exports are kept for any code that reads them from the top-level namespace. # re-exports are kept for any code that reads them from the top-level namespace.
from timmy.memory_system import HOT_MEMORY_PATH, SOUL_PATH # noqa: F401 from timmy.memory_system import HOT_MEMORY_PATH, SOUL_PATH # noqa: F401
from timmy.thinking._db import Thought, _get_conn
from timmy.thinking.engine import ThinkingEngine
from timmy.thinking.seeds import (
_META_OBSERVATION_PHRASES,
_SENSITIVE_PATTERNS,
_THINK_TAG_RE,
_THINKING_PROMPT,
SEED_TYPES,
)
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)

View File

@@ -4,7 +4,6 @@ import logging
from pathlib import Path from pathlib import Path
from config import settings from config import settings
from timmy.thinking.seeds import _META_OBSERVATION_PHRASES, _SENSITIVE_PATTERNS from timmy.thinking.seeds import _META_OBSERVATION_PHRASES, _SENSITIVE_PATTERNS
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)

View File

@@ -5,11 +5,11 @@ import random
from datetime import UTC, datetime from datetime import UTC, datetime
from timmy.thinking.seeds import ( from timmy.thinking.seeds import (
SEED_TYPES,
_CREATIVE_SEEDS, _CREATIVE_SEEDS,
_EXISTENTIAL_SEEDS, _EXISTENTIAL_SEEDS,
_OBSERVATION_SEEDS, _OBSERVATION_SEEDS,
_SOVEREIGNTY_SEEDS, _SOVEREIGNTY_SEEDS,
SEED_TYPES,
) )
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)

View File

@@ -1,7 +1,7 @@
"""System snapshot and memory context mixin for the thinking engine.""" """System snapshot and memory context mixin for the thinking engine."""
import logging import logging
from datetime import UTC, datetime from datetime import datetime
from timmy.memory_system import HOT_MEMORY_PATH, SOUL_PATH from timmy.memory_system import HOT_MEMORY_PATH, SOUL_PATH

View File

@@ -7,8 +7,7 @@ from difflib import SequenceMatcher
from pathlib import Path from pathlib import Path
from config import settings from config import settings
from timmy.thinking._db import _DEFAULT_DB, Thought, _get_conn, _row_to_thought
from timmy.thinking._db import Thought, _DEFAULT_DB, _get_conn, _row_to_thought
from timmy.thinking._distillation import _DistillationMixin from timmy.thinking._distillation import _DistillationMixin
from timmy.thinking._issue_filing import _IssueFilingMixin from timmy.thinking._issue_filing import _IssueFilingMixin
from timmy.thinking._seeds_mixin import _SeedsMixin from timmy.thinking._seeds_mixin import _SeedsMixin

View File

@@ -0,0 +1,93 @@
import json
import pytest
import websockets
from pynostr.key import PrivateKey
from src.infrastructure.clients.nostr_client import NostrClient
@pytest.mark.asyncio
async def test_nostr_client_connect_disconnect():
    """connect() opens one live socket per relay; disconnect() drops them all."""
    # Talks to a real public relay, so this test requires network access.
    relay_urls = ["wss://relay.damus.io"]
    client = NostrClient(relay_urls)

    await client.connect()
    assert len(client._connections) == 1
    for url in relay_urls:
        conn = client._connections.get(url)
        assert conn is not None
        assert conn.state == websockets.protocol.State.OPEN

    await client.disconnect()
    assert len(client._connections) == 0
@pytest.mark.asyncio
async def test_find_capability_cards():
    """find_capability_cards() yields kind-31990 events from the subscription.

    The subscription layer is mocked, so no relay connection is opened —
    this keeps the test hermetic (the original connected to a live relay
    even though the network data was never used).
    """
    relays = ["wss://relay.damus.io"]
    client = NostrClient(relays)

    # A dummy capability card event; in a real scenario this would be
    # published by another agent.
    dummy_event = {
        "id": "faked_id",
        "pubkey": "faked_pubkey",
        "created_at": 1678886400,
        "kind": 31990,
        "tags": [
            ["d", "test-platform"],
            ["k", "5000"]
        ],
        "content": json.dumps({
            "name": "Test Agent",
            "about": "An agent for testing purposes"
        }),
        "sig": "faked_sig"
    }

    # Mock the subscribe layer to yield exactly the dummy event.
    async def mock_subscribe_for_events(subscription_id, filters, unsubscribe_on_eose=True):
        yield dummy_event

    client.subscribe_for_events = mock_subscribe_for_events

    # Collect everything so we can assert the event was actually yielded;
    # the original asserted inside the loop, which silently passes when
    # the generator yields nothing.
    seen = [event async for event in client.find_capability_cards()]
    assert seen == [dummy_event]
    assert seen[0]["kind"] == 31990
@pytest.mark.asyncio
async def test_create_job_request():
    """create_job_request() builds, signs and publishes a NIP-90 event.

    publish_event is mocked, so no relay connection is opened — this keeps
    the test hermetic (the original connected to a live relay even though
    nothing was sent over it).
    """
    private_key = PrivateKey()
    relays = ["wss://relay.damus.io"]
    client = NostrClient(relays, private_key.hex())

    # Capture published events instead of hitting the network.
    published_events = []

    async def mock_publish_event(event):
        published_events.append(event)

    client.publish_event = mock_publish_event

    kind = 5001
    content = "Test job request"
    tags = [["d", "test-job"]]
    event = await client.create_job_request(kind, content, tags)

    assert event.kind == kind
    assert event.content == content
    assert event.tags == tags
    assert event.pubkey == private_key.public_key.hex()
    assert event.verify()
    assert len(published_events) == 1
    # The exact object created must be the one published (identity, not
    # type-dependent equality).
    assert published_events[0] is event

View File

@@ -27,7 +27,6 @@ from infrastructure.router.cascade import (
ProviderStatus, ProviderStatus,
) )
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------
# Helpers # Helpers
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------

View File

@@ -10,13 +10,13 @@ Covers:
- "Plan the optimal path to become Hortator" → LOCAL_HEAVY - "Plan the optimal path to become Hortator" → LOCAL_HEAVY
""" """
from unittest.mock import AsyncMock, MagicMock, patch from unittest.mock import AsyncMock, MagicMock
import pytest import pytest
from infrastructure.models.router import ( from infrastructure.models.router import (
TierLabel,
TieredModelRouter, TieredModelRouter,
TierLabel,
_is_low_quality, _is_low_quality,
classify_tier, classify_tier,
get_tiered_router, get_tiered_router,

View File

@@ -5,7 +5,6 @@ from __future__ import annotations
from datetime import UTC, datetime, timedelta from datetime import UTC, datetime, timedelta
from unittest.mock import AsyncMock, MagicMock, patch from unittest.mock import AsyncMock, MagicMock, patch
import httpx
import pytest import pytest
from timmy.backlog_triage import ( from timmy.backlog_triage import (
@@ -28,7 +27,6 @@ from timmy.backlog_triage import (
score_issue, score_issue,
) )
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------
# Helpers # Helpers
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------

View File

@@ -4,7 +4,6 @@ from unittest.mock import AsyncMock, MagicMock, patch
import pytest import pytest
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------
# exceeds_local_capacity # exceeds_local_capacity
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------

View File

@@ -34,7 +34,6 @@ from timmy.quest_system import (
update_quest_progress, update_quest_progress,
) )
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------
# Helpers # Helpers
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------

View File

@@ -15,7 +15,6 @@ if "serpapi" not in sys.modules:
from timmy.research_tools import get_llm_client, google_web_search # noqa: E402 from timmy.research_tools import get_llm_client, google_web_search # noqa: E402
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------
# google_web_search # google_web_search
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------

View File

@@ -6,8 +6,7 @@ Refs: #957 (Session Sovereignty Report Generator)
import base64 import base64
import json import json
import time import time
from datetime import UTC, datetime from datetime import UTC
from pathlib import Path
from unittest.mock import MagicMock, patch from unittest.mock import MagicMock, patch
import pytest import pytest
@@ -18,14 +17,12 @@ from timmy.sovereignty.session_report import (
_format_duration, _format_duration,
_gather_session_data, _gather_session_data,
_gather_sovereignty_data, _gather_sovereignty_data,
_render_markdown,
commit_report, commit_report,
generate_and_commit_report, generate_and_commit_report,
generate_report, generate_report,
mark_session_start, mark_session_start,
) )
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------
# _format_duration # _format_duration
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------

View File

@@ -7,11 +7,8 @@ from __future__ import annotations
from unittest.mock import MagicMock, patch from unittest.mock import MagicMock, patch
import pytest
from timmy.tools.search import _extract_crawl_content, scrape_url, web_search from timmy.tools.search import _extract_crawl_content, scrape_url, web_search
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------
# Helpers # Helpers
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------

View File

@@ -12,9 +12,7 @@ import argparse
import json import json
import sys import sys
from pathlib import Path from pathlib import Path
from unittest.mock import MagicMock, patch from unittest.mock import patch
import pytest
# Add timmy_automations to path for imports # Add timmy_automations to path for imports
_TA_PATH = Path(__file__).resolve().parent.parent.parent / "timmy_automations" / "daily_run" _TA_PATH = Path(__file__).resolve().parent.parent.parent / "timmy_automations" / "daily_run"

View File

@@ -7,7 +7,6 @@ falls back to the Ollama backend without crashing.
Refs #1284 Refs #1284
""" """
import sys
from unittest.mock import MagicMock, patch from unittest.mock import MagicMock, patch
import pytest import pytest

View File

@@ -11,11 +11,9 @@ from unittest.mock import MagicMock, patch
import pytest import pytest
from infrastructure.energy.monitor import ( from infrastructure.energy.monitor import (
_DEFAULT_MODEL_SIZE_GB,
EnergyBudgetMonitor, EnergyBudgetMonitor,
InferenceSample, InferenceSample,
_DEFAULT_MODEL_SIZE_GB,
_EFFICIENCY_SCORE_CEILING,
_WATTS_PER_GB_HEURISTIC,
) )

View File

@@ -1,9 +1,5 @@
"""Unit tests for infrastructure.self_correction.""" """Unit tests for infrastructure.self_correction."""
import os
import tempfile
from pathlib import Path
from unittest.mock import patch
import pytest import pytest

View File

@@ -13,10 +13,9 @@ Usage:
import argparse import argparse
import dataclasses import dataclasses
import json import json
import os
import sys import sys
import time import time
from datetime import datetime, timezone from datetime import UTC, datetime
from pathlib import Path from pathlib import Path
try: try:
@@ -28,12 +27,14 @@ except ImportError:
# Add parent dir to path so levels can be imported # Add parent dir to path so levels can be imported
sys.path.insert(0, str(Path(__file__).parent)) sys.path.insert(0, str(Path(__file__).parent))
from levels import level_0_coin_flip from levels import (
from levels import level_1_tic_tac_toe level_0_coin_flip,
from levels import level_2_resource_mgmt level_1_tic_tac_toe,
from levels import level_3_battle_tactics level_2_resource_mgmt,
from levels import level_4_trade_route level_3_battle_tactics,
from levels import level_5_mini_campaign level_4_trade_route,
level_5_mini_campaign,
)
ALL_LEVELS = [ ALL_LEVELS = [
level_0_coin_flip, level_0_coin_flip,
@@ -86,7 +87,7 @@ def run_benchmark(
levels_to_run = list(range(len(ALL_LEVELS))) levels_to_run = list(range(len(ALL_LEVELS)))
print(f"\n{'=' * 60}") print(f"\n{'=' * 60}")
print(f" Timmy Cognitive Benchmark — Project Bannerlord M0") print(" Timmy Cognitive Benchmark — Project Bannerlord M0")
print(f"{'=' * 60}") print(f"{'=' * 60}")
print(f" Model: {model}") print(f" Model: {model}")
print(f" Levels: {levels_to_run}") print(f" Levels: {levels_to_run}")
@@ -100,7 +101,7 @@ def run_benchmark(
"model": model, "model": model,
"skipped": True, "skipped": True,
"reason": f"Model '{model}' not available", "reason": f"Model '{model}' not available",
"timestamp": datetime.now(timezone.utc).isoformat(), "timestamp": datetime.now(UTC).isoformat(),
} }
else: else:
print(f" ERROR: Model '{model}' not found in Ollama.", file=sys.stderr) print(f" ERROR: Model '{model}' not found in Ollama.", file=sys.stderr)
@@ -110,7 +111,7 @@ def run_benchmark(
results = { results = {
"model": model, "model": model,
"timestamp": datetime.now(timezone.utc).isoformat(), "timestamp": datetime.now(UTC).isoformat(),
"skipped": False, "skipped": False,
"levels": {}, "levels": {},
"summary": {}, "summary": {},

View File

@@ -21,11 +21,10 @@ import json
import os import os
import sys import sys
from dataclasses import dataclass, field from dataclasses import dataclass, field
from datetime import datetime, timezone from datetime import UTC, datetime
from pathlib import Path from pathlib import Path
from typing import Any
from urllib.request import Request, urlopen
from urllib.error import HTTPError, URLError from urllib.error import HTTPError, URLError
from urllib.request import Request, urlopen
# ── Configuration ───────────────────────────────────────────────────────── # ── Configuration ─────────────────────────────────────────────────────────
@@ -260,7 +259,7 @@ def score_issue_for_path(issue: dict) -> int:
if updated_at: if updated_at:
try: try:
updated = datetime.fromisoformat(updated_at.replace("Z", "+00:00")) updated = datetime.fromisoformat(updated_at.replace("Z", "+00:00"))
days_old = (datetime.now(timezone.utc) - updated).days days_old = (datetime.now(UTC) - updated).days
if days_old < 7: if days_old < 7:
score += 2 score += 2
elif days_old < 30: elif days_old < 30:
@@ -388,7 +387,7 @@ def build_golden_path(
4. One more micro-fix or docs (closure) 4. One more micro-fix or docs (closure)
""" """
path = GoldenPath( path = GoldenPath(
generated_at=datetime.now(timezone.utc).isoformat(), generated_at=datetime.now(UTC).isoformat(),
target_minutes=target_minutes, target_minutes=target_minutes,
) )
@@ -478,7 +477,7 @@ def generate_golden_path(
if not client.is_available(): if not client.is_available():
# Return empty path with error indication # Return empty path with error indication
return GoldenPath( return GoldenPath(
generated_at=datetime.now(timezone.utc).isoformat(), generated_at=datetime.now(UTC).isoformat(),
target_minutes=target_minutes, target_minutes=target_minutes,
items=[], items=[],
) )

View File

@@ -17,11 +17,11 @@ import json
import os import os
import sys import sys
from dataclasses import dataclass, field from dataclasses import dataclass, field
from datetime import datetime, timedelta, timezone from datetime import UTC, datetime, timedelta
from pathlib import Path from pathlib import Path
from typing import Any from typing import Any
from urllib.request import Request, urlopen
from urllib.error import HTTPError, URLError from urllib.error import HTTPError, URLError
from urllib.request import Request, urlopen
# ── Configuration ───────────────────────────────────────────────────────── # ── Configuration ─────────────────────────────────────────────────────────
@@ -327,7 +327,7 @@ def check_critical_issues(client: GiteaClient, config: dict) -> IssueSignal:
issues=all_critical[:10], # Limit stored issues issues=all_critical[:10], # Limit stored issues
) )
except (HTTPError, URLError) as exc: except (HTTPError, URLError):
return IssueSignal( return IssueSignal(
count=0, count=0,
p0_count=0, p0_count=0,
@@ -419,7 +419,7 @@ def check_token_economy(config: dict) -> TokenEconomySignal:
try: try:
# Read last 24 hours of transactions # Read last 24 hours of transactions
since = datetime.now(timezone.utc) - timedelta(hours=24) since = datetime.now(UTC) - timedelta(hours=24)
recent_mint = 0 recent_mint = 0
recent_burn = 0 recent_burn = 0
@@ -511,7 +511,7 @@ def generate_snapshot(config: dict, token: str | None) -> HealthSnapshot:
overall = calculate_overall_status(ci, issues, flakiness) overall = calculate_overall_status(ci, issues, flakiness)
return HealthSnapshot( return HealthSnapshot(
timestamp=datetime.now(timezone.utc).isoformat(), timestamp=datetime.now(UTC).isoformat(),
overall_status=overall, overall_status=overall,
ci=ci, ci=ci,
issues=issues, issues=issues,

View File

@@ -19,11 +19,11 @@ import argparse
import json import json
import os import os
import sys import sys
from datetime import datetime, timedelta, timezone from datetime import UTC, datetime, timedelta
from pathlib import Path from pathlib import Path
from typing import Any from typing import Any
from urllib.request import Request, urlopen
from urllib.error import HTTPError, URLError from urllib.error import HTTPError, URLError
from urllib.request import Request, urlopen
# ── Token Economy Integration ────────────────────────────────────────────── # ── Token Economy Integration ──────────────────────────────────────────────
# Import token rules helpers for tracking Daily Run rewards # Import token rules helpers for tracking Daily Run rewards
@@ -31,12 +31,11 @@ from urllib.error import HTTPError, URLError
sys.path.insert( sys.path.insert(
0, str(Path(__file__).resolve().parent.parent) 0, str(Path(__file__).resolve().parent.parent)
) )
from utils.token_rules import TokenRules, compute_token_reward
# Health snapshot lives in the same package # Health snapshot lives in the same package
from health_snapshot import generate_snapshot as _generate_health_snapshot from health_snapshot import generate_snapshot as _generate_health_snapshot
from health_snapshot import get_token as _hs_get_token from health_snapshot import get_token as _hs_get_token
from health_snapshot import load_config as _hs_load_config from health_snapshot import load_config as _hs_load_config
from utils.token_rules import TokenRules, compute_token_reward
# ── Configuration ───────────────────────────────────────────────────────── # ── Configuration ─────────────────────────────────────────────────────────
@@ -284,7 +283,7 @@ def generate_agenda(issues: list[dict], config: dict) -> dict:
items.append(item) items.append(item)
return { return {
"generated_at": datetime.now(timezone.utc).isoformat(), "generated_at": datetime.now(UTC).isoformat(),
"time_budget_minutes": agenda_time, "time_budget_minutes": agenda_time,
"item_count": len(items), "item_count": len(items),
"items": items, "items": items,
@@ -322,7 +321,7 @@ def print_agenda(agenda: dict) -> None:
def fetch_recent_activity(client: GiteaClient, config: dict) -> dict: def fetch_recent_activity(client: GiteaClient, config: dict) -> dict:
"""Fetch recent issues and PRs from the lookback window.""" """Fetch recent issues and PRs from the lookback window."""
lookback_hours = config.get("lookback_hours", 24) lookback_hours = config.get("lookback_hours", 24)
since = datetime.now(timezone.utc) - timedelta(hours=lookback_hours) since = datetime.now(UTC) - timedelta(hours=lookback_hours)
since_str = since.isoformat() since_str = since.isoformat()
activity = { activity = {
@@ -399,7 +398,7 @@ def load_cycle_data() -> dict:
continue continue
# Get entries from last 24 hours # Get entries from last 24 hours
since = datetime.now(timezone.utc) - timedelta(hours=24) since = datetime.now(UTC) - timedelta(hours=24)
recent = [ recent = [
e for e in entries e for e in entries
if e.get("timestamp") and datetime.fromisoformat(e["timestamp"].replace("Z", "+00:00")) >= since if e.get("timestamp") and datetime.fromisoformat(e["timestamp"].replace("Z", "+00:00")) >= since
@@ -426,7 +425,7 @@ def load_cycle_data() -> dict:
def generate_day_summary(activity: dict, cycles: dict) -> dict: def generate_day_summary(activity: dict, cycles: dict) -> dict:
"""Generate a day summary from activity data.""" """Generate a day summary from activity data."""
return { return {
"generated_at": datetime.now(timezone.utc).isoformat(), "generated_at": datetime.now(UTC).isoformat(),
"lookback_hours": 24, "lookback_hours": 24,
"issues_touched": len(activity.get("issues_touched", [])), "issues_touched": len(activity.get("issues_touched", [])),
"issues_closed": len(activity.get("issues_closed", [])), "issues_closed": len(activity.get("issues_closed", [])),

View File

@@ -25,7 +25,6 @@ import sys
from collections import Counter from collections import Counter
from datetime import UTC, datetime, timedelta from datetime import UTC, datetime, timedelta
from pathlib import Path from pathlib import Path
from typing import Any
from urllib.error import HTTPError, URLError from urllib.error import HTTPError, URLError
from urllib.request import Request, urlopen from urllib.request import Request, urlopen

View File

@@ -12,7 +12,6 @@ Refs: #1105
from __future__ import annotations from __future__ import annotations
import json
import logging import logging
import os import os
import shutil import shutil