"Auto-commit via make git"
Some checks are pending
Python Code Quality / lock_file (push) Waiting to run
Python Code Quality / linting (push) Blocked by required conditions
Python Code Quality / formatting (push) Blocked by required conditions
Python Code Quality / type_consistency (push) Blocked by required conditions
Python Code Quality / tests (push) Blocked by required conditions
Python Code Quality / build (push) Blocked by required conditions

This commit is contained in:
neo.webmaster.2@gmail.com 2026-03-05 08:11:11 +01:00
parent 4818d30164
commit 7e0d9b68be
61 changed files with 499 additions and 68 deletions

3
.gitignore vendored
View File

@ -1,3 +1,6 @@
# === chaves privadas ===
src/neuro/tools/human_agent/env.py
# === sub-módulos clonados ===
src/modules/
.cache/

101
README.md
View File

@ -29,53 +29,58 @@ make tarball # gera dist/myhomelab-src.tar.gz
make release # publica release e cria tag automática
```
## Checkpoint (self-log)
ok, ajustei a estrutura dos agentes:
```
myhomelab/
├─ configure.ac
├─ Makefile.am
├─ README.md
├─ pyproject.toml # uv + ruff + (pytest)
├─ uv.lock
├─ src/
│ └─ neuro/
│ ├─ __init__.py
│ ├─ cli.py # entrypoints (tui / bench / repl)
│ │
│ ├─ tui/
│ │ ├─ app.py # Textual App
│ │ ├─ screens.py # screens/modals
│ │ ├─ widgets.py # componentes reutilizáveis
│ │ └─ state.py # estado e store (sem IA aqui)
│ │
│ ├─ engine/
│ │ ├─ __init__.py
│ │ ├─ session.py # Session, Chat/Prompt state
│ │ └─ runtime.py # “orquestração” do loop (streaming, cancel)
│ │
│ ├─ ai/
│ │ ├─ __init__.py
│ │ ├─ config.py # dataclasses: dims, vocab, layers, seed
│ │ ├─ tokenizer.py # tokenize/detokenize
│ │ ├─ embeddings.py # tok->vec, pos enc
│ │ ├─ transformer.py # forward() (minimo)
│ │ ├─ head.py # logits/probs
│ │ ├─ sampling.py # greedy, top-k, top-p, temp
│ │ └─ model.py # Model = cola tudo (tokenize->sample)
│ │
│ ├─ utils/
│ │ ├─ logging.py
│ │ ├─ rng.py
│ │ └─ perf.py
│ │
│ └─ data/
│ ├─ vocab.json # opcional no MVP
│ └─ tiny_weights.npz # opcional (se quiseres pesos fixos)
├─ tests/
│ ├─ test_tokenizer.py
│ ├─ test_sampling.py
│ └─ test_transformer_shapes.py
└─ tools/
├─ bench.py # microbench do forward/sampling
└─ export_vocab.py
tree src/neuro/tools
src/neuro/tools
├── ai_agent
│ ├── bench.py
│ └── export_vocab.py
├── doc_agent
│ ├── analyzer.py
│ ├── emit_json.py
│ ├── emit_md.py
│ ├── evaluator.py
│ ├── ir_schema.py
│ ├── main.py
│ ├── prompts
│ └── scanner.py
├── host_agent # o teu novo nome
│ └── main.py
├── human_agent
│ ├── env.py
│ └── env.py.temp
├── infra_agent
│ ├── custom_ubuntu_agent
│ ├── django_agent
│ ├── docker_agent
│ ├── flask_agent
│ ├── neuronio_agent
│ ├── nfdos_agent
│ ├── packer_agent
│ ├── terraform_agent
│ ├── vagrant_agent
│ └── wordpress_agent
└── llm_agent
└── main.py
18 directories, 13 files
```
tu estás a preocupar-te (e bem) com a segurança, eu com a parte financeira 😂
runtime ok:
```
src/myhomelab
🚀 Iniciando o myhomelab 51b9a23-dirty...
[myhomelab] uv encontrado no PATH.
[myhomelab] Ambiente .venv já existente.
[myhomelab] Sincronizando dependências via uv.lock (uv sync)...
[myhomelab] uv sync
Resolved 12 packages in 1ms
Audited 11 packages in 0.34ms
[myhomelab] Iniciando neuro via uv run...
[myhomelab] uv run python -m neuro
🧠 Neuro runtime starting...
[runtime] run(): ok (stub)
```
quando tiver descansado iniciamos o "REPL" (Textual App com um input, um output, barra de status, botões: Run/Stop)

View File

@ -1,43 +1,134 @@
from __future__ import annotations
from pathlib import Path
import os
import shutil
import subprocess
import sys
import os
class Application(object):
class Application:
def __init__(self, *args, **kwargs):
for key in kwargs:
setattr(self, key, kwargs[key])
self.base_path = Path(__file__).resolve().parents[1]
self.venv_path = self.base_path / "venv"
self.python = self.venv_path / "bin" / "python"
self.venv_path = self.base_path / ".venv" # uv default
self._hello()
# -------- UX --------
def _hello(self):
print(f"🚀 Iniciando o {getattr(self, 'package', '')} {getattr(self, 'version', '')}...")
def _print(self, msg: str):
print(f"[{getattr(self, 'package', '')}] {msg}", flush=True)
def run(self):
self._ensure_venv()
self.launch_neuro()
# -------- main --------
def _ensure_venv(self):
def run(self):
os.chdir(self.base_path)
if not self.venv_path.exists():
self._print("Criando ambiente Python...")
subprocess.run(
[sys.executable, "-m", "venv", str(self.venv_path)],
check=True
)
uv = self.ensure_uv()
self.ensure_venv(uv)
self.ensure_dependencies(uv)
self.launch_neuro(uv)
# -------- uv / env --------
def ensure_uv(self) -> str:
"""
Garante que o comando `uv` existe no host.
Estratégia:
1) PATH
2) `python -m uv` (se uv estiver instalado no python do host)
3) fallback: instala uv via pip (host python) e tenta de novo
"""
if shutil.which("uv"):
self._print("uv encontrado no PATH.")
return "uv"
# tenta via `python -m uv` (muitas vezes funciona em ambientes dev)
try:
subprocess.run([sys.executable, "-m", "uv", "--version"], check=True, capture_output=True, text=True)
self._print("uv disponível via `python -m uv`.")
return f"{sys.executable} -m uv"
except Exception:
pass
# fallback pragmatico: pip install uv (host python)
self._print("uv não encontrado. Tentando instalar via pip no Python do host...")
try:
subprocess.run([sys.executable, "-m", "pip", "install", "--user", "uv"], check=True)
except subprocess.CalledProcessError as e:
raise RuntimeError(
"Não consegui instalar o `uv`. Instala manualmente e tenta novamente.\n"
"Sugestões:\n"
" - pip install --user uv\n"
" - ou usa o instalador oficial do uv\n"
) from e
if shutil.which("uv"):
self._print("uv instalado e disponível no PATH.")
return "uv"
# última tentativa: python -m uv
subprocess.run([sys.executable, "-m", "uv", "--version"], check=True)
self._print("uv instalado e disponível via `python -m uv`.")
return f"{sys.executable} -m uv"
def _uv(self, uv_cmd: str, *args: str) -> None:
"""
Executa um comando uv, aceitando uv_cmd como:
- "uv"
- "<python> -m uv"
"""
cmd = uv_cmd.split() + list(args)
self._print(" ".join(cmd))
subprocess.run(cmd, check=True)
def ensure_venv(self, uv_cmd: str) -> None:
"""
Garante que existe um venv gerido pelo uv em .venv.
"""
if self.venv_path.exists():
self._print("Ambiente .venv já existente.")
return
self._print("Criando ambiente .venv via uv...")
# uv cria .venv por default; este comando respeita pyproject
self._uv(uv_cmd, "venv")
def ensure_dependencies(self, uv_cmd: str) -> None:
"""
Instala/sincroniza deps. Com uv.lock presente, o correto é uv sync.
"""
lock = self.base_path / "uv.lock"
pyproject = self.base_path / "pyproject.toml"
if not pyproject.exists():
raise RuntimeError("pyproject.toml não encontrado na raiz do projeto.")
if lock.exists():
self._print("Sincronizando dependências via uv.lock (uv sync)...")
self._uv(uv_cmd, "sync")
else:
self._print("Ambiente Python já existente.")
# Se ainda não há lock, tenta resolver e criar
self._print("uv.lock não encontrado. Resolvendo dependências (uv lock + uv sync)...")
self._uv(uv_cmd, "lock")
self._uv(uv_cmd, "sync")
def launch_neuro(self):
neuro = self.base_path / "src" / "neuro" / "__main__.py"
subprocess.run([str(self.python), str(neuro)])
# -------- launch --------
def launch_neuro(self, uv_cmd: str) -> None:
"""
Entry point: roda a app dentro do ambiente uv.
Preferência: módulo -m neuro (usa src layout corretamente).
"""
# Se tens __main__.py em neuro, isto é perfeito:
self._print("Iniciando neuro via uv run...")
self._uv(uv_cmd, "run", "python", "-m", "neuro")

0
src/infra/shell.sh Normal file
View File

View File

@ -6,5 +6,5 @@ sys.path.insert(1, '/usr/local/local/lib/python3.12/dist-packages')
from bootstrap import Application

if __name__ == "__main__":
    # Version string comes from `git describe` against a dirty working tree.
    app = Application(package="myhomelab", version="fa244a4-dirty-3-gd1980d7-dirty-5-g4818d30-dirty")
    app.run()

15
src/neuro/__main__.py Normal file
View File

@ -0,0 +1,15 @@
from neuro.engine.runtime import Runtime
from neuro.engine.session import Session
def main():
    """Boot a fresh Session, hand it to a Runtime, and start it."""
    print("🧠 Neuro runtime starting...")
    runtime = Runtime(session=Session())
    runtime.run()


if __name__ == "__main__":
    main()

0
src/neuro/ai/__init__.py Normal file
View File

0
src/neuro/ai/config.py Normal file
View File

View File

0
src/neuro/ai/head.py Normal file
View File

0
src/neuro/ai/model.py Normal file
View File

0
src/neuro/ai/sampling.py Normal file
View File

View File

View File

View File

View File

View File

View File

View File

View File

View File

View File

View File

View File

@ -0,0 +1,17 @@
from __future__ import annotations
from dataclasses import dataclass
from typing import Optional
from neuro.engine.session import Session
@dataclass
class Runtime:
    """Minimal orchestration stub: owns a Session and a running flag."""

    session: Session
    running: bool = False

    def run(self) -> None:
        """Flip the running flag and emit a liveness message (stub for now)."""
        print("[runtime] run(): ok (stub)")
        self.running = True

View File

@ -0,0 +1,10 @@
from __future__ import annotations
from dataclasses import dataclass, field
from typing import List
@dataclass
class Session:
    """Mutable chat state: parallel histories of prompts sent and outputs produced."""

    # default_factory gives every Session its own independent lists
    # (a shared mutable default would leak state across instances).
    prompt_history: List[str] = field(default_factory=list)
    output_history: List[str] = field(default_factory=list)

View File

View File

View File

View File

View File

View File

View File

View File

View File

View File

View File

View File

@ -0,0 +1,240 @@
# src/neuro/tools/codex_agent/main.py
from __future__ import annotations
import os
import shutil
import subprocess
import sys
import threading
from pathlib import Path
# Upstream Codex sources; cloned into <base>/.cache/codex/repo by ensure_codex().
CODEX_REPO = "https://github.com/openai/codex.git"
# Minimum rustc required to build the codex-rs workspace; enforced in _ensure_rust_min().
MIN_RUSTC = (1, 82, 0)
def _print(msg: str):
print(f"[host:codex] {msg}", flush=True)
def _run(cmd: list[str], cwd: Path | None = None, env: dict | None = None):
    """Run *cmd*, streaming its merged stdout/stderr line by line; raise on failure."""
    _print(f"RUN: {' '.join(cmd)}")
    proc = subprocess.Popen(
        cmd,
        cwd=str(cwd) if cwd else None,
        env=env,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,  # merge streams so output stays ordered
        text=True,
    )
    assert proc.stdout is not None
    for raw_line in proc.stdout:
        print(f"[codex] {raw_line.rstrip()}", flush=True)
    if proc.wait() != 0:
        raise RuntimeError(f"Command failed: {' '.join(cmd)}")
def _run_capture(cmd: list[str], env: dict | None = None) -> str:
out = subprocess.check_output(cmd, env=env, text=True, stderr=subprocess.STDOUT)
return out.strip()
def _paths(base: Path):
cache = base / ".cache" / "codex"
root = cache / "repo"
workspace = root / "codex-rs"
exe = "codex.exe" if sys.platform.startswith("win") else "codex"
target_bin = workspace / "target" / "debug" / exe
return cache, root, workspace, target_bin
def _parse_rustc_version(line: str) -> tuple[int, int, int]:
# rustc 1.76.0 (....)
parts = line.split()
ver = parts[1].split(".")
return (int(ver[0]), int(ver[1]), int(ver[2]))
def _version_ge(a: tuple[int, int, int], b: tuple[int, int, int]) -> bool:
return a >= b
def _ensure_rust_min(env: dict) -> dict:
    """
    Ensure rustc >= MIN_RUSTC (1.82.0) is available, installing or updating
    the stable toolchain via rustup when needed.

    Mutates and returns *env*: ~/.cargo/bin is prepended to PATH so freshly
    installed tools are visible to later subprocess calls.
    Raises RuntimeError when rustc is still too old after the update.
    NOTE(review): uses ':' as PATH separator and `bash -lc`, so this path
    assumes a POSIX host.
    """
    _print("Checking Rust toolchain...")
    cargo_bin = Path.home() / ".cargo" / "bin"
    # Prepend ~/.cargo/bin up-front so a pre-existing user install is found.
    env["PATH"] = f"{cargo_bin}:{env.get('PATH','')}"
    if shutil.which("rustup") is None or shutil.which("cargo") is None or shutil.which("rustc") is None:
        _print("Rust toolchain not fully found. Installing rustup...")
        # Official rustup installer, non-interactive (-y).
        _run(
            ["bash", "-lc", "curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y"],
            env=env,
        )
        # Re-prepend so the freshly installed toolchain is picked up.
        env["PATH"] = f"{cargo_bin}:{env.get('PATH','')}"
    cur_line = _run_capture(["bash", "-lc", "rustc --version"], env=env)
    cur = _parse_rustc_version(cur_line)
    _print(f"rustc version: {cur[0]}.{cur[1]}.{cur[2]} (min required 1.82.0)")
    if not _version_ge(cur, MIN_RUSTC):
        _print("rustc is too old. Updating stable toolchain...")
        _run(["bash", "-lc", "rustup toolchain install stable"], env=env)
        _run(["bash", "-lc", "rustup default stable"], env=env)
        _run(["bash", "-lc", "rustup update stable"], env=env)
        # Re-check: the update must actually have reached the minimum.
        cur_line2 = _run_capture(["bash", "-lc", "rustc --version"], env=env)
        cur2 = _parse_rustc_version(cur_line2)
        _print(f"rustc version after update: {cur2[0]}.{cur2[1]}.{cur2[2]}")
        if not _version_ge(cur2, MIN_RUSTC):
            raise RuntimeError(f"rustc still < 1.82.0 after update: {cur_line2}")
    # Components needed by the Codex build workflow.
    _print("Ensuring rustfmt + clippy...")
    _run(["bash", "-lc", "rustup component add rustfmt"], env=env)
    _run(["bash", "-lc", "rustup component add clippy"], env=env)
    return env
def _have_sudo() -> bool:
return shutil.which("sudo") is not None
def _sudo_noprompt_ok() -> bool:
if not _have_sudo():
return False
try:
subprocess.check_call(["sudo", "-n", "true"], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
return True
except Exception:
return False
def _detect_pkg_manager() -> str | None:
for pm in ("apt-get", "dnf", "pacman", "zypper"):
if shutil.which(pm) is not None:
return pm
return None
def _install_system_deps_for_linux_sandbox() -> None:
    """
    Install the system packages the Codex linux-sandbox needs:
      - pkg-config
      - libcap (dev headers + libcap.pc)

    Uses non-interactive sudo only; raises RuntimeError with manual
    instructions whenever automatic installation is not possible.
    """
    pm = _detect_pkg_manager()
    if pm is None:
        raise RuntimeError(
            "Missing system deps (pkg-config/libcap). No supported package manager found.\n"
            "Install manually: pkg-config + libcap development package (contains libcap.pc)."
        )
    if not _have_sudo():
        raise RuntimeError(
            "Missing system deps (pkg-config/libcap) and sudo not found.\n"
            "Install manually: pkg-config + libcap development package (contains libcap.pc)."
        )
    if not _sudo_noprompt_ok():
        # Never block waiting for a sudo password inside ensure_codex.
        raise RuntimeError(
            "Missing system deps (pkg-config/libcap). Need sudo privileges.\n"
            "Run manually (then re-run neuro):\n"
            " - Debian/Ubuntu: sudo apt-get update && sudo apt-get install -y pkg-config libcap-dev\n"
            " - Fedora: sudo dnf install -y pkgconf-pkg-config libcap-devel\n"
            " - Arch: sudo pacman -S --needed pkgconf libcap"
        )
    _print(f"Installing system dependencies via {pm} (sudo -n)...")
    # Per-manager command sequences; each runs under non-interactive sudo.
    commands = {
        "apt-get": [
            ["sudo", "-n", "apt-get", "update"],
            ["sudo", "-n", "apt-get", "install", "-y", "pkg-config", "libcap-dev"],
        ],
        "dnf": [
            ["sudo", "-n", "dnf", "install", "-y", "pkgconf-pkg-config", "libcap-devel"],
        ],
        "pacman": [
            ["sudo", "-n", "pacman", "-S", "--needed", "--noconfirm", "pkgconf", "libcap"],
        ],
        "zypper": [
            ["sudo", "-n", "zypper", "--non-interactive", "install", "pkg-config", "libcap-devel"],
        ],
    }
    if pm not in commands:
        raise RuntimeError(f"Unsupported package manager: {pm}")
    for cmd in commands[pm]:
        _run(cmd)
def _ensure_linux_sandbox_deps(env: dict):
    """On Linux, verify pkg-config and libcap are present; install them when missing."""
    # These deps only matter on Linux.
    if not sys.platform.startswith("linux"):
        _print("Non-Linux platform detected; skipping linux-sandbox deps check.")
        return
    _print("Checking system deps for codex-linux-sandbox (pkg-config + libcap)...")
    # pkg-config itself
    if shutil.which("pkg-config") is None:
        _print("pkg-config not found.")
        _install_system_deps_for_linux_sandbox()
        return
    # libcap, detected through its pkg-config metadata (libcap.pc)
    try:
        subprocess.check_call(
            ["pkg-config", "--exists", "libcap"],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )
    except Exception:
        _print("libcap not found via pkg-config (libcap.pc missing).")
        _install_system_deps_for_linux_sandbox()
    else:
        _print("libcap found via pkg-config.")
def _ensure_just(env: dict):
    """Install the `just` task runner via cargo unless it is already on PATH."""
    if shutil.which("just") is not None:
        _print("just already installed.")
        return
    _print("Installing just...")
    _run(["bash", "-lc", "cargo install just"], env=env)
def _ensure_nextest(env: dict):
    """Install cargo-nextest (optional test runner) unless it is already on PATH."""
    if shutil.which("cargo-nextest") is not None:
        _print("cargo-nextest already installed.")
        return
    _print("Installing cargo-nextest (optional)...")
    _run(["bash", "-lc", "cargo install --locked cargo-nextest"], env=env)
def ensure_codex(base: Path) -> Path:
    """
    Ensure a local Codex build exists and return the path to its debug binary.

    Ordered pipeline (each step may raise RuntimeError):
      1. clone or update the repo under <base>/.cache/codex/repo
      2. ensure rustc >= 1.82.0 (installing/updating rustup as needed)
      3. ensure Linux sandbox system deps (pkg-config + libcap)
      4. ensure helper tools (just, cargo-nextest)
      5. `cargo build` the codex-rs workspace
    """
    _print("Ensuring Codex build...")
    cache, root, workspace, target_bin = _paths(base)
    cache.mkdir(parents=True, exist_ok=True)
    # Copy the environment: toolchain steps below mutate PATH.
    env = dict(os.environ)
    # Repo: clone on first run, otherwise pull the latest upstream.
    if not root.exists():
        _print("Cloning Codex repository...")
        _run(["git", "clone", CODEX_REPO, str(root)], cwd=cache, env=env)
    else:
        _print("Repository already exists. Checking for updates...")
        _run(["git", "fetch"], cwd=root, env=env)
        _run(["git", "pull"], cwd=root, env=env)
    if not workspace.exists():
        raise RuntimeError(f"Expected workspace not found: {workspace}")
    # Rust toolchain (may install rustup and prepend ~/.cargo/bin to PATH).
    env = _ensure_rust_min(env)
    # System deps required by codex-linux-sandbox (Linux only).
    _ensure_linux_sandbox_deps(env)
    # Helper tools used by the Codex workflow.
    _ensure_just(env)
    _ensure_nextest(env)
    # Build (debug profile; binary lands in target/debug).
    _print("Building Codex...")
    _run(["bash", "-lc", "cargo build"], cwd=workspace, env=env)
    if not target_bin.exists():
        raise RuntimeError(f"Codex build failed: binary not found at {target_bin}")
    _print(f"Build complete: {target_bin}")
    return target_bin

View File

@ -0,0 +1 @@
# Placeholder for the user's OpenAI API key; this env.py is listed in
# .gitignore ("chaves privadas") so the real key never reaches version control.
OPENAI_API = "<a_sua_chave_api>"

View File

@ -0,0 +1,49 @@
# src/neuro/tools/llm_agent/main.py
from __future__ import annotations
import shutil
import subprocess
import threading
import time
def _print(msg: str):
print(f"[llm] {msg}", flush=True)
def ensure_ollama():
    """Install Ollama via its official install script when the binary is missing."""
    if shutil.which("ollama"):
        _print("Ollama encontrado.")
        return
    _print("Ollama não encontrado. A instalar...")
    # NOTE(review): piping a remote script into sh is a trust decision;
    # this URL is the vendor's official installer.
    subprocess.run(
        ["bash", "-c", "curl -fsSL https://ollama.com/install.sh | sh"],
        check=True,
    )
def _ollama_running():
try:
subprocess.run(
["ollama", "list"],
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
timeout=2,
)
return True
except Exception:
return False
def ensure_ollama_serve():
    """Start `ollama serve` in a background daemon thread unless it is already up."""
    if _ollama_running():
        _print("Ollama serve já ativo.")
        return
    _print("A iniciar ollama serve...")

    def _serve():
        subprocess.run(["ollama", "serve"])

    server = threading.Thread(target=_serve, daemon=True)
    server.start()
    # Crude readiness wait: give the daemon a moment to bind its port.
    time.sleep(2)

0
src/neuro/tui/app.py Normal file
View File

0
src/neuro/tui/screens.py Normal file
View File

0
src/neuro/tui/state.py Normal file
View File

0
src/neuro/tui/widgets.py Normal file
View File

View File

0
src/neuro/utils/perf.py Normal file
View File

0
src/neuro/utils/rng.py Normal file
View File