"Auto-commit via make git"

This commit is contained in:
neo.webmaster.2@gmail.com 2026-01-07 12:25:18 +01:00
parent 02759319c3
commit 9faf7ac7e5
9 changed files with 110 additions and 1 deletions

View File

@@ -1 +1,31 @@
print("NEORICALEX — ambiente ok")
#!/usr/bin/env python3
"""Entry point: bootstrap the NEORICALEX cognitive neuron and run one agent cycle."""
from core.agent import NeuronAgent
from system.env import ensure_ollama, ensure_ollama_serve, ensure_model
from system.requirements import ensure_requirements

# Model tag defined in one place (was previously hard-coded twice below).
DEFAULT_MODEL = "deepseek-r1:1.5b"


def main(model: str = DEFAULT_MODEL) -> None:
    """Install prerequisites, start Ollama, pull *model*, and run one agent cycle.

    Args:
        model: Ollama model tag to pull and run. Defaults to DEFAULT_MODEL,
            preserving the original hard-coded behavior.
    """
    print("🧠 NEORICALEX — Inicialização do neurónio cognitivo...\n")
    # 1. Python dependencies
    ensure_requirements()
    # 2. Ollama binary
    ensure_ollama()
    # 3. Ollama server
    ensure_ollama_serve()
    # 4. Model weights
    ensure_model(model)
    # 5. Agent — a single cycle only, used as a checkpoint
    agent = NeuronAgent(model=model)
    agent.run_once()


if __name__ == "__main__":
    main()

View File

20
src/builder/core/agent.py Normal file
View File

@@ -0,0 +1,20 @@
import subprocess


class NeuronAgent:
    """Minimal cognitive agent that shells out to a local Ollama model."""

    def __init__(self, model: str):
        # Ollama model tag, e.g. "deepseek-r1:1.5b".
        self.model = model

    def run_once(self) -> None:
        """Send one smoke-test prompt to the model and print its reply.

        Bug fix: the original ignored ``result.returncode`` and ``stderr``,
        so a failing `ollama run` silently printed an empty reply.

        Raises:
            FileNotFoundError: if the `ollama` binary is not on PATH.
        """
        print("\n🧪 Teste cognitivo inicial\n")
        prompt = "Hello DeepSeek. Identify yourself in one sentence."
        result = subprocess.run(
            ["ollama", "run", self.model, prompt],
            capture_output=True,
            text=True,
        )
        # Surface failures instead of printing nothing.
        if result.returncode != 0:
            print(f"[!] ollama run falhou (código {result.returncode}):")
            print(result.stderr.strip())
            return
        print("🧠 Resposta do neurónio:\n")
        print(result.stdout.strip())

View File

View File

View File

59
src/builder/system/env.py Normal file
View File

@@ -0,0 +1,59 @@
import shutil
import subprocess
import threading
import time
def ensure_ollama():
    """Install the Ollama CLI via the official script when it is not on PATH."""
    if shutil.which("ollama") is not None:
        print("[=] Ollama encontrado.")
        return
    print("[+] Ollama não encontrado. A instalar...")
    # NOTE(review): pipes a remote script into sh (the official install path);
    # fine for a dev bootstrap, but worth pinning/verifying for production.
    install_cmd = "curl -fsSL https://ollama.com/install.sh | sh"
    subprocess.run(["bash", "-c", install_cmd], check=True)
def _ollama_running():
try:
subprocess.run(
["ollama", "list"],
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
timeout=2,
)
return True
except Exception:
return False
def ensure_ollama_serve():
    """Launch `ollama serve` in a background daemon thread if not already active."""
    if _ollama_running():
        print("[=] Ollama serve já ativo.")
        return
    print("[+] A iniciar ollama serve...")
    server = threading.Thread(
        target=lambda: subprocess.run(["ollama", "serve"]),
        daemon=True,  # must not block interpreter exit
    )
    server.start()
    # Crude readiness wait: give the server a moment to bind its port.
    time.sleep(2)
def ensure_model(model):
    """Pull *model* via `ollama pull` unless it is already installed.

    Bug fix: the original used a substring test (``model in result.stdout``),
    which could false-positive when another installed tag merely contains the
    requested name. Now the NAME column of each `ollama list` line is compared
    exactly.

    Args:
        model: Ollama model tag, e.g. "deepseek-r1:1.5b".

    Raises:
        subprocess.CalledProcessError: if `ollama pull` fails.
    """
    result = subprocess.run(
        ["ollama", "list"],
        capture_output=True,
        text=True,
    )
    # First whitespace-separated field of each row is the model tag; the
    # header row's "NAME" can never equal a real tag.
    installed = {
        line.split()[0]
        for line in result.stdout.splitlines()
        if line.strip()
    }
    if model in installed:
        print(f"[=] Modelo {model} disponível.")
        return
    print(f"[+] A obter modelo {model}...")
    subprocess.run(["ollama", "pull", model], check=True)

View File

0
src/requirements.lock Normal file
View File