Salior v0.1.0: core skeleton + 3 agents + skills + MCP

This commit is contained in:
Hermes 2026-05-11 06:42:04 +00:00
commit 6411a0cfd4
45 changed files with 2447 additions and 0 deletions

1
.gitignore vendored Normal file
View File

@ -0,0 +1 @@
__pycache__/

40
README.md Normal file
View File

@ -0,0 +1,40 @@
# Salior — Autonomous Trading System
See `plans/salior-v1-plan.md` for full architecture.
## Structure
```
salior/ # Project root
├── pyproject.toml
├── README.md
└── salior/ # Python package
├── __init__.py
├── cli.py # salior CLI
├── core/ # Config, logging, memory, base agent
├── agents/
│ ├── data/ # HL WebSocket → TimescaleDB
│ ├── signal/ # Regime + conviction → Supabase signals
│ └── exec/ # HL CLOB API → orders
├── db/ # Schema + clients
├── skills/ # Agent skill definitions
├── mcp/ # MCP server
├── plugins/ # Plugin system
└── wallet/ # Wallet connect
```
## Quick Start
```bash
# Install
pip install -e .
# Initialize database
salior db init
# Start agents
salior agent start
# Check MCP tools
curl http://localhost:8080/mcp/tools
```

32
pyproject.toml Normal file
View File

@ -0,0 +1,32 @@
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"
[project]
name = "salior"
version = "0.1.0"
description = "All-in-one autonomous trading system"
readme = "README.md"
requires-python = ">=3.11"
dependencies = [
"aiohttp>=3.9.0",
"asyncpg>=0.29.0",
"structlog>=24.0.0",
"psycopg>=3.1.0",
"httpx>=0.27.0",
"click>=8.1.0",
"pyyaml>=6.0.0",
"websockets>=12.0",
]
[project.optional-dependencies]
dev = ["pytest", "pytest-asyncio", "ruff"]
[project.scripts]
salior = "salior.cli:main"
[tool.hatch.build.targets.wheel]
packages = ["salior"]
[tool.ruff]
line-length = 100

3
salior/__init__.py Normal file
View File

@ -0,0 +1,3 @@
"""Salior core package."""
__version__ = "0.1.0"

View File

@ -0,0 +1,4 @@
"""Data agent module."""
from salior.agents.data.agent import DataAgent
__all__ = ["DataAgent"]

172
salior/agents/data/agent.py Normal file
View File

@ -0,0 +1,172 @@
"""Hyperliquid WebSocket data collector.
Streams candles + trades + orderbook to TimescaleDB.
"""
from __future__ import annotations
import asyncio
import json
import zlib
from datetime import datetime, timezone
from typing import Any, Optional
import websockets
from salior.core.agent import Agent
from salior.core.config import config
from salior.core.logging import setup_logging
from salior.db.timescale_client import TimescaleDB
log = setup_logging()
class DataAgent(Agent):
    """Collects HL market data via WebSocket, writes to TimescaleDB.

    Subscribes to 1m/5m candles and trades for each configured coin and
    persists them. `loop_interval()` is 0 because `run()` owns its own
    reconnect loop instead of being polled periodically.
    """

    name = "data_agent"
    coins: list[str]  # set by __init__

    def __init__(self, coins: Optional[list[str]] = None) -> None:
        """Args:
            coins: coins to stream; defaults to config.coins.
        """
        super().__init__()
        self.coins = coins or config.coins
        self._db = TimescaleDB()
        # Active websocket connection while connected.
        # FIX: the original annotation `websockets.WebSocketApp` names a class
        # from the unrelated websocket-client package; the `websockets` package
        # exposes `WebSocketClientProtocol`.
        self._ws: Optional[websockets.WebSocketClientProtocol] = None
        # Newest candle timestamp seen per coin (monitoring/gap detection).
        self._last_candle_time: dict[str, datetime] = {}
        # Trades handled since start; drives the periodic heartbeat below.
        self._trade_count = 0

    def loop_interval(self) -> float:
        return 0  # WebSocket-based, no polling

    async def run(self) -> None:
        """Connect to HL WebSocket, handle messages forever (with reconnects)."""
        await self._db.connect()
        await self._subscribe()
        self._running = True
        while self._running:
            try:
                async with websockets.connect(
                    config.hl_ws_url,
                    compression=None,
                ) as ws:
                    self._ws = ws
                    log.info("hl_ws_connected", coins=self.coins)
                    # Subscriptions are per-connection, so (re)send after connect.
                    await self._send_subscribe()
                    async for msg in ws:
                        await self._handle_message(msg)
            except websockets.exceptions.ConnectionClosed as e:
                log.warning("hl_ws_disconnected", reason=str(e))
                await asyncio.sleep(3)
            except Exception as e:
                log.error("hl_ws_error", error=str(e))
                await asyncio.sleep(5)

    async def _subscribe(self) -> None:
        """Placeholder: subscriptions are sent per-connection in _send_subscribe()."""
        pass  # subscriptions sent after connect

    async def _send_subscribe(self) -> None:
        """Send the combined HL WebSocket subscription payload (id=1)."""
        if not self._ws:
            return
        # One subscribe message covering 1m candles, 5m candles and trades.
        candle_sub = {
            "method": "subscribe",
            "params": {
                "channels": [
                    {"type": "candle_1m", "coin": coin}
                    for coin in self.coins
                ] + [
                    {"type": "candle_5m", "coin": coin}
                    for coin in self.coins
                ] + [
                    {"type": "trades", "coin": coin}
                    for coin in self.coins
                ]
            },
            "id": 1,
        }
        await self._ws.send(json.dumps(candle_sub))
        log.info("hl_subscriptions_sent", coins=self.coins)

    async def _handle_message(self, raw: str | bytes) -> None:
        """Parse and dispatch a HL WebSocket message; never raises."""
        try:
            # Binary frames arrive zlib-compressed.
            if isinstance(raw, bytes):
                raw = zlib.decompress(raw).decode()
            msg = json.loads(raw)
            # Channel payloads: single updates or a "batched" list of items.
            if "channel" in msg and "data" in msg:
                channel = msg["channel"]
                data = msg["data"]
                if channel in ("candle_1m", "candle_5m"):
                    await self._handle_candle(data)
                elif channel == "trades":
                    await self._handle_trade(data)
                elif channel == "batched":
                    for item in data:
                        if "candle" in item:
                            await self._handle_candle(item["candle"])
                        elif "trade" in item:
                            await self._handle_trade(item["trade"])
            # Ack for the subscribe request sent with id=1.
            if msg.get("id") == 1 and msg.get("code") == 0:
                log.info("hl_subscription_confirmed")
        except Exception as e:
            log.error("hl_parse_error", error=str(e), raw=str(raw)[:100])

    async def _handle_candle(self, data: dict) -> None:
        """Upsert one candle; routed to candles_1m/candles_5m by its interval."""
        coin = data.get("coin", "")
        t_str = data.get("t", "")  # Unix timestamp in ms
        o = float(data.get("o", 0) or 0)
        h = float(data.get("h", 0) or 0)
        l = float(data.get("l", 0) or 0)
        c = float(data.get("c", 0) or 0)
        v = float(data.get("v", 0) or 0)
        if not t_str:
            return
        t = datetime.fromtimestamp(int(t_str) / 1000, tz=timezone.utc)
        interval = data.get("interval", "1m")
        # FIX: these were placeholder-less f-strings (f"candles_1m").
        table = "candles_1m" if interval == "1m" else "candles_5m"
        # Track the newest candle per coin (previously declared but never set).
        self._last_candle_time[coin] = t
        await self._db.upsert_candle(table, coin, t, o, h, l, c, v)
        log.debug("candle_written", coin=coin, table=table, t=t.isoformat())

    async def _handle_trade(self, data: dict) -> None:
        """Insert one trade; emits a health heartbeat every 100 trades."""
        coin = data.get("coin", "")
        px = float(data.get("px", 0) or 0)
        sz = float(data.get("sz", 0) or 0)
        side = data.get("side", "")  # B = bid, A = ask
        hash_ = data.get("hash", "")
        if not hash_:
            return
        t_str = data.get("time", "")
        t = (
            datetime.fromtimestamp(int(t_str) / 1000, tz=timezone.utc)
            if t_str
            else datetime.now(timezone.utc)
        )
        # Deduplicate by hash (handled by schema)
        await self._db.insert_trade(
            coin, t, px, sz, side.lower(), hash_
        )
        # FIX: the original gated on len(self._last_candle_time) % 100 == 0,
        # which was always true (the dict was never populated), causing a DB
        # health write on *every* trade. Count trades explicitly instead.
        self._trade_count += 1
        if self._trade_count % 100 == 0:
            await self._db.log_health(self.name, "running", iteration=self._trade_count)

View File

@ -0,0 +1,4 @@
"""Exec agent module."""
from salior.agents.exec.agent import ExecAgent
__all__ = ["ExecAgent"]

203
salior/agents/exec/agent.py Normal file
View File

@ -0,0 +1,203 @@
"""Hyperliquid execution agent.
Reads signals from Supabase and places HL CLOB orders.
"""
from __future__ import annotations
import asyncio
import hashlib
import hmac
import json
import time
from datetime import datetime, timezone
from typing import Any, Optional
import httpx
import websockets
from salior.core.agent import Agent
from salior.core.config import config
from salior.core.logging import setup_logging
from salior.db.timescale_client import TimescaleDB
from salior.db.supabase_client import SupabaseClient
log = setup_logging()
# HL API helpers
HL_API = "https://api.hyperliquid.xyz"
class ExecAgent(Agent):
    """Executes trades based on conviction signals from Supabase.

    Every loop_interval() seconds, pulls recent signals and places an order
    for each one whose |conviction| clears ``min_conviction``.
    """
    name = "exec_agent"
    # Execution mode: "paper" records to the DB only; "live" posts to HL.
    mode: str  # paper | live
    def __init__(
        self,
        coins: list[str] | None = None,
        min_conviction: float = 0.7,
        mode: str | None = None,
    ) -> None:
        """Args:
            coins: trading universe; defaults to config.coins.
            min_conviction: |conviction| threshold for executing a signal.
            mode: "paper" or "live"; defaults to config.execution_mode.
        """
        super().__init__()
        self.coins = coins or config.coins
        self.min_conviction = min_conviction
        self.mode = mode or config.execution_mode
        self._db = TimescaleDB()
        self._supabase = SupabaseClient()
        self._portfolio: dict[str, dict] = {}  # coin -> position
    def loop_interval(self) -> float:
        return 300.0  # Check signals every 5 minutes
    async def run(self) -> None:
        """Poll Supabase for high-conviction signals and execute them."""
        # NOTE(review): in paper mode this returns before any signal is read,
        # yet _place_order() below has a dedicated paper branch that records
        # executions — one of the two paths is dead code; confirm intent.
        if self.mode == "paper":
            log.info("exec_agent_paper_mode", min_conviction=self.min_conviction)
            return  # Paper mode: log only, don't execute
        await self._db.connect()
        # Get recent signals with high conviction
        signals = await self._supabase.get_recent_signals(limit=10)
        for sig in signals:
            # Conviction is signed (-1..1); its magnitude gates execution.
            conviction = abs(sig.get("conviction", 0))
            if conviction < self.min_conviction:
                continue
            try:
                await self._execute_signal(sig)
            except Exception as e:
                # One bad signal must not abort the rest of the batch.
                log.error("exec_error", signal_id=sig.get("id"), error=str(e))
        await self._db.log_health(self.name, "running", mode=self.mode)
    async def _execute_signal(self, sig: dict) -> None:
        """Place an order based on a signal (side from conviction sign)."""
        coin = sig.get("coin", "")
        regime = sig.get("regime", "")
        conviction = sig.get("conviction", 0)
        reasoning = sig.get("reasoning", "")
        # Determine side from conviction
        side = "buy" if conviction > 0 else "sell"
        size = self._calculate_size(coin, conviction)
        price = await self._get_market_price(coin, side)
        if not price or size <= 0:
            log.warning("exec_skip_no_price", coin=coin)
            return
        # Place order via HL API
        order_result = await self._place_order(
            coin=coin,
            side=side,
            size=size,
            price=price,
            sig_id=sig.get("id"),
        )
        log.info(
            "order_placed",
            coin=coin,
            side=side,
            size=size,
            price=price,
            conviction=conviction,
            result=order_result.get("status"),
        )
    def _calculate_size(self, coin: str, conviction: float) -> float:
        """Calculate position size based on conviction and portfolio."""
        # Base size: 1% of portfolio per trade
        base_pct = 0.01
        # Scale with conviction (0.7 → 1x, 1.0 → 3x)
        # NOTE(review): a negative conviction makes this multiplier < 1 and can
        # even go negative; run() filters on |conviction| — confirm sells are
        # meant to be sized from the signed value.
        conviction_multiplier = 1 + (conviction - 0.7) * 6
        size_pct = base_pct * conviction_multiplier
        # Get current portfolio value (from TimescaleDB)
        portfolio_value = 10_000  # Default paper amount
        if self._portfolio:
            pos = self._portfolio.get(coin, {})
            pos_value = pos.get("size", 0) * pos.get("avg_px", 0)
            # NOTE(review): max(default, single position) is a rough floor, not
            # the real portfolio value — verify this heuristic.
            portfolio_value = max(portfolio_value, pos_value)
        return portfolio_value * size_pct
    async def _get_market_price(self, coin: str, side: str) -> Optional[float]:
        """Return the latest 1m close as a proxy for the market price."""
        # Get latest candle from TimescaleDB
        candle = await self._db.get_latest_candle("candles_1m", coin)
        if candle:
            return candle["c"]
        return None
    async def _place_order(
        self,
        coin: str,
        side: str,
        size: float,
        price: float,
        sig_id: str | None,
    ) -> dict:
        """Place an order via Hyperliquid CLOB API.

        Returns a dict with at least a "status" key.
        """
        if not config.hl_private_key:
            log.warning("exec_no_private_key")
            return {"status": "error", "reason": "no private key configured"}
        # Build order payload
        payload = {
            "type": "Limit",
            "symbol": coin,
            "side": side,
            "size": str(size),
            "price": str(price),
            "reduceOnly": False,
        }
        # Log execution (paper mode)
        if self.mode == "paper":
            log.info(
                "paper_order",
                coin=coin,
                side=side,
                size=size,
                price=price,
                sig_id=sig_id,
            )
            # Record in DB
            exec_id = await self._db.insert_execution(
                sig_id, coin, side, size, price, self.mode
            )
            # Paper fills are assumed immediate at the requested price.
            await self._db.update_execution(exec_id, "filled", price)
            return {"status": "paper", "exec_id": exec_id}
        # Live execution via HL API
        # NOTE(review): this POST carries no authentication or signature even
        # though hmac/hashlib are imported at module level and hl_private_key
        # is checked above — the request will be rejected by an exchange that
        # requires signed actions; confirm the signing step is implemented.
        try:
            headers = {"Content-Type": "application/json"}
            async with httpx.AsyncClient() as client:
                resp = await client.post(
                    f"{HL_API}/order",
                    json=payload,
                    headers=headers,
                    timeout=10.0,
                )
                if resp.status_code == 200:
                    result = resp.json()
                    status = result.get("status", "unknown")
                    exec_id = await self._db.insert_execution(
                        sig_id, coin, side, size, price, self.mode
                    )
                    await self._db.update_execution(
                        exec_id, status, result.get("filled_px")
                    )
                    return result
                else:
                    error = resp.text
                    log.error("hl_order_failed", status=resp.status_code, error=error)
                    return {"status": "error", "reason": error}
        except Exception as e:
            log.error("hl_order_error", error=str(e))
            return {"status": "error", "reason": str(e)}

View File

@ -0,0 +1,4 @@
"""Signal agent module."""
from salior.agents.signal.agent import SignalAgent
__all__ = ["SignalAgent"]

View File

@ -0,0 +1,185 @@
"""Signal generation agent.
Reads candles from TimescaleDB, derives regime + conviction, writes Supabase signals.
"""
from __future__ import annotations
from datetime import datetime, timedelta, timezone
from salior.core.agent import Agent
from salior.core.config import config
from salior.core.logging import setup_logging
from salior.db.timescale_client import TimescaleDB
from salior.db.supabase_client import SupabaseClient
log = setup_logging()
# Prompt for classifying the market regime. Filled via str.format(); literal
# JSON braces are escaped as {{ }}.
# NOTE(review): the text hard-codes "BTC/USD" but _analyze_coin() runs it for
# every configured coin — confirm whether the wording should be per-coin.
REGIME_PROMPT = """You are a market regime detector. Analyze BTC/USD price data and classify the current regime.
Data:
- Current price: ${price}
- 24h high: ${high}, 24h low: ${low}
- Price range (high-low)/low: {range_pct:.2%}
- 1h trend: {trend_1h:.2%}
- Volume (last 1h): {volume_1h}
- Volume (prev 1h): {volume_prev}
- Volume ratio: {vol_ratio:.2f}x
Classify regime (choose one):
- trending_up: Strong directional move, clear trend
- trending_down: Strong downward move
- ranging: No clear direction, oscillating
- volatile: High range but unclear direction
Return JSON only:
{{"regime": "trending_up|trending_down|ranging|volatile", "reasoning": "brief explanation"}}
"""
# Prompt for scoring directional conviction given the detected regime and the
# last 20 one-minute candles rendered as text.
CONVICTION_PROMPT = """You are a trading conviction scorer. Score how confident you are in the current direction.
Current regime: {regime}
Price: ${price}
1h candles:
{candles_text}
Score conviction from -1.0 (strong sell) to +1.0 (strong buy).
Return JSON only:
{{"conviction": -0.85, "reasoning": "why", "momentum": "strong_bull|weak_bull|neutral|weak_bear|strong_bear"}}
"""
class SignalAgent(Agent):
    """Analyzes candles → writes conviction signals to Supabase.

    Each iteration: fetch recent 1m candles per coin, ask the LLM for a
    regime classification and a conviction score, and emit a signal when
    |conviction| clears the threshold.
    """

    name = "signal_agent"

    def __init__(
        self,
        coins: list[str] | None = None,
        min_conviction: float | None = None,
    ) -> None:
        """Args:
            coins: trading universe; defaults to config.coins.
            min_conviction: |conviction| threshold for emitting a signal;
                defaults to config.min_conviction.
        """
        super().__init__()
        self.coins = coins or config.coins
        # FIX: the original used `min_conviction or config.min_conviction`,
        # which silently discarded an explicit threshold of 0.0.
        self.min_conviction = (
            config.min_conviction if min_conviction is None else min_conviction
        )
        self._db = TimescaleDB()
        self._supabase = SupabaseClient()

    def loop_interval(self) -> float:
        return 60.0  # Run every 60 seconds

    async def run(self) -> None:
        """Analyze each coin, emit signals if conviction is high enough."""
        await self._db.connect()
        for coin in self.coins:
            try:
                signal = await self._analyze_coin(coin)
                if signal and abs(signal["conviction"]) >= self.min_conviction:
                    await self._emit_signal(signal)
                    log.info(
                        "signal_emitted",
                        coin=coin,
                        regime=signal["regime"],
                        conviction=signal["conviction"],
                    )
            except Exception as e:
                # One coin failing must not block analysis of the others.
                log.error("signal_error", coin=coin, error=str(e))
        await self._db.log_health(self.name, "running", iteration=self._iteration)

    async def _analyze_coin(self, coin: str) -> dict | None:
        """Analyze a coin's candles; return a signal dict or None if too few."""
        # Local imports keep module import light (llm may be heavyweight).
        import json

        from salior.llm import llm

        # FIX: the original also fetched candles_5m here but never used them.
        candles_1m = await self._db.get_candles("candles_1m", coin, hours=24)
        if len(candles_1m) < 5:
            log.debug("not_enough_candles", coin=coin, count=len(candles_1m))
            return None
        # Latest snapshot + 24-minute high/low window.
        latest = candles_1m[-1]
        price = latest["c"]
        high = max(c["h"] for c in candles_1m[-24:])
        low = min(c["l"] for c in candles_1m[-24:])
        range_pct = (high - low) / low if low > 0 else 0
        # 1h trend: relative change vs the candle 60 minutes ago.
        if len(candles_1m) >= 60:
            old_price = candles_1m[-60]["c"]
            trend_1h = (price - old_price) / old_price if old_price > 0 else 0
        else:
            trend_1h = 0
        # Volume: last hour vs previous hour.
        volume_1h = sum(c["v"] for c in candles_1m[-60:])
        volume_prev = (
            sum(c["v"] for c in candles_1m[-120:-60])
            if len(candles_1m) >= 120
            else volume_1h
        )
        vol_ratio = volume_1h / volume_prev if volume_prev > 0 else 1.0
        # Step 1: regime classification via LLM (JSON-only contract).
        regime_text = await llm.chat(
            REGIME_PROMPT.format(
                price=price,
                high=high,
                low=low,
                range_pct=range_pct,
                trend_1h=trend_1h,
                volume_1h=volume_1h,
                volume_prev=volume_prev,
                vol_ratio=vol_ratio,
            ),
            system="You are a JSON-only assistant. Return valid JSON with regime and reasoning keys.",
        )
        try:
            regime_data = json.loads(regime_text)
        except Exception:
            # Fall back to a neutral regime, keeping a snippet for debugging.
            regime_data = {"regime": "ranging", "reasoning": regime_text[:100]}
        # Step 2: conviction scoring over the last 20 candles.
        candles_text = "\n".join(
            f" {c['t'].isoformat()}: O={c['o']:.1f} H={c['h']:.1f} L={c['l']:.1f} C={c['c']:.1f} V={c['v']:.0f}"
            for c in candles_1m[-20:]
        )
        conviction_text = await llm.chat(
            CONVICTION_PROMPT.format(
                regime=regime_data["regime"],
                price=price,
                candles_text=candles_text,
            ),
            system="You are a JSON-only assistant. Return valid JSON with conviction (-1 to 1), reasoning, and momentum keys.",
        )
        try:
            conviction_data = json.loads(conviction_text)
        except Exception:
            conviction_data = {"conviction": 0.0, "reasoning": conviction_text[:100], "momentum": "neutral"}
        return {
            "coin": coin,
            "regime": regime_data["regime"],
            "conviction": float(conviction_data.get("conviction", 0.0)),
            "reasoning": f"{regime_data.get('reasoning', '')} | {conviction_data.get('reasoning', '')}",
            "price": price,
            "momentum": conviction_data.get("momentum", "neutral"),
        }

    async def _emit_signal(self, signal: dict) -> None:
        """Write signal to both TimescaleDB and Supabase."""
        # Write to TimescaleDB
        await self._db.insert_signal(
            signal["coin"],
            signal["regime"],
            signal["conviction"],
            signal["reasoning"],
        )
        # Write to Supabase (primary for external access)
        await self._supabase.insert_signal(
            signal["coin"],
            signal["regime"],
            signal["conviction"],
            signal["reasoning"],
        )

96
salior/cli.py Normal file
View File

@ -0,0 +1,96 @@
"""salior CLI entry point."""
from __future__ import annotations
import asyncio
import click
from salior.core.config import config
from salior.core.logging import setup_logging
log = setup_logging()
@click.group()
def main() -> None:
    """Salior — autonomous trading system."""
    # The docstring doubles as the click --help text; body intentionally empty.
@main.command()
def status() -> None:
    """Check system status."""
    # Build every report line first, then emit them in one pass.
    report = [
        "=== Salior Status ===",
        f"Host: {config.host}:{config.port}",
        f"Execution mode: {config.execution_mode}",
        f"Coins: {', '.join(config.coins)}",
        f"Min conviction: {config.min_conviction}",
        f"TimescaleDB: {config.timeseries_host}:{config.timeseries_port}/{config.timeseries_db}",
        f"Supabase: {config.supabase_url}",
    ]
    for line in report:
        click.echo(line)
@main.command()
def db_init() -> None:
    """Initialize the database schema.

    Reads salior/db/schema.sql from this package and applies it to the
    configured TimescaleDB instance.
    """
    import asyncpg
    from pathlib import Path  # FIX: Path was used below but never imported

    click.echo(f"Connecting to {config.timeseries_url}...")

    async def run() -> None:
        conn = await asyncpg.connect(config.timeseries_url)
        # FIX: schema.sql lives inside this package (salior/db/schema.sql);
        # the original walked up one directory too many with parent.parent.
        schema = Path(__file__).parent / "db" / "schema.sql"
        sql = schema.read_text()
        try:
            await conn.execute(sql)
        finally:
            # Close the connection even if applying the schema fails.
            await conn.close()
        click.echo("Schema applied successfully.")

    asyncio.run(run())
@main.command()
def agent_start() -> None:
    """Start all agents."""
    click.echo("Starting agents...")
    from salior.agents.data.agent import DataAgent
    from salior.agents.signal.agent import SignalAgent

    async def run_all() -> None:
        # Data + signal agents run concurrently until interrupted.
        agents = [DataAgent(), SignalAgent()]
        await asyncio.gather(*(agent.start() for agent in agents))

    try:
        asyncio.run(run_all())
    except KeyboardInterrupt:
        click.echo("Agents stopped.")
@main.command()
def mcp_serve() -> None:
    """Start the MCP server."""
    from salior.mcp.server import MCPServer

    click.echo(f"Starting MCP server on {config.host}:{config.port}...")

    async def serve_forever() -> None:
        srv = MCPServer(config.host, config.port)
        await srv.start()
        # Park on an event that is never set: serve until the process dies.
        await asyncio.Event().wait()

    asyncio.run(serve_forever())
@main.command()
def plugin_list() -> None:
    """List available plugins with an enabled/disabled marker."""
    from salior.plugins import registry

    plugins = registry.discover()
    click.echo(f"=== Plugins ({len(plugins)}) ===")
    for p in registry.list():
        # FIX: both markers were empty strings, making enabled and disabled
        # plugins indistinguishable (glyphs were likely lost in transit).
        marker = "[x]" if p["enabled"] else "[ ]"
        click.echo(f"{marker} {p['name']}: {p['description']}")
if __name__ == "__main__":
main()

7
salior/core/__init__.py Normal file
View File

@ -0,0 +1,7 @@
"""Core modules."""
from salior.core.config import config
from salior.core.logging import setup_logging
from salior.core.memory import memory
from salior.core.agent import Agent
__all__ = ["config", "setup_logging", "memory", "Agent"]

101
salior/core/agent.py Normal file
View File

@ -0,0 +1,101 @@
"""Base agent class."""
from __future__ import annotations
import asyncio
import signal
from abc import ABC, abstractmethod
from datetime import datetime
from typing import Optional
import structlog
logger = structlog.get_logger("salior.agent")
class Agent(ABC):
    """Base class for all Salior agents.

    Provides:
    - Lifecycle (start/stop/health)
    - Self-validation hooks
    - Loop detection (max iterations)
    - Graceful shutdown
    """

    name: str = "agent"

    def __init__(self) -> None:
        self._running = False
        self._stopping = False
        self._task: Optional[asyncio.Task] = None
        self._last_heartbeat: Optional[datetime] = None
        self._iteration = 0
        self._log = logger.bind(agent=self.name)

    async def start(self) -> None:
        """Start the agent's background loop task."""
        self._running = True
        self._last_heartbeat = datetime.utcnow()
        self._task = asyncio.create_task(self._run())
        self._log.info("agent_started")

    async def stop(self) -> None:
        """Stop the agent gracefully (cancels the loop task, waits up to 5s)."""
        self._stopping = True
        self._running = False
        self._log.info("agent_stopping")
        if self._task:
            try:
                self._task.cancel()
                await asyncio.wait_for(self._task, timeout=5.0)
            except asyncio.CancelledError:
                pass
            except Exception as e:
                self._log.error("agent_stop_error", error=str(e))
        self._log.info("agent_stopped")

    async def _run(self) -> None:
        """Main agent loop. Calls run() repeatedly until stopped.

        FIX: the original passed loop_interval() as the wait_for timeout and
        never slept between iterations. That (a) busy-looped run() back to
        back, and (b) for interval 0 (long-running agents like DataAgent)
        cancelled run() immediately via wait_for(timeout=0).
        """
        try:
            while self._running and not self._stopping:
                interval = self.loop_interval()
                if interval <= 0:
                    # Long-running agents own their own loop; no timeout, no pacing.
                    await self.run()
                else:
                    try:
                        # One iteration must finish within its interval.
                        await asyncio.wait_for(self.run(), timeout=interval)
                    except asyncio.TimeoutError:
                        # run() took too long — continue to next iteration
                        self._log.warning("agent_loop_timeout", iteration=self._iteration)
                self._iteration += 1
                self._last_heartbeat = datetime.utcnow()
                if interval > 0:
                    # Pace iterations instead of hammering run() continuously.
                    await asyncio.sleep(interval)
        except Exception as e:
            self._log.error("agent_loop_error", error=str(e))
            self._running = False

    @abstractmethod
    async def run(self) -> None:
        """One iteration of the agent loop. Implement in subclass."""
        ...

    def loop_interval(self) -> float:
        """Seconds between run() calls; <= 0 means run() loops forever itself."""
        return 60.0

    def is_healthy(self) -> bool:
        """Return True if agent is running and heartbeat is recent."""
        if not self._running:
            return False
        if self._last_heartbeat is None:
            return True
        # Unhealthy if no heartbeat in 5x the loop interval
        elapsed = (datetime.utcnow() - self._last_heartbeat).total_seconds()
        return elapsed < self.loop_interval() * 5

    def health_status(self) -> dict:
        """Return health status dict."""
        return {
            "agent": self.name,
            "running": self._running,
            "iteration": self._iteration,
            "last_heartbeat": (
                self._last_heartbeat.isoformat() if self._last_heartbeat else None
            ),
            "healthy": self.is_healthy(),
        }

70
salior/core/config.py Normal file
View File

@ -0,0 +1,70 @@
"""Configuration from environment variables."""
from __future__ import annotations
import os
from dataclasses import dataclass, field
from typing import Optional
@dataclass
class Config:
    """All configuration from environment variables.

    NOTE: defaults are evaluated from os.environ at class-definition time
    (module import), not per-instance — set env vars before importing salior.
    """

    # Host
    host: str = os.getenv("SALIOR_HOST", "localhost")
    port: int = int(os.getenv("SALIOR_PORT", "8080"))
    # TimescaleDB (VPS3 local)
    timeseries_host: str = os.getenv("TIMESERIES_HOST", "localhost")
    timeseries_port: int = int(os.getenv("TIMESERIES_PORT", "5432"))
    timeseries_user: str = os.getenv("TIMESERIES_USER", "salior")
    # SECURITY: hard-coded fallback password — always override
    # TIMESERIES_PASSWORD outside local development.
    timeseries_password: str = os.getenv("TIMESERIES_PASSWORD", "SaliorDB123!")
    timeseries_db: str = os.getenv("TIMESERIES_DB", "salior")
    # Supabase (Salior_DATA)
    supabase_url: str = os.getenv("SUPABASE_URL", "https://ridjlobkeolorkcunisl.supabase.co")
    supabase_key: str = os.getenv("SUPABASE_KEY", "")
    # LLM
    minimax_api_key: str = os.getenv("MINIMAX_API_KEY", "")
    minimax_model: str = os.getenv("MINIMAX_MODEL", "MiniMax-Text-01")
    local_llm_url: str = os.getenv("LOCAL_LLM_URL", "")
    openrouter_api_key: str = os.getenv("OPENROUTER_API_KEY", "")
    # Hyperliquid
    hl_ws_url: str = os.getenv(
        "HYPERLIQUID_WS_URL", "wss://api.hyperliquid.xyz/ws"
    )
    hl_api_url: str = os.getenv(
        "HYPERLIQUID_API_URL", "https://api.hyperliquid.xyz"
    )
    # Wallet session
    wallet_session_days: int = int(os.getenv("WALLET_SESSION_DAYS", "180"))
    # Execution
    hl_private_key: str = os.getenv("HL_PRIVATE_KEY", "")
    execution_mode: str = os.getenv("EXECUTION_MODE", "paper")  # paper | live
    # Plugins
    plugin_dir: str = os.getenv("SALIOR_PLUGIN_DIR", "~/.salior/plugins")
    # Coins to trade (v1: BTC + ETH); whitespace around names is stripped so
    # COINS="BTC, ETH" also works.
    coins: list[str] = field(
        default_factory=lambda: [
            c.strip() for c in os.getenv("COINS", "BTC,ETH").split(",")
        ]
    )
    # Signal thresholds
    min_conviction: float = float(os.getenv("MIN_CONVICTION", "0.3"))

    @property
    def timeseries_dsn(self) -> str:
        """Postgres DSN string.

        FIX: the original declared this as a dataclass field with
        ``field(default=lambda self: ...)``, which stored the lambda object
        itself — the attribute was never a string. A property keeps attribute
        access for callers while computing the DSN correctly.
        """
        return self.timeseries_url

    @property
    def timeseries_url(self) -> str:
        return f"postgresql://{self.timeseries_user}:{self.timeseries_password}@{self.timeseries_host}:{self.timeseries_port}/{self.timeseries_db}"


config = Config()

35
salior/core/logging.py Normal file
View File

@ -0,0 +1,35 @@
"""Structured logging setup."""
from __future__ import annotations
import structlog
import logging
import sys
def setup_logging(level: str = "INFO") -> structlog.BoundLogger:
    """Configure structlog with console output and return the 'salior' logger.

    Unknown *level* strings fall back to INFO.
    """
    resolved = getattr(logging, level.upper(), logging.INFO)
    logging.basicConfig(
        format="%(message)s",
        stream=sys.stdout,
        level=resolved,
    )
    # Processor pipeline: stdlib integration, ISO timestamps, exception
    # rendering, then a colored console renderer as the final step.
    pipeline = [
        structlog.stdlib.filter_by_level,
        structlog.stdlib.add_logger_name,
        structlog.stdlib.add_log_level,
        structlog.stdlib.PositionalArgumentsFormatter(),
        structlog.processors.TimeStamper(fmt="iso"),
        structlog.processors.StackInfoRenderer(),
        structlog.processors.format_exc_info,
        structlog.processors.UnicodeDecoder(),
        structlog.dev.ConsoleRenderer(colors=True),
    ]
    structlog.configure(
        processors=pipeline,
        wrapper_class=structlog.stdlib.BoundLogger,
        context_class=dict,
        logger_factory=structlog.stdlib.LoggerFactory(),
        cache_logger_on_first_use=True,
    )
    return structlog.get_logger("salior")

76
salior/core/memory.py Normal file
View File

@ -0,0 +1,76 @@
"""Long-term memory across sessions."""
from __future__ import annotations
import json
import os
from datetime import datetime
from pathlib import Path
from typing import Any, Optional
MEMORY_DIR = Path.home() / ".salior" / "memory"
MEMORY_DIR.mkdir(parents=True, exist_ok=True)
class Memory:
"""Persistent memory backed by files in ~/.salior/memory/."""
def __init__(self) -> None:
self._cache: dict[str, Any] = {}
def save(self, key: str, value: str | dict, tags: list[str] | None = None) -> None:
"""Save a memory entry."""
entry = {
"key": key,
"value": value,
"tags": tags or [],
"saved_at": datetime.utcnow().isoformat(),
}
# Store in cache
self._cache[key] = entry
# Write to file
safe_key = key.replace("/", "_").replace(" ", "_")
path = MEMORY_DIR / f"{safe_key}.json"
with open(path, "w") as f:
json.dump(entry, f, indent=2)
def get(self, key: str) -> Optional[Any]:
"""Get a memory entry."""
if key in self._cache:
return self._cache[key]["value"]
safe_key = key.replace("/", "_").replace(" ", "_")
path = MEMORY_DIR / f"{safe_key}.json"
if path.exists():
with open(path) as f:
entry = json.load(f)
self._cache[key] = entry
return entry["value"]
return None
def search(self, query: str, tags: list[str] | None = None) -> list[dict]:
"""Search memories by content or tags."""
results = []
for path in MEMORY_DIR.glob("*.json"):
with open(path) as f:
entry = json.load(f)
if query.lower() in str(entry.get("value", "")).lower():
if tags is None or any(t in entry.get("tags", []) for t in tags):
results.append(entry)
return results
def list_keys(self) -> list[str]:
"""List all memory keys."""
keys = []
for path in MEMORY_DIR.glob("*.json"):
keys.append(path.stem.replace("_", " "))
return sorted(keys)
def forget(self, key: str) -> None:
"""Delete a memory entry."""
safe_key = key.replace("/", "_").replace(" ", "_")
path = MEMORY_DIR / f"{safe_key}.json"
if path.exists():
path.unlink()
self._cache.pop(key, None)
memory = Memory()

5
salior/db/__init__.py Normal file
View File

@ -0,0 +1,5 @@
"""Database module."""
from salior.db.timescale_client import TimescaleDB
from salior.db.supabase_client import SupabaseClient
__all__ = ["TimescaleDB", "SupabaseClient"]

Binary file not shown.

Binary file not shown.

Binary file not shown.

175
salior/db/schema.sql Normal file
View File

@ -0,0 +1,175 @@
-- Salior Database Schema
-- PostgreSQL 17 + TimescaleDB 2.26
-- Run: psql $DATABASE_URL -f schema.sql

-- === TimescaleDB hypertables (market data) ===
-- One OHLCV row per (coin, bucket); hypertables partition on the time column.
-- NOTE(review): REAL is single precision — confirm that is acceptable for
-- prices of high-valued assets.

-- 1m candles per coin
CREATE TABLE IF NOT EXISTS candles_1m (
    coin TEXT NOT NULL,
    t TIMESTAMPTZ NOT NULL,
    o REAL NOT NULL,
    h REAL NOT NULL,
    l REAL NOT NULL,
    c REAL NOT NULL,
    v REAL NOT NULL,
    PRIMARY KEY (coin, t)
);
SELECT create_hypertable('candles_1m', 't', if_not_exists => TRUE);

-- 5m candles
CREATE TABLE IF NOT EXISTS candles_5m (
    coin TEXT NOT NULL,
    t TIMESTAMPTZ NOT NULL,
    o REAL NOT NULL,
    h REAL NOT NULL,
    l REAL NOT NULL,
    c REAL NOT NULL,
    v REAL NOT NULL,
    PRIMARY KEY (coin, t)
);
SELECT create_hypertable('candles_5m', 't', if_not_exists => TRUE);

-- 15m candles
CREATE TABLE IF NOT EXISTS candles_15m (
    coin TEXT NOT NULL,
    t TIMESTAMPTZ NOT NULL,
    o REAL NOT NULL,
    h REAL NOT NULL,
    l REAL NOT NULL,
    c REAL NOT NULL,
    v REAL NOT NULL,
    PRIMARY KEY (coin, t)
);
SELECT create_hypertable('candles_15m', 't', if_not_exists => TRUE);

-- 1h candles
CREATE TABLE IF NOT EXISTS candles_1h (
    coin TEXT NOT NULL,
    t TIMESTAMPTZ NOT NULL,
    o REAL NOT NULL,
    h REAL NOT NULL,
    l REAL NOT NULL,
    c REAL NOT NULL,
    v REAL NOT NULL,
    PRIMARY KEY (coin, t)
);
SELECT create_hypertable('candles_1h', 't', if_not_exists => TRUE);
-- Orderbook snapshots
-- One full book snapshot per (coin, t); levels are stored as JSONB arrays.
CREATE TABLE IF NOT EXISTS orderbook (
    coin TEXT NOT NULL,
    t TIMESTAMPTZ NOT NULL,
    bids JSONB NOT NULL, -- [{px, sz}, ...]
    asks JSONB NOT NULL,
    PRIMARY KEY (coin, t)
);
SELECT create_hypertable('orderbook', 't', if_not_exists => TRUE);
-- Individual trades
-- FIX: TimescaleDB requires every UNIQUE/PRIMARY KEY constraint on a
-- hypertable to include the partitioning column ('t'). The original
-- PRIMARY KEY (hash) plus UNIQUE hash made create_hypertable() fail, so the
-- key is (t, hash); dedupe-by-hash therefore holds per timestamp.
CREATE TABLE IF NOT EXISTS trades (
    coin TEXT NOT NULL,
    t TIMESTAMPTZ NOT NULL,
    px REAL NOT NULL,
    sz REAL NOT NULL,
    side TEXT NOT NULL, -- 'bid' | 'ask'
    hash TEXT NOT NULL,
    PRIMARY KEY (t, hash)
);
CREATE INDEX IF NOT EXISTS idx_trades_coin_t ON trades (coin, t DESC);
SELECT create_hypertable('trades', 't', if_not_exists => TRUE);
-- Funding rates
-- One funding-rate observation per (coin, t).
CREATE TABLE IF NOT EXISTS funding (
    coin TEXT NOT NULL,
    t TIMESTAMPTZ NOT NULL,
    rate REAL NOT NULL,
    PRIMARY KEY (coin, t)
);
SELECT create_hypertable('funding', 't', if_not_exists => TRUE);
-- === PostgreSQL tables (application data) ===
-- Signals from signal_agent
-- FIX: id is now PRIMARY KEY — executions.signal_id REFERENCES signals(id),
-- and a foreign key requires a unique/primary constraint on the referenced
-- column; without it this schema failed to apply.
CREATE TABLE IF NOT EXISTS signals (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    coin TEXT NOT NULL,
    regime TEXT NOT NULL, -- trending_up | trending_down | ranging | volatile
    conviction REAL NOT NULL, -- -1.0 to 1.0
    reasoning TEXT NOT NULL,
    t TIMESTAMPTZ DEFAULT NOW(),
    UNIQUE(coin, t)
);
-- Execution log
-- FIX: id promoted to PRIMARY KEY (the original table had no key at all).
CREATE TABLE IF NOT EXISTS executions (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    signal_id UUID REFERENCES signals(id),
    coin TEXT NOT NULL,
    side TEXT NOT NULL, -- buy | sell
    sz REAL NOT NULL,
    px REAL NOT NULL,
    filled_px REAL,
    status TEXT NOT NULL, -- pending | filled | cancelled | failed
    mode TEXT NOT NULL, -- paper | live
    error_msg TEXT,
    created_at TIMESTAMPTZ DEFAULT NOW(),
    updated_at TIMESTAMPTZ DEFAULT NOW()
);
-- Portfolio positions
-- One row per coin; pos_size 0 means flat.
CREATE TABLE IF NOT EXISTS portfolio (
    coin TEXT PRIMARY KEY,
    pos_size REAL NOT NULL DEFAULT 0,
    avg_px REAL NOT NULL DEFAULT 0,
    unrealized_pnl REAL NOT NULL DEFAULT 0,
    updated_at TIMESTAMPTZ DEFAULT NOW()
);

-- Performance log
-- One row per calendar day of trading.
CREATE TABLE IF NOT EXISTS performance (
    date DATE PRIMARY KEY,
    daily_pnl REAL NOT NULL DEFAULT 0,
    cumulative_pnl REAL NOT NULL DEFAULT 0,
    sharpe REAL,
    max_drawdown REAL,
    trades_count INTEGER NOT NULL DEFAULT 0
);
-- Risk events (circuit breakers triggered)
-- FIX: id promoted to PRIMARY KEY, consistent with the other id tables.
CREATE TABLE IF NOT EXISTS risk_events (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    event_type TEXT NOT NULL, -- max_drawdown | daily_loss | position_limit
    details JSONB NOT NULL,
    created_at TIMESTAMPTZ DEFAULT NOW()
);
-- Agent heartbeat log
-- Append-only; latest row per agent gives its current status.
CREATE TABLE IF NOT EXISTS agent_health (
    agent TEXT NOT NULL,
    heartbeat TIMESTAMPTZ DEFAULT NOW(),
    iteration INTEGER,
    status TEXT NOT NULL, -- running | paused | error
    details JSONB
);
CREATE INDEX IF NOT EXISTS idx_agent_health_agent ON agent_health (agent, heartbeat DESC);
-- Wallet sessions (180-day auth)
-- FIX: id promoted to PRIMARY KEY, consistent with the other id tables.
CREATE TABLE IF NOT EXISTS wallet_sessions (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    wallet_address VARCHAR(42) UNIQUE NOT NULL,
    session_token TEXT NOT NULL,
    issued_at TIMESTAMPTZ NOT NULL,
    expires_at TIMESTAMPTZ NOT NULL,
    last_active TIMESTAMPTZ DEFAULT NOW(),
    signature VARCHAR(256) NOT NULL,
    created_at TIMESTAMPTZ DEFAULT NOW()
);
-- Wallet addresses (known wallets)
-- Address is the 0x-prefixed 20-byte hex form (42 chars).
CREATE TABLE IF NOT EXISTS wallets (
    address VARCHAR(42) PRIMARY KEY,
    label TEXT,
    wallet_type TEXT NOT NULL, -- main | api | receiving
    created_at TIMESTAMPTZ DEFAULT NOW()
);

View File

@ -0,0 +1,195 @@
"""Supabase client for application data."""
from __future__ import annotations
from typing import Any, Optional
import httpx
from salior.core.config import config
class SupabaseClient:
    """REST-based Supabase client for Salior_DATA.

    Thin async wrapper over Supabase's PostgREST endpoint. Each call opens a
    short-lived httpx.AsyncClient; auth is via the project API key on every
    request. Filter-value convention: select() callers pass operator-prefixed
    values ("eq.BTC", "gt.now"), update() callers pass raw values.
    """

    def __init__(self, url: Optional[str] = None, key: Optional[str] = None) -> None:
        self.url = url or config.supabase_url
        self.key = key or config.supabase_key
        self._headers = {
            "apikey": self.key,
            "Authorization": f"Bearer {self.key}",
            "Content-Type": "application/json",
        }

    def _table_url(self, table: str) -> str:
        """Absolute PostgREST URL for *table*."""
        return f"{self.url}/rest/v1/{table}"

    async def insert(
        self,
        table: str,
        data: dict,
        params: Optional[dict] = None,
    ) -> dict | None:
        """Insert a row; return the inserted representation, or None on error.

        Fix: PostgREST defaults to ``Prefer: return=minimal`` (empty body),
        so the previous ``resp.json()`` on a 201 failed. We now request a
        representation explicitly, and add ``resolution=merge-duplicates``
        when the caller passes ``on_conflict`` so inserts become true upserts.
        """
        params = params or {"select": "*"}
        prefer = "return=representation"
        if "on_conflict" in params:
            prefer += ",resolution=merge-duplicates"
        async with httpx.AsyncClient() as client:
            resp = await client.post(
                self._table_url(table),
                json=data,
                headers={**self._headers, "Prefer": prefer},
                params=params,
            )
            if resp.status_code in (200, 201):
                return resp.json()
            return None

    async def update(
        self,
        table: str,
        data: dict,
        filters: dict,
    ) -> dict | None:
        """Update rows matching equality *filters* (raw values, no operator).

        Fix: query-string filters are joined with '&' (the previous ' AND '
        produced an invalid URL), and a 204 No Content response has no JSON
        body — we request a representation and only parse a 200.
        """
        query = "&".join(f"{k}=eq.{v}" for k, v in filters.items())
        async with httpx.AsyncClient() as client:
            resp = await client.patch(
                f"{self._table_url(table)}?{query}",
                json=data,
                headers={**self._headers, "Prefer": "return=representation"},
            )
            if resp.status_code == 200:
                return resp.json()
            return None

    async def select(
        self,
        table: str,
        filters: Optional[dict] = None,
        order: Optional[str] = None,
        limit: int = 100,
    ) -> list[dict]:
        """Select rows; *filters* values carry the operator (e.g. "eq.BTC").

        Fix: filters were previously collapsed into one bogus ``where`` query
        param (PostgREST ignores ``where``, and only the last filter
        survived). Each filter must be its own ``column=op.value`` parameter.
        """
        params: dict[str, str] = {"select": "*", "limit": str(limit)}
        if filters:
            for column, op_value in filters.items():
                params[column] = op_value
        if order:
            params["order"] = order
        async with httpx.AsyncClient() as client:
            resp = await client.get(
                self._table_url(table),
                headers=self._headers,
                params=params,
            )
            if resp.status_code == 200:
                return resp.json()
            return []

    async def rpc(
        self,
        func: str,
        params: Optional[dict] = None,
    ) -> Any:
        """Call a Supabase stored procedure; return its JSON result or None."""
        async with httpx.AsyncClient() as client:
            resp = await client.post(
                f"{self.url}/rest/v1/rpc/{func}",
                json=params or {},
                headers=self._headers,
            )
            if resp.status_code in (200, 201):
                return resp.json()
            return None

    # === Signals ===

    async def insert_signal(
        self,
        coin: str,
        regime: str,
        conviction: float,
        reasoning: str,
    ) -> Optional[dict]:
        """Insert a conviction signal; returns just the new row's id."""
        return await self.insert(
            "signals",
            {
                "coin": coin,
                "regime": regime,
                "conviction": conviction,
                "reasoning": reasoning,
            },
            params={"select": "id"},
        )

    async def get_recent_signals(
        self,
        coin: Optional[str] = None,
        limit: int = 10,
    ) -> list[dict]:
        """Most recent conviction signals, optionally for one coin."""
        filters = {"coin": f"eq.{coin}"} if coin else None
        return await self.select(
            "signals",
            filters=filters,
            order="created_at.desc",
            limit=limit,
        )

    # === Executions ===

    async def insert_execution(self, data: dict) -> Optional[dict]:
        """Log an execution row."""
        return await self.insert("executions", data)

    async def get_open_executions(self) -> list[dict]:
        """Executions still in flight (pending or filled), newest first."""
        return await self.select(
            "executions",
            filters={"status": "in.(pending,filled)"},
            order="created_at.desc",
            limit=50,
        )

    # === Portfolio ===

    async def get_portfolio(self) -> list[dict]:
        """All current portfolio positions."""
        return await self.select("portfolio", limit=100)

    # === Wallet sessions ===

    async def upsert_wallet_session(
        self,
        wallet_address: str,
        session_token: str,
        signature: str,
        expires_at: str,
    ) -> Optional[dict]:
        """Insert or update a wallet session (180-day).

        Relies on insert() adding ``resolution=merge-duplicates`` when
        ``on_conflict`` is present, so a repeat sign-in updates in place
        instead of violating the wallet_address UNIQUE constraint.
        """
        data = {
            "wallet_address": wallet_address,
            "session_token": session_token,
            "signature": signature,
            # 'now' is resolved server-side when cast to timestamptz —
            # NOTE(review): confirm PostgREST accepts the literal here.
            "issued_at": "now",
            "expires_at": expires_at,
        }
        return await self.insert(
            "wallet_sessions",
            data,
            params={"on_conflict": "wallet_address", "select": "*"},
        )

    async def get_wallet_session(
        self,
        wallet_address: str,
    ) -> Optional[dict]:
        """Latest unexpired session for *wallet_address*, or None."""
        rows = await self.select(
            "wallet_sessions",
            filters={
                "wallet_address": f"eq.{wallet_address}",
                "expires_at": "gt.now",
            },
            limit=1,
        )
        return rows[0] if rows else None

View File

@ -0,0 +1,281 @@
"""TimescaleDB client for market data."""
from __future__ import annotations

import asyncio
from datetime import datetime, timedelta, timezone
from typing import Any, Optional

import asyncpg

from salior.core.config import config
class TimescaleDB:
    """Async TimescaleDB client for market data.

    Wraps an asyncpg connection pool that is created lazily on first query.
    SECURITY NOTE: several methods interpolate *table* names into SQL with
    f-strings — table names must always come from internal constants
    (e.g. "candles_1m"), never from user input; row values are always bound
    as parameters.
    """

    def __init__(self, dsn: Optional[str] = None) -> None:
        self.dsn = dsn or config.timeseries_url
        self._pool: Optional[asyncpg.Pool] = None  # created by _ensure_pool()

    async def connect(self) -> None:
        """Create the connection pool."""
        self._pool = await asyncpg.create_pool(
            self.dsn,
            min_size=2,
            max_size=10,
            command_timeout=60,
        )

    async def close(self) -> None:
        """Close the connection pool (safe to call when not connected)."""
        if self._pool:
            await self._pool.close()
            self._pool = None

    async def _ensure_pool(self) -> None:
        """Lazily create the pool so callers never need an explicit connect()."""
        if not self._pool:
            await self.connect()

    async def execute(self, query: str, *args: Any) -> None:
        """Execute a statement (INSERT/UPDATE) without returning rows."""
        await self._ensure_pool()
        async with self._pool.acquire() as conn:
            await conn.execute(query, *args)

    async def fetch(self, query: str, *args: Any) -> list[dict]:
        """Fetch rows as a list of plain dicts."""
        await self._ensure_pool()
        async with self._pool.acquire() as conn:
            rows = await conn.fetch(query, *args)
            return [dict(r) for r in rows]

    # === Candles ===

    async def upsert_candle(
        self,
        table: str,
        coin: str,
        t: datetime,
        o: float,
        h: float,
        l: float,
        c: float,
        v: float,
    ) -> None:
        """Insert or update one OHLCV candle, keyed on (coin, t)."""
        await self.execute(
            f"""
            INSERT INTO {table} (coin, t, o, h, l, c, v)
            VALUES ($1, $2, $3, $4, $5, $6, $7)
            ON CONFLICT (coin, t) DO UPDATE SET
                o = EXCLUDED.o,
                h = EXCLUDED.h,
                l = EXCLUDED.l,
                c = EXCLUDED.c,
                v = EXCLUDED.v
            """,
            coin, t, o, h, l, c, v,
        )

    async def get_latest_candle(self, table: str, coin: str) -> Optional[dict]:
        """Most recent candle for *coin*, or None if the table is empty."""
        rows = await self.fetch(
            f"""
            SELECT * FROM {table}
            WHERE coin = $1
            ORDER BY t DESC
            LIMIT 1
            """,
            coin,
        )
        return rows[0] if rows else None

    async def get_candles(
        self,
        table: str,
        coin: str,
        hours: int = 24,
    ) -> list[dict]:
        """Candles for *coin* from the last *hours* hours, oldest first."""
        # Fix: datetime.utcnow() is naive (and deprecated in 3.12); the t
        # column is TIMESTAMPTZ, so compare with a timezone-aware instant.
        since = datetime.now(timezone.utc) - timedelta(hours=hours)
        return await self.fetch(
            f"""
            SELECT * FROM {table}
            WHERE coin = $1 AND t >= $2
            ORDER BY t ASC
            """,
            coin, since,
        )

    # === Trades ===

    async def insert_trade(
        self,
        coin: str,
        t: datetime,
        px: float,
        sz: float,
        side: str,
        hash: str,
    ) -> None:
        """Insert a trade; duplicates are dropped via ON CONFLICT DO NOTHING.

        NOTE: the ``hash`` parameter name shadows the builtin; it is kept
        unchanged because callers may pass it by keyword.
        """
        await self.execute(
            """
            INSERT INTO trades (coin, t, px, sz, side, hash)
            VALUES ($1, $2, $3, $4, $5, $6)
            ON CONFLICT DO NOTHING
            """,
            coin, t, px, sz, side, hash,
        )

    # === Orderbook ===

    async def insert_orderbook(
        self,
        coin: str,
        t: datetime,
        bids: list[dict],
        asks: list[dict],
    ) -> None:
        """Insert an orderbook snapshot.

        NOTE(review): bids/asks are passed as Python lists — this assumes
        the pool has a JSON(B) codec configured for those columns; confirm,
        otherwise serialize with json.dumps before binding.
        """
        await self.execute(
            """
            INSERT INTO orderbook (coin, t, bids, asks)
            VALUES ($1, $2, $3, $4)
            ON CONFLICT DO NOTHING
            """,
            coin, t, bids, asks,
        )

    # === Signals ===

    async def insert_signal(
        self,
        coin: str,
        regime: str,
        conviction: float,
        reasoning: str,
    ) -> Optional[str]:
        """Insert a signal and return its id as text.

        NOTE(review): the conflict target is (coin, t) but t is not in the
        column list — assumes the signals table defaults t (e.g. NOW()) and
        has a unique (coin, t) constraint; confirm against the schema.
        """
        row = await self.fetch(
            """
            INSERT INTO signals (coin, regime, conviction, reasoning)
            VALUES ($1, $2, $3, $4)
            ON CONFLICT (coin, t) DO UPDATE SET
                regime = EXCLUDED.regime,
                conviction = EXCLUDED.conviction,
                reasoning = EXCLUDED.reasoning
            RETURNING id::text
            """,
            coin, regime, conviction, reasoning,
        )
        return row[0]["id"] if row else None

    async def get_latest_signals(self, limit: int = 10) -> list[dict]:
        """Most recent signals across all coins, newest first."""
        return await self.fetch(
            """
            SELECT * FROM signals
            ORDER BY t DESC
            LIMIT $1
            """,
            limit,
        )

    # === Portfolio ===

    async def upsert_portfolio(
        self,
        coin: str,
        pos_size: float,
        avg_px: float,
        unrealized_pnl: float,
    ) -> None:
        """Insert or update the position row for *coin*."""
        await self.execute(
            """
            INSERT INTO portfolio (coin, pos_size, avg_px, unrealized_pnl)
            VALUES ($1, $2, $3, $4)
            ON CONFLICT (coin) DO UPDATE SET
                pos_size = EXCLUDED.pos_size,
                avg_px = EXCLUDED.avg_px,
                unrealized_pnl = EXCLUDED.unrealized_pnl,
                updated_at = NOW()
            """,
            coin, pos_size, avg_px, unrealized_pnl,
        )

    async def get_portfolio(self) -> list[dict]:
        """All portfolio positions."""
        return await self.fetch("SELECT * FROM portfolio")

    # === Executions ===

    async def insert_execution(
        self,
        signal_id: Optional[str],
        coin: str,
        side: str,
        sz: float,
        px: float,
        mode: str,
    ) -> str:
        """Insert a pending execution record and return its id as text."""
        rows = await self.fetch(
            """
            INSERT INTO executions (signal_id, coin, side, sz, px, mode, status)
            VALUES ($1, $2, $3, $4, $5, $6, 'pending')
            RETURNING id::text
            """,
            signal_id, coin, side, sz, px, mode,
        )
        return rows[0]["id"]

    async def update_execution(
        self,
        exec_id: str,
        status: str,
        filled_px: Optional[float] = None,
        error_msg: Optional[str] = None,
    ) -> None:
        """Update an execution's status; filled_px is kept if None is passed."""
        await self.execute(
            """
            UPDATE executions
            SET status = $2,
                filled_px = COALESCE($3, filled_px),
                error_msg = $4,
                updated_at = NOW()
            WHERE id = $1::uuid
            """,
            exec_id, status, filled_px, error_msg,
        )

    # === Health ===

    async def log_health(
        self,
        agent: str,
        status: str,
        iteration: Optional[int] = None,
        details: Optional[dict] = None,
    ) -> None:
        """Append an agent heartbeat row.

        NOTE(review): ``details`` is a Python dict bound to a JSONB column —
        same codec assumption as insert_orderbook; confirm.
        """
        await self.execute(
            """
            INSERT INTO agent_health (agent, status, iteration, details)
            VALUES ($1, $2, $3, $4)
            """,
            agent, status, iteration, details,
        )

    async def get_agent_health(self, agent: str, minutes: int = 10) -> list[dict]:
        """Up to 10 most recent heartbeats for *agent* within *minutes*."""
        # Timezone-aware cutoff to match the TIMESTAMPTZ heartbeat column.
        since = datetime.now(timezone.utc) - timedelta(minutes=minutes)
        return await self.fetch(
            """
            SELECT * FROM agent_health
            WHERE agent = $1 AND heartbeat >= $2
            ORDER BY heartbeat DESC
            LIMIT 10
            """,
            agent, since,
        )

5
salior/llm/__init__.py Normal file
View File

@ -0,0 +1,5 @@
"""LLM client module."""
from salior.llm.client import LLMClient, llm
__all__ = ["LLMClient", "llm"]

Binary file not shown.

Binary file not shown.

130
salior/llm/client.py Normal file
View File

@ -0,0 +1,130 @@
"""Unified LLM client with MiniMax → OpenRouter → Local routing."""
from __future__ import annotations
import json
from typing import Any, Optional
import httpx
from salior.core.config import config
class LLMClient:
    """Unified LLM client with automatic provider fallback.

    Providers are tried in priority order (MiniMax → OpenRouter → local).
    Fix: the previous implementation only *selected* the first enabled
    provider and raised on its failure — despite the module's stated
    "MiniMax → OpenRouter → Local routing", there was no actual fallback.
    chat() now tries each enabled provider in turn before giving up.
    """

    def __init__(self) -> None:
        self._providers = self._build_providers()

    def _build_providers(self) -> dict[str, dict]:
        """Build the provider table from environment-backed config."""
        return {
            "minimax": {
                "url": "https://api.minimax.chat/v1/text/chatcompletion_v2",
                "model": config.minimax_model,
                "api_key": config.minimax_api_key,
                "enabled": bool(config.minimax_api_key),
            },
            "openrouter": {
                "url": "https://openrouter.ai/api/v1/chat/completions",
                "model": "anthropic/claude-3.5-haiku",
                "api_key": config.openrouter_api_key,
                "enabled": bool(config.openrouter_api_key),
            },
            "local": {
                "url": f"{config.local_llm_url}/v1/chat/completions",
                "model": "local",
                "api_key": "not-needed",
                "enabled": bool(config.local_llm_url),
            },
        }

    async def _request(
        self,
        provider: dict,
        model: str,
        prompt: str,
        system: Optional[str],
        extra: dict,
    ) -> str:
        """Send one chat-completion request to a single provider.

        Raises RuntimeError on any non-200 response.
        """
        messages = []
        if system:
            messages.append({"role": "system", "content": system})
        messages.append({"role": "user", "content": prompt})
        headers = {
            "Authorization": f"Bearer {provider['api_key']}",
            "Content-Type": "application/json",
        }
        payload = {
            "model": model,
            "messages": messages,
            # Pass through sampling params etc., but never let callers
            # overwrite the model/messages we just built.
            **{k: v for k, v in extra.items() if k not in ["model", "messages"]},
        }
        async with httpx.AsyncClient(timeout=60.0) as client:
            resp = await client.post(provider["url"], headers=headers, json=payload)
            if resp.status_code != 200:
                raise RuntimeError(f"LLM request failed: {resp.status_code} {resp.text}")
            data = resp.json()
            return data["choices"][0]["message"]["content"]

    async def chat(
        self,
        prompt: str,
        system: Optional[str] = None,
        model_override: Optional[str] = None,
        **kwargs,
    ) -> str:
        """Send a chat completion request.

        Args:
            prompt: The user prompt
            system: Optional system message
            model_override: Force a specific provider/model
                (e.g. "openrouter/anthropic/claude-3"); disables fallback.

        Returns:
            The model's text response

        Raises:
            ValueError: model_override names an unconfigured provider.
            RuntimeError: no provider configured, or all providers failed.
        """
        # An explicit provider/model override is honored exactly — no fallback.
        if model_override and "/" in model_override:
            provider, model = model_override.split("/", 1)
            selected = self._providers.get(provider)
            if not selected or not selected["enabled"]:
                raise ValueError(f"Provider {provider} not configured or not enabled")
            return await self._request(selected, model, prompt, system, kwargs)

        candidates = [
            self._providers[name]
            for name in ("minimax", "openrouter", "local")
            if self._providers[name]["enabled"]
        ]
        if not candidates:
            raise RuntimeError("No LLM provider configured. Set MINIMAX_API_KEY, OPENROUTER_API_KEY, or LOCAL_LLM_URL")

        last_err: Optional[Exception] = None
        for provider in candidates:
            try:
                # A bare model_override (no "/") acts as a model name hint.
                return await self._request(
                    provider, model_override or provider["model"], prompt, system, kwargs
                )
            except Exception as e:  # noqa: BLE001 — fall through to next provider
                last_err = e
        raise RuntimeError(f"All LLM providers failed; last error: {last_err}") from last_err

    async def batch(
        self,
        calls: list[dict],
        system: Optional[str] = None,
    ) -> list[str]:
        """Batch multiple prompts into one request (if provider supports).

        Args:
            calls: List of {"prompt": str} dicts
            system: Optional system message

        Returns:
            List of responses in same order as calls
        """
        # For now, just call chat() for each (llm_batcher plugin does the batching)
        results = []
        for call in calls:
            result = await self.chat(
                prompt=call.get("prompt", ""),
                system=system,
            )
            results.append(result)
        return results


# Module-level singleton shared by agents.
llm = LLMClient()

4
salior/mcp/__init__.py Normal file
View File

@ -0,0 +1,4 @@
"""MCP server module."""
from salior.mcp.server import MCPServer
__all__ = ["MCPServer"]

Binary file not shown.

Binary file not shown.

169
salior/mcp/server.py Normal file
View File

@ -0,0 +1,169 @@
"""MCP server — external AI control of Salior via JSON-RPC."""
from __future__ import annotations
import asyncio
import json
from typing import Any, Callable
from salior.core.logging import setup_logging
from salior.db.supabase_client import SupabaseClient
log = setup_logging()
# MCP tool definitions. Key order is the order tools appear in the
# tools/list JSON response, and each "params" dict is surfaced verbatim as
# the tool's inputSchema properties — treat this table as wire contract.
TOOLS = {
    # Read-only: positions + PnL snapshot.
    "get_portfolio": {
        "description": "Get current positions and PnL",
        "params": {},
    },
    # Read-only: recent conviction signals, optionally filtered by coin.
    "get_signals": {
        "description": "Get recent conviction signals",
        "params": {"coin": {"type": "string", "optional": True}, "limit": {"type": "int", "optional": True}},
    },
    # Read-only: latest price/regime/conviction for one coin.
    "get_market_state": {
        "description": "Get regime + conviction for a coin",
        "params": {"coin": {"type": "string", "required": True}},
    },
    # Mutating: gated behind wallet confirmation (see _tool_place_order).
    "place_order": {
        "description": "Execute a trade (with confirmation gate)",
        "params": {
            "coin": {"type": "string", "required": True},
            "side": {"type": "string", "required": True},
            "size": {"type": "float", "required": True},
            "price": {"type": "float", "optional": True},
        },
    },
    # Read-only: aggregate performance metrics.
    "get_performance": {
        "description": "Historical PnL, Sharpe, drawdown",
        "params": {"days": {"type": "int", "optional": True}},
    },
}
class MCPServer:
    """JSON-RPC MCP server running on localhost:8080/mcp.

    Routes:
      POST /mcp        — JSON-RPC (tools/list, tools/call)
      GET  /mcp/tools  — plain tool listing
      GET  /mcp/health — liveness probe
    """

    def __init__(self, host: str = "localhost", port: int = 8080) -> None:
        self.host = host
        self.port = port
        self._supabase = SupabaseClient()
        self._server: Any = None  # reserved for a future handle to the runner

    async def start(self) -> None:
        """Start the aiohttp site (returns once the socket is listening)."""
        from aiohttp import web

        async def handle(request: web.Request) -> web.Response:
            body = await request.json()
            result = await self._handle_jsonrpc(body)
            return web.json_response(result)

        app = web.Application()
        app.router.add_post("/mcp", handle)
        app.router.add_get("/mcp/tools", self._handle_tools_list)
        app.router.add_get("/mcp/health", self._handle_health)
        runner = web.AppRunner(app)
        await runner.setup()
        site = web.TCPSite(runner, self.host, self.port)
        await site.start()
        log.info("mcp_server_started", host=self.host, port=self.port)

    async def _handle_jsonrpc(self, body: dict) -> dict:
        """Handle one JSON-RPC request; errors map to JSON-RPC error codes."""
        method = body.get("method", "")
        params = body.get("params", {})
        msg_id = body.get("id")
        try:
            if method == "tools/list":
                return {"id": msg_id, "result": self._list_tools()}
            elif method == "tools/call":
                tool = params.get("name", "")
                args = params.get("arguments", {})
                result = await self._call_tool(tool, args)
                return {"id": msg_id, "result": result}
            else:
                return {"id": msg_id, "error": {"code": -32601, "message": f"Unknown method: {method}"}}
        except Exception as e:
            log.error("mcp_error", method=method, error=str(e))
            return {"id": msg_id, "error": {"code": -32603, "message": str(e)}}

    def _list_tools(self) -> list[dict]:
        """List available MCP tools in TOOLS declaration order."""
        return [
            {"name": name, "description": info["description"], "inputSchema": {"type": "object", "properties": info["params"]}}
            for name, info in TOOLS.items()
        ]

    async def _call_tool(self, name: str, args: dict) -> Any:
        """Dispatch to the matching _tool_<name> handler method."""
        handler = getattr(self, f"_tool_{name}", None)
        if not handler:
            raise ValueError(f"Unknown tool: {name}")
        return await handler(args)

    async def _tool_get_portfolio(self, args: dict) -> dict:
        """Get current portfolio positions."""
        portfolio = await self._supabase.get_portfolio()
        return {"positions": portfolio, "count": len(portfolio)}

    async def _tool_get_signals(self, args: dict) -> dict:
        """Get recent conviction signals."""
        coin = args.get("coin")
        limit = args.get("limit", 10)
        signals = await self._supabase.get_recent_signals(coin=coin, limit=limit)
        return {"signals": signals, "count": len(signals)}

    async def _tool_get_market_state(self, args: dict) -> dict:
        """Get latest price, regime and conviction for a specific coin."""
        from salior.db.timescale_client import TimescaleDB
        db = TimescaleDB()
        await db.connect()
        try:
            coin = args["coin"]
            candles = await db.get_candles("candles_1m", coin, hours=24)
            latest = candles[-1] if candles else None
            signals = await self._supabase.get_recent_signals(coin=coin, limit=1)
            signal = signals[0] if signals else None
            return {
                "coin": coin,
                "price": latest["c"] if latest else None,
                "regime": signal["regime"] if signal else None,
                "conviction": signal["conviction"] if signal else None,
                "candles_count": len(candles),
            }
        finally:
            # Fix: the pool was created per call and never closed (leak).
            await db.close()

    async def _tool_place_order(self, args: dict) -> dict:
        """Place an order (requires confirmation)."""
        # For now, just confirm via log — full wallet flow requires user sign-in
        log.info("mcp_order_request", **args)
        return {
            "status": "requires_confirmation",
            "message": "Order requires wallet approval. Connect wallet at https://salior.ai",
            "requested": args,
        }

    async def _tool_get_performance(self, args: dict) -> dict:
        """Get historical performance metrics."""
        # Placeholder — would read from performance table
        return {"days": args.get("days", 30), "message": "Connect to TimescaleDB for full data"}

    async def _handle_tools_list(self, request: Any) -> Any:
        """GET /mcp/tools handler."""
        # Fix: `web` was only imported inside start(), so the previous
        # module-level reference raised NameError on every request.
        from aiohttp import web
        return web.json_response({"tools": self._list_tools()})

    async def _handle_health(self, request: Any) -> Any:
        """GET /mcp/health liveness handler."""
        from aiohttp import web
        return web.json_response({"status": "ok"})
async def main() -> None:
    """Start the MCP server and serve until the process is killed."""
    mcp = MCPServer()
    await mcp.start()
    # Park on an event that is never set: start() returns immediately,
    # so the coroutine must block to keep the aiohttp site alive.
    await asyncio.Event().wait()


if __name__ == "__main__":
    asyncio.run(main())

113
salior/plugins/__init__.py Normal file
View File

@ -0,0 +1,113 @@
"""Plugin system — discovery, loading, and dispatch."""
from __future__ import annotations
import os
import subprocess
import yaml
from pathlib import Path
from typing import Any, Optional
PLUGIN_DIR = Path(__file__).parent.parent.parent / "plugins"
class Plugin:
    """A discovered plugin: its manifest plus a subprocess entry point."""

    def __init__(self, name: str, manifest: dict, dir: Path) -> None:
        # The constructor stays dumb; discovery decides what goes in here.
        self.name = name
        self.manifest = manifest
        self.dir = dir
        self.enabled = False  # plugins start disabled until explicitly enabled

    @property
    def requires_gpu(self) -> bool:
        """Whether the manifest declares a GPU requirement (compute.gpu)."""
        compute_section = self.manifest.get("compute", {})
        return compute_section.get("gpu", False)

    @property
    def description(self) -> str:
        """Human-readable description from the manifest ('' if absent)."""
        return self.manifest.get("description", "")

    def run(self, method: str, params: dict) -> Any:
        """Run a plugin method as a subprocess.

        *params* are YAML-serialized onto the child's stdin; its stdout is
        parsed back as YAML and returned. A non-zero exit raises RuntimeError
        carrying the child's stderr.
        """
        proc = subprocess.run(
            ["python3", str(self.dir / "run.py"), method],
            input=yaml.dump(params),
            capture_output=True,
            text=True,
        )
        if proc.returncode != 0:
            raise RuntimeError(f"Plugin error: {proc.stderr}")
        return yaml.safe_load(proc.stdout)
class PluginRegistry:
    """Discovers and manages plugins under the plugins/ directory."""

    def __init__(self, plugin_dir: Optional[Path] = None) -> None:
        self.plugin_dir = plugin_dir or PLUGIN_DIR
        self._plugins: dict[str, Plugin] = {}  # name -> Plugin, filled by discover()

    def discover(self) -> dict[str, Plugin]:
        """Scan the plugin directory (results are cached after first scan)."""
        if self._plugins:
            return self._plugins
        if not self.plugin_dir.exists():
            return self._plugins
        for entry in self.plugin_dir.iterdir():
            # Skip plain files and private/underscore-prefixed directories.
            if not entry.is_dir() or entry.name.startswith("_"):
                continue
            manifest_path = entry / "manifest.yaml"
            if manifest_path.exists():
                manifest = yaml.safe_load(manifest_path.read_text())
                self._plugins[entry.name] = Plugin(entry.name, manifest, entry)
        return self._plugins

    def enable(self, name: str) -> None:
        """Enable a plugin (no-op if unknown).

        Fix: enable/disable/dispatch/list previously read the raw cache and
        silently saw an empty registry unless discover() had been called
        first; they now discover on demand (matching SkillRegistry.get).
        """
        plugin = self.discover().get(name)
        if plugin:
            plugin.enabled = True

    def disable(self, name: str) -> None:
        """Disable a plugin (no-op if unknown)."""
        plugin = self.discover().get(name)
        if plugin:
            plugin.enabled = False

    def dispatch(
        self,
        plugin_name: str,
        method: str,
        params: Optional[dict] = None,
    ) -> Any:
        """Dispatch a call to an enabled plugin.

        Raises:
            ValueError: unknown plugin name.
            RuntimeError: plugin exists but is not enabled.
        """
        plugin = self.discover().get(plugin_name)
        if not plugin:
            raise ValueError(f"Plugin '{plugin_name}' not found")
        if not plugin.enabled:
            raise RuntimeError(f"Plugin '{plugin_name}' is not enabled")
        return plugin.run(method, params or {})

    def list(self) -> list[dict]:
        """All plugins with enabled/GPU status (discovers first if needed)."""
        return [
            {
                "name": p.name,
                "enabled": p.enabled,
                "description": p.description,
                "gpu": p.requires_gpu,
            }
            for p in self.discover().values()
        ]
# Process-wide registry shared by every caller of the module-level API.
registry = PluginRegistry()


def dispatch(plugin_name: str, method: str, params: Optional[dict] = None) -> Any:
    """Module-level convenience wrapper around ``registry.dispatch``."""
    return registry.dispatch(plugin_name, method, params)

Binary file not shown.

View File

@ -0,0 +1,4 @@
"""Skills module."""
from salior.skills.registry import SkillRegistry
__all__ = ["SkillRegistry"]

Binary file not shown.

Binary file not shown.

30
salior/skills/build.md Normal file
View File

@ -0,0 +1,30 @@
# Build Skill
Use this when writing code or building a feature.
## Steps
1. **Spec first** — Write the PRD before touching code (use `/spec`)
2. **Vertical slice** — Build the smallest end-to-end path first
3. **Verify each layer** — Check data flows before adding logic
4. **Iterate** — Add features one at a time, test each
## Verification Gate
- Code runs without import/syntax errors
- Basic happy path works (e.g., for an agent: starts, loops, writes data)
- No hardcoded secrets or placeholder values in final code
## Principles
- Thin vertical slices over big upfront architecture
- Verify before adding complexity
- Delete dead code immediately
- Name things clearly the first time
## Notes
- Follow the project conventions (check existing files for patterns)
- Use type hints on all function signatures
- Structured logging with structlog
- All file paths relative to project root

76
salior/skills/registry.py Normal file
View File

@ -0,0 +1,76 @@
"""Agent skill definitions — markdown files with steps + verification gates."""
from __future__ import annotations
import os
from pathlib import Path
from typing import Optional
SKILLS_DIR = Path(__file__).parent
class Skill:
    """A skill: a markdown file of structured steps and verification gates."""

    def __init__(self, name: str, path: Path) -> None:
        self.name = name
        self.path = path
        self._content: Optional[str] = None  # lazily-read file body

    @property
    def content(self) -> str:
        """Markdown text of the skill, read from disk on first access."""
        if self._content is None:
            self._content = self.path.read_text()
        return self._content

    def steps(self) -> list[str]:
        """Extract step lines: stripped lines beginning with a digit or '-'."""
        picked: list[str] = []
        for raw in self.content.split("\n"):
            stripped = raw.strip()
            if not stripped:
                continue
            if stripped[0].isdigit() or stripped.startswith("-"):
                picked.append(stripped)
        return picked

    def verify(self, step: int, result: str) -> bool:
        """Verification-gate hook; the base implementation always passes."""
        # Placeholder: subclasses are expected to implement real checks.
        return True  # Override in subclass
class SkillRegistry:
    """Discovers and manages skills from the skills/ directory."""

    def __init__(self, skills_dir: Optional[Path] = None) -> None:
        self.skills_dir = skills_dir or SKILLS_DIR
        self._skills: dict[str, Skill] = {}  # name -> Skill, filled lazily

    def discover(self) -> dict[str, Skill]:
        """Scan for *.md files; cached once the scan finds anything."""
        if self._skills:
            return self._skills
        for md_file in self.skills_dir.glob("*.md"):
            # Underscore-prefixed files are private and skipped.
            if md_file.stem.startswith("_"):
                continue
            self._skills[md_file.stem] = Skill(md_file.stem, md_file)
        return self._skills

    def get(self, name: str) -> Optional[Skill]:
        """Look up one skill by name, discovering first if needed."""
        if not self._skills:
            self.discover()
        return self._skills.get(name)

    def list(self) -> list[str]:
        """All available skill names."""
        return list(self.discover().keys())

    def render(self, name: str) -> str:
        """Full markdown content for *name*, or a helpful error string."""
        found = self.get(name)
        if found is None:
            return f"Skill '{name}' not found. Available: {', '.join(self.list())}"
        return found.content

33
salior/skills/research.md Normal file
View File

@ -0,0 +1,33 @@
# Research Skill
Use this when investigating a topic, market condition, or technical problem.
## Steps
1. **Decompose** — Break the question into specific sub-questions
2. **Gather** — Fetch data from multiple sources (web, database, APIs)
3. **Analyze** — Apply reasoning, compare viewpoints
4. **Validate** — Check for contradictions, confidence level
5. **Synthesize** — Write a clear conclusion with supporting evidence
## Verification Gate
- Each claim must have a source or data point
- If confidence < 60%, say so explicitly
- List remaining unknowns at the end
## Output Format
```
# Research: [topic]
## Findings
[Key facts with citations]
## Analysis
[Reasoning chain]
## Confidence: [HIGH/MEDIUM/LOW]
## Remaining unknowns: [list]
## Recommended actions: [list]
```

48
salior/skills/review.md Normal file
View File

@ -0,0 +1,48 @@
# Review Skill
Use this when reviewing code, decisions, or architecture.
## Steps
1. **Understand** — Read the full context before judging
2. **Pressure test** — What's the weakest part? The hidden assumption?
3. **Check against requirements** — Does this actually solve the stated problem?
4. **Consider alternatives** — Is there a simpler approach?
5. **Flag clearly** — Distinguish bugs from style preferences
## Verification Gate
- Bug: Must be fixed before merge
- Style: Suggest only if it impacts readability
- Architecture: Propose an alternative, don't just criticize
## Review Categories
| Category | Priority | Action |
|----------|----------|--------|
| Correctness bug | CRITICAL | Block merge |
| Security issue | CRITICAL | Block merge |
| Performance regression | HIGH | Discuss |
| Missing test | MEDIUM | Suggest |
| Style/naming | LOW | Optional |
| Architecture | MEDIUM | Discuss |
## Output Format
```
# Review: [item]
## Summary
[One paragraph overview]
## Issues (blocking)
- [list]
## Issues (non-blocking)
- [list]
## Recommendations
- [list]
## Verdict: APPROVE / REQUEST_CHANGES / BLOCK
```

45
salior/skills/spec.md Normal file
View File

@ -0,0 +1,45 @@
# Spec Skill
Use this before writing any significant code or architectural decisions.
## Steps
1. **What** — Define the feature clearly in one sentence
2. **Why** — Why is this needed? What problem does it solve?
3. **Scope** — What's in scope? What's explicitly out of scope?
4. **Interface** — What does the API / interface look like?
5. **Data flow** — How does data move through the system?
6. **Failure modes** — What can go wrong? How does it recover?
7. **Verify** — Does this spec solve the original problem?
## Output Format
````markdown
# Feature: [name]
## What
[One sentence]
## Why
[Problem being solved]
## Scope
- In: [list]
- Out: [list]
## Interface
```
[API/function signatures]
```
## Data Flow
[How data moves]
## Failure Modes
| Scenario | Response |
|----------|----------|
| ... | ... |
## Verification
[How to verify this works]
````

View File

@ -0,0 +1,4 @@
"""Wallet module — wallet connect integration."""
from salior.wallet.connect import WalletSession
__all__ = ["WalletSession"]

Binary file not shown.

Binary file not shown.

97
salior/wallet/connect.py Normal file
View File

@ -0,0 +1,97 @@
"""Wallet Connect — EIP-4361 sign-in with 180-day sessions."""
from __future__ import annotations
import uuid
from datetime import datetime, timedelta, timezone
from typing import Optional
from salior.core.config import config
from salior.db.supabase_client import SupabaseClient
class WalletSession:
    """Handle wallet connection + 180-day session management.

    Works with Rabby or MetaMask (both inject window.ethereum).
    Frontend calls connect() to trigger wallet popup.
    Backend verifies signature and stores session.
    """

    def __init__(self) -> None:
        self._supabase = SupabaseClient()
        self._address: Optional[str] = None        # set once connect() succeeds
        self._session_token: Optional[str] = None  # opaque bearer token
        self._expires_at: Optional[datetime] = None

    @property
    def address(self) -> Optional[str]:
        """Connected wallet address (None before connect())."""
        return self._address

    @property
    def is_connected(self) -> bool:
        """True if a wallet is connected and its session has not expired."""
        if not self._address or not self._expires_at:
            return False
        return datetime.now(timezone.utc) < self._expires_at

    def generate_auth_message(self, address: str) -> str:
        """Generate the EIP-4361 auth message for the wallet to sign.

        NOTE(review): the nonce is generated but not persisted, so
        verification cannot check it — replay protection needs the nonce
        stored server-side; confirm before production use.
        """
        session_nonce = str(uuid.uuid4())
        issued_ts = datetime.now(timezone.utc).isoformat()
        expiry_ts = (datetime.now(timezone.utc) + timedelta(days=config.wallet_session_days)).isoformat()
        return f"salior.ai wants you to sign in.\n\nAddress: {address}\nNonce: {session_nonce}\nIssued At: {issued_ts}\nExpiration Time: {expiry_ts}\n\nSign in to Salior Trading System."

    def verify_signature(
        self,
        address: str,
        signature: str,
        message: str,
    ) -> bool:
        """Verify a wallet signature against the auth message.

        In production: use eth_account or web3.py to verify ECDSA signature.
        SECURITY NOTE: this placeholder accepts ANY string longer than 20
        characters — it performs no cryptographic check at all.
        """
        if not signature:
            return False
        return len(signature) > 20

    async def connect(self, address: str, signature: str, message: str) -> dict:
        """Complete wallet connection after user signs.

        Called by backend after frontend submits the signed auth message.
        Raises ValueError if the signature check fails; returns session info.
        """
        if not self.verify_signature(address, signature, message):
            raise ValueError("Invalid signature")
        new_token = str(uuid.uuid4())
        expiry = datetime.now(timezone.utc) + timedelta(days=config.wallet_session_days)
        await self._supabase.upsert_wallet_session(
            wallet_address=address,
            session_token=new_token,
            signature=signature,
            expires_at=expiry.isoformat(),
        )
        # Only mutate local state after the store succeeds.
        self._address = address
        self._session_token = new_token
        self._expires_at = expiry
        return {
            "address": address,
            "session_token": new_token,
            "expires_at": expiry.isoformat(),
            "days": config.wallet_session_days,
        }

    async def get_session(self, address: str) -> Optional[dict]:
        """Check for an existing valid session for an address."""
        return await self._supabase.get_wallet_session(address)

    def sign_transaction(self, tx_params: dict) -> str:
        """Request user signature for a specific transaction.

        In production: return tx_params for frontend to call wallet popup.
        """
        raise NotImplementedError("Use frontend wallet popup for tx signing")