Skip to content

Commit 5a1755c

Browse files
Merge pull request #286 from cortexlinux/feature/mcp-server
[feature] Add MCP server for AI assistant integration
2 parents 5a3f6f2 + 10e5f0c commit 5a1755c

File tree

4 files changed

+363
-0
lines changed

4 files changed

+363
-0
lines changed

AGENTS.md

Lines changed: 81 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,81 @@
1+
# Cortex Linux - AI Agent Guidelines
2+
3+
## Project Overview
4+
5+
Cortex Linux is an AI-native package manager for Debian/Ubuntu that understands natural language commands. It wraps `apt` with LLM intelligence to parse requests, detect hardware, resolve dependencies, and execute installations safely.
6+
7+
**Repository**: https://github.com/cortexlinux/cortex
8+
**License**: Apache 2.0
9+
**Primary Language**: Python 3.10+
10+
11+
## Quick Start
12+
13+
```bash
14+
# Clone and setup
15+
git clone https://github.com/cortexlinux/cortex.git
16+
cd cortex
17+
python3 -m venv venv
18+
source venv/bin/activate
19+
pip install -e .
20+
21+
# Configure API key
22+
echo 'ANTHROPIC_API_KEY=your-key-here' > .env
23+
24+
# Verify installation
25+
cortex install nginx --dry-run
26+
```
27+
28+
## Development Environment
29+
30+
### Prerequisites
31+
- Python 3.10 or higher
32+
- Ubuntu 22.04+ or Debian 12+
33+
- Virtual environment (required)
34+
- Anthropic API key or OpenAI API key
35+
36+
### Setup Commands
37+
```bash
38+
python3 -m venv venv
39+
source venv/bin/activate
40+
pip install -e .
41+
pip install -r requirements-dev.txt
42+
pytest tests/ -v
43+
```
44+
45+
## Testing Instructions
46+
47+
```bash
48+
# Run all tests
49+
pytest tests/ -v
50+
51+
# Test dry-run (safe)
52+
cortex install nginx --dry-run
53+
54+
# Test hardware detection
55+
cortex-detect-hardware
56+
```
57+
58+
## Code Standards
59+
60+
- Follow PEP 8
61+
- Type hints required
62+
- Docstrings for public APIs
63+
- >80% test coverage for PRs
64+
65+
## Safety Requirements
66+
67+
1. Dry-run by default for all installations
68+
2. No silent sudo
69+
3. Firejail sandboxing required
70+
4. Audit logging to ~/.cortex/history.db
71+
72+
## PR Guidelines
73+
74+
- Title format: [component] Description
75+
- All tests must pass
76+
- Documentation required for new features
77+
78+
## Contact
79+
80+
- Discord: https://discord.gg/uCqHvxjU83
81+

README_MCP.md

Lines changed: 47 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,47 @@
1+
# Cortex Linux MCP Server
2+
3+
Connect any MCP-compatible AI (Claude, ChatGPT, Cursor, VS Code) to Cortex Linux.
4+
5+
## Install
6+
7+
```bash
8+
pip install cortex-mcp-server
9+
```
10+
11+
## Configure Claude Desktop
12+
13+
Add to `~/.config/claude/claude_desktop_config.json`:
14+
15+
```json
16+
{
17+
"mcpServers": {
18+
"cortex-linux": {
19+
"command": "cortex-mcp-server"
20+
}
21+
}
22+
}
23+
```
24+
25+
## Available Tools
26+
27+
| Tool | Description |
28+
|------|-------------|
29+
| install_package | Install packages via natural language |
30+
| search_packages | Search package database |
31+
| get_history | View installation history |
32+
| rollback | Rollback previous installation |
33+
| detect_hardware | Detect GPU/CPU |
34+
| system_status | Get system status |
35+
36+
## Safety
37+
38+
- Dry-run by default
39+
- Explicit confirmation required for changes
40+
- Firejail sandboxing
41+
- Full audit logging
42+
43+
## Links
44+
45+
- [MCP Specification](https://modelcontextprotocol.io)
46+
- [AAIF](https://aaif.io)
47+
- [Discord](https://discord.gg/uCqHvxjU83)

mcp/__init__.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,4 @@
1+
"""Cortex Linux MCP Server package."""
2+
from .cortex_mcp_server import CortexMCPServer, main
3+
4+
__all__ = ["CortexMCPServer", "main"]

mcp/cortex_mcp_server.py

Lines changed: 231 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,231 @@
1+
#!/usr/bin/env python3
2+
"""
3+
Cortex Linux MCP Server
4+
5+
Model Context Protocol server for AI-native package management.
6+
Connects Claude, ChatGPT, Cursor, VS Code to Cortex Linux.
7+
"""
8+
9+
import asyncio
10+
import json
11+
import logging
12+
import os
13+
import subprocess
14+
from datetime import datetime
15+
from typing import Optional
16+
17+
try:
18+
from mcp.server import Server
19+
from mcp.server.stdio import stdio_server
20+
from mcp.types import Tool, TextContent, CallToolResult, ListToolsResult
21+
except ImportError:
22+
print("MCP SDK not installed. Run: pip install mcp[cli]")
23+
import sys
24+
sys.exit(1)
25+
26+
logging.basicConfig(level=logging.INFO)
27+
logger = logging.getLogger("cortex-mcp")
28+
29+
SERVER_NAME = "cortex-linux"
30+
SERVER_VERSION = "1.0.0"
31+
32+
33+
class CortexMCPServer:
    """MCP server exposing Cortex Linux package management to AI assistants.

    Registers six tools (install_package, search_packages, get_history,
    rollback, detect_hardware, system_status) with the MCP SDK and serves
    them over stdio. Most tools shell out to the ``cortex`` CLI as async
    subprocesses; ``search_packages`` queries ``apt-cache`` directly and
    ``detect_hardware`` reads /proc plus nvidia-smi.
    """

    def __init__(self):
        self.server = Server(SERVER_NAME)
        self._setup_handlers()
        # Resolved once at startup; every tool invocation reuses this path.
        self._cortex_path = self._find_cortex()

    def _find_cortex(self) -> str:
        """Locate the cortex CLI on PATH, falling back to the bare name.

        Uses shutil.which instead of spawning ``which`` so the lookup also
        works on systems where the ``which`` binary is not installed.
        """
        import shutil  # local import, matching the file's lazy-import style

        return shutil.which("cortex") or "cortex"

    def _setup_handlers(self):
        """Register the list_tools / call_tool handlers on the MCP server."""

        @self.server.list_tools()
        async def list_tools() -> ListToolsResult:
            # Tool names, schemas, and defaults are the public contract with
            # MCP clients — keep them stable.
            return ListToolsResult(tools=[
                Tool(
                    name="install_package",
                    description="Install packages using natural language. Safe dry-run by default.",
                    inputSchema={
                        "type": "object",
                        "properties": {
                            "request": {"type": "string", "description": "Package name or description"},
                            "dry_run": {"type": "boolean", "default": True},
                            "optimize_hardware": {"type": "boolean", "default": True}
                        },
                        "required": ["request"]
                    }
                ),
                Tool(
                    name="search_packages",
                    description="Search for packages by name or description.",
                    inputSchema={
                        "type": "object",
                        "properties": {
                            "query": {"type": "string"},
                            "limit": {"type": "integer", "default": 10}
                        },
                        "required": ["query"]
                    }
                ),
                Tool(
                    name="get_history",
                    description="Get installation history with rollback IDs.",
                    inputSchema={
                        "type": "object",
                        "properties": {"limit": {"type": "integer", "default": 10}}
                    }
                ),
                Tool(
                    name="rollback",
                    description="Rollback a previous installation.",
                    inputSchema={
                        "type": "object",
                        "properties": {
                            "installation_id": {"type": "string"},
                            "dry_run": {"type": "boolean", "default": True}
                        },
                        "required": ["installation_id"]
                    }
                ),
                Tool(
                    name="detect_hardware",
                    description="Detect GPU/CPU and get optimization recommendations.",
                    inputSchema={"type": "object", "properties": {}}
                ),
                Tool(
                    name="system_status",
                    description="Get system disk space, packages, and updates.",
                    inputSchema={"type": "object", "properties": {}}
                ),
            ])

        @self.server.call_tool()
        async def call_tool(name: str, arguments: dict) -> CallToolResult:
            """Dispatch a tool call by name; errors become isError results."""
            try:
                if name == "install_package":
                    result = await self._install_package(
                        arguments.get("request", ""),
                        arguments.get("dry_run", True),
                        arguments.get("optimize_hardware", True)
                    )
                elif name == "search_packages":
                    result = await self._search_packages(
                        arguments.get("query", ""),
                        arguments.get("limit", 10)
                    )
                elif name == "get_history":
                    result = await self._get_history(arguments.get("limit", 10))
                elif name == "rollback":
                    result = await self._rollback(
                        arguments.get("installation_id", ""),
                        arguments.get("dry_run", True)
                    )
                elif name == "detect_hardware":
                    result = await self._detect_hardware()
                elif name == "system_status":
                    result = await self._system_status()
                else:
                    result = {"error": f"Unknown tool: {name}"}

                # default=str keeps non-JSON types (e.g. datetime) serializable.
                return CallToolResult(
                    content=[TextContent(type="text", text=json.dumps(result, indent=2, default=str))]
                )
            except Exception as e:
                # Top-level tool boundary: report the failure to the client
                # instead of crashing the server loop.
                return CallToolResult(
                    content=[TextContent(type="text", text=json.dumps({"error": str(e)}))],
                    isError=True
                )

    async def _run_cortex(self, args: list[str]) -> dict:
        """Run the cortex CLI with *args* and capture its output.

        Returns a dict with ``success``, ``stdout`` and ``stderr`` keys, or
        ``success``/``error`` when the binary is missing.
        """
        cmd = [self._cortex_path] + args
        try:
            process = await asyncio.create_subprocess_exec(
                *cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
            )
            stdout, stderr = await process.communicate()
            return {
                "success": process.returncode == 0,
                "stdout": stdout.decode("utf-8"),
                "stderr": stderr.decode("utf-8")
            }
        except FileNotFoundError:
            return {"success": False, "error": "Cortex CLI not found"}

    async def _install_package(self, request: str, dry_run: bool = True, optimize: bool = True) -> dict:
        """Install *request* via cortex; dry-run unless explicitly disabled."""
        args = ["install", request, "--dry-run" if dry_run else "--execute"]
        if optimize:
            args.append("--optimize")
        result = await self._run_cortex(args)
        return {"mode": "dry_run" if dry_run else "execute", "request": request, **result}

    async def _search_packages(self, query: str, limit: int = 10) -> dict:
        """Search the apt package database for *query* (first *limit* hits)."""
        process = await asyncio.create_subprocess_exec(
            "apt-cache", "search", query,
            stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
        )
        stdout, _ = await process.communicate()
        lines = stdout.decode("utf-8").strip().split("\n")[:limit]
        packages = []
        for line in lines:
            # partition (not split) so descriptions that themselves contain
            # " - " are kept intact instead of truncated at the separator.
            name, sep, description = line.partition(" - ")
            if sep:
                packages.append({"name": name, "description": description})
        return {"query": query, "count": len(packages), "packages": packages}

    async def _get_history(self, limit: int = 10) -> dict:
        """Return the most recent *limit* cortex installation records."""
        result = await self._run_cortex(["history", "--limit", str(limit)])
        return {"limit": limit, **result}

    async def _rollback(self, installation_id: str, dry_run: bool = True) -> dict:
        """Roll back a prior installation; dry-run unless explicitly disabled."""
        args = ["rollback", installation_id]
        if dry_run:
            args.append("--dry-run")
        result = await self._run_cortex(args)
        return {"installation_id": installation_id, "mode": "dry_run" if dry_run else "execute", **result}

    async def _detect_hardware(self) -> dict:
        """Best-effort CPU/GPU detection; never raises.

        The "cpu" key is always present (default "Unknown"), including on
        architectures whose /proc/cpuinfo has no "model name" line.
        """
        hardware = {"cpu": "Unknown"}
        try:
            with open("/proc/cpuinfo") as f:
                for line in f:
                    if line.startswith("model name"):
                        hardware["cpu"] = line.split(":")[1].strip()
                        break
        except (OSError, IndexError):
            # Unreadable /proc or unexpected line format: keep the default.
            pass

        try:
            process = await asyncio.create_subprocess_exec(
                "nvidia-smi", "--query-gpu=name", "--format=csv,noheader",
                stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
            )
            stdout, _ = await process.communicate()
            if process.returncode == 0:
                hardware["gpu"] = stdout.decode("utf-8").strip()
        except OSError:
            # nvidia-smi not installed (no NVIDIA GPU / driver present).
            hardware["gpu"] = None

        return hardware

    async def _system_status(self) -> dict:
        """Report a timestamp plus root-filesystem disk usage from ``df``."""
        status = {"timestamp": datetime.now().isoformat()}
        process = await asyncio.create_subprocess_exec(
            "df", "-h", "/", stdout=asyncio.subprocess.PIPE
        )
        stdout, _ = await process.communicate()
        lines = stdout.decode("utf-8").strip().split("\n")
        if len(lines) >= 2:
            parts = lines[1].split()
            if len(parts) >= 4:  # Filesystem, Size, Used, Avail, ...
                status["disk"] = {"total": parts[1], "used": parts[2], "available": parts[3]}
        return status

    async def run(self):
        """Serve MCP requests over stdio until the client disconnects."""
        async with stdio_server() as (read_stream, write_stream):
            await self.server.run(read_stream, write_stream, self.server.create_initialization_options())
224+
225+
226+
def main() -> None:
    """Console entry point: build the server and run it to completion."""
    server = CortexMCPServer()
    asyncio.run(server.run())


if __name__ == "__main__":
    main()

0 commit comments

Comments
 (0)