# Backend Development Guide

Developing the IfAI backend with Rust and Tauri.
## Getting Started

### Prerequisites

- Rust 1.70+
- Cargo (comes with Rust)
- Basic Rust knowledge

### Development Setup
```bash
# Navigate to Tauri directory
cd src-tauri

# Run in development mode
cargo tauri dev

# Run tests
cargo test

# Run linter
cargo clippy

# Format code
cargo fmt
```

## Project Structure
```
src-tauri/src/
├── main.rs               # Entry point
├── lib.rs                # Library exports
├── commands/             # Tauri commands
│   ├── mod.rs
│   ├── atomic_commands.rs
│   ├── ai_commands.rs
│   └── symbol_commands.rs
├── ai/                   # AI module
├── agent_system/         # Agent framework
├── codebase/             # Code analysis
└── utils/                # Utilities
```
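Each command group lives in its own file, so `commands/mod.rs` plausibly just re-exports the submodules (a sketch matching the tree above, not the project's actual file):

```rust
// commands/mod.rs (sketch)
pub mod ai_commands;
pub mod atomic_commands;
pub mod symbol_commands;
```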
## Creating Tauri Commands

### Basic Command

```rust
// commands/atomic_commands.rs

#[tauri::command]
pub async fn greet(name: &str) -> String {
    format!("Hello, {}! You've been greeted from Rust!", name)
}
```
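Commands can also read state managed by Tauri. A minimal sketch, assuming a hypothetical `AppState` counter registered on the builder with `.manage(AppState::default())`:

```rust
use std::sync::Mutex;
use tauri::State;

// Hypothetical shared state; register it with .manage(AppState::default())
// on the Builder before invoke_handler
#[derive(Default)]
pub struct AppState {
    pub counter: Mutex<u32>,
}

#[tauri::command]
pub fn increment(state: State<'_, AppState>) -> u32 {
    // Lock, bump, and return the new counter value
    let mut counter = state.counter.lock().unwrap();
    *counter += 1;
    *counter
}
```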
### Register Commands

```rust
// lib.rs
mod commands;
use commands::atomic_commands::{greet, read_file, write_file};

#[cfg_attr(mobile, tauri::mobile_entry_point)]
pub fn run() {
    tauri::Builder::default()
        .invoke_handler(tauri::generate_handler![
            greet,
            read_file,
            write_file
        ])
        .run(tauri::generate_context!())
        .expect("error while running tauri application");
}
```
### Error Handling

`thiserror` gives you ergonomic error types, and Tauri additionally requires command error types to implement `serde::Serialize` so they can cross the IPC boundary:

```rust
use std::fs;
use thiserror::Error;

#[derive(Debug, Error)]
pub enum MyError {
    #[error("IO error: {0}")]
    Io(#[from] std::io::Error),
    #[error("Parse error: {0}")]
    Parse(String),
}

// Tauri needs command errors to be serializable; send them as strings
impl serde::Serialize for MyError {
    fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        serializer.serialize_str(&self.to_string())
    }
}

#[tauri::command]
async fn read_file(path: String) -> Result<String, MyError> {
    let content = fs::read_to_string(&path)?;
    Ok(content)
}
```

## AI Integration
### OpenAI Client

```rust
// ai/openai.rs
use anyhow::Result;
use reqwest::Client;
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Clone)]
pub struct Message {
    pub role: String,
    pub content: String,
}

#[derive(Serialize)]
struct OpenAIRequest {
    model: String,
    messages: Vec<Message>,
}

#[derive(Deserialize)]
struct OpenAIResponse {
    choices: Vec<Choice>,
}

#[derive(Deserialize)]
struct Choice {
    message: Message,
}

pub async fn chat_completion(messages: Vec<Message>) -> Result<String> {
    // Read the key from the environment rather than hard-coding it
    let api_key = std::env::var("OPENAI_API_KEY")?;
    let client = Client::new();
    let request = OpenAIRequest {
        model: "gpt-4".to_string(),
        messages,
    };
    let response: OpenAIResponse = client
        .post("https://api.openai.com/v1/chat/completions")
        .header("Authorization", format!("Bearer {}", api_key))
        .json(&request)
        .send()
        .await?
        .json()
        .await?;
    // Extract the assistant's reply from the first choice
    let content = response
        .choices
        .first()
        .map(|c| c.message.content.clone())
        .unwrap_or_default();
    Ok(content)
}
```
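A thin wrapper makes this callable from the frontend. A sketch; the `ai_chat` name and module path are illustrative:

```rust
// commands/ai_commands.rs (illustrative wrapper)
use crate::ai::openai::{chat_completion, Message};

#[tauri::command]
pub async fn ai_chat(messages: Vec<Message>) -> Result<String, String> {
    // Map the error to a String so Tauri can serialize it
    chat_completion(messages).await.map_err(|e| e.to_string())
}
```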
### Local LLM with Ollama

```rust
// ai/local.rs
use anyhow::Result;
use reqwest::Client;

// Shared Message type from the OpenAI module
use crate::ai::openai::Message;

pub async fn ollama_chat(messages: Vec<Message>) -> Result<String> {
    let client = Client::new();
    let response = client
        .post("http://localhost:11434/api/chat")
        .json(&serde_json::json!({
            "model": "qwen2.5-coder",
            "messages": messages,
            // Request one complete reply instead of Ollama's default stream
            "stream": false
        }))
        .send()
        .await?;
    Ok(response.text().await?)
}
```
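The non-streaming reply is a JSON object whose `message.content` field holds the model's text (field names per Ollama's chat API); a small sketch of extracting it:

```rust
use anyhow::{anyhow, Result};

/// Pull the assistant text out of a non-streaming /api/chat reply.
fn extract_ollama_content(body: &str) -> Result<String> {
    let value: serde_json::Value = serde_json::from_str(body)?;
    value["message"]["content"]
        .as_str()
        .map(str::to_owned)
        .ok_or_else(|| anyhow!("unexpected Ollama response shape"))
}
```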
## Code Analysis

### Tree-sitter Integration
```rust
// codebase/parser.rs
use tree_sitter::{Parser, Tree};

pub fn parse_code(code: &str, lang: &str) -> Result<Tree, MyError> {
    let mut parser = Parser::new();
    let language = match lang {
        "typescript" => tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into(),
        "rust" => tree_sitter_rust::LANGUAGE.into(),
        _ => return Err(MyError::Parse("Unsupported language".into())),
    };
    parser
        .set_language(&language)
        .map_err(|e| MyError::Parse(e.to_string()))?;
    // Parser::parse returns Option, not Result
    let tree = parser
        .parse(code, None)
        .ok_or_else(|| MyError::Parse("Parse failed".into()))?;
    // Return the owned Tree; a Node borrows from it and can't outlive this function
    Ok(tree)
}
```
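A quick sanity check is to print the S-expression view of the parse tree:

```rust
fn dump_tree() -> Result<(), MyError> {
    let tree = parse_code("fn main() {}", "rust")?;
    // e.g. (source_file (function_item name: (identifier) ... body: (block)))
    println!("{}", tree.root_node().to_sexp());
    Ok(())
}
```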
### Symbol Extraction

```rust
// codebase/symbols.rs
use tree_sitter::Node;

pub fn extract_symbols(node: Node) -> Vec<Symbol> {
    let mut symbols = Vec::new();
    // Record function definitions (the node kind depends on the grammar,
    // e.g. "function_item" in Rust or "function_declaration" in TypeScript)
    if node.kind() == "function_definition" {
        symbols.push(Symbol {
            name: get_node_name(&node),
            kind: SymbolKind::Function,
            range: node.range(),
        });
    }
    // Recurse into children; children() needs a TreeCursor to iterate
    let mut cursor = node.walk();
    for child in node.children(&mut cursor) {
        symbols.extend(extract_symbols(child));
    }
    symbols
}
```
## Async Runtime

### Tokio Setup
```rust
// main.rs
#[tokio::main]
async fn main() {
    // Async operations here
    let result = async_operation().await.unwrap();
}
```
### Async Commands

```rust
use std::time::Duration;

#[tauri::command]
async fn async_operation() -> Result<String, String> {
    tokio::time::sleep(Duration::from_secs(1)).await;
    Ok("Done".to_string())
}
```
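Tauri runs async commands on its own Tokio-backed runtime, so long-running work can also be spawned explicitly through `tauri::async_runtime`. A minimal sketch, with a hypothetical `start_background_job` command:

```rust
#[tauri::command]
fn start_background_job() {
    // Fire-and-forget task on Tauri's Tokio-backed runtime
    tauri::async_runtime::spawn(async {
        tokio::time::sleep(std::time::Duration::from_secs(5)).await;
        println!("background job finished");
    });
}
```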
## File System Operations

### Reading Files

```rust
use std::fs;

#[tauri::command]
fn read_file(path: String) -> Result<String, String> {
    fs::read_to_string(&path)
        .map_err(|e| e.to_string())
}
```

### Writing Files
```rust
#[tauri::command]
fn write_file(path: String, content: String) -> Result<(), String> {
    fs::write(&path, content)
        .map_err(|e| e.to_string())
}
```

### File Watcher
```rust
// file_system/watcher.rs
use notify::{Event, RecursiveMode, Watcher};
use std::path::Path;
use std::thread;

pub fn watch_directory<F>(path: &Path, mut callback: F) -> notify::Result<()>
where
    F: FnMut(Event) + Send + 'static,
{
    let (tx, rx) = std::sync::mpsc::channel();
    // notify 5+: recommended_watcher replaces the old watcher() constructor
    let mut watcher = notify::recommended_watcher(tx)?;
    watcher.watch(path, RecursiveMode::Recursive)?;
    thread::spawn(move || {
        // Keep the watcher alive inside the thread; dropping it stops the watch
        let _watcher = watcher;
        for event in rx {
            // notify delivers Result<Event, Error>; skip errored events here
            if let Ok(event) = event {
                callback(event);
            }
        }
    });
    Ok(())
}
```
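To surface these events in the UI, a command can forward them through Tauri's event system. A sketch; the `watch_project` command and the `file-changed` event name are illustrative:

```rust
use tauri::{AppHandle, Emitter};

#[tauri::command]
fn watch_project(app: AppHandle, path: String) -> Result<(), String> {
    watch_directory(std::path::Path::new(&path), move |event| {
        // Forward each filesystem event to the frontend as a "file-changed" event
        let paths: Vec<_> = event.paths.iter().map(|p| p.display().to_string()).collect();
        let _ = app.emit("file-changed", paths);
    })
    .map_err(|e| e.to_string())
}
```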
## Testing

### Unit Tests

```rust
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_parse_code() {
        let code = "function test() { return 42; }";
        let result = parse_code(code, "typescript");
        assert!(result.is_ok());
    }
}
```

### Integration Tests
```rust
#[tokio::test]
async fn test_ai_chat() {
    // Requires a local Ollama server listening on port 11434
    let messages = vec![
        Message {
            role: "user".to_string(),
            content: "Hello".to_string(),
        }
    ];
    let result = ollama_chat(messages).await;
    assert!(result.is_ok());
}
```

## Cargo Configuration
### Cargo.toml

```toml
[package]
name = "ifai"
version = "0.3.0"
edition = "2021"

[dependencies]
tauri = { version = "2.0", features = [] }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1"
tokio = { version = "1", features = ["full"] }
reqwest = { version = "0.11", features = ["json"] }
anyhow = "1"
thiserror = "1"
notify = "6"
tree-sitter = "0.23"
tree-sitter-rust = "0.23"
tree-sitter-typescript = "0.23"

[dev-dependencies]
tokio-test = "0.4"
```

## Best Practices
### 1. Use Result Types

```rust
use std::{fs, io};

// Good: Explicit error handling
fn read_file(path: &str) -> Result<String, io::Error> {
    fs::read_to_string(path)
}

// Avoid: Panic on error
fn read_file_or_panic(path: &str) -> String {
    fs::read_to_string(path).unwrap() // Don't do this
}
```

### 2. Async/Await
```rust
use anyhow::Result;

// Good: Use async for I/O operations
async fn fetch_data(url: &str) -> Result<String> {
    let response = reqwest::get(url).await?;
    Ok(response.text().await?)
}
```

### 3. Error Chains
```rust
use thiserror::Error;

#[derive(Error, Debug)]
pub enum MyError {
    #[error("Network error: {0}")]
    Network(#[from] reqwest::Error),
    #[error("Parse error: {0}")]
    Parse(#[from] serde_json::Error),
}
```

## Next Steps
- Frontend Guide - React development
- API Reference - Command reference
- Testing Guide - Testing strategies