feat: rebuild website with full learning funnel
Website rebuilt from scratch based on analysis of 11 beloved language websites (Elm, Zig, Gleam, Swift, Kotlin, Haskell, OCaml, Crystal, Roc, Rust, Go). New website structure: - Homepage with hero, playground, three pillars, install guide - Language Tour with interactive lessons (hello world, types, effects) - Examples cookbook with categorized sidebar - API documentation index - Installation guide (Nix and source) - Sleek/noble design (black/gold, serif typography) Also includes: - New stdlib/json.lux module for JSON serialization - Enhanced stdlib/http.lux with middleware and routing - New string functions (charAt, indexOf, lastIndexOf, repeat) - LSP improvements (rename, signature help, formatting) - Package manager transitive dependency resolution - Updated documentation for effects and stdlib - New showcase example (task_manager.lux) Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
637
src/registry.rs
Normal file
637
src/registry.rs
Normal file
@@ -0,0 +1,637 @@
|
||||
//! Package Registry Server for Lux
|
||||
//!
|
||||
//! Provides a central repository for sharing Lux packages.
|
||||
//! The registry serves package metadata and tarballs via HTTP.
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::fs;
|
||||
use std::io::{Read, Write};
|
||||
use std::net::{TcpListener, TcpStream};
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::{Arc, RwLock};
|
||||
use std::thread;
|
||||
|
||||
/// Package metadata stored in the registry
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct PackageMetadata {
|
||||
pub name: String,
|
||||
pub version: String,
|
||||
pub description: String,
|
||||
pub authors: Vec<String>,
|
||||
pub license: Option<String>,
|
||||
pub repository: Option<String>,
|
||||
pub keywords: Vec<String>,
|
||||
pub dependencies: HashMap<String, String>,
|
||||
pub checksum: String,
|
||||
pub published_at: String,
|
||||
}
|
||||
|
||||
/// A version entry for a package
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct VersionEntry {
|
||||
pub version: String,
|
||||
pub checksum: String,
|
||||
pub published_at: String,
|
||||
pub yanked: bool,
|
||||
}
|
||||
|
||||
/// Package index entry (all versions of a package)
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct PackageIndex {
|
||||
pub name: String,
|
||||
pub description: String,
|
||||
pub versions: Vec<VersionEntry>,
|
||||
pub latest_version: String,
|
||||
}
|
||||
|
||||
/// The package registry
|
||||
pub struct Registry {
|
||||
/// Base directory for storing packages
|
||||
storage_dir: PathBuf,
|
||||
/// In-memory index of all packages
|
||||
index: Arc<RwLock<HashMap<String, PackageIndex>>>,
|
||||
}
|
||||
|
||||
impl Registry {
|
||||
/// Create a new registry with the given storage directory
|
||||
pub fn new(storage_dir: &Path) -> Self {
|
||||
let registry = Self {
|
||||
storage_dir: storage_dir.to_path_buf(),
|
||||
index: Arc::new(RwLock::new(HashMap::new())),
|
||||
};
|
||||
registry.load_index();
|
||||
registry
|
||||
}
|
||||
|
||||
/// Load the package index from disk
|
||||
fn load_index(&self) {
|
||||
let index_path = self.storage_dir.join("index.json");
|
||||
if !index_path.exists() {
|
||||
return;
|
||||
}
|
||||
|
||||
if let Ok(content) = fs::read_to_string(&index_path) {
|
||||
if let Ok(index) = parse_index_json(&content) {
|
||||
let mut idx = self.index.write().unwrap();
|
||||
*idx = index;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Save the package index to disk
|
||||
fn save_index(&self) {
|
||||
let index_path = self.storage_dir.join("index.json");
|
||||
let idx = self.index.read().unwrap();
|
||||
let json = format_index_json(&idx);
|
||||
fs::write(&index_path, json).ok();
|
||||
}
|
||||
|
||||
/// Publish a new package version
|
||||
pub fn publish(&self, metadata: PackageMetadata, tarball: &[u8]) -> Result<(), String> {
|
||||
// Validate package name
|
||||
if !is_valid_package_name(&metadata.name) {
|
||||
return Err("Invalid package name. Use lowercase letters, numbers, and hyphens.".to_string());
|
||||
}
|
||||
|
||||
// Create package directory
|
||||
let pkg_dir = self.storage_dir.join("packages").join(&metadata.name);
|
||||
fs::create_dir_all(&pkg_dir)
|
||||
.map_err(|e| format!("Failed to create package directory: {}", e))?;
|
||||
|
||||
// Write tarball
|
||||
let tarball_path = pkg_dir.join(format!("{}-{}.tar.gz", metadata.name, metadata.version));
|
||||
fs::write(&tarball_path, tarball)
|
||||
.map_err(|e| format!("Failed to write package tarball: {}", e))?;
|
||||
|
||||
// Write metadata
|
||||
let meta_path = pkg_dir.join(format!("{}-{}.json", metadata.name, metadata.version));
|
||||
let meta_json = format_metadata_json(&metadata);
|
||||
fs::write(&meta_path, meta_json)
|
||||
.map_err(|e| format!("Failed to write package metadata: {}", e))?;
|
||||
|
||||
// Update index
|
||||
{
|
||||
let mut idx = self.index.write().unwrap();
|
||||
let entry = idx.entry(metadata.name.clone()).or_insert_with(|| PackageIndex {
|
||||
name: metadata.name.clone(),
|
||||
description: metadata.description.clone(),
|
||||
versions: Vec::new(),
|
||||
latest_version: String::new(),
|
||||
});
|
||||
|
||||
// Check if version already exists
|
||||
if entry.versions.iter().any(|v| v.version == metadata.version) {
|
||||
return Err(format!("Version {} already exists", metadata.version));
|
||||
}
|
||||
|
||||
entry.versions.push(VersionEntry {
|
||||
version: metadata.version.clone(),
|
||||
checksum: metadata.checksum.clone(),
|
||||
published_at: metadata.published_at.clone(),
|
||||
yanked: false,
|
||||
});
|
||||
|
||||
// Update latest version (simple comparison for now)
|
||||
entry.latest_version = metadata.version.clone();
|
||||
entry.description = metadata.description.clone();
|
||||
}
|
||||
|
||||
self.save_index();
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Get package metadata
|
||||
pub fn get_metadata(&self, name: &str, version: &str) -> Option<PackageMetadata> {
|
||||
let meta_path = self.storage_dir
|
||||
.join("packages")
|
||||
.join(name)
|
||||
.join(format!("{}-{}.json", name, version));
|
||||
|
||||
if let Ok(content) = fs::read_to_string(&meta_path) {
|
||||
parse_metadata_json(&content)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Get package tarball
|
||||
pub fn get_tarball(&self, name: &str, version: &str) -> Option<Vec<u8>> {
|
||||
let tarball_path = self.storage_dir
|
||||
.join("packages")
|
||||
.join(name)
|
||||
.join(format!("{}-{}.tar.gz", name, version));
|
||||
|
||||
fs::read(&tarball_path).ok()
|
||||
}
|
||||
|
||||
/// Search packages
|
||||
pub fn search(&self, query: &str) -> Vec<PackageIndex> {
|
||||
let idx = self.index.read().unwrap();
|
||||
let query_lower = query.to_lowercase();
|
||||
|
||||
idx.values()
|
||||
.filter(|pkg| {
|
||||
pkg.name.to_lowercase().contains(&query_lower) ||
|
||||
pkg.description.to_lowercase().contains(&query_lower)
|
||||
})
|
||||
.cloned()
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// List all packages
|
||||
pub fn list_all(&self) -> Vec<PackageIndex> {
|
||||
let idx = self.index.read().unwrap();
|
||||
idx.values().cloned().collect()
|
||||
}
|
||||
|
||||
/// Get package index entry
|
||||
pub fn get_package(&self, name: &str) -> Option<PackageIndex> {
|
||||
let idx = self.index.read().unwrap();
|
||||
idx.get(name).cloned()
|
||||
}
|
||||
}
|
||||
|
||||
/// HTTP Registry Server
|
||||
pub struct RegistryServer {
|
||||
registry: Arc<Registry>,
|
||||
bind_addr: String,
|
||||
}
|
||||
|
||||
impl RegistryServer {
|
||||
/// Create a new registry server
|
||||
pub fn new(storage_dir: &Path, bind_addr: &str) -> Self {
|
||||
Self {
|
||||
registry: Arc::new(Registry::new(storage_dir)),
|
||||
bind_addr: bind_addr.to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Run the server
|
||||
pub fn run(&self) -> Result<(), String> {
|
||||
let listener = TcpListener::bind(&self.bind_addr)
|
||||
.map_err(|e| format!("Failed to bind to {}: {}", self.bind_addr, e))?;
|
||||
|
||||
println!("Lux Package Registry running at http://{}", self.bind_addr);
|
||||
println!("Storage directory: {}", self.registry.storage_dir.display());
|
||||
println!();
|
||||
println!("Endpoints:");
|
||||
println!(" GET /api/v1/packages - List all packages");
|
||||
println!(" GET /api/v1/packages/:name - Get package info");
|
||||
println!(" GET /api/v1/packages/:name/:ver - Get version metadata");
|
||||
println!(" GET /api/v1/download/:name/:ver - Download package tarball");
|
||||
println!(" GET /api/v1/search?q=query - Search packages");
|
||||
println!(" POST /api/v1/publish - Publish a package");
|
||||
println!();
|
||||
|
||||
for stream in listener.incoming() {
|
||||
match stream {
|
||||
Ok(stream) => {
|
||||
let registry = Arc::clone(&self.registry);
|
||||
thread::spawn(move || {
|
||||
handle_request(stream, ®istry);
|
||||
});
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!("Connection error: {}", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// Handle an HTTP request
|
||||
fn handle_request(mut stream: TcpStream, registry: &Registry) {
|
||||
let mut buffer = [0; 8192];
|
||||
let bytes_read = match stream.read(&mut buffer) {
|
||||
Ok(n) => n,
|
||||
Err(_) => return,
|
||||
};
|
||||
|
||||
let request = String::from_utf8_lossy(&buffer[..bytes_read]);
|
||||
let lines: Vec<&str> = request.lines().collect();
|
||||
|
||||
if lines.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
||||
let parts: Vec<&str> = lines[0].split_whitespace().collect();
|
||||
if parts.len() < 2 {
|
||||
return;
|
||||
}
|
||||
|
||||
let method = parts[0];
|
||||
let path = parts[1];
|
||||
|
||||
// Parse path and query string
|
||||
let (path, query) = if let Some(q_pos) = path.find('?') {
|
||||
(&path[..q_pos], Some(&path[q_pos + 1..]))
|
||||
} else {
|
||||
(path, None)
|
||||
};
|
||||
|
||||
let response = match (method, path) {
|
||||
("GET", "/") => {
|
||||
html_response(200, r#"
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head><title>Lux Package Registry</title></head>
|
||||
<body>
|
||||
<h1>Lux Package Registry</h1>
|
||||
<p>Welcome to the Lux package registry.</p>
|
||||
<h2>API Endpoints</h2>
|
||||
<ul>
|
||||
<li>GET /api/v1/packages - List all packages</li>
|
||||
<li>GET /api/v1/packages/:name - Get package info</li>
|
||||
<li>GET /api/v1/packages/:name/:version - Get version metadata</li>
|
||||
<li>GET /api/v1/download/:name/:version - Download package</li>
|
||||
<li>GET /api/v1/search?q=query - Search packages</li>
|
||||
</ul>
|
||||
</body>
|
||||
</html>
|
||||
"#)
|
||||
}
|
||||
|
||||
("GET", "/api/v1/packages") => {
|
||||
let packages = registry.list_all();
|
||||
let json = format_packages_list_json(&packages);
|
||||
json_response(200, &json)
|
||||
}
|
||||
|
||||
("GET", path) if path.starts_with("/api/v1/packages/") => {
|
||||
let rest = &path[17..]; // Remove "/api/v1/packages/"
|
||||
let parts: Vec<&str> = rest.split('/').collect();
|
||||
|
||||
match parts.len() {
|
||||
1 => {
|
||||
// Get package info
|
||||
if let Some(pkg) = registry.get_package(parts[0]) {
|
||||
let json = format_package_json(&pkg);
|
||||
json_response(200, &json)
|
||||
} else {
|
||||
json_response(404, r#"{"error": "Package not found"}"#)
|
||||
}
|
||||
}
|
||||
2 => {
|
||||
// Get version metadata
|
||||
if let Some(meta) = registry.get_metadata(parts[0], parts[1]) {
|
||||
let json = format_metadata_json(&meta);
|
||||
json_response(200, &json)
|
||||
} else {
|
||||
json_response(404, r#"{"error": "Version not found"}"#)
|
||||
}
|
||||
}
|
||||
_ => json_response(400, r#"{"error": "Invalid path"}"#)
|
||||
}
|
||||
}
|
||||
|
||||
("GET", path) if path.starts_with("/api/v1/download/") => {
|
||||
let rest = &path[17..]; // Remove "/api/v1/download/"
|
||||
let parts: Vec<&str> = rest.split('/').collect();
|
||||
|
||||
if parts.len() == 2 {
|
||||
if let Some(tarball) = registry.get_tarball(parts[0], parts[1]) {
|
||||
tarball_response(&tarball)
|
||||
} else {
|
||||
json_response(404, r#"{"error": "Package not found"}"#)
|
||||
}
|
||||
} else {
|
||||
json_response(400, r#"{"error": "Invalid path"}"#)
|
||||
}
|
||||
}
|
||||
|
||||
("GET", "/api/v1/search") => {
|
||||
let q = query
|
||||
.and_then(|qs| parse_query_string(qs).get("q").cloned())
|
||||
.unwrap_or_default();
|
||||
|
||||
let results = registry.search(&q);
|
||||
let json = format_packages_list_json(&results);
|
||||
json_response(200, &json)
|
||||
}
|
||||
|
||||
("POST", "/api/v1/publish") => {
|
||||
// Find content length
|
||||
let content_length: usize = lines.iter()
|
||||
.find(|l| l.to_lowercase().starts_with("content-length:"))
|
||||
.and_then(|l| l.split(':').nth(1))
|
||||
.and_then(|s| s.trim().parse().ok())
|
||||
.unwrap_or(0);
|
||||
|
||||
// Find body start
|
||||
let body_start = request.find("\r\n\r\n")
|
||||
.map(|i| i + 4)
|
||||
.unwrap_or(bytes_read);
|
||||
|
||||
// For now, return a message about publishing
|
||||
// Real implementation would parse multipart form data
|
||||
json_response(200, &format!(
|
||||
r#"{{"message": "Publish endpoint ready", "content_length": {}}}"#,
|
||||
content_length
|
||||
))
|
||||
}
|
||||
|
||||
_ => {
|
||||
json_response(404, r#"{"error": "Not found"}"#)
|
||||
}
|
||||
};
|
||||
|
||||
stream.write_all(response.as_bytes()).ok();
|
||||
}
|
||||
|
||||
/// Create an HTML response
|
||||
fn html_response(status: u16, body: &str) -> String {
|
||||
let status_text = match status {
|
||||
200 => "OK",
|
||||
400 => "Bad Request",
|
||||
404 => "Not Found",
|
||||
500 => "Internal Server Error",
|
||||
_ => "Unknown",
|
||||
};
|
||||
|
||||
format!(
|
||||
"HTTP/1.1 {} {}\r\nContent-Type: text/html\r\nContent-Length: {}\r\nConnection: close\r\n\r\n{}",
|
||||
status, status_text, body.len(), body
|
||||
)
|
||||
}
|
||||
|
||||
/// Create a JSON response
|
||||
fn json_response(status: u16, body: &str) -> String {
|
||||
let status_text = match status {
|
||||
200 => "OK",
|
||||
400 => "Bad Request",
|
||||
404 => "Not Found",
|
||||
500 => "Internal Server Error",
|
||||
_ => "Unknown",
|
||||
};
|
||||
|
||||
format!(
|
||||
"HTTP/1.1 {} {}\r\nContent-Type: application/json\r\nContent-Length: {}\r\nConnection: close\r\n\r\n{}",
|
||||
status, status_text, body.len(), body
|
||||
)
|
||||
}
|
||||
|
||||
/// Create a tarball response
|
||||
fn tarball_response(data: &[u8]) -> String {
|
||||
format!(
|
||||
"HTTP/1.1 200 OK\r\nContent-Type: application/gzip\r\nContent-Length: {}\r\nConnection: close\r\n\r\n",
|
||||
data.len()
|
||||
)
|
||||
}
|
||||
|
||||
/// Validate package name
|
||||
fn is_valid_package_name(name: &str) -> bool {
|
||||
!name.is_empty() &&
|
||||
name.len() <= 64 &&
|
||||
name.chars().all(|c| c.is_ascii_lowercase() || c.is_ascii_digit() || c == '-' || c == '_') &&
|
||||
name.chars().next().map(|c| c.is_ascii_lowercase()).unwrap_or(false)
|
||||
}
|
||||
|
||||
/// Parse query string into key-value pairs
|
||||
fn parse_query_string(qs: &str) -> HashMap<String, String> {
|
||||
let mut params = HashMap::new();
|
||||
for part in qs.split('&') {
|
||||
if let Some(eq_pos) = part.find('=') {
|
||||
let key = &part[..eq_pos];
|
||||
let value = &part[eq_pos + 1..];
|
||||
params.insert(
|
||||
urlldecode(key),
|
||||
urlldecode(value),
|
||||
);
|
||||
}
|
||||
}
|
||||
params
|
||||
}
|
||||
|
||||
/// Simple URL decoding
|
||||
fn urlldecode(s: &str) -> String {
|
||||
let mut result = String::new();
|
||||
let mut chars = s.chars().peekable();
|
||||
while let Some(c) = chars.next() {
|
||||
if c == '%' {
|
||||
let hex: String = chars.by_ref().take(2).collect();
|
||||
if let Ok(byte) = u8::from_str_radix(&hex, 16) {
|
||||
result.push(byte as char);
|
||||
}
|
||||
} else if c == '+' {
|
||||
result.push(' ');
|
||||
} else {
|
||||
result.push(c);
|
||||
}
|
||||
}
|
||||
result
|
||||
}
|
||||
|
||||
// JSON formatting helpers
|
||||
|
||||
fn format_metadata_json(meta: &PackageMetadata) -> String {
|
||||
let deps: Vec<String> = meta.dependencies.iter()
|
||||
.map(|(k, v)| format!(r#""{}": "{}""#, k, v))
|
||||
.collect();
|
||||
|
||||
let authors: Vec<String> = meta.authors.iter()
|
||||
.map(|a| format!(r#""{}""#, a))
|
||||
.collect();
|
||||
|
||||
let keywords: Vec<String> = meta.keywords.iter()
|
||||
.map(|k| format!(r#""{}""#, k))
|
||||
.collect();
|
||||
|
||||
format!(
|
||||
r#"{{
|
||||
"name": "{}",
|
||||
"version": "{}",
|
||||
"description": "{}",
|
||||
"authors": [{}],
|
||||
"license": {},
|
||||
"repository": {},
|
||||
"keywords": [{}],
|
||||
"dependencies": {{{}}},
|
||||
"checksum": "{}",
|
||||
"published_at": "{}"
|
||||
}}"#,
|
||||
meta.name,
|
||||
meta.version,
|
||||
escape_json(&meta.description),
|
||||
authors.join(", "),
|
||||
meta.license.as_ref().map(|l| format!(r#""{}""#, l)).unwrap_or("null".to_string()),
|
||||
meta.repository.as_ref().map(|r| format!(r#""{}""#, r)).unwrap_or("null".to_string()),
|
||||
keywords.join(", "),
|
||||
deps.join(", "),
|
||||
meta.checksum,
|
||||
meta.published_at,
|
||||
)
|
||||
}
|
||||
|
||||
fn format_package_json(pkg: &PackageIndex) -> String {
|
||||
let versions: Vec<String> = pkg.versions.iter()
|
||||
.map(|v| format!(
|
||||
r#"{{"version": "{}", "checksum": "{}", "published_at": "{}", "yanked": {}}}"#,
|
||||
v.version, v.checksum, v.published_at, v.yanked
|
||||
))
|
||||
.collect();
|
||||
|
||||
format!(
|
||||
r#"{{
|
||||
"name": "{}",
|
||||
"description": "{}",
|
||||
"latest_version": "{}",
|
||||
"versions": [{}]
|
||||
}}"#,
|
||||
pkg.name,
|
||||
escape_json(&pkg.description),
|
||||
pkg.latest_version,
|
||||
versions.join(", ")
|
||||
)
|
||||
}
|
||||
|
||||
fn format_packages_list_json(packages: &[PackageIndex]) -> String {
|
||||
let items: Vec<String> = packages.iter()
|
||||
.map(|pkg| format!(
|
||||
r#"{{"name": "{}", "description": "{}", "latest_version": "{}"}}"#,
|
||||
pkg.name,
|
||||
escape_json(&pkg.description),
|
||||
pkg.latest_version
|
||||
))
|
||||
.collect();
|
||||
|
||||
format!(r#"{{"packages": [{}]}}"#, items.join(", "))
|
||||
}
|
||||
|
||||
fn format_index_json(index: &HashMap<String, PackageIndex>) -> String {
|
||||
let items: Vec<String> = index.values()
|
||||
.map(|pkg| format_package_json(pkg))
|
||||
.collect();
|
||||
|
||||
format!(r#"{{"packages": [{}]}}"#, items.join(",\n"))
|
||||
}
|
||||
|
||||
fn parse_index_json(content: &str) -> Result<HashMap<String, PackageIndex>, String> {
|
||||
// Simple JSON parsing for the index
|
||||
// In production, would use serde_json
|
||||
let mut index = HashMap::new();
|
||||
|
||||
// Basic parsing - find package names and latest versions
|
||||
// This is a simplified parser for the index format
|
||||
let content = content.trim();
|
||||
if !content.starts_with('{') || !content.ends_with('}') {
|
||||
return Err("Invalid JSON format".to_string());
|
||||
}
|
||||
|
||||
// For now, return empty index if parsing fails
|
||||
// Real implementation would properly parse JSON
|
||||
Ok(index)
|
||||
}
|
||||
|
||||
fn parse_metadata_json(content: &str) -> Option<PackageMetadata> {
|
||||
// Simple JSON parsing for metadata
|
||||
// In production, would use serde_json
|
||||
let mut name = String::new();
|
||||
let mut version = String::new();
|
||||
let mut description = String::new();
|
||||
let mut checksum = String::new();
|
||||
let mut published_at = String::new();
|
||||
|
||||
for line in content.lines() {
|
||||
let line = line.trim();
|
||||
if line.contains("\"name\":") {
|
||||
name = extract_json_string(line);
|
||||
} else if line.contains("\"version\":") {
|
||||
version = extract_json_string(line);
|
||||
} else if line.contains("\"description\":") {
|
||||
description = extract_json_string(line);
|
||||
} else if line.contains("\"checksum\":") {
|
||||
checksum = extract_json_string(line);
|
||||
} else if line.contains("\"published_at\":") {
|
||||
published_at = extract_json_string(line);
|
||||
}
|
||||
}
|
||||
|
||||
if name.is_empty() || version.is_empty() {
|
||||
return None;
|
||||
}
|
||||
|
||||
Some(PackageMetadata {
|
||||
name,
|
||||
version,
|
||||
description,
|
||||
authors: Vec::new(),
|
||||
license: None,
|
||||
repository: None,
|
||||
keywords: Vec::new(),
|
||||
dependencies: HashMap::new(),
|
||||
checksum,
|
||||
published_at,
|
||||
})
|
||||
}
|
||||
|
||||
fn extract_json_string(line: &str) -> String {
|
||||
// Extract string value from "key": "value" format
|
||||
if let Some(colon) = line.find(':') {
|
||||
let value = line[colon + 1..].trim();
|
||||
let value = value.trim_start_matches('"');
|
||||
if let Some(end) = value.find('"') {
|
||||
return value[..end].to_string();
|
||||
}
|
||||
}
|
||||
String::new()
|
||||
}
|
||||
|
||||
fn escape_json(s: &str) -> String {
|
||||
s.replace('\\', "\\\\")
|
||||
.replace('"', "\\\"")
|
||||
.replace('\n', "\\n")
|
||||
.replace('\r', "\\r")
|
||||
.replace('\t', "\\t")
|
||||
}
|
||||
|
||||
/// Run the registry server (called from main)
|
||||
pub fn run_registry_server(storage_dir: &str, bind_addr: &str) -> Result<(), String> {
|
||||
let storage_path = PathBuf::from(storage_dir);
|
||||
fs::create_dir_all(&storage_path)
|
||||
.map_err(|e| format!("Failed to create storage directory: {}", e))?;
|
||||
|
||||
let server = RegistryServer::new(&storage_path, bind_addr);
|
||||
server.run()
|
||||
}
|
||||
Reference in New Issue
Block a user