- Extract `find_pattern_occurrences_in_lines` helper; use it in both the references and rename handlers (deduplicates the word-boundary search logic)
- Add `MD_EXT` constant to replace three repeated `"md"` string literals
- Fix double lock acquire/drop/re-acquire in `code_action` (use a single guard)
- Fix `symbol()` to populate the `file_lines` cache when parsing from disk (avoids redundant re-reads on subsequent requests)
- Remove section-separator comment blocks (code structure is self-evident from Rust syntax)
1051 lines · 36 KiB · Rust
use std::fs;
|
|
use std::path::PathBuf;
|
|
use std::sync::Arc;
|
|
|
|
use chrono::{Timelike, Utc};
|
|
use chrono_tz::Tz;
|
|
use dashmap::DashMap;
|
|
use once_cell::sync::Lazy;
|
|
use regex::Regex;
|
|
use tokio::sync::RwLock;
|
|
use tower_lsp::jsonrpc::Result;
|
|
use tower_lsp::lsp_types::*;
|
|
use tower_lsp::{Client, LanguageServer, LspService, Server};
|
|
use walkdir::WalkDir;
|
|
|
|
use crate::error::StreamdError;
|
|
use crate::extract::parse_markdown_file;
|
|
use crate::localize::{localize_stream_file, merge_repository_configuration, TaskConfiguration};
|
|
use crate::models::{LocalizedShard, RepositoryConfiguration};
|
|
use crate::timesheet::{
|
|
extract_timesheets, find_overlapping_timecards, load_repository_config,
|
|
BasicTimesheetConfiguration,
|
|
};
|
|
|
|
/// File extension handled by this server; shared by the workspace walkers
/// in the `symbol`, `references`, and `rename` handlers.
const MD_EXT: &str = "md";

/// R15 file name validation: YYYYMMDD[-HHMMSS][_type][...].md
/// The extraction regex requires at least one non-dot char after date/time.
// NOTE: the optional time group accepts 4-6 digits, so both HHMM and HHMMSS
// forms pass validation.
static FILE_NAME_REGEX: Lazy<Regex> =
    Lazy::new(|| Regex::new(r"^(?P<date>\d{8})(?:-(?P<time>\d{4,6}))?.+\.md$").unwrap());
|
|
|
|
/// Collect all `@Word` marker names present on a line.
///
/// A marker name is the maximal run of alphanumeric/underscore characters
/// directly following an `@`; empty runs (a bare `@`, or `@@`) contribute
/// nothing.
pub fn extract_markers_from_line(line: &str) -> Vec<String> {
    // Splitting on '@' gives one segment per marker candidate; the text
    // before the first '@' is discarded via `skip(1)`.
    line.split('@')
        .skip(1)
        .filter_map(|segment| {
            let end = segment
                .find(|c: char| !c.is_alphanumeric() && c != '_')
                .unwrap_or(segment.len());
            let word = &segment[..end];
            (!word.is_empty()).then(|| word.to_string())
        })
        .collect()
}
|
|
|
|
/// Extract the marker name under the cursor (the word after the nearest `@`).
/// `cursor_col` is the 0-indexed character offset; the character *at* that
/// offset is treated as part of the cursor position (inclusive).
///
/// Returns `None` when the cursor is not on (or immediately after) a marker
/// word.
// NOTE(review): the offset is applied as a byte index; LSP positions default
// to UTF-16 code units — the two coincide only for ASCII lines. Confirm the
// negotiated position encoding if non-ASCII content matters.
pub fn extract_marker_at_position(line: &str, cursor_col: u32) -> Option<String> {
    // +1 so the character at the cursor itself is included in the search.
    let mut col = ((cursor_col as usize).saturating_add(1)).min(line.len());
    // Clamp down to a UTF-8 char boundary: slicing mid-codepoint would panic
    // on non-ASCII lines (the clamp above only bounds the byte range).
    while col > 0 && !line.is_char_boundary(col) {
        col -= 1;
    }
    let line_prefix = &line[..col];
    let at_pos = line_prefix.rfind('@')?;

    let after_at = &line[at_pos + 1..];
    let end = after_at
        .find(|c: char| !c.is_alphanumeric() && c != '_')
        .unwrap_or(after_at.len());

    // Cursor must be within or at the end of the marker word.
    if col > at_pos + 1 + end {
        return None;
    }

    let marker = &after_at[..end];
    if marker.is_empty() {
        None
    } else {
        Some(marker.to_string())
    }
}
|
|
|
|
/// Produce a file-name diagnostic if the basename does not match R15.
|
|
pub fn compute_file_name_diagnostic(file_name: &str) -> Option<Diagnostic> {
|
|
if !file_name.ends_with(".md") {
|
|
return None;
|
|
}
|
|
if FILE_NAME_REGEX.is_match(file_name) {
|
|
return None;
|
|
}
|
|
Some(Diagnostic {
|
|
range: Range {
|
|
start: Position::new(0, 0),
|
|
end: Position::new(0, 0),
|
|
},
|
|
severity: Some(DiagnosticSeverity::WARNING),
|
|
source: Some("streamd".to_string()),
|
|
message: "File name does not match streamd format: \
|
|
YYYYMMDD-HHMMSS[_type] [markers].md"
|
|
.to_string(),
|
|
..Default::default()
|
|
})
|
|
}
|
|
|
|
/// Build completion items for a given line and cursor position.
|
|
pub fn completions_for_line(
|
|
line: &str,
|
|
cursor_col: u32,
|
|
config: &RepositoryConfiguration,
|
|
) -> Vec<CompletionItem> {
|
|
let col = (cursor_col as usize).min(line.len());
|
|
let line_prefix = &line[..col];
|
|
|
|
// Only trigger when there is a `@` before the cursor.
|
|
let at_pos = match line_prefix.rfind('@') {
|
|
Some(p) => p,
|
|
None => return vec![],
|
|
};
|
|
|
|
let after_at = &line_prefix[at_pos + 1..];
|
|
|
|
// Temporal snippet: `@` followed by a digit → offer date/time format snippets.
|
|
if after_at.starts_with(|c: char| c.is_ascii_digit()) {
|
|
return vec![
|
|
CompletionItem {
|
|
label: "YYYYMMDD".to_string(),
|
|
kind: Some(CompletionItemKind::SNIPPET),
|
|
detail: Some("Date marker (R16)".to_string()),
|
|
insert_text: Some("${1:YYYYMMDD}".to_string()),
|
|
insert_text_format: Some(InsertTextFormat::SNIPPET),
|
|
..Default::default()
|
|
},
|
|
CompletionItem {
|
|
label: "HHMMSS".to_string(),
|
|
kind: Some(CompletionItemKind::SNIPPET),
|
|
detail: Some("Time marker (R16)".to_string()),
|
|
insert_text: Some("${1:HHMMSS}".to_string()),
|
|
insert_text_format: Some(InsertTextFormat::SNIPPET),
|
|
..Default::default()
|
|
},
|
|
];
|
|
}
|
|
|
|
// Markers already on this line (for conditional suggestion logic).
|
|
let existing: Vec<String> = extract_markers_from_line(line);
|
|
|
|
// Collect markers whose `if_with` conditions are satisfied by existing markers:
|
|
// if marker M has a placement with `if_with: {A, B}`, and A is on the line,
|
|
// then B is a candidate to suggest.
|
|
let mut conditional: std::collections::HashSet<String> = std::collections::HashSet::new();
|
|
for existing_name in &existing {
|
|
if let Some(marker_def) = config.markers.get(existing_name) {
|
|
for placement in &marker_def.placements {
|
|
for cond in &placement.if_with {
|
|
if !existing.contains(cond) {
|
|
conditional.insert(cond.clone());
|
|
}
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
// Filter by typed prefix.
|
|
let prefix = after_at.to_lowercase();
|
|
let mut items: Vec<CompletionItem> = config
|
|
.markers
|
|
.iter()
|
|
.filter(|(name, _)| name.to_lowercase().starts_with(&prefix))
|
|
.map(|(name, marker)| {
|
|
let is_conditional = conditional.contains(name.as_str());
|
|
CompletionItem {
|
|
label: format!("@{}", name),
|
|
kind: Some(CompletionItemKind::KEYWORD),
|
|
detail: Some(marker.display_name.clone()),
|
|
// Conditional completions sort before unconditional ones.
|
|
sort_text: Some(if is_conditional {
|
|
format!("0_{}", name)
|
|
} else {
|
|
format!("1_{}", name)
|
|
}),
|
|
..Default::default()
|
|
}
|
|
})
|
|
.collect();
|
|
|
|
items.sort_by(|a, b| {
|
|
a.sort_text
|
|
.as_deref()
|
|
.unwrap_or("")
|
|
.cmp(b.sort_text.as_deref().unwrap_or(""))
|
|
});
|
|
|
|
items
|
|
}
|
|
|
|
/// Convert a `LocalizedShard` tree node to a `DocumentSymbol`.
|
|
fn shard_to_document_symbol(shard: &LocalizedShard) -> DocumentSymbol {
|
|
let name = if !shard.markers.is_empty() {
|
|
shard.markers.join(" @")
|
|
} else if !shard.tags.is_empty() {
|
|
shard.tags.join(" @")
|
|
} else {
|
|
"(shard)".to_string()
|
|
};
|
|
|
|
let start_line = shard.start_line.saturating_sub(1) as u32;
|
|
let end_line = shard.end_line.saturating_sub(1) as u32;
|
|
|
|
let range = Range {
|
|
start: Position::new(start_line, 0),
|
|
end: Position::new(end_line, u32::MAX),
|
|
};
|
|
let selection_range = Range {
|
|
start: Position::new(start_line, 0),
|
|
end: Position::new(start_line, u32::MAX),
|
|
};
|
|
|
|
let children: Vec<DocumentSymbol> = shard
|
|
.children
|
|
.iter()
|
|
.map(shard_to_document_symbol)
|
|
.collect();
|
|
|
|
#[allow(deprecated)]
|
|
DocumentSymbol {
|
|
name,
|
|
detail: None,
|
|
kind: SymbolKind::STRING,
|
|
tags: None,
|
|
deprecated: None,
|
|
range,
|
|
selection_range,
|
|
children: if children.is_empty() {
|
|
None
|
|
} else {
|
|
Some(children)
|
|
},
|
|
}
|
|
}
|
|
|
|
/// Collect workspace symbols from a shard tree, filtering by query.
|
|
fn collect_workspace_symbols(
|
|
shard: &LocalizedShard,
|
|
uri: &Url,
|
|
query: &str,
|
|
out: &mut Vec<SymbolInformation>,
|
|
) {
|
|
let name = if !shard.markers.is_empty() {
|
|
shard.markers.join(" @")
|
|
} else if !shard.tags.is_empty() {
|
|
shard.tags.join(" @")
|
|
} else {
|
|
return;
|
|
};
|
|
|
|
if query.is_empty() || name.to_lowercase().contains(query) {
|
|
let start_line = shard.start_line.saturating_sub(1) as u32;
|
|
let end_line = shard.end_line.saturating_sub(1) as u32;
|
|
#[allow(deprecated)]
|
|
out.push(SymbolInformation {
|
|
name: name.clone(),
|
|
kind: SymbolKind::STRING,
|
|
tags: None,
|
|
deprecated: None,
|
|
location: Location {
|
|
uri: uri.clone(),
|
|
range: Range {
|
|
start: Position::new(start_line, 0),
|
|
end: Position::new(end_line, 0),
|
|
},
|
|
},
|
|
container_name: None,
|
|
});
|
|
}
|
|
|
|
for child in &shard.children {
|
|
collect_workspace_symbols(child, uri, query, out);
|
|
}
|
|
}
|
|
|
|
/// Find all word-boundary-respecting occurrences of `pattern` in `lines`.
/// Returns `(line_idx, start_col, end_col)` for each match (byte columns).
///
/// A match is accepted only when it ends at end-of-line or before a
/// non-marker-word byte. Marker words consist of alphanumerics and `_`
/// (matching `extract_markers_from_line`), so `@Task` does NOT match inside
/// `@Task_extra` — previously `_` was not treated as a word character, which
/// made rename/references corrupt longer markers sharing a prefix.
fn find_pattern_occurrences_in_lines(
    lines: &[String],
    pattern: &str,
) -> Vec<(usize, usize, usize)> {
    let mut results = Vec::new();
    for (line_idx, line) in lines.iter().enumerate() {
        let mut start = 0;
        while let Some(pos) = line[start..].find(pattern) {
            let abs = start + pos;
            let after = abs + pattern.len();
            let is_word_boundary = after >= line.len() || {
                let next = line.as_bytes()[after];
                // `_` continues a marker word, same as the extraction logic.
                !(next.is_ascii_alphanumeric() || next == b'_')
            };
            if is_word_boundary {
                results.push((line_idx, abs, after));
            }
            start = after;
            if start >= line.len() {
                break;
            }
        }
    }
    results
}
|
|
|
|
/// Server state for an active workspace (one where `.streamd.toml` exists).
/// Shared across handlers via `Arc`; the DashMaps provide interior mutability.
struct LspState {
    // Merged marker/dimension configuration used for parsing and completion.
    config: RepositoryConfiguration,
    // Workspace timezone (parsed from the repository config, UTC fallback).
    tz: Tz,
    // Workspace root; cross-file walks (symbol/references/rename) start here.
    base_folder: PathBuf,
    /// URI → localized shard root (None when file could not be localized)
    file_cache: DashMap<Url, Option<LocalizedShard>>,
    /// URI → lines of text (for line-based operations)
    file_lines: DashMap<Url, Vec<String>>,
}
|
|
|
|
impl LspState {
    /// Parse `text` for `uri`, update caches.
    ///
    /// Refreshes both `file_lines` (raw line cache used by line-based
    /// handlers) and `file_cache` (localized shard tree). A failed
    /// localization is cached as `None` so the same bad content is not
    /// re-localized on every request.
    fn parse_and_cache(&self, uri: &Url, text: &str) {
        let lines: Vec<String> = text.lines().map(String::from).collect();
        self.file_lines.insert(uri.clone(), lines);

        let path_str = uri.path();
        let stream_file = parse_markdown_file(path_str, text);
        let localized = localize_stream_file(&stream_file, &self.config, self.tz).ok();
        self.file_cache.insert(uri.clone(), localized);
    }

    /// Parse a file from disk on demand (for cross-file features).
    /// Also populates `file_lines` so subsequent line-based requests avoid a re-read.
    ///
    /// Returns `None` when the file cannot be read or localized. The result
    /// is deliberately NOT inserted into `file_cache` here — the caller
    /// decides whether to cache it (see the `symbol` handler).
    fn parse_file_from_disk(&self, uri: &Url, path: &std::path::Path) -> Option<LocalizedShard> {
        let text = fs::read_to_string(path).ok()?;
        let lines: Vec<String> = text.lines().map(String::from).collect();
        self.file_lines.insert(uri.clone(), lines);
        let path_str = path.to_string_lossy();
        let stream_file = parse_markdown_file(&path_str, &text);
        localize_stream_file(&stream_file, &self.config, self.tz).ok()
    }

    /// Ensure a file's lines are in `file_lines` (read from disk if not cached).
    ///
    /// Returns an owned copy so no DashMap shard guard is held while the
    /// caller iterates (avoids holding a lock across long scans).
    fn ensure_lines(&self, uri: &Url, path: &std::path::Path) -> Option<Vec<String>> {
        if let Some(cached) = self.file_lines.get(uri) {
            return Some(cached.clone());
        }
        let text = fs::read_to_string(path).ok()?;
        let lines: Vec<String> = text.lines().map(String::from).collect();
        self.file_lines.insert(uri.clone(), lines.clone());
        Some(lines)
    }

    /// Compute diagnostics for `uri`:
    /// - R15 file-name format
    /// - R18 timesheet violations
    ///
    /// Overlapping timecards produce warnings; extraction failures (unclosed
    /// day, conflicting day types) produce a single error. All diagnostics
    /// are anchored at (0, 0) since precise source ranges are not tracked.
    fn compute_diagnostics(&self, uri: &Url) -> Vec<Diagnostic> {
        let mut diags = Vec::new();

        // File-name format (R15)
        if let Some(name) = uri.path_segments().and_then(|mut s| s.next_back()) {
            if let Some(d) = compute_file_name_diagnostic(name) {
                diags.push(d);
            }
        }

        // Timesheet diagnostics (R18) — only when the file localized cleanly.
        if let Some(entry) = self.file_cache.get(uri) {
            if let Some(root) = entry.as_ref() {
                let now = Utc::now();
                match extract_timesheets(std::slice::from_ref(root), now, self.tz) {
                    Ok(timesheets) => {
                        for ts in &timesheets {
                            for (first, second) in find_overlapping_timecards(&ts.timecards) {
                                diags.push(Diagnostic {
                                    range: Range {
                                        start: Position::new(0, 0),
                                        end: Position::new(0, 0),
                                    },
                                    severity: Some(DiagnosticSeverity::WARNING),
                                    source: Some("streamd".to_string()),
                                    message: format!(
                                        "Overlapping timecards on {}: \
                                         {:02}:{:02}-{:02}:{:02} overlaps \
                                         {:02}:{:02}-{:02}:{:02}",
                                        ts.date,
                                        first.0.hour(),
                                        first.0.minute(),
                                        first.1.hour(),
                                        first.1.minute(),
                                        second.0.hour(),
                                        second.0.minute(),
                                        second.1.hour(),
                                        second.1.minute(),
                                    ),
                                    ..Default::default()
                                });
                            }
                        }
                    }
                    Err(e) => {
                        // Unclosed day or conflicting day types → Error
                        diags.push(Diagnostic {
                            range: Range {
                                start: Position::new(0, 0),
                                end: Position::new(0, 0),
                            },
                            severity: Some(DiagnosticSeverity::ERROR),
                            source: Some("streamd".to_string()),
                            message: format!("Timesheet error: {}", e),
                            ..Default::default()
                        });
                    }
                }
            }
        }

        diags
    }
}
|
|
|
|
/// tower-lsp backend: a handle to the client plus the (re)loadable state.
struct Backend {
    client: Client,
    /// None → passive mode (no .streamd.toml found)
    state: RwLock<Option<Arc<LspState>>>,
}
|
|
|
|
impl Backend {
    /// Construct a backend in passive mode; `initialize` decides activation.
    fn new(client: Client) -> Self {
        Backend {
            client,
            state: RwLock::new(None),
        }
    }

    /// Build active-mode state for a workspace root. Returns `None` (passive
    /// mode) when the root has no `.streamd.toml` or it fails to load.
    async fn build_state(root: PathBuf) -> Option<Arc<LspState>> {
        let config_path = root.join(".streamd.toml");
        if !config_path.exists() {
            return None;
        }
        let repo_config = load_repository_config(&root).ok()?;
        // Timezone from the repository config, falling back to UTC when
        // absent or unparseable.
        let tz: Tz = repo_config
            .timezone
            .as_deref()
            .and_then(|s| s.parse().ok())
            .unwrap_or(chrono_tz::UTC);

        // NOTE(review): only the timezone is taken from `repo_config`; the
        // marker configuration below merges built-in defaults only. Confirm
        // whether repository-defined markers should be merged in as well.
        let config =
            merge_repository_configuration(&BasicTimesheetConfiguration, &TaskConfiguration);

        Some(Arc::new(LspState {
            config,
            tz,
            base_folder: root,
            file_cache: DashMap::new(),
            file_lines: DashMap::new(),
        }))
    }
}
|
|
|
|
#[tower_lsp::async_trait]
|
|
impl LanguageServer for Backend {
|
|
    /// Handshake: resolve the workspace root, try to activate (requires a
    /// `.streamd.toml`), and advertise capabilities. Capabilities are
    /// advertised even in passive mode; handlers simply no-op without state.
    async fn initialize(&self, params: InitializeParams) -> Result<InitializeResult> {
        // `root_path` is deprecated in LSP but kept as a fallback for older
        // clients that do not send `root_uri`.
        #[allow(deprecated)]
        let root: Option<PathBuf> = params
            .root_uri
            .as_ref()
            .and_then(|u| u.to_file_path().ok())
            .or_else(|| params.root_path.as_ref().map(PathBuf::from));

        if let Some(root) = root {
            match Backend::build_state(root).await {
                Some(state) => {
                    *self.state.write().await = Some(state);
                    self.client
                        .log_message(
                            MessageType::INFO,
                            "streamd LSP: active (found .streamd.toml)",
                        )
                        .await;
                }
                None => {
                    self.client
                        .log_message(
                            MessageType::INFO,
                            "streamd LSP: passive mode (no .streamd.toml in workspace root)",
                        )
                        .await;
                }
            }
        }

        Ok(InitializeResult {
            capabilities: ServerCapabilities {
                // Full-document sync: `did_change` always receives the whole
                // text; saves include the text too (see `include_text`).
                text_document_sync: Some(TextDocumentSyncCapability::Options(
                    TextDocumentSyncOptions {
                        open_close: Some(true),
                        change: Some(TextDocumentSyncKind::FULL),
                        save: Some(TextDocumentSyncSaveOptions::SaveOptions(SaveOptions {
                            include_text: Some(true),
                        })),
                        ..Default::default()
                    },
                )),
                completion_provider: Some(CompletionOptions {
                    trigger_characters: Some(vec!["@".to_string()]),
                    ..Default::default()
                }),
                document_symbol_provider: Some(OneOf::Left(true)),
                code_action_provider: Some(CodeActionProviderCapability::Simple(true)),
                workspace_symbol_provider: Some(OneOf::Left(true)),
                references_provider: Some(OneOf::Left(true)),
                rename_provider: Some(OneOf::Left(true)),
                ..Default::default()
            },
            server_info: Some(ServerInfo {
                name: "streamd-lsp".to_string(),
                version: Some(env!("CARGO_PKG_VERSION").to_string()),
            }),
        })
    }
|
|
|
|
    /// Post-initialize hook: dynamically register a file watcher for
    /// `.streamd.toml` so config edits reach `did_change_watched_files`.
    async fn initialized(&self, _: InitializedParams) {
        // Register a watcher for .streamd.toml so we can reload on change.
        let opts = DidChangeWatchedFilesRegistrationOptions {
            watchers: vec![FileSystemWatcher {
                glob_pattern: GlobPattern::String("**/.streamd.toml".to_string()),
                kind: Some(WatchKind::all()),
            }],
        };
        if let Ok(opts_value) = serde_json::to_value(opts) {
            let registration = Registration {
                id: "streamd-config-watcher".to_string(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: Some(opts_value),
            };
            // Best-effort: clients without dynamic-registration support will
            // reject this; that is fine and deliberately ignored.
            let _ = self.client.register_capability(vec![registration]).await;
        }
    }
|
|
|
|
    /// LSP shutdown: nothing to tear down — caches die with the process.
    async fn shutdown(&self) -> Result<()> {
        Ok(())
    }
|
|
|
|
    /// `.streamd.toml` changed on disk: rebuild the whole state (config,
    /// timezone, caches). If the file disappeared, drop to passive mode.
    async fn did_change_watched_files(&self, params: DidChangeWatchedFilesParams) {
        let has_config_change = params
            .changes
            .iter()
            .any(|c| c.uri.path().ends_with(".streamd.toml"));
        if !has_config_change {
            return;
        }

        // Copy the root out under a short read lock; `build_state` does I/O,
        // so we must not hold any lock while it runs.
        let base_folder: Option<PathBuf> = {
            let guard = self.state.read().await;
            guard.as_ref().map(|s| s.base_folder.clone())
        };

        if let Some(root) = base_folder {
            match Backend::build_state(root).await {
                Some(new_state) => {
                    // Swapping in a fresh state also discards both caches.
                    *self.state.write().await = Some(new_state);
                    self.client
                        .log_message(MessageType::INFO, "streamd LSP: reloaded .streamd.toml")
                        .await;
                }
                None => {
                    *self.state.write().await = None;
                    self.client
                        .log_message(
                            MessageType::INFO,
                            "streamd LSP: .streamd.toml removed, entering passive mode",
                        )
                        .await;
                }
            }
        }
    }
|
|
|
|
async fn did_open(&self, params: DidOpenTextDocumentParams) {
|
|
let guard = self.state.read().await;
|
|
if let Some(state) = guard.as_ref() {
|
|
let uri = ¶ms.text_document.uri;
|
|
state.parse_and_cache(uri, ¶ms.text_document.text);
|
|
let diags = state.compute_diagnostics(uri);
|
|
self.client
|
|
.publish_diagnostics(uri.clone(), diags, None)
|
|
.await;
|
|
}
|
|
}
|
|
|
|
async fn did_change(&self, params: DidChangeTextDocumentParams) {
|
|
let guard = self.state.read().await;
|
|
if let Some(state) = guard.as_ref() {
|
|
let uri = ¶ms.text_document.uri;
|
|
if let Some(change) = params.content_changes.into_iter().last() {
|
|
state.parse_and_cache(uri, &change.text);
|
|
let diags = state.compute_diagnostics(uri);
|
|
self.client
|
|
.publish_diagnostics(uri.clone(), diags, None)
|
|
.await;
|
|
}
|
|
}
|
|
}
|
|
|
|
    /// A document was saved: prefer the text included in the notification
    /// (requested via `include_text` in our capabilities), falling back to a
    /// disk read when a client does not send it. No-op in passive mode.
    async fn did_save(&self, params: DidSaveTextDocumentParams) {
        let guard = self.state.read().await;
        if let Some(state) = guard.as_ref() {
            let uri = &params.text_document.uri;
            if let Some(text) = params.text.as_deref() {
                state.parse_and_cache(uri, text);
            } else if let Ok(path) = uri.to_file_path() {
                if let Ok(text) = fs::read_to_string(&path) {
                    state.parse_and_cache(uri, &text);
                }
            }
            let diags = state.compute_diagnostics(uri);
            self.client
                .publish_diagnostics(uri.clone(), diags, None)
                .await;
        }
    }
|
|
|
|
    /// Marker / temporal completions at the cursor; delegates the actual
    /// logic to `completions_for_line`. Returns `None` in passive mode or
    /// when the document's lines are not cached yet.
    async fn completion(&self, params: CompletionParams) -> Result<Option<CompletionResponse>> {
        let guard = self.state.read().await;
        let state = match guard.as_ref() {
            Some(s) => s,
            None => return Ok(None),
        };

        let uri = &params.text_document_position.text_document.uri;
        let position = params.text_document_position.position;

        let lines = match state.file_lines.get(uri) {
            Some(l) => l.clone(),
            None => return Ok(None),
        };
        // An out-of-range line (stale request) degrades to an empty line,
        // which yields no completions rather than an error.
        let line = lines
            .get(position.line as usize)
            .map(String::as_str)
            .unwrap_or("");

        Ok(Some(CompletionResponse::Array(completions_for_line(
            line,
            position.character,
            &state.config,
        ))))
    }
|
|
|
|
    /// Hierarchical document outline built from the cached shard tree.
    /// Returns `None` in passive mode, when the file is not cached, or when
    /// it failed to localize (cached as `None`).
    async fn document_symbol(
        &self,
        params: DocumentSymbolParams,
    ) -> Result<Option<DocumentSymbolResponse>> {
        let guard = self.state.read().await;
        let state = match guard.as_ref() {
            Some(s) => s,
            None => return Ok(None),
        };

        let uri = &params.text_document.uri;
        let entry = match state.file_cache.get(uri) {
            Some(e) => e,
            None => return Ok(None),
        };
        let root = match entry.as_ref() {
            Some(r) => r,
            None => return Ok(None),
        };

        Ok(Some(DocumentSymbolResponse::Nested(vec![
            shard_to_document_symbol(root),
        ])))
    }
|
|
|
|
    /// Offer quick fixes for the requested range. Currently one action:
    /// append `@Done` after `@Task` on task lines that lack it.
    async fn code_action(&self, params: CodeActionParams) -> Result<Option<CodeActionResponse>> {
        let guard = self.state.read().await;
        let state = match guard.as_ref() {
            Some(s) => s,
            None => return Ok(None),
        };

        let uri = &params.text_document.uri;
        let lines = match state.file_lines.get(uri) {
            Some(l) => l.clone(),
            None => return Ok(None),
        };

        let mut actions: Vec<CodeActionOrCommand> = Vec::new();
        let start_line = params.range.start.line as usize;
        let end_line = params.range.end.line as usize;

        // Clamp the requested range to the cached line count.
        let range_end = end_line.min(lines.len().saturating_sub(1));
        for (line_idx, line) in lines
            .iter()
            .enumerate()
            .skip(start_line)
            .take(range_end.saturating_sub(start_line) + 1)
        {
            if line.contains("@Task") && !line.contains("@Done") {
                // Rewrite the whole line, inserting `@Done` after the first
                // `@Task` occurrence only.
                let new_line = line.replacen("@Task", "@Task @Done", 1);
                let mut changes: std::collections::HashMap<Url, Vec<TextEdit>> =
                    std::collections::HashMap::new();
                changes.insert(
                    uri.clone(),
                    vec![TextEdit {
                        range: Range {
                            start: Position::new(line_idx as u32, 0),
                            // NOTE(review): `line.len()` is a byte count, but
                            // LSP columns default to UTF-16 code units — these
                            // differ on non-ASCII lines; confirm the encoding.
                            end: Position::new(line_idx as u32, line.len() as u32),
                        },
                        new_text: new_line,
                    }],
                );
                actions.push(CodeActionOrCommand::CodeAction(CodeAction {
                    title: "Mark task as done".to_string(),
                    kind: Some(CodeActionKind::QUICKFIX),
                    edit: Some(WorkspaceEdit {
                        changes: Some(changes),
                        ..Default::default()
                    }),
                    ..Default::default()
                }));
            }
        }

        Ok(Some(actions))
    }
|
|
|
|
    /// Workspace symbol search across `.md` files directly under the root
    /// (`max_depth(1)`: subdirectories are not walked). Cached parses are
    /// reused; files parsed from disk are inserted into `file_cache` so the
    /// next query avoids the re-parse.
    async fn symbol(
        &self,
        params: WorkspaceSymbolParams,
    ) -> Result<Option<Vec<SymbolInformation>>> {
        let guard = self.state.read().await;
        let state = match guard.as_ref() {
            Some(s) => s,
            None => return Ok(None),
        };

        // Matching is case-insensitive: the query is lowercased here and
        // names are lowercased inside `collect_workspace_symbols`.
        let query = params.query.to_lowercase();
        let mut symbols = Vec::new();

        for entry in WalkDir::new(&state.base_folder)
            .max_depth(1)
            .into_iter()
            .filter_map(|e| e.ok())
        {
            let path = entry.path();
            if path.extension().is_none_or(|e| e != MD_EXT) {
                continue;
            }
            let uri = match Url::from_file_path(path) {
                Ok(u) => u,
                Err(()) => continue,
            };

            let shard = if let Some(cached) = state.file_cache.get(&uri) {
                cached.clone()
            } else {
                // Cache even a failed parse (None) so it is not retried on
                // every workspace-symbol request.
                let parsed = state.parse_file_from_disk(&uri, path);
                state.file_cache.insert(uri.clone(), parsed.clone());
                parsed
            };

            if let Some(root) = shard {
                collect_workspace_symbols(&root, &uri, &query, &mut symbols);
            }
        }

        Ok(Some(symbols))
    }
|
|
|
|
    /// Find all occurrences of the `@Marker` under the cursor across every
    /// `.md` file directly under the workspace root. Returns an empty list
    /// (not `None`) when the cursor is not on a marker.
    async fn references(&self, params: ReferenceParams) -> Result<Option<Vec<Location>>> {
        let guard = self.state.read().await;
        let state = match guard.as_ref() {
            Some(s) => s,
            None => return Ok(None),
        };

        let uri = &params.text_document_position.text_document.uri;
        let position = params.text_document_position.position;

        // Resolve the marker name from the cached lines of the requesting
        // document; the DashMap guard is dropped before the workspace walk.
        let marker_name: Option<String> = {
            let lines = match state.file_lines.get(uri) {
                Some(l) => l.clone(),
                None => return Ok(None),
            };
            let line = match lines.get(position.line as usize) {
                Some(l) => l.clone(),
                None => return Ok(None),
            };
            extract_marker_at_position(&line, position.character)
        };

        let marker_name = match marker_name {
            Some(m) => m,
            None => return Ok(Some(vec![])),
        };

        let pattern = format!("@{}", marker_name);
        let mut locations = Vec::new();

        // max_depth(1): only files directly in the root folder are searched.
        for entry in WalkDir::new(&state.base_folder)
            .max_depth(1)
            .into_iter()
            .filter_map(|e| e.ok())
        {
            let path = entry.path();
            if path.extension().is_none_or(|e| e != MD_EXT) {
                continue;
            }
            let file_uri = match Url::from_file_path(path) {
                Ok(u) => u,
                Err(()) => continue,
            };
            let lines = match state.ensure_lines(&file_uri, path) {
                Some(l) => l,
                None => continue,
            };

            for (line_idx, abs, after) in find_pattern_occurrences_in_lines(&lines, &pattern) {
                locations.push(Location {
                    uri: file_uri.clone(),
                    range: Range {
                        start: Position::new(line_idx as u32, abs as u32),
                        end: Position::new(line_idx as u32, after as u32),
                    },
                });
            }
        }

        Ok(Some(locations))
    }
|
|
|
|
    /// Rename the `@Marker` under the cursor everywhere in the workspace
    /// (files directly under the root). Produces a `WorkspaceEdit` with one
    /// `TextEdit` per occurrence; files without occurrences get no entry.
    async fn rename(&self, params: RenameParams) -> Result<Option<WorkspaceEdit>> {
        let guard = self.state.read().await;
        let state = match guard.as_ref() {
            Some(s) => s,
            None => return Ok(None),
        };

        let uri = &params.text_document_position.text_document.uri;
        let position = params.text_document_position.position;
        // Strip leading `@` if the user typed it.
        let new_name = params.new_name.trim_start_matches('@').to_string();

        // Resolve the marker name from the cached lines of the requesting
        // document; the DashMap guard is dropped before the workspace walk.
        let marker_name: Option<String> = {
            let lines = match state.file_lines.get(uri) {
                Some(l) => l.clone(),
                None => return Ok(None),
            };
            let line = match lines.get(position.line as usize) {
                Some(l) => l.clone(),
                None => return Ok(None),
            };
            extract_marker_at_position(&line, position.character)
        };

        let marker_name = match marker_name {
            Some(m) => m,
            None => return Ok(None),
        };

        let pattern = format!("@{}", marker_name);
        let replacement = format!("@{}", new_name);
        let mut changes: std::collections::HashMap<Url, Vec<TextEdit>> =
            std::collections::HashMap::new();

        // Same walk as `references`: files directly under the root only.
        for entry in WalkDir::new(&state.base_folder)
            .max_depth(1)
            .into_iter()
            .filter_map(|e| e.ok())
        {
            let path = entry.path();
            if path.extension().is_none_or(|e| e != MD_EXT) {
                continue;
            }
            let file_uri = match Url::from_file_path(path) {
                Ok(u) => u,
                Err(()) => continue,
            };
            let lines = match state.ensure_lines(&file_uri, path) {
                Some(l) => l,
                None => continue,
            };

            let file_edits: Vec<TextEdit> = find_pattern_occurrences_in_lines(&lines, &pattern)
                .into_iter()
                .map(|(line_idx, abs, after)| TextEdit {
                    range: Range {
                        start: Position::new(line_idx as u32, abs as u32),
                        end: Position::new(line_idx as u32, after as u32),
                    },
                    new_text: replacement.clone(),
                })
                .collect();
            if !file_edits.is_empty() {
                changes.insert(file_uri, file_edits);
            }
        }

        Ok(Some(WorkspaceEdit {
            changes: Some(changes),
            ..Default::default()
        }))
    }
|
|
}
|
|
|
|
/// Run the LSP server over stdin/stdout.
|
|
pub fn run() -> std::result::Result<(), StreamdError> {
|
|
let rt = tokio::runtime::Runtime::new().map_err(StreamdError::IoError)?;
|
|
rt.block_on(async {
|
|
let stdin = tokio::io::stdin();
|
|
let stdout = tokio::io::stdout();
|
|
let (service, socket) = LspService::new(Backend::new);
|
|
Server::new(stdin, stdout, socket).serve(service).await;
|
|
});
|
|
Ok(())
|
|
}
|
|
|
|
#[cfg(test)]
mod tests {
    use super::*;
    use crate::models::{Dimension, Marker, MarkerPlacement};

    // Shared fixture: a Task dimension whose `Task` marker resolves to
    // "done"/"waiting" when combined (`if_with`) with `Done`/`Waiting`.
    fn make_config() -> RepositoryConfiguration {
        RepositoryConfiguration::new()
            .with_dimension("task", Dimension::new("Task").with_propagate(false))
            .with_marker(
                "Task",
                Marker::new("Task").with_placements(vec![
                    MarkerPlacement::new("task").with_value("open"),
                    MarkerPlacement::new("task")
                        .with_if_with(vec!["Done"])
                        .with_value("done"),
                    MarkerPlacement::new("task")
                        .with_if_with(vec!["Waiting"])
                        .with_value("waiting"),
                ]),
            )
            .with_marker("Done", Marker::new("Done").with_placements(vec![]))
            .with_marker("Waiting", Marker::new("Waiting").with_placements(vec![]))
            .with_marker("Break", Marker::new("Break").with_placements(vec![]))
    }

    #[test]
    fn test_valid_file_name_no_diagnostic() {
        // Both 4- and 6-digit times, markers, and date-only names pass R15.
        assert!(compute_file_name_diagnostic("20260413-120000_daily.md").is_none());
        assert!(compute_file_name_diagnostic("20260413-1200_daily.md").is_none());
        assert!(compute_file_name_diagnostic("20260413-120000 markers.md").is_none());
        assert!(compute_file_name_diagnostic("20260413 some title.md").is_none());
    }

    #[test]
    fn test_invalid_file_name_emits_warning() {
        let d = compute_file_name_diagnostic("notes.md");
        assert!(d.is_some());
        assert_eq!(d.unwrap().severity, Some(DiagnosticSeverity::WARNING));
    }

    #[test]
    fn test_non_md_file_no_diagnostic() {
        // Non-markdown files are out of scope for R15 entirely.
        assert!(compute_file_name_diagnostic("notes.txt").is_none());
    }

    #[test]
    fn test_extract_markers_from_line() {
        let markers = extract_markers_from_line("@Task @Done some content @Tag");
        assert_eq!(markers, vec!["Task", "Done", "Tag"]);
    }

    #[test]
    fn test_extract_markers_empty_line() {
        assert!(extract_markers_from_line("no markers here").is_empty());
    }

    #[test]
    fn test_extract_marker_at_position_on_marker() {
        let line = "some @Task content";
        // Cursor on 'T' of Task → position 6
        assert_eq!(
            extract_marker_at_position(line, 6),
            Some("Task".to_string())
        );
    }

    #[test]
    fn test_extract_marker_at_position_at_at_sign() {
        let line = "some @Task content";
        // Cursor on '@' → position 5
        assert_eq!(
            extract_marker_at_position(line, 5),
            Some("Task".to_string())
        );
    }

    #[test]
    fn test_extract_marker_at_position_on_content() {
        let line = "some @Task content";
        // Cursor on 'c' of 'content' (position 11)
        assert!(extract_marker_at_position(line, 11).is_none());
    }

    #[test]
    fn test_completions_no_at_sign_returns_empty() {
        let config = make_config();
        let items = completions_for_line("hello world", 5, &config);
        assert!(items.is_empty());
    }

    #[test]
    fn test_completions_at_sign_returns_all_markers() {
        let config = make_config();
        let items = completions_for_line("@", 1, &config);
        assert!(!items.is_empty());
        let labels: Vec<&str> = items.iter().map(|i| i.label.as_str()).collect();
        assert!(labels.contains(&"@Task"));
        assert!(labels.contains(&"@Done"));
    }

    #[test]
    fn test_completions_prefix_filters() {
        let config = make_config();
        let items = completions_for_line("@Ta", 3, &config);
        // Prefix matching is case-insensitive.
        assert!(items
            .iter()
            .all(|i| i.label.starts_with("@Ta") || i.label.starts_with("@ta")));
        assert!(items.iter().any(|i| i.label == "@Task"));
    }

    #[test]
    fn test_completions_conditional_sorted_first() {
        let config = make_config();
        // @Task is on the line → Done and Waiting should be conditional suggestions
        let items = completions_for_line("@Task @", 7, &config);
        let done = items.iter().find(|i| i.label == "@Done");
        let waiting = items.iter().find(|i| i.label == "@Waiting");
        assert!(done.is_some());
        assert!(waiting.is_some());
        // Conditional items have sort_text starting with "0_"
        assert!(done
            .unwrap()
            .sort_text
            .as_deref()
            .unwrap_or("")
            .starts_with("0_"));
    }

    #[test]
    fn test_completions_temporal_snippet_digit_after_at() {
        let config = make_config();
        let items = completions_for_line("@2", 2, &config);
        assert!(!items.is_empty());
        let labels: Vec<&str> = items.iter().map(|i| i.label.as_str()).collect();
        assert!(labels.contains(&"YYYYMMDD"));
        assert!(labels.contains(&"HHMMSS"));
        // Temporal items use snippet format
        assert!(items
            .iter()
            .any(|i| i.kind == Some(CompletionItemKind::SNIPPET)));
    }
}
|