Compare commits

..

No commits in common. "main" and "v0.1.1" have entirely different histories.
main ... v0.1.1

31 changed files with 433 additions and 3957 deletions

View file

@ -11,7 +11,7 @@ jobs:
steps:
- name: Check out Repository
uses: https://git.konstantinfickel.de/actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v6
uses: https://git.konstantinfickel.de/actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- run: nix --version
- run: nix flake check
@ -22,6 +22,6 @@ jobs:
steps:
- name: Check out Repository
uses: https://git.konstantinfickel.de/actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v6
uses: https://git.konstantinfickel.de/actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- run: nix build

View file

@ -2,8 +2,8 @@ name: Release
on:
push:
branches:
- main
tags:
- 'v*'
workflow_dispatch:
jobs:
@ -13,14 +13,15 @@ jobs:
steps:
- name: Check out Repository
uses: https://git.konstantinfickel.de/actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v6
uses: https://git.konstantinfickel.de/actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
fetch-depth: 0
- name: Extract version and handle tagging
id: version
run: |
# Read version from Cargo.toml
if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
# Manual trigger: read version from Cargo.toml
VERSION_LINE=$(grep '^version' Cargo.toml | head -1)
VERSION="${VERSION_LINE#*\"}"
VERSION="${VERSION%\"*}"
@ -28,9 +29,8 @@ jobs:
# Check if tag already exists
if git rev-parse "$TAG" >/dev/null 2>&1; then
echo "Tag ${TAG} already exists, skipping release"
echo "SKIP=true" >> $GITHUB_OUTPUT
exit 0
echo "::error::Version ${VERSION} is already released"
exit 1
fi
# Create and push the tag
@ -39,35 +39,26 @@ jobs:
echo "VERSION=${VERSION}" >> $GITHUB_OUTPUT
echo "TAG=${TAG}" >> $GITHUB_OUTPUT
echo "SKIP=false" >> $GITHUB_OUTPUT
else
# Tag push trigger: extract version from tag
VERSION="${GITHUB_REF_NAME#v}"
echo "VERSION=${VERSION}" >> $GITHUB_OUTPUT
echo "TAG=${GITHUB_REF_NAME}" >> $GITHUB_OUTPUT
fi
- name: Build .deb package
if: steps.version.outputs.SKIP != 'true'
run: nix build .#streamd-deb -o result-deb
- name: Build static binary
if: steps.version.outputs.SKIP != 'true'
run: nix build .#streamd-musl -o result-musl
- name: Build Windows binary
if: steps.version.outputs.SKIP != 'true'
run: nix build .#streamd-windows -o result-windows
- name: Build Zed extension
if: steps.version.outputs.SKIP != 'true'
run: nix build .#zed-extension-zip -o result-zed-extension-zip
- name: Prepare release artifacts
if: steps.version.outputs.SKIP != 'true'
run: |
mkdir -p release
cp result-deb release/streamd_${{ steps.version.outputs.VERSION }}_amd64.deb
cp result-musl/bin/streamd release/streamd-${{ steps.version.outputs.VERSION }}-linux-x86_64
cp result-windows/bin/streamd.exe release/streamd-${{ steps.version.outputs.VERSION }}-windows-x86_64.exe
cp result-zed-extension-zip release/streamd-zed-extension-${{ steps.version.outputs.VERSION }}.zip
- name: Create release
if: steps.version.outputs.SKIP != 'true'
uses: https://git.konstantinfickel.de/actions/forgejo-release@v2
with:
direction: upload

View file

@ -16,8 +16,6 @@ cargo clippy # Lint
cargo fmt # Format
```
After finishing tests, always check the package with `nix flake check`.
## Architecture
Streamd parses markdown files into hierarchical **Shards**, then **localizes** them by assigning temporal moments and dimensional placements based on `@Tag` markers.

734
Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@ -1,6 +1,6 @@
[package]
name = "streamd"
version = "0.2.5"
version = "0.1.1"
edition = "2021"
description = "Personal knowledge management and time-tracking CLI using @Tag annotations"
license = "AGPL-3.0-only"
@ -11,7 +11,6 @@ repository = "https://github.com/konstantinfickel/streamd"
clap = { version = "4", features = ["derive", "env"] }
clap_complete = "4"
serde = { version = "1", features = ["derive"] }
serde_json = "1"
toml = "1.0"
thiserror = "2"
miette = { version = "7", features = ["fancy"] }
@ -24,9 +23,6 @@ walkdir = "2"
indexmap = { version = "2", features = ["serde"] }
itertools = "0.14"
directories = "6"
tower-lsp = "0.20"
tokio = { version = "1", features = ["rt-multi-thread", "io-std"] }
dashmap = "6"
[dev-dependencies]
pretty_assertions = "=1.4.1"

129
README.md
View file

@ -49,27 +49,17 @@ Markdown files are named with a timestamp: `YYYYMMDD-HHMMSS [markers].md`
For example: `20260131-210000 Task Streamd.md`
An optional `_file_type` segment can follow the timestamp to classify the file:
```
YYYYMMDD-HHMMSS_<file_type> [markers].md
```
For example: `20260413-083000_daily.md` — the `daily` prefix is stored as the `file_type` dimension and propagates to all child shards.
Within files, `@`-prefixed markers at the beginning of paragraphs or headings define how a shard is categorized.
## Commands
- `streamd` / `streamd new` — Create a new timestamped markdown entry, opening your editor
- `streamd daily [YYYYMMDD]` — Open today's daily file (or create it if missing); pass a date to open that day's file instead
- `streamd todo` — Show all open tasks (shards with `@Task` markers), numbered for easy reference
- `streamd todo N edit` — Edit task N in your editor, jumping to the task's line
- `streamd todo N done` — Mark task N as done by inserting `@Done` after `@Task`
- `streamd todo --show-future` — Include tasks with future dates in the listing
- `streamd edit [number]` — Edit a stream file by index (most recent first)
- `streamd timesheet` — Generate time reports from `@Timesheet` markers
- `streamd lsp` — Start the LSP server (stdin/stdout transport; see [Editor Integration](#editor-integration) below)
## Configuration
@ -111,122 +101,3 @@ Running `streamd todo` finds all shards marked as open tasks and displays them n
You can quickly edit or complete tasks by number:
- `streamd todo 1 edit` opens task 1 in your editor at the correct line
- `streamd todo 1 done` marks task 1 as done by inserting `@Done` after `@Task`
## Editor Integration
`streamd lsp` starts a Language Server Protocol server that provides IDE features for your stream markdown files. The server communicates over **stdin/stdout** and auto-activates only when a `.streamd.toml` file is present in the workspace root.
### Features
| Feature | Description |
|---|---|
| `@` completions | Suggests known markers from your config; conditional suggestions (e.g. `@Done` when `@Task` is on the line) |
| Temporal snippets | `@` followed by a digit offers `YYYYMMDD` / `HHMMSS` format snippets |
| Diagnostics | File-name format warnings (R15); timesheet errors (overlapping timecards, unclosed days) |
| Document symbols | Shard tree exposed as outline symbols |
| "Mark task as done" | Quick-fix code action: inserts `@Done` after `@Task` |
| Workspace symbols | Search shards across all `.md` files |
| References | Find all occurrences of an `@Marker` across the workspace |
| Rename | Rename an `@Marker` across all files |
### Zed
Add to `~/.config/zed/settings.json`:
```json
{
"languages": {
"Markdown": {
"language_servers": ["streamd-lsp", "..."]
}
},
"lsp": {
"streamd-lsp": {
"binary": {
"path": "streamd",
"arguments": ["lsp"]
}
}
}
}
```
The `"..."` keeps Zed's default Markdown servers (e.g. `marksman`) active alongside streamd.
#### Zed Extension (WSL2)
If you run Zed on Windows with streamd installed inside WSL2, use the pre-built Zed extension instead of the manual config above. The extension auto-detects Windows and routes LSP communication through WSL2.
**1. Install streamd in WSL2** (e.g. via the `.deb` package):
```bash
wget https://git.konstantinfickel.de/kfickel/streamd/releases/download/vX.Y.Z/streamd_X.Y.Z_amd64.deb
sudo dpkg -i streamd_X.Y.Z_amd64.deb
```
**2. Download the extension** from the same release page:
```
streamd-zed-extension-X.Y.Z.zip
```
**3. Extract the zip** to a permanent folder on your Windows machine, e.g.:
```
C:\Users\<you>\zed-extensions\streamd-zed-extension\
```
The folder must contain `extension.toml` and `extension.wasm`.
**4. Install the extension in Zed** via the command palette (`Ctrl+Shift+P`):
```
zed: install dev extension
```
Point Zed to the extracted folder.
**5. Verify** by opening a Markdown file inside a directory that contains `.streamd.toml` — `@` completions and diagnostics should become active.
### Neovim (nvim-lspconfig)
**1. Register the server** — add to your Neovim config (e.g. `~/.config/nvim/init.lua` or a plugin file):
```lua
local lspconfig = require('lspconfig')
local configs = require('lspconfig.configs')
if not configs.streamd then
configs.streamd = {
default_config = {
cmd = { 'streamd', 'lsp' },
filetypes = { 'markdown' },
root_dir = lspconfig.util.root_pattern('.streamd.toml'),
single_file_support = false,
},
}
end
lspconfig.streamd.setup {}
```
The server activates automatically when Neovim opens a Markdown file inside a directory that contains a `.streamd.toml` file.
**2. Using LSP features** — standard Neovim LSP keymaps apply (`:help lsp`):
| Action | Default keymap | Notes |
|---|---|---|
| Trigger `@` completions | `<C-x><C-o>` (insert mode) | Or via your completion plugin (`nvim-cmp`, `blink.cmp`, …) |
| Show diagnostics for current line | `<C-w>d` / `gl` | File-name format warnings, timesheet errors |
| Jump to next / previous diagnostic | `]d` / `[d` | Navigate between warnings |
| Code actions (mark task as done) | `<leader>ca` (Neovim ≥ 0.10) | Place cursor on a line with `@Task` |
| Rename marker across all files | `<leader>cr` / `grn` | Renames the `@Marker` under the cursor everywhere |
| Find all references to a marker | `grr` / `<leader>fr` | Lists every occurrence of `@Marker` across the workspace |
| Document outline (shard tree) | `:lua vim.lsp.buf.document_symbol()` | Or via Telescope: `:Telescope lsp_document_symbols` |
| Workspace symbol search | `:lua vim.lsp.buf.workspace_symbol()` | Or via Telescope: `:Telescope lsp_workspace_symbols` |
> **Note:** default keymaps (`grn`, `grr`, `<C-w>d`, `]d`/`[d`) are available from Neovim 0.10+. On older versions use `:lua vim.lsp.buf.*` commands or set up keymaps manually in your `on_attach` callback.
### VS Code (tasks.json / manual)
Use any extension that lets you configure custom LSP servers, pointing `cmd` to `streamd lsp`.

View file

@ -275,18 +275,13 @@ This allows conditional placements to override base placements.
### R15: File Name Format
Files follow the pattern: `YYYYMMDD-HHMMSS[_file_type] [markers].md`
Files follow the pattern: `YYYYMMDD-HHMMSS [markers].md`
- `YYYYMMDD`: Date (8 digits, required)
- `HHMMSS`: Time (4-6 digits, optional, pads with zeros)
- `_file_type`: Optional alphanumeric prefix identifying the file type (e.g. `_daily`)
- `[markers]`: Space-separated marker names extracted from file content
**Extraction regex for datetime:** `^(?P<date>\d{8})(?:-(?P<time>\d{4,6}))?.+\.md$`
**Extraction regex for file type:** `^\d{8}(?:-\d{4,6})?_([a-zA-Z0-9]+)`
When a `_file_type` prefix is present it is stored in the `file_type` dimension of the root shard and propagates to all child shards.
**Extraction regex:** `^(?P<date>\d{8})(?:-(?P<time>\d{4,6}))?.+\.md$`
### R16: Temporal Markers
@ -392,7 +387,6 @@ Provide recursive search through the shard tree:
| Command | Description |
|---------|-------------|
| `streamd new` | Create new timestamped file, open editor, rename with markers on close |
| `streamd daily [YYYYMMDD]` | Open the earliest daily file for the given date (default: today in configured timezone), or create a new `_daily` file if none exists |
| `streamd todo` | List all shards with `task: "open"`, numbered, hiding future tasks |
| `streamd todo --show-future` | Include tasks with future dates in the todo listing |
| `streamd todo N edit` | Edit task N in editor, cursor positioned at task line |
@ -400,24 +394,6 @@ Provide recursive search through the shard tree:
| `streamd edit [n]` | Edit nth file (supports negative indexing for recent files) |
| `streamd timesheet` | Generate formatted timesheet report with expected/actual hours |
| `streamd completions <shell>` | Generate shell completions (bash, zsh, fish, elvish, powershell) |
| `streamd lsp` | Start Language Server Protocol server over stdin/stdout |
### R21a: Daily Command Behavior
`streamd daily [YYYYMMDD]` provides quick access to the daily journal entry for a given date.
**Date resolution:**
- If a `YYYYMMDD` argument is provided, it is parsed as the target date.
- If no argument is given, today's date is used, interpreted in the repository timezone (from `.streamd.toml`, defaulting to UTC).
**File lookup:**
- All markdown files in the base folder are localized.
- Files with `file_type = "daily"` whose root shard `moment` falls within the target date (in the configured timezone) are collected.
- The file with the earliest `moment` is opened in `$EDITOR` (defaults to `vi`).
**File creation:**
- If no matching file is found, a new file is created at `<now_local>_daily.md` (e.g. `20260413-083000_daily.md`) containing `# ` and opened in the editor.
- The `_daily` suffix is permanent — it identifies the file type and is not renamed after editing.
### R21: Todo Command Behavior
@ -472,53 +448,3 @@ Multiple configurations can be merged:
- Dimensions are combined (later configs can add new dimensions)
- Markers are combined (later configs can add new markers)
- This allows base configuration + domain-specific extensions
---
## LSP Server
### R25: LSP Subcommand
`streamd lsp` starts a Language Server Protocol server over stdin/stdout.
**Workspace root resolution:**
- The base folder is taken from `initializeParams.rootUri` (or `rootPath` as fallback).
- R22/R23 global config resolution is bypassed in LSP mode.
**Passive mode:**
- If `.streamd.toml` is absent from the workspace root, the server enters passive mode: all requests return empty results and no diagnostics are published.
**Config watching:**
- The server registers a `workspace/didChangeWatchedFiles` watcher for `.streamd.toml`.
- Config is reloaded without restarting the server when `.streamd.toml` changes.
**Document sync:**
- Full-document sync (`TextDocumentSyncKind::FULL`).
- Re-parses on `didOpen`, `didChange`, and `didSave`.
### R25a: LSP Completion
- Trigger character: `@`
- Returns marker names from the merged config (BasicTimesheetConfiguration + TaskConfiguration).
- Conditional suggestions: if marker A is on the line and A has placements with `if_with: {B}`, B is offered with higher priority.
- Temporal snippets: `@` followed by a digit offers `YYYYMMDD` and `HHMMSS` format snippets (R16).
### R25b: LSP Diagnostics
- **File-name format (R15)**: Warning when the file basename does not match `^(\d{8})(?:-(\d{4,6}))?.+\.md$`.
- **Timesheet violations (R18)**: Error when a day ends without a break; Warning for overlapping timecards.
### R25c: LSP Document Symbols
- Returns the `LocalizedShard` tree as nested `DocumentSymbol` nodes.
- Symbol names are derived from marker names or tag names.
### R25d: LSP Code Actions
- "Mark task as done": offered on any line containing `@Task` without `@Done`; inserts ` @Done` after `@Task`.
### R25e: LSP Cross-file Features
- `workspace/symbol`: searches all `.md` files in base folder (depth 1) for shards matching the query.
- `textDocument/references`: finds all occurrences of the `@Marker` under the cursor across the workspace.
- `textDocument/rename`: renames an `@Marker` across all files via `WorkspaceEdit`.

24
flake.lock generated
View file

@ -2,11 +2,11 @@
"nodes": {
"crane": {
"locked": {
"lastModified": 1775839657,
"narHash": "sha256-SPm9ck7jh3Un9nwPuMGbRU04UroFmOHjLP56T10MOeM=",
"lastModified": 1774313767,
"narHash": "sha256-hy0XTQND6avzGEUFrJtYBBpFa/POiiaGBr2vpU6Y9tY=",
"owner": "ipetkov",
"repo": "crane",
"rev": "7cf72d978629469c4bd4206b95c402514c1f6000",
"rev": "3d9df76e29656c679c744968b17fbaf28f0e923d",
"type": "github"
},
"original": {
@ -40,11 +40,11 @@
]
},
"locked": {
"lastModified": 1775585728,
"narHash": "sha256-8Psjt+TWvE4thRKktJsXfR6PA/fWWsZ04DVaY6PUhr4=",
"lastModified": 1775036584,
"narHash": "sha256-zW0lyy7ZNNT/x8JhzFHBsP2IPx7ATZIPai4FJj12BgU=",
"owner": "cachix",
"repo": "git-hooks.nix",
"rev": "580633fa3fe5fc0379905986543fd7495481913d",
"rev": "4e0eb042b67d863b1b34b3f64d52ceb9cd926735",
"type": "github"
},
"original": {
@ -76,11 +76,11 @@
},
"nixpkgs": {
"locked": {
"lastModified": 1775710090,
"narHash": "sha256-ar3rofg+awPB8QXDaFJhJ2jJhu+KqN/PRCXeyuXR76E=",
"lastModified": 1775036866,
"narHash": "sha256-ZojAnPuCdy657PbTq5V0Y+AHKhZAIwSIT2cb8UgAz/U=",
"owner": "nixos",
"repo": "nixpkgs",
"rev": "4c1018dae018162ec878d42fec712642d214fdfa",
"rev": "6201e203d09599479a3b3450ed24fa81537ebc4e",
"type": "github"
},
"original": {
@ -105,11 +105,11 @@
]
},
"locked": {
"lastModified": 1775963625,
"narHash": "sha256-OmwF0Rd/HDbEGC0ZcBS2jPMvmCcn3HDqUypjXrR7KfM=",
"lastModified": 1775099554,
"narHash": "sha256-3xBsGnGDLOFtnPZ1D3j2LU19wpAlYefRKTlkv648rU0=",
"owner": "oxalica",
"repo": "rust-overlay",
"rev": "573a61faa8ec910a6b8576cc3c145844245574f3",
"rev": "8d6387ed6d8e6e6672fd3ed4b61b59d44b124d99",
"type": "github"
},
"original": {

119
flake.nix
View file

@ -89,41 +89,6 @@
}
);
mkZedExtension =
system:
let
pkgs = mkPkgs system;
toolchain = pkgs.rust-bin.stable.latest.default.override {
targets = [ "wasm32-wasip1" ];
};
craneLib = (crane.mkLib pkgs).overrideToolchain toolchain;
extensionSrc = ./zed-extension;
vendoredDeps = craneLib.vendorCargoDeps { src = extensionSrc; };
in
pkgs.stdenv.mkDerivation {
pname = "streamd-zed-extension";
version = "0.0.1";
src = extensionSrc;
nativeBuildInputs = [
toolchain
pkgs.cargo-component
];
buildPhase = ''
export HOME=$TMPDIR
mkdir -p .cargo
cp ${vendoredDeps}/config.toml .cargo/config.toml
cargo component build --release --offline
'';
installPhase = ''
mkdir -p $out
cp extension.toml $out/
cp target/wasm32-wasip1/release/streamd_zed.wasm $out/extension.wasm
'';
};
mkGitHooksCheck =
system:
let
@ -141,36 +106,6 @@
};
};
mkGitHooksDev =
system:
let
pkgs = mkPkgs system;
toolchain = pkgs.rust-bin.stable.latest.default;
in
git-hooks.lib.${system}.run {
src = ./.;
hooks = {
rustfmt = {
enable = true;
package = toolchain;
};
clippy = {
enable = true;
package = toolchain;
settings.denyWarnings = true;
};
cargo-test = {
enable = true;
name = "cargo test";
entry = "${toolchain}/bin/cargo test";
pass_filenames = false;
language = "system";
files = "\\.(rs|toml)$";
};
commitizen.enable = true;
};
};
mkMuslCraneLib =
system:
let
@ -197,53 +132,6 @@
in
craneLib.buildPackage (commonArgs // { inherit cargoArtifacts; });
mkWindowsCraneLib =
system:
let
pkgs = mkPkgs system;
toolchain = pkgs.rust-bin.stable.latest.default.override {
targets = [ "x86_64-pc-windows-gnu" ];
};
in
(crane.mkLib pkgs).overrideToolchain toolchain;
mkStreamdWindows =
system:
let
pkgs = mkPkgs system;
pkgsCross = pkgs.pkgsCross.mingwW64;
craneLib = mkWindowsCraneLib system;
commonArgs = {
src = craneLib.path ./.;
pname = "streamd";
inherit version;
strictDeps = true;
CARGO_BUILD_TARGET = "x86_64-pc-windows-gnu";
CC_x86_64_pc_windows_gnu = "${pkgsCross.stdenv.cc}/bin/x86_64-w64-mingw32-gcc";
CARGO_TARGET_X86_64_PC_WINDOWS_GNU_LINKER = "${pkgsCross.stdenv.cc}/bin/x86_64-w64-mingw32-gcc";
nativeBuildInputs = [ pkgsCross.stdenv.cc ];
buildInputs = [ pkgsCross.windows.pthreads ];
doCheck = false;
};
cargoArtifacts = craneLib.buildDepsOnly commonArgs;
in
craneLib.buildPackage (commonArgs // { inherit cargoArtifacts; });
mkZedExtensionZip =
system:
let
pkgs = mkPkgs system;
zed-extension = mkZedExtension system;
in
pkgs.runCommand "streamd-zed-extension-${version}.zip" {
nativeBuildInputs = [ pkgs.zip ];
} ''
mkdir -p streamd-zed-extension
cp ${zed-extension}/extension.toml streamd-zed-extension/
cp ${zed-extension}/extension.wasm streamd-zed-extension/
zip -r $out streamd-zed-extension
'';
mkStreamdDeb =
system:
let
@ -293,12 +181,9 @@
streamd = mkStreamd system;
streamd-musl = mkStreamdMusl system;
streamd-deb = mkStreamdDeb system;
streamd-windows = mkStreamdWindows system;
zed-extension = mkZedExtension system;
zed-extension-zip = mkZedExtensionZip system;
in
{
inherit streamd streamd-musl streamd-deb streamd-windows zed-extension zed-extension-zip;
inherit streamd streamd-musl streamd-deb;
default = streamd;
}
);
@ -385,7 +270,7 @@
];
shellHook = ''
${(mkGitHooksDev system).shellHook}
${(mkGitHooksCheck system).shellHook}
'';
};
}

View file

@ -20,11 +20,10 @@ pub enum TodoAction {
/// Task number to edit
number: usize,
},
/// Mark one or more tasks as done
/// Mark a task as done
Done {
/// Task numbers to mark as done (processed highest-index-first for stable indices)
#[arg(required = true, num_args = 1..)]
numbers: Vec<usize>,
/// Task number to mark as done
number: usize,
},
}
@ -51,21 +50,7 @@ pub enum Commands {
},
/// Display extracted timesheets
Timesheet {
/// Display time as decimal hours (X.XXh) instead of the default HH:MM format
#[arg(short, long)]
decimal: bool,
/// Show all timecards grouped by day instead of the summary report
#[arg(short, long)]
debug: bool,
},
/// Open or create the daily entry for a given date
Daily {
/// Date in YYYYMMDD format (defaults to today in configured timezone)
date: Option<String>,
},
Timesheet,
/// Generate shell completions
Completions {
@ -73,7 +58,4 @@ pub enum Commands {
#[arg(value_enum)]
shell: Shell,
},
/// Start LSP server (communicates over stdin/stdout)
Lsp,
}

View file

@ -1,83 +0,0 @@
use std::fs;
use std::path::Path;
use std::process::Command;
use chrono::{Days, NaiveDate, NaiveDateTime, NaiveTime, TimeZone, Utc};
use chrono_tz::Tz;
use crate::config::Settings;
use crate::error::StreamdError;
use crate::models::RepositoryConfiguration;
use crate::timesheet::load_repository_config;
use super::load_markdown_shards;
/// Open (or create) the daily journal entry for a given date.
///
/// `date` is an optional `YYYYMMDD` string; when absent, "today" in the
/// repository's configured timezone is used. The earliest markdown file
/// whose `file_type` dimension is `"daily"` and whose moment falls on the
/// target date is opened in `$EDITOR` (default `vi`); if none exists, a new
/// `<timestamp>_daily.md` file containing `# ` is created and opened.
///
/// Returns `StreamdError::ConfigError` for a malformed date argument and
/// propagates settings/IO errors from the helpers it calls.
pub fn run(date: Option<String>) -> Result<(), StreamdError> {
let settings = Settings::load()?;
let base_folder = Path::new(&settings.base_folder);
let repo_config = load_repository_config(base_folder)?;
// Timezone from the repository config; falls back to UTC when the field
// is absent or fails to parse (parse errors are silently discarded).
let tz: Tz = repo_config
.timezone
.as_deref()
.and_then(|s| s.parse().ok())
.unwrap_or(chrono_tz::UTC);
// Resolve the target calendar date: explicit YYYYMMDD argument, or the
// current date as seen in the configured timezone.
let target_date: NaiveDate = match date {
Some(s) => NaiveDate::parse_from_str(&s, "%Y%m%d").map_err(|_| {
StreamdError::ConfigError("Invalid date format, expected YYYYMMDD".into())
})?,
None => Utc::now().with_timezone(&tz).date_naive(),
};
// UTC bounds [day_start, day_end) of the target local day.
// NOTE(review): `.earliest().unwrap()` panics if local midnight does not
// exist (e.g. a DST gap that skips 00:00) — confirm this is acceptable.
let day_start = tz
.from_local_datetime(&NaiveDateTime::new(target_date, NaiveTime::MIN))
.earliest()
.unwrap()
.with_timezone(&Utc);
let day_end = tz
.from_local_datetime(&NaiveDateTime::new(
target_date + Days::new(1),
NaiveTime::MIN,
))
.earliest()
.unwrap()
.with_timezone(&Utc);
// Localize every markdown file in the base folder, then keep only shards
// tagged with file_type == "daily" whose moment falls on the target day.
let all_shards = load_markdown_shards(base_folder, &RepositoryConfiguration::new(), tz)?;
let mut daily_shards: Vec<_> = all_shards
.into_iter()
.filter(|s| {
s.location
.get("file_type")
.map(|v| v == "daily")
.unwrap_or(false)
&& s.moment >= day_start
&& s.moment < day_end
})
.collect();
// Earliest-first so `.first()` below picks the earliest daily file.
daily_shards.sort_by_key(|s| s.moment);
let editor = std::env::var("EDITOR").unwrap_or_else(|_| "vi".to_string());
if let Some(shard) = daily_shards.first() {
// NOTE(review): assumes localization always records a "file" entry in
// `location` — `.unwrap()` panics otherwise; confirm that invariant.
let file_path = shard.location.get("file").unwrap();
Command::new(&editor).arg(file_path).status()?;
} else {
// No existing daily file: create one. Use the current local time for
// today's file, local midnight for a past/future date.
let now_local = Utc::now().with_timezone(&tz);
let file_timestamp = if target_date == now_local.date_naive() {
now_local
} else {
tz.from_local_datetime(&NaiveDateTime::new(target_date, NaiveTime::MIN))
.earliest()
.unwrap()
};
let file_name = file_timestamp.format("%Y%m%d-%H%M%S_daily.md").to_string();
let file_path = base_folder.join(&file_name);
// Seed with an empty heading so the editor opens on a title line.
fs::write(&file_path, "# ")?;
Command::new(&editor).arg(&file_path).status()?;
println!("Created {}", file_name);
}
Ok(())
}

View file

@ -1,19 +1,40 @@
use std::path::Path;
use std::fs;
use std::process::Command;
use walkdir::WalkDir;
use crate::config::Settings;
use crate::error::StreamdError;
use crate::localize::TaskConfiguration;
use crate::extract::parse_markdown_file;
use crate::localize::{localize_stream_file, TaskConfiguration};
use crate::models::LocalizedShard;
use super::load_markdown_shards;
/// Collect localized shards from every `.md` file directly inside the
/// configured base folder (`max_depth(1)` — subdirectories are not entered).
///
/// Files are localized with `TaskConfiguration`; files that fail to
/// localize are silently skipped, while settings/IO errors propagate.
fn all_files() -> Result<Vec<LocalizedShard>, StreamdError> {
let settings = Settings::load()?;
let mut shards = Vec::new();
for entry in WalkDir::new(&settings.base_folder)
.max_depth(1)
.into_iter()
// Unreadable directory entries are dropped rather than erroring out.
.filter_map(|e| e.ok())
{
let path = entry.path();
if path.extension().map(|e| e == "md").unwrap_or(false) {
// Lossy conversion: non-UTF-8 path bytes become U+FFFD.
let file_name = path.to_string_lossy().to_string();
let content = fs::read_to_string(path)?;
let stream_file = parse_markdown_file(&file_name, &content);
// Best-effort: a file that cannot be localized is ignored.
if let Ok(shard) = localize_stream_file(&stream_file, &TaskConfiguration) {
shards.push(shard);
}
}
}
Ok(shards)
}
pub fn run(number: i32) -> Result<(), StreamdError> {
let settings = Settings::load()?;
let all_shards = load_markdown_shards(
Path::new(&settings.base_folder),
&TaskConfiguration,
chrono_tz::UTC,
)?;
let all_shards = all_files()?;
// Sort by moment (timestamp)
let mut sorted_shards = all_shards;
@ -44,13 +65,7 @@ pub fn run(number: i32) -> Result<(), StreamdError> {
};
if let Some(file_path) = sorted_shards[selected_index].location.get("file") {
let editor = std::env::var("EDITOR").unwrap_or_else(|_| {
if cfg!(windows) {
"notepad".to_string()
} else {
"vi".to_string()
}
});
let editor = std::env::var("EDITOR").unwrap_or_else(|_| "vi".to_string());
Command::new(&editor).arg(file_path).status()?;
}

File diff suppressed because it is too large Load diff

View file

@ -1,42 +1,5 @@
use std::fs;
use std::path::Path;
use chrono_tz::Tz;
use walkdir::WalkDir;
use crate::error::StreamdError;
use crate::extract::parse_markdown_file;
use crate::localize::localize_stream_file;
use crate::models::{LocalizedShard, RepositoryConfiguration};
pub mod completions;
pub mod daily;
pub mod edit;
pub mod lsp;
pub mod new;
pub mod timesheet;
pub mod todo;
/// Parse and localize every `.md` file directly inside `base_folder`
/// (`max_depth(1)` — subdirectories are not descended into).
///
/// * `config` — repository configuration used for localization.
/// * `tz` — timezone applied when localizing temporal markers.
///
/// Files that fail localization are silently skipped; settings/IO errors
/// from reading the folder or file contents are propagated.
pub fn load_markdown_shards(
base_folder: &Path,
config: &RepositoryConfiguration,
tz: Tz,
) -> Result<Vec<LocalizedShard>, StreamdError> {
let mut shards = Vec::new();
for entry in WalkDir::new(base_folder)
.max_depth(1)
.into_iter()
// Unreadable directory entries are dropped rather than erroring out.
.filter_map(|e| e.ok())
{
let path = entry.path();
if path.extension().map(|e| e == "md").unwrap_or(false) {
// Lossy conversion: non-UTF-8 path bytes become U+FFFD.
let file_name = path.to_string_lossy().to_string();
let content = fs::read_to_string(path)?;
let stream_file = parse_markdown_file(&file_name, &content);
// Best-effort: a file that cannot be localized is ignored.
if let Ok(shard) = localize_stream_file(&stream_file, config, tz) {
shards.push(shard);
}
}
}
Ok(shards)
}

View file

@ -24,13 +24,7 @@ pub fn run() -> Result<(), StreamdError> {
drop(file);
// Open in editor
let editor = std::env::var("EDITOR").unwrap_or_else(|_| {
if cfg!(windows) {
"notepad".to_string()
} else {
"vi".to_string()
}
});
let editor = std::env::var("EDITOR").unwrap_or_else(|_| "vi".to_string());
let status = Command::new(&editor).arg(&preliminary_path).status()?;
if !status.success() {

View file

@ -1,58 +1,57 @@
use std::collections::HashMap;
use std::fs;
use std::path::Path;
use chrono::Datelike;
use chrono::NaiveDate;
use chrono::Utc;
use chrono_tz::Tz;
use walkdir::WalkDir;
use crate::config::Settings;
const SEPARATOR_WIDTH: usize = 71;
const COLUMN_SEPARATOR_WIDTH: usize = 65;
use crate::error::StreamdError;
use crate::models::Timesheet;
use crate::extract::parse_markdown_file;
use crate::localize::localize_stream_file;
use crate::models::LocalizedShard;
use crate::timesheet::{
extract_timesheets, generate_report, load_repository_config, BasicTimesheetConfiguration,
DayType, DayWarning, MonthReport, TimesheetReport,
};
use super::load_markdown_shards;
fn load_all_shards(base_folder: &Path) -> Result<Vec<LocalizedShard>, StreamdError> {
let mut shards = Vec::new();
enum DisplayMode {
Minutes,
Decimal,
for entry in WalkDir::new(base_folder)
.max_depth(1)
.into_iter()
.filter_map(|e| e.ok())
{
let path = entry.path();
if path.extension().map(|e| e == "md").unwrap_or(false) {
let file_name = path.to_string_lossy().to_string();
let content = fs::read_to_string(path)?;
let stream_file = parse_markdown_file(&file_name, &content);
if let Ok(shard) = localize_stream_file(&stream_file, &BasicTimesheetConfiguration) {
shards.push(shard);
}
}
}
Ok(shards)
}
/// Format minutes with sign for display.
fn format_diff(minutes: i64, mode: &DisplayMode) -> String {
let sign = if minutes >= 0 { "+" } else { "-" };
match mode {
DisplayMode::Minutes => {
let h = minutes.unsigned_abs() / 60;
let m = minutes.unsigned_abs() % 60;
format!("{}{}:{:02}", sign, h, m)
}
DisplayMode::Decimal => {
let hours = minutes.unsigned_abs() as f64 / 60.0;
format!("{}{:.2}h", sign, hours)
}
/// Format hours with sign for display.
fn format_diff(hours: f64) -> String {
if hours >= 0.0 {
format!("+{:.1}h", hours.abs())
} else {
format!("{:.1}h", hours)
}
}
/// Format minutes for display without sign.
fn format_hours(minutes: i64, mode: &DisplayMode) -> String {
match mode {
DisplayMode::Minutes => {
let h = minutes.unsigned_abs() / 60;
let m = minutes.unsigned_abs() % 60;
format!("{}:{:02}", h, m)
}
DisplayMode::Decimal => {
let hours = minutes.unsigned_abs() as f64 / 60.0;
format!("{:.2}h", hours)
}
}
/// Format hours for display without sign.
fn format_hours(hours: f64) -> String {
format!("{:.1}h", hours.abs())
}
/// Get the weekday abbreviation.
@ -78,8 +77,8 @@ fn print_header() {
}
/// Print a month report.
fn print_month(month: &MonthReport, mode: &DisplayMode) {
let diff_str = format_diff(month.diff(), mode);
fn print_month(month: &MonthReport) {
let diff_str = format_diff(month.diff());
let month_title = format!("{} {}", month.month_name(), month.year);
// Month header with diff
@ -100,9 +99,9 @@ fn print_month(month: &MonthReport, mode: &DisplayMode) {
for day in &month.days {
let date_str = day.date.format("%Y-%m-%d").to_string();
let weekday = weekday_abbrev(day.date);
let expected = format_hours(day.expected_minutes, mode);
let actual = format_hours(day.actual_minutes, mode);
let diff = format_diff(day.diff(), mode);
let expected = format_hours(day.expected_hours);
let actual = format_hours(day.actual_hours);
let diff = format_diff(day.diff());
let type_str = match day.day_type {
DayType::Regular => String::new(),
@ -135,26 +134,26 @@ fn print_month(month: &MonthReport, mode: &DisplayMode) {
println!(" {}", light_line);
println!(
" Monthly: {:>7} {:>7} {:>6}",
format_hours(month.total_expected(), mode),
format_hours(month.total_actual(), mode),
format_diff(month.diff(), mode)
format_hours(month.total_expected()),
format_hours(month.total_actual()),
format_diff(month.diff())
);
println!();
}
/// Print the cumulative balance.
fn print_cumulative_balance(balance: i64, mode: &DisplayMode) {
fn print_cumulative_balance(balance: f64) {
let light_line = "\u{2500}".repeat(SEPARATOR_WIDTH);
println!("{}", light_line);
println!(
" CUMULATIVE BALANCE: {}",
format_diff(balance, mode)
format_diff(balance)
);
println!("{}", light_line);
}
/// Print warnings section.
fn print_warnings(report: &TimesheetReport, mode: &DisplayMode) {
fn print_warnings(report: &TimesheetReport) {
if !report.has_warnings() {
return;
}
@ -215,11 +214,11 @@ fn print_warnings(report: &TimesheetReport, mode: &DisplayMode) {
if !outside_period_warnings.is_empty() {
println!(" Work logged outside configured periods:");
for w in &outside_period_warnings {
if let DayWarning::OutsidePeriod { minutes_worked } = &w.warning {
if let DayWarning::OutsidePeriod { hours_worked } = &w.warning {
println!(
" - {}: {} worked (no period configured)",
" - {}: {:.1}h worked (no period configured)",
w.date.format("%Y-%m-%d"),
format_hours(*minutes_worked, mode)
hours_worked
);
}
}
@ -227,83 +226,13 @@ fn print_warnings(report: &TimesheetReport, mode: &DisplayMode) {
}
}
/// Print debug view: all timecards grouped and sorted by day.
fn print_debug(report: &TimesheetReport, timesheets: &[Timesheet]) {
    // Index timesheets by date for O(1) lookup while walking the report.
    let by_date: HashMap<NaiveDate, &Timesheet> =
        timesheets.iter().map(|ts| (ts.date, ts)).collect();

    let heavy_line = "\u{2550}".repeat(SEPARATOR_WIDTH);
    for month in &report.months {
        // Month banner.
        println!("{}", heavy_line);
        println!(" {} {}", month.month_name(), month.year);
        println!("{}", heavy_line);
        println!();

        for day in &month.days {
            // Human-readable label for every non-regular day type.
            let type_label = match day.day_type {
                DayType::Regular => None,
                DayType::SickLeave => Some("Sick Leave"),
                DayType::Vacation => Some("Vacation"),
                DayType::Holiday => Some("Holiday"),
                DayType::FlexDay => Some("Flex Day"),
                DayType::Weekend => Some("Weekend"),
                DayType::Missing => Some("Missing"),
                DayType::OutsidePeriod => Some("Outside Period"),
            };

            let mut entries: Vec<String> = Vec::new();
            entries.extend(type_label.map(str::to_string));

            // Append each timecard as "HH:MM - HH:MM".
            if let Some(ts) = by_date.get(&day.date) {
                entries.extend(ts.timecards.iter().map(|tc| {
                    format!(
                        "{} - {}",
                        tc.from_time.format("%H:%M"),
                        tc.to_time.format("%H:%M")
                    )
                }));
            }

            // An empty entry list renders as an empty suffix, same as before.
            println!(
                " {} ({}): {}",
                day.date.format("%Y-%m-%d"),
                weekday_abbrev(day.date),
                entries.join(", ")
            );
        }
        println!();
    }
}
pub fn run(decimal: bool, debug: bool) -> Result<(), StreamdError> {
let mode = if decimal {
DisplayMode::Decimal
} else {
DisplayMode::Minutes
};
pub fn run() -> Result<(), StreamdError> {
let settings = Settings::load()?;
let base_folder = Path::new(&settings.base_folder);
// Load repository configuration
let repo_config = load_repository_config(base_folder)?;
// Parse timezone from config, defaulting to UTC
let tz: Tz = repo_config
.timezone
.as_deref()
.and_then(|s| s.parse().ok())
.unwrap_or(chrono_tz::UTC);
// Check if timesheet is configured
let timesheet_config = match repo_config.timesheet {
Some(config) => config,
@ -321,70 +250,27 @@ pub fn run(decimal: bool, debug: bool) -> Result<(), StreamdError> {
}
};
let now = Utc::now();
// Load all markdown files and extract timesheets
let all_shards = load_markdown_shards(base_folder, &BasicTimesheetConfiguration, tz)?;
let timesheets = extract_timesheets(&all_shards, now, tz)?;
let all_shards = load_all_shards(base_folder)?;
let timesheets = extract_timesheets(&all_shards)?;
// Generate the report
let report = generate_report(&timesheets, &timesheet_config, now, tz)?;
let report = generate_report(&timesheets, &timesheet_config)?;
if report.months.is_empty() {
println!("No timesheet data found for the configured periods.");
return Ok(());
}
if debug {
print_debug(&report, &timesheets);
} else {
// Print the report
print_header();
for month in &report.months {
print_month(month, &mode);
print_month(month);
}
print_cumulative_balance(report.cumulative_balance, &mode);
print_warnings(&report, &mode);
}
print_cumulative_balance(report.cumulative_balance);
print_warnings(&report);
Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Decimal mode renders minutes as fractional hours with two decimals.
    #[test]
    fn test_format_hours_decimal() {
        let cases = [(480, "8.00h"), (510, "8.50h"), (507, "8.45h"), (0, "0.00h")];
        for &(minutes, expected) in cases.iter() {
            assert_eq!(format_hours(minutes, &DisplayMode::Decimal), expected);
        }
    }

    /// Minutes mode renders as H:MM with zero-padded minutes.
    #[test]
    fn test_format_hours_minutes() {
        let cases = [
            (480, "8:00"),
            (510, "8:30"),
            (0, "0:00"),
            (75, "1:15"),
            (77, "1:17"),
            (200, "3:20"),
        ];
        for &(minutes, expected) in cases.iter() {
            assert_eq!(format_hours(minutes, &DisplayMode::Minutes), expected);
        }
    }

    /// Decimal diffs always carry an explicit sign, including zero.
    #[test]
    fn test_format_diff_decimal() {
        let cases = [(30, "+0.50h"), (-90, "-1.50h"), (0, "+0.00h")];
        for &(minutes, expected) in cases.iter() {
            assert_eq!(format_diff(minutes, &DisplayMode::Decimal), expected);
        }
    }

    /// Signed H:MM formatting for balances.
    #[test]
    fn test_format_diff_minutes() {
        let cases = [(30, "+0:30"), (-90, "-1:30"), (0, "+0:00"), (75, "+1:15")];
        for &(minutes, expected) in cases.iter() {
            assert_eq!(format_diff(minutes, &DisplayMode::Minutes), expected);
        }
    }
}

View file

@ -1,24 +1,42 @@
use std::fs;
use std::path::Path;
use std::process::Command;
use chrono::Utc;
use walkdir::WalkDir;
use crate::config::Settings;
use crate::error::StreamdError;
use crate::localize::TaskConfiguration;
use crate::extract::parse_markdown_file;
use crate::localize::{localize_stream_file, TaskConfiguration};
use crate::models::LocalizedShard;
use crate::query::find_shard_by_position;
use super::load_markdown_shards;
/// Collect every localizable markdown file in the top level of the base folder.
///
/// Files that fail localization (e.g. no extractable date in the name) are
/// silently skipped; I/O errors while reading a file are propagated.
fn all_files() -> Result<Vec<LocalizedShard>, StreamdError> {
    let settings = Settings::load()?;
    let mut shards = Vec::new();

    // Depth 1: only direct children of the base folder; unreadable entries
    // are ignored rather than aborting the whole scan.
    let walker = WalkDir::new(&settings.base_folder)
        .max_depth(1)
        .into_iter()
        .filter_map(Result::ok);

    for entry in walker {
        let path = entry.path();
        // Only markdown files participate.
        if path.extension().map(|e| e == "md") != Some(true) {
            continue;
        }
        let file_name = path.to_string_lossy().to_string();
        let content = fs::read_to_string(path)?;
        let stream_file = parse_markdown_file(&file_name, &content);
        if let Ok(shard) = localize_stream_file(&stream_file, &TaskConfiguration) {
            shards.push(shard);
        }
    }

    Ok(shards)
}
pub fn collect_open_tasks(show_future: bool) -> Result<Vec<LocalizedShard>, StreamdError> {
let settings = Settings::load()?;
let all_shards = load_markdown_shards(
Path::new(&settings.base_folder),
&TaskConfiguration,
chrono_tz::UTC,
)?;
let all_shards = all_files()?;
let now = Utc::now();
let mut tasks: Vec<LocalizedShard> = find_shard_by_position(&all_shards, "task", "open")
@ -72,19 +90,13 @@ pub fn run_edit(number: usize) -> Result<(), StreamdError> {
.get("file")
.ok_or(StreamdError::MissingFilePath)?;
let editor = std::env::var("EDITOR").unwrap_or_else(|_| {
if cfg!(windows) {
"notepad".to_string()
} else {
"vi".to_string()
}
});
let editor = std::env::var("EDITOR").unwrap_or_else(|_| "vi".to_string());
let line_arg = format!("+{}", task.start_line);
let mut cmd = Command::new(&editor);
if !editor.to_lowercase().contains("notepad") {
cmd.arg(format!("+{}", task.start_line));
}
let status = cmd.arg(file_path).status()?;
let status = Command::new(&editor)
.arg(&line_arg)
.arg(file_path)
.status()?;
if !status.success() {
return Err(StreamdError::IoError(std::io::Error::other(
@ -95,12 +107,15 @@ pub fn run_edit(number: usize) -> Result<(), StreamdError> {
Ok(())
}
pub fn mark_task_done(task_number: usize, tasks: &[LocalizedShard]) -> Result<(), StreamdError> {
if task_number == 0 || task_number > tasks.len() {
return Err(StreamdError::InvalidTaskNumber(task_number, tasks.len()));
pub fn run_done(number: usize) -> Result<(), StreamdError> {
// Always include all tasks for done (user might want to mark a future task as done)
let tasks = collect_open_tasks(true)?;
if number == 0 || number > tasks.len() {
return Err(StreamdError::InvalidTaskNumber(number, tasks.len()));
}
let task = &tasks[task_number - 1];
let task = &tasks[number - 1];
let file_path = task
.location
.get("file")
@ -109,6 +124,7 @@ pub fn mark_task_done(task_number: usize, tasks: &[LocalizedShard]) -> Result<()
let content = fs::read_to_string(file_path)?;
let mut lines: Vec<String> = content.lines().map(String::from).collect();
// Find the line containing @Task (should be at start_line)
let task_line_idx = task.start_line.saturating_sub(1);
if task_line_idx >= lines.len() {
return Err(StreamdError::InvalidLineNumber);
@ -116,6 +132,7 @@ pub fn mark_task_done(task_number: usize, tasks: &[LocalizedShard]) -> Result<()
let line = &lines[task_line_idx];
// Check for multiple @Task occurrences
let task_count = line.matches("@Task").count();
if task_count > 1 {
return Err(StreamdError::MultipleTaskMarkers(
@ -130,9 +147,11 @@ pub fn mark_task_done(task_number: usize, tasks: &[LocalizedShard]) -> Result<()
));
}
// Insert @Done after @Task
let new_line = line.replacen("@Task", "@Task @Done", 1);
lines[task_line_idx] = new_line.clone();
lines[task_line_idx] = new_line;
// Write back to file, preserving trailing newline if present
let new_content = if content.ends_with('\n') {
format!("{}\n", lines.join("\n"))
} else {
@ -140,39 +159,7 @@ pub fn mark_task_done(task_number: usize, tasks: &[LocalizedShard]) -> Result<()
};
fs::write(file_path, new_content)?;
// Print the completed task block
let start = task.start_line.saturating_sub(1);
let end = std::cmp::min(task.end_line, lines.len());
println!(
"Done: [{}] --- {}:{} ---",
task_number, file_path, task.start_line
);
for line in &lines[start..end] {
println!("{}", line);
}
println!();
Ok(())
}
pub fn run_done(numbers: &[usize]) -> Result<(), StreamdError> {
let tasks = collect_open_tasks(true)?;
// Validate all numbers before processing any
for &number in numbers {
if number == 0 || number > tasks.len() {
return Err(StreamdError::InvalidTaskNumber(number, tasks.len()));
}
}
// Process highest-index-first so earlier indices remain valid
let mut sorted_numbers: Vec<usize> = numbers.to_vec();
sorted_numbers.sort_unstable_by(|a, b| b.cmp(a));
sorted_numbers.dedup();
for number in sorted_numbers {
mark_task_done(number, &tasks)?;
}
println!("Marked task {} as done", number);
Ok(())
}
@ -341,65 +328,4 @@ mod tests {
"No @Task marker found in /path/file.md:42"
);
}
/// Task indices are processed in descending order so that handling a later
/// entry never invalidates an earlier index.
#[test]
fn test_done_numbers_sorted_highest_first() {
    let mut numbers: Vec<usize> = vec![1, 3, 2];
    numbers.sort_unstable_by_key(|&n| std::cmp::Reverse(n));
    assert_eq!(numbers, vec![3, 2, 1]);
}
/// Sorting descending before `dedup` removes duplicate task indices.
#[test]
fn test_done_numbers_deduped() {
    let mut numbers: Vec<usize> = vec![3, 2, 3, 1];
    numbers.sort_unstable_by_key(|&n| std::cmp::Reverse(n));
    numbers.dedup();
    assert_eq!(numbers, vec![3, 2, 1]);
}
/// Marking a task done rewrites the backing file in place, appending
/// "@Done" after the "@Task" marker.
#[test]
fn test_mark_task_done_writes_file_and_prints() {
    use std::io::Write;
    use tempfile::NamedTempFile;

    let mut file = NamedTempFile::new().unwrap();
    writeln!(file, "## Fix the thing @Task").unwrap();
    let path = file.path().to_str().unwrap().to_string();

    // Minimal localized shard pointing at line 1 of the temp file.
    let mut location = IndexMap::new();
    location.insert("file".to_string(), path.clone());
    location.insert("task".to_string(), "open".to_string());
    let shard = LocalizedShard {
        markers: vec!["Task".to_string()],
        tags: vec![],
        start_line: 1,
        end_line: 1,
        moment: Utc::now(),
        location,
        children: vec![],
    };

    mark_task_done(1, &[shard]).unwrap();

    let rewritten = fs::read_to_string(&path).unwrap();
    assert!(rewritten.contains("@Task @Done"));
}
/// Index 0 is never valid — task numbering is 1-based.
#[test]
fn test_mark_task_done_invalid_number_zero() {
    let tasks: Vec<LocalizedShard> = Vec::new();
    let err = mark_task_done(0, &tasks).unwrap_err();
    assert!(matches!(err, StreamdError::InvalidTaskNumber(0, 0)));
}
/// An index past the end of the task list is rejected, reporting the
/// actual list length.
#[test]
fn test_mark_task_done_invalid_number_exceeds() {
    let tasks = vec![make_task_shard(Utc::now(), "a.md")];
    let err = mark_task_done(2, &tasks).unwrap_err();
    assert!(matches!(err, StreamdError::InvalidTaskNumber(2, 1)));
}
}

View file

@ -37,10 +37,8 @@ impl Settings {
fn config_path() -> PathBuf {
if let Some(proj_dirs) = ProjectDirs::from("", "", "streamd") {
proj_dirs.config_dir().join("config.toml")
} else if let Some(base_dirs) = directories::BaseDirs::new() {
base_dirs.config_dir().join("streamd").join("config.toml")
} else {
PathBuf::from("streamd_config.toml")
PathBuf::from("~/.config/streamd/config.toml")
}
}
}

View file

@ -12,8 +12,6 @@ struct BlockInfo {
end_line: usize,
block_type: BlockType,
events: Vec<Event<'static>>,
/// Nested list items contained within this block (for ListItem blocks with sub-lists).
nested_items: Vec<BlockInfo>,
}
#[derive(Debug, Clone, PartialEq)]
@ -112,14 +110,12 @@ pub fn parse_markdown_file(file_name: &str, file_content: &str) -> StreamFile {
fn collect_blocks(content: &str, parser: Parser) -> Vec<BlockInfo> {
let mut blocks = Vec::new();
let mut current_block: Option<BlockInfo> = None;
let _current_events: Vec<Event<'static>> = Vec::new();
let mut depth = 0;
let mut list_items: Vec<BlockInfo> = Vec::new();
let mut in_list = false;
let mut list_start_line = 0;
// Stack for nested lists: (saved current_block, saved list_items, saved list_start_line)
let mut list_nesting_stack: Vec<(Option<BlockInfo>, Vec<BlockInfo>, usize)> = Vec::new();
// Pre-compute line starts for offset-to-line mapping
let line_starts: Vec<usize> = std::iter::once(0)
.chain(content.match_indices('\n').map(|(i, _)| i + 1))
@ -139,7 +135,6 @@ fn collect_blocks(content: &str, parser: Parser) -> Vec<BlockInfo> {
end_line: line,
block_type: BlockType::Paragraph,
events: Vec::new(),
nested_items: Vec::new(),
});
}
depth += 1;
@ -171,7 +166,6 @@ fn collect_blocks(content: &str, parser: Parser) -> Vec<BlockInfo> {
end_line: line,
block_type: BlockType::Heading(heading_level),
events: Vec::new(),
nested_items: Vec::new(),
});
}
depth += 1;
@ -192,15 +186,7 @@ fn collect_blocks(content: &str, parser: Parser) -> Vec<BlockInfo> {
}
}
Event::Start(Tag::List(_)) => {
if in_list {
// Entering a nested list: save current list item and collected items
list_nesting_stack.push((
current_block.take(),
std::mem::take(&mut list_items),
list_start_line,
));
list_start_line = line;
} else {
if !in_list {
in_list = true;
list_start_line = line;
list_items.clear();
@ -209,18 +195,7 @@ fn collect_blocks(content: &str, parser: Parser) -> Vec<BlockInfo> {
}
Event::End(TagEnd::List(_)) => {
depth -= 1;
if let Some((parent_block, parent_items, parent_start_line)) =
list_nesting_stack.pop()
{
// Nested list ended: attach collected items as nested children of parent item
let nested = std::mem::take(&mut list_items);
list_start_line = parent_start_line;
list_items = parent_items;
current_block = parent_block.map(|mut item| {
item.nested_items = nested;
item
});
} else if depth == 0 && in_list {
if depth == 0 && in_list {
in_list = false;
// Create a list block containing all list items
if !list_items.is_empty() {
@ -229,7 +204,6 @@ fn collect_blocks(content: &str, parser: Parser) -> Vec<BlockInfo> {
end_line: line,
block_type: BlockType::List,
events: vec![], // List events are handled through list_items
nested_items: vec![],
});
// Store list items for later processing
for item in list_items.drain(..) {
@ -248,7 +222,6 @@ fn collect_blocks(content: &str, parser: Parser) -> Vec<BlockInfo> {
end_line: line,
block_type: BlockType::ListItem,
events: Vec::new(),
nested_items: Vec::new(),
});
}
}
@ -267,7 +240,6 @@ fn collect_blocks(content: &str, parser: Parser) -> Vec<BlockInfo> {
end_line: line,
block_type: BlockType::CodeBlock,
events: Vec::new(),
nested_items: Vec::new(),
});
}
depth += 1;
@ -535,21 +507,13 @@ fn parse_single_block_shard(
}
}
BlockType::List | BlockType::ListItem => {
// List handling is complex - for now, extract any markers/tags
let (markers, tags) = extract_block_markers_and_tags(block);
// Recursively build child shards from nested list items
let children: Vec<Shard> = block
.nested_items
.iter()
.filter_map(|item| {
let (child, _) = parse_single_block_shard(item, item.start_line, item.end_line);
child
})
.collect();
if markers.is_empty() && children.is_empty() {
if markers.is_empty() {
(None, tags)
} else {
(
Some(build_shard(start_line, end_line, markers, tags, children)),
Some(build_shard(start_line, end_line, markers, tags, vec![])),
vec![],
)
}
@ -752,26 +716,6 @@ mod tests {
);
}
/// Nested bullet lists nest their shards: the indented @Task 2 becomes a
/// child of @Task 1, while @Task 3 stays a top-level sibling.
#[test]
fn test_parse_nested_list_creates_three_shards() {
    let content = "* @Task 1\n * @Task 2\n* @Task 3";
    let root = parse_markdown_file(&make_file_name(), content)
        .shard
        .unwrap();

    // Exactly two top-level children: @Task 1 and @Task 3.
    assert_eq!(root.children.len(), 2, "expected 2 top-level shards");

    let task1 = &root.children[0];
    assert_eq!(task1.markers, vec!["Task"], "@Task 1 marker");
    assert_eq!(task1.children.len(), 1, "@Task 1 should have one child");

    let task2 = &task1.children[0];
    assert_eq!(task2.markers, vec!["Task"], "@Task 2 marker");
    assert!(task2.children.is_empty(), "@Task 2 should have no children");

    let task3 = &root.children[1];
    assert_eq!(task3.markers, vec!["Task"], "@Task 3 marker");
    assert!(task3.children.is_empty(), "@Task 3 should have no children");
}
#[test]
fn test_parse_continues_looking_for_markers_after_first_link_marker() {
let result = parse_markdown_file(

View file

@ -1,5 +1,4 @@
use chrono::{DateTime, NaiveDate, NaiveDateTime, NaiveTime, TimeZone, Utc};
use chrono_tz::Tz;
use chrono::{DateTime, NaiveDate, NaiveDateTime, NaiveTime, Utc};
use once_cell::sync::Lazy;
use regex::Regex;
use std::path::Path;
@ -9,11 +8,6 @@ use std::path::Path;
static FILE_NAME_REGEX: Lazy<Regex> =
Lazy::new(|| Regex::new(r"^(?P<date>\d{8})(?:-(?P<time>\d{4,6}))?.+\.md$").unwrap());
/// Regex for extracting a file-type prefix from file names.
/// Matches filenames like `20260412-123456_daily.md` or `20260412_daily Some Title.md`.
static FILE_TYPE_REGEX: Lazy<Regex> =
Lazy::new(|| Regex::new(r"^\d{8}(?:-\d{4,6})?_([a-zA-Z0-9]+)").unwrap());
/// Regex for validating datetime marker format (14 digits).
static DATETIME_MARKER_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"^\d{14}$").unwrap());
@ -23,25 +17,15 @@ static DATE_MARKER_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"^\d{8}$").unwr
/// Regex for validating time marker format (6 digits).
static TIME_MARKER_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"^\d{6}$").unwrap());
/// Convert a NaiveDateTime to UTC via the given timezone.
/// Falls back to the earliest local interpretation for ambiguous DST times.
fn naive_to_utc(dt: NaiveDateTime, tz: Tz) -> Option<DateTime<Utc>> {
    // Prefer the unambiguous mapping; during a DST-end overlap take the
    // earlier of the two candidate instants. Nonexistent local times
    // (spring-forward gap) yield None.
    let resolved = tz
        .from_local_datetime(&dt)
        .single()
        .or_else(|| tz.from_local_datetime(&dt).earliest());
    resolved.map(|local| local.with_timezone(&Utc))
}
/// Extract a datetime from a file name in the format YYYYMMDD-HHMMSS.
///
/// The time component is optional and can be 4-6 digits (HHMM, HHMMS, or HHMMSS).
/// The datetime is interpreted in the given timezone.
///
/// # Examples
/// - "20230101-123456 Some Text.md" -> DateTime for 2023-01-01 12:34:56 in tz
/// - "20230101 Some Text.md" -> DateTime for 2023-01-01 00:00:00 in tz
/// - "20230101-123456 Some Text.md" -> DateTime for 2023-01-01 12:34:56
/// - "20230101 Some Text.md" -> DateTime for 2023-01-01 00:00:00
/// - "invalid-file-name.md" -> None
pub fn extract_datetime_from_file_name(file_name: &str, tz: Tz) -> Option<DateTime<Utc>> {
pub fn extract_datetime_from_file_name(file_name: &str) -> Option<DateTime<Utc>> {
let base_name = Path::new(file_name)
.file_name()
.and_then(|s| s.to_str())
@ -64,45 +48,20 @@ pub fn extract_datetime_from_file_name(file_name: &str, tz: Tz) -> Option<DateTi
NaiveDateTime::parse_from_str(&datetime_str, "%Y%m%d %H:%M:%S")
.ok()
.and_then(|dt| naive_to_utc(dt, tz))
}
/// Extract the file-type prefix from a filename.
///
/// Filenames with a `_prefix` segment after the timestamp (and optional time component)
/// are recognised. The prefix must consist of alphanumeric characters only.
///
/// # Examples
/// - `"20260412-123456_daily.md"` → `Some("daily")`
/// - `"20260412_daily Some Title.md"` → `Some("daily")`
/// - `"20260412-123456 Some Title.md"` → `None`
/// - `"/path/to/20260412-123456_daily.md"` → `Some("daily")`
pub fn extract_file_type_from_file_name(file_name: &str) -> Option<String> {
    // Strip any directory components; fall back to the raw input when the
    // path has no representable final component.
    let base_name = Path::new(file_name)
        .file_name()
        .and_then(|s| s.to_str())
        .unwrap_or(file_name);

    let captures = FILE_TYPE_REGEX.captures(base_name)?;
    captures.get(1).map(|m| m.as_str().to_string())
}
/// Parse a 14-digit marker string as a NaiveDateTime without timezone conversion.
fn parse_naive_datetime_from_marker(marker: &str) -> Option<NaiveDateTime> {
if !DATETIME_MARKER_REGEX.is_match(marker) {
return None;
}
NaiveDateTime::parse_from_str(marker, "%Y%m%d%H%M%S").ok()
.map(|dt| dt.and_utc())
}
/// Extract a datetime from a marker string in the exact format: YYYYMMDDHHMMSS.
///
/// The datetime is interpreted in the given timezone.
/// Returns the parsed datetime if the format matches and values are valid.
pub fn extract_datetime_from_marker(marker: &str, tz: Tz) -> Option<DateTime<Utc>> {
parse_naive_datetime_from_marker(marker).and_then(|dt| naive_to_utc(dt, tz))
pub fn extract_datetime_from_marker(marker: &str) -> Option<DateTime<Utc>> {
if !DATETIME_MARKER_REGEX.is_match(marker) {
return None;
}
NaiveDateTime::parse_from_str(marker, "%Y%m%d%H%M%S")
.ok()
.map(|dt| dt.and_utc())
}
/// Extract a date from a marker string in the exact format: YYYYMMDD.
@ -131,7 +90,6 @@ pub fn extract_time_from_marker(marker: &str) -> Option<NaiveTime> {
///
/// The function processes markers in reverse order, allowing later markers to override
/// earlier ones. It combines date-only and time-only markers when both are present.
/// All naive datetimes (from markers and the inherited fallback) are interpreted in `tz`.
///
/// Rules:
/// - If a full datetime marker (14 digits) is found, it sets both date and time
@ -141,7 +99,6 @@ pub fn extract_time_from_marker(marker: &str) -> Option<NaiveTime> {
pub fn extract_datetime_from_marker_list(
markers: &[String],
inherited_datetime: DateTime<Utc>,
tz: Tz,
) -> DateTime<Utc> {
let mut shard_time: Option<NaiveTime> = None;
let mut shard_date: Option<NaiveDate> = None;
@ -154,84 +111,34 @@ pub fn extract_datetime_from_marker_list(
if let Some(date) = extract_date_from_marker(marker) {
shard_date = Some(date);
}
if let Some(naive_dt) = parse_naive_datetime_from_marker(marker) {
shard_date = Some(naive_dt.date());
shard_time = Some(naive_dt.time());
if let Some(datetime) = extract_datetime_from_marker(marker) {
shard_date = Some(datetime.naive_utc().date());
shard_time = Some(datetime.naive_utc().time());
}
}
// Interpret the inherited datetime in the configured timezone for fallback values
let inherited_local = inherited_datetime.with_timezone(&tz).naive_local();
// Combine date and time, applying defaults as needed
let final_date = shard_date.unwrap_or_else(|| inherited_local.date());
let final_date = shard_date.unwrap_or_else(|| inherited_datetime.naive_utc().date());
let final_time = match (shard_date, shard_time) {
// If we have a date but no time, use midnight
(Some(_), None) => NaiveTime::from_hms_opt(0, 0, 0).unwrap(),
// Otherwise use the shard time or inherit
_ => shard_time.unwrap_or_else(|| inherited_local.time()),
_ => shard_time.unwrap_or_else(|| inherited_datetime.naive_utc().time()),
};
let naive = NaiveDateTime::new(final_date, final_time);
naive_to_utc(naive, tz).unwrap_or(inherited_datetime)
NaiveDateTime::new(final_date, final_time).and_utc()
}
#[cfg(test)]
mod tests {
use super::*;
use chrono::TimeZone;
use chrono_tz::UTC;
/// A `_daily` prefix after a full timestamp-with-time is recognised.
#[test]
fn test_extract_file_type_with_time() {
    let file_type = extract_file_type_from_file_name("20260412-123456_daily.md");
    assert_eq!(file_type.as_deref(), Some("daily"));
}
/// A trailing title after the prefix does not interfere with extraction.
#[test]
fn test_extract_file_type_with_time_and_title() {
    let file_type = extract_file_type_from_file_name("20260412-123456_daily Some Title.md");
    assert_eq!(file_type.as_deref(), Some("daily"));
}
/// The time component is optional; a date-only timestamp still matches.
#[test]
fn test_extract_file_type_without_time() {
    let file_type = extract_file_type_from_file_name("20260412_daily.md");
    assert_eq!(file_type.as_deref(), Some("daily"));
}
/// A filename without a `_prefix` segment yields no file type.
#[test]
fn test_extract_file_type_without_prefix() {
    let file_type = extract_file_type_from_file_name("20260412-123456 Some Title.md");
    assert!(file_type.is_none());
}
/// Directory components are stripped before matching.
#[test]
fn test_extract_file_type_with_full_path() {
    let file_type = extract_file_type_from_file_name("/path/to/20260412-123456_daily.md");
    assert_eq!(file_type.as_deref(), Some("daily"));
}
/// Files without a leading timestamp never yield a file type.
#[test]
fn test_extract_file_type_no_timestamp() {
    assert!(extract_file_type_from_file_name("notes.md").is_none());
}
#[test]
fn test_extract_date_from_file_name_valid() {
let file_name = "20230101-123456 Some Text.md";
assert_eq!(
extract_datetime_from_file_name(file_name, UTC),
extract_datetime_from_file_name(file_name),
Some(Utc.with_ymd_and_hms(2023, 1, 1, 12, 34, 56).unwrap())
);
}
@ -239,14 +146,14 @@ mod tests {
#[test]
fn test_extract_date_from_file_name_invalid() {
let file_name = "invalid-file-name.md";
assert_eq!(extract_datetime_from_file_name(file_name, UTC), None);
assert_eq!(extract_datetime_from_file_name(file_name), None);
}
#[test]
fn test_extract_date_from_file_name_without_time() {
let file_name = "20230101 Some Text.md";
assert_eq!(
extract_datetime_from_file_name(file_name, UTC),
extract_datetime_from_file_name(file_name),
Some(Utc.with_ymd_and_hms(2023, 1, 1, 0, 0, 0).unwrap())
);
}
@ -255,7 +162,7 @@ mod tests {
fn test_extract_date_from_file_name_short_time() {
let file_name = "20230101-1234 Some Text.md";
assert_eq!(
extract_datetime_from_file_name(file_name, UTC),
extract_datetime_from_file_name(file_name),
Some(Utc.with_ymd_and_hms(2023, 1, 1, 12, 34, 0).unwrap())
);
}
@ -263,61 +170,41 @@ mod tests {
#[test]
fn test_extract_date_from_file_name_empty_string() {
let file_name = "";
assert_eq!(extract_datetime_from_file_name(file_name, UTC), None);
assert_eq!(extract_datetime_from_file_name(file_name), None);
}
#[test]
fn test_extract_date_from_file_name_with_full_path() {
let file_name = "/path/to/20230101-123456 Some Text.md";
assert_eq!(
extract_datetime_from_file_name(file_name, UTC),
extract_datetime_from_file_name(file_name),
Some(Utc.with_ymd_and_hms(2023, 1, 1, 12, 34, 56).unwrap())
);
}
/// Timestamps in file names are interpreted in the given zone:
/// 12:00 in Europe/Berlin (CET, UTC+1 in January) is 11:00 UTC.
#[test]
fn test_extract_date_from_file_name_with_timezone_offset() {
    let parsed = extract_datetime_from_file_name(
        "20230101-120000 Some Text.md",
        chrono_tz::Europe::Berlin,
    );
    let expected = Utc.with_ymd_and_hms(2023, 1, 1, 11, 0, 0).unwrap();
    assert_eq!(parsed, Some(expected));
}
#[test]
fn test_extract_datetime_from_marker_valid() {
let marker = "20250101150000";
assert_eq!(
extract_datetime_from_marker(marker, UTC),
extract_datetime_from_marker(marker),
Some(Utc.with_ymd_and_hms(2025, 1, 1, 15, 0, 0).unwrap())
);
}
/// Marker timestamps honour the timezone: 15:00 in Europe/Berlin
/// (CET, UTC+1 in January) is 14:00 UTC.
#[test]
fn test_extract_datetime_from_marker_with_timezone_offset() {
    let parsed = extract_datetime_from_marker("20250101150000", chrono_tz::Europe::Berlin);
    let expected = Utc.with_ymd_and_hms(2025, 1, 1, 14, 0, 0).unwrap();
    assert_eq!(parsed, Some(expected));
}
#[test]
fn test_extract_datetime_from_marker_invalid_format() {
assert_eq!(extract_datetime_from_marker("2025010115000", UTC), None); // too short
assert_eq!(extract_datetime_from_marker("202501011500000", UTC), None); // too long
assert_eq!(extract_datetime_from_marker("2025-01-01T150000", UTC), None); // separators
assert_eq!(extract_datetime_from_marker("2025010115000a", UTC), None); // non-digit
assert_eq!(extract_datetime_from_marker("", UTC), None);
assert_eq!(extract_datetime_from_marker("2025010115000"), None); // too short
assert_eq!(extract_datetime_from_marker("202501011500000"), None); // too long
assert_eq!(extract_datetime_from_marker("2025-01-01T150000"), None); // separators
assert_eq!(extract_datetime_from_marker("2025010115000a"), None); // non-digit
assert_eq!(extract_datetime_from_marker(""), None);
}
#[test]
fn test_extract_datetime_from_marker_invalid_values() {
assert_eq!(extract_datetime_from_marker("20250230120000", UTC), None); // Feb 30
assert_eq!(extract_datetime_from_marker("20250101126000", UTC), None); // minute 60
assert_eq!(extract_datetime_from_marker("20250101240000", UTC), None); // hour 24
assert_eq!(extract_datetime_from_marker("20250230120000"), None); // Feb 30
assert_eq!(extract_datetime_from_marker("20250101126000"), None); // minute 60
assert_eq!(extract_datetime_from_marker("20250101240000"), None); // hour 24
}
#[test]
@ -373,10 +260,7 @@ mod tests {
#[test]
fn test_no_markers_inherits_datetime() {
let inherited = Utc.with_ymd_and_hms(2025, 1, 2, 3, 4, 5).unwrap();
assert_eq!(
extract_datetime_from_marker_list(&[], inherited, UTC),
inherited
);
assert_eq!(extract_datetime_from_marker_list(&[], inherited), inherited);
}
#[test]
@ -389,7 +273,7 @@ mod tests {
"1234567".to_string(),
];
assert_eq!(
extract_datetime_from_marker_list(&markers, inherited, UTC),
extract_datetime_from_marker_list(&markers, inherited),
inherited
);
}
@ -399,7 +283,7 @@ mod tests {
let inherited = Utc.with_ymd_and_hms(2025, 6, 7, 8, 9, 10).unwrap();
let markers = vec!["20250101".to_string()];
assert_eq!(
extract_datetime_from_marker_list(&markers, inherited, UTC),
extract_datetime_from_marker_list(&markers, inherited),
Utc.with_ymd_and_hms(2025, 1, 1, 0, 0, 0).unwrap()
);
}
@ -409,7 +293,7 @@ mod tests {
let inherited = Utc.with_ymd_and_hms(2025, 6, 7, 8, 9, 10).unwrap();
let markers = vec!["150000".to_string()];
assert_eq!(
extract_datetime_from_marker_list(&markers, inherited, UTC),
extract_datetime_from_marker_list(&markers, inherited),
Utc.with_ymd_and_hms(2025, 6, 7, 15, 0, 0).unwrap()
);
}
@ -419,7 +303,7 @@ mod tests {
let inherited = Utc.with_ymd_and_hms(2025, 6, 7, 8, 9, 10).unwrap();
let markers = vec!["20250101150000".to_string()];
assert_eq!(
extract_datetime_from_marker_list(&markers, inherited, UTC),
extract_datetime_from_marker_list(&markers, inherited),
Utc.with_ymd_and_hms(2025, 1, 1, 15, 0, 0).unwrap()
);
}
@ -429,7 +313,7 @@ mod tests {
let inherited = Utc.with_ymd_and_hms(2025, 6, 7, 8, 9, 10).unwrap();
let markers = vec!["20250101".to_string(), "150000".to_string()];
assert_eq!(
extract_datetime_from_marker_list(&markers, inherited, UTC),
extract_datetime_from_marker_list(&markers, inherited),
Utc.with_ymd_and_hms(2025, 1, 1, 15, 0, 0).unwrap()
);
}
@ -444,7 +328,7 @@ mod tests {
"160000".to_string(),
];
assert_eq!(
extract_datetime_from_marker_list(&markers, inherited, UTC),
extract_datetime_from_marker_list(&markers, inherited),
Utc.with_ymd_and_hms(2025, 1, 1, 15, 0, 0).unwrap()
);
}
@ -459,7 +343,7 @@ mod tests {
];
// The first date (20250101) and first time (150000) should win over the later combined datetime
assert_eq!(
extract_datetime_from_marker_list(&markers, inherited, UTC),
extract_datetime_from_marker_list(&markers, inherited),
Utc.with_ymd_and_hms(2025, 1, 1, 15, 0, 0).unwrap()
);
}
@ -474,19 +358,8 @@ mod tests {
"150000".to_string(), // valid
];
assert_eq!(
extract_datetime_from_marker_list(&markers, inherited, UTC),
extract_datetime_from_marker_list(&markers, inherited),
Utc.with_ymd_and_hms(2025, 1, 1, 15, 0, 0).unwrap()
);
}
/// A time-only marker combined with the inherited date is converted from
/// the configured zone: 15:00 in Europe/Berlin (CEST, UTC+2 in summer)
/// is 13:00 UTC.
#[test]
fn test_marker_list_with_timezone_offset() {
    let inherited = Utc.with_ymd_and_hms(2025, 6, 7, 8, 9, 10).unwrap();
    let markers = ["150000".to_string()];
    let localized =
        extract_datetime_from_marker_list(&markers, inherited, chrono_tz::Europe::Berlin);
    assert_eq!(localized, Utc.with_ymd_and_hms(2025, 6, 7, 13, 0, 0).unwrap());
}
}

View file

@ -9,7 +9,7 @@ pub use configuration::{
};
pub use datetime::{
extract_date_from_marker, extract_datetime_from_file_name, extract_datetime_from_marker,
extract_datetime_from_marker_list, extract_file_type_from_file_name, extract_time_from_marker,
extract_datetime_from_marker_list, extract_time_from_marker,
};
pub use preconfigured::TaskConfiguration;
pub use shard::{localize_shard, localize_stream_file};

View file

@ -20,12 +20,6 @@ pub static TaskConfiguration: Lazy<RepositoryConfiguration> = Lazy::new(|| {
.with_comment("Project the task is attached to")
.with_propagate(true),
)
.with_dimension(
"file_type",
Dimension::new("File Type")
.with_comment("Type of file derived from filename prefix (e.g. 'daily')")
.with_propagate(true),
)
.with_marker(
"Task",
Marker::new("Task").with_placements(vec![

View file

@ -1,14 +1,10 @@
use chrono::{DateTime, Utc};
use chrono_tz::Tz;
use indexmap::{IndexMap, IndexSet};
use crate::error::StreamdError;
use crate::models::{LocalizedShard, RepositoryConfiguration, Shard, StreamFile};
use super::datetime::{
extract_datetime_from_file_name, extract_datetime_from_marker_list,
extract_file_type_from_file_name,
};
use super::datetime::{extract_datetime_from_file_name, extract_datetime_from_marker_list};
/// Localize a shard within the repository's coordinate system.
///
@ -21,13 +17,12 @@ pub fn localize_shard(
config: &RepositoryConfiguration,
propagated: &IndexMap<String, String>,
moment: DateTime<Utc>,
tz: Tz,
) -> LocalizedShard {
let mut position = propagated.clone();
let mut private_position: IndexMap<String, String> = IndexMap::new();
// Extract datetime from markers
let adjusted_moment = extract_datetime_from_marker_list(&shard.markers, moment, tz);
let adjusted_moment = extract_datetime_from_marker_list(&shard.markers, moment);
// Convert markers to a set for if_with checking
let marker_set: IndexSet<String> = shard.markers.iter().cloned().collect();
@ -69,7 +64,7 @@ pub fn localize_shard(
let children: Vec<LocalizedShard> = shard
.children
.iter()
.map(|child| localize_shard(child, config, &position, adjusted_moment, tz))
.map(|child| localize_shard(child, config, &position, adjusted_moment))
.collect();
// Merge private position into final position
@ -89,13 +84,11 @@ pub fn localize_shard(
/// Localize an entire stream file.
///
/// Extracts the datetime from the file name and localizes the root shard.
/// Timestamps in the file name and markers are interpreted in `tz`.
pub fn localize_stream_file(
stream_file: &StreamFile,
config: &RepositoryConfiguration,
tz: Tz,
) -> Result<LocalizedShard, StreamdError> {
let shard_date = extract_datetime_from_file_name(&stream_file.file_name, tz)
let shard_date = extract_datetime_from_file_name(&stream_file.file_name)
.ok_or_else(|| StreamdError::DateExtractionError(stream_file.file_name.clone()))?;
let shard = stream_file
@ -105,17 +98,8 @@ pub fn localize_stream_file(
let mut initial_location = IndexMap::new();
initial_location.insert("file".to_string(), stream_file.file_name.clone());
if let Some(file_type) = extract_file_type_from_file_name(&stream_file.file_name) {
initial_location.insert("file_type".to_string(), file_type);
}
Ok(localize_shard(
shard,
config,
&initial_location,
shard_date,
tz,
))
Ok(localize_shard(shard, config, &initial_location, shard_date))
}
#[cfg(test)]
@ -123,7 +107,6 @@ mod tests {
use super::*;
use crate::models::{Dimension, Marker, MarkerPlacement};
use chrono::TimeZone;
use chrono_tz::UTC;
fn make_config() -> RepositoryConfiguration {
RepositoryConfiguration::new()
@ -166,7 +149,7 @@ mod tests {
let stream_file = StreamFile::new("20250622-121000 Test File.md")
.with_shard(Shard::new(1, 1).with_markers(vec!["Streamd".to_string()]));
let result = localize_stream_file(&stream_file, &config, UTC).unwrap();
let result = localize_stream_file(&stream_file, &config).unwrap();
assert_eq!(
result.moment,
@ -187,7 +170,7 @@ mod tests {
Shard::new(1, 1).with_markers(vec!["Timesheet".to_string(), "Streamd".to_string()]),
);
let result = localize_stream_file(&stream_file, &config, UTC).unwrap();
let result = localize_stream_file(&stream_file, &config).unwrap();
assert_eq!(
result.moment,
@ -219,7 +202,7 @@ mod tests {
let stream_file = StreamFile::new("20260131-210000 Test File.md")
.with_shard(Shard::new(1, 1).with_markers(vec!["A".to_string(), "B".to_string()]));
let result = localize_stream_file(&stream_file, &config, UTC).unwrap();
let result = localize_stream_file(&stream_file, &config).unwrap();
assert_eq!(result.location.get("project"), Some(&"b".to_string()));
}
@ -243,7 +226,7 @@ mod tests {
let stream_file = StreamFile::new("20260131-210000 Test File.md")
.with_shard(Shard::new(1, 1).with_markers(vec!["A".to_string(), "B".to_string()]));
let result = localize_stream_file(&stream_file, &config, UTC).unwrap();
let result = localize_stream_file(&stream_file, &config).unwrap();
assert_eq!(result.location.get("project"), Some(&"a".to_string()));
}
@ -267,7 +250,7 @@ mod tests {
let stream_file = StreamFile::new("20260131-210000 Test File.md")
.with_shard(Shard::new(1, 1).with_markers(vec!["A".to_string(), "B".to_string()]));
let result = localize_stream_file(&stream_file, &config, UTC).unwrap();
let result = localize_stream_file(&stream_file, &config).unwrap();
assert_eq!(result.location.get("label"), Some(&"b".to_string()));
}
@ -292,7 +275,7 @@ mod tests {
let stream_file = StreamFile::new("20260131-210000 Test File.md")
.with_shard(Shard::new(1, 1).with_markers(vec!["A".to_string(), "B".to_string()]));
let result = localize_stream_file(&stream_file, &config, UTC).unwrap();
let result = localize_stream_file(&stream_file, &config).unwrap();
assert_eq!(result.location.get("label"), Some(&"a".to_string()));
}

View file

@ -12,18 +12,14 @@ fn main() -> miette::Result<()> {
}) => match action {
None => streamd::cli::commands::todo::run_list(show_future)?,
Some(TodoAction::Edit { number }) => streamd::cli::commands::todo::run_edit(number)?,
Some(TodoAction::Done { numbers }) => streamd::cli::commands::todo::run_done(&numbers)?,
Some(TodoAction::Done { number }) => streamd::cli::commands::todo::run_done(number)?,
},
Some(Commands::Edit { number }) => streamd::cli::commands::edit::run(number)?,
Some(Commands::Timesheet { decimal, debug }) => {
streamd::cli::commands::timesheet::run(decimal, debug)?
}
Some(Commands::Daily { date }) => streamd::cli::commands::daily::run(date)?,
Some(Commands::Timesheet) => streamd::cli::commands::timesheet::run()?,
Some(Commands::Completions { shell }) => {
streamd::cli::commands::completions::run(shell);
}
Some(Commands::Lsp) => streamd::cli::commands::lsp::run()?,
None => streamd::cli::commands::daily::run(None)?,
None => streamd::cli::commands::new::run()?,
}
Ok(())

View file

@ -1,5 +1,4 @@
use chrono::{DateTime, Utc};
use chrono_tz::Tz;
use itertools::Itertools;
use crate::error::StreamdError;
@ -36,11 +35,7 @@ fn shards_to_timesheet_points(shards: &[LocalizedShard]) -> Vec<TimesheetPoint>
}
/// Aggregate timesheet points for a single day into a Timesheet.
fn aggregate_timecard_day(
points: &[TimesheetPoint],
now: DateTime<Utc>,
tz: Tz,
) -> Result<Option<Timesheet>, StreamdError> {
fn aggregate_timecard_day(points: &[TimesheetPoint]) -> Result<Option<Timesheet>, StreamdError> {
if points.is_empty() {
return Ok(None);
}
@ -51,23 +46,23 @@ fn aggregate_timecard_day(
pts
};
let card_date = sorted_points[0].moment.with_timezone(&tz).date_naive();
let card_date = sorted_points[0].moment.date_naive();
let mut is_sick_leave = false;
let mut special_day_type: Option<SpecialDayType> = None;
// State machine: starting in "break" mode (not working)
let mut last_is_break = true;
let mut last_time = sorted_points[0].moment.with_timezone(&tz).time();
let mut last_time = sorted_points[0].moment.time();
let mut timecards: Vec<Timecard> = Vec::new();
for point in &sorted_points {
if point.moment.with_timezone(&tz).date_naive() != card_date {
if point.moment.date_naive() != card_date {
return Err(StreamdError::TimesheetError(
"Dates of all given timesheet days should be consistent".to_string(),
));
}
let point_time = point.moment.with_timezone(&tz).time();
let point_time = point.moment.time();
match point.point_type {
TimesheetPointType::Holiday => {
@ -118,17 +113,11 @@ fn aggregate_timecard_day(
// Check that we ended in break mode
if !last_is_break {
let now_local = now.with_timezone(&tz);
if card_date == now_local.date_naive() {
// No closing break yet for today — artificially close at now
timecards.push(Timecard::new(last_time, now_local.time()));
} else {
return Err(StreamdError::TimesheetError(format!(
"Last Timecard of {} is not a break!",
card_date
)));
}
}
// Only return a timesheet if there's meaningful data
if timecards.is_empty() && !is_sick_leave && special_day_type.is_none() {
@ -144,24 +133,17 @@ fn aggregate_timecard_day(
}
/// Aggregate timesheet points into timesheets, grouped by day.
fn aggregate_timecards(
points: &[TimesheetPoint],
now: DateTime<Utc>,
tz: Tz,
) -> Result<Vec<Timesheet>, StreamdError> {
fn aggregate_timecards(points: &[TimesheetPoint]) -> Result<Vec<Timesheet>, StreamdError> {
let mut timesheets = Vec::new();
// Sort points by moment to ensure proper grouping
let mut sorted_points = points.to_vec();
sorted_points.sort_by_key(|p| p.moment);
// Group by local date in the configured timezone
for (_date, group) in &sorted_points
.iter()
.chunk_by(|p| p.moment.with_timezone(&tz).date_naive())
{
// Group by date
for (_date, group) in &sorted_points.iter().chunk_by(|p| p.moment.date_naive()) {
let day_points: Vec<_> = group.cloned().collect();
if let Some(timesheet) = aggregate_timecard_day(&day_points, now, tz)? {
if let Some(timesheet) = aggregate_timecard_day(&day_points)? {
timesheets.push(timesheet);
}
}
@ -170,13 +152,9 @@ fn aggregate_timecards(
}
/// Extract timesheets from localized shards.
pub fn extract_timesheets(
shards: &[LocalizedShard],
now: DateTime<Utc>,
tz: Tz,
) -> Result<Vec<Timesheet>, StreamdError> {
pub fn extract_timesheets(shards: &[LocalizedShard]) -> Result<Vec<Timesheet>, StreamdError> {
let points = shards_to_timesheet_points(shards);
aggregate_timecards(&points, now, tz)
aggregate_timecards(&points)
}
#[cfg(test)]
@ -185,13 +163,6 @@ mod tests {
use chrono::{NaiveTime, TimeZone};
use indexmap::IndexMap;
use chrono_tz::UTC;
/// A fixed "now" in the past, so tests never match today.
fn past_now() -> DateTime<Utc> {
Utc.with_ymd_and_hms(2020, 1, 1, 0, 0, 0).unwrap()
}
fn point(at: DateTime<Utc>, point_type: TimesheetPointType) -> LocalizedShard {
let mut location = IndexMap::new();
location.insert(
@ -227,7 +198,7 @@ mod tests {
),
];
let result = extract_timesheets(&shards, past_now(), UTC).unwrap();
let result = extract_timesheets(&shards).unwrap();
assert_eq!(result.len(), 1);
assert_eq!(result[0].date, day.date_naive());
@ -280,7 +251,7 @@ mod tests {
),
];
let result = extract_timesheets(&shards, past_now(), UTC).unwrap();
let result = extract_timesheets(&shards).unwrap();
assert_eq!(result.len(), 1);
assert_eq!(result[0].timecards.len(), 3);
@ -331,7 +302,7 @@ mod tests {
),
];
let result = extract_timesheets(&shards, past_now(), UTC).unwrap();
let result = extract_timesheets(&shards).unwrap();
assert_eq!(result.len(), 1);
assert_eq!(result[0].timecards.len(), 3);
@ -365,7 +336,7 @@ mod tests {
),
];
let result = extract_timesheets(&shards, past_now(), UTC).unwrap();
let result = extract_timesheets(&shards).unwrap();
assert_eq!(result.len(), 2);
assert_eq!(result[0].date, day1.date_naive());
@ -388,7 +359,7 @@ mod tests {
),
];
let result = extract_timesheets(&shards, past_now(), UTC).unwrap();
let result = extract_timesheets(&shards).unwrap();
assert_eq!(result.len(), 1);
assert_eq!(result[0].special_day_type, Some(SpecialDayType::Vacation));
@ -411,7 +382,7 @@ mod tests {
),
];
let result = extract_timesheets(&shards, past_now(), UTC).unwrap();
let result = extract_timesheets(&shards).unwrap();
assert_eq!(result.len(), 1);
assert_eq!(result[0].special_day_type, Some(SpecialDayType::Holiday));
@ -433,7 +404,7 @@ mod tests {
),
];
let result = extract_timesheets(&shards, past_now(), UTC).unwrap();
let result = extract_timesheets(&shards).unwrap();
assert_eq!(result.len(), 1);
assert_eq!(result[0].special_day_type, Some(SpecialDayType::Undertime));
@ -460,7 +431,7 @@ mod tests {
),
];
let result = extract_timesheets(&shards, past_now(), UTC).unwrap();
let result = extract_timesheets(&shards).unwrap();
assert_eq!(result.len(), 1);
assert!(result[0].is_sick_leave);
@ -483,7 +454,7 @@ mod tests {
),
];
let result = extract_timesheets(&shards, past_now(), UTC).unwrap();
let result = extract_timesheets(&shards).unwrap();
assert_eq!(result.len(), 1);
assert!(result[0].is_sick_leave);
@ -492,7 +463,7 @@ mod tests {
#[test]
fn test_empty_input() {
let result = extract_timesheets(&[], past_now(), UTC).unwrap();
let result = extract_timesheets(&[]).unwrap();
assert!(result.is_empty());
}
@ -512,7 +483,7 @@ mod tests {
),
];
let result = extract_timesheets(&shards, past_now(), UTC);
let result = extract_timesheets(&shards);
assert!(result.is_err());
let err = result.unwrap_err();
@ -540,7 +511,7 @@ mod tests {
),
];
let result = extract_timesheets(&shards, past_now(), UTC);
let result = extract_timesheets(&shards);
assert!(result.is_err());
let err = result.unwrap_err();
@ -563,7 +534,7 @@ mod tests {
),
];
let result = extract_timesheets(&shards, past_now(), UTC).unwrap();
let result = extract_timesheets(&shards).unwrap();
assert!(result.is_empty());
}

View file

@ -2,8 +2,7 @@ use std::collections::HashMap;
use std::fs;
use std::path::Path;
use chrono::{DateTime, Datelike, NaiveDate, Utc, Weekday};
use chrono_tz::Tz;
use chrono::{Datelike, NaiveDate, Weekday};
use crate::error::StreamdError;
use crate::models::{SpecialDayType, Timesheet};
@ -31,14 +30,14 @@ pub fn load_repository_config(base_folder: &Path) -> Result<RepositoryConfig, St
Ok(config)
}
/// Calculate total minutes worked from timecards.
fn calculate_timecard_minutes(timesheet: &Timesheet) -> i64 {
/// Calculate total hours worked from timecards.
fn calculate_timecard_hours(timesheet: &Timesheet) -> f64 {
timesheet
.timecards
.iter()
.map(|tc| {
let duration = tc.to_time - tc.from_time;
duration.num_minutes()
duration.num_minutes() as f64 / 60.0
})
.sum()
}
@ -80,35 +79,31 @@ fn determine_day_type(date: NaiveDate, timesheet: Option<&Timesheet>, has_period
DayType::Missing
}
/// Calculate expected minutes for a day based on period config and day type.
fn calculate_expected_minutes(day_type: DayType, minutes_per_day: i64) -> i64 {
/// Calculate expected hours for a day based on period config and day type.
fn calculate_expected_hours(day_type: DayType, hours_per_day: f64, _date: NaiveDate) -> f64 {
match day_type {
DayType::Regular => minutes_per_day,
DayType::SickLeave => minutes_per_day,
DayType::Vacation => minutes_per_day,
DayType::Holiday => 0,
DayType::FlexDay => minutes_per_day,
DayType::Weekend => 0,
DayType::Missing => minutes_per_day,
DayType::OutsidePeriod => 0,
DayType::Regular => hours_per_day,
DayType::SickLeave => hours_per_day,
DayType::Vacation => hours_per_day,
DayType::Holiday => 0.0,
DayType::FlexDay => hours_per_day,
DayType::Weekend => 0.0,
DayType::Missing => hours_per_day,
DayType::OutsidePeriod => 0.0,
}
}
/// Calculate actual minutes for a day based on day type rules.
fn calculate_actual_minutes(
day_type: DayType,
timecard_minutes: i64,
expected_minutes: i64,
) -> i64 {
/// Calculate actual hours for a day based on day type rules.
fn calculate_actual_hours(day_type: DayType, timecard_hours: f64, expected_hours: f64) -> f64 {
match day_type {
DayType::Regular => timecard_minutes,
DayType::SickLeave => expected_minutes.max(timecard_minutes),
DayType::Vacation => expected_minutes + timecard_minutes,
DayType::Holiday => timecard_minutes,
DayType::FlexDay => 0,
DayType::Weekend => timecard_minutes,
DayType::Missing => 0,
DayType::OutsidePeriod => timecard_minutes,
DayType::Regular => timecard_hours,
DayType::SickLeave => expected_hours.max(timecard_hours),
DayType::Vacation => expected_hours + timecard_hours,
DayType::Holiday => timecard_hours,
DayType::FlexDay => 0.0,
DayType::Weekend => timecard_hours,
DayType::Missing => 0.0,
DayType::OutsidePeriod => timecard_hours,
}
}
@ -116,8 +111,6 @@ fn calculate_actual_minutes(
pub fn generate_report(
timesheets: &[Timesheet],
config: &TimesheetConfig,
now: DateTime<Utc>,
tz: Tz,
) -> Result<TimesheetReport, StreamdError> {
if config.periods.is_empty() {
return Ok(TimesheetReport::new());
@ -131,14 +124,14 @@ pub fn generate_report(
let earliest_period_start = config.periods.iter().map(|p| p.start).min().unwrap();
let latest_period_end = config.periods.iter().map(|p| p.end).max().unwrap();
// Limit to today in the configured timezone
let today = now.with_timezone(&tz).date_naive();
// Limit to today
let today = chrono::Local::now().date_naive();
let end_date = latest_period_end.min(today);
// Group by month and generate reports
let mut month_reports: Vec<MonthReport> = Vec::new();
let mut all_warnings: Vec<ReportWarning> = Vec::new();
let mut cumulative_balance: i64 = 0;
let mut cumulative_balance: f64 = 0.0;
// Iterate through all dates in the range
let mut current_date = earliest_period_start;
@ -167,32 +160,29 @@ pub fn generate_report(
// Find if this date falls within a period
let period = config.find_period(current_date);
let has_period = period.is_some();
let minutes_per_day = period
.map(|p| (p.hours_per_day() * 60.0).round() as i64)
.unwrap_or(0);
let hours_per_day = period.map(|p| p.hours_per_day()).unwrap_or(0.0);
// Get timesheet for this date
let timesheet = timesheets_by_date.get(&current_date).copied();
let timecard_minutes = timesheet.map(calculate_timecard_minutes).unwrap_or(0);
let timecard_hours = timesheet.map(calculate_timecard_hours).unwrap_or(0.0);
// Determine day type
let day_type = determine_day_type(current_date, timesheet, has_period);
// Skip weekends with no work and days outside periods with no work
let should_include = match day_type {
DayType::Weekend => timecard_minutes > 0,
DayType::OutsidePeriod => timecard_minutes > 0,
DayType::Weekend => timecard_hours > 0.0,
DayType::OutsidePeriod => timecard_hours > 0.0,
_ => has_period, // Only include days within periods
};
if should_include {
// Calculate expected and actual minutes
let expected_minutes = calculate_expected_minutes(day_type, minutes_per_day);
let actual_minutes =
calculate_actual_minutes(day_type, timecard_minutes, expected_minutes);
// Calculate expected and actual hours
let expected_hours = calculate_expected_hours(day_type, hours_per_day, current_date);
let actual_hours = calculate_actual_hours(day_type, timecard_hours, expected_hours);
let mut day_report =
DayReport::new(current_date, expected_minutes, actual_minutes, day_type);
DayReport::new(current_date, expected_hours, actual_hours, day_type);
// Collect warnings
let mut day_warnings: Vec<DayWarning> = Vec::new();
@ -217,9 +207,9 @@ pub fn generate_report(
}
// Warning: Work outside period
if day_type == DayType::OutsidePeriod && timecard_minutes > 0 {
if day_type == DayType::OutsidePeriod && timecard_hours > 0.0 {
let warning = DayWarning::OutsidePeriod {
minutes_worked: timecard_minutes,
hours_worked: timecard_hours,
};
day_warnings.push(warning.clone());
all_warnings.push(ReportWarning::new(current_date, warning));
@ -263,13 +253,7 @@ mod tests {
use super::*;
use crate::models::Timecard;
use crate::timesheet::Period;
use chrono::{NaiveTime, TimeZone};
use chrono_tz::UTC;
/// A "now" well past all test dates so report limits aren't hit.
fn future_now() -> DateTime<Utc> {
Utc.with_ymd_and_hms(2030, 1, 1, 0, 0, 0).unwrap()
}
use chrono::NaiveTime;
fn date(year: i32, month: u32, day: u32) -> NaiveDate {
NaiveDate::from_ymd_opt(year, month, day).unwrap()
@ -302,17 +286,17 @@ mod tests {
}
#[test]
fn test_calculate_timecard_minutes() {
fn test_calculate_timecard_hours() {
let ts = make_timesheet(date(2026, 3, 2), vec![(9, 0, 12, 0), (13, 0, 17, 0)]);
let minutes = calculate_timecard_minutes(&ts);
assert_eq!(minutes, 420); // 3h + 4h = 7h = 420 min
let hours = calculate_timecard_hours(&ts);
assert!((hours - 7.0).abs() < 0.0001);
}
#[test]
fn test_calculate_timecard_minutes_with_minutes() {
fn test_calculate_timecard_hours_with_minutes() {
let ts = make_timesheet(date(2026, 3, 2), vec![(9, 0, 12, 30), (13, 0, 17, 15)]);
let minutes = calculate_timecard_minutes(&ts);
assert_eq!(minutes, 465); // 3.5h + 4.25h = 7.75h = 465 min
let hours = calculate_timecard_hours(&ts);
assert!((hours - 7.75).abs() < 0.0001);
}
#[test]
@ -392,61 +376,61 @@ mod tests {
}
#[test]
fn test_expected_minutes_regular() {
let minutes = calculate_expected_minutes(DayType::Regular, 456); // 7.6h = 456 min
assert_eq!(minutes, 456);
fn test_expected_hours_regular() {
let hours = calculate_expected_hours(DayType::Regular, 7.6, date(2026, 3, 2));
assert!((hours - 7.6).abs() < 0.0001);
}
#[test]
fn test_expected_minutes_holiday() {
let minutes = calculate_expected_minutes(DayType::Holiday, 456);
assert_eq!(minutes, 0);
fn test_expected_hours_holiday() {
let hours = calculate_expected_hours(DayType::Holiday, 7.6, date(2026, 3, 2));
assert!((hours - 0.0).abs() < 0.0001);
}
#[test]
fn test_expected_minutes_weekend() {
let minutes = calculate_expected_minutes(DayType::Weekend, 456);
assert_eq!(minutes, 0);
fn test_expected_hours_weekend() {
let hours = calculate_expected_hours(DayType::Weekend, 7.6, date(2026, 3, 7));
assert!((hours - 0.0).abs() < 0.0001);
}
#[test]
fn test_actual_minutes_regular() {
let minutes = calculate_actual_minutes(DayType::Regular, 480, 456); // 8h, expected 7.6h
assert_eq!(minutes, 480);
fn test_actual_hours_regular() {
let hours = calculate_actual_hours(DayType::Regular, 8.0, 7.6);
assert!((hours - 8.0).abs() < 0.0001);
}
#[test]
fn test_actual_minutes_sick_leave_max() {
fn test_actual_hours_sick_leave_max() {
// Sick leave: max(expected, worked)
let minutes = calculate_actual_minutes(DayType::SickLeave, 180, 456); // 3h worked, 7.6h expected
assert_eq!(minutes, 456);
let hours = calculate_actual_hours(DayType::SickLeave, 3.0, 7.6);
assert!((hours - 7.6).abs() < 0.0001);
}
#[test]
fn test_actual_minutes_sick_leave_worked_more() {
fn test_actual_hours_sick_leave_worked_more() {
// Sick leave where worked > expected
let minutes = calculate_actual_minutes(DayType::SickLeave, 540, 456); // 9h worked, 7.6h expected
assert_eq!(minutes, 540);
let hours = calculate_actual_hours(DayType::SickLeave, 9.0, 7.6);
assert!((hours - 9.0).abs() < 0.0001);
}
#[test]
fn test_actual_minutes_vacation() {
fn test_actual_hours_vacation() {
// Vacation: expected + worked
let minutes = calculate_actual_minutes(DayType::Vacation, 120, 456); // 2h worked, 7.6h expected
assert_eq!(minutes, 576); // 2h + 7.6h = 9.6h = 576 min
let hours = calculate_actual_hours(DayType::Vacation, 2.0, 7.6);
assert!((hours - 9.6).abs() < 0.0001);
}
#[test]
fn test_actual_minutes_flex_day() {
fn test_actual_hours_flex_day() {
// Flex day: always 0
let minutes = calculate_actual_minutes(DayType::FlexDay, 300, 456);
assert_eq!(minutes, 0);
let hours = calculate_actual_hours(DayType::FlexDay, 5.0, 7.6);
assert!((hours - 0.0).abs() < 0.0001);
}
#[test]
fn test_generate_report_empty_config() {
let config = TimesheetConfig { periods: vec![] };
let report = generate_report(&[], &config, future_now(), UTC).unwrap();
let report = generate_report(&[], &config).unwrap();
assert!(report.months.is_empty());
}
@ -456,15 +440,15 @@ mod tests {
let timesheets = vec![make_timesheet(date(2026, 3, 2), vec![(9, 0, 17, 0)])];
let config = make_config(date(2026, 3, 2), date(2026, 3, 2), 40.0);
let report = generate_report(&timesheets, &config, future_now(), UTC).unwrap();
let report = generate_report(&timesheets, &config).unwrap();
assert_eq!(report.months.len(), 1);
assert_eq!(report.months[0].days.len(), 1);
let day = &report.months[0].days[0];
assert_eq!(day.date, date(2026, 3, 2));
assert_eq!(day.expected_minutes, 480); // 8h = 480 min
assert_eq!(day.actual_minutes, 480);
assert!((day.expected_hours - 8.0).abs() < 0.0001);
assert!((day.actual_hours - 8.0).abs() < 0.0001);
assert_eq!(day.day_type, DayType::Regular);
}
@ -475,7 +459,7 @@ mod tests {
// March 2 is Monday, March 3 is Tuesday
let config = make_config(date(2026, 3, 2), date(2026, 3, 3), 40.0);
let report = generate_report(&timesheets, &config, future_now(), UTC).unwrap();
let report = generate_report(&timesheets, &config).unwrap();
assert_eq!(report.months[0].days.len(), 2);
@ -493,7 +477,7 @@ mod tests {
let timesheets = vec![make_timesheet(date(2026, 3, 2), vec![(9, 0, 17, 0)])];
let config = make_config(date(2026, 3, 2), date(2026, 3, 8), 40.0);
let report = generate_report(&timesheets, &config, future_now(), UTC).unwrap();
let report = generate_report(&timesheets, &config).unwrap();
// Should only include Mon-Fri (5 days), not Sat-Sun
let days = &report.months[0].days;
@ -512,7 +496,7 @@ mod tests {
];
let config = make_config(date(2026, 3, 2), date(2026, 3, 8), 40.0);
let report = generate_report(&timesheets, &config, future_now(), UTC).unwrap();
let report = generate_report(&timesheets, &config).unwrap();
// Should include Saturday
let has_saturday = report.months[0]
@ -535,7 +519,7 @@ mod tests {
};
let config = make_config(date(2026, 3, 2), date(2026, 3, 2), 40.0);
let report = generate_report(&[ts], &config, future_now(), UTC).unwrap();
let report = generate_report(&[ts], &config).unwrap();
assert!(report.has_warnings());
assert!(report.months[0].days[0].has_warnings());
@ -550,9 +534,9 @@ mod tests {
];
let config = make_config(date(2026, 3, 2), date(2026, 3, 3), 40.0);
let report = generate_report(&timesheets, &config, future_now(), UTC).unwrap();
let report = generate_report(&timesheets, &config).unwrap();
// Balance should be +120 min (+2h: 18h actual - 16h expected)
assert_eq!(report.cumulative_balance, 120);
// Balance should be +2h (18h actual - 16h expected)
assert!((report.cumulative_balance - 2.0).abs() < 0.0001);
}
}

View file

@ -48,7 +48,7 @@ pub enum DayWarning {
second: (NaiveTime, NaiveTime),
},
/// Work logged outside any configured period.
OutsidePeriod { minutes_worked: i64 },
OutsidePeriod { hours_worked: f64 },
}
impl fmt::Display for DayWarning {
@ -67,12 +67,8 @@ impl fmt::Display for DayWarning {
second.1.format("%H:%M")
)
}
DayWarning::OutsidePeriod { minutes_worked } => {
write!(
f,
"{:.1}h worked (no period configured)",
*minutes_worked as f64 / 60.0
)
DayWarning::OutsidePeriod { hours_worked } => {
write!(f, "{:.1}h worked (no period configured)", hours_worked)
}
}
}
@ -82,23 +78,18 @@ impl fmt::Display for DayWarning {
#[derive(Debug, Clone)]
pub struct DayReport {
pub date: NaiveDate,
pub expected_minutes: i64,
pub actual_minutes: i64,
pub expected_hours: f64,
pub actual_hours: f64,
pub day_type: DayType,
pub warnings: Vec<DayWarning>,
}
impl DayReport {
pub fn new(
date: NaiveDate,
expected_minutes: i64,
actual_minutes: i64,
day_type: DayType,
) -> Self {
pub fn new(date: NaiveDate, expected_hours: f64, actual_hours: f64, day_type: DayType) -> Self {
Self {
date,
expected_minutes,
actual_minutes,
expected_hours,
actual_hours,
day_type,
warnings: Vec::new(),
}
@ -114,9 +105,9 @@ impl DayReport {
self
}
/// Calculate the difference between actual and expected minutes.
pub fn diff(&self) -> i64 {
self.actual_minutes - self.expected_minutes
/// Calculate the difference between actual and expected hours.
pub fn diff(&self) -> f64 {
self.actual_hours - self.expected_hours
}
/// Check if this day has any warnings.
@ -147,18 +138,18 @@ impl MonthReport {
self
}
/// Calculate total expected minutes for the month.
pub fn total_expected(&self) -> i64 {
self.days.iter().map(|d| d.expected_minutes).sum()
/// Calculate total expected hours for the month.
pub fn total_expected(&self) -> f64 {
self.days.iter().map(|d| d.expected_hours).sum()
}
/// Calculate total actual minutes for the month.
pub fn total_actual(&self) -> i64 {
self.days.iter().map(|d| d.actual_minutes).sum()
/// Calculate total actual hours for the month.
pub fn total_actual(&self) -> f64 {
self.days.iter().map(|d| d.actual_hours).sum()
}
/// Calculate the difference for the month.
pub fn diff(&self) -> i64 {
pub fn diff(&self) -> f64 {
self.total_actual() - self.total_expected()
}
@ -187,7 +178,7 @@ impl ReportWarning {
#[derive(Debug, Clone)]
pub struct TimesheetReport {
pub months: Vec<MonthReport>,
pub cumulative_balance: i64,
pub cumulative_balance: f64,
pub warnings: Vec<ReportWarning>,
}
@ -195,7 +186,7 @@ impl TimesheetReport {
pub fn new() -> Self {
Self {
months: Vec::new(),
cumulative_balance: 0,
cumulative_balance: 0.0,
warnings: Vec::new(),
}
}
@ -205,7 +196,7 @@ impl TimesheetReport {
self
}
pub fn with_cumulative_balance(mut self, balance: i64) -> Self {
pub fn with_cumulative_balance(mut self, balance: f64) -> Self {
self.cumulative_balance = balance;
self
}
@ -241,30 +232,27 @@ mod tests {
#[test]
fn test_day_report_diff() {
// 7.6h = 456 min, 8.2h = 492 min, diff = 36 min
let report = DayReport::new(date(2026, 3, 2), 456, 492, DayType::Regular);
assert_eq!(report.diff(), 36);
let report = DayReport::new(date(2026, 3, 2), 7.6, 8.2, DayType::Regular);
assert!((report.diff() - 0.6).abs() < 0.0001);
}
#[test]
fn test_day_report_negative_diff() {
// 7.6h = 456 min, 6.0h = 360 min, diff = -96 min
let report = DayReport::new(date(2026, 3, 2), 456, 360, DayType::Regular);
assert_eq!(report.diff(), -96);
let report = DayReport::new(date(2026, 3, 2), 7.6, 6.0, DayType::Regular);
assert!((report.diff() - (-1.6)).abs() < 0.0001);
}
#[test]
fn test_month_report_totals() {
// 7.6h = 456 min, 8.2h = 492 min, 6.0h = 360 min
let month = MonthReport::new(2026, 3).with_days(vec![
DayReport::new(date(2026, 3, 2), 456, 492, DayType::Regular),
DayReport::new(date(2026, 3, 3), 456, 456, DayType::Regular),
DayReport::new(date(2026, 3, 4), 456, 360, DayType::Regular),
DayReport::new(date(2026, 3, 2), 7.6, 8.2, DayType::Regular),
DayReport::new(date(2026, 3, 3), 7.6, 7.6, DayType::Regular),
DayReport::new(date(2026, 3, 4), 7.6, 6.0, DayType::Regular),
]);
assert_eq!(month.total_expected(), 1368); // 456 * 3
assert_eq!(month.total_actual(), 1308); // 492 + 456 + 360
assert_eq!(month.diff(), -60); // -1 hour
assert!((month.total_expected() - 22.8).abs() < 0.0001);
assert!((month.total_actual() - 21.8).abs() < 0.0001);
assert!((month.diff() - (-1.0)).abs() < 0.0001);
}
#[test]
@ -293,15 +281,13 @@ mod tests {
#[test]
fn test_day_warning_outside_period_display() {
let warning = DayWarning::OutsidePeriod {
minutes_worked: 210,
}; // 3.5h
let warning = DayWarning::OutsidePeriod { hours_worked: 3.5 };
assert_eq!(warning.to_string(), "3.5h worked (no period configured)");
}
#[test]
fn test_day_report_with_warnings() {
let report = DayReport::new(date(2026, 3, 2), 456, 492, DayType::Regular).with_warning(
let report = DayReport::new(date(2026, 3, 2), 7.6, 8.2, DayType::Regular).with_warning(
DayWarning::OverlappingTimecards {
first: (time(9, 0), time(12, 30)),
second: (time(12, 0), time(13, 0)),

817
zed-extension/Cargo.lock generated
View file

@ -1,817 +0,0 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 4
[[package]]
name = "adler2"
version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa"
[[package]]
name = "anyhow"
version = "1.0.102"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f202df86484c868dbad7eaa557ef785d5c66295e41b460ef922eca0723b842c"
[[package]]
name = "auditable-serde"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c7bf8143dfc3c0258df908843e169b5cc5fcf76c7718bd66135ef4a9cd558c5"
dependencies = [
"semver",
"serde",
"serde_json",
"topological-sort",
]
[[package]]
name = "bitflags"
version = "2.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c4512299f36f043ab09a583e57bceb5a5aab7a73db1805848e8fef3c9e8c78b3"
[[package]]
name = "cfg-if"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801"
[[package]]
name = "crc32fast"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511"
dependencies = [
"cfg-if",
]
[[package]]
name = "displaydoc"
version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "equivalent"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f"
[[package]]
name = "flate2"
version = "1.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "843fba2746e448b37e26a819579957415c8cef339bf08564fe8b7ddbd959573c"
dependencies = [
"crc32fast",
"miniz_oxide",
]
[[package]]
name = "foldhash"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2"
[[package]]
name = "form_urlencoded"
version = "1.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf"
dependencies = [
"percent-encoding",
]
[[package]]
name = "futures"
version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b147ee9d1f6d097cef9ce628cd2ee62288d963e16fb287bd9286455b241382d"
dependencies = [
"futures-channel",
"futures-core",
"futures-executor",
"futures-io",
"futures-sink",
"futures-task",
"futures-util",
]
[[package]]
name = "futures-channel"
version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "07bbe89c50d7a535e539b8c17bc0b49bdb77747034daa8087407d655f3f7cc1d"
dependencies = [
"futures-core",
"futures-sink",
]
[[package]]
name = "futures-core"
version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7e3450815272ef58cec6d564423f6e755e25379b217b0bc688e295ba24df6b1d"
[[package]]
name = "futures-executor"
version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf29c38818342a3b26b5b923639e7b1f4a61fc5e76102d4b1981c6dc7a7579d"
dependencies = [
"futures-core",
"futures-task",
"futures-util",
]
[[package]]
name = "futures-io"
version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cecba35d7ad927e23624b22ad55235f2239cfa44fd10428eecbeba6d6a717718"
[[package]]
name = "futures-macro"
version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e835b70203e41293343137df5c0664546da5745f82ec9b84d40be8336958447b"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "futures-sink"
version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c39754e157331b013978ec91992bde1ac089843443c49cbc7f46150b0fad0893"
[[package]]
name = "futures-task"
version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "037711b3d59c33004d3856fbdc83b99d4ff37a24768fa1be9ce3538a1cde4393"
[[package]]
name = "futures-util"
version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "389ca41296e6190b48053de0321d02a77f32f8a5d2461dd38762c0593805c6d6"
dependencies = [
"futures-channel",
"futures-core",
"futures-io",
"futures-macro",
"futures-sink",
"futures-task",
"memchr",
"pin-project-lite",
"slab",
]
[[package]]
name = "hashbrown"
version = "0.15.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1"
dependencies = [
"foldhash",
]
[[package]]
name = "hashbrown"
version = "0.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4f467dd6dccf739c208452f8014c75c18bb8301b050ad1cfb27153803edb0f51"
[[package]]
name = "heck"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
[[package]]
name = "icu_collections"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2984d1cd16c883d7935b9e07e44071dca8d917fd52ecc02c04d5fa0b5a3f191c"
dependencies = [
"displaydoc",
"potential_utf",
"utf8_iter",
"yoke",
"zerofrom",
"zerovec",
]
[[package]]
name = "icu_locale_core"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "92219b62b3e2b4d88ac5119f8904c10f8f61bf7e95b640d25ba3075e6cac2c29"
dependencies = [
"displaydoc",
"litemap",
"tinystr",
"writeable",
"zerovec",
]
[[package]]
name = "icu_normalizer"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c56e5ee99d6e3d33bd91c5d85458b6005a22140021cc324cea84dd0e72cff3b4"
dependencies = [
"icu_collections",
"icu_normalizer_data",
"icu_properties",
"icu_provider",
"smallvec",
"zerovec",
]
[[package]]
name = "icu_normalizer_data"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da3be0ae77ea334f4da67c12f149704f19f81d1adf7c51cf482943e84a2bad38"
[[package]]
name = "icu_properties"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bee3b67d0ea5c2cca5003417989af8996f8604e34fb9ddf96208a033901e70de"
dependencies = [
"icu_collections",
"icu_locale_core",
"icu_properties_data",
"icu_provider",
"zerotrie",
"zerovec",
]
[[package]]
name = "icu_properties_data"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e2bbb201e0c04f7b4b3e14382af113e17ba4f63e2c9d2ee626b720cbce54a14"
[[package]]
name = "icu_provider"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "139c4cf31c8b5f33d7e199446eff9c1e02decfc2f0eec2c8d71f65befa45b421"
dependencies = [
"displaydoc",
"icu_locale_core",
"writeable",
"yoke",
"zerofrom",
"zerotrie",
"zerovec",
]
[[package]]
name = "id-arena"
version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3d3067d79b975e8844ca9eb072e16b31c3c1c36928edf9c6789548c524d0d954"
[[package]]
name = "idna"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de"
dependencies = [
"idna_adapter",
"smallvec",
"utf8_iter",
]
[[package]]
name = "idna_adapter"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344"
dependencies = [
"icu_normalizer",
"icu_properties",
]
[[package]]
name = "indexmap"
version = "2.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d466e9454f08e4a911e14806c24e16fba1b4c121d1ea474396f396069cf949d9"
dependencies = [
"equivalent",
"hashbrown 0.17.0",
"serde",
"serde_core",
]
[[package]]
name = "itoa"
version = "1.0.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f42a60cbdf9a97f5d2305f08a87dc4e09308d1276d28c869c684d7777685682"
[[package]]
name = "leb128fmt"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2"
[[package]]
name = "litemap"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "92daf443525c4cce67b150400bc2316076100ce0b3686209eb8cf3c31612e6f0"
[[package]]
name = "log"
version = "0.4.29"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897"
[[package]]
name = "memchr"
version = "2.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f8ca58f447f06ed17d5fc4043ce1b10dd205e060fb3ce5b979b8ed8e59ff3f79"
[[package]]
name = "miniz_oxide"
version = "0.8.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316"
dependencies = [
"adler2",
"simd-adler32",
]
[[package]]
name = "once_cell"
version = "1.21.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9f7c3e4beb33f85d45ae3e3a1792185706c8e16d043238c593331cc7cd313b50"
[[package]]
name = "percent-encoding"
version = "2.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220"
[[package]]
name = "pin-project-lite"
version = "0.2.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a89322df9ebe1c1578d689c92318e070967d1042b512afbe49518723f4e6d5cd"
[[package]]
name = "potential_utf"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0103b1cef7ec0cf76490e969665504990193874ea05c85ff9bab8b911d0a0564"
dependencies = [
"zerovec",
]
[[package]]
name = "prettyplease"
version = "0.2.37"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b"
dependencies = [
"proc-macro2",
"syn",
]
[[package]]
name = "proc-macro2"
version = "1.0.106"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8fd00f0bb2e90d81d1044c2b32617f68fcb9fa3bb7640c23e9c748e53fb30934"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.45"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41f2619966050689382d2b44f664f4bc593e129785a36d6ee376ddf37259b924"
dependencies = [
"proc-macro2",
]
[[package]]
name = "semver"
version = "1.0.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a7852d02fc848982e0c167ef163aaff9cd91dc640ba85e263cb1ce46fae51cd"
dependencies = [
"serde",
"serde_core",
]
[[package]]
name = "serde"
version = "1.0.228"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e"
dependencies = [
"serde_core",
"serde_derive",
]
[[package]]
name = "serde_core"
version = "1.0.228"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.228"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "serde_json"
version = "1.0.149"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86"
dependencies = [
"itoa",
"memchr",
"serde",
"serde_core",
"zmij",
]
[[package]]
name = "simd-adler32"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "703d5c7ef118737c72f1af64ad2f6f8c5e1921f818cdcb97b8fe6fc69bf66214"
[[package]]
name = "slab"
version = "0.4.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0c790de23124f9ab44544d7ac05d60440adc586479ce501c1d6d7da3cd8c9cf5"
[[package]]
name = "smallvec"
version = "1.15.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03"
[[package]]
name = "spdx"
version = "0.10.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c3e17e880bafaeb362a7b751ec46bdc5b61445a188f80e0606e68167cd540fa3"
dependencies = [
"smallvec",
]
[[package]]
name = "stable_deref_trait"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596"
[[package]]
name = "streamd-zed"
version = "0.0.1"
dependencies = [
"zed_extension_api",
]
[[package]]
name = "syn"
version = "2.0.117"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e665b8803e7b1d2a727f4023456bbbbe74da67099c585258af0ad9c5013b9b99"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "synstructure"
version = "0.13.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "tinystr"
version = "0.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c8323304221c2a851516f22236c5722a72eaa19749016521d6dff0824447d96d"
dependencies = [
"displaydoc",
"zerovec",
]
[[package]]
name = "topological-sort"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ea68304e134ecd095ac6c3574494fc62b909f416c4fca77e440530221e549d3d"
[[package]]
name = "unicode-ident"
version = "1.0.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6e4313cd5fcd3dad5cafa179702e2b244f760991f45397d14d4ebf38247da75"
[[package]]
name = "unicode-xid"
version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853"
[[package]]
name = "url"
version = "2.5.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ff67a8a4397373c3ef660812acab3268222035010ab8680ec4215f38ba3d0eed"
dependencies = [
"form_urlencoded",
"idna",
"percent-encoding",
"serde",
]
[[package]]
name = "utf8_iter"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be"
[[package]]
name = "wasm-encoder"
version = "0.227.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "80bb72f02e7fbf07183443b27b0f3d4144abf8c114189f2e088ed95b696a7822"
dependencies = [
"leb128fmt",
"wasmparser",
]
[[package]]
name = "wasm-metadata"
version = "0.227.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ce1ef0faabbbba6674e97a56bee857ccddf942785a336c8b47b42373c922a91d"
dependencies = [
"anyhow",
"auditable-serde",
"flate2",
"indexmap",
"serde",
"serde_derive",
"serde_json",
"spdx",
"url",
"wasm-encoder",
"wasmparser",
]
[[package]]
name = "wasmparser"
version = "0.227.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0f51cad774fb3c9461ab9bccc9c62dfb7388397b5deda31bf40e8108ccd678b2"
dependencies = [
"bitflags",
"hashbrown 0.15.5",
"indexmap",
"semver",
]
[[package]]
name = "wit-bindgen"
version = "0.41.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "10fb6648689b3929d56bbc7eb1acf70c9a42a29eb5358c67c10f54dbd5d695de"
dependencies = [
"wit-bindgen-rt",
"wit-bindgen-rust-macro",
]
[[package]]
name = "wit-bindgen-core"
version = "0.41.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "92fa781d4f2ff6d3f27f3cc9b74a73327b31ca0dc4a3ef25a0ce2983e0e5af9b"
dependencies = [
"anyhow",
"heck",
"wit-parser",
]
[[package]]
name = "wit-bindgen-rt"
version = "0.41.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c4db52a11d4dfb0a59f194c064055794ee6564eb1ced88c25da2cf76e50c5621"
dependencies = [
"bitflags",
"futures",
"once_cell",
]
[[package]]
name = "wit-bindgen-rust"
version = "0.41.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d0809dc5ba19e2e98661bf32fc0addc5a3ca5bf3a6a7083aa6ba484085ff3ce"
dependencies = [
"anyhow",
"heck",
"indexmap",
"prettyplease",
"syn",
"wasm-metadata",
"wit-bindgen-core",
"wit-component",
]
[[package]]
name = "wit-bindgen-rust-macro"
version = "0.41.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ad19eec017904e04c60719592a803ee5da76cb51c81e3f6fbf9457f59db49799"
dependencies = [
"anyhow",
"prettyplease",
"proc-macro2",
"quote",
"syn",
"wit-bindgen-core",
"wit-bindgen-rust",
]
[[package]]
name = "wit-component"
version = "0.227.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "635c3adc595422cbf2341a17fb73a319669cc8d33deed3a48368a841df86b676"
dependencies = [
"anyhow",
"bitflags",
"indexmap",
"log",
"serde",
"serde_derive",
"serde_json",
"wasm-encoder",
"wasm-metadata",
"wasmparser",
"wit-parser",
]
[[package]]
name = "wit-parser"
version = "0.227.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ddf445ed5157046e4baf56f9138c124a0824d4d1657e7204d71886ad8ce2fc11"
dependencies = [
"anyhow",
"id-arena",
"indexmap",
"log",
"semver",
"serde",
"serde_derive",
"serde_json",
"unicode-xid",
"wasmparser",
]
[[package]]
name = "writeable"
version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1ffae5123b2d3fc086436f8834ae3ab053a283cfac8fe0a0b8eaae044768a4c4"
[[package]]
name = "yoke"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "abe8c5fda708d9ca3df187cae8bfb9ceda00dd96231bed36e445a1a48e66f9ca"
dependencies = [
"stable_deref_trait",
"yoke-derive",
"zerofrom",
]
[[package]]
name = "yoke-derive"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "de844c262c8848816172cef550288e7dc6c7b7814b4ee56b3e1553f275f1858e"
dependencies = [
"proc-macro2",
"quote",
"syn",
"synstructure",
]
[[package]]
name = "zed_extension_api"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0729d50b4ca0a7e28e590bbe32e3ca0194d97ef654961451a424c661a366fca0"
dependencies = [
"serde",
"serde_json",
"wit-bindgen",
]
[[package]]
name = "zerofrom"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69faa1f2a1ea75661980b013019ed6687ed0e83d069bc1114e2cc74c6c04c4df"
dependencies = [
"zerofrom-derive",
]
[[package]]
name = "zerofrom-derive"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "11532158c46691caf0f2593ea8358fed6bbf68a0315e80aae9bd41fbade684a1"
dependencies = [
"proc-macro2",
"quote",
"syn",
"synstructure",
]
[[package]]
name = "zerotrie"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0f9152d31db0792fa83f70fb2f83148effb5c1f5b8c7686c3459e361d9bc20bf"
dependencies = [
"displaydoc",
"yoke",
"zerofrom",
]
[[package]]
name = "zerovec"
version = "0.11.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "90f911cbc359ab6af17377d242225f4d75119aec87ea711a880987b18cd7b239"
dependencies = [
"yoke",
"zerofrom",
"zerovec-derive",
]
[[package]]
name = "zerovec-derive"
version = "0.11.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "625dc425cab0dca6dc3c3319506e6593dcb08a9f387ea3b284dbd52a92c40555"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "zmij"
version = "1.0.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b8848ee67ecc8aedbaf3e4122217aff892639231befc6a1b58d29fff4c2cabaa"

View file

@ -1,13 +0,0 @@
[package]
name = "streamd-zed"
version = "0.0.1"
edition = "2021"
[lib]
crate-type = ["cdylib"]
[package.metadata.component]
package = "zed:extension"
[dependencies]
zed_extension_api = "0.7"

View file

@ -1,11 +0,0 @@
id = "streamd"
name = "Streamd"
version = "0.0.1"
schema_version = 1
authors = ["Konstantin Fickel"]
description = "Streamd LSP support for Zed editor"
repository = "https://git.konstantinfickel.de/kfickel/streamd"
[language_servers.streamd]
name = "Streamd LSP"
languages = ["Markdown"]

View file

@ -1,32 +0,0 @@
use zed_extension_api::{self as zed, Command, LanguageServerId, Os, Result, Worktree, current_platform};
/// Unit struct serving as the Zed extension entry point; carries no state.
struct StreamdExtension;
impl zed::Extension for StreamdExtension {
    /// Constructs the stateless extension instance.
    fn new() -> Self {
        StreamdExtension
    }

    /// Returns the command Zed should run to start the streamd language server.
    ///
    /// On Windows the `streamd` binary is invoked through `wsl`; on every
    /// other platform it is executed directly from `PATH`.
    fn language_server_command(
        &mut self,
        _language_server_id: &LanguageServerId,
        _worktree: &Worktree,
    ) -> Result<Command> {
        // Only the OS part of the platform tuple matters here.
        let (os, _) = current_platform();
        let (program, args) = match os {
            // Route through WSL so the Linux binary is reachable from Windows.
            Os::Windows => ("wsl", vec!["streamd".to_string(), "lsp".to_string()]),
            _ => ("streamd", vec!["lsp".to_string()]),
        };
        Ok(Command {
            command: program.into(),
            args,
            env: vec![],
        })
    }
}
// Macro entry point: registers StreamdExtension with the Zed extension host.
zed::register_extension!(StreamdExtension);