Compare commits
20 commits
82796c37df
...
d1b4281f40
| Author | SHA1 | Date | |
|---|---|---|---|
| d1b4281f40 | |||
| d548842fbb | |||
| 3b2d8c7e63 | |||
| a7579a7083 | |||
| f29840313d | |||
| 8d07a86fc4 | |||
| ed493cff29 | |||
| 20a3e8b437 | |||
| d89ad8b131 | |||
| ca6b5bbd4d | |||
| ce5e476b23 | |||
| b4848bb661 | |||
| f9ed0463f7 | |||
| af2debc19b | |||
| 49cd9bcfa0 | |||
| 1e203d9db3 | |||
| 2298bdaa8f | |||
| 646241f355 | |||
| c0911307fd | |||
| 34ba9869d1 |
72 changed files with 6179 additions and 3585 deletions
2
.envrc
2
.envrc
|
|
@ -1 +1 @@
|
||||||
use flake .#impure
|
use flake
|
||||||
|
|
|
||||||
|
|
@ -5,7 +5,7 @@ on:
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
build-and-lint:
|
check:
|
||||||
name: Lint, Check & Test
|
name: Lint, Check & Test
|
||||||
runs-on: nix
|
runs-on: nix
|
||||||
|
|
||||||
|
|
@ -16,8 +16,12 @@ jobs:
|
||||||
- run: nix --version
|
- run: nix --version
|
||||||
- run: nix flake check
|
- run: nix flake check
|
||||||
|
|
||||||
- name: Install the project
|
build:
|
||||||
run: 'nix develop .#impure --command bash -c "uv sync --locked --all-extras --dev"'
|
name: Build Package
|
||||||
|
runs-on: nix
|
||||||
|
|
||||||
- name: Test with PyTest
|
steps:
|
||||||
run: nix develop .#impure --command bash -c "uv run pytest --junit-xml test-report.xml"
|
- name: Check out Repository
|
||||||
|
uses: https://git.konstantinfickel.de/actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||||
|
|
||||||
|
- run: nix build
|
||||||
|
|
|
||||||
195
.gitignore
vendored
195
.gitignore
vendored
|
|
@ -1,179 +1,24 @@
|
||||||
# Created by https://www.toptal.com/developers/gitignore/api/python
|
# Rust build artifacts
|
||||||
# Edit at https://www.toptal.com/developers/gitignore?templates=python
|
/target/
|
||||||
|
|
||||||
### Python ###
|
# IDE
|
||||||
# Byte-compiled / optimized / DLL files
|
.idea/
|
||||||
__pycache__/
|
.vscode/
|
||||||
*.py[cod]
|
*.swp
|
||||||
*$py.class
|
*.swo
|
||||||
|
*~
|
||||||
|
|
||||||
# C extensions
|
# OS
|
||||||
*.so
|
.DS_Store
|
||||||
|
Thumbs.db
|
||||||
# Distribution / packaging
|
|
||||||
.Python
|
|
||||||
build/
|
|
||||||
develop-eggs/
|
|
||||||
dist/
|
|
||||||
downloads/
|
|
||||||
eggs/
|
|
||||||
.eggs/
|
|
||||||
lib/
|
|
||||||
lib64/
|
|
||||||
parts/
|
|
||||||
sdist/
|
|
||||||
var/
|
|
||||||
wheels/
|
|
||||||
share/python-wheels/
|
|
||||||
*.egg-info/
|
|
||||||
.installed.cfg
|
|
||||||
*.egg
|
|
||||||
MANIFEST
|
|
||||||
|
|
||||||
# PyInstaller
|
|
||||||
# Usually these files are written by a python script from a template
|
|
||||||
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
|
||||||
*.manifest
|
|
||||||
*.spec
|
|
||||||
|
|
||||||
# Installer logs
|
|
||||||
pip-log.txt
|
|
||||||
pip-delete-this-directory.txt
|
|
||||||
|
|
||||||
# Unit test / coverage reports
|
|
||||||
htmlcov/
|
|
||||||
.tox/
|
|
||||||
.nox/
|
|
||||||
.coverage
|
|
||||||
.coverage.*
|
|
||||||
.cache
|
|
||||||
nosetests.xml
|
|
||||||
coverage.xml
|
|
||||||
*.cover
|
|
||||||
*.py,cover
|
|
||||||
.hypothesis/
|
|
||||||
.pytest_cache/
|
|
||||||
cover/
|
|
||||||
|
|
||||||
# Translations
|
|
||||||
*.mo
|
|
||||||
*.pot
|
|
||||||
|
|
||||||
# Django stuff:
|
|
||||||
*.log
|
|
||||||
local_settings.py
|
|
||||||
db.sqlite3
|
|
||||||
db.sqlite3-journal
|
|
||||||
|
|
||||||
# Flask stuff:
|
|
||||||
instance/
|
|
||||||
.webassets-cache
|
|
||||||
|
|
||||||
# Scrapy stuff:
|
|
||||||
.scrapy
|
|
||||||
|
|
||||||
# Sphinx documentation
|
|
||||||
docs/_build/
|
|
||||||
|
|
||||||
# PyBuilder
|
|
||||||
.pybuilder/
|
|
||||||
target/
|
|
||||||
|
|
||||||
# Jupyter Notebook
|
|
||||||
.ipynb_checkpoints
|
|
||||||
|
|
||||||
# IPython
|
|
||||||
profile_default/
|
|
||||||
ipython_config.py
|
|
||||||
|
|
||||||
# pyenv
|
|
||||||
# For a library or package, you might want to ignore these files since the code is
|
|
||||||
# intended to run in multiple environments; otherwise, check them in:
|
|
||||||
# .python-version
|
|
||||||
|
|
||||||
# pipenv
|
|
||||||
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
|
||||||
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
|
||||||
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
|
||||||
# install all needed dependencies.
|
|
||||||
#Pipfile.lock
|
|
||||||
|
|
||||||
# poetry
|
|
||||||
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
|
|
||||||
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
|
||||||
# commonly ignored for libraries.
|
|
||||||
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
|
|
||||||
#poetry.lock
|
|
||||||
|
|
||||||
# pdm
|
|
||||||
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
|
|
||||||
#pdm.lock
|
|
||||||
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
|
|
||||||
# in version control.
|
|
||||||
# https://pdm.fming.dev/#use-with-ide
|
|
||||||
.pdm.toml
|
|
||||||
|
|
||||||
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
|
|
||||||
__pypackages__/
|
|
||||||
|
|
||||||
# Celery stuff
|
|
||||||
celerybeat-schedule
|
|
||||||
celerybeat.pid
|
|
||||||
|
|
||||||
# SageMath parsed files
|
|
||||||
*.sage.py
|
|
||||||
|
|
||||||
# Environments
|
|
||||||
.env
|
|
||||||
.venv
|
|
||||||
env/
|
|
||||||
venv/
|
|
||||||
ENV/
|
|
||||||
env.bak/
|
|
||||||
venv.bak/
|
|
||||||
|
|
||||||
# Spyder project settings
|
|
||||||
.spyderproject
|
|
||||||
.spyproject
|
|
||||||
|
|
||||||
# Rope project settings
|
|
||||||
.ropeproject
|
|
||||||
|
|
||||||
# mkdocs documentation
|
|
||||||
/site
|
|
||||||
|
|
||||||
# mypy
|
|
||||||
.mypy_cache/
|
|
||||||
.dmypy.json
|
|
||||||
dmypy.json
|
|
||||||
|
|
||||||
# Pyre type checker
|
|
||||||
.pyre/
|
|
||||||
|
|
||||||
# pytype static type analyzer
|
|
||||||
.pytype/
|
|
||||||
|
|
||||||
# Cython debug symbols
|
|
||||||
cython_debug/
|
|
||||||
|
|
||||||
# PyCharm
|
|
||||||
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
|
|
||||||
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
|
|
||||||
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
|
||||||
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
|
||||||
#.idea/
|
|
||||||
|
|
||||||
### Python Patch ###
|
|
||||||
# Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration
|
|
||||||
poetry.toml
|
|
||||||
|
|
||||||
# ruff
|
|
||||||
.ruff_cache/
|
|
||||||
|
|
||||||
# LSP config files
|
|
||||||
pyrightconfig.json
|
|
||||||
|
|
||||||
# End of https://www.toptal.com/developers/gitignore/api/python
|
|
||||||
|
|
||||||
|
# Nix
|
||||||
.direnv
|
.direnv
|
||||||
test-report.xml
|
result
|
||||||
|
result-*
|
||||||
|
|
||||||
|
# Test artifacts
|
||||||
|
*.profraw
|
||||||
|
*.profdata
|
||||||
|
|
||||||
|
.pre-commit-config.yaml
|
||||||
|
|
|
||||||
|
|
@ -1,11 +0,0 @@
|
||||||
repos:
|
|
||||||
- repo: https://github.com/astral-sh/uv-pre-commit
|
|
||||||
rev: 0.7.13
|
|
||||||
hooks:
|
|
||||||
- id: uv-lock
|
|
||||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
|
||||||
rev: v0.12.0
|
|
||||||
hooks:
|
|
||||||
- id: ruff
|
|
||||||
args: [ --fix ]
|
|
||||||
- id: ruff-format
|
|
||||||
|
|
@ -1 +0,0 @@
|
||||||
3.14.3
|
|
||||||
35
CLAUDE.md
Normal file
35
CLAUDE.md
Normal file
|
|
@ -0,0 +1,35 @@
|
||||||
|
# CLAUDE.md
|
||||||
|
|
||||||
|
This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
|
||||||
|
|
||||||
|
## Build & Test Commands
|
||||||
|
|
||||||
|
```bash
|
||||||
|
nix develop # Enter dev shell with Rust toolchain
|
||||||
|
nix build # Build the package
|
||||||
|
nix flake check # Run all checks (clippy, fmt, tests, pre-commit)
|
||||||
|
|
||||||
|
# Inside nix develop:
|
||||||
|
cargo test # Run all tests
|
||||||
|
cargo test test_name # Run a specific test
|
||||||
|
cargo clippy # Lint
|
||||||
|
cargo fmt # Format
|
||||||
|
```
|
||||||
|
|
||||||
|
## Architecture
|
||||||
|
|
||||||
|
Streamd parses markdown files into hierarchical **Shards**, then **localizes** them by assigning temporal moments and dimensional placements based on `@Tag` markers.
|
||||||
|
|
||||||
|
**Data flow:** Markdown → `extract::parser` → `Shard` tree → `localize::shard` → `LocalizedShard` tree
|
||||||
|
|
||||||
|
**Key modules:**
|
||||||
|
- `models/` — Core types: `Shard`, `LocalizedShard`, `Dimension`, `Marker`, `Timecard`
|
||||||
|
- `extract/` — Tag extraction (`tag_extraction.rs`) and markdown parsing (`parser.rs`)
|
||||||
|
- `localize/` — DateTime extraction, configuration merging, shard localization
|
||||||
|
- `timesheet/` — State machine that converts localized shards into timecards
|
||||||
|
- `query/` — Recursive search functions for finding shards by predicate
|
||||||
|
- `cli/` — Clap-based CLI commands
|
||||||
|
|
||||||
|
## Requirements
|
||||||
|
|
||||||
|
`REQUIREMENTS.md` contains the formal specification. Update it along with the `README.md` whenever implementing or changing features.
|
||||||
1334
Cargo.lock
generated
Normal file
1334
Cargo.lock
generated
Normal file
File diff suppressed because it is too large
Load diff
40
Cargo.toml
Normal file
40
Cargo.toml
Normal file
|
|
@ -0,0 +1,40 @@
|
||||||
|
[package]
|
||||||
|
name = "streamd"
|
||||||
|
version = "0.1.0"
|
||||||
|
edition = "2021"
|
||||||
|
description = "Personal knowledge management and time-tracking CLI using @Tag annotations"
|
||||||
|
license = "AGPL-3.0-only"
|
||||||
|
authors = ["Konstantin Fickel"]
|
||||||
|
repository = "https://github.com/konstantinfickel/streamd"
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
clap = { version = "4", features = ["derive", "env"] }
|
||||||
|
clap_complete = "4"
|
||||||
|
serde = { version = "1", features = ["derive"] }
|
||||||
|
toml = "0.9"
|
||||||
|
thiserror = "2"
|
||||||
|
miette = { version = "7", features = ["fancy"] }
|
||||||
|
pulldown-cmark = "0.13"
|
||||||
|
regex = "1"
|
||||||
|
once_cell = "1"
|
||||||
|
chrono = { version = "0.4", features = ["serde"] }
|
||||||
|
walkdir = "2"
|
||||||
|
indexmap = { version = "2", features = ["serde"] }
|
||||||
|
itertools = "0.14"
|
||||||
|
directories = "5"
|
||||||
|
|
||||||
|
[dev-dependencies]
|
||||||
|
pretty_assertions = "=1.4.1"
|
||||||
|
tempfile = "=3.27.0"
|
||||||
|
|
||||||
|
[[bin]]
|
||||||
|
name = "streamd"
|
||||||
|
path = "src/main.rs"
|
||||||
|
|
||||||
|
[lib]
|
||||||
|
name = "streamd"
|
||||||
|
path = "src/lib.rs"
|
||||||
|
|
||||||
|
[profile.release]
|
||||||
|
lto = true
|
||||||
|
strip = true
|
||||||
37
README.md
37
README.md
|
|
@ -1,7 +1,36 @@
|
||||||
# streamer
|
# strea.md
|
||||||
|
|
||||||
Searching for `@tags` in time-based [streams](https://www.cs.yale.edu/homes/freeman/lifestreams.html).
|

|
||||||
|
|
||||||
# Usage
|
Strea.md is a personal knowledge management and time-tracking CLI tool. It organizes time-ordered markdown files using `@tag` annotations, letting you manage tasks, track time, and query your notes from the terminal.
|
||||||
|
|
||||||
Running `streamer` finds all lines with @Task
|
## Core Concepts
|
||||||
|
|
||||||
|
- **Shards** — Sections of markdown files, organized hierarchically by headings. Each shard can contain markers, tags, and nested child shards.
|
||||||
|
- **Markers** — Special `@tags` like `@Task`, `@Done`, `@Waiting`, or `@Timesheet` that give shards semantic meaning and place them into dimensions.
|
||||||
|
- **Dimensions** — Classification axes (e.g. task state, project, timesheet) that categorize shards. Some dimensions propagate to child shards.
|
||||||
|
|
||||||
|
## File Format
|
||||||
|
|
||||||
|
Markdown files are named with a timestamp: `YYYYMMDD-HHMMSS [markers].md`
|
||||||
|
|
||||||
|
For example: `20260131-210000 Task Streamd.md`
|
||||||
|
|
||||||
|
Within files, `@`-prefixed markers at the beginning of paragraphs or headings define how a shard is categorized.
|
||||||
|
|
||||||
|
## Commands
|
||||||
|
|
||||||
|
- `streamd` / `streamd new` — Create a new timestamped markdown entry, opening your editor
|
||||||
|
- `streamd todo` — Show all open tasks (shards with `@Task` markers)
|
||||||
|
- `streamd edit [number]` — Edit a stream file by index (most recent first)
|
||||||
|
- `streamd timesheet` — Generate time reports from `@Timesheet` markers
|
||||||
|
|
||||||
|
## Configuration
|
||||||
|
|
||||||
|
Streamd reads its configuration from `~/.config/streamd/config.toml` (XDG standard). The main setting is `base_folder`, which points to the directory containing your stream files (defaults to the current working directory).
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
Running `streamd` opens your editor to create a new entry. After saving, the file is renamed based on its timestamp and any markers found in the content.
|
||||||
|
|
||||||
|
Running `streamd todo` finds all shards marked as open tasks and displays them as rich-formatted panels in your terminal.
|
||||||
|
|
|
||||||
375
REQUIREMENTS.md
Normal file
375
REQUIREMENTS.md
Normal file
|
|
@ -0,0 +1,375 @@
|
||||||
|
# Streamd Requirements
|
||||||
|
|
||||||
|
Streamd (stylized as "Strea.md") is a personal knowledge management and time-tracking CLI tool that organizes time-ordered markdown files using `@Tag` annotations.
|
||||||
|
|
||||||
|
## Core Concepts
|
||||||
|
|
||||||
|
### Shard
|
||||||
|
|
||||||
|
A **Shard** is the fundamental unit of content. It represents a section of a markdown file (paragraph, heading, list item) that can contain markers and tags.
|
||||||
|
|
||||||
|
```
|
||||||
|
Shard {
|
||||||
|
markers: [String] // @Tag annotations at START of content
|
||||||
|
tags: [String] // @Tag annotations AFTER content begins
|
||||||
|
start_line: int
|
||||||
|
end_line: int
|
||||||
|
children: [Shard] // Nested shards (hierarchical)
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### LocalizedShard
|
||||||
|
|
||||||
|
A **LocalizedShard** extends Shard with temporal and dimensional placement information.
|
||||||
|
|
||||||
|
```
|
||||||
|
LocalizedShard {
|
||||||
|
markers: [String]
|
||||||
|
tags: [String]
|
||||||
|
start_line: int
|
||||||
|
end_line: int
|
||||||
|
moment: DateTime // When this entry was created
|
||||||
|
location: Map<String, String> // Dimension placements
|
||||||
|
children: [LocalizedShard]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Tag Extraction Logic
|
||||||
|
|
||||||
|
### R1: Tag Recognition Pattern
|
||||||
|
|
||||||
|
Tags are recognized by the regex pattern: `@([^\s*\x60~\[\]]+)`
|
||||||
|
|
||||||
|
A tag is `@` followed by word characters, excluding:
|
||||||
|
- Whitespace
|
||||||
|
- Asterisks `*`
|
||||||
|
- Backticks `` ` ``
|
||||||
|
- Tildes `~`
|
||||||
|
- Brackets `[]`
|
||||||
|
|
||||||
|
**Examples of valid tags:**
|
||||||
|
- `@Task`, `@Done`, `@Waiting`
|
||||||
|
- `@Timesheet`, `@Break`
|
||||||
|
- `@ProjectName`, `@Client-ABC`
|
||||||
|
|
||||||
|
### R2: Marker vs Tag Distinction
|
||||||
|
|
||||||
|
The extraction MUST distinguish between **markers** and **tags** based on their position within a block:
|
||||||
|
|
||||||
|
| Type | Position | Purpose |
|
||||||
|
|------|----------|---------|
|
||||||
|
| **Marker** | Before any non-whitespace content | Semantic classification (triggers shard creation) |
|
||||||
|
| **Tag** | After non-whitespace content | Metadata annotation (does not trigger shard creation) |
|
||||||
|
|
||||||
|
**Example:**
|
||||||
|
```markdown
|
||||||
|
@Task @Streamd Working on feature <!-- @Task and @Streamd are MARKERS -->
|
||||||
|
Some text here @CompletedFeature <!-- @CompletedFeature is a TAG -->
|
||||||
|
```
|
||||||
|
|
||||||
|
### R3: Marker Boundary Tracking
|
||||||
|
|
||||||
|
The extraction algorithm MUST track a "marker boundary" state:
|
||||||
|
|
||||||
|
1. Start with `marker_boundary_encountered = false`
|
||||||
|
2. While processing tokens:
|
||||||
|
- If whitespace-only: continue (boundary not crossed)
|
||||||
|
- If `@Tag` token found AND boundary NOT crossed: add to markers
|
||||||
|
- If `@Tag` token found AND boundary crossed: add to tags
|
||||||
|
- If any non-whitespace content found: set boundary = crossed
|
||||||
|
|
||||||
|
### R4: Nested Token Handling
|
||||||
|
|
||||||
|
Tag extraction MUST handle nested markdown formatting:
|
||||||
|
|
||||||
|
- Emphasis: `*@Tag*` or `_@Tag_`
|
||||||
|
- Strong: `**@Tag**` or `__@Tag__`
|
||||||
|
- Strikethrough: `~~@Tag~~`
|
||||||
|
- Links: `[@Tag](url)`
|
||||||
|
|
||||||
|
Tags inside these formatting elements are still valid and should be extracted.
|
||||||
|
|
||||||
|
### R5: Applicable Block Types
|
||||||
|
|
||||||
|
Tag extraction applies to:
|
||||||
|
- Headings (`# Heading with @Tag`)
|
||||||
|
- Paragraphs (`@Tag in paragraph`)
|
||||||
|
- Quote Blocks (`> @Tag in Quote`)
|
||||||
|
- List items (each item can have its own markers)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Parsing Logic
|
||||||
|
|
||||||
|
### R6: Heading-Based Hierarchy
|
||||||
|
|
||||||
|
The parser MUST create a hierarchical shard structure based on markdown headings.
|
||||||
|
|
||||||
|
**Algorithm for determining split level:**
|
||||||
|
|
||||||
|
1. Find the minimum heading level that either:
|
||||||
|
- Appears 2+ times in the block list, OR
|
||||||
|
- Has markers AND is not the first heading
|
||||||
|
2. If no such level exists, do not split (return None)
|
||||||
|
|
||||||
|
**Example:**
|
||||||
|
```markdown
|
||||||
|
# Main Title
|
||||||
|
Content here
|
||||||
|
|
||||||
|
## Section A <!-- Split point (level 2 appears twice) -->
|
||||||
|
Section A content
|
||||||
|
|
||||||
|
## Section B <!-- Split point -->
|
||||||
|
Section B content
|
||||||
|
```
|
||||||
|
|
||||||
|
### R7: List Item Shard Creation
|
||||||
|
|
||||||
|
Each list item with markers MUST become its own shard:
|
||||||
|
|
||||||
|
```markdown
|
||||||
|
- @Task Item one <!-- Shard 1 -->
|
||||||
|
- @Task Item two <!-- Shard 2 -->
|
||||||
|
- Item three <!-- NOT a separate shard (no markers) -->
|
||||||
|
```
|
||||||
|
|
||||||
|
### R8: Shard Simplification
|
||||||
|
|
||||||
|
When building shards, apply this optimization:
|
||||||
|
- If a shard has exactly 1 child AND no markers AND no tags
|
||||||
|
- Return the child directly instead of wrapping it
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Dimension Placement Logic
|
||||||
|
|
||||||
|
### R9: Dimension Configuration
|
||||||
|
|
||||||
|
A **Dimension** defines a classification axis:
|
||||||
|
|
||||||
|
```
|
||||||
|
Dimension {
|
||||||
|
display_name: String // For UI display
|
||||||
|
comment: String? // Documentation
|
||||||
|
propagate: bool // Whether children inherit this dimension
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### R10: Marker Configuration
|
||||||
|
|
||||||
|
A **Marker** defines how a tag affects dimension placement:
|
||||||
|
|
||||||
|
```
|
||||||
|
Marker {
|
||||||
|
display_name: String
|
||||||
|
placements: [MarkerPlacement]
|
||||||
|
}
|
||||||
|
|
||||||
|
MarkerPlacement {
|
||||||
|
if_with: Set<String> // Conditional: only apply if ALL these markers present
|
||||||
|
dimension: String // Target dimension name
|
||||||
|
value: String? // Value to assign (defaults to marker name)
|
||||||
|
overwrites: bool // Can overwrite existing placement
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### R11: Conditional Placement
|
||||||
|
|
||||||
|
Placements with `if_with` conditions MUST only apply when ALL specified markers are present on the same shard.
|
||||||
|
|
||||||
|
**Example Configuration:**
|
||||||
|
```
|
||||||
|
Marker "Task" {
|
||||||
|
placements: [
|
||||||
|
{ dimension: "task", value: "open" },
|
||||||
|
{ if_with: ["Done"], dimension: "task", value: "done" },
|
||||||
|
{ if_with: ["Waiting"], dimension: "task", value: "waiting" },
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Behavior:**
|
||||||
|
- `@Task` alone → `task: "open"`
|
||||||
|
- `@Task @Done` → `task: "done"` (conditional overrides default)
|
||||||
|
- `@Task @Waiting` → `task: "waiting"`
|
||||||
|
|
||||||
|
### R12: Localization Algorithm
|
||||||
|
|
||||||
|
The localization process MUST follow this algorithm:
|
||||||
|
|
||||||
|
```
|
||||||
|
function localize_shard(shard, config, propagated_from_parent, moment):
|
||||||
|
position = copy(propagated_from_parent) // Start with inherited
|
||||||
|
private_position = {} // Non-propagating dimensions
|
||||||
|
|
||||||
|
for marker in shard.markers:
|
||||||
|
if marker in config.markers:
|
||||||
|
for placement in marker.placements:
|
||||||
|
// Check conditional
|
||||||
|
if placement.if_with is subset of shard.markers:
|
||||||
|
dimension = config.dimensions[placement.dimension]
|
||||||
|
value = placement.value OR marker
|
||||||
|
|
||||||
|
// Check if we can apply this placement
|
||||||
|
target = dimension.propagate ? position : private_position
|
||||||
|
if placement.dimension not in target OR placement.overwrites:
|
||||||
|
target[placement.dimension] = value
|
||||||
|
|
||||||
|
// Recursively localize children with propagating dimensions
|
||||||
|
children = [
|
||||||
|
localize_shard(child, config, position, moment)
|
||||||
|
for child in shard.children
|
||||||
|
]
|
||||||
|
|
||||||
|
// Merge private dimensions into final position
|
||||||
|
position.update(private_position)
|
||||||
|
|
||||||
|
return LocalizedShard(
|
||||||
|
markers: shard.markers,
|
||||||
|
tags: shard.tags,
|
||||||
|
location: position,
|
||||||
|
moment: moment,
|
||||||
|
children: children,
|
||||||
|
)
|
||||||
|
```
|
||||||
|
|
||||||
|
### R13: Dimension Propagation
|
||||||
|
|
||||||
|
When `propagate = true`:
|
||||||
|
- Children inherit the dimension value from their parent
|
||||||
|
- Child can override with their own placement
|
||||||
|
|
||||||
|
When `propagate = false`:
|
||||||
|
- Dimension value is NOT inherited by children
|
||||||
|
- Each shard must have its own marker to be placed in this dimension
|
||||||
|
|
||||||
|
**Example:**
|
||||||
|
```
|
||||||
|
dimensions: {
|
||||||
|
"project": { propagate: true }, // Children inherit project
|
||||||
|
"task": { propagate: false }, // Each task is independent
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
```markdown
|
||||||
|
# @Project-X
|
||||||
|
## @Task Item A <!-- project: "Project-X", task: "open" -->
|
||||||
|
### Sub-item <!-- project: "Project-X", task: (none) -->
|
||||||
|
## @Task Item B <!-- project: "Project-X", task: "open" -->
|
||||||
|
```
|
||||||
|
|
||||||
|
### R14: Overwrite Behavior
|
||||||
|
|
||||||
|
Default: A placement does NOT overwrite an existing value in the dimension.
|
||||||
|
|
||||||
|
With `overwrites: true`: The placement WILL replace any existing value.
|
||||||
|
|
||||||
|
This allows conditional placements to override base placements.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## File Naming Convention
|
||||||
|
|
||||||
|
### R15: File Name Format
|
||||||
|
|
||||||
|
Files follow the pattern: `YYYYMMDD-HHMMSS [markers].md`
|
||||||
|
|
||||||
|
- `YYYYMMDD`: Date (8 digits, required)
|
||||||
|
- `HHMMSS`: Time (4-6 digits, optional, pads with zeros)
|
||||||
|
- `[markers]`: Space-separated marker names extracted from file content
|
||||||
|
|
||||||
|
**Extraction regex:** `^(?P<date>\d{8})(?:-(?P<time>\d{4,6}))?.+\.md$`
|
||||||
|
|
||||||
|
### R16: Temporal Markers
|
||||||
|
|
||||||
|
Special markers can override the file timestamp:
|
||||||
|
|
||||||
|
- Date markers: `@YYYYMMDD` (8 digits)
|
||||||
|
- Time markers: `@HHMMSS` (6 digits)
|
||||||
|
|
||||||
|
These are used for entries that reference a different time than when the file was created.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Timesheet Module
|
||||||
|
|
||||||
|
### R17: Timesheet Point Types
|
||||||
|
|
||||||
|
```
|
||||||
|
TimesheetPointType {
|
||||||
|
Card, // Clock in / start work
|
||||||
|
Break, // Clock out / end work
|
||||||
|
SickLeave,
|
||||||
|
Vacation,
|
||||||
|
Holiday,
|
||||||
|
Undertime,
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### R18: Timesheet State Machine
|
||||||
|
|
||||||
|
Process timesheet shards chronologically per day:
|
||||||
|
|
||||||
|
1. Start state: "on break" (not working)
|
||||||
|
2. `Card` marker: Transition to "working", record start time
|
||||||
|
3. `Break` marker: Transition to "on break", emit timecard from previous start to now
|
||||||
|
4. Special markers (SickLeave, Vacation, etc.): Set day type
|
||||||
|
|
||||||
|
**Validation:** The last entry of each day MUST be a `Break` (cannot end day while working).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Query System
|
||||||
|
|
||||||
|
### R19: Shard Search
|
||||||
|
|
||||||
|
Provide recursive search through the shard tree:
|
||||||
|
|
||||||
|
- `find_shard(predicate)`: Find all shards matching a predicate function
|
||||||
|
- `find_by_position(dimension, value)`: Find shards where `location[dimension] == value`
|
||||||
|
- `find_by_set_dimension(dimension)`: Find shards where dimension exists in location
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## CLI Commands
|
||||||
|
|
||||||
|
### R20: Core Commands
|
||||||
|
|
||||||
|
| Command | Description |
|
||||||
|
|---------|-------------|
|
||||||
|
| `streamd new` | Create new timestamped file, open editor, rename with markers on close |
|
||||||
|
| `streamd todo` | List all shards with `task: "open"` |
|
||||||
|
| `streamd edit [n]` | Edit nth file (supports negative indexing for recent files) |
|
||||||
|
| `streamd timesheet` | Extract and export timesheet data as CSV |
|
||||||
|
| `streamd completions <shell>` | Generate shell completions (bash, zsh, fish, elvish, powershell) |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Application Configuration
|
||||||
|
|
||||||
|
### R22: Config File Location
|
||||||
|
|
||||||
|
The application configuration is stored at `~/.config/streamd/config.toml`:
|
||||||
|
|
||||||
|
```toml
|
||||||
|
base_folder = "/path/to/stream/files"
|
||||||
|
```
|
||||||
|
|
||||||
|
### R23: Environment Variable Override
|
||||||
|
|
||||||
|
The `STREAMD_BASE_FOLDER` environment variable can override the config file setting.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Configuration Merging
|
||||||
|
|
||||||
|
### R24: Configuration Composition
|
||||||
|
|
||||||
|
Multiple configurations can be merged:
|
||||||
|
|
||||||
|
- Dimensions are combined (later configs can add new dimensions)
|
||||||
|
- Markers are combined (later configs can add new markers)
|
||||||
|
- This allows base configuration + domain-specific extensions
|
||||||
155
flake.lock
generated
155
flake.lock
generated
|
|
@ -1,95 +1,120 @@
|
||||||
{
|
{
|
||||||
"nodes": {
|
"nodes": {
|
||||||
"nixpkgs": {
|
"crane": {
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1769789167,
|
"lastModified": 1774313767,
|
||||||
"narHash": "sha256-kKB3bqYJU5nzYeIROI82Ef9VtTbu4uA3YydSk/Bioa8=",
|
"narHash": "sha256-hy0XTQND6avzGEUFrJtYBBpFa/POiiaGBr2vpU6Y9tY=",
|
||||||
|
"owner": "ipetkov",
|
||||||
|
"repo": "crane",
|
||||||
|
"rev": "3d9df76e29656c679c744968b17fbaf28f0e923d",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"owner": "ipetkov",
|
||||||
|
"repo": "crane",
|
||||||
|
"type": "github"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"flake-compat": {
|
||||||
|
"flake": false,
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1767039857,
|
||||||
|
"narHash": "sha256-vNpUSpF5Nuw8xvDLj2KCwwksIbjua2LZCqhV1LNRDns=",
|
||||||
"owner": "NixOS",
|
"owner": "NixOS",
|
||||||
"repo": "nixpkgs",
|
"repo": "flake-compat",
|
||||||
"rev": "62c8382960464ceb98ea593cb8321a2cf8f9e3e5",
|
"rev": "5edf11c44bc78a0d334f6334cdaf7d60d732daab",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
"owner": "NixOS",
|
"owner": "NixOS",
|
||||||
|
"repo": "flake-compat",
|
||||||
|
"type": "github"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"git-hooks": {
|
||||||
|
"inputs": {
|
||||||
|
"flake-compat": "flake-compat",
|
||||||
|
"gitignore": "gitignore",
|
||||||
|
"nixpkgs": [
|
||||||
|
"nixpkgs"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1774861927,
|
||||||
|
"narHash": "sha256-FB1fbeJQjaTMI2JFAa0LNMaYXiShiYbJA6puGQC4xdg=",
|
||||||
|
"owner": "cachix",
|
||||||
|
"repo": "git-hooks.nix",
|
||||||
|
"rev": "9c4469b68b62e122c3b3d2ab0ed3caeb04ff1ac4",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"owner": "cachix",
|
||||||
|
"repo": "git-hooks.nix",
|
||||||
|
"type": "github"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"gitignore": {
|
||||||
|
"inputs": {
|
||||||
|
"nixpkgs": [
|
||||||
|
"git-hooks",
|
||||||
|
"nixpkgs"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1709087332,
|
||||||
|
"narHash": "sha256-HG2cCnktfHsKV0s4XW83gU3F57gaTljL9KNSuG6bnQs=",
|
||||||
|
"owner": "hercules-ci",
|
||||||
|
"repo": "gitignore.nix",
|
||||||
|
"rev": "637db329424fd7e46cf4185293b9cc8c88c95394",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"owner": "hercules-ci",
|
||||||
|
"repo": "gitignore.nix",
|
||||||
|
"type": "github"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"nixpkgs": {
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1774709303,
|
||||||
|
"narHash": "sha256-D3Q07BbIA2KnTcSXIqqu9P586uWxN74zNoCH3h2ESHg=",
|
||||||
|
"owner": "nixos",
|
||||||
|
"repo": "nixpkgs",
|
||||||
|
"rev": "8110df5ad7abf5d4c0f6fb0f8f978390e77f9685",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"owner": "nixos",
|
||||||
"ref": "nixos-unstable",
|
"ref": "nixos-unstable",
|
||||||
"repo": "nixpkgs",
|
"repo": "nixpkgs",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"pyproject-build-systems": {
|
|
||||||
"inputs": {
|
|
||||||
"nixpkgs": [
|
|
||||||
"nixpkgs"
|
|
||||||
],
|
|
||||||
"pyproject-nix": [
|
|
||||||
"pyproject-nix"
|
|
||||||
],
|
|
||||||
"uv2nix": [
|
|
||||||
"uv2nix"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"locked": {
|
|
||||||
"lastModified": 1763662255,
|
|
||||||
"narHash": "sha256-4bocaOyLa3AfiS8KrWjZQYu+IAta05u3gYZzZ6zXbT0=",
|
|
||||||
"owner": "pyproject-nix",
|
|
||||||
"repo": "build-system-pkgs",
|
|
||||||
"rev": "042904167604c681a090c07eb6967b4dd4dae88c",
|
|
||||||
"type": "github"
|
|
||||||
},
|
|
||||||
"original": {
|
|
||||||
"owner": "pyproject-nix",
|
|
||||||
"repo": "build-system-pkgs",
|
|
||||||
"type": "github"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"pyproject-nix": {
|
|
||||||
"inputs": {
|
|
||||||
"nixpkgs": [
|
|
||||||
"nixpkgs"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"locked": {
|
|
||||||
"lastModified": 1769936401,
|
|
||||||
"narHash": "sha256-kwCOegKLZJM9v/e/7cqwg1p/YjjTAukKPqmxKnAZRgA=",
|
|
||||||
"owner": "pyproject-nix",
|
|
||||||
"repo": "pyproject.nix",
|
|
||||||
"rev": "b0d513eeeebed6d45b4f2e874f9afba2021f7812",
|
|
||||||
"type": "github"
|
|
||||||
},
|
|
||||||
"original": {
|
|
||||||
"owner": "pyproject-nix",
|
|
||||||
"repo": "pyproject.nix",
|
|
||||||
"type": "github"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"root": {
|
"root": {
|
||||||
"inputs": {
|
"inputs": {
|
||||||
|
"crane": "crane",
|
||||||
|
"git-hooks": "git-hooks",
|
||||||
"nixpkgs": "nixpkgs",
|
"nixpkgs": "nixpkgs",
|
||||||
"pyproject-build-systems": "pyproject-build-systems",
|
"rust-overlay": "rust-overlay"
|
||||||
"pyproject-nix": "pyproject-nix",
|
|
||||||
"uv2nix": "uv2nix"
|
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"uv2nix": {
|
"rust-overlay": {
|
||||||
"inputs": {
|
"inputs": {
|
||||||
"nixpkgs": [
|
"nixpkgs": [
|
||||||
"nixpkgs"
|
"nixpkgs"
|
||||||
],
|
|
||||||
"pyproject-nix": [
|
|
||||||
"pyproject-nix"
|
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1769957392,
|
"lastModified": 1774840424,
|
||||||
"narHash": "sha256-6PkqwwYf5K2CHi2V+faI/9pqjfz/HxUkI/MVid6hlOY=",
|
"narHash": "sha256-3Oi4mBKzOCFQYLUyEjyc0s5cnlNj1MzmhpVKoLptpe8=",
|
||||||
"owner": "pyproject-nix",
|
"owner": "oxalica",
|
||||||
"repo": "uv2nix",
|
"repo": "rust-overlay",
|
||||||
"rev": "d18bc50ae1c3d4be9c41c2d94ea765524400af75",
|
"rev": "d9f52b51548e76ab8b6e7d647763047ebdec835c",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
"owner": "pyproject-nix",
|
"owner": "oxalica",
|
||||||
"repo": "uv2nix",
|
"repo": "rust-overlay",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
370
flake.nix
370
flake.nix
|
|
@ -1,24 +1,18 @@
|
||||||
{
|
{
|
||||||
description = "Hello world flake using uv2nix";
|
description = "Using Markdown Files to organize your life as a @Tag-Stream";
|
||||||
|
|
||||||
inputs = {
|
inputs = {
|
||||||
nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
|
nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable";
|
||||||
|
|
||||||
pyproject-nix = {
|
rust-overlay = {
|
||||||
url = "github:pyproject-nix/pyproject.nix";
|
url = "github:oxalica/rust-overlay";
|
||||||
inputs.nixpkgs.follows = "nixpkgs";
|
inputs.nixpkgs.follows = "nixpkgs";
|
||||||
};
|
};
|
||||||
|
|
||||||
uv2nix = {
|
crane.url = "github:ipetkov/crane";
|
||||||
url = "github:pyproject-nix/uv2nix";
|
|
||||||
inputs.pyproject-nix.follows = "pyproject-nix";
|
|
||||||
inputs.nixpkgs.follows = "nixpkgs";
|
|
||||||
};
|
|
||||||
|
|
||||||
pyproject-build-systems = {
|
git-hooks = {
|
||||||
url = "github:pyproject-nix/build-system-pkgs";
|
url = "github:cachix/git-hooks.nix";
|
||||||
inputs.pyproject-nix.follows = "pyproject-nix";
|
|
||||||
inputs.uv2nix.follows = "uv2nix";
|
|
||||||
inputs.nixpkgs.follows = "nixpkgs";
|
inputs.nixpkgs.follows = "nixpkgs";
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
|
@ -27,198 +21,186 @@
|
||||||
{
|
{
|
||||||
self,
|
self,
|
||||||
nixpkgs,
|
nixpkgs,
|
||||||
uv2nix,
|
rust-overlay,
|
||||||
pyproject-nix,
|
crane,
|
||||||
pyproject-build-systems,
|
git-hooks,
|
||||||
...
|
...
|
||||||
}:
|
}:
|
||||||
let
|
let
|
||||||
inherit (nixpkgs) lib;
|
inherit (nixpkgs) lib;
|
||||||
|
forAllSystems = lib.genAttrs lib.systems.flakeExposed;
|
||||||
|
|
||||||
# Load a uv workspace from a workspace root.
|
mkPkgs =
|
||||||
# Uv2nix treats all uv projects as workspace projects.
|
system:
|
||||||
workspace = uv2nix.lib.workspace.loadWorkspace { workspaceRoot = ./.; };
|
import nixpkgs {
|
||||||
|
inherit system;
|
||||||
# Create package overlay from workspace.
|
overlays = [ rust-overlay.overlays.default ];
|
||||||
overlay = workspace.mkPyprojectOverlay {
|
|
||||||
# Prefer prebuilt binary wheels as a package source.
|
|
||||||
# Sdists are less likely to "just work" because of the metadata missing from uv.lock.
|
|
||||||
# Binary wheels are more likely to, but may still require overrides for library dependencies.
|
|
||||||
sourcePreference = "wheel"; # or sourcePreference = "sdist";
|
|
||||||
# Optionally customise PEP 508 environment
|
|
||||||
# environ = {
|
|
||||||
# platform_release = "5.10.65";
|
|
||||||
# };
|
|
||||||
};
|
};
|
||||||
|
|
||||||
# Extend generated overlay with build fixups
|
mkCraneLib =
|
||||||
#
|
system:
|
||||||
# Uv2nix can only work with what it has, and uv.lock is missing essential metadata to perform some builds.
|
|
||||||
# This is an additional overlay implementing build fixups.
|
|
||||||
# See:
|
|
||||||
# - https://pyproject-nix.github.io/uv2nix/FAQ.html
|
|
||||||
pyprojectOverrides = _final: _prev: {
|
|
||||||
# Implement build fixups here.
|
|
||||||
# Note that uv2nix is _not_ using Nixpkgs buildPythonPackage.
|
|
||||||
# It's using https://pyproject-nix.github.io/pyproject.nix/build.html
|
|
||||||
};
|
|
||||||
|
|
||||||
# This example is only using x86_64-linux
|
|
||||||
pkgs = nixpkgs.legacyPackages.x86_64-linux;
|
|
||||||
|
|
||||||
# Use Python 3.14 from nixpkgs
|
|
||||||
python = pkgs.python314;
|
|
||||||
|
|
||||||
# Construct package set
|
|
||||||
pythonSet =
|
|
||||||
# Use base package set from pyproject.nix builders
|
|
||||||
(pkgs.callPackage pyproject-nix.build.packages {
|
|
||||||
inherit python;
|
|
||||||
}).overrideScope
|
|
||||||
(
|
|
||||||
lib.composeManyExtensions [
|
|
||||||
pyproject-build-systems.overlays.default
|
|
||||||
overlay
|
|
||||||
pyprojectOverrides
|
|
||||||
]
|
|
||||||
);
|
|
||||||
|
|
||||||
in
|
|
||||||
{
|
|
||||||
# Package a virtual environment as our main application.
|
|
||||||
#
|
|
||||||
# Enable no optional dependencies for production build.
|
|
||||||
packages.x86_64-linux =
|
|
||||||
let
|
let
|
||||||
streamer = pythonSet.mkVirtualEnv "streamer-env" workspace.deps.default;
|
pkgs = mkPkgs system;
|
||||||
in
|
toolchain = pkgs.rust-bin.stable.latest.default.override {
|
||||||
{
|
extensions = [
|
||||||
inherit streamer;
|
"rust-src"
|
||||||
default = streamer;
|
"rust-analyzer"
|
||||||
};
|
|
||||||
|
|
||||||
# Make streamer runnable with `nix run`
|
|
||||||
apps.x86_64-linux = {
|
|
||||||
default = {
|
|
||||||
type = "app";
|
|
||||||
program = "${self.packages.x86_64-linux.default}/bin/streamer";
|
|
||||||
};
|
|
||||||
};
|
|
||||||
|
|
||||||
# This example provides two different modes of development:
|
|
||||||
# - Impurely using uv to manage virtual environments
|
|
||||||
# - Pure development using uv2nix to manage virtual environments
|
|
||||||
devShells.x86_64-linux = {
|
|
||||||
# It is of course perfectly OK to keep using an impure virtualenv workflow and only use uv2nix to build packages.
|
|
||||||
# This devShell simply adds Python and undoes the dependency leakage done by Nixpkgs Python infrastructure.
|
|
||||||
impure = pkgs.mkShell {
|
|
||||||
packages = with pkgs; [
|
|
||||||
python
|
|
||||||
uv
|
|
||||||
pre-commit
|
|
||||||
bashInteractive
|
|
||||||
];
|
];
|
||||||
env = {
|
};
|
||||||
# Prevent uv from managing Python downloads
|
in
|
||||||
UV_PYTHON_DOWNLOADS = "never";
|
(crane.mkLib pkgs).overrideToolchain toolchain;
|
||||||
# Force uv to use nixpkgs Python interpreter
|
|
||||||
UV_PYTHON = python.interpreter;
|
mkStreamd =
|
||||||
|
system:
|
||||||
|
let
|
||||||
|
craneLib = mkCraneLib system;
|
||||||
|
|
||||||
|
commonArgs = {
|
||||||
|
src = craneLib.path ./.;
|
||||||
|
pname = "streamd";
|
||||||
|
version = "0.1.0";
|
||||||
|
strictDeps = true;
|
||||||
|
};
|
||||||
|
|
||||||
|
cargoArtifacts = craneLib.buildDepsOnly commonArgs;
|
||||||
|
in
|
||||||
|
craneLib.buildPackage (
|
||||||
|
commonArgs
|
||||||
|
// {
|
||||||
|
inherit cargoArtifacts;
|
||||||
|
|
||||||
|
passthru = {
|
||||||
|
inherit cargoArtifacts;
|
||||||
|
tests = {
|
||||||
|
clippy = craneLib.cargoClippy (
|
||||||
|
commonArgs
|
||||||
|
// {
|
||||||
|
inherit cargoArtifacts;
|
||||||
|
cargoClippyExtraArgs = "--all-targets -- -D warnings";
|
||||||
}
|
}
|
||||||
// lib.optionalAttrs pkgs.stdenv.isLinux {
|
|
||||||
# Python libraries often load native shared objects using dlopen(3).
|
|
||||||
# Setting LD_LIBRARY_PATH makes the dynamic library loader aware of libraries without using RPATH for lookup.
|
|
||||||
LD_LIBRARY_PATH = lib.makeLibraryPath pkgs.pythonManylinuxPackages.manylinux1;
|
|
||||||
};
|
|
||||||
shellHook = ''
|
|
||||||
unset PYTHONPATH
|
|
||||||
'';
|
|
||||||
};
|
|
||||||
|
|
||||||
# This devShell uses uv2nix to construct a virtual environment purely from Nix, using the same dependency specification as the application.
|
|
||||||
# The notable difference is that we also apply another overlay here enabling editable mode ( https://setuptools.pypa.io/en/latest/userguide/development_mode.html ).
|
|
||||||
#
|
|
||||||
# This means that any changes done to your local files do not require a rebuild.
|
|
||||||
#
|
|
||||||
# Note: Editable package support is still unstable and subject to change.
|
|
||||||
uv2nix =
|
|
||||||
let
|
|
||||||
# Create an overlay enabling editable mode for all local dependencies.
|
|
||||||
editableOverlay = workspace.mkEditablePyprojectOverlay {
|
|
||||||
# Use environment variable
|
|
||||||
root = "$REPO_ROOT";
|
|
||||||
# Optional: Only enable editable for these packages
|
|
||||||
# members = [ "streamer" ];
|
|
||||||
};
|
|
||||||
|
|
||||||
# Override previous set with our overrideable overlay.
|
|
||||||
editablePythonSet = pythonSet.overrideScope (
|
|
||||||
lib.composeManyExtensions [
|
|
||||||
editableOverlay
|
|
||||||
|
|
||||||
# Apply fixups for building an editable package of your workspace packages
|
|
||||||
(final: prev: {
|
|
||||||
streamer = prev.streamer.overrideAttrs (old: {
|
|
||||||
# It's a good idea to filter the sources going into an editable build
|
|
||||||
# so the editable package doesn't have to be rebuilt on every change.
|
|
||||||
src = lib.fileset.toSource {
|
|
||||||
root = old.src;
|
|
||||||
fileset = lib.fileset.unions [
|
|
||||||
(old.src + "/pyproject.toml")
|
|
||||||
(old.src + "/README.md")
|
|
||||||
(old.src + "/src/streamer/__init__.py")
|
|
||||||
];
|
|
||||||
};
|
|
||||||
|
|
||||||
# Hatchling (our build system) has a dependency on the `editables` package when building editables.
|
|
||||||
#
|
|
||||||
# In normal Python flows this dependency is dynamically handled, and doesn't need to be explicitly declared.
|
|
||||||
# This behaviour is documented in PEP-660.
|
|
||||||
#
|
|
||||||
# With Nix the dependency needs to be explicitly declared.
|
|
||||||
nativeBuildInputs =
|
|
||||||
old.nativeBuildInputs
|
|
||||||
++ final.resolveBuildSystem {
|
|
||||||
editables = [ ];
|
|
||||||
};
|
|
||||||
});
|
|
||||||
|
|
||||||
})
|
|
||||||
]
|
|
||||||
);
|
);
|
||||||
|
fmt = craneLib.cargoFmt { src = commonArgs.src; };
|
||||||
# Build virtual environment, with local packages being editable.
|
test = craneLib.cargoTest (commonArgs // { inherit cargoArtifacts; });
|
||||||
#
|
|
||||||
# Enable all optional dependencies for development.
|
|
||||||
virtualenv = editablePythonSet.mkVirtualEnv "streamer-dev-env" workspace.deps.all;
|
|
||||||
|
|
||||||
in
|
|
||||||
pkgs.mkShell {
|
|
||||||
packages = with pkgs; [
|
|
||||||
virtualenv
|
|
||||||
uv
|
|
||||||
pre-commit
|
|
||||||
bashInteractive
|
|
||||||
];
|
|
||||||
|
|
||||||
env = {
|
|
||||||
# Don't create venv using uv
|
|
||||||
UV_NO_SYNC = "1";
|
|
||||||
|
|
||||||
# Force uv to use Python interpreter from venv
|
|
||||||
UV_PYTHON = "${virtualenv}/bin/python";
|
|
||||||
|
|
||||||
# Prevent uv from downloading managed Python's
|
|
||||||
UV_PYTHON_DOWNLOADS = "never";
|
|
||||||
};
|
|
||||||
|
|
||||||
shellHook = ''
|
|
||||||
# Undo dependency propagation by nixpkgs.
|
|
||||||
unset PYTHONPATH
|
|
||||||
|
|
||||||
# Get repository root using git. This is expanded at runtime by the editable `.pth` machinery.
|
|
||||||
export REPO_ROOT=$(git rev-parse --show-toplevel)
|
|
||||||
'';
|
|
||||||
};
|
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
mkGitHooksCheck =
|
||||||
|
system:
|
||||||
|
let
|
||||||
|
pkgs = mkPkgs system;
|
||||||
|
toolchain = pkgs.rust-bin.stable.latest.default;
|
||||||
|
in
|
||||||
|
git-hooks.lib.${system}.run {
|
||||||
|
src = ./.;
|
||||||
|
hooks = {
|
||||||
|
rustfmt = {
|
||||||
|
enable = true;
|
||||||
|
package = toolchain;
|
||||||
|
};
|
||||||
|
commitizen.enable = true;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
in
|
||||||
|
{
|
||||||
|
packages = forAllSystems (
|
||||||
|
system:
|
||||||
|
let
|
||||||
|
streamd = mkStreamd system;
|
||||||
|
in
|
||||||
|
{
|
||||||
|
inherit streamd;
|
||||||
|
default = streamd;
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
homeManagerModules.default =
|
||||||
|
{
|
||||||
|
lib,
|
||||||
|
config,
|
||||||
|
pkgs,
|
||||||
|
...
|
||||||
|
}:
|
||||||
|
let
|
||||||
|
cfg = config.programs.streamd;
|
||||||
|
in
|
||||||
|
{
|
||||||
|
options.programs.streamd = {
|
||||||
|
enable = lib.mkEnableOption "streamd";
|
||||||
|
|
||||||
|
base-folder = lib.mkOption {
|
||||||
|
type = lib.types.str;
|
||||||
|
description = "Base Folder of streamd";
|
||||||
|
};
|
||||||
|
|
||||||
|
package = lib.mkOption {
|
||||||
|
type = lib.types.package;
|
||||||
|
default = self.packages.${pkgs.stdenv.hostPlatform.system}.streamd;
|
||||||
|
defaultText = lib.literalExpression "inputs.streamd.packages.\${pkgs.stdenv.hostPlatform.system}.streamd";
|
||||||
|
description = "The package to use for the streamd binary.";
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
config = lib.mkIf cfg.enable {
|
||||||
|
assertions = [
|
||||||
|
(lib.hm.assertions.assertPlatform "programs.streamd" pkgs lib.platforms.linux)
|
||||||
|
];
|
||||||
|
|
||||||
|
home.packages = [ cfg.package ];
|
||||||
|
|
||||||
|
xdg.configFile."streamd/config.toml".source =
|
||||||
|
(pkgs.formats.toml { }).generate "streamd-configuration"
|
||||||
|
{
|
||||||
|
base_folder = cfg.base-folder;
|
||||||
|
};
|
||||||
|
|
||||||
|
home.shellAliases.s = "streamd";
|
||||||
|
|
||||||
|
programs.bash.initExtra = ''
|
||||||
|
eval "$(${cfg.package}/bin/streamd completions bash)"
|
||||||
|
'';
|
||||||
|
|
||||||
|
programs.zsh.initExtra = ''
|
||||||
|
eval "$(${cfg.package}/bin/streamd completions zsh)"
|
||||||
|
'';
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
checks = forAllSystems (
|
||||||
|
system:
|
||||||
|
let
|
||||||
|
streamd = mkStreamd system;
|
||||||
|
in
|
||||||
|
{
|
||||||
|
inherit (streamd.passthru.tests) clippy fmt test;
|
||||||
|
pre-commit = mkGitHooksCheck system;
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
devShells = forAllSystems (
|
||||||
|
system:
|
||||||
|
let
|
||||||
|
pkgs = mkPkgs system;
|
||||||
|
toolchain = pkgs.rust-bin.stable.latest.default.override {
|
||||||
|
extensions = [
|
||||||
|
"rust-src"
|
||||||
|
"rust-analyzer"
|
||||||
|
];
|
||||||
|
};
|
||||||
|
in
|
||||||
|
{
|
||||||
|
default = pkgs.mkShell {
|
||||||
|
packages = [
|
||||||
|
toolchain
|
||||||
|
pkgs.commitizen
|
||||||
|
];
|
||||||
|
|
||||||
|
shellHook = ''
|
||||||
|
${(mkGitHooksCheck system).shellHook}
|
||||||
|
'';
|
||||||
|
};
|
||||||
|
}
|
||||||
|
);
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,30 +0,0 @@
|
||||||
[project]
|
|
||||||
name = "streamer"
|
|
||||||
version = "0.1.0"
|
|
||||||
description = "Searching for tags in streams"
|
|
||||||
readme = "README.md"
|
|
||||||
requires-python = ">=3.12"
|
|
||||||
dependencies = [
|
|
||||||
"click==8.3.1",
|
|
||||||
"mistletoe==1.5.1",
|
|
||||||
"pydantic==2.12.5",
|
|
||||||
"pydantic-settings[yaml]==2.12.0",
|
|
||||||
"rich==14.3.2",
|
|
||||||
"typer==0.21.2",
|
|
||||||
"xdg-base-dirs==6.0.2",
|
|
||||||
]
|
|
||||||
|
|
||||||
[project.scripts]
|
|
||||||
streamer = "streamer:app"
|
|
||||||
|
|
||||||
[build-system]
|
|
||||||
requires = ["hatchling"]
|
|
||||||
build-backend = "hatchling.build"
|
|
||||||
|
|
||||||
[dependency-groups]
|
|
||||||
dev = [
|
|
||||||
"faker==40.4.0",
|
|
||||||
"pyright==1.1.408",
|
|
||||||
"pytest==9.0.2",
|
|
||||||
"ruff==0.15.0",
|
|
||||||
]
|
|
||||||
40
src/cli/args.rs
Normal file
40
src/cli/args.rs
Normal file
|
|
@ -0,0 +1,40 @@
|
||||||
|
use clap::{Parser, Subcommand};
|
||||||
|
use clap_complete::Shell;
|
||||||
|
|
||||||
|
#[derive(Parser)]
|
||||||
|
#[command(name = "streamd")]
|
||||||
|
#[command(
|
||||||
|
author,
|
||||||
|
version,
|
||||||
|
about = "Personal knowledge management and time-tracking CLI using @Tag annotations"
|
||||||
|
)]
|
||||||
|
pub struct Cli {
|
||||||
|
#[command(subcommand)]
|
||||||
|
pub command: Option<Commands>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Subcommand)]
|
||||||
|
pub enum Commands {
|
||||||
|
/// Create a new stream file
|
||||||
|
New,
|
||||||
|
|
||||||
|
/// Display open tasks
|
||||||
|
Todo,
|
||||||
|
|
||||||
|
/// Edit a stream file by position
|
||||||
|
Edit {
|
||||||
|
/// Position of the file to edit (0 = most recent, negative = from oldest)
|
||||||
|
#[arg(default_value = "1")]
|
||||||
|
number: i32,
|
||||||
|
},
|
||||||
|
|
||||||
|
/// Display extracted timesheets
|
||||||
|
Timesheet,
|
||||||
|
|
||||||
|
/// Generate shell completions
|
||||||
|
Completions {
|
||||||
|
/// Shell to generate completions for
|
||||||
|
#[arg(value_enum)]
|
||||||
|
shell: Shell,
|
||||||
|
},
|
||||||
|
}
|
||||||
11
src/cli/commands/completions.rs
Normal file
11
src/cli/commands/completions.rs
Normal file
|
|
@ -0,0 +1,11 @@
|
||||||
|
use clap::CommandFactory;
|
||||||
|
use clap_complete::{generate, Shell};
|
||||||
|
use std::io;
|
||||||
|
|
||||||
|
use crate::cli::Cli;
|
||||||
|
|
||||||
|
pub fn run(shell: Shell) {
|
||||||
|
let mut cmd = Cli::command();
|
||||||
|
let name = cmd.get_name().to_string();
|
||||||
|
generate(shell, &mut cmd, name, &mut io::stdout());
|
||||||
|
}
|
||||||
73
src/cli/commands/edit.rs
Normal file
73
src/cli/commands/edit.rs
Normal file
|
|
@ -0,0 +1,73 @@
|
||||||
|
use std::fs;
|
||||||
|
use std::process::Command;
|
||||||
|
|
||||||
|
use walkdir::WalkDir;
|
||||||
|
|
||||||
|
use crate::config::Settings;
|
||||||
|
use crate::error::StreamdError;
|
||||||
|
use crate::extract::parse_markdown_file;
|
||||||
|
use crate::localize::{localize_stream_file, TaskConfiguration};
|
||||||
|
use crate::models::LocalizedShard;
|
||||||
|
|
||||||
|
fn all_files() -> Result<Vec<LocalizedShard>, StreamdError> {
|
||||||
|
let settings = Settings::load()?;
|
||||||
|
let mut shards = Vec::new();
|
||||||
|
|
||||||
|
for entry in WalkDir::new(&settings.base_folder)
|
||||||
|
.max_depth(1)
|
||||||
|
.into_iter()
|
||||||
|
.filter_map(|e| e.ok())
|
||||||
|
{
|
||||||
|
let path = entry.path();
|
||||||
|
if path.extension().map(|e| e == "md").unwrap_or(false) {
|
||||||
|
let file_name = path.to_string_lossy().to_string();
|
||||||
|
let content = fs::read_to_string(path)?;
|
||||||
|
let stream_file = parse_markdown_file(&file_name, &content);
|
||||||
|
|
||||||
|
if let Ok(shard) = localize_stream_file(&stream_file, &TaskConfiguration) {
|
||||||
|
shards.push(shard);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(shards)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn run(number: i32) -> Result<(), StreamdError> {
|
||||||
|
let all_shards = all_files()?;
|
||||||
|
|
||||||
|
// Sort by moment (timestamp)
|
||||||
|
let mut sorted_shards = all_shards;
|
||||||
|
sorted_shards.sort_by_key(|s| s.moment);
|
||||||
|
|
||||||
|
if sorted_shards.is_empty() {
|
||||||
|
return Err(StreamdError::ConfigError("No files found".to_string()));
|
||||||
|
}
|
||||||
|
|
||||||
|
let selected_index = if number >= 0 {
|
||||||
|
// 0 = most recent, 1 = second most recent, etc.
|
||||||
|
let idx = sorted_shards.len() as i32 - number;
|
||||||
|
if idx < 0 {
|
||||||
|
return Err(StreamdError::ConfigError(
|
||||||
|
"Argument out of range".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
idx as usize
|
||||||
|
} else {
|
||||||
|
// -1 = oldest, -2 = second oldest, etc.
|
||||||
|
let idx = (-number - 1) as usize;
|
||||||
|
if idx >= sorted_shards.len() {
|
||||||
|
return Err(StreamdError::ConfigError(
|
||||||
|
"Argument out of range".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
idx
|
||||||
|
};
|
||||||
|
|
||||||
|
if let Some(file_path) = sorted_shards[selected_index].location.get("file") {
|
||||||
|
let editor = std::env::var("EDITOR").unwrap_or_else(|_| "vi".to_string());
|
||||||
|
Command::new(&editor).arg(file_path).status()?;
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
5
src/cli/commands/mod.rs
Normal file
5
src/cli/commands/mod.rs
Normal file
|
|
@ -0,0 +1,5 @@
|
||||||
|
pub mod completions;
|
||||||
|
pub mod edit;
|
||||||
|
pub mod new;
|
||||||
|
pub mod timesheet;
|
||||||
|
pub mod todo;
|
||||||
60
src/cli/commands/new.rs
Normal file
60
src/cli/commands/new.rs
Normal file
|
|
@ -0,0 +1,60 @@
|
||||||
|
use std::fs;
|
||||||
|
use std::io::Write;
|
||||||
|
use std::path::Path;
|
||||||
|
use std::process::Command;
|
||||||
|
|
||||||
|
use chrono::Local;
|
||||||
|
|
||||||
|
use crate::config::Settings;
|
||||||
|
use crate::error::StreamdError;
|
||||||
|
use crate::extract::parse_markdown_file;
|
||||||
|
|
||||||
|
pub fn run() -> Result<(), StreamdError> {
|
||||||
|
let settings = Settings::load()?;
|
||||||
|
let streamd_directory = &settings.base_folder;
|
||||||
|
|
||||||
|
let timestamp = Local::now().format("%Y%m%d-%H%M%S").to_string();
|
||||||
|
let preliminary_file_name = format!("{}_wip.md", timestamp);
|
||||||
|
let preliminary_path = Path::new(streamd_directory).join(&preliminary_file_name);
|
||||||
|
|
||||||
|
// Create initial file with heading
|
||||||
|
let content = "# ";
|
||||||
|
let mut file = fs::File::create(&preliminary_path)?;
|
||||||
|
file.write_all(content.as_bytes())?;
|
||||||
|
drop(file);
|
||||||
|
|
||||||
|
// Open in editor
|
||||||
|
let editor = std::env::var("EDITOR").unwrap_or_else(|_| "vi".to_string());
|
||||||
|
let status = Command::new(&editor).arg(&preliminary_path).status()?;
|
||||||
|
|
||||||
|
if !status.success() {
|
||||||
|
return Err(StreamdError::IoError(std::io::Error::other(
|
||||||
|
"Editor exited with non-zero status",
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read the edited content
|
||||||
|
let edited_content = fs::read_to_string(&preliminary_path)?;
|
||||||
|
let parsed_content =
|
||||||
|
parse_markdown_file(preliminary_path.to_string_lossy().as_ref(), &edited_content);
|
||||||
|
|
||||||
|
// Determine final filename based on markers
|
||||||
|
let final_file_name = if let Some(ref shard) = parsed_content.shard {
|
||||||
|
if !shard.markers.is_empty() {
|
||||||
|
format!("{} {}.md", timestamp, shard.markers.join(" "))
|
||||||
|
} else {
|
||||||
|
format!("{}.md", timestamp)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
format!("{}.md", timestamp)
|
||||||
|
};
|
||||||
|
|
||||||
|
let final_path = Path::new(streamd_directory).join(&final_file_name);
|
||||||
|
|
||||||
|
// Rename the file
|
||||||
|
fs::rename(&preliminary_path, &final_path)?;
|
||||||
|
|
||||||
|
println!("Saved as {}", final_file_name);
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
52
src/cli/commands/timesheet.rs
Normal file
52
src/cli/commands/timesheet.rs
Normal file
|
|
@ -0,0 +1,52 @@
|
||||||
|
use std::fs;
|
||||||
|
|
||||||
|
use walkdir::WalkDir;
|
||||||
|
|
||||||
|
use crate::config::Settings;
|
||||||
|
use crate::error::StreamdError;
|
||||||
|
use crate::extract::parse_markdown_file;
|
||||||
|
use crate::localize::localize_stream_file;
|
||||||
|
use crate::models::LocalizedShard;
|
||||||
|
use crate::timesheet::{extract_timesheets, BasicTimesheetConfiguration};
|
||||||
|
|
||||||
|
fn all_files() -> Result<Vec<LocalizedShard>, StreamdError> {
|
||||||
|
let settings = Settings::load()?;
|
||||||
|
let mut shards = Vec::new();
|
||||||
|
|
||||||
|
for entry in WalkDir::new(&settings.base_folder)
|
||||||
|
.max_depth(1)
|
||||||
|
.into_iter()
|
||||||
|
.filter_map(|e| e.ok())
|
||||||
|
{
|
||||||
|
let path = entry.path();
|
||||||
|
if path.extension().map(|e| e == "md").unwrap_or(false) {
|
||||||
|
let file_name = path.to_string_lossy().to_string();
|
||||||
|
let content = fs::read_to_string(path)?;
|
||||||
|
let stream_file = parse_markdown_file(&file_name, &content);
|
||||||
|
|
||||||
|
if let Ok(shard) = localize_stream_file(&stream_file, &BasicTimesheetConfiguration) {
|
||||||
|
shards.push(shard);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(shards)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn run() -> Result<(), StreamdError> {
|
||||||
|
let all_shards = all_files()?;
|
||||||
|
let mut sheets = extract_timesheets(&all_shards)?;
|
||||||
|
sheets.sort_by_key(|s| s.date);
|
||||||
|
|
||||||
|
for sheet in sheets {
|
||||||
|
println!("{}", sheet.date);
|
||||||
|
let times: Vec<String> = sheet
|
||||||
|
.timecards
|
||||||
|
.iter()
|
||||||
|
.map(|card| format!("{},{}", card.from_time, card.to_time))
|
||||||
|
.collect();
|
||||||
|
println!("{}", times.join(","));
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
56
src/cli/commands/todo.rs
Normal file
56
src/cli/commands/todo.rs
Normal file
|
|
@ -0,0 +1,56 @@
|
||||||
|
use std::fs;
|
||||||
|
|
||||||
|
use walkdir::WalkDir;
|
||||||
|
|
||||||
|
use crate::config::Settings;
|
||||||
|
use crate::error::StreamdError;
|
||||||
|
use crate::extract::parse_markdown_file;
|
||||||
|
use crate::localize::{localize_stream_file, TaskConfiguration};
|
||||||
|
use crate::models::LocalizedShard;
|
||||||
|
use crate::query::find_shard_by_position;
|
||||||
|
|
||||||
|
fn all_files() -> Result<Vec<LocalizedShard>, StreamdError> {
|
||||||
|
let settings = Settings::load()?;
|
||||||
|
let mut shards = Vec::new();
|
||||||
|
|
||||||
|
for entry in WalkDir::new(&settings.base_folder)
|
||||||
|
.max_depth(1)
|
||||||
|
.into_iter()
|
||||||
|
.filter_map(|e| e.ok())
|
||||||
|
{
|
||||||
|
let path = entry.path();
|
||||||
|
if path.extension().map(|e| e == "md").unwrap_or(false) {
|
||||||
|
let file_name = path.to_string_lossy().to_string();
|
||||||
|
let content = fs::read_to_string(path)?;
|
||||||
|
let stream_file = parse_markdown_file(&file_name, &content);
|
||||||
|
|
||||||
|
if let Ok(shard) = localize_stream_file(&stream_file, &TaskConfiguration) {
|
||||||
|
shards.push(shard);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(shards)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn run() -> Result<(), StreamdError> {
|
||||||
|
let all_shards = all_files()?;
|
||||||
|
|
||||||
|
for task_shard in find_shard_by_position(&all_shards, "task", "open") {
|
||||||
|
if let Some(file_path) = task_shard.location.get("file") {
|
||||||
|
let content = fs::read_to_string(file_path)?;
|
||||||
|
let lines: Vec<&str> = content.lines().collect();
|
||||||
|
|
||||||
|
let start = task_shard.start_line.saturating_sub(1);
|
||||||
|
let end = std::cmp::min(task_shard.end_line, lines.len());
|
||||||
|
|
||||||
|
println!("--- {}:{} ---", file_path, task_shard.start_line);
|
||||||
|
for line in &lines[start..end] {
|
||||||
|
println!("{}", line);
|
||||||
|
}
|
||||||
|
println!();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
4
src/cli/mod.rs
Normal file
4
src/cli/mod.rs
Normal file
|
|
@ -0,0 +1,4 @@
|
||||||
|
pub mod args;
|
||||||
|
pub mod commands;
|
||||||
|
|
||||||
|
pub use args::{Cli, Commands};
|
||||||
44
src/config.rs
Normal file
44
src/config.rs
Normal file
|
|
@ -0,0 +1,44 @@
|
||||||
|
use directories::ProjectDirs;
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use std::env;
|
||||||
|
use std::fs;
|
||||||
|
use std::path::PathBuf;
|
||||||
|
|
||||||
|
use crate::error::StreamdError;
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct Settings {
|
||||||
|
pub base_folder: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Default for Settings {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self {
|
||||||
|
base_folder: env::current_dir()
|
||||||
|
.map(|p| p.to_string_lossy().to_string())
|
||||||
|
.unwrap_or_else(|_| ".".to_string()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Settings {
|
||||||
|
pub fn load() -> Result<Self, StreamdError> {
|
||||||
|
let config_path = Self::config_path();
|
||||||
|
|
||||||
|
if config_path.exists() {
|
||||||
|
let content = fs::read_to_string(&config_path)?;
|
||||||
|
let settings: Settings = toml::from_str(&content)?;
|
||||||
|
Ok(settings)
|
||||||
|
} else {
|
||||||
|
Ok(Settings::default())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn config_path() -> PathBuf {
|
||||||
|
if let Some(proj_dirs) = ProjectDirs::from("", "", "streamd") {
|
||||||
|
proj_dirs.config_dir().join("config.toml")
|
||||||
|
} else {
|
||||||
|
PathBuf::from("~/.config/streamd/config.toml")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
25
src/error.rs
Normal file
25
src/error.rs
Normal file
|
|
@ -0,0 +1,25 @@
|
||||||
|
use thiserror::Error;
|
||||||
|
|
||||||
|
#[derive(Error, Debug)]
|
||||||
|
pub enum StreamdError {
|
||||||
|
#[error("Could not extract date from file name: {0}")]
|
||||||
|
DateExtractionError(String),
|
||||||
|
|
||||||
|
#[error("Timesheet error: {0}")]
|
||||||
|
TimesheetError(String),
|
||||||
|
|
||||||
|
#[error("Configuration error: {0}")]
|
||||||
|
ConfigError(String),
|
||||||
|
|
||||||
|
#[error("IO error: {0}")]
|
||||||
|
IoError(#[from] std::io::Error),
|
||||||
|
|
||||||
|
#[error("TOML error: {0}")]
|
||||||
|
TomlError(#[from] toml::de::Error),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<StreamdError> for miette::Report {
|
||||||
|
fn from(err: StreamdError) -> Self {
|
||||||
|
miette::Report::msg(err.to_string())
|
||||||
|
}
|
||||||
|
}
|
||||||
5
src/extract/mod.rs
Normal file
5
src/extract/mod.rs
Normal file
|
|
@ -0,0 +1,5 @@
|
||||||
|
mod parser;
|
||||||
|
mod tag_extraction;
|
||||||
|
|
||||||
|
pub use parser::parse_markdown_file;
|
||||||
|
pub use tag_extraction::{extract_markers_and_tags, has_markers};
|
||||||
739
src/extract/parser.rs
Normal file
739
src/extract/parser.rs
Normal file
|
|
@ -0,0 +1,739 @@
|
||||||
|
use std::collections::HashMap;
|
||||||
|
|
||||||
|
use pulldown_cmark::{Event, HeadingLevel, Options, Parser, Tag, TagEnd};
|
||||||
|
|
||||||
|
use crate::extract::tag_extraction::{extract_markers_and_tags, has_markers};
|
||||||
|
use crate::models::{Shard, StreamFile};
|
||||||
|
|
||||||
|
/// Information about a block element.
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
struct BlockInfo {
|
||||||
|
start_line: usize,
|
||||||
|
end_line: usize,
|
||||||
|
block_type: BlockType,
|
||||||
|
events: Vec<Event<'static>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, PartialEq)]
|
||||||
|
enum BlockType {
|
||||||
|
Paragraph,
|
||||||
|
Heading(usize),
|
||||||
|
List,
|
||||||
|
ListItem,
|
||||||
|
CodeBlock,
|
||||||
|
#[allow(dead_code)]
|
||||||
|
Other,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Build a shard, applying simplification rules.
|
||||||
|
/// If the shard has exactly one child with the same line range and no markers/tags,
|
||||||
|
/// return that child instead.
|
||||||
|
fn build_shard(
|
||||||
|
start_line: usize,
|
||||||
|
end_line: usize,
|
||||||
|
markers: Vec<String>,
|
||||||
|
tags: Vec<String>,
|
||||||
|
children: Vec<Shard>,
|
||||||
|
) -> Shard {
|
||||||
|
if children.len() == 1
|
||||||
|
&& tags.is_empty()
|
||||||
|
&& markers.is_empty()
|
||||||
|
&& children[0].start_line == start_line
|
||||||
|
&& children[0].end_line == end_line
|
||||||
|
{
|
||||||
|
return children.into_iter().next().unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
Shard {
|
||||||
|
markers,
|
||||||
|
tags,
|
||||||
|
start_line,
|
||||||
|
end_line,
|
||||||
|
children,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Merge shards where the first one becomes the parent with its markers/tags preserved.
|
||||||
|
fn merge_into_first_shard(
|
||||||
|
mut shards: Vec<Shard>,
|
||||||
|
start_line: usize,
|
||||||
|
end_line: usize,
|
||||||
|
additional_tags: Vec<String>,
|
||||||
|
) -> Shard {
|
||||||
|
if shards.is_empty() {
|
||||||
|
return build_shard(start_line, end_line, vec![], additional_tags, vec![]);
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut first = shards.remove(0);
|
||||||
|
first.start_line = start_line;
|
||||||
|
first.end_line = end_line;
|
||||||
|
first.children = shards;
|
||||||
|
first.tags.extend(additional_tags);
|
||||||
|
first
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Parse a markdown file into a StreamFile with shard structure.
|
||||||
|
pub fn parse_markdown_file(file_name: &str, file_content: &str) -> StreamFile {
|
||||||
|
let line_count = std::cmp::max(file_content.lines().count(), 1);
|
||||||
|
let end_line = line_count;
|
||||||
|
|
||||||
|
// Handle empty file
|
||||||
|
if file_content.is_empty() {
|
||||||
|
return StreamFile {
|
||||||
|
file_name: file_name.to_string(),
|
||||||
|
shard: Some(Shard::new(1, 1)),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse the markdown with offset tracking
|
||||||
|
let mut options = Options::empty();
|
||||||
|
options.insert(Options::ENABLE_STRIKETHROUGH);
|
||||||
|
let parser = Parser::new_ext(file_content, options);
|
||||||
|
|
||||||
|
// Collect blocks with their line information
|
||||||
|
let blocks = collect_blocks(file_content, parser);
|
||||||
|
|
||||||
|
// Parse into shard structure
|
||||||
|
let shard = if blocks.is_empty() {
|
||||||
|
Shard::new(1, end_line)
|
||||||
|
} else {
|
||||||
|
parse_header_shards(&blocks, 1, end_line, false).unwrap_or_else(|| Shard::new(1, end_line))
|
||||||
|
};
|
||||||
|
|
||||||
|
StreamFile {
|
||||||
|
file_name: file_name.to_string(),
|
||||||
|
shard: Some(shard),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Collect block-level elements from the parser.
|
||||||
|
fn collect_blocks(content: &str, parser: Parser) -> Vec<BlockInfo> {
|
||||||
|
let mut blocks = Vec::new();
|
||||||
|
let mut current_block: Option<BlockInfo> = None;
|
||||||
|
let _current_events: Vec<Event<'static>> = Vec::new();
|
||||||
|
let mut depth = 0;
|
||||||
|
let mut list_items: Vec<BlockInfo> = Vec::new();
|
||||||
|
let mut in_list = false;
|
||||||
|
let mut list_start_line = 0;
|
||||||
|
|
||||||
|
// Pre-compute line starts for offset-to-line mapping
|
||||||
|
let line_starts: Vec<usize> = std::iter::once(0)
|
||||||
|
.chain(content.match_indices('\n').map(|(i, _)| i + 1))
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
let offset_to_line =
|
||||||
|
|offset: usize| -> usize { line_starts.partition_point(|&start| start <= offset) };
|
||||||
|
|
||||||
|
for (event, range) in parser.into_offset_iter() {
|
||||||
|
let line = offset_to_line(range.start);
|
||||||
|
|
||||||
|
match &event {
|
||||||
|
Event::Start(Tag::Paragraph) => {
|
||||||
|
if depth == 0 {
|
||||||
|
current_block = Some(BlockInfo {
|
||||||
|
start_line: line,
|
||||||
|
end_line: line,
|
||||||
|
block_type: BlockType::Paragraph,
|
||||||
|
events: Vec::new(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
depth += 1;
|
||||||
|
if let Some(ref mut block) = current_block {
|
||||||
|
block.events.push(event.clone().into_static());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Event::End(TagEnd::Paragraph) => {
|
||||||
|
depth -= 1;
|
||||||
|
if let Some(ref mut block) = current_block {
|
||||||
|
block.events.push(event.clone().into_static());
|
||||||
|
block.end_line = line;
|
||||||
|
}
|
||||||
|
if depth == 0 {
|
||||||
|
if let Some(block) = current_block.take() {
|
||||||
|
if in_list {
|
||||||
|
list_items.push(block);
|
||||||
|
} else {
|
||||||
|
blocks.push(block);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Event::Start(Tag::Heading { level, .. }) => {
|
||||||
|
let heading_level = heading_level_to_usize(*level);
|
||||||
|
if depth == 0 {
|
||||||
|
current_block = Some(BlockInfo {
|
||||||
|
start_line: line,
|
||||||
|
end_line: line,
|
||||||
|
block_type: BlockType::Heading(heading_level),
|
||||||
|
events: Vec::new(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
depth += 1;
|
||||||
|
if let Some(ref mut block) = current_block {
|
||||||
|
block.events.push(event.clone().into_static());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Event::End(TagEnd::Heading(_)) => {
|
||||||
|
depth -= 1;
|
||||||
|
if let Some(ref mut block) = current_block {
|
||||||
|
block.events.push(event.clone().into_static());
|
||||||
|
block.end_line = line;
|
||||||
|
}
|
||||||
|
if depth == 0 {
|
||||||
|
if let Some(block) = current_block.take() {
|
||||||
|
blocks.push(block);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Event::Start(Tag::List(_)) => {
|
||||||
|
if !in_list {
|
||||||
|
in_list = true;
|
||||||
|
list_start_line = line;
|
||||||
|
list_items.clear();
|
||||||
|
}
|
||||||
|
depth += 1;
|
||||||
|
}
|
||||||
|
Event::End(TagEnd::List(_)) => {
|
||||||
|
depth -= 1;
|
||||||
|
if depth == 0 && in_list {
|
||||||
|
in_list = false;
|
||||||
|
// Create a list block containing all list items
|
||||||
|
if !list_items.is_empty() {
|
||||||
|
blocks.push(BlockInfo {
|
||||||
|
start_line: list_start_line,
|
||||||
|
end_line: line,
|
||||||
|
block_type: BlockType::List,
|
||||||
|
events: vec![], // List events are handled through list_items
|
||||||
|
});
|
||||||
|
// Store list items for later processing
|
||||||
|
for item in list_items.drain(..) {
|
||||||
|
blocks.push(BlockInfo {
|
||||||
|
block_type: BlockType::ListItem,
|
||||||
|
..item
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Event::Start(Tag::Item) => {
|
||||||
|
if in_list {
|
||||||
|
current_block = Some(BlockInfo {
|
||||||
|
start_line: line,
|
||||||
|
end_line: line,
|
||||||
|
block_type: BlockType::ListItem,
|
||||||
|
events: Vec::new(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Event::End(TagEnd::Item) => {
|
||||||
|
if let Some(ref mut block) = current_block {
|
||||||
|
block.end_line = line;
|
||||||
|
}
|
||||||
|
if let Some(block) = current_block.take() {
|
||||||
|
list_items.push(block);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Event::Start(Tag::CodeBlock(_)) => {
|
||||||
|
if depth == 0 {
|
||||||
|
current_block = Some(BlockInfo {
|
||||||
|
start_line: line,
|
||||||
|
end_line: line,
|
||||||
|
block_type: BlockType::CodeBlock,
|
||||||
|
events: Vec::new(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
depth += 1;
|
||||||
|
if let Some(ref mut block) = current_block {
|
||||||
|
block.events.push(event.clone().into_static());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Event::End(TagEnd::CodeBlock) => {
|
||||||
|
depth -= 1;
|
||||||
|
if let Some(ref mut block) = current_block {
|
||||||
|
block.events.push(event.clone().into_static());
|
||||||
|
block.end_line = line;
|
||||||
|
}
|
||||||
|
if depth == 0 {
|
||||||
|
if let Some(block) = current_block.take() {
|
||||||
|
blocks.push(block);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
if let Some(ref mut block) = current_block {
|
||||||
|
block.events.push(event.clone().into_static());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
blocks
|
||||||
|
}
|
||||||
|
|
||||||
|
fn heading_level_to_usize(level: HeadingLevel) -> usize {
|
||||||
|
match level {
|
||||||
|
HeadingLevel::H1 => 1,
|
||||||
|
HeadingLevel::H2 => 2,
|
||||||
|
HeadingLevel::H3 => 3,
|
||||||
|
HeadingLevel::H4 => 4,
|
||||||
|
HeadingLevel::H5 => 5,
|
||||||
|
HeadingLevel::H6 => 6,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Check if a block has markers.
|
||||||
|
fn block_has_markers(block: &BlockInfo) -> bool {
|
||||||
|
has_markers(block.events.iter().cloned())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Extract markers and tags from a block.
|
||||||
|
fn extract_block_markers_and_tags(block: &BlockInfo) -> (Vec<String>, Vec<String>) {
|
||||||
|
extract_markers_and_tags(block.events.iter().cloned())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Find positions of paragraph blocks that have markers.
|
||||||
|
fn find_paragraph_shard_positions(blocks: &[BlockInfo]) -> Vec<usize> {
|
||||||
|
blocks
|
||||||
|
.iter()
|
||||||
|
.enumerate()
|
||||||
|
.filter(|(_, block)| block.block_type == BlockType::Paragraph && block_has_markers(block))
|
||||||
|
.map(|(i, _)| i)
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Find positions of headings at a specific level.
|
||||||
|
fn find_headings_by_level(blocks: &[BlockInfo], level: usize) -> Vec<usize> {
|
||||||
|
blocks
|
||||||
|
.iter()
|
||||||
|
.enumerate()
|
||||||
|
.filter(|(_, block)| matches!(block.block_type, BlockType::Heading(l) if l == level))
|
||||||
|
.map(|(i, _)| i)
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Calculate the heading level to split on for the next parsing step.
|
||||||
|
fn calculate_heading_level_for_next_split(blocks: &[BlockInfo]) -> Option<usize> {
|
||||||
|
// Find heading levels that have markers (excluding first block)
|
||||||
|
let levels_with_markers: Vec<usize> = blocks[1..]
|
||||||
|
.iter()
|
||||||
|
.filter_map(|block| {
|
||||||
|
if let BlockType::Heading(level) = block.block_type {
|
||||||
|
if block_has_markers(block) {
|
||||||
|
return Some(level);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
None
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
if levels_with_markers.is_empty() {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Count headings at each level
|
||||||
|
let mut level_counts: HashMap<usize, usize> = HashMap::new();
|
||||||
|
for block in blocks {
|
||||||
|
if let BlockType::Heading(level) = block.block_type {
|
||||||
|
*level_counts.entry(level).or_insert(0) += 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Return the minimum level that either:
|
||||||
|
// - Has count >= 2
|
||||||
|
// - Has a marker (excluding first block)
|
||||||
|
let levels_with_multiple: Vec<usize> = level_counts
|
||||||
|
.into_iter()
|
||||||
|
.filter(|(_, count)| *count >= 2)
|
||||||
|
.map(|(level, _)| level)
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
let mut candidates = levels_with_multiple;
|
||||||
|
candidates.extend(levels_with_markers);
|
||||||
|
|
||||||
|
candidates.into_iter().min()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Split a slice into contiguous groups at the given index positions.
///
/// Positions are deduplicated and sorted; 0 and `items.len()` are always
/// treated as cut points. Empty groups are dropped from the result.
fn split_at<T: Clone>(items: &[T], positions: &[usize]) -> Vec<Vec<T>> {
    let mut cuts: Vec<usize> = Vec::with_capacity(positions.len() + 2);
    cuts.push(0);
    cuts.extend_from_slice(positions);
    cuts.push(items.len());
    cuts.sort_unstable();
    cuts.dedup();

    cuts.windows(2)
        .map(|pair| items[pair[0]..pair[1]].to_vec())
        .filter(|group| !group.is_empty())
        .collect()
}
|
||||||
|
|
||||||
|
/// Parse blocks into shard hierarchy based on headings.
///
/// Recursively splits `blocks` at the coarsest useful heading level, turning
/// each group into a child shard. When no heading split applies, falls back
/// to flat per-block parsing. When `use_first_child_as_header` is true the
/// first child (the group's own heading) becomes the parent shard.
fn parse_header_shards(
    blocks: &[BlockInfo],
    start_line: usize,
    end_line: usize,
    use_first_child_as_header: bool,
) -> Option<Shard> {
    if blocks.is_empty() {
        return Some(build_shard(start_line, end_line, vec![], vec![], vec![]));
    }

    let split_at_heading_level = calculate_heading_level_for_next_split(blocks);

    // No heading worth splitting on: parse the blocks flat, enforcing that
    // a shard is produced for this span.
    if split_at_heading_level.is_none() {
        return parse_multiple_block_shards(blocks, start_line, end_line, true).0;
    }

    let heading_level = split_at_heading_level.unwrap();
    let heading_positions = find_headings_by_level(blocks, heading_level);
    let block_groups = split_at(blocks, &heading_positions);

    let mut children = Vec::new();

    for (i, group) in block_groups.iter().enumerate() {
        if group.is_empty() {
            continue;
        }

        let child_start_line = group[0].start_line;
        // A child's range ends just before the next group begins, or at the
        // parent's end line for the final group.
        let child_end_line = if i + 1 < block_groups.len() && !block_groups[i + 1].is_empty() {
            block_groups[i + 1][0].start_line - 1
        } else {
            end_line
        };

        // Groups after the first (or a first group that itself starts with a
        // split heading) use their leading heading as the child's header.
        if let Some(child_shard) = parse_header_shards(
            group,
            child_start_line,
            child_end_line,
            i > 0 || heading_positions.contains(&0),
        ) {
            children.push(child_shard);
        }
    }

    if use_first_child_as_header && !children.is_empty() {
        Some(merge_into_first_shard(
            children,
            start_line,
            end_line,
            vec![],
        ))
    } else {
        Some(build_shard(start_line, end_line, vec![], vec![], children))
    }
}
|
||||||
|
|
||||||
|
/// Parse multiple blocks into shards.
///
/// Turns each block into an optional child shard and collects loose tags.
/// Returns the assembled parent shard (or `None` when nothing produced a
/// shard and `enforce_shard` is false) together with any tags not absorbed
/// into a shard.
fn parse_multiple_block_shards(
    blocks: &[BlockInfo],
    start_line: usize,
    end_line: usize,
    enforce_shard: bool,
) -> (Option<Shard>, Vec<String>) {
    if blocks.is_empty() {
        if enforce_shard {
            return (
                Some(build_shard(start_line, end_line, vec![], vec![], vec![])),
                vec![],
            );
        }
        return (None, vec![]);
    }

    // A leading heading with markers promotes the first child to parent below.
    let is_first_block_heading =
        matches!(blocks[0].block_type, BlockType::Heading(_)) && block_has_markers(&blocks[0]);

    let paragraph_positions = find_paragraph_shard_positions(blocks);
    let mut children = Vec::new();
    let mut tags = Vec::new();
    let mut is_first_block_only_with_marker = false;

    for (i, block) in blocks.iter().enumerate() {
        if paragraph_positions.contains(&i) {
            // NOTE(review): this flag is overwritten on every marker
            // paragraph, so it ends up true only when the LAST marker
            // paragraph is block 0 — i.e. block 0 is the only marker
            // paragraph. Confirm this reading of "only" is intended.
            is_first_block_only_with_marker = i == 0;
        }

        let child_start_line = block.start_line;
        // A child's range ends just before the next block begins, or at the
        // parent's end line for the final block.
        let child_end_line = if i + 1 < blocks.len() {
            blocks[i + 1].start_line - 1
        } else {
            end_line
        };

        let (child_shard, child_tags) =
            parse_single_block_shard(block, child_start_line, child_end_line);

        if let Some(shard) = child_shard {
            children.push(shard);
        }
        tags.extend(child_tags);
    }

    // Without children we report only the collected tags upward.
    if children.is_empty() && !enforce_shard {
        return (None, tags);
    }

    if is_first_block_heading || is_first_block_only_with_marker {
        (
            Some(merge_into_first_shard(children, start_line, end_line, tags)),
            vec![],
        )
    } else {
        (
            Some(build_shard(start_line, end_line, vec![], tags, children)),
            vec![],
        )
    }
}
|
||||||
|
|
||||||
|
/// Parse a single block into a shard.
|
||||||
|
fn parse_single_block_shard(
|
||||||
|
block: &BlockInfo,
|
||||||
|
start_line: usize,
|
||||||
|
end_line: usize,
|
||||||
|
) -> (Option<Shard>, Vec<String>) {
|
||||||
|
match block.block_type {
|
||||||
|
BlockType::Paragraph | BlockType::Heading(_) => {
|
||||||
|
let (markers, tags) = extract_block_markers_and_tags(block);
|
||||||
|
if markers.is_empty() {
|
||||||
|
(None, tags)
|
||||||
|
} else {
|
||||||
|
(
|
||||||
|
Some(build_shard(start_line, end_line, markers, tags, vec![])),
|
||||||
|
vec![],
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
BlockType::List | BlockType::ListItem => {
|
||||||
|
// List handling is complex - for now, extract any markers/tags
|
||||||
|
let (markers, tags) = extract_block_markers_and_tags(block);
|
||||||
|
if markers.is_empty() {
|
||||||
|
(None, tags)
|
||||||
|
} else {
|
||||||
|
(
|
||||||
|
Some(build_shard(start_line, end_line, markers, tags, vec![])),
|
||||||
|
vec![],
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_ => (None, vec![]),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;

    fn make_file_name() -> String {
        "test.md".to_string()
    }

    /// Wrap a shard in the expected `StreamFile` for `test.md`.
    fn expect_file(shard: Shard) -> StreamFile {
        StreamFile {
            file_name: make_file_name(),
            shard: Some(shard),
        }
    }

    /// Build a childless one-line shard with the given markers and tags.
    fn one_line_shard(markers: &[&str], tags: &[&str]) -> Shard {
        Shard {
            markers: markers.iter().map(|m| m.to_string()).collect(),
            tags: tags.iter().map(|t| t.to_string()).collect(),
            start_line: 1,
            end_line: 1,
            children: vec![],
        }
    }

    #[test]
    fn test_parse_empty_file() {
        let result = parse_markdown_file(&make_file_name(), "");
        assert_eq!(result, expect_file(Shard::new(1, 1)));
    }

    #[test]
    fn test_parse_basic_one_line_file() {
        let result = parse_markdown_file(&make_file_name(), "Hello World");
        assert_eq!(result, expect_file(Shard::new(1, 1)));
    }

    #[test]
    fn test_parse_basic_multi_line_file() {
        let result = parse_markdown_file(&make_file_name(), "Hello World\n\nHello again!");
        assert_eq!(result, expect_file(Shard::new(1, 3)));
    }

    #[test]
    fn test_parse_single_line_with_tag() {
        let result = parse_markdown_file(&make_file_name(), "@Tag Hello World");
        assert_eq!(result, expect_file(one_line_shard(&["Tag"], &[])));
    }

    #[test]
    fn test_parse_single_line_with_two_tags() {
        let result = parse_markdown_file(&make_file_name(), "@Marker1 @Marker2 Hello World");
        assert_eq!(
            result,
            expect_file(one_line_shard(&["Marker1", "Marker2"], &[]))
        );
    }

    #[test]
    fn test_parse_single_line_with_two_tags_and_misplaced_tag() {
        let result = parse_markdown_file(&make_file_name(), "@Tag1 @Tag2 Hello World @Tag3");
        assert_eq!(
            result,
            expect_file(one_line_shard(&["Tag1", "Tag2"], &["Tag3"]))
        );
    }

    #[test]
    fn test_parse_header_without_markers() {
        let result = parse_markdown_file(&make_file_name(), "# Heading\n\n## Subheading");
        assert_eq!(result, expect_file(Shard::new(1, 3)));
    }

    #[test]
    fn test_parse_ignores_tags_in_code() {
        let result = parse_markdown_file(&make_file_name(), "```\n@Marker\n```");
        assert_eq!(result, expect_file(Shard::new(1, 3)));
    }

    #[test]
    fn test_parse_finds_tags_in_italic_text() {
        let result = parse_markdown_file(&make_file_name(), "*@ItalicMarker*");
        assert_eq!(result, expect_file(one_line_shard(&["ItalicMarker"], &[])));
    }

    #[test]
    fn test_parse_finds_tags_in_bold_text() {
        let result = parse_markdown_file(&make_file_name(), "**@BoldMarker**");
        assert_eq!(result, expect_file(one_line_shard(&["BoldMarker"], &[])));
    }

    #[test]
    fn test_parse_finds_tags_in_strikethrough_text() {
        let result = parse_markdown_file(&make_file_name(), "~~@StrikeMarker~~");
        assert_eq!(result, expect_file(one_line_shard(&["StrikeMarker"], &[])));
    }

    #[test]
    fn test_parse_finds_tags_in_link() {
        let result = parse_markdown_file(&make_file_name(), "[@LinkMarker](https://example.com)");
        assert_eq!(result, expect_file(one_line_shard(&["LinkMarker"], &[])));
    }

    #[test]
    fn test_parse_continues_looking_for_markers_after_first_link_marker() {
        let result = parse_markdown_file(
            &make_file_name(),
            "[@LinkMarker1](https://example.com) [@LinkMarker2](https://example.com)",
        );
        assert_eq!(
            result,
            expect_file(one_line_shard(&["LinkMarker1", "LinkMarker2"], &[]))
        );
    }
}
|
||||||
219
src/extract/tag_extraction.rs
Normal file
219
src/extract/tag_extraction.rs
Normal file
|
|
@ -0,0 +1,219 @@
|
||||||
|
use once_cell::sync::Lazy;
|
||||||
|
use pulldown_cmark::{Event, Tag, TagEnd};
|
||||||
|
use regex::Regex;
|
||||||
|
|
||||||
|
/// Regex pattern for matching @Tags.
/// Matches @ followed by any characters except whitespace, *, `, ~, [, ]
static TAG_PATTERN: Lazy<Regex> = Lazy::new(|| Regex::new(r"@([^\s*`~\[\]]+)").unwrap());

/// Token type for tag extraction state machine.
#[derive(Debug, Clone)]
enum Token {
    /// An @-prefixed identifier, stored without the leading `@`.
    Tag(String),
    /// Non-whitespace text that is not a tag; crosses the marker/tag boundary.
    Content,
    /// A run of pure whitespace between tags/content.
    Whitespace,
}
|
||||||
|
|
||||||
|
/// Tokenizes text content into Tags, Content, and Whitespace tokens.
|
||||||
|
fn tokenize(text: &str) -> Vec<Token> {
|
||||||
|
let mut tokens = Vec::new();
|
||||||
|
let mut last_end = 0;
|
||||||
|
|
||||||
|
for mat in TAG_PATTERN.find_iter(text) {
|
||||||
|
// Handle content before the match
|
||||||
|
let before = &text[last_end..mat.start()];
|
||||||
|
if !before.is_empty() {
|
||||||
|
if before.chars().all(|c| c.is_whitespace()) {
|
||||||
|
tokens.push(Token::Whitespace);
|
||||||
|
} else {
|
||||||
|
tokens.push(Token::Content);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extract the tag name (without the @)
|
||||||
|
let tag_name = &text[mat.start() + 1..mat.end()];
|
||||||
|
tokens.push(Token::Tag(tag_name.to_string()));
|
||||||
|
last_end = mat.end();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle remaining content after last match
|
||||||
|
if last_end < text.len() {
|
||||||
|
let remaining = &text[last_end..];
|
||||||
|
if !remaining.is_empty() {
|
||||||
|
if remaining.chars().all(|c| c.is_whitespace()) {
|
||||||
|
tokens.push(Token::Whitespace);
|
||||||
|
} else {
|
||||||
|
tokens.push(Token::Content);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
tokens
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Extract markers and tags from a sequence of pulldown-cmark events.
///
/// Markers are @-prefixed identifiers that appear before any non-whitespace content.
/// Tags are @-prefixed identifiers that appear after content has started.
///
/// Text inside fenced code blocks and metadata blocks is ignored entirely;
/// inline code counts as content (it flips the boundary) but is not scanned
/// for tags.
///
/// Returns (markers, tags).
pub fn extract_markers_and_tags<'a>(
    events: impl Iterator<Item = Event<'a>>,
) -> (Vec<String>, Vec<String>) {
    let mut markers = Vec::new();
    let mut tags = Vec::new();
    // Set once real content has been seen; subsequent @Tags become tags.
    let mut boundary_crossed = false;
    // True while inside a code/metadata block, whose text is skipped.
    let mut in_code = false;

    for event in events {
        match event {
            Event::Start(Tag::CodeBlock(_)) | Event::Start(Tag::MetadataBlock(_)) => {
                in_code = true;
            }
            Event::End(TagEnd::CodeBlock) | Event::End(TagEnd::MetadataBlock(_)) => {
                in_code = false;
            }
            Event::Code(_) => {
                // Inline code is a content boundary but we don't extract tags from it
                boundary_crossed = true;
            }
            Event::Text(text) | Event::InlineHtml(text) if !in_code => {
                for token in tokenize(&text) {
                    match token {
                        // Whitespace never crosses the marker/tag boundary.
                        Token::Whitespace => {}
                        Token::Tag(name) => {
                            if boundary_crossed {
                                tags.push(name);
                            } else {
                                markers.push(name);
                            }
                        }
                        Token::Content => {
                            boundary_crossed = true;
                        }
                    }
                }
            }
            _ => {}
        }
    }

    (markers, tags)
}
|
||||||
|
|
||||||
|
/// Check if the events contain any markers (tags before content).
|
||||||
|
pub fn has_markers<'a>(events: impl Iterator<Item = Event<'a>>) -> bool {
|
||||||
|
let (markers, _) = extract_markers_and_tags(events);
|
||||||
|
!markers.is_empty()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;
    use pulldown_cmark::Parser;

    /// Parse `text` with strikethrough enabled and extract markers/tags.
    fn extract_from_text(text: &str) -> (Vec<String>, Vec<String>) {
        let mut options = pulldown_cmark::Options::empty();
        options.insert(pulldown_cmark::Options::ENABLE_STRIKETHROUGH);
        extract_markers_and_tags(Parser::new_ext(text, options))
    }

    /// Assert both extraction results in one call.
    fn check(text: &str, want_markers: &[&str], want_tags: &[&str]) {
        let (markers, tags) = extract_from_text(text);
        assert_eq!(markers, want_markers);
        assert_eq!(tags, want_tags);
    }

    #[test]
    fn test_extract_single_marker() {
        check("@Tag Hello World", &["Tag"], &[]);
    }

    #[test]
    fn test_extract_two_markers() {
        check("@Marker1 @Marker2 Hello World", &["Marker1", "Marker2"], &[]);
    }

    #[test]
    fn test_extract_markers_and_tags() {
        check("@Tag1 @Tag2 Hello World @Tag3", &["Tag1", "Tag2"], &["Tag3"]);
    }

    #[test]
    fn test_extract_inner_tags() {
        check("Hello @Tag1 World!", &[], &["Tag1"]);
    }

    #[test]
    fn test_extract_ignores_code_blocks() {
        check("```\n@Marker\n```", &[], &[]);
    }

    #[test]
    fn test_extract_italic_marker() {
        check("*@ItalicMarker*", &["ItalicMarker"], &[]);
    }

    #[test]
    fn test_extract_bold_marker() {
        check("**@BoldMarker**", &["BoldMarker"], &[]);
    }

    #[test]
    fn test_extract_strikethrough_marker() {
        check("~~@StrikeMarker~~", &["StrikeMarker"], &[]);
    }

    #[test]
    fn test_extract_link_marker() {
        check("[@LinkMarker](https://example.com)", &["LinkMarker"], &[]);
    }

    #[test]
    fn test_extract_multiple_link_markers() {
        check(
            "[@LinkMarker1](https://example.com) [@LinkMarker2](https://example.com)",
            &["LinkMarker1", "LinkMarker2"],
            &[],
        );
    }

    #[test]
    fn test_has_markers_true() {
        assert!(has_markers(Parser::new("@Tag Hello")));
    }

    #[test]
    fn test_has_markers_false() {
        assert!(!has_markers(Parser::new("Hello @Tag")));
    }

    #[test]
    fn test_empty_text() {
        check("", &[], &[]);
    }

    #[test]
    fn test_no_tags() {
        check("Hello World", &[], &[]);
    }
}
|
||||||
14
src/lib.rs
Normal file
14
src/lib.rs
Normal file
|
|
@ -0,0 +1,14 @@
|
||||||
|
pub mod cli;
|
||||||
|
pub mod config;
|
||||||
|
pub mod error;
|
||||||
|
pub mod extract;
|
||||||
|
pub mod localize;
|
||||||
|
pub mod models;
|
||||||
|
pub mod query;
|
||||||
|
pub mod timesheet;
|
||||||
|
|
||||||
|
pub use error::StreamdError;
|
||||||
|
pub use models::{
|
||||||
|
Dimension, LocalizedShard, Marker, MarkerPlacement, RepositoryConfiguration, Shard,
|
||||||
|
SpecialDayType, StreamFile, Timecard, Timesheet,
|
||||||
|
};
|
||||||
448
src/localize/configuration.rs
Normal file
448
src/localize/configuration.rs
Normal file
|
|
@ -0,0 +1,448 @@
|
||||||
|
use std::collections::BTreeSet;
|
||||||
|
|
||||||
|
use indexmap::IndexMap;
|
||||||
|
|
||||||
|
use crate::models::{Dimension, Marker, MarkerPlacement, RepositoryConfiguration};
|
||||||
|
|
||||||
|
/// Merge two dimensions, with the second taking precedence.
|
||||||
|
///
|
||||||
|
/// - display_name: second wins if non-empty, else base
|
||||||
|
/// - comment: second wins if not None, else base
|
||||||
|
/// - propagate: second wins if explicitly set, else base
|
||||||
|
pub fn merge_single_dimension(base: &Dimension, second: &Dimension) -> Dimension {
|
||||||
|
Dimension {
|
||||||
|
display_name: if second.display_name.is_empty() {
|
||||||
|
base.display_name.clone()
|
||||||
|
} else {
|
||||||
|
second.display_name.clone()
|
||||||
|
},
|
||||||
|
comment: if second.comment.is_some() {
|
||||||
|
second.comment.clone()
|
||||||
|
} else {
|
||||||
|
base.comment.clone()
|
||||||
|
},
|
||||||
|
propagate: if second.propagate_was_set {
|
||||||
|
second.propagate
|
||||||
|
} else {
|
||||||
|
base.propagate
|
||||||
|
},
|
||||||
|
propagate_was_set: second.propagate_was_set || base.propagate_was_set,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Merge two dimension maps.
|
||||||
|
pub fn merge_dimensions(
|
||||||
|
base: &IndexMap<String, Dimension>,
|
||||||
|
second: &IndexMap<String, Dimension>,
|
||||||
|
) -> IndexMap<String, Dimension> {
|
||||||
|
let mut merged = base.clone();
|
||||||
|
|
||||||
|
for (key, second_dim) in second {
|
||||||
|
if let Some(base_dim) = merged.get(key) {
|
||||||
|
merged.insert(key.clone(), merge_single_dimension(base_dim, second_dim));
|
||||||
|
} else {
|
||||||
|
merged.insert(key.clone(), second_dim.clone());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
merged
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create a placement identity tuple for deduplication.
|
||||||
|
/// We use BTreeSet to make it hashable and order-independent.
|
||||||
|
fn placement_identity(p: &MarkerPlacement) -> (BTreeSet<String>, String) {
|
||||||
|
(p.if_with.iter().cloned().collect(), p.dimension.clone())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Merge two markers, with the second taking precedence.
|
||||||
|
pub fn merge_single_marker(base: &Marker, second: &Marker) -> Marker {
|
||||||
|
let display_name = if second.display_name.is_empty() {
|
||||||
|
base.display_name.clone()
|
||||||
|
} else {
|
||||||
|
second.display_name.clone()
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut merged_placements: Vec<MarkerPlacement> = Vec::new();
|
||||||
|
let mut seen: IndexMap<(BTreeSet<String>, String), usize> = IndexMap::new();
|
||||||
|
|
||||||
|
for placement in &base.placements {
|
||||||
|
let ident = placement_identity(placement);
|
||||||
|
seen.insert(ident, merged_placements.len());
|
||||||
|
merged_placements.push(placement.clone());
|
||||||
|
}
|
||||||
|
|
||||||
|
for placement in &second.placements {
|
||||||
|
let ident = placement_identity(placement);
|
||||||
|
if let Some(&idx) = seen.get(&ident) {
|
||||||
|
merged_placements[idx] = placement.clone();
|
||||||
|
} else {
|
||||||
|
seen.insert(ident, merged_placements.len());
|
||||||
|
merged_placements.push(placement.clone());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Marker {
|
||||||
|
display_name,
|
||||||
|
placements: merged_placements,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Merge two marker maps.
|
||||||
|
pub fn merge_markers(
|
||||||
|
base: &IndexMap<String, Marker>,
|
||||||
|
second: &IndexMap<String, Marker>,
|
||||||
|
) -> IndexMap<String, Marker> {
|
||||||
|
let mut merged = base.clone();
|
||||||
|
|
||||||
|
for (key, second_marker) in second {
|
||||||
|
if let Some(base_marker) = merged.get(key) {
|
||||||
|
merged.insert(key.clone(), merge_single_marker(base_marker, second_marker));
|
||||||
|
} else {
|
||||||
|
merged.insert(key.clone(), second_marker.clone());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
merged
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Merge two repository configurations.
|
||||||
|
pub fn merge_repository_configuration(
|
||||||
|
base: &RepositoryConfiguration,
|
||||||
|
second: &RepositoryConfiguration,
|
||||||
|
) -> RepositoryConfiguration {
|
||||||
|
RepositoryConfiguration {
|
||||||
|
dimensions: merge_dimensions(&base.dimensions, &second.dimensions),
|
||||||
|
markers: merge_markers(&base.markers, &second.markers),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_second_overrides_display_name_when_non_empty() {
|
||||||
|
let base = Dimension::new("Base")
|
||||||
|
.with_comment("c1")
|
||||||
|
.with_propagate(true);
|
||||||
|
let second = Dimension::new("Second")
|
||||||
|
.with_comment("c2")
|
||||||
|
.with_propagate(false);
|
||||||
|
|
||||||
|
let merged = merge_single_dimension(&base, &second);
|
||||||
|
|
||||||
|
assert_eq!(merged.display_name, "Second");
|
||||||
|
assert_eq!(merged.comment, Some("c2".to_string()));
|
||||||
|
assert!(!merged.propagate);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_second_empty_display_name_falls_back_to_base() {
|
||||||
|
let base = Dimension::new("Base")
|
||||||
|
.with_comment("c1")
|
||||||
|
.with_propagate(true);
|
||||||
|
let second = Dimension::new("").with_comment("c2").with_propagate(false);
|
||||||
|
|
||||||
|
let merged = merge_single_dimension(&base, &second);
|
||||||
|
|
||||||
|
assert_eq!(merged.display_name, "Base");
|
||||||
|
assert_eq!(merged.comment, Some("c2".to_string()));
|
||||||
|
assert!(!merged.propagate);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_second_comment_none_does_not_erase_base_comment() {
|
||||||
|
let base = Dimension::new("Base")
|
||||||
|
.with_comment("keep")
|
||||||
|
.with_propagate(true);
|
||||||
|
let mut second = Dimension::new("Second");
|
||||||
|
second.propagate = false;
|
||||||
|
second.propagate_was_set = true;
|
||||||
|
|
||||||
|
let merged = merge_single_dimension(&base, &second);
|
||||||
|
|
||||||
|
assert_eq!(merged.display_name, "Second");
|
||||||
|
assert_eq!(merged.comment, Some("keep".to_string()));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_second_comment_non_none_overrides_base_comment() {
|
||||||
|
let base = Dimension::new("Base")
|
||||||
|
.with_comment("c1")
|
||||||
|
.with_propagate(true);
|
||||||
|
let second = Dimension::new("Second")
|
||||||
|
.with_comment("c2")
|
||||||
|
.with_propagate(true);
|
||||||
|
|
||||||
|
let merged = merge_single_dimension(&base, &second);
|
||||||
|
|
||||||
|
assert_eq!(merged.comment, Some("c2".to_string()));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_second_propagate_overrides_base_when_provided() {
|
||||||
|
let base = Dimension::new("Base")
|
||||||
|
.with_comment("c1")
|
||||||
|
.with_propagate(true);
|
||||||
|
let second = Dimension::new("Second")
|
||||||
|
.with_comment("c2")
|
||||||
|
.with_propagate(false);
|
||||||
|
|
||||||
|
let merged = merge_single_dimension(&base, &second);
|
||||||
|
|
||||||
|
assert!(!merged.propagate);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_propagate_merging_retains_base_when_second_not_provided() {
|
||||||
|
let base = Dimension::new("Base")
|
||||||
|
.with_comment("c1")
|
||||||
|
.with_propagate(true);
|
||||||
|
let second = Dimension::new("Second").with_comment("c2");
|
||||||
|
|
||||||
|
let merged = merge_single_dimension(&base, &second);
|
||||||
|
|
||||||
|
assert!(merged.propagate);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_adds_new_keys_from_second() {
|
||||||
|
let mut base = IndexMap::new();
|
||||||
|
base.insert("a".to_string(), Dimension::new("A").with_propagate(true));
|
||||||
|
|
||||||
|
let mut second = IndexMap::new();
|
||||||
|
second.insert("b".to_string(), Dimension::new("B").with_propagate(false));
|
||||||
|
|
||||||
|
let merged = merge_dimensions(&base, &second);
|
||||||
|
|
||||||
|
assert!(merged.contains_key("a"));
|
||||||
|
assert!(merged.contains_key("b"));
|
||||||
|
assert_eq!(merged["a"].display_name, "A");
|
||||||
|
assert_eq!(merged["b"].display_name, "B");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_merges_existing_keys() {
|
||||||
|
let mut base = IndexMap::new();
|
||||||
|
base.insert(
|
||||||
|
"a".to_string(),
|
||||||
|
Dimension::new("A").with_comment("c1").with_propagate(true),
|
||||||
|
);
|
||||||
|
|
||||||
|
let mut second = IndexMap::new();
|
||||||
|
second.insert("a".to_string(), Dimension::new("A2").with_propagate(false));
|
||||||
|
|
||||||
|
let merged = merge_dimensions(&base, &second);
|
||||||
|
|
||||||
|
assert_eq!(merged["a"].display_name, "A2");
|
||||||
|
assert_eq!(merged["a"].comment, Some("c1".to_string()));
|
||||||
|
assert!(!merged["a"].propagate);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_does_not_mutate_inputs() {
|
||||||
|
let mut base = IndexMap::new();
|
||||||
|
base.insert(
|
||||||
|
"a".to_string(),
|
||||||
|
Dimension::new("A").with_comment("c1").with_propagate(true),
|
||||||
|
);
|
||||||
|
|
||||||
|
let mut second = IndexMap::new();
|
||||||
|
second.insert(
|
||||||
|
"b".to_string(),
|
||||||
|
Dimension::new("B").with_comment("c2").with_propagate(false),
|
||||||
|
);
|
||||||
|
|
||||||
|
let merged = merge_dimensions(&base, &second);
|
||||||
|
|
||||||
|
assert!(!base.contains_key("b"));
|
||||||
|
assert!(!second.contains_key("a"));
|
||||||
|
assert!(merged.contains_key("a"));
|
||||||
|
assert!(merged.contains_key("b"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_second_marker_overrides_display_name_when_non_empty() {
|
||||||
|
let base = Marker::new("Base").with_placements(vec![MarkerPlacement::new("project")]);
|
||||||
|
let second = Marker::new("Second")
|
||||||
|
.with_placements(vec![MarkerPlacement::new("timesheet").with_value("coding")]);
|
||||||
|
|
||||||
|
let merged = merge_single_marker(&base, &second);
|
||||||
|
|
||||||
|
assert_eq!(merged.display_name, "Second");
|
||||||
|
assert_eq!(merged.placements.len(), 2);
|
||||||
|
assert_eq!(merged.placements[0].dimension, "project");
|
||||||
|
assert_eq!(merged.placements[1].dimension, "timesheet");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_second_marker_empty_display_name_falls_back_to_base() {
|
||||||
|
let base = Marker::new("Base").with_placements(vec![]);
|
||||||
|
let second = Marker::new("").with_placements(vec![]);
|
||||||
|
|
||||||
|
let merged = merge_single_marker(&base, &second);
|
||||||
|
|
||||||
|
assert_eq!(merged.display_name, "Base");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_appends_new_placements() {
|
||||||
|
let base = Marker::new("Base").with_placements(vec![MarkerPlacement::new("project")]);
|
||||||
|
let second = Marker::new("Second").with_placements(vec![MarkerPlacement::new("timesheet")
|
||||||
|
.with_if_with(vec!["Timesheet"])
|
||||||
|
.with_value("x")]);
|
||||||
|
|
||||||
|
let merged = merge_single_marker(&base, &second);
|
||||||
|
|
||||||
|
assert_eq!(merged.placements.len(), 2);
|
||||||
|
assert_eq!(merged.placements[0].dimension, "project");
|
||||||
|
assert_eq!(merged.placements[1].dimension, "timesheet");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_deduplicates_by_identity_and_second_overrides_base() {
|
||||||
|
let base = Marker::new("Base").with_placements(vec![
|
||||||
|
MarkerPlacement::new("d")
|
||||||
|
.with_if_with(vec!["A"])
|
||||||
|
.with_value("v"),
|
||||||
|
MarkerPlacement::new("d")
|
||||||
|
.with_if_with(vec!["B"])
|
||||||
|
.with_value("v2"),
|
||||||
|
]);
|
||||||
|
let second = Marker::new("Second").with_placements(vec![
|
||||||
|
MarkerPlacement::new("d")
|
||||||
|
.with_if_with(vec!["A"])
|
||||||
|
.with_value("v"),
|
||||||
|
MarkerPlacement::new("d")
|
||||||
|
.with_if_with(vec!["C"])
|
||||||
|
.with_value("v3"),
|
||||||
|
]);
|
||||||
|
|
||||||
|
let merged = merge_single_marker(&base, &second);
|
||||||
|
|
||||||
|
assert_eq!(merged.placements.len(), 3);
|
||||||
|
// First placement (A, d) should be from second
|
||||||
|
assert_eq!(
|
||||||
|
merged.placements[0].if_with.iter().collect::<Vec<_>>(),
|
||||||
|
vec!["A"]
|
||||||
|
);
|
||||||
|
// Second placement (B, d) should be from base
|
||||||
|
assert_eq!(
|
||||||
|
merged.placements[1].if_with.iter().collect::<Vec<_>>(),
|
||||||
|
vec!["B"]
|
||||||
|
);
|
||||||
|
// Third placement (C, d) should be from second
|
||||||
|
assert_eq!(
|
||||||
|
merged.placements[2].if_with.iter().collect::<Vec<_>>(),
|
||||||
|
vec!["C"]
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_identity_is_order_insensitive_for_if_with() {
|
||||||
|
let base = Marker::new("Base").with_placements(vec![MarkerPlacement::new("d")
|
||||||
|
.with_if_with(vec!["A", "B"])
|
||||||
|
.with_value("v")]);
|
||||||
|
let second = Marker::new("Second").with_placements(vec![MarkerPlacement::new("d")
|
||||||
|
.with_if_with(vec!["B", "A"])
|
||||||
|
.with_value("v2")]);
|
||||||
|
|
||||||
|
let merged = merge_single_marker(&base, &second);
|
||||||
|
|
||||||
|
// With if_with as a set, identity is order-insensitive; second overrides base.
|
||||||
|
assert_eq!(merged.placements.len(), 1);
|
||||||
|
assert_eq!(merged.placements[0].value, Some("v2".to_string()));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_adds_new_marker_keys_from_second() {
|
||||||
|
let mut base = IndexMap::new();
|
||||||
|
base.insert("M1".to_string(), Marker::new("M1").with_placements(vec![]));
|
||||||
|
|
||||||
|
let mut second = IndexMap::new();
|
||||||
|
second.insert("M2".to_string(), Marker::new("M2").with_placements(vec![]));
|
||||||
|
|
||||||
|
let merged = merge_markers(&base, &second);
|
||||||
|
|
||||||
|
assert!(merged.contains_key("M1"));
|
||||||
|
assert!(merged.contains_key("M2"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_merges_existing_marker_keys() {
|
||||||
|
let mut base = IndexMap::new();
|
||||||
|
base.insert(
|
||||||
|
"M".to_string(),
|
||||||
|
Marker::new("Base").with_placements(vec![MarkerPlacement::new("project")]),
|
||||||
|
);
|
||||||
|
|
||||||
|
let mut second = IndexMap::new();
|
||||||
|
second.insert(
|
||||||
|
"M".to_string(),
|
||||||
|
Marker::new("Second").with_placements(vec![MarkerPlacement::new("timesheet")
|
||||||
|
.with_if_with(vec!["Timesheet"])
|
||||||
|
.with_value("coding")]),
|
||||||
|
);
|
||||||
|
|
||||||
|
let merged = merge_markers(&base, &second);
|
||||||
|
|
||||||
|
assert_eq!(merged["M"].display_name, "Second");
|
||||||
|
assert_eq!(merged["M"].placements.len(), 2);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_merge_repository_configuration() {
|
||||||
|
let base = RepositoryConfiguration::new()
|
||||||
|
.with_dimension(
|
||||||
|
"project",
|
||||||
|
Dimension::new("Project")
|
||||||
|
.with_comment("c1")
|
||||||
|
.with_propagate(true),
|
||||||
|
)
|
||||||
|
.with_dimension(
|
||||||
|
"moment",
|
||||||
|
Dimension::new("Moment")
|
||||||
|
.with_comment("c2")
|
||||||
|
.with_propagate(true),
|
||||||
|
)
|
||||||
|
.with_marker(
|
||||||
|
"Streamd",
|
||||||
|
Marker::new("Streamd").with_placements(vec![MarkerPlacement::new("project")]),
|
||||||
|
);
|
||||||
|
|
||||||
|
let second = RepositoryConfiguration::new()
|
||||||
|
.with_dimension("project", Dimension::new("Project2").with_propagate(false))
|
||||||
|
.with_dimension(
|
||||||
|
"timesheet",
|
||||||
|
Dimension::new("Timesheet")
|
||||||
|
.with_comment("c3")
|
||||||
|
.with_propagate(false),
|
||||||
|
)
|
||||||
|
.with_marker(
|
||||||
|
"Streamd",
|
||||||
|
Marker::new("Streamd2").with_placements(vec![MarkerPlacement::new("timesheet")
|
||||||
|
.with_if_with(vec!["Timesheet"])
|
||||||
|
.with_value("coding")]),
|
||||||
|
)
|
||||||
|
.with_marker(
|
||||||
|
"JobHunting",
|
||||||
|
Marker::new("JobHunting").with_placements(vec![MarkerPlacement::new("project")]),
|
||||||
|
);
|
||||||
|
|
||||||
|
let merged = merge_repository_configuration(&base, &second);
|
||||||
|
|
||||||
|
assert!(merged.dimensions.contains_key("project"));
|
||||||
|
assert!(merged.dimensions.contains_key("moment"));
|
||||||
|
assert!(merged.dimensions.contains_key("timesheet"));
|
||||||
|
assert_eq!(merged.dimensions["project"].display_name, "Project2");
|
||||||
|
assert_eq!(merged.dimensions["project"].comment, Some("c1".to_string()));
|
||||||
|
assert!(!merged.dimensions["project"].propagate);
|
||||||
|
assert_eq!(merged.dimensions["moment"].display_name, "Moment");
|
||||||
|
assert_eq!(merged.dimensions["timesheet"].display_name, "Timesheet");
|
||||||
|
|
||||||
|
assert!(merged.markers.contains_key("Streamd"));
|
||||||
|
assert!(merged.markers.contains_key("JobHunting"));
|
||||||
|
assert_eq!(merged.markers["Streamd"].display_name, "Streamd2");
|
||||||
|
assert_eq!(merged.markers["Streamd"].placements.len(), 2);
|
||||||
|
}
|
||||||
|
}
|
||||||
365
src/localize/datetime.rs
Normal file
365
src/localize/datetime.rs
Normal file
|
|
@ -0,0 +1,365 @@
|
||||||
|
use chrono::{DateTime, NaiveDate, NaiveDateTime, NaiveTime, Utc};
|
||||||
|
use once_cell::sync::Lazy;
|
||||||
|
use regex::Regex;
|
||||||
|
use std::path::Path;
|
||||||
|
|
||||||
|
/// Regex for extracting date and optional time from file names.
|
||||||
|
/// Format: YYYYMMDD or YYYYMMDD-HHMMSS (time can be 4-6 digits)
|
||||||
|
static FILE_NAME_REGEX: Lazy<Regex> =
|
||||||
|
Lazy::new(|| Regex::new(r"^(?P<date>\d{8})(?:-(?P<time>\d{4,6}))?.+\.md$").unwrap());
|
||||||
|
|
||||||
|
/// Regex for validating datetime marker format (14 digits).
|
||||||
|
static DATETIME_MARKER_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"^\d{14}$").unwrap());
|
||||||
|
|
||||||
|
/// Regex for validating date marker format (8 digits).
|
||||||
|
static DATE_MARKER_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"^\d{8}$").unwrap());
|
||||||
|
|
||||||
|
/// Regex for validating time marker format (6 digits).
|
||||||
|
static TIME_MARKER_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"^\d{6}$").unwrap());
|
||||||
|
|
||||||
|
/// Extract a datetime from a file name in the format YYYYMMDD-HHMMSS.
|
||||||
|
///
|
||||||
|
/// The time component is optional and can be 4-6 digits (HHMM, HHMMS, or HHMMSS).
|
||||||
|
///
|
||||||
|
/// # Examples
|
||||||
|
/// - "20230101-123456 Some Text.md" -> DateTime for 2023-01-01 12:34:56
|
||||||
|
/// - "20230101 Some Text.md" -> DateTime for 2023-01-01 00:00:00
|
||||||
|
/// - "invalid-file-name.md" -> None
|
||||||
|
pub fn extract_datetime_from_file_name(file_name: &str) -> Option<DateTime<Utc>> {
|
||||||
|
let base_name = Path::new(file_name)
|
||||||
|
.file_name()
|
||||||
|
.and_then(|s| s.to_str())
|
||||||
|
.unwrap_or(file_name);
|
||||||
|
|
||||||
|
let captures = FILE_NAME_REGEX.captures(base_name)?;
|
||||||
|
let date_str = captures.name("date")?.as_str();
|
||||||
|
let time_str = captures.name("time").map(|m| m.as_str()).unwrap_or("");
|
||||||
|
|
||||||
|
// Pad time string to 6 digits
|
||||||
|
let time_str = format!("{:0<6}", time_str);
|
||||||
|
|
||||||
|
let datetime_str = format!(
|
||||||
|
"{} {}:{}:{}",
|
||||||
|
date_str,
|
||||||
|
&time_str[0..2],
|
||||||
|
&time_str[2..4],
|
||||||
|
&time_str[4..6]
|
||||||
|
);
|
||||||
|
|
||||||
|
NaiveDateTime::parse_from_str(&datetime_str, "%Y%m%d %H:%M:%S")
|
||||||
|
.ok()
|
||||||
|
.map(|dt| dt.and_utc())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Extract a datetime from a marker string in the exact format: YYYYMMDDHHMMSS.
|
||||||
|
///
|
||||||
|
/// Returns the parsed datetime if the format matches and values are valid.
|
||||||
|
pub fn extract_datetime_from_marker(marker: &str) -> Option<DateTime<Utc>> {
|
||||||
|
if !DATETIME_MARKER_REGEX.is_match(marker) {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
|
||||||
|
NaiveDateTime::parse_from_str(marker, "%Y%m%d%H%M%S")
|
||||||
|
.ok()
|
||||||
|
.map(|dt| dt.and_utc())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Extract a date from a marker string in the exact format: YYYYMMDD.
|
||||||
|
///
|
||||||
|
/// Returns the parsed date if the format matches and values are valid.
|
||||||
|
pub fn extract_date_from_marker(marker: &str) -> Option<NaiveDate> {
|
||||||
|
if !DATE_MARKER_REGEX.is_match(marker) {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
|
||||||
|
NaiveDate::parse_from_str(marker, "%Y%m%d").ok()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Extract a time from a marker string in the exact format: HHMMSS.
|
||||||
|
///
|
||||||
|
/// Returns the parsed time if the format matches and values are valid.
|
||||||
|
pub fn extract_time_from_marker(marker: &str) -> Option<NaiveTime> {
|
||||||
|
if !TIME_MARKER_REGEX.is_match(marker) {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
|
||||||
|
NaiveTime::parse_from_str(marker, "%H%M%S").ok()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Extract a datetime from a list of markers, using an inherited datetime as fallback.
|
||||||
|
///
|
||||||
|
/// The function processes markers in reverse order, allowing later markers to override
|
||||||
|
/// earlier ones. It combines date-only and time-only markers when both are present.
|
||||||
|
///
|
||||||
|
/// Rules:
|
||||||
|
/// - If a full datetime marker (14 digits) is found, it sets both date and time
|
||||||
|
/// - If only a date marker is found, the time defaults to midnight
|
||||||
|
/// - If only a time marker is found, the date is inherited
|
||||||
|
/// - If no valid markers are found, the inherited datetime is returned
|
||||||
|
pub fn extract_datetime_from_marker_list(
|
||||||
|
markers: &[String],
|
||||||
|
inherited_datetime: DateTime<Utc>,
|
||||||
|
) -> DateTime<Utc> {
|
||||||
|
let mut shard_time: Option<NaiveTime> = None;
|
||||||
|
let mut shard_date: Option<NaiveDate> = None;
|
||||||
|
|
||||||
|
// Process markers in reverse order (last wins)
|
||||||
|
for marker in markers.iter().rev() {
|
||||||
|
if let Some(time) = extract_time_from_marker(marker) {
|
||||||
|
shard_time = Some(time);
|
||||||
|
}
|
||||||
|
if let Some(date) = extract_date_from_marker(marker) {
|
||||||
|
shard_date = Some(date);
|
||||||
|
}
|
||||||
|
if let Some(datetime) = extract_datetime_from_marker(marker) {
|
||||||
|
shard_date = Some(datetime.naive_utc().date());
|
||||||
|
shard_time = Some(datetime.naive_utc().time());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Combine date and time, applying defaults as needed
|
||||||
|
let final_date = shard_date.unwrap_or_else(|| inherited_datetime.naive_utc().date());
|
||||||
|
let final_time = match (shard_date, shard_time) {
|
||||||
|
// If we have a date but no time, use midnight
|
||||||
|
(Some(_), None) => NaiveTime::from_hms_opt(0, 0, 0).unwrap(),
|
||||||
|
// Otherwise use the shard time or inherit
|
||||||
|
_ => shard_time.unwrap_or_else(|| inherited_datetime.naive_utc().time()),
|
||||||
|
};
|
||||||
|
|
||||||
|
NaiveDateTime::new(final_date, final_time).and_utc()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
use chrono::TimeZone;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_extract_date_from_file_name_valid() {
|
||||||
|
let file_name = "20230101-123456 Some Text.md";
|
||||||
|
assert_eq!(
|
||||||
|
extract_datetime_from_file_name(file_name),
|
||||||
|
Some(Utc.with_ymd_and_hms(2023, 1, 1, 12, 34, 56).unwrap())
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_extract_date_from_file_name_invalid() {
|
||||||
|
let file_name = "invalid-file-name.md";
|
||||||
|
assert_eq!(extract_datetime_from_file_name(file_name), None);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_extract_date_from_file_name_without_time() {
|
||||||
|
let file_name = "20230101 Some Text.md";
|
||||||
|
assert_eq!(
|
||||||
|
extract_datetime_from_file_name(file_name),
|
||||||
|
Some(Utc.with_ymd_and_hms(2023, 1, 1, 0, 0, 0).unwrap())
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_extract_date_from_file_name_short_time() {
|
||||||
|
let file_name = "20230101-1234 Some Text.md";
|
||||||
|
assert_eq!(
|
||||||
|
extract_datetime_from_file_name(file_name),
|
||||||
|
Some(Utc.with_ymd_and_hms(2023, 1, 1, 12, 34, 0).unwrap())
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_extract_date_from_file_name_empty_string() {
|
||||||
|
let file_name = "";
|
||||||
|
assert_eq!(extract_datetime_from_file_name(file_name), None);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_extract_date_from_file_name_with_full_path() {
|
||||||
|
let file_name = "/path/to/20230101-123456 Some Text.md";
|
||||||
|
assert_eq!(
|
||||||
|
extract_datetime_from_file_name(file_name),
|
||||||
|
Some(Utc.with_ymd_and_hms(2023, 1, 1, 12, 34, 56).unwrap())
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_extract_datetime_from_marker_valid() {
|
||||||
|
let marker = "20250101150000";
|
||||||
|
assert_eq!(
|
||||||
|
extract_datetime_from_marker(marker),
|
||||||
|
Some(Utc.with_ymd_and_hms(2025, 1, 1, 15, 0, 0).unwrap())
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_extract_datetime_from_marker_invalid_format() {
|
||||||
|
assert_eq!(extract_datetime_from_marker("2025010115000"), None); // too short
|
||||||
|
assert_eq!(extract_datetime_from_marker("202501011500000"), None); // too long
|
||||||
|
assert_eq!(extract_datetime_from_marker("2025-01-01T150000"), None); // separators
|
||||||
|
assert_eq!(extract_datetime_from_marker("2025010115000a"), None); // non-digit
|
||||||
|
assert_eq!(extract_datetime_from_marker(""), None);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_extract_datetime_from_marker_invalid_values() {
|
||||||
|
assert_eq!(extract_datetime_from_marker("20250230120000"), None); // Feb 30
|
||||||
|
assert_eq!(extract_datetime_from_marker("20250101126000"), None); // minute 60
|
||||||
|
assert_eq!(extract_datetime_from_marker("20250101240000"), None); // hour 24
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_extract_date_from_marker_valid() {
|
||||||
|
let marker = "20250101";
|
||||||
|
assert_eq!(
|
||||||
|
extract_date_from_marker(marker),
|
||||||
|
Some(NaiveDate::from_ymd_opt(2025, 1, 1).unwrap())
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_extract_date_from_marker_invalid_format() {
|
||||||
|
assert_eq!(extract_date_from_marker("2025010"), None); // too short
|
||||||
|
assert_eq!(extract_date_from_marker("202501011"), None); // too long
|
||||||
|
assert_eq!(extract_date_from_marker("2025-01-01"), None); // separators
|
||||||
|
assert_eq!(extract_date_from_marker("2025010a"), None); // non-digit
|
||||||
|
assert_eq!(extract_date_from_marker(""), None);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_extract_date_from_marker_invalid_values() {
|
||||||
|
assert_eq!(extract_date_from_marker("20250230"), None); // Feb 30
|
||||||
|
assert_eq!(extract_date_from_marker("20251301"), None); // month 13
|
||||||
|
assert_eq!(extract_date_from_marker("20250132"), None); // day 32
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_extract_time_from_marker_valid() {
|
||||||
|
let marker = "150000";
|
||||||
|
assert_eq!(
|
||||||
|
extract_time_from_marker(marker),
|
||||||
|
Some(NaiveTime::from_hms_opt(15, 0, 0).unwrap())
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_extract_time_from_marker_invalid_format() {
|
||||||
|
assert_eq!(extract_time_from_marker("15000"), None); // too short
|
||||||
|
assert_eq!(extract_time_from_marker("1500000"), None); // too long
|
||||||
|
assert_eq!(extract_time_from_marker("15:00:00"), None); // separators
|
||||||
|
assert_eq!(extract_time_from_marker("15000a"), None); // non-digit
|
||||||
|
assert_eq!(extract_time_from_marker(""), None);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_extract_time_from_marker_invalid_values() {
|
||||||
|
assert_eq!(extract_time_from_marker("240000"), None); // hour 24
|
||||||
|
assert_eq!(extract_time_from_marker("156000"), None); // minute 60
|
||||||
|
// Note: chrono allows leap seconds (60), so 150060 is valid
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_no_markers_inherits_datetime() {
|
||||||
|
let inherited = Utc.with_ymd_and_hms(2025, 1, 2, 3, 4, 5).unwrap();
|
||||||
|
assert_eq!(extract_datetime_from_marker_list(&[], inherited), inherited);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_unrelated_markers_inherits_datetime() {
|
||||||
|
let inherited = Utc.with_ymd_and_hms(2025, 1, 2, 3, 4, 5).unwrap();
|
||||||
|
let markers: Vec<String> = vec![
|
||||||
|
"not-a-marker".to_string(),
|
||||||
|
"2025-01-01".to_string(),
|
||||||
|
"1500".to_string(),
|
||||||
|
"1234567".to_string(),
|
||||||
|
];
|
||||||
|
assert_eq!(
|
||||||
|
extract_datetime_from_marker_list(&markers, inherited),
|
||||||
|
inherited
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_date_only_marker_sets_midnight() {
|
||||||
|
let inherited = Utc.with_ymd_and_hms(2025, 6, 7, 8, 9, 10).unwrap();
|
||||||
|
let markers = vec!["20250101".to_string()];
|
||||||
|
assert_eq!(
|
||||||
|
extract_datetime_from_marker_list(&markers, inherited),
|
||||||
|
Utc.with_ymd_and_hms(2025, 1, 1, 0, 0, 0).unwrap()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_time_only_marker_inherits_date() {
|
||||||
|
let inherited = Utc.with_ymd_and_hms(2025, 6, 7, 8, 9, 10).unwrap();
|
||||||
|
let markers = vec!["150000".to_string()];
|
||||||
|
assert_eq!(
|
||||||
|
extract_datetime_from_marker_list(&markers, inherited),
|
||||||
|
Utc.with_ymd_and_hms(2025, 6, 7, 15, 0, 0).unwrap()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_datetime_marker_overrides_both_date_and_time() {
|
||||||
|
let inherited = Utc.with_ymd_and_hms(2025, 6, 7, 8, 9, 10).unwrap();
|
||||||
|
let markers = vec!["20250101150000".to_string()];
|
||||||
|
assert_eq!(
|
||||||
|
extract_datetime_from_marker_list(&markers, inherited),
|
||||||
|
Utc.with_ymd_and_hms(2025, 1, 1, 15, 0, 0).unwrap()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_combined_date_and_time_markers() {
|
||||||
|
let inherited = Utc.with_ymd_and_hms(2025, 6, 7, 8, 9, 10).unwrap();
|
||||||
|
let markers = vec!["20250101".to_string(), "150000".to_string()];
|
||||||
|
assert_eq!(
|
||||||
|
extract_datetime_from_marker_list(&markers, inherited),
|
||||||
|
Utc.with_ymd_and_hms(2025, 1, 1, 15, 0, 0).unwrap()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_first_marker_wins_when_multiple_dates_or_times() {
|
||||||
|
let inherited = Utc.with_ymd_and_hms(2025, 6, 7, 8, 9, 10).unwrap();
|
||||||
|
let markers = vec![
|
||||||
|
"20250101".to_string(),
|
||||||
|
"150000".to_string(),
|
||||||
|
"20250102".to_string(),
|
||||||
|
"160000".to_string(),
|
||||||
|
];
|
||||||
|
assert_eq!(
|
||||||
|
extract_datetime_from_marker_list(&markers, inherited),
|
||||||
|
Utc.with_ymd_and_hms(2025, 1, 1, 15, 0, 0).unwrap()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_last_separated_date_and_time_win() {
|
||||||
|
let inherited = Utc.with_ymd_and_hms(2025, 6, 7, 8, 9, 10).unwrap();
|
||||||
|
let markers = vec![
|
||||||
|
"20250101".to_string(),
|
||||||
|
"150000".to_string(),
|
||||||
|
"20250102160000".to_string(),
|
||||||
|
];
|
||||||
|
// The first date (20250101) and first time (150000) should win over the later combined datetime
|
||||||
|
assert_eq!(
|
||||||
|
extract_datetime_from_marker_list(&markers, inherited),
|
||||||
|
Utc.with_ymd_and_hms(2025, 1, 1, 15, 0, 0).unwrap()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_invalid_date_or_time_markers_are_ignored() {
|
||||||
|
let inherited = Utc.with_ymd_and_hms(2025, 6, 7, 8, 9, 10).unwrap();
|
||||||
|
let markers = vec![
|
||||||
|
"20251301".to_string(), // invalid month
|
||||||
|
"240000".to_string(), // invalid hour
|
||||||
|
"20250101".to_string(), // valid
|
||||||
|
"150000".to_string(), // valid
|
||||||
|
];
|
||||||
|
assert_eq!(
|
||||||
|
extract_datetime_from_marker_list(&markers, inherited),
|
||||||
|
Utc.with_ymd_and_hms(2025, 1, 1, 15, 0, 0).unwrap()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
15
src/localize/mod.rs
Normal file
15
src/localize/mod.rs
Normal file
|
|
@ -0,0 +1,15 @@
|
||||||
|
mod configuration;
|
||||||
|
mod datetime;
|
||||||
|
mod preconfigured;
|
||||||
|
mod shard;
|
||||||
|
|
||||||
|
pub use configuration::{
|
||||||
|
merge_dimensions, merge_markers, merge_repository_configuration, merge_single_dimension,
|
||||||
|
merge_single_marker,
|
||||||
|
};
|
||||||
|
pub use datetime::{
|
||||||
|
extract_date_from_marker, extract_datetime_from_file_name, extract_datetime_from_marker,
|
||||||
|
extract_datetime_from_marker_list, extract_time_from_marker,
|
||||||
|
};
|
||||||
|
pub use preconfigured::TaskConfiguration;
|
||||||
|
pub use shard::{localize_shard, localize_stream_file};
|
||||||
46
src/localize/preconfigured.rs
Normal file
46
src/localize/preconfigured.rs
Normal file
|
|
@ -0,0 +1,46 @@
|
||||||
|
use once_cell::sync::Lazy;
|
||||||
|
|
||||||
|
use crate::models::{Dimension, Marker, MarkerPlacement, RepositoryConfiguration};
|
||||||
|
|
||||||
|
/// Pre-configured repository configuration for task management.
|
||||||
|
#[allow(non_upper_case_globals)]
|
||||||
|
pub static TaskConfiguration: Lazy<RepositoryConfiguration> = Lazy::new(|| {
|
||||||
|
RepositoryConfiguration::new()
|
||||||
|
.with_dimension(
|
||||||
|
"task",
|
||||||
|
Dimension::new("Task")
|
||||||
|
.with_comment(
|
||||||
|
"If placed, the given shard is a task. The placement determines the state.",
|
||||||
|
)
|
||||||
|
.with_propagate(false),
|
||||||
|
)
|
||||||
|
.with_dimension(
|
||||||
|
"project",
|
||||||
|
Dimension::new("Project")
|
||||||
|
.with_comment("Project the task is attached to")
|
||||||
|
.with_propagate(true),
|
||||||
|
)
|
||||||
|
.with_marker(
|
||||||
|
"Task",
|
||||||
|
Marker::new("Task").with_placements(vec![
|
||||||
|
MarkerPlacement::new("task").with_value("open"),
|
||||||
|
MarkerPlacement::new("task")
|
||||||
|
.with_if_with(vec!["Done"])
|
||||||
|
.with_value("done"),
|
||||||
|
MarkerPlacement::new("task")
|
||||||
|
.with_if_with(vec!["Waiting"])
|
||||||
|
.with_value("waiting"),
|
||||||
|
MarkerPlacement::new("task")
|
||||||
|
.with_if_with(vec!["Cancelled"])
|
||||||
|
.with_value("cancelled"),
|
||||||
|
MarkerPlacement::new("task")
|
||||||
|
.with_if_with(vec!["NotDone"])
|
||||||
|
.with_value("cancelled"),
|
||||||
|
]),
|
||||||
|
)
|
||||||
|
.with_marker(
|
||||||
|
"WaitingFor",
|
||||||
|
Marker::new("Task")
|
||||||
|
.with_placements(vec![MarkerPlacement::new("task").with_value("waiting")]),
|
||||||
|
)
|
||||||
|
});
|
||||||
282
src/localize/shard.rs
Normal file
282
src/localize/shard.rs
Normal file
|
|
@ -0,0 +1,282 @@
|
||||||
|
use chrono::{DateTime, Utc};
|
||||||
|
use indexmap::{IndexMap, IndexSet};
|
||||||
|
|
||||||
|
use crate::error::StreamdError;
|
||||||
|
use crate::models::{LocalizedShard, RepositoryConfiguration, Shard, StreamFile};
|
||||||
|
|
||||||
|
use super::datetime::{extract_datetime_from_file_name, extract_datetime_from_marker_list};
|
||||||
|
|
||||||
|
/// Localize a shard within the repository's coordinate system.
|
||||||
|
///
|
||||||
|
/// This function:
|
||||||
|
/// 1. Extracts datetime from markers
|
||||||
|
/// 2. Applies marker placements to determine dimensional position
|
||||||
|
/// 3. Propagates dimensional values to children based on dimension configuration
|
||||||
|
pub fn localize_shard(
|
||||||
|
shard: &Shard,
|
||||||
|
config: &RepositoryConfiguration,
|
||||||
|
propagated: &IndexMap<String, String>,
|
||||||
|
moment: DateTime<Utc>,
|
||||||
|
) -> LocalizedShard {
|
||||||
|
let mut position = propagated.clone();
|
||||||
|
let mut private_position: IndexMap<String, String> = IndexMap::new();
|
||||||
|
|
||||||
|
// Extract datetime from markers
|
||||||
|
let adjusted_moment = extract_datetime_from_marker_list(&shard.markers, moment);
|
||||||
|
|
||||||
|
// Convert markers to a set for if_with checking
|
||||||
|
let marker_set: IndexSet<String> = shard.markers.iter().cloned().collect();
|
||||||
|
|
||||||
|
// Process each marker and its placements
|
||||||
|
for marker in &shard.markers {
|
||||||
|
if let Some(marker_def) = config.markers.get(marker) {
|
||||||
|
for placement in &marker_def.placements {
|
||||||
|
// Check if_with condition
|
||||||
|
if !placement.if_with.is_subset(&marker_set) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get the dimension configuration
|
||||||
|
let dimension = match config.dimensions.get(&placement.dimension) {
|
||||||
|
Some(d) => d,
|
||||||
|
None => continue,
|
||||||
|
};
|
||||||
|
|
||||||
|
let value = placement.value.clone().unwrap_or_else(|| marker.clone());
|
||||||
|
|
||||||
|
// Check if we should place the value
|
||||||
|
let should_place = placement.overwrites
|
||||||
|
|| (!position.contains_key(&placement.dimension)
|
||||||
|
&& !private_position.contains_key(&placement.dimension));
|
||||||
|
|
||||||
|
if should_place {
|
||||||
|
if dimension.propagate {
|
||||||
|
position.insert(placement.dimension.clone(), value);
|
||||||
|
} else {
|
||||||
|
private_position.insert(placement.dimension.clone(), value);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Recursively localize children with propagated position
|
||||||
|
let children: Vec<LocalizedShard> = shard
|
||||||
|
.children
|
||||||
|
.iter()
|
||||||
|
.map(|child| localize_shard(child, config, &position, adjusted_moment))
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
// Merge private position into final position
|
||||||
|
position.extend(private_position);
|
||||||
|
|
||||||
|
LocalizedShard {
|
||||||
|
markers: shard.markers.clone(),
|
||||||
|
tags: shard.tags.clone(),
|
||||||
|
start_line: shard.start_line,
|
||||||
|
end_line: shard.end_line,
|
||||||
|
moment: adjusted_moment,
|
||||||
|
location: position,
|
||||||
|
children,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Localize an entire stream file.
|
||||||
|
///
|
||||||
|
/// Extracts the datetime from the file name and localizes the root shard.
|
||||||
|
pub fn localize_stream_file(
|
||||||
|
stream_file: &StreamFile,
|
||||||
|
config: &RepositoryConfiguration,
|
||||||
|
) -> Result<LocalizedShard, StreamdError> {
|
||||||
|
let shard_date = extract_datetime_from_file_name(&stream_file.file_name)
|
||||||
|
.ok_or_else(|| StreamdError::DateExtractionError(stream_file.file_name.clone()))?;
|
||||||
|
|
||||||
|
let shard = stream_file
|
||||||
|
.shard
|
||||||
|
.as_ref()
|
||||||
|
.ok_or_else(|| StreamdError::DateExtractionError("No shard in file".to_string()))?;
|
||||||
|
|
||||||
|
let mut initial_location = IndexMap::new();
|
||||||
|
initial_location.insert("file".to_string(), stream_file.file_name.clone());
|
||||||
|
|
||||||
|
Ok(localize_shard(shard, config, &initial_location, shard_date))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;
    use crate::models::{Dimension, Marker, MarkerPlacement};
    use chrono::TimeZone;

    /// Configuration shared by the happy-path tests: three propagating
    /// dimensions and two markers.
    fn make_config() -> RepositoryConfiguration {
        RepositoryConfiguration::new()
            .with_dimension(
                "project",
                Dimension::new("Project")
                    .with_comment("GTD Project that is being worked on")
                    .with_propagate(true),
            )
            .with_dimension(
                "moment",
                Dimension::new("Moment")
                    .with_comment("Timestamp this entry was created at")
                    .with_propagate(true),
            )
            .with_dimension(
                "timesheet",
                Dimension::new("Timesheet")
                    .with_comment("Time Cards for Time Tracking")
                    .with_propagate(true),
            )
            .with_marker(
                "Streamd",
                Marker::new("Streamd").with_placements(vec![
                    MarkerPlacement::new("project"),
                    MarkerPlacement::new("timesheet")
                        .with_if_with(vec!["Timesheet"])
                        .with_value("coding"),
                ]),
            )
            .with_marker(
                "JobHunting",
                Marker::new("JobHunting").with_placements(vec![MarkerPlacement::new("project")]),
            )
    }

    /// Build a stream file whose single root shard carries the given markers.
    fn file_with_markers(name: &str, markers: &[&str]) -> StreamFile {
        let markers = markers.iter().map(|m| m.to_string()).collect();
        StreamFile::new(name).with_shard(Shard::new(1, 1).with_markers(markers))
    }

    /// Configuration with one dimension and two markers "A" then "B", both
    /// placing the values "a" and "b" into that dimension. The `overwrites`
    /// flags of the two placements are parameterized.
    fn overwrite_config(
        dimension: &str,
        display_name: &str,
        propagate: bool,
        a_overwrites: bool,
        b_overwrites: bool,
    ) -> RepositoryConfiguration {
        RepositoryConfiguration::new()
            .with_dimension(dimension, Dimension::new(display_name).with_propagate(propagate))
            .with_marker(
                "A",
                Marker::new("A").with_placements(vec![MarkerPlacement::new(dimension)
                    .with_value("a")
                    .with_overwrites(a_overwrites)]),
            )
            .with_marker(
                "B",
                Marker::new("B").with_placements(vec![MarkerPlacement::new(dimension)
                    .with_value("b")
                    .with_overwrites(b_overwrites)]),
            )
    }

    #[test]
    fn test_project_simple_stream_file() {
        let config = make_config();
        let file = file_with_markers("20250622-121000 Test File.md", &["Streamd"]);

        let localized = localize_stream_file(&file, &config).unwrap();

        // Moment comes from the file-name timestamp.
        assert_eq!(
            localized.moment,
            Utc.with_ymd_and_hms(2025, 6, 22, 12, 10, 0).unwrap()
        );
        assert_eq!(localized.markers, vec!["Streamd"]);
        assert_eq!(localized.location.get("project"), Some(&"Streamd".to_string()));
        assert_eq!(localized.location.get("file"), Some(&file.file_name.clone()));
    }

    #[test]
    fn test_timesheet_use_case() {
        let config = make_config();
        // "Timesheet" satisfies the `if_with` of the second "Streamd" placement.
        let file = file_with_markers(
            "20260131-210000 Test File.md",
            &["Timesheet", "Streamd"],
        );

        let localized = localize_stream_file(&file, &config).unwrap();

        assert_eq!(
            localized.moment,
            Utc.with_ymd_and_hms(2026, 1, 31, 21, 0, 0).unwrap()
        );
        assert_eq!(localized.location.get("project"), Some(&"Streamd".to_string()));
        assert_eq!(localized.location.get("timesheet"), Some(&"coding".to_string()));
    }

    #[test]
    fn test_overwrites_true_propagated_dimension_overwrites_existing_value() {
        let config = overwrite_config("project", "Project", true, true, true);
        let file = file_with_markers("20260131-210000 Test File.md", &["A", "B"]);

        let localized = localize_stream_file(&file, &config).unwrap();

        // "B" is processed after "A" and is allowed to overwrite it.
        assert_eq!(localized.location.get("project"), Some(&"b".to_string()));
    }

    #[test]
    fn test_overwrites_false_propagated_dimension_does_not_overwrite_existing_value() {
        let config = overwrite_config("project", "Project", true, true, false);
        let file = file_with_markers("20260131-210000 Test File.md", &["A", "B"]);

        let localized = localize_stream_file(&file, &config).unwrap();

        // With overwrites disabled, the first placement wins.
        assert_eq!(localized.location.get("project"), Some(&"a".to_string()));
    }

    #[test]
    fn test_overwrites_true_non_propagated_dimension_overwrites_private_value() {
        let config = overwrite_config("label", "Label", false, true, true);
        let file = file_with_markers("20260131-210000 Test File.md", &["A", "B"]);

        let localized = localize_stream_file(&file, &config).unwrap();

        // Overwriting also applies to private (non-propagating) positions.
        assert_eq!(localized.location.get("label"), Some(&"b".to_string()));
    }

    #[test]
    fn test_overwrites_false_non_propagated_dimension_does_not_overwrite_private_value() {
        let config = overwrite_config("label", "Label", false, true, false);
        let file = file_with_markers("20260131-210000 Test File.md", &["A", "B"]);

        let localized = localize_stream_file(&file, &config).unwrap();

        assert_eq!(localized.location.get("label"), Some(&"a".to_string()));
    }
}
|
||||||
19
src/main.rs
Normal file
19
src/main.rs
Normal file
|
|
@ -0,0 +1,19 @@
|
||||||
|
use clap::Parser;
|
||||||
|
use streamd::cli::{Cli, Commands};
|
||||||
|
|
||||||
|
fn main() -> miette::Result<()> {
|
||||||
|
let cli = Cli::parse();
|
||||||
|
|
||||||
|
match cli.command {
|
||||||
|
Some(Commands::New) => streamd::cli::commands::new::run()?,
|
||||||
|
Some(Commands::Todo) => streamd::cli::commands::todo::run()?,
|
||||||
|
Some(Commands::Edit { number }) => streamd::cli::commands::edit::run(number)?,
|
||||||
|
Some(Commands::Timesheet) => streamd::cli::commands::timesheet::run()?,
|
||||||
|
Some(Commands::Completions { shell }) => {
|
||||||
|
streamd::cli::commands::completions::run(shell);
|
||||||
|
}
|
||||||
|
None => streamd::cli::commands::new::run()?,
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
42
src/models/dimension.rs
Normal file
42
src/models/dimension.rs
Normal file
|
|
@ -0,0 +1,42 @@
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
/// A Dimension represents an axis along which shards can be categorized.
|
||||||
|
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||||
|
pub struct Dimension {
|
||||||
|
/// Human-readable name for display purposes.
|
||||||
|
pub display_name: String,
|
||||||
|
|
||||||
|
/// Optional description of what this dimension represents.
|
||||||
|
#[serde(default)]
|
||||||
|
pub comment: Option<String>,
|
||||||
|
|
||||||
|
/// Whether values in this dimension should propagate to child shards.
|
||||||
|
#[serde(default)]
|
||||||
|
pub propagate: bool,
|
||||||
|
|
||||||
|
/// Tracks whether 'propagate' was explicitly set (for merge semantics).
|
||||||
|
#[serde(skip)]
|
||||||
|
pub propagate_was_set: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Dimension {
|
||||||
|
pub fn new(display_name: impl Into<String>) -> Self {
|
||||||
|
Self {
|
||||||
|
display_name: display_name.into(),
|
||||||
|
comment: None,
|
||||||
|
propagate: false,
|
||||||
|
propagate_was_set: false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn with_comment(mut self, comment: impl Into<String>) -> Self {
|
||||||
|
self.comment = Some(comment.into());
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn with_propagate(mut self, propagate: bool) -> Self {
|
||||||
|
self.propagate = propagate;
|
||||||
|
self.propagate_was_set = true;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
}
|
||||||
63
src/models/localized_shard.rs
Normal file
63
src/models/localized_shard.rs
Normal file
|
|
@ -0,0 +1,63 @@
|
||||||
|
use chrono::{DateTime, Utc};
|
||||||
|
use indexmap::IndexMap;
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
/// A LocalizedShard extends a Shard with temporal and dimensional context.
|
||||||
|
/// It represents a shard that has been placed within the repository's coordinate system.
|
||||||
|
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||||
|
pub struct LocalizedShard {
|
||||||
|
/// Markers are tags that appear at the beginning of a line before any content.
|
||||||
|
pub markers: Vec<String>,
|
||||||
|
|
||||||
|
/// Tags are @-prefixed identifiers that appear after content has started.
|
||||||
|
pub tags: Vec<String>,
|
||||||
|
|
||||||
|
/// The starting line number in the source file (1-indexed).
|
||||||
|
pub start_line: usize,
|
||||||
|
|
||||||
|
/// The ending line number in the source file (1-indexed).
|
||||||
|
pub end_line: usize,
|
||||||
|
|
||||||
|
/// The moment in time this shard is associated with.
|
||||||
|
pub moment: DateTime<Utc>,
|
||||||
|
|
||||||
|
/// The dimensional location of this shard (dimension name -> value).
|
||||||
|
pub location: IndexMap<String, String>,
|
||||||
|
|
||||||
|
/// Child shards nested within this shard.
|
||||||
|
pub children: Vec<LocalizedShard>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl LocalizedShard {
|
||||||
|
pub fn new(start_line: usize, end_line: usize, moment: DateTime<Utc>) -> Self {
|
||||||
|
Self {
|
||||||
|
markers: Vec::new(),
|
||||||
|
tags: Vec::new(),
|
||||||
|
start_line,
|
||||||
|
end_line,
|
||||||
|
moment,
|
||||||
|
location: IndexMap::new(),
|
||||||
|
children: Vec::new(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn with_markers(mut self, markers: Vec<String>) -> Self {
|
||||||
|
self.markers = markers;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn with_tags(mut self, tags: Vec<String>) -> Self {
|
||||||
|
self.tags = tags;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn with_location(mut self, location: IndexMap<String, String>) -> Self {
|
||||||
|
self.location = location;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn with_children(mut self, children: Vec<LocalizedShard>) -> Self {
|
||||||
|
self.children = children;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
}
|
||||||
76
src/models/marker.rs
Normal file
76
src/models/marker.rs
Normal file
|
|
@ -0,0 +1,76 @@
|
||||||
|
use indexmap::IndexSet;
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
/// A MarkerPlacement defines how a marker affects dimension values.
|
||||||
|
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||||
|
pub struct MarkerPlacement {
|
||||||
|
/// Only apply this placement if all markers in `if_with` are also present.
|
||||||
|
#[serde(default)]
|
||||||
|
pub if_with: IndexSet<String>,
|
||||||
|
|
||||||
|
/// The dimension to place a value in.
|
||||||
|
pub dimension: String,
|
||||||
|
|
||||||
|
/// The value to place. If None, uses the marker name itself.
|
||||||
|
#[serde(default)]
|
||||||
|
pub value: Option<String>,
|
||||||
|
|
||||||
|
/// Whether this placement should overwrite existing values in the dimension.
|
||||||
|
#[serde(default = "default_overwrites")]
|
||||||
|
pub overwrites: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
fn default_overwrites() -> bool {
|
||||||
|
true
|
||||||
|
}
|
||||||
|
|
||||||
|
impl MarkerPlacement {
|
||||||
|
pub fn new(dimension: impl Into<String>) -> Self {
|
||||||
|
Self {
|
||||||
|
if_with: IndexSet::new(),
|
||||||
|
dimension: dimension.into(),
|
||||||
|
value: None,
|
||||||
|
overwrites: true,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn with_if_with(mut self, if_with: impl IntoIterator<Item = impl Into<String>>) -> Self {
|
||||||
|
self.if_with = if_with.into_iter().map(Into::into).collect();
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn with_value(mut self, value: impl Into<String>) -> Self {
|
||||||
|
self.value = Some(value.into());
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn with_overwrites(mut self, overwrites: bool) -> Self {
|
||||||
|
self.overwrites = overwrites;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// A Marker defines how an @-tag should be interpreted for dimensional placement.
|
||||||
|
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||||
|
pub struct Marker {
|
||||||
|
/// Human-readable name for display purposes.
|
||||||
|
pub display_name: String,
|
||||||
|
|
||||||
|
/// The dimensional placements this marker creates.
|
||||||
|
#[serde(default)]
|
||||||
|
pub placements: Vec<MarkerPlacement>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Marker {
|
||||||
|
pub fn new(display_name: impl Into<String>) -> Self {
|
||||||
|
Self {
|
||||||
|
display_name: display_name.into(),
|
||||||
|
placements: Vec::new(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn with_placements(mut self, placements: Vec<MarkerPlacement>) -> Self {
|
||||||
|
self.placements = placements;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
}
|
||||||
11
src/models/mod.rs
Normal file
11
src/models/mod.rs
Normal file
|
|
@ -0,0 +1,11 @@
|
||||||
|
//! Core data model: shards and their localized form, the dimensions and
//! markers used to position them, and timesheet records.

mod dimension;
mod localized_shard;
mod marker;
mod shard;
mod timecard;

pub use dimension::Dimension;
pub use localized_shard::LocalizedShard;
pub use marker::{Marker, MarkerPlacement};
pub use shard::{RepositoryConfiguration, Shard, StreamFile};
pub use timecard::{SpecialDayType, Timecard, Timesheet};
|
||||||
115
src/models/shard.rs
Normal file
115
src/models/shard.rs
Normal file
|
|
@ -0,0 +1,115 @@
|
||||||
|
use indexmap::IndexMap;
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
use super::{Dimension, Marker};
|
||||||
|
|
||||||
|
/// A Shard represents a section of a markdown file that may contain markers and tags.
|
||||||
|
/// Shards form a tree structure where children inherit context from their parents.
|
||||||
|
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||||
|
pub struct Shard {
|
||||||
|
/// Markers are tags that appear at the beginning of a line before any content.
|
||||||
|
/// They define dimensional placement for the shard.
|
||||||
|
#[serde(default)]
|
||||||
|
pub markers: Vec<String>,
|
||||||
|
|
||||||
|
/// Tags are @-prefixed identifiers that appear after content has started.
|
||||||
|
/// They are informational but don't affect dimensional placement.
|
||||||
|
#[serde(default)]
|
||||||
|
pub tags: Vec<String>,
|
||||||
|
|
||||||
|
/// The starting line number in the source file (1-indexed).
|
||||||
|
pub start_line: usize,
|
||||||
|
|
||||||
|
/// The ending line number in the source file (1-indexed).
|
||||||
|
pub end_line: usize,
|
||||||
|
|
||||||
|
/// Child shards nested within this shard.
|
||||||
|
#[serde(default)]
|
||||||
|
pub children: Vec<Shard>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Shard {
|
||||||
|
pub fn new(start_line: usize, end_line: usize) -> Self {
|
||||||
|
Self {
|
||||||
|
markers: Vec::new(),
|
||||||
|
tags: Vec::new(),
|
||||||
|
start_line,
|
||||||
|
end_line,
|
||||||
|
children: Vec::new(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn with_markers(mut self, markers: Vec<String>) -> Self {
|
||||||
|
self.markers = markers;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn with_tags(mut self, tags: Vec<String>) -> Self {
|
||||||
|
self.tags = tags;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn with_children(mut self, children: Vec<Shard>) -> Self {
|
||||||
|
self.children = children;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// A StreamFile represents a parsed markdown file with its associated shard tree.
|
||||||
|
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||||
|
pub struct StreamFile {
|
||||||
|
/// The file name or path of the source file.
|
||||||
|
pub file_name: String,
|
||||||
|
|
||||||
|
/// The root shard representing the entire file's content structure.
|
||||||
|
pub shard: Option<Shard>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl StreamFile {
|
||||||
|
pub fn new(file_name: impl Into<String>) -> Self {
|
||||||
|
Self {
|
||||||
|
file_name: file_name.into(),
|
||||||
|
shard: None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn with_shard(mut self, shard: Shard) -> Self {
|
||||||
|
self.shard = Some(shard);
|
||||||
|
self
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Repository configuration defines the dimensions and markers used to organize shards.
|
||||||
|
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||||
|
pub struct RepositoryConfiguration {
|
||||||
|
/// Dimensions define the axes along which shards can be positioned.
|
||||||
|
pub dimensions: IndexMap<String, Dimension>,
|
||||||
|
|
||||||
|
/// Markers define how @-tags map to dimension placements.
|
||||||
|
pub markers: IndexMap<String, Marker>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl RepositoryConfiguration {
|
||||||
|
pub fn new() -> Self {
|
||||||
|
Self {
|
||||||
|
dimensions: IndexMap::new(),
|
||||||
|
markers: IndexMap::new(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn with_dimension(mut self, name: impl Into<String>, dimension: Dimension) -> Self {
|
||||||
|
self.dimensions.insert(name.into(), dimension);
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn with_marker(mut self, name: impl Into<String>, marker: Marker) -> Self {
|
||||||
|
self.markers.insert(name.into(), marker);
|
||||||
|
self
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Default for RepositoryConfiguration {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self::new()
|
||||||
|
}
|
||||||
|
}
|
||||||
77
src/models/timecard.rs
Normal file
77
src/models/timecard.rs
Normal file
|
|
@ -0,0 +1,77 @@
|
||||||
|
use chrono::NaiveDate;
|
||||||
|
use chrono::NaiveTime;
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
/// Type of special day that affects timesheet calculations.
|
||||||
|
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
|
||||||
|
pub enum SpecialDayType {
|
||||||
|
#[serde(rename = "VACATION")]
|
||||||
|
Vacation,
|
||||||
|
#[serde(rename = "UNDERTIME")]
|
||||||
|
Undertime,
|
||||||
|
#[serde(rename = "HOLIDAY")]
|
||||||
|
Holiday,
|
||||||
|
#[serde(rename = "WEEKEND")]
|
||||||
|
Weekend,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl std::fmt::Display for SpecialDayType {
|
||||||
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
|
match self {
|
||||||
|
SpecialDayType::Vacation => write!(f, "VACATION"),
|
||||||
|
SpecialDayType::Undertime => write!(f, "UNDERTIME"),
|
||||||
|
SpecialDayType::Holiday => write!(f, "HOLIDAY"),
|
||||||
|
SpecialDayType::Weekend => write!(f, "WEEKEND"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// A Timecard represents a single work period with start and end times.
|
||||||
|
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||||
|
pub struct Timecard {
|
||||||
|
pub from_time: NaiveTime,
|
||||||
|
pub to_time: NaiveTime,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Timecard {
|
||||||
|
pub fn new(from_time: NaiveTime, to_time: NaiveTime) -> Self {
|
||||||
|
Self { from_time, to_time }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// A Timesheet aggregates all time tracking information for a single day.
|
||||||
|
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||||
|
pub struct Timesheet {
|
||||||
|
pub date: NaiveDate,
|
||||||
|
#[serde(default)]
|
||||||
|
pub is_sick_leave: bool,
|
||||||
|
#[serde(default)]
|
||||||
|
pub special_day_type: Option<SpecialDayType>,
|
||||||
|
pub timecards: Vec<Timecard>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Timesheet {
|
||||||
|
pub fn new(date: NaiveDate) -> Self {
|
||||||
|
Self {
|
||||||
|
date,
|
||||||
|
is_sick_leave: false,
|
||||||
|
special_day_type: None,
|
||||||
|
timecards: Vec::new(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn with_sick_leave(mut self, is_sick_leave: bool) -> Self {
|
||||||
|
self.is_sick_leave = is_sick_leave;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn with_special_day_type(mut self, special_day_type: SpecialDayType) -> Self {
|
||||||
|
self.special_day_type = Some(special_day_type);
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn with_timecards(mut self, timecards: Vec<Timecard>) -> Self {
|
||||||
|
self.timecards = timecards;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
}
|
||||||
209
src/query/find.rs
Normal file
209
src/query/find.rs
Normal file
|
|
@ -0,0 +1,209 @@
|
||||||
|
use crate::models::LocalizedShard;
|
||||||
|
|
||||||
|
/// Find all shards matching a predicate, recursively searching through children.
|
||||||
|
///
|
||||||
|
/// The search is depth-first, with the parent tested before its children.
|
||||||
|
pub fn find_shard<F>(shards: &[LocalizedShard], predicate: F) -> Vec<LocalizedShard>
|
||||||
|
where
|
||||||
|
F: Fn(&LocalizedShard) -> bool + Copy,
|
||||||
|
{
|
||||||
|
let mut found_shards = Vec::new();
|
||||||
|
|
||||||
|
for shard in shards {
|
||||||
|
if predicate(shard) {
|
||||||
|
found_shards.push(shard.clone());
|
||||||
|
}
|
||||||
|
found_shards.extend(find_shard(&shard.children, predicate));
|
||||||
|
}
|
||||||
|
|
||||||
|
found_shards
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Find all shards where a specific dimension has a specific value.
|
||||||
|
pub fn find_shard_by_position(
|
||||||
|
shards: &[LocalizedShard],
|
||||||
|
dimension: &str,
|
||||||
|
value: &str,
|
||||||
|
) -> Vec<LocalizedShard> {
|
||||||
|
find_shard(shards, |shard| {
|
||||||
|
shard
|
||||||
|
.location
|
||||||
|
.get(dimension)
|
||||||
|
.map(|v| v == value)
|
||||||
|
.unwrap_or(false)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Find all shards where a specific dimension is set (regardless of value).
|
||||||
|
pub fn find_shard_by_set_dimension(
|
||||||
|
shards: &[LocalizedShard],
|
||||||
|
dimension: &str,
|
||||||
|
) -> Vec<LocalizedShard> {
|
||||||
|
find_shard(shards, |shard| shard.location.contains_key(dimension))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
use chrono::{TimeZone, Utc};
|
||||||
|
use indexmap::IndexMap;
|
||||||
|
|
||||||
|
fn generate_localized_shard(
|
||||||
|
location: Option<IndexMap<String, String>>,
|
||||||
|
children: Option<Vec<LocalizedShard>>,
|
||||||
|
) -> LocalizedShard {
|
||||||
|
LocalizedShard {
|
||||||
|
start_line: 1,
|
||||||
|
end_line: 1,
|
||||||
|
moment: Utc.with_ymd_and_hms(2020, 1, 1, 0, 0, 0).unwrap(),
|
||||||
|
location: location.unwrap_or_default(),
|
||||||
|
children: children.unwrap_or_default(),
|
||||||
|
markers: vec![],
|
||||||
|
tags: vec![],
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_returns_empty_when_no_match() {
|
||||||
|
let mut loc = IndexMap::new();
|
||||||
|
loc.insert("file".to_string(), "a.md".to_string());
|
||||||
|
let root = generate_localized_shard(Some(loc), None);
|
||||||
|
let shards = vec![root];
|
||||||
|
|
||||||
|
let result = find_shard(&shards, |s| s.location.contains_key("missing"));
|
||||||
|
|
||||||
|
assert!(result.is_empty());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_finds_matches_depth_first_and_preserves_order() {
|
||||||
|
let mut loc1 = IndexMap::new();
|
||||||
|
loc1.insert("k".to_string(), "match".to_string());
|
||||||
|
let grandchild = generate_localized_shard(Some(loc1.clone()), None);
|
||||||
|
|
||||||
|
let child1 = generate_localized_shard(Some(loc1), Some(vec![grandchild.clone()]));
|
||||||
|
|
||||||
|
let mut loc2 = IndexMap::new();
|
||||||
|
loc2.insert("k".to_string(), "nope".to_string());
|
||||||
|
let child2 = generate_localized_shard(Some(loc2.clone()), None);
|
||||||
|
|
||||||
|
let root = generate_localized_shard(Some(loc2), Some(vec![child1.clone(), child2]));
|
||||||
|
|
||||||
|
let result = find_shard(&[root], |s| {
|
||||||
|
s.location.get("k") == Some(&"match".to_string())
|
||||||
|
});
|
||||||
|
|
||||||
|
assert_eq!(result.len(), 2);
|
||||||
|
assert_eq!(result[0], child1);
|
||||||
|
assert_eq!(result[1], grandchild);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_includes_root_if_it_matches() {
|
||||||
|
let mut loc = IndexMap::new();
|
||||||
|
loc.insert("k".to_string(), "match".to_string());
|
||||||
|
|
||||||
|
let child = generate_localized_shard(Some(loc.clone()), None);
|
||||||
|
let root = generate_localized_shard(Some(loc), Some(vec![child]));
|
||||||
|
|
||||||
|
let result = find_shard(std::slice::from_ref(&root), |s| {
|
||||||
|
s.location.get("k") == Some(&"match".to_string())
|
||||||
|
});
|
||||||
|
|
||||||
|
assert_eq!(result[0], root);
|
||||||
|
assert_eq!(result.len(), 2);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_multiple_roots_keeps_left_to_right_order() {
|
||||||
|
let mut loc_match = IndexMap::new();
|
||||||
|
loc_match.insert("k".to_string(), "match".to_string());
|
||||||
|
|
||||||
|
let mut loc_nope = IndexMap::new();
|
||||||
|
loc_nope.insert("k".to_string(), "nope".to_string());
|
||||||
|
|
||||||
|
let a = generate_localized_shard(Some(loc_match.clone()), None);
|
||||||
|
let b = generate_localized_shard(Some(loc_match), None);
|
||||||
|
let c = generate_localized_shard(Some(loc_nope), None);
|
||||||
|
|
||||||
|
let result = find_shard(&[a.clone(), b.clone(), c], |s| {
|
||||||
|
s.location.get("k") == Some(&"match".to_string())
|
||||||
|
});
|
||||||
|
|
||||||
|
assert_eq!(result, vec![a, b]);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_query_function_can_use_arbitrary_logic() {
|
||||||
|
let mut loc1 = IndexMap::new();
|
||||||
|
loc1.insert("x".to_string(), "1".to_string());
|
||||||
|
|
||||||
|
let mut loc2 = IndexMap::new();
|
||||||
|
loc2.insert("x".to_string(), "2".to_string());
|
||||||
|
|
||||||
|
let mut loc3 = IndexMap::new();
|
||||||
|
loc3.insert("x".to_string(), "3".to_string());
|
||||||
|
|
||||||
|
let a = generate_localized_shard(Some(loc1), None);
|
||||||
|
let b = generate_localized_shard(Some(loc2), None);
|
||||||
|
let c = generate_localized_shard(Some(loc3), None);
|
||||||
|
let root = generate_localized_shard(None, Some(vec![a, b.clone(), c]));
|
||||||
|
|
||||||
|
let result = find_shard(&[root], |shard| {
|
||||||
|
shard
|
||||||
|
.location
|
||||||
|
.get("x")
|
||||||
|
.and_then(|x| x.parse::<i32>().ok())
|
||||||
|
.map(|x| x % 2 == 0)
|
||||||
|
.unwrap_or(false)
|
||||||
|
});
|
||||||
|
|
||||||
|
assert_eq!(result, vec![b]);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_matches_only_when_dimension_present_and_equal() {
|
||||||
|
let mut loc_match = IndexMap::new();
|
||||||
|
loc_match.insert("file".to_string(), "a.md".to_string());
|
||||||
|
loc_match.insert("line".to_string(), "10".to_string());
|
||||||
|
|
||||||
|
let mut loc_wrong = IndexMap::new();
|
||||||
|
loc_wrong.insert("file".to_string(), "a.md".to_string());
|
||||||
|
loc_wrong.insert("line".to_string(), "11".to_string());
|
||||||
|
|
||||||
|
let mut loc_missing = IndexMap::new();
|
||||||
|
loc_missing.insert("file".to_string(), "a.md".to_string());
|
||||||
|
|
||||||
|
let match_shard = generate_localized_shard(Some(loc_match), None);
|
||||||
|
let wrong_value = generate_localized_shard(Some(loc_wrong), None);
|
||||||
|
let missing_dim = generate_localized_shard(Some(loc_missing), None);
|
||||||
|
|
||||||
|
let mut root_loc = IndexMap::new();
|
||||||
|
root_loc.insert("root".to_string(), "x".to_string());
|
||||||
|
let root = generate_localized_shard(
|
||||||
|
Some(root_loc),
|
||||||
|
Some(vec![match_shard.clone(), wrong_value, missing_dim]),
|
||||||
|
);
|
||||||
|
|
||||||
|
let result = find_shard_by_position(&[root], "line", "10");
|
||||||
|
|
||||||
|
assert_eq!(result, vec![match_shard]);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_recurses_through_children() {
|
||||||
|
let mut loc_deep = IndexMap::new();
|
||||||
|
loc_deep.insert("section".to_string(), "s1".to_string());
|
||||||
|
let deep = generate_localized_shard(Some(loc_deep), None);
|
||||||
|
|
||||||
|
let mut loc_mid = IndexMap::new();
|
||||||
|
loc_mid.insert("section".to_string(), "s0".to_string());
|
||||||
|
let mid = generate_localized_shard(Some(loc_mid), Some(vec![deep.clone()]));
|
||||||
|
|
||||||
|
let root = generate_localized_shard(None, Some(vec![mid]));
|
||||||
|
|
||||||
|
let result = find_shard_by_position(&[root], "section", "s1");
|
||||||
|
|
||||||
|
assert_eq!(result, vec![deep]);
|
||||||
|
}
|
||||||
|
}
|
||||||
3
src/query/mod.rs
Normal file
3
src/query/mod.rs
Normal file
|
|
@ -0,0 +1,3 @@
|
||||||
|
mod find;
|
||||||
|
|
||||||
|
pub use find::{find_shard, find_shard_by_position, find_shard_by_set_dimension};
|
||||||
|
|
@ -1,126 +0,0 @@
|
||||||
import glob
|
|
||||||
import os
|
|
||||||
from datetime import datetime
|
|
||||||
from shutil import move
|
|
||||||
from typing import Annotated, Generator
|
|
||||||
|
|
||||||
import click
|
|
||||||
import typer
|
|
||||||
from rich import print
|
|
||||||
from rich.markdown import Markdown
|
|
||||||
from rich.panel import Panel
|
|
||||||
|
|
||||||
from streamer.localize import (
|
|
||||||
LocalizedShard,
|
|
||||||
RepositoryConfiguration,
|
|
||||||
localize_stream_file,
|
|
||||||
)
|
|
||||||
from streamer.localize.preconfigured_configurations import TaskConfiguration
|
|
||||||
from streamer.parse import parse_markdown_file
|
|
||||||
from streamer.query import find_shard_by_position
|
|
||||||
from streamer.query.find import find_shard_by_set_dimension
|
|
||||||
from streamer.settings import Settings
|
|
||||||
from streamer.timesheet.configuration import BasicTimesheetConfiguration
|
|
||||||
from streamer.timesheet.extract import extract_timesheets
|
|
||||||
|
|
||||||
app = typer.Typer()
|
|
||||||
|
|
||||||
|
|
||||||
def all_files(config: RepositoryConfiguration) -> Generator[LocalizedShard]:
|
|
||||||
for file_name in glob.glob(f"{glob.escape(Settings().base_folder)}/*.md"):
|
|
||||||
with open(file_name, "r") as file:
|
|
||||||
file_content = file.read()
|
|
||||||
if shard := localize_stream_file(
|
|
||||||
parse_markdown_file(file_name, file_content), config
|
|
||||||
):
|
|
||||||
yield shard
|
|
||||||
|
|
||||||
|
|
||||||
@app.command()
|
|
||||||
def todo() -> None:
|
|
||||||
all_shards = list(all_files(TaskConfiguration))
|
|
||||||
|
|
||||||
for task_shard in find_shard_by_position(all_shards, "task", "open"):
|
|
||||||
with open(task_shard.location["file"], "r") as file:
|
|
||||||
file_content = file.read().splitlines()
|
|
||||||
print(
|
|
||||||
Panel(
|
|
||||||
Markdown(
|
|
||||||
"\n".join(
|
|
||||||
file_content[
|
|
||||||
task_shard.start_line - 1 : task_shard.end_line
|
|
||||||
]
|
|
||||||
)
|
|
||||||
),
|
|
||||||
title=f"{task_shard.location['file']}:{task_shard.start_line}",
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@app.command()
|
|
||||||
def edit(number: Annotated[int, typer.Argument()] = 1) -> None:
|
|
||||||
all_shards = list(all_files(TaskConfiguration))
|
|
||||||
sorted_shards = sorted(all_shards, key=lambda s: s.moment)
|
|
||||||
|
|
||||||
if abs(number) >= len(sorted_shards):
|
|
||||||
raise ValueError("Argument out of range")
|
|
||||||
|
|
||||||
selected_number = number
|
|
||||||
if selected_number >= 0:
|
|
||||||
selected_number = len(sorted_shards) - selected_number
|
|
||||||
else:
|
|
||||||
selected_number = -selected_number
|
|
||||||
|
|
||||||
click.edit(None, filename=sorted_shards[selected_number].location["file"])
|
|
||||||
|
|
||||||
|
|
||||||
@app.command()
|
|
||||||
def timesheet() -> None:
|
|
||||||
all_shards = list(all_files(BasicTimesheetConfiguration))
|
|
||||||
sheets = sorted(extract_timesheets(all_shards), key=lambda card: card.date)
|
|
||||||
for sheet in sheets:
|
|
||||||
print(sheet.date)
|
|
||||||
print(
|
|
||||||
",".join(
|
|
||||||
map(lambda card: f"{card.from_time},{card.to_time}", sheet.timecards)
|
|
||||||
),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@app.command()
|
|
||||||
def new() -> None:
|
|
||||||
streamer_directory = Settings().base_folder
|
|
||||||
|
|
||||||
timestamp = datetime.now().strftime("%Y%m%d-%H%M%S")
|
|
||||||
preliminary_file_name = f"{timestamp}_wip.md"
|
|
||||||
prelimary_path = os.path.join(streamer_directory, preliminary_file_name)
|
|
||||||
|
|
||||||
content = "# "
|
|
||||||
with open(prelimary_path, "w") as file:
|
|
||||||
_ = file.write(content)
|
|
||||||
|
|
||||||
click.edit(None, filename=prelimary_path)
|
|
||||||
|
|
||||||
with open(prelimary_path, "r") as file:
|
|
||||||
content = file.read()
|
|
||||||
parsed_content = parse_markdown_file(prelimary_path, content)
|
|
||||||
|
|
||||||
final_file_name = f"{timestamp}.md"
|
|
||||||
if parsed_content.shard is not None and len(
|
|
||||||
markers := parsed_content.shard.markers
|
|
||||||
):
|
|
||||||
final_file_name = f"{timestamp} {' '.join(markers)}.md"
|
|
||||||
|
|
||||||
final_path = os.path.join(streamer_directory, final_file_name)
|
|
||||||
_ = move(prelimary_path, final_path)
|
|
||||||
print(f"Saved as [yellow]{final_file_name}")
|
|
||||||
|
|
||||||
|
|
||||||
@app.callback(invoke_without_command=True)
|
|
||||||
def main(ctx: typer.Context):
|
|
||||||
if ctx.invoked_subcommand is None:
|
|
||||||
new()
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
app()
|
|
||||||
|
|
@ -1,9 +0,0 @@
|
||||||
from .localize import localize_stream_file
|
|
||||||
from .localized_shard import LocalizedShard
|
|
||||||
from .repository_configuration import RepositoryConfiguration
|
|
||||||
|
|
||||||
__all__ = [
|
|
||||||
"RepositoryConfiguration",
|
|
||||||
"localize_stream_file",
|
|
||||||
"LocalizedShard",
|
|
||||||
]
|
|
||||||
|
|
@ -1,92 +0,0 @@
|
||||||
import os
|
|
||||||
import re
|
|
||||||
from datetime import date, datetime, time
|
|
||||||
|
|
||||||
|
|
||||||
def extract_datetime_from_file_name(file_name: str) -> datetime | None:
|
|
||||||
FILE_NAME_REGEX = r"^(?P<date>\d{8})(?:-(?P<time>\d{4,6}))?.+.md$"
|
|
||||||
base_name = os.path.basename(file_name)
|
|
||||||
match = re.match(FILE_NAME_REGEX, base_name)
|
|
||||||
|
|
||||||
if match:
|
|
||||||
date_str = match.group("date")
|
|
||||||
time_str = match.group("time") or ""
|
|
||||||
time_str = time_str.ljust(6, "0")
|
|
||||||
datetime_str = f"{date_str} {time_str[:2]}:{time_str[2:4]}:{time_str[4:]}"
|
|
||||||
return datetime.strptime(datetime_str, "%Y%m%d %H:%M:%S")
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def extract_datetime_from_marker(marker: str) -> datetime | None:
|
|
||||||
"""
|
|
||||||
Extract a datetime from a marker string in the exact format: YYYYMMDDHHMMSS.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Parsed datetime if the format is fulfilled and values are valid, else None.
|
|
||||||
"""
|
|
||||||
if not re.fullmatch(r"\d{14}", marker or ""):
|
|
||||||
return None
|
|
||||||
try:
|
|
||||||
return datetime.strptime(marker, "%Y%m%d%H%M%S")
|
|
||||||
except ValueError:
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def extract_date_from_marker(marker: str) -> date | None:
|
|
||||||
"""
|
|
||||||
Extract a date from a marker string in the exact format: YYYYMMDD.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Parsed date if the format is fulfilled and values are valid, else None.
|
|
||||||
"""
|
|
||||||
if not re.fullmatch(r"\d{8}", marker or ""):
|
|
||||||
return None
|
|
||||||
try:
|
|
||||||
return datetime.strptime(marker, "%Y%m%d").date()
|
|
||||||
except ValueError:
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def extract_time_from_marker(marker: str) -> time | None: # noqa: F821
|
|
||||||
"""
|
|
||||||
Extract a time from a marker string in the exact format: HHMMSS.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Parsed time if the format is fulfilled and values are valid, else None.
|
|
||||||
"""
|
|
||||||
if not re.fullmatch(r"\d{6}", marker or ""):
|
|
||||||
return None
|
|
||||||
try:
|
|
||||||
return datetime.strptime(marker, "%H%M%S").time()
|
|
||||||
except ValueError:
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def extract_datetime_from_marker_list(markers: list[str], inherited_datetime: datetime):
|
|
||||||
shard_time: time | None = None
|
|
||||||
shard_date: date | None = None
|
|
||||||
|
|
||||||
for marker in markers[::-1]:
|
|
||||||
if parsed_time := extract_time_from_marker(marker):
|
|
||||||
shard_time = parsed_time
|
|
||||||
if parsed_date := extract_date_from_marker(marker):
|
|
||||||
shard_date = parsed_date
|
|
||||||
if parsed_datetime := extract_datetime_from_marker(marker):
|
|
||||||
shard_date = parsed_datetime.date()
|
|
||||||
shard_time = parsed_datetime.time()
|
|
||||||
|
|
||||||
if shard_date and not shard_time:
|
|
||||||
return datetime.combine(shard_date, time(0, 0, 0))
|
|
||||||
|
|
||||||
return datetime.combine(
|
|
||||||
shard_date or inherited_datetime.date(), shard_time or inherited_datetime.time()
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
__all__ = [
|
|
||||||
"extract_datetime_from_file_name",
|
|
||||||
"extract_datetime_from_marker",
|
|
||||||
"extract_date_from_marker",
|
|
||||||
"extract_time_from_marker",
|
|
||||||
"extract_datetime_from_marker_list",
|
|
||||||
]
|
|
||||||
|
|
@ -1,70 +0,0 @@
|
||||||
from datetime import datetime
|
|
||||||
|
|
||||||
from streamer.parse.shard import Shard, StreamFile
|
|
||||||
|
|
||||||
from .extract_datetime import (
|
|
||||||
extract_datetime_from_file_name,
|
|
||||||
extract_datetime_from_marker_list,
|
|
||||||
)
|
|
||||||
from .localized_shard import LocalizedShard
|
|
||||||
from .repository_configuration import RepositoryConfiguration
|
|
||||||
|
|
||||||
|
|
||||||
def localize_shard(
|
|
||||||
shard: Shard,
|
|
||||||
config: RepositoryConfiguration,
|
|
||||||
propagated: dict[str, str],
|
|
||||||
moment: datetime,
|
|
||||||
) -> LocalizedShard:
|
|
||||||
position = {**propagated}
|
|
||||||
private_position: dict[str, str] = {}
|
|
||||||
|
|
||||||
adjusted_moment: datetime = extract_datetime_from_marker_list(shard.markers, moment)
|
|
||||||
|
|
||||||
for marker in shard.markers:
|
|
||||||
if marker in config.markers:
|
|
||||||
marker_definition = config.markers[marker]
|
|
||||||
for placement in marker_definition.placements:
|
|
||||||
if placement.if_with <= set(shard.markers):
|
|
||||||
dimension = config.dimensions[placement.dimension]
|
|
||||||
|
|
||||||
value = placement.value or marker
|
|
||||||
|
|
||||||
if placement.overwrites or (
|
|
||||||
placement.dimension not in position
|
|
||||||
and placement.dimension not in private_position
|
|
||||||
):
|
|
||||||
if dimension.propagate:
|
|
||||||
position[placement.dimension] = value
|
|
||||||
else:
|
|
||||||
private_position[placement.dimension] = value
|
|
||||||
|
|
||||||
children = [
|
|
||||||
localize_shard(child, config, position, adjusted_moment)
|
|
||||||
for child in shard.children
|
|
||||||
]
|
|
||||||
|
|
||||||
position.update(private_position)
|
|
||||||
|
|
||||||
return LocalizedShard(
|
|
||||||
**shard.model_dump(exclude={"children"}),
|
|
||||||
location=position,
|
|
||||||
children=children,
|
|
||||||
moment=adjusted_moment,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def localize_stream_file(
|
|
||||||
stream_file: StreamFile, config: RepositoryConfiguration
|
|
||||||
) -> LocalizedShard | None:
|
|
||||||
shard_date = extract_datetime_from_file_name(stream_file.file_name)
|
|
||||||
|
|
||||||
if not shard_date or not stream_file.shard:
|
|
||||||
raise ValueError("Could not extract date")
|
|
||||||
|
|
||||||
return localize_shard(
|
|
||||||
stream_file.shard, config, {"file": stream_file.file_name}, shard_date
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
__all__ = ["localize_stream_file"]
|
|
||||||
|
|
@ -1,14 +0,0 @@
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from datetime import datetime
|
|
||||||
|
|
||||||
from streamer.parse.shard import Shard
|
|
||||||
|
|
||||||
|
|
||||||
class LocalizedShard(Shard):
|
|
||||||
moment: datetime
|
|
||||||
location: dict[str, str]
|
|
||||||
children: list[LocalizedShard] = [] # pyright: ignore[reportIncompatibleVariableOverride]
|
|
||||||
|
|
||||||
|
|
||||||
__all__ = ["LocalizedShard"]
|
|
||||||
|
|
@ -1,43 +0,0 @@
|
||||||
from streamer.localize.repository_configuration import (
|
|
||||||
Dimension,
|
|
||||||
Marker,
|
|
||||||
MarkerPlacement,
|
|
||||||
RepositoryConfiguration,
|
|
||||||
)
|
|
||||||
|
|
||||||
TaskConfiguration = RepositoryConfiguration(
|
|
||||||
dimensions={
|
|
||||||
"task": Dimension(
|
|
||||||
display_name="Task",
|
|
||||||
comment="If placed, the given shard is a task. The placement determines the state.",
|
|
||||||
propagate=False,
|
|
||||||
),
|
|
||||||
"project": Dimension(
|
|
||||||
display_name="Project",
|
|
||||||
comment="Project the task is attached to",
|
|
||||||
propagate=True,
|
|
||||||
),
|
|
||||||
},
|
|
||||||
markers={
|
|
||||||
"Task": Marker(
|
|
||||||
display_name="Task",
|
|
||||||
placements=[
|
|
||||||
MarkerPlacement(dimension="task", value="open"),
|
|
||||||
MarkerPlacement(if_with={"Done"}, dimension="task", value="done"),
|
|
||||||
MarkerPlacement(if_with={"Waiting"}, dimension="task", value="waiting"),
|
|
||||||
MarkerPlacement(
|
|
||||||
if_with={"Cancelled"}, dimension="task", value="cancelled"
|
|
||||||
),
|
|
||||||
MarkerPlacement(
|
|
||||||
if_with={"NotDone"}, dimension="task", value="cancelled"
|
|
||||||
),
|
|
||||||
],
|
|
||||||
),
|
|
||||||
"WaitingFor": Marker(
|
|
||||||
display_name="Task",
|
|
||||||
placements=[
|
|
||||||
MarkerPlacement(dimension="task", value="waiting"),
|
|
||||||
],
|
|
||||||
),
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
@ -1,108 +0,0 @@
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from typing import Optional
|
|
||||||
|
|
||||||
from pydantic import BaseModel
|
|
||||||
|
|
||||||
|
|
||||||
class Dimension(BaseModel):
|
|
||||||
display_name: str
|
|
||||||
comment: Optional[str] = None
|
|
||||||
propagate: bool = False
|
|
||||||
|
|
||||||
|
|
||||||
class MarkerPlacement(BaseModel):
|
|
||||||
if_with: set[str] = set()
|
|
||||||
dimension: str
|
|
||||||
value: str | None = None
|
|
||||||
overwrites: bool = True
|
|
||||||
|
|
||||||
|
|
||||||
class Marker(BaseModel):
|
|
||||||
display_name: str
|
|
||||||
placements: list[MarkerPlacement] = []
|
|
||||||
|
|
||||||
|
|
||||||
class RepositoryConfiguration(BaseModel):
|
|
||||||
dimensions: dict[str, Dimension]
|
|
||||||
markers: dict[str, Marker]
|
|
||||||
|
|
||||||
|
|
||||||
def merge_single_dimension(base: Dimension, second: Dimension) -> Dimension:
|
|
||||||
second_fields_set = getattr(second, "model_fields_set", set())
|
|
||||||
|
|
||||||
return Dimension(
|
|
||||||
display_name=second.display_name or base.display_name,
|
|
||||||
comment=base.comment if second.comment is None else second.comment,
|
|
||||||
propagate=second.propagate
|
|
||||||
if "propagate" in second_fields_set
|
|
||||||
else base.propagate,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def merge_dimensions(
|
|
||||||
base: dict[str, Dimension], second: dict[str, Dimension]
|
|
||||||
) -> dict[str, Dimension]:
|
|
||||||
merged: dict[str, Dimension] = dict(base)
|
|
||||||
for key, second_dimension in second.items():
|
|
||||||
if key in merged:
|
|
||||||
merged[key] = merge_single_dimension(merged[key], second_dimension)
|
|
||||||
else:
|
|
||||||
merged[key] = second_dimension
|
|
||||||
return merged
|
|
||||||
|
|
||||||
|
|
||||||
def _placement_identity(p: MarkerPlacement) -> tuple[frozenset[str], str]:
|
|
||||||
return (frozenset(p.if_with), p.dimension)
|
|
||||||
|
|
||||||
|
|
||||||
def merge_single_marker(base: Marker, second: Marker) -> Marker:
|
|
||||||
merged_display_name = second.display_name or base.display_name
|
|
||||||
|
|
||||||
merged_placements: list[MarkerPlacement] = []
|
|
||||||
seen: dict[tuple[frozenset[str], str], int] = {}
|
|
||||||
|
|
||||||
for placement in base.placements:
|
|
||||||
ident = _placement_identity(placement)
|
|
||||||
seen[ident] = len(merged_placements)
|
|
||||||
merged_placements.append(placement)
|
|
||||||
|
|
||||||
for placement in second.placements:
|
|
||||||
ident = _placement_identity(placement)
|
|
||||||
if ident in seen:
|
|
||||||
merged_placements[seen[ident]] = placement
|
|
||||||
else:
|
|
||||||
seen[ident] = len(merged_placements)
|
|
||||||
merged_placements.append(placement)
|
|
||||||
|
|
||||||
return Marker(display_name=merged_display_name, placements=merged_placements)
|
|
||||||
|
|
||||||
|
|
||||||
def merge_markers(
|
|
||||||
base: dict[str, Marker], second: dict[str, Marker]
|
|
||||||
) -> dict[str, Marker]:
|
|
||||||
merged: dict[str, Marker] = dict(base)
|
|
||||||
for key, second_marker in second.items():
|
|
||||||
if key in merged:
|
|
||||||
merged[key] = merge_single_marker(merged[key], second_marker)
|
|
||||||
else:
|
|
||||||
merged[key] = second_marker
|
|
||||||
return merged
|
|
||||||
|
|
||||||
|
|
||||||
def merge_repository_configuration(
|
|
||||||
base: RepositoryConfiguration, second: RepositoryConfiguration
|
|
||||||
) -> RepositoryConfiguration:
|
|
||||||
return RepositoryConfiguration(
|
|
||||||
dimensions=merge_dimensions(base.dimensions, second.dimensions),
|
|
||||||
markers=merge_markers(base.markers, second.markers),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
__all__ = [
|
|
||||||
"Dimension",
|
|
||||||
"Marker",
|
|
||||||
"MarkerPlacement",
|
|
||||||
"RepositoryConfiguration",
|
|
||||||
"merge_repository_configuration",
|
|
||||||
]
|
|
||||||
|
|
@ -1,4 +0,0 @@
|
||||||
from .shard import Shard, StreamFile
|
|
||||||
from .parse import parse_markdown_file
|
|
||||||
|
|
||||||
__all__ = ["Shard", "StreamFile", "parse_markdown_file"]
|
|
||||||
|
|
@ -1,84 +0,0 @@
|
||||||
import re
|
|
||||||
from typing import Iterable
|
|
||||||
from mistletoe.block_token import BlockToken
|
|
||||||
from mistletoe.span_token import Emphasis, RawText, Strikethrough, Strong, Link
|
|
||||||
from mistletoe.token import Token
|
|
||||||
|
|
||||||
from .markdown_tag import Tag
|
|
||||||
|
|
||||||
|
|
||||||
def extract_markers_and_tags_from_single_token(
|
|
||||||
token: Token,
|
|
||||||
marker_boundary_encountered: bool,
|
|
||||||
return_at_first_marker: bool = False,
|
|
||||||
) -> tuple[list[str], list[str], bool]:
|
|
||||||
result_markers, result_tags = [], []
|
|
||||||
result_marker_boundary_encountered = marker_boundary_encountered
|
|
||||||
|
|
||||||
if isinstance(token, Tag):
|
|
||||||
if marker_boundary_encountered:
|
|
||||||
result_tags.append(token.content)
|
|
||||||
else:
|
|
||||||
result_markers.append(token.content)
|
|
||||||
elif isinstance(token, (Emphasis, Strong, Strikethrough, Link)):
|
|
||||||
markers, tags, child_marker_boundary_encountered = (
|
|
||||||
extract_markers_and_tags_from_tokens(
|
|
||||||
token.children or [],
|
|
||||||
marker_boundary_encountered,
|
|
||||||
return_at_first_marker,
|
|
||||||
)
|
|
||||||
)
|
|
||||||
result_markers.extend(markers)
|
|
||||||
result_tags.extend(tags)
|
|
||||||
result_marker_boundary_encountered = (
|
|
||||||
marker_boundary_encountered or child_marker_boundary_encountered
|
|
||||||
)
|
|
||||||
elif isinstance(token, RawText) and re.match(r"^[\s]*$", token.content):
|
|
||||||
pass
|
|
||||||
else:
|
|
||||||
result_marker_boundary_encountered = True
|
|
||||||
|
|
||||||
return result_markers, result_tags, result_marker_boundary_encountered
|
|
||||||
|
|
||||||
|
|
||||||
def extract_markers_and_tags_from_tokens(
|
|
||||||
tokens: Iterable[Token],
|
|
||||||
marker_boundary_encountered: bool,
|
|
||||||
return_at_first_marker: bool = False,
|
|
||||||
) -> tuple[list[str], list[str], bool]:
|
|
||||||
result_markers, result_tags = [], []
|
|
||||||
result_marker_boundary_encountered = marker_boundary_encountered
|
|
||||||
|
|
||||||
for child in tokens:
|
|
||||||
markers, tags, child_marker_boundary_encountered = (
|
|
||||||
extract_markers_and_tags_from_single_token(
|
|
||||||
child, result_marker_boundary_encountered, return_at_first_marker
|
|
||||||
)
|
|
||||||
)
|
|
||||||
result_markers.extend(markers)
|
|
||||||
result_tags.extend(tags)
|
|
||||||
result_marker_boundary_encountered = (
|
|
||||||
marker_boundary_encountered or child_marker_boundary_encountered
|
|
||||||
)
|
|
||||||
|
|
||||||
if len(result_markers) > 0 and return_at_first_marker:
|
|
||||||
break
|
|
||||||
|
|
||||||
return result_markers, result_tags, result_marker_boundary_encountered
|
|
||||||
|
|
||||||
|
|
||||||
def extract_markers_and_tags(block_token: BlockToken) -> tuple[list[str], list[str]]:
|
|
||||||
markers, tags, _ = extract_markers_and_tags_from_tokens(
|
|
||||||
block_token.children or [], False
|
|
||||||
)
|
|
||||||
return markers, tags
|
|
||||||
|
|
||||||
|
|
||||||
def has_markers(block_token: BlockToken) -> bool:
|
|
||||||
markers, _, _ = extract_markers_and_tags_from_tokens(
|
|
||||||
block_token.children or [], False, return_at_first_marker=True
|
|
||||||
)
|
|
||||||
return len(markers) > 0
|
|
||||||
|
|
||||||
|
|
||||||
__all__ = ["extract_markers_and_tags", "has_markers"]
|
|
||||||
|
|
@ -1,13 +0,0 @@
|
||||||
from itertools import pairwise
|
|
||||||
from typing import TypeVar
|
|
||||||
|
|
||||||
A = TypeVar("A")
|
|
||||||
|
|
||||||
|
|
||||||
def split_at(list_to_be_split: list[A], positions: list[int]):
|
|
||||||
positions = sorted(set([0, *positions, len(list_to_be_split)]))
|
|
||||||
|
|
||||||
return [list_to_be_split[left:right] for left, right in pairwise(positions)]
|
|
||||||
|
|
||||||
|
|
||||||
__all__ = ["split_at"]
|
|
||||||
|
|
@ -1,20 +0,0 @@
|
||||||
import re
|
|
||||||
from mistletoe.markdown_renderer import Fragment, MarkdownRenderer
|
|
||||||
from mistletoe.span_token import SpanToken
|
|
||||||
|
|
||||||
|
|
||||||
class Tag(SpanToken):
|
|
||||||
parse_inner = False
|
|
||||||
pattern = re.compile(r"@([^\s*\x60~\[\]]+)")
|
|
||||||
|
|
||||||
|
|
||||||
class TagMarkdownRenderer(MarkdownRenderer):
|
|
||||||
def __init__(self):
|
|
||||||
super().__init__(Tag)
|
|
||||||
|
|
||||||
def render_tag(self, token: Tag):
|
|
||||||
yield Fragment("@")
|
|
||||||
yield Fragment(token.content)
|
|
||||||
|
|
||||||
|
|
||||||
__all__ = ["Tag", "TagMarkdownRenderer"]
|
|
||||||
|
|
@ -1,242 +0,0 @@
|
||||||
from collections import Counter
|
|
||||||
|
|
||||||
from mistletoe.block_token import (
|
|
||||||
BlockToken,
|
|
||||||
Document,
|
|
||||||
Heading,
|
|
||||||
List,
|
|
||||||
ListItem,
|
|
||||||
Paragraph,
|
|
||||||
)
|
|
||||||
|
|
||||||
from .extract_tag import extract_markers_and_tags, has_markers
|
|
||||||
from .list import split_at
|
|
||||||
from .markdown_tag import TagMarkdownRenderer
|
|
||||||
from .shard import Shard, StreamFile
|
|
||||||
|
|
||||||
|
|
||||||
def get_line_number(block_token: BlockToken) -> int:
|
|
||||||
return block_token.line_number # type: ignore
|
|
||||||
|
|
||||||
|
|
||||||
def build_shard(
|
|
||||||
start_line: int,
|
|
||||||
end_line: int,
|
|
||||||
markers: list[str] = [],
|
|
||||||
tags: list[str] = [],
|
|
||||||
children: list[Shard] = [],
|
|
||||||
) -> Shard:
|
|
||||||
if (
|
|
||||||
len(children) == 1
|
|
||||||
and len(tags) == 0
|
|
||||||
and len(markers) == 0
|
|
||||||
and children[0].start_line == start_line
|
|
||||||
and children[0].end_line == end_line
|
|
||||||
):
|
|
||||||
return children[0]
|
|
||||||
|
|
||||||
return Shard(
|
|
||||||
markers=markers,
|
|
||||||
tags=tags,
|
|
||||||
children=children,
|
|
||||||
start_line=start_line,
|
|
||||||
end_line=end_line,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def merge_into_first_shard(
|
|
||||||
shards: list[Shard], start_line: int, end_line: int, additional_tags: list[str] = []
|
|
||||||
):
|
|
||||||
return shards[0].model_copy(
|
|
||||||
update={
|
|
||||||
"start_line": start_line,
|
|
||||||
"end_line": end_line,
|
|
||||||
"children": shards[1:],
|
|
||||||
"tags": shards[0].tags + additional_tags,
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def find_paragraph_shard_positions(block_tokens: list[BlockToken]) -> list[int]:
|
|
||||||
return [
|
|
||||||
index
|
|
||||||
for index, block_token in enumerate(block_tokens)
|
|
||||||
if isinstance(block_token, Paragraph) and has_markers(block_token)
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
def find_headings_by_level(
|
|
||||||
block_tokens: list[BlockToken], header_level: int
|
|
||||||
) -> list[int]:
|
|
||||||
return [
|
|
||||||
index
|
|
||||||
for index, block_token in enumerate(block_tokens)
|
|
||||||
if isinstance(block_token, Heading) and block_token.level == header_level
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
def calculate_heading_level_for_next_split(
|
|
||||||
block_tokens: list[BlockToken],
|
|
||||||
) -> int | None:
|
|
||||||
"""
|
|
||||||
If there is no marker in any heading, then return None.
|
|
||||||
If only the first token is a heading with a marker, then return None.
|
|
||||||
Otherwise: Return the heading level with the lowest level (h1 < h2), of which there are two or which has a marker (and doesn't stem from first)
|
|
||||||
"""
|
|
||||||
level_of_headings_without_first_with_marker = [
|
|
||||||
token.level
|
|
||||||
for token in block_tokens[1:]
|
|
||||||
if isinstance(token, Heading) and has_markers(token)
|
|
||||||
]
|
|
||||||
|
|
||||||
if len(level_of_headings_without_first_with_marker) == 0:
|
|
||||||
return None
|
|
||||||
|
|
||||||
heading_level_counter = Counter(
|
|
||||||
[token.level for token in block_tokens if isinstance(token, Heading)]
|
|
||||||
)
|
|
||||||
|
|
||||||
return min(
|
|
||||||
[level for level, count in heading_level_counter.items() if count >= 2]
|
|
||||||
+ level_of_headings_without_first_with_marker
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def parse_single_block_shards(
|
|
||||||
block_token: BlockToken, start_line: int, end_line: int
|
|
||||||
) -> tuple[Shard | None, list[str]]:
|
|
||||||
markers, tags, children = [], [], []
|
|
||||||
|
|
||||||
if isinstance(block_token, List):
|
|
||||||
list_items: list[ListItem] = ( # type: ignore
|
|
||||||
list(block_token.children) if block_token.children is not None else []
|
|
||||||
)
|
|
||||||
for index, list_item in enumerate(list_items):
|
|
||||||
list_item_start_line = get_line_number(list_item)
|
|
||||||
list_item_end_line = (
|
|
||||||
get_line_number(list_items[index + 1]) - 1
|
|
||||||
if index + 1 < len(list_items)
|
|
||||||
else end_line
|
|
||||||
)
|
|
||||||
list_item_shard, list_item_tags = parse_multiple_block_shards(
|
|
||||||
list_item.children, # type: ignore
|
|
||||||
list_item_start_line,
|
|
||||||
list_item_end_line,
|
|
||||||
)
|
|
||||||
if list_item_shard is not None:
|
|
||||||
children.append(list_item_shard)
|
|
||||||
tags.extend(list_item_tags)
|
|
||||||
|
|
||||||
elif isinstance(block_token, (Paragraph, Heading)):
|
|
||||||
markers, tags = extract_markers_and_tags(block_token)
|
|
||||||
|
|
||||||
if len(markers) == 0 and len(children) == 0:
|
|
||||||
return None, tags
|
|
||||||
|
|
||||||
return build_shard(
|
|
||||||
start_line, end_line, markers=markers, tags=tags, children=children
|
|
||||||
), []
|
|
||||||
|
|
||||||
|
|
||||||
def parse_multiple_block_shards(
|
|
||||||
block_tokens: list[BlockToken],
|
|
||||||
start_line: int,
|
|
||||||
end_line: int,
|
|
||||||
enforce_shard: bool = False,
|
|
||||||
) -> tuple[Shard | None, list[str]]:
|
|
||||||
is_first_block_heading = isinstance(block_tokens[0], Heading) and has_markers(
|
|
||||||
block_tokens[0]
|
|
||||||
)
|
|
||||||
|
|
||||||
paragraph_positions = find_paragraph_shard_positions(block_tokens)
|
|
||||||
children, tags = [], []
|
|
||||||
|
|
||||||
is_first_block_only_with_marker = False
|
|
||||||
|
|
||||||
for i, token in enumerate(block_tokens):
|
|
||||||
if i in paragraph_positions:
|
|
||||||
is_first_block_only_with_marker = i == 0
|
|
||||||
|
|
||||||
child_start_line = get_line_number(token)
|
|
||||||
child_end_line = (
|
|
||||||
get_line_number(block_tokens[i + 1]) - 1
|
|
||||||
if i + 1 < len(block_tokens)
|
|
||||||
else end_line
|
|
||||||
)
|
|
||||||
|
|
||||||
child_shard, child_tags = parse_single_block_shards(
|
|
||||||
token, child_start_line, child_end_line
|
|
||||||
)
|
|
||||||
|
|
||||||
if child_shard is not None:
|
|
||||||
children.append(child_shard)
|
|
||||||
if len(child_tags) > 0:
|
|
||||||
tags.extend(child_tags)
|
|
||||||
|
|
||||||
if len(children) == 0 and not enforce_shard:
|
|
||||||
return None, tags
|
|
||||||
if is_first_block_heading or is_first_block_only_with_marker:
|
|
||||||
return merge_into_first_shard(children, start_line, end_line, tags), []
|
|
||||||
else:
|
|
||||||
return build_shard(start_line, end_line, tags=tags, children=children), []
|
|
||||||
|
|
||||||
|
|
||||||
def parse_header_shards(
|
|
||||||
block_tokens: list[BlockToken],
|
|
||||||
start_line: int,
|
|
||||||
end_line: int,
|
|
||||||
use_first_child_as_header: bool = False,
|
|
||||||
) -> Shard | None:
|
|
||||||
if len(block_tokens) == 0:
|
|
||||||
return build_shard(start_line, end_line)
|
|
||||||
|
|
||||||
split_at_heading_level = calculate_heading_level_for_next_split(block_tokens)
|
|
||||||
|
|
||||||
if split_at_heading_level is None:
|
|
||||||
return parse_multiple_block_shards(
|
|
||||||
block_tokens, start_line, end_line, enforce_shard=True
|
|
||||||
)[0]
|
|
||||||
|
|
||||||
heading_positions = find_headings_by_level(block_tokens, split_at_heading_level)
|
|
||||||
|
|
||||||
block_tokens_split_by_heading = split_at(block_tokens, heading_positions)
|
|
||||||
|
|
||||||
children = []
|
|
||||||
for i, child_blocks in enumerate(block_tokens_split_by_heading):
|
|
||||||
child_start_line = get_line_number(child_blocks[0])
|
|
||||||
child_end_line = (
|
|
||||||
get_line_number(block_tokens_split_by_heading[i + 1][0]) - 1
|
|
||||||
if i + 1 < len(block_tokens_split_by_heading)
|
|
||||||
else end_line
|
|
||||||
)
|
|
||||||
if child_shard := parse_header_shards(
|
|
||||||
child_blocks,
|
|
||||||
child_start_line,
|
|
||||||
child_end_line,
|
|
||||||
use_first_child_as_header=i > 0 or 0 in heading_positions,
|
|
||||||
):
|
|
||||||
children.append(child_shard)
|
|
||||||
|
|
||||||
if use_first_child_as_header and len(children) > 0:
|
|
||||||
return merge_into_first_shard(children, start_line, end_line)
|
|
||||||
else:
|
|
||||||
return build_shard(start_line, end_line, children=children)
|
|
||||||
|
|
||||||
|
|
||||||
def parse_markdown_file(file_name: str, file_content: str) -> StreamFile:
|
|
||||||
shard = build_shard(1, max([len(file_content.splitlines()), 1]))
|
|
||||||
|
|
||||||
with TagMarkdownRenderer():
|
|
||||||
ast = Document(file_content)
|
|
||||||
|
|
||||||
block_tokens: list[BlockToken] = ast.children # type: ignore
|
|
||||||
if len(block_tokens) > 0:
|
|
||||||
if parsed_shard := parse_header_shards(
|
|
||||||
block_tokens, shard.start_line, shard.end_line
|
|
||||||
):
|
|
||||||
shard = parsed_shard
|
|
||||||
|
|
||||||
return StreamFile(shard=shard, file_name=file_name)
|
|
||||||
|
|
||||||
|
|
||||||
__all__ = ["Shard", "StreamFile", "parse_markdown_file"]
|
|
||||||
|
|
@ -1,19 +0,0 @@
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from pydantic import BaseModel
|
|
||||||
|
|
||||||
|
|
||||||
class Shard(BaseModel):
|
|
||||||
markers: list[str] = []
|
|
||||||
tags: list[str] = []
|
|
||||||
start_line: int
|
|
||||||
end_line: int
|
|
||||||
children: list[Shard] = []
|
|
||||||
|
|
||||||
|
|
||||||
class StreamFile(BaseModel):
|
|
||||||
file_name: str
|
|
||||||
shard: Shard | None = None
|
|
||||||
|
|
||||||
|
|
||||||
__all__ = ["Shard", "StreamFile"]
|
|
||||||
|
|
@ -1,3 +0,0 @@
|
||||||
from .find import find_shard, find_shard_by_position
|
|
||||||
|
|
||||||
__all__ = ["find_shard_by_position", "find_shard"]
|
|
||||||
|
|
@ -1,35 +0,0 @@
|
||||||
from typing import Callable
|
|
||||||
|
|
||||||
from streamer.localize import LocalizedShard
|
|
||||||
|
|
||||||
|
|
||||||
def find_shard(
|
|
||||||
shards: list[LocalizedShard], query_function: Callable[[LocalizedShard], bool]
|
|
||||||
) -> list[LocalizedShard]:
|
|
||||||
found_shards = []
|
|
||||||
|
|
||||||
for shard in shards:
|
|
||||||
if query_function(shard):
|
|
||||||
found_shards.append(shard)
|
|
||||||
found_shards.extend(find_shard(shard.children, query_function))
|
|
||||||
|
|
||||||
return found_shards
|
|
||||||
|
|
||||||
|
|
||||||
def find_shard_by_position(
|
|
||||||
shards: list[LocalizedShard], dimension: str, value: str
|
|
||||||
) -> list[LocalizedShard]:
|
|
||||||
return find_shard(
|
|
||||||
shards,
|
|
||||||
lambda shard: dimension in shard.location
|
|
||||||
and shard.location[dimension] == value,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def find_shard_by_set_dimension(
|
|
||||||
shards: list[LocalizedShard], dimension: str
|
|
||||||
) -> list[LocalizedShard]:
|
|
||||||
return find_shard(shards, lambda shard: dimension in shard.location)
|
|
||||||
|
|
||||||
|
|
||||||
__all__ = ["find_shard_by_position", "find_shard", "find_shard_by_set_dimension"]
|
|
||||||
|
|
@ -1,33 +0,0 @@
|
||||||
import os
|
|
||||||
from pydantic_settings import (
|
|
||||||
BaseSettings,
|
|
||||||
PydanticBaseSettingsSource,
|
|
||||||
SettingsConfigDict,
|
|
||||||
YamlConfigSettingsSource,
|
|
||||||
)
|
|
||||||
from xdg_base_dirs import xdg_config_home
|
|
||||||
|
|
||||||
SETTINGS_FILE = xdg_config_home() / "streamer" / "config.yaml"
|
|
||||||
|
|
||||||
|
|
||||||
class Settings(BaseSettings):
|
|
||||||
model_config = SettingsConfigDict(env_file_encoding="utf-8")
|
|
||||||
|
|
||||||
base_folder: str = os.getcwd()
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def settings_customise_sources(
|
|
||||||
cls,
|
|
||||||
settings_cls: type[BaseSettings],
|
|
||||||
init_settings: PydanticBaseSettingsSource,
|
|
||||||
env_settings: PydanticBaseSettingsSource,
|
|
||||||
dotenv_settings: PydanticBaseSettingsSource,
|
|
||||||
file_secret_settings: PydanticBaseSettingsSource,
|
|
||||||
) -> tuple[PydanticBaseSettingsSource, ...]:
|
|
||||||
return (
|
|
||||||
init_settings,
|
|
||||||
YamlConfigSettingsSource(settings_cls, yaml_file=SETTINGS_FILE),
|
|
||||||
dotenv_settings,
|
|
||||||
env_settings,
|
|
||||||
file_secret_settings,
|
|
||||||
)
|
|
||||||
|
|
@ -1,115 +0,0 @@
|
||||||
from enum import StrEnum
|
|
||||||
|
|
||||||
from streamer.localize import RepositoryConfiguration
|
|
||||||
from streamer.localize.repository_configuration import (
|
|
||||||
Dimension,
|
|
||||||
Marker,
|
|
||||||
MarkerPlacement,
|
|
||||||
)
|
|
||||||
|
|
||||||
TIMESHEET_TAG = "Timesheet"
|
|
||||||
TIMESHEET_DIMENSION_NAME = "timesheet"
|
|
||||||
|
|
||||||
|
|
||||||
class TimesheetPointType(StrEnum):
|
|
||||||
Card = "CARD"
|
|
||||||
SickLeave = "SICK_LEAVE"
|
|
||||||
Vacation = "VACATION"
|
|
||||||
Undertime = "UNDERTIME"
|
|
||||||
Holiday = "HOLIDAY"
|
|
||||||
Break = "BREAK"
|
|
||||||
|
|
||||||
|
|
||||||
BasicTimesheetConfiguration = RepositoryConfiguration(
|
|
||||||
dimensions={
|
|
||||||
TIMESHEET_DIMENSION_NAME: Dimension(
|
|
||||||
display_name="Timesheet",
|
|
||||||
comment="Used by Timesheet-Subcommand to create Timecards",
|
|
||||||
propagate=False,
|
|
||||||
)
|
|
||||||
},
|
|
||||||
markers={
|
|
||||||
TIMESHEET_TAG: Marker(
|
|
||||||
display_name="A default time card",
|
|
||||||
placements=[
|
|
||||||
MarkerPlacement(
|
|
||||||
dimension=TIMESHEET_DIMENSION_NAME,
|
|
||||||
value=TimesheetPointType.Card.value,
|
|
||||||
overwrites=False,
|
|
||||||
)
|
|
||||||
],
|
|
||||||
),
|
|
||||||
"VacationDay": Marker(
|
|
||||||
display_name="Vacation Day",
|
|
||||||
placements=[
|
|
||||||
MarkerPlacement(
|
|
||||||
if_with={TIMESHEET_TAG},
|
|
||||||
dimension=TIMESHEET_DIMENSION_NAME,
|
|
||||||
value=TimesheetPointType.Vacation.value,
|
|
||||||
)
|
|
||||||
],
|
|
||||||
),
|
|
||||||
"Break": Marker(
|
|
||||||
display_name="Break",
|
|
||||||
placements=[
|
|
||||||
MarkerPlacement(
|
|
||||||
if_with={TIMESHEET_TAG},
|
|
||||||
dimension=TIMESHEET_DIMENSION_NAME,
|
|
||||||
value=TimesheetPointType.Break.value,
|
|
||||||
)
|
|
||||||
],
|
|
||||||
),
|
|
||||||
"LunchBreak": Marker(
|
|
||||||
display_name="Break",
|
|
||||||
placements=[
|
|
||||||
MarkerPlacement(
|
|
||||||
if_with={TIMESHEET_TAG},
|
|
||||||
dimension=TIMESHEET_DIMENSION_NAME,
|
|
||||||
value=TimesheetPointType.Break.value,
|
|
||||||
)
|
|
||||||
],
|
|
||||||
),
|
|
||||||
"Feierabend": Marker(
|
|
||||||
display_name="Break",
|
|
||||||
placements=[
|
|
||||||
MarkerPlacement(
|
|
||||||
if_with={TIMESHEET_TAG},
|
|
||||||
dimension=TIMESHEET_DIMENSION_NAME,
|
|
||||||
value=TimesheetPointType.Break.value,
|
|
||||||
)
|
|
||||||
],
|
|
||||||
),
|
|
||||||
"Holiday": Marker(
|
|
||||||
display_name="Offical Holiday",
|
|
||||||
placements=[
|
|
||||||
MarkerPlacement(
|
|
||||||
if_with={TIMESHEET_TAG},
|
|
||||||
dimension=TIMESHEET_DIMENSION_NAME,
|
|
||||||
value=TimesheetPointType.Holiday.value,
|
|
||||||
)
|
|
||||||
],
|
|
||||||
),
|
|
||||||
"SickLeave": Marker(
|
|
||||||
display_name="Sick Leave",
|
|
||||||
placements=[
|
|
||||||
MarkerPlacement(
|
|
||||||
if_with={TIMESHEET_TAG},
|
|
||||||
dimension=TIMESHEET_DIMENSION_NAME,
|
|
||||||
value=TimesheetPointType.SickLeave.value,
|
|
||||||
)
|
|
||||||
],
|
|
||||||
),
|
|
||||||
"UndertimeDay": Marker(
|
|
||||||
display_name="Undertime Leave",
|
|
||||||
placements=[
|
|
||||||
MarkerPlacement(
|
|
||||||
if_with={TIMESHEET_TAG},
|
|
||||||
dimension=TIMESHEET_DIMENSION_NAME,
|
|
||||||
value=TimesheetPointType.Undertime.value,
|
|
||||||
)
|
|
||||||
],
|
|
||||||
),
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
__all__ = ["BasicTimesheetConfiguration", "TIMESHEET_TAG", "TIMESHEET_DIMENSION_NAME"]
|
|
||||||
|
|
@ -1,114 +0,0 @@
|
||||||
from datetime import datetime
|
|
||||||
from itertools import groupby
|
|
||||||
|
|
||||||
from pydantic import BaseModel
|
|
||||||
|
|
||||||
from streamer.localize import LocalizedShard
|
|
||||||
from streamer.query.find import find_shard_by_set_dimension
|
|
||||||
|
|
||||||
from .configuration import TIMESHEET_DIMENSION_NAME, TimesheetPointType
|
|
||||||
from .timecard import SpecialDayType, Timecard, Timesheet
|
|
||||||
|
|
||||||
|
|
||||||
class TimesheetPoint(BaseModel):
|
|
||||||
moment: datetime
|
|
||||||
type: TimesheetPointType
|
|
||||||
|
|
||||||
|
|
||||||
def shard_to_timesheet_point(shard: LocalizedShard) -> TimesheetPoint:
|
|
||||||
return TimesheetPoint(
|
|
||||||
moment=shard.moment,
|
|
||||||
type=TimesheetPointType(shard.location[TIMESHEET_DIMENSION_NAME]),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def shards_to_timesheet_points(shards: list[LocalizedShard]) -> list[TimesheetPoint]:
|
|
||||||
return list(
|
|
||||||
map(
|
|
||||||
shard_to_timesheet_point,
|
|
||||||
find_shard_by_set_dimension(shards, TIMESHEET_DIMENSION_NAME),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def aggregate_timecard_day(points: list[TimesheetPoint]) -> Timesheet | None:
|
|
||||||
sorted_points = sorted(points, key=lambda point: point.moment)
|
|
||||||
|
|
||||||
is_sick_leave = False
|
|
||||||
special_day_type = None
|
|
||||||
|
|
||||||
card_date = sorted_points[0].moment.date()
|
|
||||||
|
|
||||||
# We expect timesheet points to alternate between "Card" (start work) and
|
|
||||||
# "Break" (end work). Starting in "break" means we are not currently in a
|
|
||||||
# work block until we see the first Card.
|
|
||||||
last_is_break = True
|
|
||||||
last_time = sorted_points[0].moment.time()
|
|
||||||
|
|
||||||
timecards: list[Timecard] = []
|
|
||||||
for point in sorted_points:
|
|
||||||
if point.moment.date() != card_date:
|
|
||||||
raise ValueError("Dates of all given timesheet days should be consistent")
|
|
||||||
|
|
||||||
point_time = point.moment.time()
|
|
||||||
|
|
||||||
match point.type:
|
|
||||||
case TimesheetPointType.Holiday:
|
|
||||||
if special_day_type is not None:
|
|
||||||
raise ValueError(
|
|
||||||
f"{card_date} is both {point.type} and {special_day_type}"
|
|
||||||
)
|
|
||||||
special_day_type = SpecialDayType.Holiday
|
|
||||||
case TimesheetPointType.Vacation:
|
|
||||||
if special_day_type is not None:
|
|
||||||
raise ValueError(
|
|
||||||
f"{card_date} is both {point.type} and {special_day_type}"
|
|
||||||
)
|
|
||||||
special_day_type = SpecialDayType.Vacation
|
|
||||||
case TimesheetPointType.Undertime:
|
|
||||||
if special_day_type is not None:
|
|
||||||
raise ValueError(
|
|
||||||
f"{card_date} is both {point.type} and {special_day_type}"
|
|
||||||
)
|
|
||||||
special_day_type = SpecialDayType.Undertime
|
|
||||||
case TimesheetPointType.SickLeave:
|
|
||||||
is_sick_leave = True
|
|
||||||
case TimesheetPointType.Break:
|
|
||||||
if not last_is_break:
|
|
||||||
timecards.append(Timecard(from_time=last_time, to_time=point_time))
|
|
||||||
last_is_break = True
|
|
||||||
last_time = point_time
|
|
||||||
case TimesheetPointType.Card:
|
|
||||||
if last_is_break:
|
|
||||||
last_is_break = False
|
|
||||||
last_time = point_time
|
|
||||||
|
|
||||||
if not last_is_break:
|
|
||||||
raise ValueError(f"Last Timecard of {card_date} is not a break!")
|
|
||||||
|
|
||||||
if len(timecards) == 0 and not is_sick_leave and special_day_type is None:
|
|
||||||
return None
|
|
||||||
|
|
||||||
return Timesheet(
|
|
||||||
date=card_date,
|
|
||||||
is_sick_leave=is_sick_leave,
|
|
||||||
special_day_type=special_day_type,
|
|
||||||
timecards=timecards,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def aggregate_timecards(points: list[TimesheetPoint]) -> list[Timesheet]:
|
|
||||||
day_timecards = [
|
|
||||||
aggregate_timecard_day(list(timecard))
|
|
||||||
for _date, timecard in groupby(points, key=lambda point: point.moment.date())
|
|
||||||
]
|
|
||||||
|
|
||||||
return [timecard for timecard in day_timecards if timecard is not None]
|
|
||||||
|
|
||||||
|
|
||||||
def extract_timesheets(shards: list[LocalizedShard]) -> list[Timesheet]:
|
|
||||||
points = shards_to_timesheet_points(shards)
|
|
||||||
return aggregate_timecards(points)
|
|
||||||
|
|
||||||
|
|
||||||
__all__ = ["extract_timesheets"]
|
|
||||||
|
|
@ -1,23 +0,0 @@
|
||||||
from datetime import date, time
|
|
||||||
from enum import StrEnum
|
|
||||||
|
|
||||||
from pydantic import BaseModel
|
|
||||||
|
|
||||||
|
|
||||||
class SpecialDayType(StrEnum):
|
|
||||||
Vacation = "VACATION"
|
|
||||||
Undertime = "UNDERTIME"
|
|
||||||
Holiday = "HOLIDAY"
|
|
||||||
Weekend = "WEEKEND"
|
|
||||||
|
|
||||||
|
|
||||||
class Timecard(BaseModel):
|
|
||||||
from_time: time
|
|
||||||
to_time: time
|
|
||||||
|
|
||||||
|
|
||||||
class Timesheet(BaseModel):
|
|
||||||
date: date
|
|
||||||
is_sick_leave: bool = False
|
|
||||||
special_day_type: SpecialDayType | None = None
|
|
||||||
timecards: list[Timecard]
|
|
||||||
84
src/timesheet/configuration.rs
Normal file
84
src/timesheet/configuration.rs
Normal file
|
|
@ -0,0 +1,84 @@
|
||||||
|
use once_cell::sync::Lazy;
|
||||||
|
|
||||||
|
use crate::models::{Dimension, Marker, MarkerPlacement, RepositoryConfiguration};
|
||||||
|
|
||||||
|
use super::TimesheetPointType;
|
||||||
|
|
||||||
|
pub const TIMESHEET_TAG: &str = "Timesheet";
|
||||||
|
pub const TIMESHEET_DIMENSION_NAME: &str = "timesheet";
|
||||||
|
|
||||||
|
/// Pre-configured repository configuration for timesheet tracking.
|
||||||
|
#[allow(non_upper_case_globals)]
|
||||||
|
pub static BasicTimesheetConfiguration: Lazy<RepositoryConfiguration> = Lazy::new(|| {
|
||||||
|
RepositoryConfiguration::new()
|
||||||
|
.with_dimension(
|
||||||
|
TIMESHEET_DIMENSION_NAME,
|
||||||
|
Dimension::new("Timesheet")
|
||||||
|
.with_comment("Used by Timesheet-Subcommand to create Timecards")
|
||||||
|
.with_propagate(false),
|
||||||
|
)
|
||||||
|
.with_marker(
|
||||||
|
TIMESHEET_TAG,
|
||||||
|
Marker::new("A default time card").with_placements(vec![MarkerPlacement::new(
|
||||||
|
TIMESHEET_DIMENSION_NAME,
|
||||||
|
)
|
||||||
|
.with_value(TimesheetPointType::Card.as_str())
|
||||||
|
.with_overwrites(false)]),
|
||||||
|
)
|
||||||
|
.with_marker(
|
||||||
|
"VacationDay",
|
||||||
|
Marker::new("Vacation Day").with_placements(vec![MarkerPlacement::new(
|
||||||
|
TIMESHEET_DIMENSION_NAME,
|
||||||
|
)
|
||||||
|
.with_if_with(vec![TIMESHEET_TAG])
|
||||||
|
.with_value(TimesheetPointType::Vacation.as_str())]),
|
||||||
|
)
|
||||||
|
.with_marker(
|
||||||
|
"Break",
|
||||||
|
Marker::new("Break").with_placements(vec![MarkerPlacement::new(
|
||||||
|
TIMESHEET_DIMENSION_NAME,
|
||||||
|
)
|
||||||
|
.with_if_with(vec![TIMESHEET_TAG])
|
||||||
|
.with_value(TimesheetPointType::Break.as_str())]),
|
||||||
|
)
|
||||||
|
.with_marker(
|
||||||
|
"LunchBreak",
|
||||||
|
Marker::new("Break").with_placements(vec![MarkerPlacement::new(
|
||||||
|
TIMESHEET_DIMENSION_NAME,
|
||||||
|
)
|
||||||
|
.with_if_with(vec![TIMESHEET_TAG])
|
||||||
|
.with_value(TimesheetPointType::Break.as_str())]),
|
||||||
|
)
|
||||||
|
.with_marker(
|
||||||
|
"Feierabend",
|
||||||
|
Marker::new("Break").with_placements(vec![MarkerPlacement::new(
|
||||||
|
TIMESHEET_DIMENSION_NAME,
|
||||||
|
)
|
||||||
|
.with_if_with(vec![TIMESHEET_TAG])
|
||||||
|
.with_value(TimesheetPointType::Break.as_str())]),
|
||||||
|
)
|
||||||
|
.with_marker(
|
||||||
|
"Holiday",
|
||||||
|
Marker::new("Official Holiday").with_placements(vec![MarkerPlacement::new(
|
||||||
|
TIMESHEET_DIMENSION_NAME,
|
||||||
|
)
|
||||||
|
.with_if_with(vec![TIMESHEET_TAG])
|
||||||
|
.with_value(TimesheetPointType::Holiday.as_str())]),
|
||||||
|
)
|
||||||
|
.with_marker(
|
||||||
|
"SickLeave",
|
||||||
|
Marker::new("Sick Leave").with_placements(vec![MarkerPlacement::new(
|
||||||
|
TIMESHEET_DIMENSION_NAME,
|
||||||
|
)
|
||||||
|
.with_if_with(vec![TIMESHEET_TAG])
|
||||||
|
.with_value(TimesheetPointType::SickLeave.as_str())]),
|
||||||
|
)
|
||||||
|
.with_marker(
|
||||||
|
"UndertimeDay",
|
||||||
|
Marker::new("Undertime Leave").with_placements(vec![MarkerPlacement::new(
|
||||||
|
TIMESHEET_DIMENSION_NAME,
|
||||||
|
)
|
||||||
|
.with_if_with(vec![TIMESHEET_TAG])
|
||||||
|
.with_value(TimesheetPointType::Undertime.as_str())]),
|
||||||
|
)
|
||||||
|
});
|
||||||
537
src/timesheet/extract.rs
Normal file
537
src/timesheet/extract.rs
Normal file
|
|
@ -0,0 +1,537 @@
|
||||||
|
use chrono::{DateTime, Utc};
|
||||||
|
use itertools::Itertools;
|
||||||
|
|
||||||
|
use crate::error::StreamdError;
|
||||||
|
use crate::models::{LocalizedShard, SpecialDayType, Timecard, Timesheet};
|
||||||
|
use crate::query::find_shard_by_set_dimension;
|
||||||
|
|
||||||
|
use super::configuration::TIMESHEET_DIMENSION_NAME;
|
||||||
|
use super::TimesheetPointType;
|
||||||
|
|
||||||
|
/// A point in time with an associated timesheet type.
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
struct TimesheetPoint {
|
||||||
|
moment: DateTime<Utc>,
|
||||||
|
point_type: TimesheetPointType,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Convert a localized shard to a timesheet point.
|
||||||
|
fn shard_to_timesheet_point(shard: &LocalizedShard) -> Option<TimesheetPoint> {
|
||||||
|
let type_str = shard.location.get(TIMESHEET_DIMENSION_NAME)?;
|
||||||
|
let point_type = type_str.parse::<TimesheetPointType>().ok()?;
|
||||||
|
|
||||||
|
Some(TimesheetPoint {
|
||||||
|
moment: shard.moment,
|
||||||
|
point_type,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Convert localized shards to timesheet points.
|
||||||
|
fn shards_to_timesheet_points(shards: &[LocalizedShard]) -> Vec<TimesheetPoint> {
|
||||||
|
find_shard_by_set_dimension(shards, TIMESHEET_DIMENSION_NAME)
|
||||||
|
.iter()
|
||||||
|
.filter_map(shard_to_timesheet_point)
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Aggregate timesheet points for a single day into a Timesheet.
|
||||||
|
fn aggregate_timecard_day(points: &[TimesheetPoint]) -> Result<Option<Timesheet>, StreamdError> {
|
||||||
|
if points.is_empty() {
|
||||||
|
return Ok(None);
|
||||||
|
}
|
||||||
|
|
||||||
|
let sorted_points: Vec<_> = {
|
||||||
|
let mut pts = points.to_vec();
|
||||||
|
pts.sort_by_key(|p| p.moment);
|
||||||
|
pts
|
||||||
|
};
|
||||||
|
|
||||||
|
let card_date = sorted_points[0].moment.date_naive();
|
||||||
|
let mut is_sick_leave = false;
|
||||||
|
let mut special_day_type: Option<SpecialDayType> = None;
|
||||||
|
|
||||||
|
// State machine: starting in "break" mode (not working)
|
||||||
|
let mut last_is_break = true;
|
||||||
|
let mut last_time = sorted_points[0].moment.time();
|
||||||
|
let mut timecards: Vec<Timecard> = Vec::new();
|
||||||
|
|
||||||
|
for point in &sorted_points {
|
||||||
|
if point.moment.date_naive() != card_date {
|
||||||
|
return Err(StreamdError::TimesheetError(
|
||||||
|
"Dates of all given timesheet days should be consistent".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
let point_time = point.moment.time();
|
||||||
|
|
||||||
|
match point.point_type {
|
||||||
|
TimesheetPointType::Holiday => {
|
||||||
|
if special_day_type.is_some() {
|
||||||
|
return Err(StreamdError::TimesheetError(format!(
|
||||||
|
"{} is both {:?} and {:?}",
|
||||||
|
card_date, point.point_type, special_day_type
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
special_day_type = Some(SpecialDayType::Holiday);
|
||||||
|
}
|
||||||
|
TimesheetPointType::Vacation => {
|
||||||
|
if special_day_type.is_some() {
|
||||||
|
return Err(StreamdError::TimesheetError(format!(
|
||||||
|
"{} is both {:?} and {:?}",
|
||||||
|
card_date, point.point_type, special_day_type
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
special_day_type = Some(SpecialDayType::Vacation);
|
||||||
|
}
|
||||||
|
TimesheetPointType::Undertime => {
|
||||||
|
if special_day_type.is_some() {
|
||||||
|
return Err(StreamdError::TimesheetError(format!(
|
||||||
|
"{} is both {:?} and {:?}",
|
||||||
|
card_date, point.point_type, special_day_type
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
special_day_type = Some(SpecialDayType::Undertime);
|
||||||
|
}
|
||||||
|
TimesheetPointType::SickLeave => {
|
||||||
|
is_sick_leave = true;
|
||||||
|
}
|
||||||
|
TimesheetPointType::Break => {
|
||||||
|
if !last_is_break {
|
||||||
|
timecards.push(Timecard::new(last_time, point_time));
|
||||||
|
last_is_break = true;
|
||||||
|
last_time = point_time;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
TimesheetPointType::Card => {
|
||||||
|
if last_is_break {
|
||||||
|
last_is_break = false;
|
||||||
|
last_time = point_time;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check that we ended in break mode
|
||||||
|
if !last_is_break {
|
||||||
|
return Err(StreamdError::TimesheetError(format!(
|
||||||
|
"Last Timecard of {} is not a break!",
|
||||||
|
card_date
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Only return a timesheet if there's meaningful data
|
||||||
|
if timecards.is_empty() && !is_sick_leave && special_day_type.is_none() {
|
||||||
|
return Ok(None);
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(Some(Timesheet {
|
||||||
|
date: card_date,
|
||||||
|
is_sick_leave,
|
||||||
|
special_day_type,
|
||||||
|
timecards,
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Aggregate timesheet points into timesheets, grouped by day.
|
||||||
|
fn aggregate_timecards(points: &[TimesheetPoint]) -> Result<Vec<Timesheet>, StreamdError> {
|
||||||
|
let mut timesheets = Vec::new();
|
||||||
|
|
||||||
|
// Group by date
|
||||||
|
for (_date, group) in &points.iter().chunk_by(|p| p.moment.date_naive()) {
|
||||||
|
let day_points: Vec<_> = group.cloned().collect();
|
||||||
|
if let Some(timesheet) = aggregate_timecard_day(&day_points)? {
|
||||||
|
timesheets.push(timesheet);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(timesheets)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Extract timesheets from localized shards.
|
||||||
|
pub fn extract_timesheets(shards: &[LocalizedShard]) -> Result<Vec<Timesheet>, StreamdError> {
|
||||||
|
let points = shards_to_timesheet_points(shards);
|
||||||
|
aggregate_timecards(&points)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
use chrono::{NaiveTime, TimeZone};
|
||||||
|
use indexmap::IndexMap;
|
||||||
|
|
||||||
|
fn point(at: DateTime<Utc>, point_type: TimesheetPointType) -> LocalizedShard {
|
||||||
|
let mut location = IndexMap::new();
|
||||||
|
location.insert(
|
||||||
|
TIMESHEET_DIMENSION_NAME.to_string(),
|
||||||
|
point_type.as_str().to_string(),
|
||||||
|
);
|
||||||
|
location.insert("file".to_string(), "dummy.md".to_string());
|
||||||
|
|
||||||
|
LocalizedShard {
|
||||||
|
moment: at,
|
||||||
|
markers: vec!["Timesheet".to_string()],
|
||||||
|
tags: vec![],
|
||||||
|
start_line: 1,
|
||||||
|
end_line: 1,
|
||||||
|
children: vec![],
|
||||||
|
location,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_single_work_block() {
|
||||||
|
let day = Utc.with_ymd_and_hms(2026, 2, 1, 0, 0, 0).unwrap();
|
||||||
|
let shards = vec![
|
||||||
|
point(
|
||||||
|
day.with_time(NaiveTime::from_hms_opt(9, 0, 0).unwrap())
|
||||||
|
.unwrap(),
|
||||||
|
TimesheetPointType::Card,
|
||||||
|
),
|
||||||
|
point(
|
||||||
|
day.with_time(NaiveTime::from_hms_opt(17, 30, 0).unwrap())
|
||||||
|
.unwrap(),
|
||||||
|
TimesheetPointType::Break,
|
||||||
|
),
|
||||||
|
];
|
||||||
|
|
||||||
|
let result = extract_timesheets(&shards).unwrap();
|
||||||
|
|
||||||
|
assert_eq!(result.len(), 1);
|
||||||
|
assert_eq!(result[0].date, day.date_naive());
|
||||||
|
assert!(!result[0].is_sick_leave);
|
||||||
|
assert!(result[0].special_day_type.is_none());
|
||||||
|
assert_eq!(result[0].timecards.len(), 1);
|
||||||
|
assert_eq!(
|
||||||
|
result[0].timecards[0].from_time,
|
||||||
|
NaiveTime::from_hms_opt(9, 0, 0).unwrap()
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
result[0].timecards[0].to_time,
|
||||||
|
NaiveTime::from_hms_opt(17, 30, 0).unwrap()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_three_work_blocks_separated_by_breaks() {
|
||||||
|
let day = Utc.with_ymd_and_hms(2026, 2, 1, 0, 0, 0).unwrap();
|
||||||
|
let shards = vec![
|
||||||
|
point(
|
||||||
|
day.with_time(NaiveTime::from_hms_opt(7, 15, 0).unwrap())
|
||||||
|
.unwrap(),
|
||||||
|
TimesheetPointType::Card,
|
||||||
|
),
|
||||||
|
point(
|
||||||
|
day.with_time(NaiveTime::from_hms_opt(12, 0, 0).unwrap())
|
||||||
|
.unwrap(),
|
||||||
|
TimesheetPointType::Break,
|
||||||
|
),
|
||||||
|
point(
|
||||||
|
day.with_time(NaiveTime::from_hms_opt(12, 45, 0).unwrap())
|
||||||
|
.unwrap(),
|
||||||
|
TimesheetPointType::Card,
|
||||||
|
),
|
||||||
|
point(
|
||||||
|
day.with_time(NaiveTime::from_hms_opt(15, 0, 0).unwrap())
|
||||||
|
.unwrap(),
|
||||||
|
TimesheetPointType::Break,
|
||||||
|
),
|
||||||
|
point(
|
||||||
|
day.with_time(NaiveTime::from_hms_opt(16, 0, 0).unwrap())
|
||||||
|
.unwrap(),
|
||||||
|
TimesheetPointType::Card,
|
||||||
|
),
|
||||||
|
point(
|
||||||
|
day.with_time(NaiveTime::from_hms_opt(17, 0, 0).unwrap())
|
||||||
|
.unwrap(),
|
||||||
|
TimesheetPointType::Break,
|
||||||
|
),
|
||||||
|
];
|
||||||
|
|
||||||
|
let result = extract_timesheets(&shards).unwrap();
|
||||||
|
|
||||||
|
assert_eq!(result.len(), 1);
|
||||||
|
assert_eq!(result[0].timecards.len(), 3);
|
||||||
|
assert_eq!(
|
||||||
|
result[0].timecards[0].from_time,
|
||||||
|
NaiveTime::from_hms_opt(7, 15, 0).unwrap()
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
result[0].timecards[0].to_time,
|
||||||
|
NaiveTime::from_hms_opt(12, 0, 0).unwrap()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_input_order_is_not_required_within_a_day() {
|
||||||
|
let day = Utc.with_ymd_and_hms(2026, 2, 1, 0, 0, 0).unwrap();
|
||||||
|
|
||||||
|
let shards = vec![
|
||||||
|
point(
|
||||||
|
day.with_time(NaiveTime::from_hms_opt(15, 0, 0).unwrap())
|
||||||
|
.unwrap(),
|
||||||
|
TimesheetPointType::Break,
|
||||||
|
),
|
||||||
|
point(
|
||||||
|
day.with_time(NaiveTime::from_hms_opt(7, 15, 0).unwrap())
|
||||||
|
.unwrap(),
|
||||||
|
TimesheetPointType::Card,
|
||||||
|
),
|
||||||
|
point(
|
||||||
|
day.with_time(NaiveTime::from_hms_opt(12, 0, 0).unwrap())
|
||||||
|
.unwrap(),
|
||||||
|
TimesheetPointType::Break,
|
||||||
|
),
|
||||||
|
point(
|
||||||
|
day.with_time(NaiveTime::from_hms_opt(12, 45, 0).unwrap())
|
||||||
|
.unwrap(),
|
||||||
|
TimesheetPointType::Card,
|
||||||
|
),
|
||||||
|
point(
|
||||||
|
day.with_time(NaiveTime::from_hms_opt(17, 0, 0).unwrap())
|
||||||
|
.unwrap(),
|
||||||
|
TimesheetPointType::Break,
|
||||||
|
),
|
||||||
|
point(
|
||||||
|
day.with_time(NaiveTime::from_hms_opt(16, 0, 0).unwrap())
|
||||||
|
.unwrap(),
|
||||||
|
TimesheetPointType::Card,
|
||||||
|
),
|
||||||
|
];
|
||||||
|
|
||||||
|
let result = extract_timesheets(&shards).unwrap();
|
||||||
|
|
||||||
|
assert_eq!(result.len(), 1);
|
||||||
|
assert_eq!(result[0].timecards.len(), 3);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_groups_by_day() {
|
||||||
|
let day1 = Utc.with_ymd_and_hms(2026, 2, 1, 0, 0, 0).unwrap();
|
||||||
|
let day2 = Utc.with_ymd_and_hms(2026, 2, 2, 0, 0, 0).unwrap();
|
||||||
|
|
||||||
|
let shards = vec![
|
||||||
|
point(
|
||||||
|
day1.with_time(NaiveTime::from_hms_opt(9, 0, 0).unwrap())
|
||||||
|
.unwrap(),
|
||||||
|
TimesheetPointType::Card,
|
||||||
|
),
|
||||||
|
point(
|
||||||
|
day1.with_time(NaiveTime::from_hms_opt(17, 0, 0).unwrap())
|
||||||
|
.unwrap(),
|
||||||
|
TimesheetPointType::Break,
|
||||||
|
),
|
||||||
|
point(
|
||||||
|
day2.with_time(NaiveTime::from_hms_opt(10, 0, 0).unwrap())
|
||||||
|
.unwrap(),
|
||||||
|
TimesheetPointType::Card,
|
||||||
|
),
|
||||||
|
point(
|
||||||
|
day2.with_time(NaiveTime::from_hms_opt(18, 0, 0).unwrap())
|
||||||
|
.unwrap(),
|
||||||
|
TimesheetPointType::Break,
|
||||||
|
),
|
||||||
|
];
|
||||||
|
|
||||||
|
let result = extract_timesheets(&shards).unwrap();
|
||||||
|
|
||||||
|
assert_eq!(result.len(), 2);
|
||||||
|
assert_eq!(result[0].date, day1.date_naive());
|
||||||
|
assert_eq!(result[1].date, day2.date_naive());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_day_with_only_special_day_type_vacation() {
|
||||||
|
let day = Utc.with_ymd_and_hms(2026, 2, 1, 0, 0, 0).unwrap();
|
||||||
|
let shards = vec![
|
||||||
|
point(
|
||||||
|
day.with_time(NaiveTime::from_hms_opt(8, 0, 0).unwrap())
|
||||||
|
.unwrap(),
|
||||||
|
TimesheetPointType::Vacation,
|
||||||
|
),
|
||||||
|
point(
|
||||||
|
day.with_time(NaiveTime::from_hms_opt(9, 0, 0).unwrap())
|
||||||
|
.unwrap(),
|
||||||
|
TimesheetPointType::Break,
|
||||||
|
),
|
||||||
|
];
|
||||||
|
|
||||||
|
let result = extract_timesheets(&shards).unwrap();
|
||||||
|
|
||||||
|
assert_eq!(result.len(), 1);
|
||||||
|
assert_eq!(result[0].special_day_type, Some(SpecialDayType::Vacation));
|
||||||
|
assert!(result[0].timecards.is_empty());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_day_with_only_special_day_type_holiday() {
|
||||||
|
let day = Utc.with_ymd_and_hms(2026, 2, 1, 0, 0, 0).unwrap();
|
||||||
|
let shards = vec![
|
||||||
|
point(
|
||||||
|
day.with_time(NaiveTime::from_hms_opt(8, 0, 0).unwrap())
|
||||||
|
.unwrap(),
|
||||||
|
TimesheetPointType::Holiday,
|
||||||
|
),
|
||||||
|
point(
|
||||||
|
day.with_time(NaiveTime::from_hms_opt(9, 0, 0).unwrap())
|
||||||
|
.unwrap(),
|
||||||
|
TimesheetPointType::Break,
|
||||||
|
),
|
||||||
|
];
|
||||||
|
|
||||||
|
let result = extract_timesheets(&shards).unwrap();
|
||||||
|
|
||||||
|
assert_eq!(result.len(), 1);
|
||||||
|
assert_eq!(result[0].special_day_type, Some(SpecialDayType::Holiday));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_day_with_only_special_day_type_undertime() {
|
||||||
|
let day = Utc.with_ymd_and_hms(2026, 2, 1, 0, 0, 0).unwrap();
|
||||||
|
let shards = vec![
|
||||||
|
point(
|
||||||
|
day.with_time(NaiveTime::from_hms_opt(8, 0, 0).unwrap())
|
||||||
|
.unwrap(),
|
||||||
|
TimesheetPointType::Undertime,
|
||||||
|
),
|
||||||
|
point(
|
||||||
|
day.with_time(NaiveTime::from_hms_opt(9, 0, 0).unwrap())
|
||||||
|
.unwrap(),
|
||||||
|
TimesheetPointType::Break,
|
||||||
|
),
|
||||||
|
];
|
||||||
|
|
||||||
|
let result = extract_timesheets(&shards).unwrap();
|
||||||
|
|
||||||
|
assert_eq!(result.len(), 1);
|
||||||
|
assert_eq!(result[0].special_day_type, Some(SpecialDayType::Undertime));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_day_with_sick_leave_and_timecards() {
|
||||||
|
let day = Utc.with_ymd_and_hms(2026, 2, 1, 0, 0, 0).unwrap();
|
||||||
|
let shards = vec![
|
||||||
|
point(
|
||||||
|
day.with_time(NaiveTime::from_hms_opt(7, 30, 0).unwrap())
|
||||||
|
.unwrap(),
|
||||||
|
TimesheetPointType::SickLeave,
|
||||||
|
),
|
||||||
|
point(
|
||||||
|
day.with_time(NaiveTime::from_hms_opt(9, 0, 0).unwrap())
|
||||||
|
.unwrap(),
|
||||||
|
TimesheetPointType::Card,
|
||||||
|
),
|
||||||
|
point(
|
||||||
|
day.with_time(NaiveTime::from_hms_opt(12, 0, 0).unwrap())
|
||||||
|
.unwrap(),
|
||||||
|
TimesheetPointType::Break,
|
||||||
|
),
|
||||||
|
];
|
||||||
|
|
||||||
|
let result = extract_timesheets(&shards).unwrap();
|
||||||
|
|
||||||
|
assert_eq!(result.len(), 1);
|
||||||
|
assert!(result[0].is_sick_leave);
|
||||||
|
assert_eq!(result[0].timecards.len(), 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_day_with_sick_leave_only() {
|
||||||
|
let day = Utc.with_ymd_and_hms(2026, 2, 1, 0, 0, 0).unwrap();
|
||||||
|
let shards = vec![
|
||||||
|
point(
|
||||||
|
day.with_time(NaiveTime::from_hms_opt(8, 0, 0).unwrap())
|
||||||
|
.unwrap(),
|
||||||
|
TimesheetPointType::SickLeave,
|
||||||
|
),
|
||||||
|
point(
|
||||||
|
day.with_time(NaiveTime::from_hms_opt(9, 0, 0).unwrap())
|
||||||
|
.unwrap(),
|
||||||
|
TimesheetPointType::Break,
|
||||||
|
),
|
||||||
|
];
|
||||||
|
|
||||||
|
let result = extract_timesheets(&shards).unwrap();
|
||||||
|
|
||||||
|
assert_eq!(result.len(), 1);
|
||||||
|
assert!(result[0].is_sick_leave);
|
||||||
|
assert!(result[0].timecards.is_empty());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_empty_input() {
|
||||||
|
let result = extract_timesheets(&[]).unwrap();
|
||||||
|
assert!(result.is_empty());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_day_with_only_cards_and_no_break_is_invalid() {
|
||||||
|
let day = Utc.with_ymd_and_hms(2026, 2, 1, 0, 0, 0).unwrap();
|
||||||
|
let shards = vec![
|
||||||
|
point(
|
||||||
|
day.with_time(NaiveTime::from_hms_opt(9, 0, 0).unwrap())
|
||||||
|
.unwrap(),
|
||||||
|
TimesheetPointType::Card,
|
||||||
|
),
|
||||||
|
point(
|
||||||
|
day.with_time(NaiveTime::from_hms_opt(12, 0, 0).unwrap())
|
||||||
|
.unwrap(),
|
||||||
|
TimesheetPointType::Card,
|
||||||
|
),
|
||||||
|
];
|
||||||
|
|
||||||
|
let result = extract_timesheets(&shards);
|
||||||
|
|
||||||
|
assert!(result.is_err());
|
||||||
|
let err = result.unwrap_err();
|
||||||
|
assert!(err.to_string().contains("not a break"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_two_special_day_types_same_day_is_invalid() {
|
||||||
|
let day = Utc.with_ymd_and_hms(2026, 2, 1, 0, 0, 0).unwrap();
|
||||||
|
let shards = vec![
|
||||||
|
point(
|
||||||
|
day.with_time(NaiveTime::from_hms_opt(8, 0, 0).unwrap())
|
||||||
|
.unwrap(),
|
||||||
|
TimesheetPointType::Vacation,
|
||||||
|
),
|
||||||
|
point(
|
||||||
|
day.with_time(NaiveTime::from_hms_opt(8, 5, 0).unwrap())
|
||||||
|
.unwrap(),
|
||||||
|
TimesheetPointType::Holiday,
|
||||||
|
),
|
||||||
|
point(
|
||||||
|
day.with_time(NaiveTime::from_hms_opt(9, 0, 0).unwrap())
|
||||||
|
.unwrap(),
|
||||||
|
TimesheetPointType::Break,
|
||||||
|
),
|
||||||
|
];
|
||||||
|
|
||||||
|
let result = extract_timesheets(&shards);
|
||||||
|
|
||||||
|
assert!(result.is_err());
|
||||||
|
let err = result.unwrap_err();
|
||||||
|
assert!(err.to_string().contains("is both"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_day_with_only_breaks_is_ignored() {
|
||||||
|
let day = Utc.with_ymd_and_hms(2026, 2, 1, 0, 0, 0).unwrap();
|
||||||
|
let shards = vec![
|
||||||
|
point(
|
||||||
|
day.with_time(NaiveTime::from_hms_opt(12, 0, 0).unwrap())
|
||||||
|
.unwrap(),
|
||||||
|
TimesheetPointType::Break,
|
||||||
|
),
|
||||||
|
point(
|
||||||
|
day.with_time(NaiveTime::from_hms_opt(13, 0, 0).unwrap())
|
||||||
|
.unwrap(),
|
||||||
|
TimesheetPointType::Break,
|
||||||
|
),
|
||||||
|
];
|
||||||
|
|
||||||
|
let result = extract_timesheets(&shards).unwrap();
|
||||||
|
|
||||||
|
assert!(result.is_empty());
|
||||||
|
}
|
||||||
|
}
|
||||||
7
src/timesheet/mod.rs
Normal file
7
src/timesheet/mod.rs
Normal file
|
|
@ -0,0 +1,7 @@
|
||||||
|
mod configuration;
|
||||||
|
mod extract;
|
||||||
|
mod point_types;
|
||||||
|
|
||||||
|
pub use configuration::{BasicTimesheetConfiguration, TIMESHEET_DIMENSION_NAME, TIMESHEET_TAG};
|
||||||
|
pub use extract::extract_timesheets;
|
||||||
|
pub use point_types::TimesheetPointType;
|
||||||
54
src/timesheet/point_types.rs
Normal file
54
src/timesheet/point_types.rs
Normal file
|
|
@ -0,0 +1,54 @@
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use std::str::FromStr;
|
||||||
|
|
||||||
|
/// Type of timesheet point for time tracking.
|
||||||
|
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
|
||||||
|
pub enum TimesheetPointType {
|
||||||
|
#[serde(rename = "CARD")]
|
||||||
|
Card,
|
||||||
|
#[serde(rename = "SICK_LEAVE")]
|
||||||
|
SickLeave,
|
||||||
|
#[serde(rename = "VACATION")]
|
||||||
|
Vacation,
|
||||||
|
#[serde(rename = "UNDERTIME")]
|
||||||
|
Undertime,
|
||||||
|
#[serde(rename = "HOLIDAY")]
|
||||||
|
Holiday,
|
||||||
|
#[serde(rename = "BREAK")]
|
||||||
|
Break,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl TimesheetPointType {
|
||||||
|
pub fn as_str(&self) -> &'static str {
|
||||||
|
match self {
|
||||||
|
TimesheetPointType::Card => "CARD",
|
||||||
|
TimesheetPointType::SickLeave => "SICK_LEAVE",
|
||||||
|
TimesheetPointType::Vacation => "VACATION",
|
||||||
|
TimesheetPointType::Undertime => "UNDERTIME",
|
||||||
|
TimesheetPointType::Holiday => "HOLIDAY",
|
||||||
|
TimesheetPointType::Break => "BREAK",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl std::fmt::Display for TimesheetPointType {
|
||||||
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
|
write!(f, "{}", self.as_str())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl FromStr for TimesheetPointType {
|
||||||
|
type Err = String;
|
||||||
|
|
||||||
|
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||||
|
match s {
|
||||||
|
"CARD" => Ok(TimesheetPointType::Card),
|
||||||
|
"SICK_LEAVE" => Ok(TimesheetPointType::SickLeave),
|
||||||
|
"VACATION" => Ok(TimesheetPointType::Vacation),
|
||||||
|
"UNDERTIME" => Ok(TimesheetPointType::Undertime),
|
||||||
|
"HOLIDAY" => Ok(TimesheetPointType::Holiday),
|
||||||
|
"BREAK" => Ok(TimesheetPointType::Break),
|
||||||
|
_ => Err(format!("Unknown timesheet point type: {}", s)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
283
streamd.svg
Normal file
283
streamd.svg
Normal file
|
|
@ -0,0 +1,283 @@
|
||||||
|
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||||
|
<!-- Created with Inkscape (http://www.inkscape.org/) -->
|
||||||
|
|
||||||
|
<svg
|
||||||
|
width="192"
|
||||||
|
height="192"
|
||||||
|
viewBox="0 0 192 192"
|
||||||
|
version="1.1"
|
||||||
|
id="svg5"
|
||||||
|
inkscape:version="1.4.2 (ebf0e940d0, 2025-05-08)"
|
||||||
|
sodipodi:docname="streamd.svg"
|
||||||
|
inkscape:export-xdpi="96"
|
||||||
|
inkscape:export-ydpi="96"
|
||||||
|
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||||
|
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||||
|
xmlns:xlink="http://www.w3.org/1999/xlink"
|
||||||
|
xmlns="http://www.w3.org/2000/svg"
|
||||||
|
xmlns:svg="http://www.w3.org/2000/svg"
|
||||||
|
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
|
||||||
|
xmlns:cc="http://creativecommons.org/ns#"
|
||||||
|
xmlns:dc="http://purl.org/dc/elements/1.1/">
|
||||||
|
<title
|
||||||
|
id="title1">Streamd</title>
|
||||||
|
<sodipodi:namedview
|
||||||
|
id="namedview7"
|
||||||
|
pagecolor="#ffffff"
|
||||||
|
bordercolor="#666666"
|
||||||
|
borderopacity="1.0"
|
||||||
|
inkscape:pageshadow="2"
|
||||||
|
inkscape:pageopacity="0.0"
|
||||||
|
inkscape:pagecheckerboard="0"
|
||||||
|
inkscape:document-units="px"
|
||||||
|
showgrid="true"
|
||||||
|
inkscape:snap-bbox="true"
|
||||||
|
inkscape:bbox-paths="true"
|
||||||
|
inkscape:snap-bbox-midpoints="true"
|
||||||
|
inkscape:snap-bbox-edge-midpoints="true"
|
||||||
|
inkscape:bbox-nodes="true"
|
||||||
|
inkscape:object-paths="true"
|
||||||
|
inkscape:snap-intersection-paths="true"
|
||||||
|
inkscape:snap-object-midpoints="true"
|
||||||
|
inkscape:snap-midpoints="true"
|
||||||
|
inkscape:snap-smooth-nodes="true"
|
||||||
|
inkscape:zoom="4"
|
||||||
|
inkscape:cx="58.375"
|
||||||
|
inkscape:cy="112.25"
|
||||||
|
inkscape:window-width="2560"
|
||||||
|
inkscape:window-height="1416"
|
||||||
|
inkscape:window-x="0"
|
||||||
|
inkscape:window-y="0"
|
||||||
|
inkscape:window-maximized="0"
|
||||||
|
inkscape:current-layer="layer3"
|
||||||
|
inkscape:snap-grids="true"
|
||||||
|
inkscape:snap-page="true"
|
||||||
|
inkscape:showpageshadow="2"
|
||||||
|
inkscape:deskcolor="#d1d1d1">
|
||||||
|
<inkscape:grid
|
||||||
|
type="xygrid"
|
||||||
|
id="grid824"
|
||||||
|
empspacing="4"
|
||||||
|
originx="0"
|
||||||
|
originy="0"
|
||||||
|
spacingy="1"
|
||||||
|
spacingx="1"
|
||||||
|
units="px" />
|
||||||
|
</sodipodi:namedview>
|
||||||
|
<defs
|
||||||
|
id="defs2">
|
||||||
|
<linearGradient
|
||||||
|
inkscape:collect="always"
|
||||||
|
id="linearGradient3586">
|
||||||
|
<stop
|
||||||
|
style="stop-color:#ffffff;stop-opacity:0.1"
|
||||||
|
offset="0"
|
||||||
|
id="stop3582" />
|
||||||
|
<stop
|
||||||
|
style="stop-color:#ffffff;stop-opacity:0"
|
||||||
|
offset="1"
|
||||||
|
id="stop3584" />
|
||||||
|
</linearGradient>
|
||||||
|
<clipPath
|
||||||
|
clipPathUnits="userSpaceOnUse"
|
||||||
|
id="clipPath1598">
|
||||||
|
<ellipse
|
||||||
|
style="fill:#bbdefb"
|
||||||
|
id="ellipse1600"
|
||||||
|
cx="160"
|
||||||
|
cy="40"
|
||||||
|
rx="12"
|
||||||
|
ry="20" />
|
||||||
|
</clipPath>
|
||||||
|
<filter
|
||||||
|
inkscape:collect="always"
|
||||||
|
style="color-interpolation-filters:sRGB"
|
||||||
|
id="filter1774"
|
||||||
|
x="-0.043319996"
|
||||||
|
y="-0.043320001"
|
||||||
|
width="1.08664"
|
||||||
|
height="1.08664">
|
||||||
|
<feGaussianBlur
|
||||||
|
inkscape:collect="always"
|
||||||
|
stdDeviation="1.5060667"
|
||||||
|
id="feGaussianBlur1776" />
|
||||||
|
</filter>
|
||||||
|
<filter
|
||||||
|
inkscape:collect="always"
|
||||||
|
style="color-interpolation-filters:sRGB"
|
||||||
|
id="filter1839"
|
||||||
|
x="-0.047999999"
|
||||||
|
y="-0.047999999"
|
||||||
|
width="1.096"
|
||||||
|
height="1.096">
|
||||||
|
<feGaussianBlur
|
||||||
|
inkscape:collect="always"
|
||||||
|
stdDeviation="3.04"
|
||||||
|
id="feGaussianBlur1841" />
|
||||||
|
</filter>
|
||||||
|
<filter
|
||||||
|
inkscape:collect="always"
|
||||||
|
style="color-interpolation-filters:sRGB"
|
||||||
|
id="filter1886"
|
||||||
|
x="-0.030315789"
|
||||||
|
y="-0.1152"
|
||||||
|
width="1.0606316"
|
||||||
|
height="1.2304">
|
||||||
|
<feGaussianBlur
|
||||||
|
inkscape:collect="always"
|
||||||
|
stdDeviation="1.92"
|
||||||
|
id="feGaussianBlur1888" />
|
||||||
|
</filter>
|
||||||
|
<clipPath
|
||||||
|
clipPathUnits="userSpaceOnUse"
|
||||||
|
id="clipPath1932">
|
||||||
|
<path
|
||||||
|
style="fill:#e3f2fd;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||||
|
d="m 32,44 v 116 c 0,8 4,12 12,12 40,0 68,-28 115.63984,-28.34001 C 168.99976,143.59319 172,140 172,132 V 44 Z"
|
||||||
|
id="path1934"
|
||||||
|
sodipodi:nodetypes="cccsccc" />
|
||||||
|
</clipPath>
|
||||||
|
<filter
|
||||||
|
inkscape:collect="always"
|
||||||
|
style="color-interpolation-filters:sRGB"
|
||||||
|
id="filter1956"
|
||||||
|
x="-0.030857142"
|
||||||
|
y="-0.108"
|
||||||
|
width="1.0617143"
|
||||||
|
height="1.216">
|
||||||
|
<feGaussianBlur
|
||||||
|
inkscape:collect="always"
|
||||||
|
stdDeviation="1.8"
|
||||||
|
id="feGaussianBlur1958" />
|
||||||
|
</filter>
|
||||||
|
<clipPath
|
||||||
|
clipPathUnits="userSpaceOnUse"
|
||||||
|
id="clipPath1980">
|
||||||
|
<ellipse
|
||||||
|
style="fill:#bbdefb"
|
||||||
|
id="ellipse1982"
|
||||||
|
cx="160"
|
||||||
|
cy="40"
|
||||||
|
rx="12"
|
||||||
|
ry="20" />
|
||||||
|
</clipPath>
|
||||||
|
<linearGradient
|
||||||
|
inkscape:collect="always"
|
||||||
|
xlink:href="#linearGradient3586"
|
||||||
|
id="linearGradient3588"
|
||||||
|
x1="0"
|
||||||
|
y1="0"
|
||||||
|
x2="192"
|
||||||
|
y2="192"
|
||||||
|
gradientUnits="userSpaceOnUse" />
|
||||||
|
</defs>
|
||||||
|
<g
|
||||||
|
inkscape:label="Paper"
|
||||||
|
inkscape:groupmode="layer"
|
||||||
|
id="layer1"
|
||||||
|
style="display:inline"
|
||||||
|
sodipodi:insensitive="true">
|
||||||
|
<path
|
||||||
|
id="path1814"
|
||||||
|
style="opacity:0.2;fill:#000000;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;filter:url(#filter1839)"
|
||||||
|
d="M 32,20 A 12,20 0 0 0 20,40 12,20 0 0 0 32,60 v 100 c 0,8 4,12 12,12 40,0 68.00079,-27.99983 115.64062,-28.33984 C 169.00055,143.59336 172,140 172,132 V 44 h -0.26758 A 12,20 0 0 0 172,40 12,20 0 0 0 160,20 Z m 125.57812,0.455078 a 12,20 0 0 0 -0.38867,0.128906 12,20 0 0 1 0.38867,-0.128906 z m -2.58984,1.410156 a 12,20 0 0 0 -0.4043,0.333985 12,20 0 0 1 0.4043,-0.333985 z m -2.40039,2.423828 a 12,20 0 0 0 -0.21484,0.314454 12,20 0 0 1 0.21484,-0.314454 z m -1.82031,2.990235 a 12,20 0 0 0 -0.19727,0.373047 12,20 0 0 1 0.19727,-0.373047 z m -1.44531,3.685547 a 12,20 0 0 0 -0.20508,0.697265 12,20 0 0 1 0.20508,-0.697265 z m -0.97266,4.349609 a 12,20 0 0 0 -0.0762,0.650391 12,20 0 0 1 0.0762,-0.650391 z" />
|
||||||
|
<path
|
||||||
|
style="fill:#e0f7fa;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||||
|
d="m 32,40 v 120 c 0,8 4,12 12,12 40,0 68,-28 115.63984,-28.34001 C 168.99976,143.59319 172,140 172,132 V 40 Z"
|
||||||
|
id="path2678"
|
||||||
|
sodipodi:nodetypes="cccsccc" />
|
||||||
|
<path
|
||||||
|
id="path2431"
|
||||||
|
style="opacity:0.2;fill:#000000;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||||
|
d="m 172,131 c 0,8 -2.99945,11.59336 -12.35938,11.66016 C 112.00079,143.00017 84,171 44,171 36,171 32,167 32,159 v 1 c 0,8 4,12 12,12 40,0 68.00079,-27.99983 115.64062,-28.33984 C 169.00055,143.59336 172,140 172,132 Z" />
|
||||||
|
<path
|
||||||
|
id="path1863"
|
||||||
|
style="opacity:0.2;fill:#000000;stroke-width:0.470751;filter:url(#filter1886)"
|
||||||
|
d="M 32,20 A 12,20 0 0 0 20,40 12,20 0 0 0 32,60 H 160 A 12,20 0 0 0 172,40 12,20 0 0 0 160,20 Z m 125.57812,0.455078 a 12,20 0 0 0 -0.38867,0.128906 12,20 0 0 1 0.38867,-0.128906 z m -2.58984,1.410156 a 12,20 0 0 0 -0.4043,0.333985 12,20 0 0 1 0.4043,-0.333985 z m -2.40039,2.423828 a 12,20 0 0 0 -0.21484,0.314454 12,20 0 0 1 0.21484,-0.314454 z m -1.82031,2.990235 a 12,20 0 0 0 -0.19727,0.373047 12,20 0 0 1 0.19727,-0.373047 z m -1.44531,3.685547 a 12,20 0 0 0 -0.20508,0.697265 12,20 0 0 1 0.20508,-0.697265 z m -0.97266,4.349609 a 12,20 0 0 0 -0.0762,0.650391 12,20 0 0 1 0.0762,-0.650391 z m -0.0762,8.720703 a 12,20 0 0 0 0.0762,0.650391 12,20 0 0 1 -0.0762,-0.650391 z m 0.84375,4.302735 a 12,20 0 0 0 0.20508,0.697265 12,20 0 0 1 -0.20508,-0.697265 z m 1.45312,4.009765 a 12,20 0 0 0 0.19727,0.373047 12,20 0 0 1 -0.19727,-0.373047 z m 1.80274,3.048828 a 12,20 0 0 0 0.21484,0.314454 12,20 0 0 1 -0.21484,-0.314454 z m 2.21093,2.404297 a 12,20 0 0 0 0.4043,0.333985 12,20 0 0 1 -0.4043,-0.333985 z m 2.60547,1.615235 a 12,20 0 0 0 0.38867,0.128906 12,20 0 0 1 -0.38867,-0.128906 z"
|
||||||
|
clip-path="url(#clipPath1932)" />
|
||||||
|
<ellipse
|
||||||
|
style="fill:#f2f2f2"
|
||||||
|
id="ellipse2158"
|
||||||
|
cx="160"
|
||||||
|
cy="40"
|
||||||
|
rx="12"
|
||||||
|
ry="20" />
|
||||||
|
<path
|
||||||
|
id="rect1442"
|
||||||
|
style="fill:#4dd0e1;stroke-width:0.470751"
|
||||||
|
d="M 32,20 A 12,20 0 0 0 20,40 12,20 0 0 0 32,60 H 160 A 12,20 0 0 1 148,40 12,20 0 0 1 160,20 Z" />
|
||||||
|
<path
|
||||||
|
id="path1936"
|
||||||
|
style="opacity:0.2;fill:#000000;stroke-width:0.470751;filter:url(#filter1956)"
|
||||||
|
d="M 32,20 A 12,20 0 0 0 20,40 12,20 0 0 0 32,60 H 160 A 12,20 0 0 1 148,40 12,20 0 0 1 160,20 Z"
|
||||||
|
clip-path="url(#clipPath1980)" />
|
||||||
|
<path
|
||||||
|
id="rect1406"
|
||||||
|
style="opacity:1;fill:#80deea"
|
||||||
|
d="M 32,20 A 12,20 0 0 0 20,40 h 128 a 12,20 0 0 1 12,-20 z" />
|
||||||
|
<path
|
||||||
|
id="path2390"
|
||||||
|
style="opacity:0.2;fill:#ffffff"
|
||||||
|
d="M 32,20 A 12,20 0 0 0 20,40 h 0.06055 A 12,20 0 0 1 32,20.900391 H 156.64648 A 12,20 0 0 1 160,20 Z" />
|
||||||
|
<path
|
||||||
|
id="path2600"
|
||||||
|
style="opacity:0.2;fill:#000000;stroke-width:0.470751"
|
||||||
|
d="M 20.033203,39.5 C 20.020878,39.666428 20.00981,39.833108 20,40 c 0,11.045695 5.372583,20 12,20 h 128 c -1.1985,-0.03799 -2.38682,-0.375069 -3.52539,-1 H 32 C 25.500897,58.973414 20.195661,50.328485 20.033203,39.5 Z"
|
||||||
|
sodipodi:nodetypes="ccscccc" />
|
||||||
|
</g>
|
||||||
|
<g
|
||||||
|
inkscape:groupmode="layer"
|
||||||
|
id="layer3"
|
||||||
|
inkscape:label="Tag"
|
||||||
|
style="display:inline">
|
||||||
|
<path
|
||||||
|
id="path1772"
|
||||||
|
style="opacity:0.2;fill:#000000;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;filter:url(#filter1774)"
|
||||||
|
d="M 91.53554,89.53554 C 89.41422,91.65686 88,94.48528 88,97.31371 v 16.97056 c 0,4.24264 0,5.65686 2.82843,8.48529 l 42.42641,42.42639 c 5.65685,5.65686 11.31371,5.65686 16.97056,0 l 16.97056,-16.97056 c 5.65686,-5.65686 5.65686,-11.3137 0,-16.97056 l -42.4264,-42.4264 C 121.94113,86 120.52692,86 116.28428,86 H 99.31371 c -2.82842,0 -5.65685,1.41422 -7.77817,3.53554 z m 4.94975,4.94974 a 8,8 0 0 1 11.3137,0 8,8 0 0 1 0,11.31371 8,8 0 0 1 -11.3137,0 8,8 0 0 1 0,-11.31371 z" />
|
||||||
|
<path
|
||||||
|
id="path7522"
|
||||||
|
style="fill:#d84315;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;fill-opacity:1"
|
||||||
|
d="M 91.53554,89.53554 C 89.41422,91.65686 88,94.48528 88,97.31371 v 16.97056 c 0,4.24264 0,5.65686 2.82843,8.48529 l 42.42641,42.42639 c 5.65685,5.65686 11.31371,5.65686 16.97056,0 l 16.97056,-16.97056 c 5.65686,-5.65686 5.65686,-11.3137 0,-16.97056 l -42.4264,-42.4264 C 121.94113,86 120.52692,86 116.28428,86 H 99.31371 c -2.82842,0 -5.65685,1.41422 -7.77817,3.53554 z m 4.94975,4.94974 a 8,8 0 0 1 11.3137,0 8,8 0 0 1 0,11.31371 8,8 0 0 1 -11.3137,0 8,8 0 0 1 0,-11.31371 z" />
|
||||||
|
<path
|
||||||
|
style="opacity:0.2;fill:#000000;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||||
|
d="m 88,113.28516 v 1 c 0,4.24264 -3.05e-4,5.65594 2.828125,8.48437 l 42.425785,42.42578 c 5.65685,5.65686 11.31385,5.65686 16.9707,0 l 16.9707,-16.9707 c 2.99512,-2.99511 4.3894,-5.98927 4.21289,-8.98438 -0.15686,2.66175 -1.55114,5.32263 -4.21289,7.98438 l -16.9707,16.9707 c -5.65685,5.65686 -11.31385,5.65686 -16.9707,0 L 90.828125,121.76953 C 87.999695,118.9411 88,117.5278 88,113.28516 Z"
|
||||||
|
id="path3326" />
|
||||||
|
<path
|
||||||
|
style="opacity:0.2;fill:#ffffff;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||||
|
d="m 100.89062,91.240234 a 8,8 0 0 0 -4.406245,2.244141 8,8 0 0 0 -2.244141,5.40625 8,8 0 0 1 2.244141,-4.40625 8,8 0 0 1 11.314455,0 8,8 0 0 1 2.31445,5.158203 8,8 0 0 0 -2.31445,-6.158203 8,8 0 0 0 -6.90821,-2.244141 z"
|
||||||
|
id="path3103" />
|
||||||
|
<path
|
||||||
|
style="opacity:0.2;fill:#000000;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||||
|
d="m 110.11328,100.64258 a 8,8 0 0 1 -2.31445,5.15625 8,8 0 0 1 -11.314455,0 8,8 0 0 1 -2.244141,-4.4043 8,8 0 0 0 2.244141,5.4043 8,8 0 0 0 11.314455,0 8,8 0 0 0 2.31445,-6.15625 z"
|
||||||
|
id="path3389" />
|
||||||
|
<path
|
||||||
|
style="opacity:0.2;fill:#ffffff;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||||
|
d="m 99.314453,86 c -2.82842,0 -5.657977,1.413836 -7.779297,3.535156 C 89.413836,91.656476 88,94.486023 88,97.314453 v 1 c 0,-2.82843 1.413836,-5.657977 3.535156,-7.779297 C 93.656476,88.413836 96.486033,87 99.314453,87 h 16.970707 c 4.24264,0 5.65594,-3.05e-4 8.48437,2.828125 l 42.42578,42.425785 c 2.66175,2.66174 4.05603,5.32458 4.21289,7.98632 0.17651,-2.9951 -1.21777,-5.99121 -4.21289,-8.98632 L 124.76953,88.828125 C 121.9411,85.999695 120.5278,86 116.28516,86 Z"
|
||||||
|
id="path3346" />
|
||||||
|
</g>
|
||||||
|
<g
|
||||||
|
inkscape:groupmode="layer"
|
||||||
|
id="layer4"
|
||||||
|
inkscape:label="Finish"
|
||||||
|
sodipodi:insensitive="true"
|
||||||
|
style="display:inline">
|
||||||
|
<path
|
||||||
|
id="path3525"
|
||||||
|
style="fill:url(#linearGradient3588);fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||||
|
d="M 32,20 A 12,20 0 0 0 20,40 12,20 0 0 0 32,60 v 100 c 0,8 4,12 12,12 27.15145,0 48.786559,-12.88889 74.77539,-21.2832 l 14.47852,14.47851 c 5.65685,5.65686 11.31385,5.65686 16.9707,0 l 16.9707,-16.9707 c 3.49839,-3.49839 4.82865,-6.99576 4,-10.49414 C 171.74927,136.15451 172,134.25875 172,132 V 44 h -0.26758 A 12,20 0 0 0 172,40 12,20 0 0 0 160,20 Z m 125.57812,0.455078 a 12,20 0 0 0 -0.38867,0.128906 12,20 0 0 1 0.38867,-0.128906 z m -2.58984,1.410156 a 12,20 0 0 0 -0.4043,0.333985 12,20 0 0 1 0.4043,-0.333985 z m -2.40039,2.423828 a 12,20 0 0 0 -0.21484,0.314454 12,20 0 0 1 0.21484,-0.314454 z m -1.82031,2.990235 a 12,20 0 0 0 -0.19727,0.373047 12,20 0 0 1 0.19727,-0.373047 z m -1.44531,3.685547 a 12,20 0 0 0 -0.20508,0.697265 12,20 0 0 1 0.20508,-0.697265 z m -0.97266,4.349609 a 12,20 0 0 0 -0.0762,0.650391 12,20 0 0 1 0.0762,-0.650391 z" />
|
||||||
|
</g>
|
||||||
|
<metadata
|
||||||
|
id="metadata1">
|
||||||
|
<rdf:RDF>
|
||||||
|
<cc:Work
|
||||||
|
rdf:about="">
|
||||||
|
<dc:title>Streamd</dc:title>
|
||||||
|
<dc:creator>
|
||||||
|
<cc:Agent>
|
||||||
|
<dc:title>Konstantin Fickel</dc:title>
|
||||||
|
</cc:Agent>
|
||||||
|
</dc:creator>
|
||||||
|
</cc:Work>
|
||||||
|
</rdf:RDF>
|
||||||
|
</metadata>
|
||||||
|
</svg>
|
||||||
|
After Width: | Height: | Size: 14 KiB |
|
|
@ -1,157 +0,0 @@
|
||||||
from datetime import date, datetime, time
|
|
||||||
|
|
||||||
from streamer.localize.extract_datetime import (
|
|
||||||
extract_date_from_marker,
|
|
||||||
extract_datetime_from_file_name,
|
|
||||||
extract_datetime_from_marker,
|
|
||||||
extract_datetime_from_marker_list,
|
|
||||||
extract_time_from_marker,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class TestExtractDateTime:
|
|
||||||
def test_extract_date_from_file_name_valid(self):
|
|
||||||
file_name = "20230101-123456 Some Text.md"
|
|
||||||
assert datetime(2023, 1, 1, 12, 34, 56) == extract_datetime_from_file_name(
|
|
||||||
file_name
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_extract_date_from_file_name_invalid(self):
|
|
||||||
file_name = "invalid-file-name.md"
|
|
||||||
assert extract_datetime_from_file_name(file_name) is None
|
|
||||||
|
|
||||||
def test_extract_date_from_file_name_without_time(self):
|
|
||||||
file_name = "20230101 Some Text.md"
|
|
||||||
assert datetime(2023, 1, 1, 0, 0, 0) == extract_datetime_from_file_name(
|
|
||||||
file_name
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_extract_date_from_file_name_short_time(self):
|
|
||||||
file_name = "20230101-1234 Some Text.md"
|
|
||||||
assert datetime(2023, 1, 1, 12, 34, 0) == extract_datetime_from_file_name(
|
|
||||||
file_name
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_extract_date_from_file_name_empty_string(self):
|
|
||||||
file_name = ""
|
|
||||||
assert extract_datetime_from_file_name(file_name) is None
|
|
||||||
|
|
||||||
def test_extract_date_from_file_name_with_full_path(self):
|
|
||||||
file_name = "/path/to/20230101-123456 Some Text.md"
|
|
||||||
assert datetime(2023, 1, 1, 12, 34, 56) == extract_datetime_from_file_name(
|
|
||||||
file_name
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class TestExtractMarkerDateTime:
|
|
||||||
def test_extract_datetime_from_marker_valid(self):
|
|
||||||
marker = "20250101150000"
|
|
||||||
assert datetime(2025, 1, 1, 15, 0, 0) == extract_datetime_from_marker(marker)
|
|
||||||
|
|
||||||
def test_extract_datetime_from_marker_invalid_format(self):
|
|
||||||
assert extract_datetime_from_marker("2025010115000") is None # too short
|
|
||||||
assert extract_datetime_from_marker("202501011500000") is None # too long
|
|
||||||
assert extract_datetime_from_marker("2025-01-01T150000") is None # separators
|
|
||||||
assert extract_datetime_from_marker("2025010115000a") is None # non-digit
|
|
||||||
assert extract_datetime_from_marker("") is None
|
|
||||||
|
|
||||||
def test_extract_datetime_from_marker_invalid_values(self):
|
|
||||||
assert extract_datetime_from_marker("20250230120000") is None # Feb 30
|
|
||||||
assert extract_datetime_from_marker("20250101126000") is None # minute 60
|
|
||||||
assert extract_datetime_from_marker("20250101240000") is None # hour 24
|
|
||||||
|
|
||||||
|
|
||||||
class TestExtractMarkerDate:
|
|
||||||
def test_extract_date_from_marker_valid(self):
|
|
||||||
marker = "20250101"
|
|
||||||
assert date(2025, 1, 1) == extract_date_from_marker(marker)
|
|
||||||
|
|
||||||
def test_extract_date_from_marker_invalid_format(self):
|
|
||||||
assert extract_date_from_marker("2025010") is None # too short
|
|
||||||
assert extract_date_from_marker("202501011") is None # too long
|
|
||||||
assert extract_date_from_marker("2025-01-01") is None # separators
|
|
||||||
assert extract_date_from_marker("2025010a") is None # non-digit
|
|
||||||
assert extract_date_from_marker("") is None
|
|
||||||
|
|
||||||
def test_extract_date_from_marker_invalid_values(self):
|
|
||||||
assert extract_date_from_marker("20250230") is None # Feb 30
|
|
||||||
assert extract_date_from_marker("20251301") is None # month 13
|
|
||||||
assert extract_date_from_marker("20250132") is None # day 32
|
|
||||||
|
|
||||||
|
|
||||||
class TestExtractMarkerTime:
|
|
||||||
def test_extract_time_from_marker_valid(self):
|
|
||||||
marker = "150000"
|
|
||||||
assert time(15, 0, 0) == extract_time_from_marker(marker)
|
|
||||||
|
|
||||||
def test_extract_time_from_marker_invalid_format(self):
|
|
||||||
assert extract_time_from_marker("15000") is None # too short
|
|
||||||
assert extract_time_from_marker("1500000") is None # too long
|
|
||||||
assert extract_time_from_marker("15:00:00") is None # separators
|
|
||||||
assert extract_time_from_marker("15000a") is None # non-digit
|
|
||||||
assert extract_time_from_marker("") is None
|
|
||||||
|
|
||||||
def test_extract_time_from_marker_invalid_values(self):
|
|
||||||
assert extract_time_from_marker("240000") is None # hour 24
|
|
||||||
assert extract_time_from_marker("156000") is None # minute 60
|
|
||||||
assert extract_time_from_marker("150060") is None # second 60
|
|
||||||
|
|
||||||
|
|
||||||
class TestExtractDateTimeFromMarkerList:
|
|
||||||
def test_no_markers_inherits_datetime(self):
|
|
||||||
inherited = datetime(2025, 1, 2, 3, 4, 5)
|
|
||||||
assert inherited == extract_datetime_from_marker_list([], inherited)
|
|
||||||
|
|
||||||
def test_unrelated_markers_inherits_datetime(self):
|
|
||||||
inherited = datetime(2025, 1, 2, 3, 4, 5)
|
|
||||||
markers = ["not-a-marker", "2025-01-01", "1500", "1234567"]
|
|
||||||
assert inherited == extract_datetime_from_marker_list(markers, inherited)
|
|
||||||
|
|
||||||
def test_date_only_marker_sets_midnight(self):
|
|
||||||
inherited = datetime(2025, 6, 7, 8, 9, 10)
|
|
||||||
markers = ["20250101"]
|
|
||||||
assert datetime(2025, 1, 1, 0, 0, 0) == extract_datetime_from_marker_list(
|
|
||||||
markers, inherited
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_time_only_marker_inherits_date(self):
|
|
||||||
inherited = datetime(2025, 6, 7, 8, 9, 10)
|
|
||||||
markers = ["150000"]
|
|
||||||
assert datetime(2025, 6, 7, 15, 0, 0) == extract_datetime_from_marker_list(
|
|
||||||
markers, inherited
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_datetime_marker_overrides_both_date_and_time(self):
|
|
||||||
inherited = datetime(2025, 6, 7, 8, 9, 10)
|
|
||||||
markers = ["20250101150000"]
|
|
||||||
assert datetime(2025, 1, 1, 15, 0, 0) == extract_datetime_from_marker_list(
|
|
||||||
markers, inherited
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_combined_date_and_time_markers(self):
|
|
||||||
inherited = datetime(2025, 6, 7, 8, 9, 10)
|
|
||||||
markers = ["20250101", "150000"]
|
|
||||||
assert datetime(2025, 1, 1, 15, 0, 0) == extract_datetime_from_marker_list(
|
|
||||||
markers, inherited
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_first_marker_wins_when_multiple_dates_or_times(self):
|
|
||||||
inherited = datetime(2025, 6, 7, 8, 9, 10)
|
|
||||||
markers = ["20250101", "150000", "20250102", "160000"]
|
|
||||||
assert datetime(2025, 1, 1, 15, 0, 0) == extract_datetime_from_marker_list(
|
|
||||||
markers, inherited
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_last_separated_date_and_time_win(self):
|
|
||||||
inherited = datetime(2025, 6, 7, 8, 9, 10)
|
|
||||||
markers = ["20250101", "150000", "20250102160000"]
|
|
||||||
assert datetime(2025, 1, 1, 15, 0, 0) == extract_datetime_from_marker_list(
|
|
||||||
markers, inherited
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_invalid_date_or_time_markers_are_ignored(self):
|
|
||||||
inherited = datetime(2025, 6, 7, 8, 9, 10)
|
|
||||||
markers = ["20251301", "240000", "20250101", "150000"]
|
|
||||||
assert datetime(2025, 1, 1, 15, 0, 0) == extract_datetime_from_marker_list(
|
|
||||||
markers, inherited
|
|
||||||
)
|
|
||||||
|
|
@ -1,365 +0,0 @@
|
||||||
import pytest
|
|
||||||
|
|
||||||
from streamer.localize.repository_configuration import (
|
|
||||||
Dimension,
|
|
||||||
Marker,
|
|
||||||
MarkerPlacement,
|
|
||||||
RepositoryConfiguration,
|
|
||||||
merge_dimensions,
|
|
||||||
merge_markers,
|
|
||||||
merge_repository_configuration,
|
|
||||||
merge_single_dimension,
|
|
||||||
merge_single_marker,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class TestMergeSingleDimension:
|
|
||||||
def test_second_overrides_display_name_when_non_empty(self):
|
|
||||||
base = Dimension(display_name="Base", comment="c1", propagate=True)
|
|
||||||
second = Dimension(display_name="Second", comment="c2", propagate=False)
|
|
||||||
|
|
||||||
merged = merge_single_dimension(base, second)
|
|
||||||
|
|
||||||
assert merged.display_name == "Second"
|
|
||||||
assert merged.comment == "c2"
|
|
||||||
assert merged.propagate is False
|
|
||||||
|
|
||||||
def test_second_empty_display_name_falls_back_to_base(self):
|
|
||||||
base = Dimension(display_name="Base", comment="c1", propagate=True)
|
|
||||||
second = Dimension(display_name="", comment="c2", propagate=False)
|
|
||||||
|
|
||||||
merged = merge_single_dimension(base, second)
|
|
||||||
|
|
||||||
assert merged.display_name == "Base"
|
|
||||||
assert merged.comment == "c2"
|
|
||||||
assert merged.propagate is False
|
|
||||||
|
|
||||||
def test_second_comment_none_does_not_erase_base_comment(self):
|
|
||||||
base = Dimension(display_name="Base", comment="keep", propagate=True)
|
|
||||||
second = Dimension(display_name="Second", comment=None, propagate=False)
|
|
||||||
|
|
||||||
merged = merge_single_dimension(base, second)
|
|
||||||
|
|
||||||
assert merged.display_name == "Second"
|
|
||||||
assert merged.comment == "keep"
|
|
||||||
|
|
||||||
def test_second_comment_non_none_overrides_base_comment(self):
|
|
||||||
base = Dimension(display_name="Base", comment="c1", propagate=True)
|
|
||||||
second = Dimension(display_name="Second", comment="c2", propagate=True)
|
|
||||||
|
|
||||||
merged = merge_single_dimension(base, second)
|
|
||||||
|
|
||||||
assert merged.comment == "c2"
|
|
||||||
|
|
||||||
def test_second_propagate_overrides_base_when_provided(self):
|
|
||||||
base = Dimension(display_name="Base", comment="c1", propagate=True)
|
|
||||||
second = Dimension(display_name="Second", comment="c2", propagate=False)
|
|
||||||
|
|
||||||
merged = merge_single_dimension(base, second)
|
|
||||||
|
|
||||||
assert merged.propagate is False
|
|
||||||
|
|
||||||
def test_propagate_merging_retains_base_when_second_not_provided(self):
|
|
||||||
base = Dimension(display_name="Base", comment="c1", propagate=True)
|
|
||||||
second = Dimension(display_name="Second", comment="c2")
|
|
||||||
|
|
||||||
merged = merge_single_dimension(base, second)
|
|
||||||
|
|
||||||
assert merged.propagate is True
|
|
||||||
|
|
||||||
|
|
||||||
class TestMergeDimensions:
|
|
||||||
def test_adds_new_keys_from_second(self):
|
|
||||||
base = {"a": Dimension(display_name="A", propagate=True)}
|
|
||||||
second = {"b": Dimension(display_name="B", propagate=False)}
|
|
||||||
|
|
||||||
merged = merge_dimensions(base, second)
|
|
||||||
|
|
||||||
assert set(merged.keys()) == {"a", "b"}
|
|
||||||
assert merged["a"].display_name == "A"
|
|
||||||
assert merged["b"].display_name == "B"
|
|
||||||
|
|
||||||
def test_merges_existing_keys(self):
|
|
||||||
base = {"a": Dimension(display_name="A", comment="c1", propagate=True)}
|
|
||||||
second = {"a": Dimension(display_name="A2", comment=None, propagate=False)}
|
|
||||||
|
|
||||||
merged = merge_dimensions(base, second)
|
|
||||||
|
|
||||||
assert merged["a"].display_name == "A2"
|
|
||||||
assert merged["a"].comment == "c1"
|
|
||||||
assert merged["a"].propagate is False
|
|
||||||
|
|
||||||
def test_does_not_mutate_inputs(self):
|
|
||||||
base = {"a": Dimension(display_name="A", comment="c1", propagate=True)}
|
|
||||||
second = {"b": Dimension(display_name="B", comment="c2", propagate=False)}
|
|
||||||
|
|
||||||
merged = merge_dimensions(base, second)
|
|
||||||
|
|
||||||
assert "b" not in base
|
|
||||||
assert "a" not in second
|
|
||||||
assert set(merged.keys()) == {"a", "b"}
|
|
||||||
|
|
||||||
|
|
||||||
class TestMergeSingleMarker:
|
|
||||||
def test_second_overrides_display_name_when_non_empty(self):
|
|
||||||
base = Marker(
|
|
||||||
display_name="Base",
|
|
||||||
placements=[MarkerPlacement(dimension="project", value=None)],
|
|
||||||
)
|
|
||||||
second = Marker(
|
|
||||||
display_name="Second",
|
|
||||||
placements=[MarkerPlacement(dimension="timesheet", value="coding")],
|
|
||||||
)
|
|
||||||
|
|
||||||
merged = merge_single_marker(base, second)
|
|
||||||
|
|
||||||
assert merged.display_name == "Second"
|
|
||||||
assert merged.placements == [
|
|
||||||
MarkerPlacement(dimension="project", value=None, if_with=set()),
|
|
||||||
MarkerPlacement(dimension="timesheet", value="coding", if_with=set()),
|
|
||||||
]
|
|
||||||
|
|
||||||
def test_second_empty_display_name_falls_back_to_base(self):
|
|
||||||
base = Marker(display_name="Base", placements=[])
|
|
||||||
second = Marker(display_name="", placements=[])
|
|
||||||
|
|
||||||
merged = merge_single_marker(base, second)
|
|
||||||
|
|
||||||
assert merged.display_name == "Base"
|
|
||||||
|
|
||||||
def test_appends_new_placements(self):
|
|
||||||
base = Marker(
|
|
||||||
display_name="Base",
|
|
||||||
placements=[
|
|
||||||
MarkerPlacement(dimension="project"),
|
|
||||||
],
|
|
||||||
)
|
|
||||||
second = Marker(
|
|
||||||
display_name="Second",
|
|
||||||
placements=[
|
|
||||||
MarkerPlacement(
|
|
||||||
if_with={"Timesheet"}, dimension="timesheet", value="x"
|
|
||||||
),
|
|
||||||
],
|
|
||||||
)
|
|
||||||
|
|
||||||
merged = merge_single_marker(base, second)
|
|
||||||
|
|
||||||
assert merged.placements == [
|
|
||||||
MarkerPlacement(dimension="project"),
|
|
||||||
MarkerPlacement(if_with={"Timesheet"}, dimension="timesheet", value="x"),
|
|
||||||
]
|
|
||||||
|
|
||||||
def test_deduplicates_by_identity_and_second_overrides_base(self):
|
|
||||||
base = Marker(
|
|
||||||
display_name="Base",
|
|
||||||
placements=[
|
|
||||||
MarkerPlacement(if_with={"A"}, dimension="d", value="v"),
|
|
||||||
MarkerPlacement(if_with={"B"}, dimension="d", value="v2"),
|
|
||||||
],
|
|
||||||
)
|
|
||||||
second = Marker(
|
|
||||||
display_name="Second",
|
|
||||||
placements=[
|
|
||||||
MarkerPlacement(if_with={"A"}, dimension="d", value="v"),
|
|
||||||
MarkerPlacement(if_with={"C"}, dimension="d", value="v3"),
|
|
||||||
],
|
|
||||||
)
|
|
||||||
|
|
||||||
merged = merge_single_marker(base, second)
|
|
||||||
|
|
||||||
assert merged.placements == [
|
|
||||||
MarkerPlacement(if_with={"A"}, dimension="d", value="v"),
|
|
||||||
MarkerPlacement(if_with={"B"}, dimension="d", value="v2"),
|
|
||||||
MarkerPlacement(if_with={"C"}, dimension="d", value="v3"),
|
|
||||||
]
|
|
||||||
|
|
||||||
def test_identity_is_order_insensitive_for_if_with(self):
|
|
||||||
base = Marker(
|
|
||||||
display_name="Base",
|
|
||||||
placements=[MarkerPlacement(if_with={"A", "B"}, dimension="d", value="v")],
|
|
||||||
)
|
|
||||||
second = Marker(
|
|
||||||
display_name="Second",
|
|
||||||
placements=[MarkerPlacement(if_with={"B", "A"}, dimension="d", value="v2")],
|
|
||||||
)
|
|
||||||
|
|
||||||
merged = merge_single_marker(base, second)
|
|
||||||
|
|
||||||
# With `if_with` as a set, identity is order-insensitive; second overrides base.
|
|
||||||
assert merged.placements == [
|
|
||||||
MarkerPlacement(if_with={"A", "B"}, dimension="d", value="v2"),
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
class TestMergeMarkers:
|
|
||||||
def test_adds_new_marker_keys_from_second(self):
|
|
||||||
base = {"M1": Marker(display_name="M1", placements=[])}
|
|
||||||
second = {"M2": Marker(display_name="M2", placements=[])}
|
|
||||||
|
|
||||||
merged = merge_markers(base, second)
|
|
||||||
|
|
||||||
assert set(merged.keys()) == {"M1", "M2"}
|
|
||||||
|
|
||||||
def test_merges_existing_marker_keys(self):
|
|
||||||
base = {
|
|
||||||
"M": Marker(
|
|
||||||
display_name="Base",
|
|
||||||
placements=[MarkerPlacement(dimension="project")],
|
|
||||||
)
|
|
||||||
}
|
|
||||||
second = {
|
|
||||||
"M": Marker(
|
|
||||||
display_name="Second",
|
|
||||||
placements=[
|
|
||||||
MarkerPlacement(
|
|
||||||
if_with={"Timesheet"}, dimension="timesheet", value="coding"
|
|
||||||
)
|
|
||||||
],
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
merged = merge_markers(base, second)
|
|
||||||
|
|
||||||
assert merged["M"].display_name == "Second"
|
|
||||||
assert merged["M"].placements == [
|
|
||||||
MarkerPlacement(dimension="project", value=None, if_with=set()),
|
|
||||||
MarkerPlacement(
|
|
||||||
if_with={"Timesheet"}, dimension="timesheet", value="coding"
|
|
||||||
),
|
|
||||||
]
|
|
||||||
|
|
||||||
def test_does_not_mutate_inputs(self):
|
|
||||||
base = {"M1": Marker(display_name="M1", placements=[])}
|
|
||||||
second = {"M2": Marker(display_name="M2", placements=[])}
|
|
||||||
|
|
||||||
merged = merge_markers(base, second)
|
|
||||||
|
|
||||||
assert "M2" not in base
|
|
||||||
assert "M1" not in second
|
|
||||||
assert set(merged.keys()) == {"M1", "M2"}
|
|
||||||
|
|
||||||
|
|
||||||
class TestMergeRepositoryConfiguration:
|
|
||||||
def test_merges_dimensions_and_markers(self):
|
|
||||||
base = RepositoryConfiguration(
|
|
||||||
dimensions={
|
|
||||||
"project": Dimension(
|
|
||||||
display_name="Project", comment="c1", propagate=True
|
|
||||||
),
|
|
||||||
"moment": Dimension(
|
|
||||||
display_name="Moment", comment="c2", propagate=True
|
|
||||||
),
|
|
||||||
},
|
|
||||||
markers={
|
|
||||||
"Streamer": Marker(
|
|
||||||
display_name="Streamer",
|
|
||||||
placements=[MarkerPlacement(dimension="project")],
|
|
||||||
)
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
second = RepositoryConfiguration(
|
|
||||||
dimensions={
|
|
||||||
"project": Dimension(display_name="Project2", propagate=False),
|
|
||||||
"timesheet": Dimension(
|
|
||||||
display_name="Timesheet", comment="c3", propagate=False
|
|
||||||
),
|
|
||||||
},
|
|
||||||
markers={
|
|
||||||
"Streamer": Marker(
|
|
||||||
display_name="Streamer2",
|
|
||||||
placements=[
|
|
||||||
MarkerPlacement(
|
|
||||||
if_with={"Timesheet"}, dimension="timesheet", value="coding"
|
|
||||||
)
|
|
||||||
],
|
|
||||||
),
|
|
||||||
"JobHunting": Marker(
|
|
||||||
display_name="JobHunting",
|
|
||||||
placements=[MarkerPlacement(dimension="project")],
|
|
||||||
),
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
merged = merge_repository_configuration(base, second)
|
|
||||||
|
|
||||||
assert set(merged.dimensions.keys()) == {"project", "moment", "timesheet"}
|
|
||||||
assert merged.dimensions["project"].display_name == "Project2"
|
|
||||||
assert merged.dimensions["project"].comment == "c1"
|
|
||||||
assert merged.dimensions["project"].propagate is False
|
|
||||||
assert merged.dimensions["moment"].display_name == "Moment"
|
|
||||||
assert merged.dimensions["timesheet"].display_name == "Timesheet"
|
|
||||||
|
|
||||||
assert set(merged.markers.keys()) == {"Streamer", "JobHunting"}
|
|
||||||
assert merged.markers["Streamer"].display_name == "Streamer2"
|
|
||||||
assert merged.markers["Streamer"].placements == [
|
|
||||||
MarkerPlacement(dimension="project", value=None, if_with=set()),
|
|
||||||
MarkerPlacement(
|
|
||||||
if_with={"Timesheet"}, dimension="timesheet", value="coding"
|
|
||||||
),
|
|
||||||
]
|
|
||||||
assert merged.markers["JobHunting"].placements == [
|
|
||||||
MarkerPlacement(dimension="project", value=None, if_with=set())
|
|
||||||
]
|
|
||||||
|
|
||||||
def test_does_not_mutate_base_or_second(self):
|
|
||||||
base = RepositoryConfiguration(
|
|
||||||
dimensions={"a": Dimension(display_name="A", propagate=True)},
|
|
||||||
markers={"M": Marker(display_name="M", placements=[])},
|
|
||||||
)
|
|
||||||
second = RepositoryConfiguration(
|
|
||||||
dimensions={"b": Dimension(display_name="B", propagate=False)},
|
|
||||||
markers={"N": Marker(display_name="N", placements=[])},
|
|
||||||
)
|
|
||||||
|
|
||||||
_ = merge_repository_configuration(base, second)
|
|
||||||
|
|
||||||
assert set(base.dimensions.keys()) == {"a"}
|
|
||||||
assert set(second.dimensions.keys()) == {"b"}
|
|
||||||
assert set(base.markers.keys()) == {"M"}
|
|
||||||
assert set(second.markers.keys()) == {"N"}
|
|
||||||
|
|
||||||
def test_merge_is_associative_for_non_conflicting_inputs(self):
|
|
||||||
a = RepositoryConfiguration(
|
|
||||||
dimensions={"d1": Dimension(display_name="D1", propagate=True)},
|
|
||||||
markers={"m1": Marker(display_name="M1", placements=[])},
|
|
||||||
)
|
|
||||||
b = RepositoryConfiguration(
|
|
||||||
dimensions={"d2": Dimension(display_name="D2", propagate=False)},
|
|
||||||
markers={"m2": Marker(display_name="M2", placements=[])},
|
|
||||||
)
|
|
||||||
c = RepositoryConfiguration(
|
|
||||||
dimensions={"d3": Dimension(display_name="D3", propagate=False)},
|
|
||||||
markers={"m3": Marker(display_name="M3", placements=[])},
|
|
||||||
)
|
|
||||||
|
|
||||||
left = merge_repository_configuration(merge_repository_configuration(a, b), c)
|
|
||||||
right = merge_repository_configuration(a, merge_repository_configuration(b, c))
|
|
||||||
|
|
||||||
assert left == right
|
|
||||||
assert set(left.dimensions.keys()) == {"d1", "d2", "d3"}
|
|
||||||
assert set(left.markers.keys()) == {"m1", "m2", "m3"}
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
|
||||||
("base", "second", "expected_propagate"),
|
|
||||||
[
|
|
||||||
(
|
|
||||||
RepositoryConfiguration(
|
|
||||||
dimensions={"d": Dimension(display_name="D", propagate=True)},
|
|
||||||
markers={},
|
|
||||||
),
|
|
||||||
RepositoryConfiguration(
|
|
||||||
dimensions={"d": Dimension(display_name="D2")},
|
|
||||||
markers={},
|
|
||||||
),
|
|
||||||
True,
|
|
||||||
)
|
|
||||||
],
|
|
||||||
)
|
|
||||||
def test_merge_repository_configuration_propagate_preserves_base_when_omitted(
|
|
||||||
base, second, expected_propagate
|
|
||||||
):
|
|
||||||
merged = merge_repository_configuration(base, second)
|
|
||||||
assert merged.dimensions["d"].propagate is expected_propagate
|
|
||||||
|
|
@ -1,344 +0,0 @@
|
||||||
from faker import Faker
|
|
||||||
|
|
||||||
from streamer.parse import Shard, StreamFile, parse_markdown_file
|
|
||||||
|
|
||||||
fake = Faker()
|
|
||||||
|
|
||||||
|
|
||||||
class TestParseProcess:
|
|
||||||
file_name: str = fake.file_name(extension="md")
|
|
||||||
|
|
||||||
def test_parse_empty_file(self):
|
|
||||||
assert parse_markdown_file(self.file_name, "") == StreamFile(
|
|
||||||
file_name=self.file_name, shard=Shard(start_line=1, end_line=1)
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_parse_basic_one_line_file(self):
|
|
||||||
test_file = "Hello World"
|
|
||||||
assert parse_markdown_file(self.file_name, test_file) == StreamFile(
|
|
||||||
file_name=self.file_name,
|
|
||||||
shard=Shard(
|
|
||||||
start_line=1,
|
|
||||||
end_line=1,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_parse_basic_multi_line_file(self):
|
|
||||||
test_file = "Hello World\n\nHello again!"
|
|
||||||
assert parse_markdown_file(self.file_name, test_file) == StreamFile(
|
|
||||||
file_name=self.file_name,
|
|
||||||
shard=Shard(
|
|
||||||
start_line=1,
|
|
||||||
end_line=3,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_parse_single_line_with_tag(self):
|
|
||||||
test_file = "@Tag Hello World"
|
|
||||||
assert parse_markdown_file(self.file_name, test_file) == StreamFile(
|
|
||||||
file_name=self.file_name,
|
|
||||||
shard=Shard(
|
|
||||||
markers=["Tag"],
|
|
||||||
start_line=1,
|
|
||||||
end_line=1,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_parse_single_line_with_two_tags(self):
|
|
||||||
test_file = "@Marker1 @Marker2 Hello World"
|
|
||||||
assert parse_markdown_file(self.file_name, test_file) == StreamFile(
|
|
||||||
file_name=self.file_name,
|
|
||||||
shard=Shard(
|
|
||||||
markers=["Marker1", "Marker2"],
|
|
||||||
start_line=1,
|
|
||||||
end_line=1,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_parse_single_line_with_two_tags_and_misplaced_tag(self):
|
|
||||||
test_file = "@Tag1 @Tag2 Hello World @Tag3"
|
|
||||||
assert parse_markdown_file(self.file_name, test_file) == StreamFile(
|
|
||||||
file_name=self.file_name,
|
|
||||||
shard=Shard(
|
|
||||||
markers=["Tag1", "Tag2"],
|
|
||||||
tags=["Tag3"],
|
|
||||||
start_line=1,
|
|
||||||
end_line=1,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_parse_split_paragraphs_into_shards(self):
|
|
||||||
file_text = "Hello World!\n\n@Tag1 Block 1\n\n@Tag2 Block 2"
|
|
||||||
|
|
||||||
assert parse_markdown_file(self.file_name, file_text) == StreamFile(
|
|
||||||
file_name=self.file_name,
|
|
||||||
shard=Shard(
|
|
||||||
start_line=1,
|
|
||||||
end_line=5,
|
|
||||||
children=[
|
|
||||||
Shard(
|
|
||||||
markers=["Tag1"],
|
|
||||||
start_line=3,
|
|
||||||
end_line=3,
|
|
||||||
),
|
|
||||||
Shard(
|
|
||||||
markers=["Tag2"],
|
|
||||||
start_line=5,
|
|
||||||
end_line=5,
|
|
||||||
),
|
|
||||||
],
|
|
||||||
),
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_parse_split_paragraph_with_inner_tags_at_more_positions(self):
|
|
||||||
file_text = "Hello @Tag1 World!\n\n@Marker Block 1\n\nBlock 2 @Tag2"
|
|
||||||
|
|
||||||
assert parse_markdown_file(self.file_name, file_text).shard == Shard(
|
|
||||||
tags=["Tag1", "Tag2"],
|
|
||||||
start_line=1,
|
|
||||||
end_line=5,
|
|
||||||
children=[
|
|
||||||
Shard(markers=["Marker"], start_line=3, end_line=3, children=[]),
|
|
||||||
],
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_parse_header_without_markers(self):
|
|
||||||
file_text = "# Heading\n\n## Subheading"
|
|
||||||
|
|
||||||
assert parse_markdown_file(self.file_name, file_text).shard == Shard(
|
|
||||||
start_line=1,
|
|
||||||
end_line=3,
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_parse_split_at_heading_if_marker_on_subheading(self):
|
|
||||||
file_text = "# Heading @Tag1\n\n## @Marker1 Subheading @Tag2\n\n# Heading @Tag3"
|
|
||||||
|
|
||||||
assert parse_markdown_file(self.file_name, file_text) == StreamFile(
|
|
||||||
file_name=self.file_name,
|
|
||||||
shard=Shard(
|
|
||||||
start_line=1,
|
|
||||||
end_line=5,
|
|
||||||
children=[
|
|
||||||
Shard(
|
|
||||||
tags=["Tag1"],
|
|
||||||
start_line=1,
|
|
||||||
end_line=4,
|
|
||||||
children=[
|
|
||||||
Shard(
|
|
||||||
markers=["Marker1"],
|
|
||||||
tags=["Tag2"],
|
|
||||||
start_line=3,
|
|
||||||
end_line=4,
|
|
||||||
),
|
|
||||||
],
|
|
||||||
),
|
|
||||||
Shard(tags=["Tag3"], start_line=5, end_line=5, children=[]),
|
|
||||||
],
|
|
||||||
),
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_parse_only_parse_releveant_levels(self):
|
|
||||||
file_text = "# @Marker1 Heading @Tag1\n\n## Subheading @Tag2"
|
|
||||||
|
|
||||||
assert parse_markdown_file(self.file_name, file_text) == StreamFile(
|
|
||||||
file_name=self.file_name,
|
|
||||||
shard=Shard(
|
|
||||||
markers=["Marker1"],
|
|
||||||
tags=["Tag1", "Tag2"],
|
|
||||||
start_line=1,
|
|
||||||
end_line=3,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_parse_fullly_before_headings_start(self):
|
|
||||||
file_text = "Hello\n\n@Marker1 World!\n\n# @Marker2 I'm a heading!"
|
|
||||||
|
|
||||||
assert parse_markdown_file(self.file_name, file_text).shard == Shard(
|
|
||||||
start_line=1,
|
|
||||||
end_line=5,
|
|
||||||
children=[
|
|
||||||
Shard(
|
|
||||||
start_line=1,
|
|
||||||
end_line=4,
|
|
||||||
children=[
|
|
||||||
Shard(
|
|
||||||
markers=["Marker1"],
|
|
||||||
start_line=3,
|
|
||||||
end_line=3,
|
|
||||||
)
|
|
||||||
],
|
|
||||||
),
|
|
||||||
Shard(markers=["Marker2"], start_line=5, end_line=5, children=[]),
|
|
||||||
],
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_parse_complex_heading_structure(self):
|
|
||||||
file_text = "Preamble @Preamble\n## @Intro\n# @Title\n## @Chapter1\n## @Chapter2\n### Section 1\n### Section 2"
|
|
||||||
|
|
||||||
assert parse_markdown_file(self.file_name, file_text).shard == Shard(
|
|
||||||
start_line=1,
|
|
||||||
end_line=7,
|
|
||||||
children=[
|
|
||||||
Shard(
|
|
||||||
start_line=1,
|
|
||||||
end_line=2,
|
|
||||||
children=[
|
|
||||||
Shard(
|
|
||||||
tags=["Preamble"],
|
|
||||||
start_line=1,
|
|
||||||
end_line=1,
|
|
||||||
),
|
|
||||||
Shard(
|
|
||||||
markers=["Intro"],
|
|
||||||
start_line=2,
|
|
||||||
end_line=2,
|
|
||||||
),
|
|
||||||
],
|
|
||||||
),
|
|
||||||
Shard(
|
|
||||||
markers=["Title"],
|
|
||||||
start_line=3,
|
|
||||||
end_line=7,
|
|
||||||
children=[
|
|
||||||
Shard(
|
|
||||||
markers=["Chapter1"],
|
|
||||||
start_line=4,
|
|
||||||
end_line=4,
|
|
||||||
),
|
|
||||||
Shard(
|
|
||||||
markers=["Chapter2"],
|
|
||||||
start_line=5,
|
|
||||||
end_line=7,
|
|
||||||
),
|
|
||||||
],
|
|
||||||
),
|
|
||||||
],
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_simple_list(self):
|
|
||||||
file_text = "* hello world\n * @Marker i've got a marker"
|
|
||||||
|
|
||||||
assert parse_markdown_file(self.file_name, file_text).shard == Shard(
|
|
||||||
markers=[],
|
|
||||||
tags=[],
|
|
||||||
start_line=1,
|
|
||||||
end_line=2,
|
|
||||||
children=[
|
|
||||||
Shard(
|
|
||||||
markers=["Marker"], tags=[], start_line=2, end_line=2, children=[]
|
|
||||||
)
|
|
||||||
],
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_parse_complex_list(self):
|
|
||||||
file_text = """* I'm the parent!
|
|
||||||
* @Marker1 I've got a marker\n
|
|
||||||
* I've got no marker!
|
|
||||||
* I've got a child with a marker!
|
|
||||||
* @Marker2 I'm the child with the marker
|
|
||||||
"""
|
|
||||||
|
|
||||||
assert parse_markdown_file(self.file_name, file_text).shard == Shard(
|
|
||||||
markers=[],
|
|
||||||
tags=[],
|
|
||||||
start_line=1,
|
|
||||||
end_line=6,
|
|
||||||
children=[
|
|
||||||
Shard(
|
|
||||||
markers=[],
|
|
||||||
tags=[],
|
|
||||||
start_line=2,
|
|
||||||
end_line=6,
|
|
||||||
children=[
|
|
||||||
Shard(
|
|
||||||
markers=["Marker1"],
|
|
||||||
tags=[],
|
|
||||||
start_line=2,
|
|
||||||
end_line=3,
|
|
||||||
children=[],
|
|
||||||
),
|
|
||||||
Shard(
|
|
||||||
markers=[],
|
|
||||||
tags=[],
|
|
||||||
start_line=5,
|
|
||||||
end_line=6,
|
|
||||||
children=[
|
|
||||||
Shard(
|
|
||||||
markers=["Marker2"],
|
|
||||||
tags=[],
|
|
||||||
start_line=6,
|
|
||||||
end_line=6,
|
|
||||||
children=[],
|
|
||||||
)
|
|
||||||
],
|
|
||||||
),
|
|
||||||
],
|
|
||||||
)
|
|
||||||
],
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_parse_ignores_tags_in_code(self):
|
|
||||||
file_text = "```\n@Marker\n```"
|
|
||||||
|
|
||||||
assert parse_markdown_file(self.file_name, file_text).shard == Shard(
|
|
||||||
markers=[],
|
|
||||||
tags=[],
|
|
||||||
start_line=1,
|
|
||||||
end_line=3,
|
|
||||||
children=[],
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_parse_finds_tags_in_italic_text(self):
|
|
||||||
file_text = "*@ItalicMarker*"
|
|
||||||
|
|
||||||
assert parse_markdown_file(self.file_name, file_text).shard == Shard(
|
|
||||||
markers=["ItalicMarker"],
|
|
||||||
tags=[],
|
|
||||||
start_line=1,
|
|
||||||
end_line=1,
|
|
||||||
children=[],
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_parse_finds_tags_in_bold_text(self):
|
|
||||||
file_text = "**@BoldMarker**"
|
|
||||||
|
|
||||||
assert parse_markdown_file(self.file_name, file_text).shard == Shard(
|
|
||||||
markers=["BoldMarker"],
|
|
||||||
tags=[],
|
|
||||||
start_line=1,
|
|
||||||
end_line=1,
|
|
||||||
children=[],
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_parse_finds_tags_in_strikethrough_text(self):
|
|
||||||
file_text = "~~@StrikeMarker~~"
|
|
||||||
|
|
||||||
assert parse_markdown_file(self.file_name, file_text).shard == Shard(
|
|
||||||
markers=["StrikeMarker"],
|
|
||||||
tags=[],
|
|
||||||
start_line=1,
|
|
||||||
end_line=1,
|
|
||||||
children=[],
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_parse_finds_tags_in_link(self):
|
|
||||||
file_text = "[@LinkMarker](https://konstantinfickel.de)"
|
|
||||||
|
|
||||||
assert parse_markdown_file(self.file_name, file_text).shard == Shard(
|
|
||||||
markers=["LinkMarker"],
|
|
||||||
tags=[],
|
|
||||||
start_line=1,
|
|
||||||
end_line=1,
|
|
||||||
children=[],
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_parse_continues_looking_for_markers_after_first_link_marker(self):
|
|
||||||
file_text = "[@LinkMarker1](https://konstantinfickel.de1) [@LinkMarker2](https://konstantinfickel.de)"
|
|
||||||
|
|
||||||
assert parse_markdown_file(self.file_name, file_text).shard == Shard(
|
|
||||||
markers=["LinkMarker1", "LinkMarker2"],
|
|
||||||
tags=[],
|
|
||||||
start_line=1,
|
|
||||||
end_line=1,
|
|
||||||
children=[],
|
|
||||||
)
|
|
||||||
|
|
@ -1,104 +0,0 @@
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from datetime import datetime
|
|
||||||
|
|
||||||
from streamer.localize import LocalizedShard
|
|
||||||
from streamer.query.find import find_shard, find_shard_by_position
|
|
||||||
|
|
||||||
|
|
||||||
def generate_localized_shard(
|
|
||||||
*,
|
|
||||||
location: dict[str, str] | None = None,
|
|
||||||
children: list[LocalizedShard] | None = None,
|
|
||||||
) -> LocalizedShard:
|
|
||||||
return LocalizedShard(
|
|
||||||
start_line=1,
|
|
||||||
end_line=1,
|
|
||||||
moment=datetime(2020, 1, 1),
|
|
||||||
location=location or {},
|
|
||||||
children=children or [],
|
|
||||||
markers=[],
|
|
||||||
tags=[],
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class TestFindShard:
|
|
||||||
def test_returns_empty_when_no_match(self) -> None:
|
|
||||||
root = generate_localized_shard(location={"file": "a.md"})
|
|
||||||
shards = [root]
|
|
||||||
|
|
||||||
result = find_shard(shards, lambda s: "missing" in s.location)
|
|
||||||
|
|
||||||
assert result == []
|
|
||||||
|
|
||||||
def test_finds_matches_depth_first_and_preserves_order(self) -> None:
|
|
||||||
grandchild = generate_localized_shard(location={"k": "match"})
|
|
||||||
child1 = generate_localized_shard(
|
|
||||||
location={"k": "match"}, children=[grandchild]
|
|
||||||
)
|
|
||||||
child2 = generate_localized_shard(location={"k": "nope"})
|
|
||||||
root = generate_localized_shard(
|
|
||||||
location={"k": "nope"}, children=[child1, child2]
|
|
||||||
)
|
|
||||||
|
|
||||||
result = find_shard([root], lambda s: s.location.get("k") == "match")
|
|
||||||
|
|
||||||
assert result == [child1, grandchild]
|
|
||||||
|
|
||||||
def test_includes_root_if_it_matches(self) -> None:
|
|
||||||
root = generate_localized_shard(
|
|
||||||
location={"k": "match"},
|
|
||||||
children=[generate_localized_shard(location={"k": "match"})],
|
|
||||||
)
|
|
||||||
|
|
||||||
result = find_shard([root], lambda s: s.location.get("k") == "match")
|
|
||||||
|
|
||||||
assert result[0] is root
|
|
||||||
assert len(result) == 2
|
|
||||||
|
|
||||||
def test_multiple_roots_keeps_left_to_right_order(self) -> None:
|
|
||||||
a = generate_localized_shard(location={"k": "match"})
|
|
||||||
b = generate_localized_shard(location={"k": "match"})
|
|
||||||
c = generate_localized_shard(location={"k": "nope"})
|
|
||||||
|
|
||||||
result = find_shard([a, b, c], lambda s: s.location.get("k") == "match")
|
|
||||||
|
|
||||||
assert result == [a, b]
|
|
||||||
|
|
||||||
def test_query_function_can_use_arbitrary_logic(self) -> None:
|
|
||||||
# Ensures typing/behavior supports any callable that returns bool.
|
|
||||||
a = generate_localized_shard(location={"x": "1"})
|
|
||||||
b = generate_localized_shard(location={"x": "2"})
|
|
||||||
c = generate_localized_shard(location={"x": "3"})
|
|
||||||
root = generate_localized_shard(location={}, children=[a, b, c])
|
|
||||||
|
|
||||||
def is_even_x(shard: LocalizedShard) -> bool:
|
|
||||||
x = shard.location.get("x")
|
|
||||||
return x is not None and int(x) % 2 == 0
|
|
||||||
|
|
||||||
result = find_shard([root], is_even_x)
|
|
||||||
|
|
||||||
assert result == [b]
|
|
||||||
|
|
||||||
|
|
||||||
class TestFindShardByPosition:
|
|
||||||
def test_matches_only_when_dimension_present_and_equal(self) -> None:
|
|
||||||
match = generate_localized_shard(location={"file": "a.md", "line": "10"})
|
|
||||||
wrong_value = generate_localized_shard(location={"file": "a.md", "line": "11"})
|
|
||||||
missing_dim = generate_localized_shard(location={"file": "a.md"})
|
|
||||||
root = generate_localized_shard(
|
|
||||||
location={"root": "x"}, children=[match, wrong_value, missing_dim]
|
|
||||||
)
|
|
||||||
|
|
||||||
result = find_shard_by_position([root], "line", "10")
|
|
||||||
|
|
||||||
assert result == [match]
|
|
||||||
|
|
||||||
def test_recurses_through_children(self) -> None:
|
|
||||||
deep = generate_localized_shard(location={"section": "s1"})
|
|
||||||
mid = generate_localized_shard(location={"section": "s0"}, children=[deep])
|
|
||||||
root = generate_localized_shard(location={}, children=[mid])
|
|
||||||
|
|
||||||
result = find_shard_by_position([root], "section", "s1")
|
|
||||||
|
|
||||||
assert result == [deep]
|
|
||||||
|
|
@ -1,231 +0,0 @@
|
||||||
from datetime import datetime
|
|
||||||
|
|
||||||
from streamer.localize.localize import localize_stream_file
|
|
||||||
from streamer.localize.localized_shard import LocalizedShard
|
|
||||||
from streamer.localize.repository_configuration import (
|
|
||||||
Dimension,
|
|
||||||
Marker,
|
|
||||||
MarkerPlacement,
|
|
||||||
RepositoryConfiguration,
|
|
||||||
)
|
|
||||||
from streamer.parse.shard import Shard, StreamFile
|
|
||||||
|
|
||||||
repository_configuration = RepositoryConfiguration(
|
|
||||||
dimensions={
|
|
||||||
"project": Dimension(
|
|
||||||
display_name="Project",
|
|
||||||
comment="GTD Project that is being worked on",
|
|
||||||
propagate=True,
|
|
||||||
),
|
|
||||||
"moment": Dimension(
|
|
||||||
display_name="Moment",
|
|
||||||
comment="Timestamp this entry was created at",
|
|
||||||
propagate=True,
|
|
||||||
),
|
|
||||||
"timesheet": Dimension(
|
|
||||||
display_name="Timesheet",
|
|
||||||
comment="Time Cards for Time Tracking",
|
|
||||||
propagate=True,
|
|
||||||
),
|
|
||||||
},
|
|
||||||
markers={
|
|
||||||
"Streamer": Marker(
|
|
||||||
display_name="Streamer",
|
|
||||||
placements=[
|
|
||||||
MarkerPlacement(dimension="project"),
|
|
||||||
MarkerPlacement(
|
|
||||||
if_with={"Timesheet"}, dimension="timesheet", value="coding"
|
|
||||||
),
|
|
||||||
],
|
|
||||||
),
|
|
||||||
"JobHunting": Marker(
|
|
||||||
display_name="JobHunting", placements=[MarkerPlacement(dimension="project")]
|
|
||||||
),
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class TestLocalize:
|
|
||||||
def test_project_simple_stream_file(self):
|
|
||||||
stream_file = StreamFile(
|
|
||||||
file_name="20250622-121000 Test File.md",
|
|
||||||
shard=Shard(start_line=1, end_line=1, markers=["Streamer"]),
|
|
||||||
)
|
|
||||||
|
|
||||||
assert localize_stream_file(
|
|
||||||
stream_file, repository_configuration
|
|
||||||
) == LocalizedShard(
|
|
||||||
moment=datetime(2025, 6, 22, 12, 10, 0, 0),
|
|
||||||
markers=["Streamer"],
|
|
||||||
tags=[],
|
|
||||||
start_line=1,
|
|
||||||
end_line=1,
|
|
||||||
children=[],
|
|
||||||
location={"project": "Streamer", "file": stream_file.file_name},
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_timesheet_use_case(self):
|
|
||||||
stream_file = StreamFile(
|
|
||||||
file_name="20260131-210000 Test File.md",
|
|
||||||
shard=Shard(start_line=1, end_line=1, markers=["Timesheet", "Streamer"]),
|
|
||||||
)
|
|
||||||
|
|
||||||
assert localize_stream_file(
|
|
||||||
stream_file, repository_configuration
|
|
||||||
) == LocalizedShard(
|
|
||||||
moment=datetime(2026, 1, 31, 21, 0, 0, 0),
|
|
||||||
markers=["Timesheet", "Streamer"],
|
|
||||||
tags=[],
|
|
||||||
start_line=1,
|
|
||||||
end_line=1,
|
|
||||||
children=[],
|
|
||||||
location={
|
|
||||||
"file": stream_file.file_name,
|
|
||||||
"project": "Streamer",
|
|
||||||
"timesheet": "coding",
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_overwrites_true_propagated_dimension_overwrites_existing_value(self):
|
|
||||||
config = RepositoryConfiguration(
|
|
||||||
dimensions={
|
|
||||||
"project": Dimension(display_name="Project", propagate=True),
|
|
||||||
},
|
|
||||||
markers={
|
|
||||||
"A": Marker(
|
|
||||||
display_name="A",
|
|
||||||
placements=[MarkerPlacement(dimension="project", value="a")],
|
|
||||||
),
|
|
||||||
"B": Marker(
|
|
||||||
display_name="B",
|
|
||||||
placements=[
|
|
||||||
MarkerPlacement(dimension="project", value="b", overwrites=True)
|
|
||||||
],
|
|
||||||
),
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
stream_file = StreamFile(
|
|
||||||
file_name="20260131-210000 Test File.md",
|
|
||||||
shard=Shard(start_line=1, end_line=1, markers=["A", "B"]),
|
|
||||||
)
|
|
||||||
|
|
||||||
assert localize_stream_file(stream_file, config) == LocalizedShard(
|
|
||||||
moment=datetime(2026, 1, 31, 21, 0, 0, 0),
|
|
||||||
markers=["A", "B"],
|
|
||||||
tags=[],
|
|
||||||
start_line=1,
|
|
||||||
end_line=1,
|
|
||||||
children=[],
|
|
||||||
location={"file": stream_file.file_name, "project": "b"},
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_overwrites_false_propagated_dimension_does_not_overwrite_existing_value(
|
|
||||||
self,
|
|
||||||
):
|
|
||||||
config = RepositoryConfiguration(
|
|
||||||
dimensions={
|
|
||||||
"project": Dimension(display_name="Project", propagate=True),
|
|
||||||
},
|
|
||||||
markers={
|
|
||||||
"A": Marker(
|
|
||||||
display_name="A",
|
|
||||||
placements=[MarkerPlacement(dimension="project", value="a")],
|
|
||||||
),
|
|
||||||
"B": Marker(
|
|
||||||
display_name="B",
|
|
||||||
placements=[
|
|
||||||
MarkerPlacement(
|
|
||||||
dimension="project", value="b", overwrites=False
|
|
||||||
)
|
|
||||||
],
|
|
||||||
),
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
stream_file = StreamFile(
|
|
||||||
file_name="20260131-210000 Test File.md",
|
|
||||||
shard=Shard(start_line=1, end_line=1, markers=["A", "B"]),
|
|
||||||
)
|
|
||||||
|
|
||||||
assert localize_stream_file(stream_file, config) == LocalizedShard(
|
|
||||||
moment=datetime(2026, 1, 31, 21, 0, 0, 0),
|
|
||||||
markers=["A", "B"],
|
|
||||||
tags=[],
|
|
||||||
start_line=1,
|
|
||||||
end_line=1,
|
|
||||||
children=[],
|
|
||||||
location={"file": stream_file.file_name, "project": "a"},
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_overwrites_true_non_propagated_dimension_overwrites_private_value(self):
|
|
||||||
config = RepositoryConfiguration(
|
|
||||||
dimensions={
|
|
||||||
"label": Dimension(display_name="Label", propagate=False),
|
|
||||||
},
|
|
||||||
markers={
|
|
||||||
"A": Marker(
|
|
||||||
display_name="A",
|
|
||||||
placements=[MarkerPlacement(dimension="label", value="a")],
|
|
||||||
),
|
|
||||||
"B": Marker(
|
|
||||||
display_name="B",
|
|
||||||
placements=[
|
|
||||||
MarkerPlacement(dimension="label", value="b", overwrites=True)
|
|
||||||
],
|
|
||||||
),
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
stream_file = StreamFile(
|
|
||||||
file_name="20260131-210000 Test File.md",
|
|
||||||
shard=Shard(start_line=1, end_line=1, markers=["A", "B"]),
|
|
||||||
)
|
|
||||||
|
|
||||||
assert localize_stream_file(stream_file, config) == LocalizedShard(
|
|
||||||
moment=datetime(2026, 1, 31, 21, 0, 0, 0),
|
|
||||||
markers=["A", "B"],
|
|
||||||
tags=[],
|
|
||||||
start_line=1,
|
|
||||||
end_line=1,
|
|
||||||
children=[],
|
|
||||||
location={"file": stream_file.file_name, "label": "b"},
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_overwrites_false_non_propagated_dimension_does_not_overwrite_private_value(
|
|
||||||
self,
|
|
||||||
):
|
|
||||||
config = RepositoryConfiguration(
|
|
||||||
dimensions={
|
|
||||||
"label": Dimension(display_name="Label", propagate=False),
|
|
||||||
},
|
|
||||||
markers={
|
|
||||||
"A": Marker(
|
|
||||||
display_name="A",
|
|
||||||
placements=[
|
|
||||||
MarkerPlacement(dimension="label", value="a", overwrites=True)
|
|
||||||
],
|
|
||||||
),
|
|
||||||
"B": Marker(
|
|
||||||
display_name="B",
|
|
||||||
placements=[
|
|
||||||
MarkerPlacement(dimension="label", value="b", overwrites=False)
|
|
||||||
],
|
|
||||||
),
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
stream_file = StreamFile(
|
|
||||||
file_name="20260131-210000 Test File.md",
|
|
||||||
shard=Shard(start_line=1, end_line=1, markers=["A", "B"]),
|
|
||||||
)
|
|
||||||
|
|
||||||
assert localize_stream_file(stream_file, config) == LocalizedShard(
|
|
||||||
moment=datetime(2026, 1, 31, 21, 0, 0, 0),
|
|
||||||
markers=["A", "B"],
|
|
||||||
tags=[],
|
|
||||||
start_line=1,
|
|
||||||
end_line=1,
|
|
||||||
children=[],
|
|
||||||
location={"file": stream_file.file_name, "label": "a"},
|
|
||||||
)
|
|
||||||
|
|
@ -1,288 +0,0 @@
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from datetime import datetime, time
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
from streamer.localize.localized_shard import LocalizedShard
|
|
||||||
from streamer.timesheet.configuration import (
|
|
||||||
TIMESHEET_DIMENSION_NAME,
|
|
||||||
TimesheetPointType,
|
|
||||||
)
|
|
||||||
from streamer.timesheet.extract import extract_timesheets
|
|
||||||
from streamer.timesheet.timecard import SpecialDayType, Timecard, Timesheet
|
|
||||||
|
|
||||||
|
|
||||||
def point(at: datetime, type: TimesheetPointType) -> LocalizedShard:
|
|
||||||
"""
|
|
||||||
Create a minimal LocalizedShard that will be interpreted as a timesheet point.
|
|
||||||
|
|
||||||
Note: The extract pipeline uses set-dimension filtering; we therefore ensure the
|
|
||||||
timesheet dimension is set in `location`.
|
|
||||||
"""
|
|
||||||
return LocalizedShard(
|
|
||||||
moment=at,
|
|
||||||
markers=["Timesheet"],
|
|
||||||
tags=[],
|
|
||||||
start_line=1,
|
|
||||||
end_line=1,
|
|
||||||
children=[],
|
|
||||||
location={TIMESHEET_DIMENSION_NAME: type.value, "file": "dummy.md"},
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class TestExtractTimesheets:
|
|
||||||
def test_single_work_block(self):
|
|
||||||
day = datetime(2026, 2, 1, 0, 0, 0)
|
|
||||||
shards = [
|
|
||||||
point(day.replace(hour=9, minute=0), TimesheetPointType.Card),
|
|
||||||
point(day.replace(hour=17, minute=30), TimesheetPointType.Break),
|
|
||||||
]
|
|
||||||
|
|
||||||
assert extract_timesheets(shards) == [
|
|
||||||
Timesheet(
|
|
||||||
date=day.date(),
|
|
||||||
is_sick_leave=False,
|
|
||||||
special_day_type=None,
|
|
||||||
timecards=[Timecard(from_time=time(9, 0), to_time=time(17, 30))],
|
|
||||||
)
|
|
||||||
]
|
|
||||||
|
|
||||||
def test_three_work_blocks_separated_by_breaks(self):
|
|
||||||
day = datetime(2026, 2, 1, 0, 0, 0)
|
|
||||||
shards = [
|
|
||||||
point(day.replace(hour=7, minute=15), TimesheetPointType.Card),
|
|
||||||
point(day.replace(hour=12, minute=0), TimesheetPointType.Break),
|
|
||||||
point(day.replace(hour=12, minute=45), TimesheetPointType.Card),
|
|
||||||
point(day.replace(hour=15, minute=0), TimesheetPointType.Break),
|
|
||||||
point(day.replace(hour=16, minute=0), TimesheetPointType.Card),
|
|
||||||
point(day.replace(hour=17, minute=0), TimesheetPointType.Break),
|
|
||||||
]
|
|
||||||
|
|
||||||
assert extract_timesheets(shards) == [
|
|
||||||
Timesheet(
|
|
||||||
date=day.date(),
|
|
||||||
is_sick_leave=False,
|
|
||||||
special_day_type=None,
|
|
||||||
timecards=[
|
|
||||||
Timecard(from_time=time(7, 15), to_time=time(12, 0)),
|
|
||||||
Timecard(from_time=time(12, 45), to_time=time(15, 0)),
|
|
||||||
Timecard(from_time=time(16, 0), to_time=time(17, 0)),
|
|
||||||
],
|
|
||||||
)
|
|
||||||
]
|
|
||||||
|
|
||||||
def test_input_order_is_not_required_within_a_day(self):
|
|
||||||
"""
|
|
||||||
Points may come unsorted; extraction should sort by timestamp within a day.
|
|
||||||
"""
|
|
||||||
day = datetime(2026, 2, 1, 0, 0, 0)
|
|
||||||
|
|
||||||
shards = [
|
|
||||||
point(day.replace(hour=15, minute=0), TimesheetPointType.Break),
|
|
||||||
point(day.replace(hour=7, minute=15), TimesheetPointType.Card),
|
|
||||||
point(day.replace(hour=12, minute=0), TimesheetPointType.Break),
|
|
||||||
point(day.replace(hour=12, minute=45), TimesheetPointType.Card),
|
|
||||||
point(day.replace(hour=17, minute=0), TimesheetPointType.Break),
|
|
||||||
point(day.replace(hour=16, minute=0), TimesheetPointType.Card),
|
|
||||||
]
|
|
||||||
|
|
||||||
assert extract_timesheets(shards) == [
|
|
||||||
Timesheet(
|
|
||||||
date=day.date(),
|
|
||||||
is_sick_leave=False,
|
|
||||||
special_day_type=None,
|
|
||||||
timecards=[
|
|
||||||
Timecard(from_time=time(7, 15), to_time=time(12, 0)),
|
|
||||||
Timecard(from_time=time(12, 45), to_time=time(15, 0)),
|
|
||||||
Timecard(from_time=time(16, 0), to_time=time(17, 0)),
|
|
||||||
],
|
|
||||||
)
|
|
||||||
]
|
|
||||||
|
|
||||||
def test_groups_by_day(self):
|
|
||||||
"""
|
|
||||||
If points span multiple days, we should get one Timesheet per day.
|
|
||||||
"""
|
|
||||||
day1 = datetime(2026, 2, 1, 0, 0, 0)
|
|
||||||
day2 = datetime(2026, 2, 2, 0, 0, 0)
|
|
||||||
|
|
||||||
shards = [
|
|
||||||
point(day2.replace(hour=10, minute=0), TimesheetPointType.Card),
|
|
||||||
point(day2.replace(hour=18, minute=0), TimesheetPointType.Break),
|
|
||||||
point(day1.replace(hour=9, minute=0), TimesheetPointType.Card),
|
|
||||||
point(day1.replace(hour=17, minute=0), TimesheetPointType.Break),
|
|
||||||
]
|
|
||||||
|
|
||||||
# Note: current implementation groups by date using `itertools.groupby` on the
|
|
||||||
# incoming order; to be robust, we pass day1 points first, then day2 points.
|
|
||||||
# This asserts the intended behavior.
|
|
||||||
shards = [
|
|
||||||
point(day1.replace(hour=9, minute=0), TimesheetPointType.Card),
|
|
||||||
point(day1.replace(hour=17, minute=0), TimesheetPointType.Break),
|
|
||||||
point(day2.replace(hour=10, minute=0), TimesheetPointType.Card),
|
|
||||||
point(day2.replace(hour=18, minute=0), TimesheetPointType.Break),
|
|
||||||
]
|
|
||||||
|
|
||||||
assert extract_timesheets(shards) == [
|
|
||||||
Timesheet(
|
|
||||||
date=day1.date(),
|
|
||||||
is_sick_leave=False,
|
|
||||||
special_day_type=None,
|
|
||||||
timecards=[Timecard(from_time=time(9, 0), to_time=time(17, 0))],
|
|
||||||
),
|
|
||||||
Timesheet(
|
|
||||||
date=day2.date(),
|
|
||||||
is_sick_leave=False,
|
|
||||||
special_day_type=None,
|
|
||||||
timecards=[Timecard(from_time=time(10, 0), to_time=time(18, 0))],
|
|
||||||
),
|
|
||||||
]
|
|
||||||
|
|
||||||
def test_day_with_only_special_day_type_vacation(self):
|
|
||||||
"""
|
|
||||||
A day can be marked as Vacation without timecards; it should still produce a Timesheet.
|
|
||||||
"""
|
|
||||||
day = datetime(2026, 2, 1, 0, 0, 0)
|
|
||||||
shards = [
|
|
||||||
point(day.replace(hour=8, minute=0), TimesheetPointType.Vacation),
|
|
||||||
point(day.replace(hour=9, minute=0), TimesheetPointType.Break),
|
|
||||||
]
|
|
||||||
|
|
||||||
assert extract_timesheets(shards) == [
|
|
||||||
Timesheet(
|
|
||||||
date=day.date(),
|
|
||||||
is_sick_leave=False,
|
|
||||||
special_day_type=SpecialDayType.Vacation,
|
|
||||||
timecards=[],
|
|
||||||
)
|
|
||||||
]
|
|
||||||
|
|
||||||
def test_day_with_only_special_day_type_holiday(self):
|
|
||||||
day = datetime(2026, 2, 1, 0, 0, 0)
|
|
||||||
shards = [
|
|
||||||
point(day.replace(hour=8, minute=0), TimesheetPointType.Holiday),
|
|
||||||
point(day.replace(hour=9, minute=0), TimesheetPointType.Break),
|
|
||||||
]
|
|
||||||
|
|
||||||
assert extract_timesheets(shards) == [
|
|
||||||
Timesheet(
|
|
||||||
date=day.date(),
|
|
||||||
is_sick_leave=False,
|
|
||||||
special_day_type=SpecialDayType.Holiday,
|
|
||||||
timecards=[],
|
|
||||||
)
|
|
||||||
]
|
|
||||||
|
|
||||||
def test_day_with_only_special_day_type_undertime(self):
|
|
||||||
day = datetime(2026, 2, 1, 0, 0, 0)
|
|
||||||
shards = [
|
|
||||||
point(day.replace(hour=8, minute=0), TimesheetPointType.Undertime),
|
|
||||||
point(day.replace(hour=9, minute=0), TimesheetPointType.Break),
|
|
||||||
]
|
|
||||||
|
|
||||||
assert extract_timesheets(shards) == [
|
|
||||||
Timesheet(
|
|
||||||
date=day.date(),
|
|
||||||
is_sick_leave=False,
|
|
||||||
special_day_type=SpecialDayType.Undertime,
|
|
||||||
timecards=[],
|
|
||||||
)
|
|
||||||
]
|
|
||||||
|
|
||||||
def test_day_with_sick_leave_and_timecards(self):
|
|
||||||
"""
|
|
||||||
SickLeave should set the flag but not prevent timecard aggregation.
|
|
||||||
"""
|
|
||||||
day = datetime(2026, 2, 1, 0, 0, 0)
|
|
||||||
shards = [
|
|
||||||
point(day.replace(hour=7, minute=30), TimesheetPointType.SickLeave),
|
|
||||||
point(day.replace(hour=9, minute=0), TimesheetPointType.Card),
|
|
||||||
point(day.replace(hour=12, minute=0), TimesheetPointType.Break),
|
|
||||||
]
|
|
||||||
|
|
||||||
assert extract_timesheets(shards) == [
|
|
||||||
Timesheet(
|
|
||||||
date=day.date(),
|
|
||||||
is_sick_leave=True,
|
|
||||||
special_day_type=None,
|
|
||||||
timecards=[Timecard(from_time=time(9, 0), to_time=time(12, 0))],
|
|
||||||
)
|
|
||||||
]
|
|
||||||
|
|
||||||
def test_day_with_sick_leave_only(self):
|
|
||||||
"""
|
|
||||||
A day with only SickLeave should still produce a Timesheet (no timecards).
|
|
||||||
"""
|
|
||||||
day = datetime(2026, 2, 1, 0, 0, 0)
|
|
||||||
shards = [
|
|
||||||
point(day.replace(hour=8, minute=0), TimesheetPointType.SickLeave),
|
|
||||||
point(day.replace(hour=9, minute=0), TimesheetPointType.Break),
|
|
||||||
]
|
|
||||||
|
|
||||||
assert extract_timesheets(shards) == [
|
|
||||||
Timesheet(
|
|
||||||
date=day.date(),
|
|
||||||
is_sick_leave=True,
|
|
||||||
special_day_type=None,
|
|
||||||
timecards=[],
|
|
||||||
)
|
|
||||||
]
|
|
||||||
|
|
||||||
def test_empty_input(self):
|
|
||||||
assert extract_timesheets([]) == []
|
|
||||||
|
|
||||||
def test_day_with_only_cards_and_no_break_is_invalid(self):
|
|
||||||
"""
|
|
||||||
A day ending 'in work' (last point not a Break) should raise.
|
|
||||||
"""
|
|
||||||
day = datetime(2026, 2, 1, 0, 0, 0)
|
|
||||||
shards = [
|
|
||||||
point(day.replace(hour=9, minute=0), TimesheetPointType.Card),
|
|
||||||
point(day.replace(hour=12, minute=0), TimesheetPointType.Card),
|
|
||||||
]
|
|
||||||
|
|
||||||
with pytest.raises(ValueError, match=r"Last Timecard of .* is not a break"):
|
|
||||||
extract_timesheets(shards)
|
|
||||||
|
|
||||||
def test_two_special_day_types_same_day_is_invalid(self):
|
|
||||||
"""
|
|
||||||
A day cannot be both Vacation and Holiday (or any two distinct special types).
|
|
||||||
"""
|
|
||||||
day = datetime(2026, 2, 1, 0, 0, 0)
|
|
||||||
shards = [
|
|
||||||
point(day.replace(hour=8, minute=0), TimesheetPointType.Vacation),
|
|
||||||
point(day.replace(hour=8, minute=5), TimesheetPointType.Holiday),
|
|
||||||
point(day.replace(hour=9, minute=0), TimesheetPointType.Break),
|
|
||||||
]
|
|
||||||
|
|
||||||
with pytest.raises(ValueError, match=r"is both .* and .*"):
|
|
||||||
extract_timesheets(shards)
|
|
||||||
|
|
||||||
def test_points_with_mixed_dates_inside_one_group_raises(self):
|
|
||||||
"""
|
|
||||||
Defensive: if aggregation receives points spanning multiple dates for a single day,
|
|
||||||
it should raise. (This can occur if higher-level grouping is incorrect.)
|
|
||||||
"""
|
|
||||||
day1 = datetime(2026, 2, 1, 0, 0, 0)
|
|
||||||
day2 = datetime(2026, 2, 2, 0, 0, 0)
|
|
||||||
|
|
||||||
shards = [
|
|
||||||
point(day1.replace(hour=9, minute=0), TimesheetPointType.Card),
|
|
||||||
point(day2.replace(hour=9, minute=30), TimesheetPointType.Break),
|
|
||||||
]
|
|
||||||
|
|
||||||
with pytest.raises(ValueError, match=r"Last Timecard of .* is not a break"):
|
|
||||||
extract_timesheets(shards)
|
|
||||||
|
|
||||||
def test_day_with_only_breaks_is_ignored(self):
|
|
||||||
"""
|
|
||||||
A day with no timecards and no sick/special markers should not emit a Timesheet.
|
|
||||||
"""
|
|
||||||
day = datetime(2026, 2, 1, 0, 0, 0)
|
|
||||||
shards = [
|
|
||||||
point(day.replace(hour=12, minute=0), TimesheetPointType.Break),
|
|
||||||
point(day.replace(hour=13, minute=0), TimesheetPointType.Break),
|
|
||||||
]
|
|
||||||
|
|
||||||
assert extract_timesheets(shards) == []
|
|
||||||
460
uv.lock
generated
460
uv.lock
generated
|
|
@ -1,460 +0,0 @@
|
||||||
version = 1
|
|
||||||
revision = 3
|
|
||||||
requires-python = ">=3.12"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "annotated-doc"
|
|
||||||
version = "0.0.4"
|
|
||||||
source = { registry = "https://pypi.org/simple" }
|
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/57/ba/046ceea27344560984e26a590f90bc7f4a75b06701f653222458922b558c/annotated_doc-0.0.4.tar.gz", hash = "sha256:fbcda96e87e9c92ad167c2e53839e57503ecfda18804ea28102353485033faa4", size = 7288, upload-time = "2025-11-10T22:07:42.062Z" }
|
|
||||||
wheels = [
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/1e/d3/26bf1008eb3d2daa8ef4cacc7f3bfdc11818d111f7e2d0201bc6e3b49d45/annotated_doc-0.0.4-py3-none-any.whl", hash = "sha256:571ac1dc6991c450b25a9c2d84a3705e2ae7a53467b5d111c24fa8baabbed320", size = 5303, upload-time = "2025-11-10T22:07:40.673Z" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "annotated-types"
|
|
||||||
version = "0.7.0"
|
|
||||||
source = { registry = "https://pypi.org/simple" }
|
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" }
|
|
||||||
wheels = [
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "click"
|
|
||||||
version = "8.3.1"
|
|
||||||
source = { registry = "https://pypi.org/simple" }
|
|
||||||
dependencies = [
|
|
||||||
{ name = "colorama", marker = "sys_platform == 'win32'" },
|
|
||||||
]
|
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/3d/fa/656b739db8587d7b5dfa22e22ed02566950fbfbcdc20311993483657a5c0/click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a", size = 295065, upload-time = "2025-11-15T20:45:42.706Z" }
|
|
||||||
wheels = [
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274, upload-time = "2025-11-15T20:45:41.139Z" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "colorama"
|
|
||||||
version = "0.4.6"
|
|
||||||
source = { registry = "https://pypi.org/simple" }
|
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" }
|
|
||||||
wheels = [
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "faker"
|
|
||||||
version = "40.4.0"
|
|
||||||
source = { registry = "https://pypi.org/simple" }
|
|
||||||
dependencies = [
|
|
||||||
{ name = "tzdata", marker = "sys_platform == 'win32'" },
|
|
||||||
]
|
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/fc/7e/dccb7013c9f3d66f2e379383600629fec75e4da2698548bdbf2041ea4b51/faker-40.4.0.tar.gz", hash = "sha256:76f8e74a3df28c3e2ec2caafa956e19e37a132fdc7ea067bc41783affcfee364", size = 1952221, upload-time = "2026-02-06T23:30:15.515Z" }
|
|
||||||
wheels = [
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/ac/63/58efa67c10fb27810d34351b7a10f85f109a7f7e2a07dc3773952459c47b/faker-40.4.0-py3-none-any.whl", hash = "sha256:486d43c67ebbb136bc932406418744f9a0bdf2c07f77703ea78b58b77e9aa443", size = 1987060, upload-time = "2026-02-06T23:30:13.44Z" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "iniconfig"
|
|
||||||
version = "2.3.0"
|
|
||||||
source = { registry = "https://pypi.org/simple" }
|
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" }
|
|
||||||
wheels = [
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "markdown-it-py"
|
|
||||||
version = "4.0.0"
|
|
||||||
source = { registry = "https://pypi.org/simple" }
|
|
||||||
dependencies = [
|
|
||||||
{ name = "mdurl" },
|
|
||||||
]
|
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" }
|
|
||||||
wheels = [
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "mdurl"
|
|
||||||
version = "0.1.2"
|
|
||||||
source = { registry = "https://pypi.org/simple" }
|
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" }
|
|
||||||
wheels = [
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "mistletoe"
|
|
||||||
version = "1.5.1"
|
|
||||||
source = { registry = "https://pypi.org/simple" }
|
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/31/ae/d33647e2a26a8899224f36afc5e7b7a670af30f1fd87231e9f07ca19d673/mistletoe-1.5.1.tar.gz", hash = "sha256:c5571ce6ca9cfdc7ce9151c3ae79acb418e067812000907616427197648030a3", size = 111769, upload-time = "2025-12-07T16:19:01.066Z" }
|
|
||||||
wheels = [
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/20/60/0980fefdc4d12c18c1bbab9d62852f27aded8839233c7b0a9827aaf395f5/mistletoe-1.5.1-py3-none-any.whl", hash = "sha256:d3e97664798261503f685f6a6281b092628367cf3128fc68a015a993b0c4feb3", size = 55331, upload-time = "2025-12-07T16:18:59.65Z" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "nodeenv"
|
|
||||||
version = "1.10.0"
|
|
||||||
source = { registry = "https://pypi.org/simple" }
|
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/24/bf/d1bda4f6168e0b2e9e5958945e01910052158313224ada5ce1fb2e1113b8/nodeenv-1.10.0.tar.gz", hash = "sha256:996c191ad80897d076bdfba80a41994c2b47c68e224c542b48feba42ba00f8bb", size = 55611, upload-time = "2025-12-20T14:08:54.006Z" }
|
|
||||||
wheels = [
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/88/b2/d0896bdcdc8d28a7fc5717c305f1a861c26e18c05047949fb371034d98bd/nodeenv-1.10.0-py2.py3-none-any.whl", hash = "sha256:5bb13e3eed2923615535339b3c620e76779af4cb4c6a90deccc9e36b274d3827", size = 23438, upload-time = "2025-12-20T14:08:52.782Z" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "packaging"
|
|
||||||
version = "26.0"
|
|
||||||
source = { registry = "https://pypi.org/simple" }
|
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/65/ee/299d360cdc32edc7d2cf530f3accf79c4fca01e96ffc950d8a52213bd8e4/packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4", size = 143416, upload-time = "2026-01-21T20:50:39.064Z" }
|
|
||||||
wheels = [
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366, upload-time = "2026-01-21T20:50:37.788Z" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "pluggy"
|
|
||||||
version = "1.6.0"
|
|
||||||
source = { registry = "https://pypi.org/simple" }
|
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" }
|
|
||||||
wheels = [
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "pydantic"
|
|
||||||
version = "2.12.5"
|
|
||||||
source = { registry = "https://pypi.org/simple" }
|
|
||||||
dependencies = [
|
|
||||||
{ name = "annotated-types" },
|
|
||||||
{ name = "pydantic-core" },
|
|
||||||
{ name = "typing-extensions" },
|
|
||||||
{ name = "typing-inspection" },
|
|
||||||
]
|
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" }
|
|
||||||
wheels = [
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "pydantic-core"
|
|
||||||
version = "2.41.5"
|
|
||||||
source = { registry = "https://pypi.org/simple" }
|
|
||||||
dependencies = [
|
|
||||||
{ name = "typing-extensions" },
|
|
||||||
]
|
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" }
|
|
||||||
wheels = [
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = "2025-11-04T13:39:59.956Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578, upload-time = "2025-11-04T13:40:04.401Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", size = 2208504, upload-time = "2025-11-04T13:40:06.072Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816, upload-time = "2025-11-04T13:40:07.835Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", size = 2075366, upload-time = "2025-11-04T13:40:09.804Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698, upload-time = "2025-11-04T13:40:12.004Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603, upload-time = "2025-11-04T13:40:13.868Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591, upload-time = "2025-11-04T13:40:15.672Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068, upload-time = "2025-11-04T13:40:17.532Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908, upload-time = "2025-11-04T13:40:19.309Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145, upload-time = "2025-11-04T13:40:21.548Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179, upload-time = "2025-11-04T13:40:23.393Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164, upload-time = "2025-11-04T13:40:40.289Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, upload-time = "2025-11-04T13:40:42.809Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = "2025-11-04T13:40:44.752Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622, upload-time = "2025-11-04T13:40:56.68Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725, upload-time = "2025-11-04T13:40:58.807Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040, upload-time = "2025-11-04T13:41:00.853Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691, upload-time = "2025-11-04T13:41:03.504Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897, upload-time = "2025-11-04T13:41:05.804Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302, upload-time = "2025-11-04T13:41:07.809Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877, upload-time = "2025-11-04T13:41:09.827Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680, upload-time = "2025-11-04T13:41:12.379Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960, upload-time = "2025-11-04T13:41:14.627Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102, upload-time = "2025-11-04T13:41:16.868Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039, upload-time = "2025-11-04T13:41:18.934Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126, upload-time = "2025-11-04T13:41:21.418Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489, upload-time = "2025-11-04T13:41:24.076Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288, upload-time = "2025-11-04T13:41:26.33Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255, upload-time = "2025-11-04T13:41:28.569Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760, upload-time = "2025-11-04T13:41:31.055Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092, upload-time = "2025-11-04T13:41:33.21Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385, upload-time = "2025-11-04T13:41:35.508Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832, upload-time = "2025-11-04T13:41:37.732Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585, upload-time = "2025-11-04T13:41:40Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078, upload-time = "2025-11-04T13:41:42.323Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914, upload-time = "2025-11-04T13:41:45.221Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560, upload-time = "2025-11-04T13:41:47.474Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244, upload-time = "2025-11-04T13:41:49.992Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955, upload-time = "2025-11-04T13:41:54.079Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "pydantic-settings"
|
|
||||||
version = "2.12.0"
|
|
||||||
source = { registry = "https://pypi.org/simple" }
|
|
||||||
dependencies = [
|
|
||||||
{ name = "pydantic" },
|
|
||||||
{ name = "python-dotenv" },
|
|
||||||
{ name = "typing-inspection" },
|
|
||||||
]
|
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/43/4b/ac7e0aae12027748076d72a8764ff1c9d82ca75a7a52622e67ed3f765c54/pydantic_settings-2.12.0.tar.gz", hash = "sha256:005538ef951e3c2a68e1c08b292b5f2e71490def8589d4221b95dab00dafcfd0", size = 194184, upload-time = "2025-11-10T14:25:47.013Z" }
|
|
||||||
wheels = [
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/c1/60/5d4751ba3f4a40a6891f24eec885f51afd78d208498268c734e256fb13c4/pydantic_settings-2.12.0-py3-none-any.whl", hash = "sha256:fddb9fd99a5b18da837b29710391e945b1e30c135477f484084ee513adb93809", size = 51880, upload-time = "2025-11-10T14:25:45.546Z" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[package.optional-dependencies]
|
|
||||||
yaml = [
|
|
||||||
{ name = "pyyaml" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "pygments"
|
|
||||||
version = "2.19.2"
|
|
||||||
source = { registry = "https://pypi.org/simple" }
|
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" }
|
|
||||||
wheels = [
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "pyright"
|
|
||||||
version = "1.1.408"
|
|
||||||
source = { registry = "https://pypi.org/simple" }
|
|
||||||
dependencies = [
|
|
||||||
{ name = "nodeenv" },
|
|
||||||
{ name = "typing-extensions" },
|
|
||||||
]
|
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/74/b2/5db700e52554b8f025faa9c3c624c59f1f6c8841ba81ab97641b54322f16/pyright-1.1.408.tar.gz", hash = "sha256:f28f2321f96852fa50b5829ea492f6adb0e6954568d1caa3f3af3a5f555eb684", size = 4400578, upload-time = "2026-01-08T08:07:38.795Z" }
|
|
||||||
wheels = [
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/0c/82/a2c93e32800940d9573fb28c346772a14778b84ba7524e691b324620ab89/pyright-1.1.408-py3-none-any.whl", hash = "sha256:090b32865f4fdb1e0e6cd82bf5618480d48eecd2eb2e70f960982a3d9a4c17c1", size = 6399144, upload-time = "2026-01-08T08:07:37.082Z" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "pytest"
|
|
||||||
version = "9.0.2"
|
|
||||||
source = { registry = "https://pypi.org/simple" }
|
|
||||||
dependencies = [
|
|
||||||
{ name = "colorama", marker = "sys_platform == 'win32'" },
|
|
||||||
{ name = "iniconfig" },
|
|
||||||
{ name = "packaging" },
|
|
||||||
{ name = "pluggy" },
|
|
||||||
{ name = "pygments" },
|
|
||||||
]
|
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" }
|
|
||||||
wheels = [
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "python-dotenv"
|
|
||||||
version = "1.2.1"
|
|
||||||
source = { registry = "https://pypi.org/simple" }
|
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/f0/26/19cadc79a718c5edbec86fd4919a6b6d3f681039a2f6d66d14be94e75fb9/python_dotenv-1.2.1.tar.gz", hash = "sha256:42667e897e16ab0d66954af0e60a9caa94f0fd4ecf3aaf6d2d260eec1aa36ad6", size = 44221, upload-time = "2025-10-26T15:12:10.434Z" }
|
|
||||||
wheels = [
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/14/1b/a298b06749107c305e1fe0f814c6c74aea7b2f1e10989cb30f544a1b3253/python_dotenv-1.2.1-py3-none-any.whl", hash = "sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61", size = 21230, upload-time = "2025-10-26T15:12:09.109Z" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "pyyaml"
|
|
||||||
version = "6.0.3"
|
|
||||||
source = { registry = "https://pypi.org/simple" }
|
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" }
|
|
||||||
wheels = [
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = "2025-09-25T21:32:13.652Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669, upload-time = "2025-09-25T21:32:23.673Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252, upload-time = "2025-09-25T21:32:25.149Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081, upload-time = "2025-09-25T21:32:26.575Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159, upload-time = "2025-09-25T21:32:27.727Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626, upload-time = "2025-09-25T21:32:28.878Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613, upload-time = "2025-09-25T21:32:30.178Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115, upload-time = "2025-09-25T21:32:31.353Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427, upload-time = "2025-09-25T21:32:32.58Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090, upload-time = "2025-09-25T21:32:33.659Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246, upload-time = "2025-09-25T21:32:34.663Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814, upload-time = "2025-09-25T21:32:35.712Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809, upload-time = "2025-09-25T21:32:36.789Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454, upload-time = "2025-09-25T21:32:37.966Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355, upload-time = "2025-09-25T21:32:39.178Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175, upload-time = "2025-09-25T21:32:40.865Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228, upload-time = "2025-09-25T21:32:42.084Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194, upload-time = "2025-09-25T21:32:43.362Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429, upload-time = "2025-09-25T21:32:57.844Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912, upload-time = "2025-09-25T21:32:59.247Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108, upload-time = "2025-09-25T21:32:44.377Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641, upload-time = "2025-09-25T21:32:45.407Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901, upload-time = "2025-09-25T21:32:48.83Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132, upload-time = "2025-09-25T21:32:50.149Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261, upload-time = "2025-09-25T21:32:51.808Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272, upload-time = "2025-09-25T21:32:52.941Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923, upload-time = "2025-09-25T21:32:54.537Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062, upload-time = "2025-09-25T21:32:55.767Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "rich"
|
|
||||||
version = "14.3.2"
|
|
||||||
source = { registry = "https://pypi.org/simple" }
|
|
||||||
dependencies = [
|
|
||||||
{ name = "markdown-it-py" },
|
|
||||||
{ name = "pygments" },
|
|
||||||
]
|
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/74/99/a4cab2acbb884f80e558b0771e97e21e939c5dfb460f488d19df485e8298/rich-14.3.2.tar.gz", hash = "sha256:e712f11c1a562a11843306f5ed999475f09ac31ffb64281f73ab29ffdda8b3b8", size = 230143, upload-time = "2026-02-01T16:20:47.908Z" }
|
|
||||||
wheels = [
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/ef/45/615f5babd880b4bd7d405cc0dc348234c5ffb6ed1ea33e152ede08b2072d/rich-14.3.2-py3-none-any.whl", hash = "sha256:08e67c3e90884651da3239ea668222d19bea7b589149d8014a21c633420dbb69", size = 309963, upload-time = "2026-02-01T16:20:46.078Z" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "ruff"
|
|
||||||
version = "0.15.0"
|
|
||||||
source = { registry = "https://pypi.org/simple" }
|
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/c8/39/5cee96809fbca590abea6b46c6d1c586b49663d1d2830a751cc8fc42c666/ruff-0.15.0.tar.gz", hash = "sha256:6bdea47cdbea30d40f8f8d7d69c0854ba7c15420ec75a26f463290949d7f7e9a", size = 4524893, upload-time = "2026-02-03T17:53:35.357Z" }
|
|
||||||
wheels = [
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/bc/88/3fd1b0aa4b6330d6aaa63a285bc96c9f71970351579152d231ed90914586/ruff-0.15.0-py3-none-linux_armv6l.whl", hash = "sha256:aac4ebaa612a82b23d45964586f24ae9bc23ca101919f5590bdb368d74ad5455", size = 10354332, upload-time = "2026-02-03T17:52:54.892Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/72/f6/62e173fbb7eb75cc29fe2576a1e20f0a46f671a2587b5f604bfb0eaf5f6f/ruff-0.15.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:dcd4be7cc75cfbbca24a98d04d0b9b36a270d0833241f776b788d59f4142b14d", size = 10767189, upload-time = "2026-02-03T17:53:19.778Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/99/e4/968ae17b676d1d2ff101d56dc69cf333e3a4c985e1ec23803df84fc7bf9e/ruff-0.15.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d747e3319b2bce179c7c1eaad3d884dc0a199b5f4d5187620530adf9105268ce", size = 10075384, upload-time = "2026-02-03T17:53:29.241Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/a2/bf/9843c6044ab9e20af879c751487e61333ca79a2c8c3058b15722386b8cae/ruff-0.15.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:650bd9c56ae03102c51a5e4b554d74d825ff3abe4db22b90fd32d816c2e90621", size = 10481363, upload-time = "2026-02-03T17:52:43.332Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/55/d9/4ada5ccf4cd1f532db1c8d44b6f664f2208d3d93acbeec18f82315e15193/ruff-0.15.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a6664b7eac559e3048223a2da77769c2f92b43a6dfd4720cef42654299a599c9", size = 10187736, upload-time = "2026-02-03T17:53:00.522Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/86/e2/f25eaecd446af7bb132af0a1d5b135a62971a41f5366ff41d06d25e77a91/ruff-0.15.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f811f97b0f092b35320d1556f3353bf238763420ade5d9e62ebd2b73f2ff179", size = 10968415, upload-time = "2026-02-03T17:53:15.705Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/e7/dc/f06a8558d06333bf79b497d29a50c3a673d9251214e0d7ec78f90b30aa79/ruff-0.15.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:761ec0a66680fab6454236635a39abaf14198818c8cdf691e036f4bc0f406b2d", size = 11809643, upload-time = "2026-02-03T17:53:23.031Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/dd/45/0ece8db2c474ad7df13af3a6d50f76e22a09d078af63078f005057ca59eb/ruff-0.15.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:940f11c2604d317e797b289f4f9f3fa5555ffe4fb574b55ed006c3d9b6f0eb78", size = 11234787, upload-time = "2026-02-03T17:52:46.432Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/8a/d9/0e3a81467a120fd265658d127db648e4d3acfe3e4f6f5d4ea79fac47e587/ruff-0.15.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bcbca3d40558789126da91d7ef9a7c87772ee107033db7191edefa34e2c7f1b4", size = 11112797, upload-time = "2026-02-03T17:52:49.274Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/b2/cb/8c0b3b0c692683f8ff31351dfb6241047fa873a4481a76df4335a8bff716/ruff-0.15.0-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:9a121a96db1d75fa3eb39c4539e607f628920dd72ff1f7c5ee4f1b768ac62d6e", size = 11033133, upload-time = "2026-02-03T17:53:33.105Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/f8/5e/23b87370cf0f9081a8c89a753e69a4e8778805b8802ccfe175cc410e50b9/ruff-0.15.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:5298d518e493061f2eabd4abd067c7e4fb89e2f63291c94332e35631c07c3662", size = 10442646, upload-time = "2026-02-03T17:53:06.278Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/e1/9a/3c94de5ce642830167e6d00b5c75aacd73e6347b4c7fc6828699b150a5ee/ruff-0.15.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:afb6e603d6375ff0d6b0cee563fa21ab570fd15e65c852cb24922cef25050cf1", size = 10195750, upload-time = "2026-02-03T17:53:26.084Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/30/15/e396325080d600b436acc970848d69df9c13977942fb62bb8722d729bee8/ruff-0.15.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:77e515f6b15f828b94dc17d2b4ace334c9ddb7d9468c54b2f9ed2b9c1593ef16", size = 10676120, upload-time = "2026-02-03T17:53:09.363Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/8d/c9/229a23d52a2983de1ad0fb0ee37d36e0257e6f28bfd6b498ee2c76361874/ruff-0.15.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:6f6e80850a01eb13b3e42ee0ebdf6e4497151b48c35051aab51c101266d187a3", size = 11201636, upload-time = "2026-02-03T17:52:57.281Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/6f/b0/69adf22f4e24f3677208adb715c578266842e6e6a3cc77483f48dd999ede/ruff-0.15.0-py3-none-win32.whl", hash = "sha256:238a717ef803e501b6d51e0bdd0d2c6e8513fe9eec14002445134d3907cd46c3", size = 10465945, upload-time = "2026-02-03T17:53:12.591Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/51/ad/f813b6e2c97e9b4598be25e94a9147b9af7e60523b0cb5d94d307c15229d/ruff-0.15.0-py3-none-win_amd64.whl", hash = "sha256:dd5e4d3301dc01de614da3cdffc33d4b1b96fb89e45721f1598e5532ccf78b18", size = 11564657, upload-time = "2026-02-03T17:52:51.893Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/f6/b0/2d823f6e77ebe560f4e397d078487e8d52c1516b331e3521bc75db4272ca/ruff-0.15.0-py3-none-win_arm64.whl", hash = "sha256:c480d632cc0ca3f0727acac8b7d053542d9e114a462a145d0b00e7cd658c515a", size = 10865753, upload-time = "2026-02-03T17:53:03.014Z" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "shellingham"
|
|
||||||
version = "1.5.4"
|
|
||||||
source = { registry = "https://pypi.org/simple" }
|
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310, upload-time = "2023-10-24T04:13:40.426Z" }
|
|
||||||
wheels = [
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755, upload-time = "2023-10-24T04:13:38.866Z" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "streamer"
|
|
||||||
version = "0.1.0"
|
|
||||||
source = { editable = "." }
|
|
||||||
dependencies = [
|
|
||||||
{ name = "click" },
|
|
||||||
{ name = "mistletoe" },
|
|
||||||
{ name = "pydantic" },
|
|
||||||
{ name = "pydantic-settings", extra = ["yaml"] },
|
|
||||||
{ name = "rich" },
|
|
||||||
{ name = "typer" },
|
|
||||||
{ name = "xdg-base-dirs" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[package.dev-dependencies]
|
|
||||||
dev = [
|
|
||||||
{ name = "faker" },
|
|
||||||
{ name = "pyright" },
|
|
||||||
{ name = "pytest" },
|
|
||||||
{ name = "ruff" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[package.metadata]
|
|
||||||
requires-dist = [
|
|
||||||
{ name = "click", specifier = "==8.3.1" },
|
|
||||||
{ name = "mistletoe", specifier = "==1.5.1" },
|
|
||||||
{ name = "pydantic", specifier = "==2.12.5" },
|
|
||||||
{ name = "pydantic-settings", extras = ["yaml"], specifier = "==2.12.0" },
|
|
||||||
{ name = "rich", specifier = "==14.3.2" },
|
|
||||||
{ name = "typer", specifier = "==0.21.2" },
|
|
||||||
{ name = "xdg-base-dirs", specifier = "==6.0.2" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[package.metadata.requires-dev]
|
|
||||||
dev = [
|
|
||||||
{ name = "faker", specifier = "==40.4.0" },
|
|
||||||
{ name = "pyright", specifier = "==1.1.408" },
|
|
||||||
{ name = "pytest", specifier = "==9.0.2" },
|
|
||||||
{ name = "ruff", specifier = "==0.15.0" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "typer"
|
|
||||||
version = "0.21.2"
|
|
||||||
source = { registry = "https://pypi.org/simple" }
|
|
||||||
dependencies = [
|
|
||||||
{ name = "annotated-doc" },
|
|
||||||
{ name = "click" },
|
|
||||||
{ name = "rich" },
|
|
||||||
{ name = "shellingham" },
|
|
||||||
]
|
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/f2/1e/a27cc02a0cd715118c71fa2aef2c687fdefc3c28d90fd0dd789c5118154c/typer-0.21.2.tar.gz", hash = "sha256:1abd95a3b675e17ff61b0838ac637fe9478d446d62ad17fa4bb81ea57cc54028", size = 120426, upload-time = "2026-02-10T19:33:46.182Z" }
|
|
||||||
wheels = [
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/b8/cc/d59f893fbdfb5f58770c05febfc4086a46875f1084453621c35605cec946/typer-0.21.2-py3-none-any.whl", hash = "sha256:c3d8de54d00347ef90b82131ca946274f017cffb46683ae3883c360fa958f55c", size = 56728, upload-time = "2026-02-10T19:33:48.01Z" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "typing-extensions"
|
|
||||||
version = "4.15.0"
|
|
||||||
source = { registry = "https://pypi.org/simple" }
|
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" }
|
|
||||||
wheels = [
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "typing-inspection"
|
|
||||||
version = "0.4.2"
|
|
||||||
source = { registry = "https://pypi.org/simple" }
|
|
||||||
dependencies = [
|
|
||||||
{ name = "typing-extensions" },
|
|
||||||
]
|
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" }
|
|
||||||
wheels = [
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "tzdata"
|
|
||||||
version = "2025.3"
|
|
||||||
source = { registry = "https://pypi.org/simple" }
|
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/5e/a7/c202b344c5ca7daf398f3b8a477eeb205cf3b6f32e7ec3a6bac0629ca975/tzdata-2025.3.tar.gz", hash = "sha256:de39c2ca5dc7b0344f2eba86f49d614019d29f060fc4ebc8a417896a620b56a7", size = 196772, upload-time = "2025-12-13T17:45:35.667Z" }
|
|
||||||
wheels = [
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/c7/b0/003792df09decd6849a5e39c28b513c06e84436a54440380862b5aeff25d/tzdata-2025.3-py2.py3-none-any.whl", hash = "sha256:06a47e5700f3081aab02b2e513160914ff0694bce9947d6b76ebd6bf57cfc5d1", size = 348521, upload-time = "2025-12-13T17:45:33.889Z" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "xdg-base-dirs"
|
|
||||||
version = "6.0.2"
|
|
||||||
source = { registry = "https://pypi.org/simple" }
|
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/bf/d0/bbe05a15347538aaf9fa5b51ac3b97075dfb834931fcb77d81fbdb69e8f6/xdg_base_dirs-6.0.2.tar.gz", hash = "sha256:950504e14d27cf3c9cb37744680a43bf0ac42efefc4ef4acf98dc736cab2bced", size = 4085, upload-time = "2024-10-19T14:35:08.114Z" }
|
|
||||||
wheels = [
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/fc/03/030b47fd46b60fc87af548e57ff59c2ca84b2a1dadbe721bb0ce33896b2e/xdg_base_dirs-6.0.2-py3-none-any.whl", hash = "sha256:3c01d1b758ed4ace150ac960ac0bd13ce4542b9e2cdf01312dcda5012cfebabe", size = 4747, upload-time = "2024-10-19T14:35:05.931Z" },
|
|
||||||
]
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue