29-timesheet-management #30
10 changed files with 716 additions and 57 deletions
|
|
@ -18,7 +18,10 @@ from streamer.localize import (
|
|||
from streamer.localize.preconfigured_configurations import TaskConfiguration
|
||||
from streamer.parse import parse_markdown_file
|
||||
from streamer.query import find_shard_by_position
|
||||
from streamer.query.find import find_shard_by_set_dimension
|
||||
from streamer.settings import Settings
|
||||
from streamer.timesheet.configuration import BasicTimesheetConfiguration
|
||||
from streamer.timesheet.extract import extract_timesheets
|
||||
|
||||
app = typer.Typer()
|
||||
|
||||
|
|
@ -71,6 +74,19 @@ def edit(number: Annotated[int, typer.Argument()] = 1) -> None:
|
|||
click.edit(None, filename=sorted_shards[selected_number].location["file"])
|
||||
|
||||
|
||||
@app.command()
def timesheet() -> None:
    """Print every extracted timesheet: the date on one line, then the
    comma-joined from/to times of its timecards on the next."""
    shards = list(all_files(BasicTimesheetConfiguration))
    for sheet in sorted(extract_timesheets(shards), key=lambda sheet: sheet.date):
        print(sheet.date)
        cells = [f"{card.from_time},{card.to_time}" for card in sheet.timecards]
        print(",".join(cells))
|
||||
|
||||
|
||||
@app.command()
|
||||
def new() -> None:
|
||||
streamer_directory = Settings().base_folder
|
||||
|
|
|
|||
|
|
@ -30,10 +30,14 @@ def localize_shard(
|
|||
|
||||
value = placement.value or marker
|
||||
|
||||
if dimension.propagate:
|
||||
position[placement.dimension] = value
|
||||
else:
|
||||
private_position[placement.dimension] = value
|
||||
if placement.overwrites or (
|
||||
placement.dimension not in position
|
||||
and placement.dimension not in private_position
|
||||
):
|
||||
if dimension.propagate:
|
||||
position[placement.dimension] = value
|
||||
else:
|
||||
private_position[placement.dimension] = value
|
||||
|
||||
children = [
|
||||
localize_shard(child, config, position, adjusted_moment)
|
||||
|
|
|
|||
|
|
@ -41,55 +41,3 @@ TaskConfiguration = RepositoryConfiguration(
|
|||
),
|
||||
},
|
||||
)
|
||||
|
||||
BasicTimesheetConfiguration = RepositoryConfiguration(
|
||||
dimensions={
|
||||
"timesheet": Dimension(
|
||||
display_name="Timesheet",
|
||||
comment="Used by Timesheet-Subcommand to create Timecards",
|
||||
propagate=False,
|
||||
)
|
||||
},
|
||||
markers={
|
||||
"VacationDay": Marker(
|
||||
display_name="Vacation Day",
|
||||
placements=[
|
||||
MarkerPlacement(
|
||||
if_with={"Timesheet"},
|
||||
dimension="timesheet",
|
||||
value="day_off_sick_leave",
|
||||
)
|
||||
],
|
||||
),
|
||||
"Holiday": Marker(
|
||||
display_name="Offical Holiday",
|
||||
placements=[
|
||||
MarkerPlacement(
|
||||
if_with={"Timesheet"},
|
||||
dimension="timesheet",
|
||||
value="day_off_holiday",
|
||||
)
|
||||
],
|
||||
),
|
||||
"SickLeave": Marker(
|
||||
display_name="Sick Leave",
|
||||
placements=[
|
||||
MarkerPlacement(
|
||||
if_with={"Timesheet"},
|
||||
dimension="timesheet",
|
||||
value="day_off_sick_leave",
|
||||
)
|
||||
],
|
||||
),
|
||||
"UndertimeDay": Marker(
|
||||
display_name="Undertime Leave",
|
||||
placements=[
|
||||
MarkerPlacement(
|
||||
if_with={"Timesheet"},
|
||||
dimension="timesheet",
|
||||
value="day_off_undertime",
|
||||
)
|
||||
],
|
||||
),
|
||||
},
|
||||
)
|
||||
|
|
|
|||
|
|
@ -15,6 +15,7 @@ class MarkerPlacement(BaseModel):
|
|||
if_with: set[str] = set()
|
||||
dimension: str
|
||||
value: str | None = None
|
||||
overwrites: bool = True
|
||||
|
||||
|
||||
class Marker(BaseModel):
|
||||
|
|
|
|||
|
|
@ -26,4 +26,10 @@ def find_shard_by_position(
|
|||
)
|
||||
|
||||
|
||||
def find_shard_by_set_dimension(
    shards: list[LocalizedShard], dimension: str
) -> list[LocalizedShard]:
    """Return all shards whose location has *dimension* set (to any value)."""
    return find_shard(shards, lambda shard: dimension in shard.location)


# Single authoritative export list; the stale duplicate that previously
# preceded this function (and was immediately shadowed) has been removed.
__all__ = ["find_shard_by_position", "find_shard", "find_shard_by_set_dimension"]
|
||||
|
|
|
|||
115
src/streamer/timesheet/configuration.py
Normal file
115
src/streamer/timesheet/configuration.py
Normal file
|
|
@ -0,0 +1,115 @@
|
|||
from enum import StrEnum
|
||||
|
||||
from streamer.localize import RepositoryConfiguration
|
||||
from streamer.localize.repository_configuration import (
|
||||
Dimension,
|
||||
Marker,
|
||||
MarkerPlacement,
|
||||
)
|
||||
|
||||
TIMESHEET_TAG = "Timesheet"
|
||||
TIMESHEET_DIMENSION_NAME = "timesheet"
|
||||
|
||||
|
||||
class TimesheetPointType(StrEnum):
    """Values the timesheet dimension of a shard can take.

    The extract pipeline pairs Card ("start work") and Break ("end work")
    points into Timecards; the other members mark whole-day states.
    """

    Card = "CARD"  # start of a work block
    SickLeave = "SICK_LEAVE"
    Vacation = "VACATION"
    Undertime = "UNDERTIME"
    Holiday = "HOLIDAY"
    Break = "BREAK"  # end of a work block
|
||||
|
||||
|
||||
# Repository configuration wiring human-readable markers to timesheet
# dimension values; consumed by the `timesheet` subcommand via extract.
BasicTimesheetConfiguration = RepositoryConfiguration(
    dimensions={
        TIMESHEET_DIMENSION_NAME: Dimension(
            display_name="Timesheet",
            comment="Used by Timesheet-Subcommand to create Timecards",
            propagate=False,
        )
    },
    markers={
        # Bare Timesheet marker: a plain "start work" card. overwrites=False,
        # so it never replaces a value another placement has already set.
        TIMESHEET_TAG: Marker(
            display_name="A default time card",
            placements=[
                MarkerPlacement(
                    dimension=TIMESHEET_DIMENSION_NAME,
                    value=TimesheetPointType.Card.value,
                    overwrites=False,
                )
            ],
        ),
        "VacationDay": Marker(
            display_name="Vacation Day",
            placements=[
                MarkerPlacement(
                    if_with={TIMESHEET_TAG},
                    dimension=TIMESHEET_DIMENSION_NAME,
                    value=TimesheetPointType.Vacation.value,
                )
            ],
        ),
        "Break": Marker(
            display_name="Break",
            placements=[
                MarkerPlacement(
                    if_with={TIMESHEET_TAG},
                    dimension=TIMESHEET_DIMENSION_NAME,
                    value=TimesheetPointType.Break.value,
                )
            ],
        ),
        "LunchBreak": Marker(
            # Fix: display name was copy-pasted as "Break"; reflect the marker.
            display_name="Lunch Break",
            placements=[
                MarkerPlacement(
                    if_with={TIMESHEET_TAG},
                    dimension=TIMESHEET_DIMENSION_NAME,
                    value=TimesheetPointType.Break.value,
                )
            ],
        ),
        "Feierabend": Marker(
            # German for "end of the working day"; mapped to a Break point.
            # NOTE(review): display name "Break" looks copy-pasted too — confirm.
            display_name="Break",
            placements=[
                MarkerPlacement(
                    if_with={TIMESHEET_TAG},
                    dimension=TIMESHEET_DIMENSION_NAME,
                    value=TimesheetPointType.Break.value,
                )
            ],
        ),
        "Holiday": Marker(
            display_name="Official Holiday",  # typo fix: was "Offical"
            placements=[
                MarkerPlacement(
                    if_with={TIMESHEET_TAG},
                    dimension=TIMESHEET_DIMENSION_NAME,
                    value=TimesheetPointType.Holiday.value,
                )
            ],
        ),
        "SickLeave": Marker(
            display_name="Sick Leave",
            placements=[
                MarkerPlacement(
                    if_with={TIMESHEET_TAG},
                    dimension=TIMESHEET_DIMENSION_NAME,
                    value=TimesheetPointType.SickLeave.value,
                )
            ],
        ),
        "UndertimeDay": Marker(
            display_name="Undertime Leave",
            placements=[
                MarkerPlacement(
                    if_with={TIMESHEET_TAG},
                    dimension=TIMESHEET_DIMENSION_NAME,
                    value=TimesheetPointType.Undertime.value,
                )
            ],
        ),
    },
)

__all__ = ["BasicTimesheetConfiguration", "TIMESHEET_TAG", "TIMESHEET_DIMENSION_NAME"]
|
||||
114
src/streamer/timesheet/extract.py
Normal file
114
src/streamer/timesheet/extract.py
Normal file
|
|
@ -0,0 +1,114 @@
|
|||
from datetime import datetime
|
||||
from itertools import groupby
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
from streamer.localize import LocalizedShard
|
||||
from streamer.query.find import find_shard_by_set_dimension
|
||||
|
||||
from .configuration import TIMESHEET_DIMENSION_NAME, TimesheetPointType
|
||||
from .timecard import SpecialDayType, Timecard, Timesheet
|
||||
|
||||
|
||||
class TimesheetPoint(BaseModel):
    """A single timesheet event: when it happened and which kind it is."""

    moment: datetime  # timestamp taken from the originating shard
    type: TimesheetPointType  # parsed from the shard's timesheet dimension value
|
||||
|
||||
|
||||
def shard_to_timesheet_point(shard: LocalizedShard) -> TimesheetPoint:
    """Read the timesheet dimension of *shard* and build the matching point."""
    point_type = TimesheetPointType(shard.location[TIMESHEET_DIMENSION_NAME])
    return TimesheetPoint(moment=shard.moment, type=point_type)
|
||||
|
||||
|
||||
def shards_to_timesheet_points(shards: list[LocalizedShard]) -> list[TimesheetPoint]:
    """Convert every shard that has the timesheet dimension set into a point."""
    relevant = find_shard_by_set_dimension(shards, TIMESHEET_DIMENSION_NAME)
    return [shard_to_timesheet_point(shard) for shard in relevant]
|
||||
|
||||
|
||||
# Point types that classify the whole day, mapped to the resulting day type.
_SPECIAL_DAY_BY_POINT_TYPE = {
    TimesheetPointType.Holiday: SpecialDayType.Holiday,
    TimesheetPointType.Vacation: SpecialDayType.Vacation,
    TimesheetPointType.Undertime: SpecialDayType.Undertime,
}


def aggregate_timecard_day(points: list[TimesheetPoint]) -> Timesheet | None:
    """Aggregate all timesheet points of one calendar day into a Timesheet.

    Points alternate between Card ("start work") and Break ("end work");
    each Card..Break span becomes a Timecard. Holiday/Vacation/Undertime
    points set the day's special type; SickLeave sets a flag.

    Args:
        points: points of a single calendar day; assumed non-empty
            (callers group by day first).

    Returns:
        The aggregated Timesheet, or None if the day has no timecards,
        no sick leave, and no special day type.

    Raises:
        ValueError: if the points span more than one date, if more than one
            special-day point occurs on the day, or if the day does not end
            with a Break (an open work block is never closed).
    """
    sorted_points = sorted(points, key=lambda point: point.moment)

    is_sick_leave = False
    special_day_type: SpecialDayType | None = None

    card_date = sorted_points[0].moment.date()

    # We start "in a break": no work block is open until the first Card.
    last_is_break = True
    last_time = sorted_points[0].moment.time()

    timecards: list[Timecard] = []
    for point in sorted_points:
        if point.moment.date() != card_date:
            raise ValueError("Dates of all given timesheet days should be consistent")

        point_time = point.moment.time()
        special = _SPECIAL_DAY_BY_POINT_TYPE.get(point.type)

        if special is not None:
            # At most one special-day point per day; a second one of the same
            # type is rejected as well (preserves original behavior).
            if special_day_type is not None:
                raise ValueError(
                    f"{card_date} is both {point.type} and {special_day_type}"
                )
            special_day_type = special
        elif point.type is TimesheetPointType.SickLeave:
            is_sick_leave = True
        elif point.type is TimesheetPointType.Break:
            if not last_is_break:
                timecards.append(Timecard(from_time=last_time, to_time=point_time))
            last_is_break = True
            last_time = point_time
        elif point.type is TimesheetPointType.Card:
            if last_is_break:
                last_is_break = False
                last_time = point_time

    if not last_is_break:
        raise ValueError(f"Last Timecard of {card_date} is not a break!")

    if len(timecards) == 0 and not is_sick_leave and special_day_type is None:
        return None

    return Timesheet(
        date=card_date,
        is_sick_leave=is_sick_leave,
        special_day_type=special_day_type,
        timecards=timecards,
    )
|
||||
|
||||
|
||||
def aggregate_timecards(points: list[TimesheetPoint]) -> list[Timesheet]:
    """Group points by calendar day and aggregate each day into a Timesheet.

    ``itertools.groupby`` only merges *adjacent* items with equal keys, so the
    points are sorted by date first — previously a day whose points arrived in
    non-adjacent runs would be aggregated (and validated) as separate groups.

    Days that aggregate to None (nothing reportable) are dropped.
    """
    ordered = sorted(points, key=lambda point: point.moment.date())
    day_timecards = [
        aggregate_timecard_day(list(day_points))
        for _date, day_points in groupby(ordered, key=lambda point: point.moment.date())
    ]

    return [timecard for timecard in day_timecards if timecard is not None]
|
||||
|
||||
|
||||
def extract_timesheets(shards: list[LocalizedShard]) -> list[Timesheet]:
    """Turn localized shards into per-day Timesheets: filter + convert, then aggregate."""
    return aggregate_timecards(shards_to_timesheet_points(shards))


__all__ = ["extract_timesheets"]
|
||||
23
src/streamer/timesheet/timecard.py
Normal file
23
src/streamer/timesheet/timecard.py
Normal file
|
|
@ -0,0 +1,23 @@
|
|||
from datetime import date, time
|
||||
from enum import StrEnum
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
class SpecialDayType(StrEnum):
    """Day-level classifications attached to a Timesheet instead of work time."""

    Vacation = "VACATION"
    Undertime = "UNDERTIME"
    Holiday = "HOLIDAY"
    # Not produced by the extract pipeline visible here — presumably set by
    # downstream consumers for weekend days. TODO confirm.
    Weekend = "WEEKEND"
|
||||
|
||||
|
||||
class Timecard(BaseModel):
    """One continuous span of work time within a single day."""

    from_time: time  # start of the work span
    to_time: time  # end of the work span
|
||||
|
||||
|
||||
class Timesheet(BaseModel):
    """All timesheet information aggregated for one calendar day."""

    date: date
    is_sick_leave: bool = False
    special_day_type: SpecialDayType | None = None
    # Work spans of the day; empty for pure special days / sick-leave-only days.
    timecards: list[Timecard]
|
||||
|
|
@ -85,3 +85,147 @@ class TestLocalize:
|
|||
"timesheet": "coding",
|
||||
},
|
||||
)
|
||||
|
||||
    def test_overwrites_true_propagated_dimension_overwrites_existing_value(self):
        """Marker B (overwrites=True) replaces A's value on a propagated dimension."""
        config = RepositoryConfiguration(
            dimensions={
                "project": Dimension(display_name="Project", propagate=True),
            },
            markers={
                "A": Marker(
                    display_name="A",
                    placements=[MarkerPlacement(dimension="project", value="a")],
                ),
                "B": Marker(
                    display_name="B",
                    placements=[
                        MarkerPlacement(dimension="project", value="b", overwrites=True)
                    ],
                ),
            },
        )

        stream_file = StreamFile(
            file_name="20260131-210000 Test File.md",
            shard=Shard(start_line=1, end_line=1, markers=["A", "B"]),
        )

        assert localize_stream_file(stream_file, config) == LocalizedShard(
            moment=datetime(2026, 1, 31, 21, 0, 0, 0),
            markers=["A", "B"],
            tags=[],
            start_line=1,
            end_line=1,
            children=[],
            location={"file": stream_file.file_name, "project": "b"},
        )
||||
|
||||
    def test_overwrites_false_propagated_dimension_does_not_overwrite_existing_value(
        self,
    ):
        """Marker B (overwrites=False) keeps A's earlier value on a propagated dimension."""
        config = RepositoryConfiguration(
            dimensions={
                "project": Dimension(display_name="Project", propagate=True),
            },
            markers={
                "A": Marker(
                    display_name="A",
                    placements=[MarkerPlacement(dimension="project", value="a")],
                ),
                "B": Marker(
                    display_name="B",
                    placements=[
                        MarkerPlacement(
                            dimension="project", value="b", overwrites=False
                        )
                    ],
                ),
            },
        )

        stream_file = StreamFile(
            file_name="20260131-210000 Test File.md",
            shard=Shard(start_line=1, end_line=1, markers=["A", "B"]),
        )

        assert localize_stream_file(stream_file, config) == LocalizedShard(
            moment=datetime(2026, 1, 31, 21, 0, 0, 0),
            markers=["A", "B"],
            tags=[],
            start_line=1,
            end_line=1,
            children=[],
            location={"file": stream_file.file_name, "project": "a"},
        )
|
||||
|
||||
    def test_overwrites_true_non_propagated_dimension_overwrites_private_value(self):
        """Marker B (overwrites=True) replaces A's value on a non-propagated dimension."""
        config = RepositoryConfiguration(
            dimensions={
                "label": Dimension(display_name="Label", propagate=False),
            },
            markers={
                "A": Marker(
                    display_name="A",
                    placements=[MarkerPlacement(dimension="label", value="a")],
                ),
                "B": Marker(
                    display_name="B",
                    placements=[
                        MarkerPlacement(dimension="label", value="b", overwrites=True)
                    ],
                ),
            },
        )

        stream_file = StreamFile(
            file_name="20260131-210000 Test File.md",
            shard=Shard(start_line=1, end_line=1, markers=["A", "B"]),
        )

        assert localize_stream_file(stream_file, config) == LocalizedShard(
            moment=datetime(2026, 1, 31, 21, 0, 0, 0),
            markers=["A", "B"],
            tags=[],
            start_line=1,
            end_line=1,
            children=[],
            location={"file": stream_file.file_name, "label": "b"},
        )
|
||||
|
||||
    def test_overwrites_false_non_propagated_dimension_does_not_overwrite_private_value(
        self,
    ):
        """Marker B (overwrites=False) keeps A's earlier value on a non-propagated dimension."""
        config = RepositoryConfiguration(
            dimensions={
                "label": Dimension(display_name="Label", propagate=False),
            },
            markers={
                "A": Marker(
                    display_name="A",
                    placements=[
                        MarkerPlacement(dimension="label", value="a", overwrites=True)
                    ],
                ),
                "B": Marker(
                    display_name="B",
                    placements=[
                        MarkerPlacement(dimension="label", value="b", overwrites=False)
                    ],
                ),
            },
        )

        stream_file = StreamFile(
            file_name="20260131-210000 Test File.md",
            shard=Shard(start_line=1, end_line=1, markers=["A", "B"]),
        )

        assert localize_stream_file(stream_file, config) == LocalizedShard(
            moment=datetime(2026, 1, 31, 21, 0, 0, 0),
            markers=["A", "B"],
            tags=[],
            start_line=1,
            end_line=1,
            children=[],
            location={"file": stream_file.file_name, "label": "a"},
        )
|
||||
|
|
|
|||
288
test/timesheet/test_extract_timesheets.py
Normal file
288
test/timesheet/test_extract_timesheets.py
Normal file
|
|
@ -0,0 +1,288 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime, time
|
||||
|
||||
import pytest
|
||||
|
||||
from streamer.localize.localized_shard import LocalizedShard
|
||||
from streamer.timesheet.configuration import (
|
||||
TIMESHEET_DIMENSION_NAME,
|
||||
TimesheetPointType,
|
||||
)
|
||||
from streamer.timesheet.extract import extract_timesheets
|
||||
from streamer.timesheet.timecard import SpecialDayType, Timecard, Timesheet
|
||||
|
||||
|
||||
def point(at: datetime, type: TimesheetPointType) -> LocalizedShard:
    """Build a minimal LocalizedShard the extract pipeline reads as a timesheet point.

    The pipeline filters on the timesheet dimension being set, so `location`
    always carries it, alongside a dummy file entry.
    """
    location = {TIMESHEET_DIMENSION_NAME: type.value, "file": "dummy.md"}
    return LocalizedShard(
        moment=at,
        markers=["Timesheet"],
        tags=[],
        start_line=1,
        end_line=1,
        children=[],
        location=location,
    )
|
||||
|
||||
|
||||
class TestExtractTimesheets:
    """End-to-end tests for extract_timesheets over synthetic LocalizedShards."""

    def test_single_work_block(self):
        day = datetime(2026, 2, 1, 0, 0, 0)
        shards = [
            point(day.replace(hour=9, minute=0), TimesheetPointType.Card),
            point(day.replace(hour=17, minute=30), TimesheetPointType.Break),
        ]

        assert extract_timesheets(shards) == [
            Timesheet(
                date=day.date(),
                is_sick_leave=False,
                special_day_type=None,
                timecards=[Timecard(from_time=time(9, 0), to_time=time(17, 30))],
            )
        ]

    def test_three_work_blocks_separated_by_breaks(self):
        day = datetime(2026, 2, 1, 0, 0, 0)
        shards = [
            point(day.replace(hour=7, minute=15), TimesheetPointType.Card),
            point(day.replace(hour=12, minute=0), TimesheetPointType.Break),
            point(day.replace(hour=12, minute=45), TimesheetPointType.Card),
            point(day.replace(hour=15, minute=0), TimesheetPointType.Break),
            point(day.replace(hour=16, minute=0), TimesheetPointType.Card),
            point(day.replace(hour=17, minute=0), TimesheetPointType.Break),
        ]

        assert extract_timesheets(shards) == [
            Timesheet(
                date=day.date(),
                is_sick_leave=False,
                special_day_type=None,
                timecards=[
                    Timecard(from_time=time(7, 15), to_time=time(12, 0)),
                    Timecard(from_time=time(12, 45), to_time=time(15, 0)),
                    Timecard(from_time=time(16, 0), to_time=time(17, 0)),
                ],
            )
        ]

    def test_input_order_is_not_required_within_a_day(self):
        """
        Points may come unsorted; extraction should sort by timestamp within a day.
        """
        day = datetime(2026, 2, 1, 0, 0, 0)

        shards = [
            point(day.replace(hour=15, minute=0), TimesheetPointType.Break),
            point(day.replace(hour=7, minute=15), TimesheetPointType.Card),
            point(day.replace(hour=12, minute=0), TimesheetPointType.Break),
            point(day.replace(hour=12, minute=45), TimesheetPointType.Card),
            point(day.replace(hour=17, minute=0), TimesheetPointType.Break),
            point(day.replace(hour=16, minute=0), TimesheetPointType.Card),
        ]

        assert extract_timesheets(shards) == [
            Timesheet(
                date=day.date(),
                is_sick_leave=False,
                special_day_type=None,
                timecards=[
                    Timecard(from_time=time(7, 15), to_time=time(12, 0)),
                    Timecard(from_time=time(12, 45), to_time=time(15, 0)),
                    Timecard(from_time=time(16, 0), to_time=time(17, 0)),
                ],
            )
        ]

    def test_groups_by_day(self):
        """
        If points span multiple days, we should get one Timesheet per day.
        """
        day1 = datetime(2026, 2, 1, 0, 0, 0)
        day2 = datetime(2026, 2, 2, 0, 0, 0)

        # Removed a dead duplicate `shards` assignment (day2-first order) that
        # was immediately overwritten by this day-contiguous version.
        shards = [
            point(day1.replace(hour=9, minute=0), TimesheetPointType.Card),
            point(day1.replace(hour=17, minute=0), TimesheetPointType.Break),
            point(day2.replace(hour=10, minute=0), TimesheetPointType.Card),
            point(day2.replace(hour=18, minute=0), TimesheetPointType.Break),
        ]

        assert extract_timesheets(shards) == [
            Timesheet(
                date=day1.date(),
                is_sick_leave=False,
                special_day_type=None,
                timecards=[Timecard(from_time=time(9, 0), to_time=time(17, 0))],
            ),
            Timesheet(
                date=day2.date(),
                is_sick_leave=False,
                special_day_type=None,
                timecards=[Timecard(from_time=time(10, 0), to_time=time(18, 0))],
            ),
        ]

    def test_day_with_only_special_day_type_vacation(self):
        """
        A day can be marked as Vacation without timecards; it should still produce a Timesheet.
        """
        day = datetime(2026, 2, 1, 0, 0, 0)
        shards = [
            point(day.replace(hour=8, minute=0), TimesheetPointType.Vacation),
            point(day.replace(hour=9, minute=0), TimesheetPointType.Break),
        ]

        assert extract_timesheets(shards) == [
            Timesheet(
                date=day.date(),
                is_sick_leave=False,
                special_day_type=SpecialDayType.Vacation,
                timecards=[],
            )
        ]

    def test_day_with_only_special_day_type_holiday(self):
        day = datetime(2026, 2, 1, 0, 0, 0)
        shards = [
            point(day.replace(hour=8, minute=0), TimesheetPointType.Holiday),
            point(day.replace(hour=9, minute=0), TimesheetPointType.Break),
        ]

        assert extract_timesheets(shards) == [
            Timesheet(
                date=day.date(),
                is_sick_leave=False,
                special_day_type=SpecialDayType.Holiday,
                timecards=[],
            )
        ]

    def test_day_with_only_special_day_type_undertime(self):
        day = datetime(2026, 2, 1, 0, 0, 0)
        shards = [
            point(day.replace(hour=8, minute=0), TimesheetPointType.Undertime),
            point(day.replace(hour=9, minute=0), TimesheetPointType.Break),
        ]

        assert extract_timesheets(shards) == [
            Timesheet(
                date=day.date(),
                is_sick_leave=False,
                special_day_type=SpecialDayType.Undertime,
                timecards=[],
            )
        ]

    def test_day_with_sick_leave_and_timecards(self):
        """
        SickLeave should set the flag but not prevent timecard aggregation.
        """
        day = datetime(2026, 2, 1, 0, 0, 0)
        shards = [
            point(day.replace(hour=7, minute=30), TimesheetPointType.SickLeave),
            point(day.replace(hour=9, minute=0), TimesheetPointType.Card),
            point(day.replace(hour=12, minute=0), TimesheetPointType.Break),
        ]

        assert extract_timesheets(shards) == [
            Timesheet(
                date=day.date(),
                is_sick_leave=True,
                special_day_type=None,
                timecards=[Timecard(from_time=time(9, 0), to_time=time(12, 0))],
            )
        ]

    def test_day_with_sick_leave_only(self):
        """
        A day with only SickLeave should still produce a Timesheet (no timecards).
        """
        day = datetime(2026, 2, 1, 0, 0, 0)
        shards = [
            point(day.replace(hour=8, minute=0), TimesheetPointType.SickLeave),
            point(day.replace(hour=9, minute=0), TimesheetPointType.Break),
        ]

        assert extract_timesheets(shards) == [
            Timesheet(
                date=day.date(),
                is_sick_leave=True,
                special_day_type=None,
                timecards=[],
            )
        ]

    def test_empty_input(self):
        assert extract_timesheets([]) == []

    def test_day_with_only_cards_and_no_break_is_invalid(self):
        """
        A day ending 'in work' (last point not a Break) should raise.
        """
        day = datetime(2026, 2, 1, 0, 0, 0)
        shards = [
            point(day.replace(hour=9, minute=0), TimesheetPointType.Card),
            point(day.replace(hour=12, minute=0), TimesheetPointType.Card),
        ]

        with pytest.raises(ValueError, match=r"Last Timecard of .* is not a break"):
            extract_timesheets(shards)

    def test_two_special_day_types_same_day_is_invalid(self):
        """
        A day cannot be both Vacation and Holiday (or any two distinct special types).
        """
        day = datetime(2026, 2, 1, 0, 0, 0)
        shards = [
            point(day.replace(hour=8, minute=0), TimesheetPointType.Vacation),
            point(day.replace(hour=8, minute=5), TimesheetPointType.Holiday),
            point(day.replace(hour=9, minute=0), TimesheetPointType.Break),
        ]

        with pytest.raises(ValueError, match=r"is both .* and .*"):
            extract_timesheets(shards)

    def test_points_with_mixed_dates_inside_one_group_raises(self):
        """
        Defensive: if aggregation receives points spanning multiple dates for a single day,
        it should raise. (This can occur if higher-level grouping is incorrect.)
        """
        day1 = datetime(2026, 2, 1, 0, 0, 0)
        day2 = datetime(2026, 2, 2, 0, 0, 0)

        shards = [
            point(day1.replace(hour=9, minute=0), TimesheetPointType.Card),
            point(day2.replace(hour=9, minute=30), TimesheetPointType.Break),
        ]

        with pytest.raises(ValueError, match=r"Last Timecard of .* is not a break"):
            extract_timesheets(shards)

    def test_day_with_only_breaks_is_ignored(self):
        """
        A day with no timecards and no sick/special markers should not emit a Timesheet.
        """
        day = datetime(2026, 2, 1, 0, 0, 0)
        shards = [
            point(day.replace(hour=12, minute=0), TimesheetPointType.Break),
            point(day.replace(hour=13, minute=0), TimesheetPointType.Break),
        ]

        assert extract_timesheets(shards) == []
|
||||
Loading…
Add table
Add a link
Reference in a new issue