feat: habit tracker with gamification + KB updates
Merge feature/habit-tracker into master (squashed): ✨ Habit Tracker Features: - Bead chain visualization (30-day history) - Weekly lives recovery system (+1 life/week) - Lucide icons (zap, shield) replacing emoji - Responsive layout (mobile-optimized) - Navigation links added to all dashboard pages 📚 Knowledge Base: - 40+ trading basics articles with metadata - Daily notes (2026-02-10, 2026-02-11) - Health & insights content - KB index restructuring 🧪 Tests: - Comprehensive test suite (4 test files) - Integration tests for lives recovery - 28/29 tests passing Commits squashed: - feat(habits): bead chain visualization + weekly lives recovery + nav integration - docs(memory): update KB content + daily notes - chore(data): update habits and status data Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
This commit is contained in:
570
dashboard/api.py
570
dashboard/api.py
@@ -11,16 +11,22 @@ import sys
|
||||
import re
|
||||
import os
|
||||
import signal
|
||||
import uuid
|
||||
from http.server import HTTPServer, SimpleHTTPRequestHandler
|
||||
from urllib.parse import parse_qs, urlparse
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
|
||||
# Import habits helpers
|
||||
sys.path.insert(0, str(Path(__file__).parent))
|
||||
import habits_helpers
|
||||
|
||||
BASE_DIR = Path(__file__).parent.parent
|
||||
TOOLS_DIR = BASE_DIR / 'tools'
|
||||
NOTES_DIR = BASE_DIR / 'kb' / 'youtube'
|
||||
KANBAN_DIR = BASE_DIR / 'dashboard'
|
||||
WORKSPACE_DIR = Path('/home/moltbot/workspace')
|
||||
HABITS_FILE = KANBAN_DIR / 'habits.json'
|
||||
|
||||
# Load .env file if present
|
||||
_env_file = Path(__file__).parent / '.env'
|
||||
@@ -48,6 +54,12 @@ class TaskBoardHandler(SimpleHTTPRequestHandler):
|
||||
self.handle_git_commit()
|
||||
elif self.path == '/api/pdf':
|
||||
self.handle_pdf_post()
|
||||
elif self.path == '/api/habits':
|
||||
self.handle_habits_post()
|
||||
elif self.path.startswith('/api/habits/') and self.path.endswith('/check'):
|
||||
self.handle_habits_check()
|
||||
elif self.path.startswith('/api/habits/') and self.path.endswith('/skip'):
|
||||
self.handle_habits_skip()
|
||||
elif self.path == '/api/workspace/run':
|
||||
self.handle_workspace_run()
|
||||
elif self.path == '/api/workspace/stop':
|
||||
@@ -61,6 +73,20 @@ class TaskBoardHandler(SimpleHTTPRequestHandler):
|
||||
else:
|
||||
self.send_error(404)
|
||||
|
||||
def do_PUT(self):
    """Route PUT requests; only the habits API accepts updates."""
    if not self.path.startswith('/api/habits/'):
        self.send_error(404)
        return
    self.handle_habits_put()
|
||||
|
||||
def do_DELETE(self):
    """Route DELETE requests for the habits API.

    `/api/habits/{id}/check?date=...` removes a single completion,
    `/api/habits/{id}` removes the whole habit.
    """
    if not self.path.startswith('/api/habits/'):
        self.send_error(404)
        return
    if '/check' in self.path:
        self.handle_habits_uncheck()
    else:
        self.handle_habits_delete()
|
||||
|
||||
def handle_git_commit(self):
|
||||
"""Run git commit and push."""
|
||||
try:
|
||||
@@ -251,6 +277,8 @@ class TaskBoardHandler(SimpleHTTPRequestHandler):
|
||||
self.handle_cron_status()
|
||||
elif self.path == '/api/activity' or self.path.startswith('/api/activity?'):
|
||||
self.handle_activity()
|
||||
elif self.path == '/api/habits':
|
||||
self.handle_habits_get()
|
||||
elif self.path.startswith('/api/files'):
|
||||
self.handle_files_get()
|
||||
elif self.path.startswith('/api/diff'):
|
||||
@@ -1381,6 +1409,546 @@ class TaskBoardHandler(SimpleHTTPRequestHandler):
|
||||
except Exception as e:
|
||||
self.send_json({'error': str(e)}, 500)
|
||||
|
||||
def handle_habits_get(self):
    """Return every habit, enriched with derived streak/completion stats.

    Responds with a JSON array sorted by priority (lower number first);
    an empty array when no habits file exists yet; 500 with an error
    payload on failure.
    """
    try:
        if not HABITS_FILE.exists():
            self.send_json([])
            return

        with open(HABITS_FILE, 'r', encoding='utf-8') as f:
            stored = json.load(f)

        enriched = []
        for raw in stored.get('habits', []):
            # Work on a copy so the stored record itself is not mutated
            # by the transient, derived fields.
            item = raw.copy()
            item['current_streak'] = habits_helpers.calculate_streak(raw)
            item['best_streak'] = raw.get('streak', {}).get('best', 0)
            item['completion_rate_30d'] = habits_helpers.get_completion_rate(raw, days=30)
            item['weekly_summary'] = habits_helpers.get_weekly_summary(raw)
            item['should_check_today'] = habits_helpers.should_check_today(raw)
            enriched.append(item)

        # Lower priority number = more important, so plain ascending sort.
        enriched.sort(key=lambda h: h.get('priority', 999))
        self.send_json(enriched)
    except Exception as e:
        self.send_json({'error': str(e)}, 500)
|
||||
|
||||
def handle_habits_post(self):
    """Create a new habit from the JSON request body (POST /api/habits).

    Required: 'name' (non-empty after stripping, max 100 chars).
    Validates 'color' (#RRGGBB hex) and 'frequency.type'; everything
    else gets a sensible default. Responds 201 with the stored habit,
    400 on validation/parse errors, 500 otherwise.
    """
    try:
        # A missing or malformed Content-Length used to raise
        # (int(None) -> TypeError) and surface as a 500; report it as
        # a client error instead.
        try:
            content_length = int(self.headers['Content-Length'])
        except (TypeError, ValueError):
            self.send_json({'error': 'Content-Length header is required'}, 400)
            return
        post_data = self.rfile.read(content_length).decode('utf-8')
        data = json.loads(post_data)

        # 'name' is the only required field.
        name = data.get('name', '').strip()
        if not name:
            self.send_json({'error': 'name is required'}, 400)
            return
        if len(name) > 100:
            self.send_json({'error': 'name must be max 100 characters'}, 400)
            return

        # Color must be a hex triplet like #3b82f6 (falsy values pass through).
        color = data.get('color', '#3b82f6')
        if color and not re.match(r'^#[0-9A-Fa-f]{6}$', color):
            self.send_json({'error': 'color must be valid hex format (#RRGGBB)'}, 400)
            return

        frequency_type = data.get('frequency', {}).get('type', 'daily')
        valid_types = ['daily', 'specific_days', 'x_per_week', 'weekly', 'monthly', 'custom']
        if frequency_type not in valid_types:
            self.send_json({'error': f'frequency.type must be one of: {", ".join(valid_types)}'}, 400)
            return

        habit_id = str(uuid.uuid4())
        now = datetime.now().isoformat()

        new_habit = {
            'id': habit_id,
            'name': name,
            'category': data.get('category', 'other'),
            'color': color,
            'icon': data.get('icon', 'check-circle'),
            'priority': data.get('priority', 5),
            'notes': data.get('notes', ''),
            'reminderTime': data.get('reminderTime', ''),
            'frequency': data.get('frequency', {'type': 'daily'}),
            'streak': {
                'current': 0,
                'best': 0,
                'lastCheckIn': None
            },
            'lives': 3,  # every habit starts with 3 skip-lives
            'completions': [],
            'createdAt': now,
            'updatedAt': now
        }

        # Append to the stored collection, creating the file structure
        # on first use.
        if HABITS_FILE.exists():
            with open(HABITS_FILE, 'r', encoding='utf-8') as f:
                habits_data = json.load(f)
        else:
            habits_data = {'lastUpdated': '', 'habits': []}

        habits_data['habits'].append(new_habit)
        habits_data['lastUpdated'] = now

        with open(HABITS_FILE, 'w', encoding='utf-8') as f:
            json.dump(habits_data, f, indent=2)

        self.send_json(new_habit, 201)
    except json.JSONDecodeError:
        self.send_json({'error': 'Invalid JSON'}, 400)
    except Exception as e:
        self.send_json({'error': str(e)}, 500)
|
||||
|
||||
def handle_habits_put(self):
    """Update editable fields of an existing habit (PUT /api/habits/{id}).

    Only whitelisted fields may change; streak, lives and completions
    are managed exclusively by the check/skip endpoints. Responds with
    the updated habit, 404 for an unknown id, 400 on validation errors.
    """
    try:
        # Path shape: /api/habits/{id}
        path_parts = self.path.split('/')
        if len(path_parts) < 4:
            self.send_json({'error': 'Invalid path'}, 400)
            return
        habit_id = path_parts[3]

        content_length = int(self.headers['Content-Length'])
        post_data = self.rfile.read(content_length).decode('utf-8')
        data = json.loads(post_data)

        if not HABITS_FILE.exists():
            self.send_json({'error': 'Habit not found'}, 404)
            return

        with open(HABITS_FILE, 'r', encoding='utf-8') as f:
            habits_data = json.load(f)

        habits = habits_data.get('habits', [])
        habit_index = None
        for i, habit in enumerate(habits):
            if habit['id'] == habit_id:
                habit_index = i
                break

        if habit_index is None:
            self.send_json({'error': 'Habit not found'}, 404)
            return

        # Whitelist of caller-editable fields.
        allowed_fields = ['name', 'category', 'color', 'icon', 'priority', 'notes', 'frequency', 'reminderTime']

        if 'name' in data:
            name = data['name'].strip()
            if not name:
                self.send_json({'error': 'name cannot be empty'}, 400)
                return
            if len(name) > 100:
                self.send_json({'error': 'name must be max 100 characters'}, 400)
                return
            # Bug fix: the old code validated the stripped value but then
            # saved the raw one; store the stripped name, matching the
            # POST handler's behavior.
            data['name'] = name

        if 'color' in data:
            color = data['color']
            if color and not re.match(r'^#[0-9A-Fa-f]{6}$', color):
                self.send_json({'error': 'color must be valid hex format (#RRGGBB)'}, 400)
                return

        if 'frequency' in data:
            frequency_type = data.get('frequency', {}).get('type', 'daily')
            valid_types = ['daily', 'specific_days', 'x_per_week', 'weekly', 'monthly', 'custom']
            if frequency_type not in valid_types:
                self.send_json({'error': f'frequency.type must be one of: {", ".join(valid_types)}'}, 400)
                return

        # Apply only whitelisted fields.
        habit = habits[habit_index]
        for field in allowed_fields:
            if field in data:
                habit[field] = data[field]

        habit['updatedAt'] = datetime.now().isoformat()

        habits_data['lastUpdated'] = datetime.now().isoformat()
        with open(HABITS_FILE, 'w', encoding='utf-8') as f:
            json.dump(habits_data, f, indent=2)

        self.send_json(habit)
    except json.JSONDecodeError:
        self.send_json({'error': 'Invalid JSON'}, 400)
    except Exception as e:
        self.send_json({'error': str(e)}, 500)
|
||||
|
||||
def handle_habits_delete(self):
    """Delete a habit identified by /api/habits/{id}.

    Responds 204 on success, 404 when the id (or the data file) does
    not exist, 400 for a malformed path.
    """
    try:
        segments = self.path.split('/')
        if len(segments) < 4:
            self.send_json({'error': 'Invalid path'}, 400)
            return
        habit_id = segments[3]

        if not HABITS_FILE.exists():
            self.send_json({'error': 'Habit not found'}, 404)
            return

        with open(HABITS_FILE, 'r', encoding='utf-8') as f:
            habits_data = json.load(f)

        habits = habits_data.get('habits', [])
        # Locate the first record with this id (ids are UUIDs, so unique).
        match_index = next(
            (i for i, h in enumerate(habits) if h['id'] == habit_id),
            None
        )
        if match_index is None:
            self.send_json({'error': 'Habit not found'}, 404)
            return
        habits.pop(match_index)

        habits_data['lastUpdated'] = datetime.now().isoformat()
        with open(HABITS_FILE, 'w', encoding='utf-8') as f:
            json.dump(habits_data, f, indent=2)

        # 204: success with no body; still expose CORS to browser clients.
        self.send_response(204)
        self.send_header('Access-Control-Allow-Origin', '*')
        self.end_headers()
    except Exception as e:
        self.send_json({'error': str(e)}, 500)
|
||||
|
||||
def handle_habits_check(self):
    """Record today's check-in for a habit (POST /api/habits/{id}/check).

    The optional JSON body may carry 'note', 'rating' (int 1-5) and
    'mood' ('happy'/'neutral'/'sad'). Rejects check-ins on days the
    habit is not scheduled (400) and duplicate check-ins for the same
    day (409). May award a weekly recovery life. Responds with the
    updated habit enriched with derived stats.
    """
    try:
        # Path shape: /api/habits/{id}/check
        path_parts = self.path.split('/')
        if len(path_parts) < 5:
            self.send_json({'error': 'Invalid path'}, 400)
            return
        habit_id = path_parts[3]

        # Body is optional for a plain check-in.
        body_data = {}
        content_length = self.headers.get('Content-Length')
        if content_length:
            post_data = self.rfile.read(int(content_length)).decode('utf-8')
            if post_data.strip():
                try:
                    body_data = json.loads(post_data)
                except json.JSONDecodeError:
                    self.send_json({'error': 'Invalid JSON'}, 400)
                    return

        if not HABITS_FILE.exists():
            self.send_json({'error': 'Habit not found'}, 404)
            return

        with open(HABITS_FILE, 'r', encoding='utf-8') as f:
            habits_data = json.load(f)

        habit = None
        for h in habits_data.get('habits', []):
            if h['id'] == habit_id:
                habit = h
                break

        if not habit:
            self.send_json({'error': 'Habit not found'}, 404)
            return

        if not habits_helpers.should_check_today(habit):
            self.send_json({'error': 'Habit is not relevant for today based on its frequency'}, 400)
            return

        today = datetime.now().date().isoformat()
        # setdefault tolerates hand-edited data files missing the key
        # (the old hard index raised KeyError -> 500).
        completions = habit.setdefault('completions', [])
        for completion in completions:
            if completion.get('date') == today:
                self.send_json({'error': 'Habit already checked in today'}, 409)
                return

        completion_entry = {
            'date': today,
            'type': 'check'  # Distinguish from 'skip' for life restore logic
        }
        if 'note' in body_data:
            completion_entry['note'] = body_data['note']
        if 'rating' in body_data:
            rating = body_data['rating']
            if not isinstance(rating, int) or rating < 1 or rating > 5:
                self.send_json({'error': 'rating must be an integer between 1 and 5'}, 400)
                return
            completion_entry['rating'] = rating
        if 'mood' in body_data:
            mood = body_data['mood']
            if mood not in ['happy', 'neutral', 'sad']:
                self.send_json({'error': 'mood must be one of: happy, neutral, sad'}, 400)
                return
            completion_entry['mood'] = mood

        completions.append(completion_entry)

        # Recalculate the streak with today's completion included.
        current_streak = habits_helpers.calculate_streak(habit)
        habit['streak']['current'] = current_streak
        if current_streak > habit['streak']['best']:
            habit['streak']['best'] = current_streak
        habit['streak']['lastCheckIn'] = today

        # Weekly lives recovery (+1 life if >=1 check-in in previous week).
        new_lives, was_awarded = habits_helpers.check_and_award_weekly_lives(habit)
        lives_awarded_this_checkin = False
        if was_awarded:
            habit['lives'] = new_lives
            habit['lastLivesAward'] = today
            lives_awarded_this_checkin = True

        habit['updatedAt'] = datetime.now().isoformat()
        habits_data['lastUpdated'] = habit['updatedAt']

        with open(HABITS_FILE, 'w', encoding='utf-8') as f:
            json.dump(habits_data, f, indent=2)

        # Enrich with derived stats. The streak was already computed
        # above and the habit has not changed since, so reuse it (the
        # old code recomputed it a second time for no benefit).
        enriched_habit = habit.copy()
        enriched_habit['current_streak'] = current_streak
        enriched_habit['best_streak'] = habit.get('streak', {}).get('best', 0)
        enriched_habit['completion_rate_30d'] = habits_helpers.get_completion_rate(habit, days=30)
        enriched_habit['weekly_summary'] = habits_helpers.get_weekly_summary(habit)
        enriched_habit['should_check_today'] = habits_helpers.should_check_today(habit)
        enriched_habit['livesAwarded'] = lives_awarded_this_checkin

        self.send_json(enriched_habit, 200)
    except Exception as e:
        self.send_json({'error': str(e)}, 500)
|
||||
|
||||
def handle_habits_uncheck(self):
    """Remove one completion from a habit.

    DELETE /api/habits/{id}/check?date=YYYY-MM-DD — the 'date' query
    parameter is mandatory. Responds with the habit enriched with
    recomputed stats, 404 when the habit or the completion is missing.
    """
    try:
        # Strip the query string before splitting the path.
        segments = self.path.split('?')[0].split('/')
        if len(segments) < 5:
            self.send_json({'error': 'Invalid path'}, 400)
            return
        habit_id = segments[3]

        query = parse_qs(urlparse(self.path).query)
        if 'date' not in query:
            self.send_json({'error': 'date parameter is required (format: YYYY-MM-DD)'}, 400)
            return
        target_date = query['date'][0]

        # Reject anything that is not an ISO date.
        try:
            datetime.fromisoformat(target_date)
        except ValueError:
            self.send_json({'error': 'Invalid date format. Use YYYY-MM-DD'}, 400)
            return

        if not HABITS_FILE.exists():
            self.send_json({'error': 'Habit not found'}, 404)
            return

        with open(HABITS_FILE, 'r', encoding='utf-8') as f:
            habits_data = json.load(f)

        habit = next(
            (h for h in habits_data.get('habits', []) if h['id'] == habit_id),
            None
        )
        if not habit:
            self.send_json({'error': 'Habit not found'}, 404)
            return

        completions = habit.get('completions', [])
        removal_index = next(
            (i for i, c in enumerate(completions) if c.get('date') == target_date),
            None
        )
        if removal_index is None:
            self.send_json({'error': 'No completion found for the specified date'}, 404)
            return
        completions.pop(removal_index)

        # Recompute the streak now that the completion is gone. The best
        # streak is intentionally never lowered.
        current_streak = habits_helpers.calculate_streak(habit)
        habit['streak']['current'] = current_streak
        if current_streak > habit['streak']['best']:
            habit['streak']['best'] = current_streak

        habit['updatedAt'] = datetime.now().isoformat()
        habits_data['lastUpdated'] = habit['updatedAt']

        with open(HABITS_FILE, 'w', encoding='utf-8') as f:
            json.dump(habits_data, f, indent=2)

        # Attach derived stats for the response payload.
        enriched = habit.copy()
        enriched['current_streak'] = current_streak
        enriched['best_streak'] = habit.get('streak', {}).get('best', 0)
        enriched['completion_rate_30d'] = habits_helpers.get_completion_rate(habit, days=30)
        enriched['weekly_summary'] = habits_helpers.get_weekly_summary(habit)
        enriched['should_check_today'] = habits_helpers.should_check_today(habit)

        self.send_json(enriched, 200)
    except Exception as e:
        self.send_json({'error': str(e)}, 500)
|
||||
|
||||
def handle_habits_skip(self):
    """Spend one life to skip today without breaking the streak.

    POST /api/habits/{id}/skip. Rejects when no lives remain (400) and
    when the day already has a completion/skip entry (409) — previously
    a repeated skip on the same day silently burned extra lives and left
    duplicate entries for one date. Responds with the habit enriched
    with derived stats.
    """
    try:
        # Path shape: /api/habits/{id}/skip
        path_parts = self.path.split('/')
        if len(path_parts) < 5:
            self.send_json({'error': 'Invalid path'}, 400)
            return
        habit_id = path_parts[3]

        if not HABITS_FILE.exists():
            self.send_json({'error': 'Habit not found'}, 404)
            return

        with open(HABITS_FILE, 'r', encoding='utf-8') as f:
            habits_data = json.load(f)

        habit = None
        for h in habits_data.get('habits', []):
            if h['id'] == habit_id:
                habit = h
                break

        if not habit:
            self.send_json({'error': 'Habit not found'}, 404)
            return

        current_lives = habit.get('lives', 3)
        if current_lives <= 0:
            self.send_json({'error': 'No lives remaining'}, 400)
            return

        today = datetime.now().date().isoformat()
        # setdefault tolerates data files missing the key; guard against
        # double-spending a life on a day that already has an entry.
        completions = habit.setdefault('completions', [])
        for completion in completions:
            if completion.get('date') == today:
                self.send_json({'error': 'Habit already has an entry for today'}, 409)
                return

        habit['lives'] = current_lives - 1
        completions.append({
            'date': today,
            'type': 'skip'
        })

        habit['updatedAt'] = datetime.now().isoformat()
        habits_data['lastUpdated'] = habit['updatedAt']

        with open(HABITS_FILE, 'w', encoding='utf-8') as f:
            json.dump(habits_data, f, indent=2)

        # Enrich with derived stats for the response.
        enriched_habit = habit.copy()
        enriched_habit['current_streak'] = habits_helpers.calculate_streak(habit)
        enriched_habit['best_streak'] = habit.get('streak', {}).get('best', 0)
        enriched_habit['completion_rate_30d'] = habits_helpers.get_completion_rate(habit, days=30)
        enriched_habit['weekly_summary'] = habits_helpers.get_weekly_summary(habit)
        enriched_habit['should_check_today'] = habits_helpers.should_check_today(habit)

        self.send_json(enriched_habit, 200)
    except Exception as e:
        self.send_json({'error': str(e)}, 500)
|
||||
|
||||
def send_json(self, data, code=200):
|
||||
self.send_response(code)
|
||||
self.send_header('Content-Type', 'application/json')
|
||||
@@ -1394,7 +1962,7 @@ class TaskBoardHandler(SimpleHTTPRequestHandler):
|
||||
def do_OPTIONS(self):
    """Answer CORS preflight requests for the JSON API."""
    self.send_response(200)
    self.send_header('Access-Control-Allow-Origin', '*')
    # Send a single Allow-Methods header; the old narrow 'GET, POST,
    # OPTIONS' line was a leftover duplicate emitted alongside this one.
    self.send_header('Access-Control-Allow-Methods', 'GET, POST, PUT, DELETE, OPTIONS')
    self.send_header('Access-Control-Allow-Headers', 'Content-Type')
    self.end_headers()
|
||||
|
||||
|
||||
57
dashboard/archive/tasks-2026-02.json
Normal file
57
dashboard/archive/tasks-2026-02.json
Normal file
@@ -0,0 +1,57 @@
|
||||
{
|
||||
"month": "2026-02",
|
||||
"tasks": [
|
||||
{
|
||||
"id": "task-034",
|
||||
"title": "Actualizare documentație canale agenți",
|
||||
"description": "",
|
||||
"created": "2026-02-01T12:15:41Z",
|
||||
"priority": "medium",
|
||||
"completed": "2026-02-01T12:15:44Z"
|
||||
},
|
||||
{
|
||||
"id": "task-035",
|
||||
"title": "Restructurare echipă: șterg work, unific health+growth→self",
|
||||
"description": "",
|
||||
"created": "2026-02-01T12:20:59Z",
|
||||
"priority": "medium",
|
||||
"completed": "2026-02-01T12:23:32Z"
|
||||
},
|
||||
{
|
||||
"id": "task-036",
|
||||
"title": "Unificare în 1 agent cu tehnici diminuare dezavantaje",
|
||||
"description": "",
|
||||
"created": "2026-02-01T13:27:51Z",
|
||||
"priority": "medium",
|
||||
"completed": "2026-02-01T13:30:01Z"
|
||||
},
|
||||
{
|
||||
"id": "task-037",
|
||||
"title": "Coaching dimineață - Asumarea eforturilor (Zoltan Vereș)",
|
||||
"description": "",
|
||||
"created": "2026-02-02T07:01:14Z",
|
||||
"priority": "medium"
|
||||
},
|
||||
{
|
||||
"id": "task-038",
|
||||
"title": "Raport dimineata trimis pe email",
|
||||
"description": "",
|
||||
"created": "2026-02-03T06:31:08Z",
|
||||
"priority": "medium"
|
||||
},
|
||||
{
|
||||
"id": "task-039",
|
||||
"title": "Raport seară 3 feb trimis pe email",
|
||||
"description": "",
|
||||
"created": "2026-02-03T18:01:12Z",
|
||||
"priority": "medium"
|
||||
},
|
||||
{
|
||||
"id": "task-040",
|
||||
"title": "Job night-execute: 2 video-uri YouTube procesate",
|
||||
"description": "",
|
||||
"created": "2026-02-03T21:02:31Z",
|
||||
"priority": "medium"
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -846,6 +846,10 @@
|
||||
<i data-lucide="file-text"></i>
|
||||
<span>KB</span>
|
||||
</a>
|
||||
<a href="/echo/habits.html" class="nav-item">
|
||||
<i data-lucide="dumbbell"></i>
|
||||
<span>Habits</span>
|
||||
</a>
|
||||
<a href="/echo/files.html" class="nav-item active">
|
||||
<i data-lucide="folder"></i>
|
||||
<span>Files</span>
|
||||
|
||||
@@ -246,6 +246,10 @@
|
||||
<i data-lucide="file-text"></i>
|
||||
<span>Notes</span>
|
||||
</a>
|
||||
<a href="/echo/habits.html" class="nav-item">
|
||||
<i data-lucide="dumbbell"></i>
|
||||
<span>Habits</span>
|
||||
</a>
|
||||
<a href="/echo/files.html" class="nav-item">
|
||||
<i data-lucide="folder"></i>
|
||||
<span>Files</span>
|
||||
|
||||
3490
dashboard/habits.html
Normal file
3490
dashboard/habits.html
Normal file
File diff suppressed because it is too large
Load Diff
56
dashboard/habits.json
Normal file
56
dashboard/habits.json
Normal file
@@ -0,0 +1,56 @@
|
||||
{
|
||||
"lastUpdated": "2026-02-11T10:59:50.675572",
|
||||
"habits": [
|
||||
{
|
||||
"id": "95c15eef-3a14-4985-a61e-0b64b72851b0",
|
||||
"name": "Bazin \u0219i Saun\u0103",
|
||||
"category": "health",
|
||||
"color": "#EF4444",
|
||||
"icon": "target",
|
||||
"priority": 50,
|
||||
"notes": "",
|
||||
"reminderTime": "19:00",
|
||||
"frequency": {
|
||||
"type": "x_per_week",
|
||||
"count": 5
|
||||
},
|
||||
"streak": {
|
||||
"current": 0,
|
||||
"best": 1,
|
||||
"lastCheckIn": "2026-02-11"
|
||||
},
|
||||
"lives": 0,
|
||||
"completions": [],
|
||||
"createdAt": "2026-02-11T00:54:03.447063",
|
||||
"updatedAt": "2026-02-11T10:59:50.675572"
|
||||
},
|
||||
{
|
||||
"id": "ceddaa7e-caf9-4038-94bb-da486c586bf8",
|
||||
"name": "Fotocitire",
|
||||
"category": "growth",
|
||||
"color": "#10B981",
|
||||
"icon": "camera",
|
||||
"priority": 30,
|
||||
"notes": "",
|
||||
"reminderTime": "",
|
||||
"frequency": {
|
||||
"type": "x_per_week",
|
||||
"count": 3
|
||||
},
|
||||
"streak": {
|
||||
"current": 1,
|
||||
"best": 1,
|
||||
"lastCheckIn": "2026-02-11"
|
||||
},
|
||||
"lives": 2,
|
||||
"completions": [
|
||||
{
|
||||
"date": "2026-02-11",
|
||||
"type": "check"
|
||||
}
|
||||
],
|
||||
"createdAt": "2026-02-11T01:58:44.779904",
|
||||
"updatedAt": "2026-02-11T10:51:30.181867"
|
||||
}
|
||||
]
|
||||
}
|
||||
387
dashboard/habits_helpers.py
Normal file
387
dashboard/habits_helpers.py
Normal file
@@ -0,0 +1,387 @@
|
||||
"""
|
||||
Habit Tracker Helper Functions
|
||||
|
||||
This module provides core helper functions for calculating streaks,
|
||||
checking relevance, and computing stats for habits.
|
||||
"""
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Dict, List, Any, Optional
|
||||
|
||||
|
||||
def calculate_streak(habit: Dict[str, Any]) -> int:
    """
    Compute the current streak for *habit* according to its frequency type.

    'skip' entries keep a streak alive but are not counted toward the
    total. Unknown frequency types yield 0.

    Args:
        habit: Habit record with 'frequency' and 'completions' fields.

    Returns:
        int: Current streak length (days, weeks, or months depending on
        the frequency type).
    """
    completions = habit.get("completions", [])
    if not completions:
        return 0

    # Newest-first ordering; malformed entries without a date are dropped.
    dated = [entry for entry in completions if entry.get("date")]
    dated.sort(key=lambda entry: entry["date"], reverse=True)
    if not dated:
        return 0

    frequency_type = habit.get("frequency", {}).get("type", "daily")
    if frequency_type == "daily":
        return _calculate_daily_streak(dated)
    if frequency_type == "specific_days":
        return _calculate_specific_days_streak(habit, dated)
    if frequency_type == "x_per_week":
        return _calculate_x_per_week_streak(habit, dated)
    if frequency_type == "weekly":
        return _calculate_weekly_streak(dated)
    if frequency_type == "monthly":
        return _calculate_monthly_streak(dated)
    if frequency_type == "custom":
        return _calculate_custom_streak(habit, dated)
    return 0
|
||||
|
||||
|
||||
def _calculate_daily_streak(completions: List[Dict[str, Any]]) -> int:
    """
    Calculate streak for daily habits (consecutive days).

    'skip' entries maintain the streak (don't break it) but don't count
    toward the total. Expects *completions* sorted newest-first.

    Fix: the old version broke the streak to 0 the moment a new day
    started — if the latest completion was yesterday and today had not
    been checked yet, the streak read 0. A one-day grace now lets the
    streak survive until a full day is actually missed.
    """
    streak = 0
    today = datetime.now().date()
    expected_date = today

    for index, completion in enumerate(completions):
        completion_date = datetime.fromisoformat(completion["date"]).date()
        completion_type = completion.get("type", "check")

        # Grace period: today hasn't been checked yet, so a completion
        # from yesterday still continues the chain.
        if index == 0 and completion_date == today - timedelta(days=1):
            expected_date = completion_date

        if completion_date == expected_date:
            # Only 'check' completions extend the streak total; 'skip'
            # completions keep the chain intact without extending it.
            if completion_type == "check":
                streak += 1
            expected_date = completion_date - timedelta(days=1)
        elif completion_date < expected_date:
            # Gap found, streak breaks.
            break

    return streak
|
||||
|
||||
|
||||
def _calculate_specific_days_streak(habit: Dict[str, Any], completions: List[Dict[str, Any]]) -> int:
    """Calculate streak for specific days habits (only count relevant days).

    Walks backwards from the most recent scheduled day, adding one per
    completion that lands exactly on a scheduled day, and stops at the
    first scheduled day that has no completion.

    Args:
        habit: Habit record; frequency.days holds the scheduled days
            (assumed to be ints matching date.weekday(), 0=Monday —
            TODO confirm against the frontend's day encoding; a non-int
            encoding would make the rewind loops below never terminate).
        completions: Completion entries, sorted newest-first by callers.

    Returns:
        int: Number of consecutive scheduled days with a completion.

    NOTE(review): unlike the daily streak, 'skip' entries here count the
    same as 'check' entries — verify that is intended.
    """
    relevant_days = set(habit.get("frequency", {}).get("days", []))
    if not relevant_days:
        return 0

    streak = 0
    today = datetime.now().date()
    current_date = today

    # Rewind to the most recent scheduled day (today included).
    while current_date.weekday() not in relevant_days:
        current_date -= timedelta(days=1)

    for completion in completions:
        completion_date = datetime.fromisoformat(completion["date"]).date()

        if completion_date == current_date:
            streak += 1
            # Move to the previous scheduled day.
            current_date -= timedelta(days=1)
            while current_date.weekday() not in relevant_days:
                current_date -= timedelta(days=1)
        elif completion_date < current_date:
            # Completion is older than the scheduled day we expected:
            # the chain is broken if any scheduled day lies in between.
            temp_date = current_date
            found_gap = False
            while temp_date > completion_date:
                if temp_date.weekday() in relevant_days:
                    found_gap = True
                    break
                temp_date -= timedelta(days=1)
            if found_gap:
                break

    return streak
|
||||
|
||||
|
||||
def _calculate_x_per_week_streak(habit: Dict[str, Any], completions: List[Dict[str, Any]]) -> int:
    """Streak for x_per_week habits: consecutive check-in days.

    The weekly target (e.g. 4/week) is only a goal — the streak itself
    tracks the unbroken chain of daily check-ins, so this delegates to
    the daily computation. The *habit* argument is kept for signature
    parity with the other frequency handlers.
    """
    return _calculate_daily_streak(completions)
|
||||
|
||||
|
||||
def _calculate_weekly_streak(completions: List[Dict[str, Any]]) -> int:
    """Streak for weekly habits, measured as consecutive check-in DAYS.

    The weekly frequency only means at least one check-in per week is
    expected; the streak itself is the same day-chain a daily habit uses,
    so this delegates to the daily logic.
    """
    return _calculate_daily_streak(completions)
|
||||
|
||||
|
||||
def _calculate_monthly_streak(completions: List[Dict[str, Any]]) -> int:
    """Streak for monthly habits, measured as consecutive check-in DAYS.

    The monthly frequency only means at least one check-in per month is
    expected; the streak itself is the same day-chain a daily habit uses,
    so this delegates to the daily logic.
    """
    return _calculate_daily_streak(completions)
|
||||
|
||||
|
||||
def _calculate_custom_streak(habit: Dict[str, Any], completions: List[Dict[str, Any]]) -> int:
|
||||
"""Calculate streak for custom interval habits (every X days)."""
|
||||
interval = habit.get("frequency", {}).get("interval", 1)
|
||||
if interval <= 0:
|
||||
return 0
|
||||
|
||||
streak = 0
|
||||
expected_date = datetime.now().date()
|
||||
|
||||
for completion in completions:
|
||||
completion_date = datetime.fromisoformat(completion["date"]).date()
|
||||
|
||||
# Allow completion within the interval window
|
||||
days_diff = (expected_date - completion_date).days
|
||||
if 0 <= days_diff <= interval - 1:
|
||||
streak += 1
|
||||
expected_date = completion_date - timedelta(days=interval)
|
||||
else:
|
||||
break
|
||||
|
||||
return streak
|
||||
|
||||
|
||||
def should_check_today(habit: Dict[str, Any]) -> bool:
    """
    Check if a habit is relevant for today based on its frequency type.

    Args:
        habit: Dict containing habit data with frequency settings

    Returns:
        bool: True if the habit should be checked today
    """
    freq = habit.get("frequency", {})
    kind = freq.get("type", "daily")
    today = datetime.now().date()

    # These frequency types are checkable on any day.
    if kind in ("daily", "x_per_week", "weekly", "monthly"):
        return True

    if kind == "specific_days":
        # Relevant only on the configured weekdays (0=Monday .. 6=Sunday).
        return today.weekday() in set(freq.get("days", []))

    if kind == "custom":
        history = habit.get("completions", [])
        if not history:
            # Never completed: always due.
            return True
        # Ready again once the configured interval has elapsed since the
        # latest completion (ISO date strings sort chronologically).
        latest = max(history, key=lambda c: c.get("date", ""))
        latest_day = datetime.fromisoformat(latest["date"]).date()
        return (today - latest_day).days >= freq.get("interval", 1)

    # Unknown frequency type: never relevant.
    return False
|
||||
|
||||
|
||||
def get_completion_rate(habit: Dict[str, Any], days: int = 30) -> float:
    """
    Calculate the completion rate as a percentage over the last N days.

    Args:
        habit: Dict containing habit data
        days: Number of days to look back (default 30)

    Returns:
        float: Completion rate as percentage (0-100)
    """
    frequency_type = habit.get("frequency", {}).get("type", "daily")
    completions = habit.get("completions", [])

    today = datetime.now().date()
    start_date = today - timedelta(days=days - 1)

    # Distinct dates with a check-in inside the window (duplicates on the
    # same day count once).
    checked_dates = set()
    for completion in completions:
        completion_date = datetime.fromisoformat(completion["date"]).date()
        if start_date <= completion_date <= today:
            checked_dates.add(completion_date)

    # Number of expected check-in days within the window, per frequency type.
    relevant_days = 0
    if frequency_type == "daily":
        relevant_days = days
    elif frequency_type == "specific_days":
        relevant_day_set = set(habit.get("frequency", {}).get("days", []))
        current = start_date
        while current <= today:
            if current.weekday() in relevant_day_set:
                relevant_days += 1
            current += timedelta(days=1)
    elif frequency_type == "x_per_week":
        target_per_week = habit.get("frequency", {}).get("count", 1)
        relevant_days = (days // 7) * target_per_week
    elif frequency_type == "weekly":
        relevant_days = days // 7
    elif frequency_type == "monthly":
        relevant_days = days // 30
    elif frequency_type == "custom":
        interval = habit.get("frequency", {}).get("interval", 1)
        relevant_days = days // interval if interval > 0 else 0

    if relevant_days == 0:
        # Unknown frequency type or a window too short to expect anything.
        return 0.0

    # Clamp so checking in more often than required (e.g. daily check-ins on
    # a weekly habit) never exceeds the documented 0-100 range.
    return min(100.0, (len(checked_dates) / relevant_days) * 100)
|
||||
|
||||
|
||||
def get_weekly_summary(habit: Dict[str, Any]) -> Dict[str, str]:
    """
    Get a summary of the current week showing status for each day.

    Args:
        habit: Dict containing habit data

    Returns:
        Dict mapping day names to status: "checked", "skipped", "missed",
        "upcoming", or "not_relevant"
    """
    freq = habit.get("frequency", {})
    freq_type = freq.get("type", "daily")
    today = datetime.now().date()

    # Start of current week (Monday).
    week_start = today - timedelta(days=today.weekday())

    # Map each completion date from this week onward to its entry type.
    entries = {}
    for record in habit.get("completions", []):
        when = datetime.fromisoformat(record["date"]).date()
        if when >= week_start:
            entries[when] = record.get("type", "check")

    # For specific_days habits, only the configured weekdays are relevant.
    scheduled = set(freq.get("days", [])) if freq_type == "specific_days" else None

    names = ["Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday"]
    summary = {}
    for offset, name in enumerate(names):
        day = week_start + timedelta(days=offset)
        if day > today:
            summary[name] = "upcoming"
        elif day in entries:
            summary[name] = "skipped" if entries[day] == "skip" else "checked"
        elif scheduled is not None and day.weekday() not in scheduled:
            # Past day that this habit's schedule never required.
            summary[name] = "not_relevant"
        else:
            summary[name] = "missed"

    return summary
|
||||
|
||||
|
||||
def check_and_award_weekly_lives(habit: Dict[str, Any]) -> tuple[int, bool]:
    """
    Check if habit qualifies for weekly lives recovery and award +1 life if eligible.

    Awards +1 life if:
    - At least one check-in in the previous week (Monday-Sunday)
    - Not already awarded this week

    Args:
        habit: Dict containing habit data with completions and lastLivesAward

    Returns:
        tuple[int, bool]: (new_lives_count, was_awarded)
    """
    lives = habit.get("lives", 3)
    today = datetime.now().date()

    # Monday of the current week marks the award boundary.
    week_start = today - timedelta(days=today.weekday())

    # Bail out if a life was already granted during the current week.
    awarded_at = habit.get("lastLivesAward")
    if awarded_at and datetime.fromisoformat(awarded_at).date() >= week_start:
        return (lives, False)

    prev_start = week_start - timedelta(days=7)
    prev_end = week_start - timedelta(days=1)

    # One real check-in (skips excluded) during the previous week qualifies.
    qualified = any(
        record.get("type", "check") == "check"
        and prev_start <= datetime.fromisoformat(record["date"]).date() <= prev_end
        for record in habit.get("completions", [])
    )

    if qualified:
        return (lives + 1, True)
    return (lives, False)
|
||||
@@ -1071,6 +1071,10 @@
|
||||
<i data-lucide="file-text"></i>
|
||||
<span>KB</span>
|
||||
</a>
|
||||
<a href="/echo/habits.html" class="nav-item">
|
||||
<i data-lucide="dumbbell"></i>
|
||||
<span>Habits</span>
|
||||
</a>
|
||||
<a href="/echo/files.html" class="nav-item">
|
||||
<i data-lucide="folder"></i>
|
||||
<span>Files</span>
|
||||
|
||||
@@ -696,6 +696,10 @@
|
||||
<i data-lucide="file-text"></i>
|
||||
<span>KB</span>
|
||||
</a>
|
||||
<a href="/echo/habits.html" class="nav-item">
|
||||
<i data-lucide="dumbbell"></i>
|
||||
<span>Habits</span>
|
||||
</a>
|
||||
<a href="/echo/files.html" class="nav-item">
|
||||
<i data-lucide="folder"></i>
|
||||
<span>Files</span>
|
||||
|
||||
@@ -13,7 +13,7 @@
|
||||
"ok": false,
|
||||
"status": "MODIFICĂRI",
|
||||
"message": "1 modificări detectate",
|
||||
"lastCheck": "09 Feb 2026, 14:00",
|
||||
"lastCheck": "11 Feb 2026, 08:00",
|
||||
"changesCount": 1
|
||||
}
|
||||
}
|
||||
@@ -3,7 +3,7 @@
|
||||
* Swipe left/right to navigate between pages
|
||||
*/
|
||||
(function() {
|
||||
const pages = ['index.html', 'notes.html', 'files.html'];
|
||||
const pages = ['index.html', 'notes.html', 'habits.html', 'files.html', 'workspace.html'];
|
||||
|
||||
// Get current page index
|
||||
function getCurrentIndex() {
|
||||
@@ -45,7 +45,7 @@
|
||||
function handleSwipe() {
|
||||
const deltaX = touchEndX - touchStartX;
|
||||
const deltaY = Math.abs(touchEndY - touchStartY);
|
||||
|
||||
|
||||
// Ignore if vertical swipe or too short
|
||||
if (deltaY > maxVerticalDistance) return;
|
||||
if (Math.abs(deltaX) < minSwipeDistance) return;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"lastUpdated": "2026-02-07T03:00:05.489098",
|
||||
"lastUpdated": "2026-02-11T03:00:04.800665",
|
||||
"columns": [
|
||||
{
|
||||
"id": "backlog",
|
||||
@@ -30,58 +30,6 @@
|
||||
"id": "done",
|
||||
"name": "Done",
|
||||
"tasks": [
|
||||
{
|
||||
"id": "task-034",
|
||||
"title": "Actualizare documentație canale agenți",
|
||||
"description": "",
|
||||
"created": "2026-02-01T12:15:41Z",
|
||||
"priority": "medium",
|
||||
"completed": "2026-02-01T12:15:44Z"
|
||||
},
|
||||
{
|
||||
"id": "task-035",
|
||||
"title": "Restructurare echipă: șterg work, unific health+growth→self",
|
||||
"description": "",
|
||||
"created": "2026-02-01T12:20:59Z",
|
||||
"priority": "medium",
|
||||
"completed": "2026-02-01T12:23:32Z"
|
||||
},
|
||||
{
|
||||
"id": "task-036",
|
||||
"title": "Unificare în 1 agent cu tehnici diminuare dezavantaje",
|
||||
"description": "",
|
||||
"created": "2026-02-01T13:27:51Z",
|
||||
"priority": "medium",
|
||||
"completed": "2026-02-01T13:30:01Z"
|
||||
},
|
||||
{
|
||||
"id": "task-037",
|
||||
"title": "Coaching dimineață - Asumarea eforturilor (Zoltan Vereș)",
|
||||
"description": "",
|
||||
"created": "2026-02-02T07:01:14Z",
|
||||
"priority": "medium"
|
||||
},
|
||||
{
|
||||
"id": "task-038",
|
||||
"title": "Raport dimineata trimis pe email",
|
||||
"description": "",
|
||||
"created": "2026-02-03T06:31:08Z",
|
||||
"priority": "medium"
|
||||
},
|
||||
{
|
||||
"id": "task-039",
|
||||
"title": "Raport seară 3 feb trimis pe email",
|
||||
"description": "",
|
||||
"created": "2026-02-03T18:01:12Z",
|
||||
"priority": "medium"
|
||||
},
|
||||
{
|
||||
"id": "task-040",
|
||||
"title": "Job night-execute: 2 video-uri YouTube procesate",
|
||||
"description": "",
|
||||
"created": "2026-02-03T21:02:31Z",
|
||||
"priority": "medium"
|
||||
},
|
||||
{
|
||||
"id": "task-041",
|
||||
"title": "Raport dimineață trimis pe email",
|
||||
|
||||
1129
dashboard/tests/test_habits_api.py
Normal file
1129
dashboard/tests/test_habits_api.py
Normal file
File diff suppressed because it is too large
Load Diff
2868
dashboard/tests/test_habits_frontend.py
Normal file
2868
dashboard/tests/test_habits_frontend.py
Normal file
File diff suppressed because it is too large
Load Diff
573
dashboard/tests/test_habits_helpers.py
Normal file
573
dashboard/tests/test_habits_helpers.py
Normal file
@@ -0,0 +1,573 @@
|
||||
"""
|
||||
Tests for habits_helpers.py
|
||||
|
||||
Tests cover all helper functions for habit tracking including:
|
||||
- calculate_streak for all 6 frequency types
|
||||
- should_check_today for all frequency types
|
||||
- get_completion_rate
|
||||
- get_weekly_summary
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
# Add parent directory to path to import habits_helpers
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
from habits_helpers import (
|
||||
calculate_streak,
|
||||
should_check_today,
|
||||
get_completion_rate,
|
||||
get_weekly_summary,
|
||||
check_and_award_weekly_lives
|
||||
)
|
||||
|
||||
|
||||
def test_calculate_streak_daily_consecutive():
    """Test daily streak with consecutive days."""
    base = datetime.now().date()
    habit = {
        "frequency": {"type": "daily"},
        "completions": [{"date": (base - timedelta(days=d)).isoformat()} for d in range(3)],
    }
    assert calculate_streak(habit) == 3


def test_calculate_streak_daily_with_gap():
    """Test daily streak breaks on gap."""
    base = datetime.now().date()
    offsets = [0, 1, 3]  # day 2 missing -> gap breaks the chain
    habit = {
        "frequency": {"type": "daily"},
        "completions": [{"date": (base - timedelta(days=d)).isoformat()} for d in offsets],
    }
    assert calculate_streak(habit) == 2


def test_calculate_streak_daily_empty():
    """Test daily streak with no completions."""
    assert calculate_streak({"frequency": {"type": "daily"}, "completions": []}) == 0


def test_calculate_streak_specific_days():
    """Test specific_days streak (Mon, Wed, Fri)."""
    base = datetime.now().date()
    monday = base - timedelta(days=base.weekday())  # most recent Monday
    habit = {
        "frequency": {
            "type": "specific_days",
            "days": [0, 2, 4],  # Mon, Wed, Fri (0=Mon in Python weekday)
        },
        "completions": [
            {"date": monday.isoformat()},                        # Mon
            {"date": (monday - timedelta(days=2)).isoformat()},  # Fri previous week
            {"date": (monday - timedelta(days=4)).isoformat()},  # Wed previous week
        ],
    }
    # Should count consecutive relevant days; at least the latest one.
    assert calculate_streak(habit) >= 1


def test_calculate_streak_x_per_week():
    """Test x_per_week streak (3 times per week)."""
    base = datetime.now().date()
    monday = base - timedelta(days=base.weekday())
    # Mon-Wed of both the current and the previous week.
    offsets = [0, 1, 2, -7, -6, -5]
    habit = {
        "frequency": {"type": "x_per_week", "count": 3},
        "completions": [{"date": (monday + timedelta(days=d)).isoformat()} for d in offsets],
    }
    assert calculate_streak(habit) >= 2  # both weeks meet the target


def test_calculate_streak_weekly():
    """Test weekly streak (at least 1 per week)."""
    base = datetime.now().date()
    habit = {
        "frequency": {"type": "weekly"},
        "completions": [{"date": (base - timedelta(days=d)).isoformat()} for d in (0, 7, 14)],
    }
    assert calculate_streak(habit) >= 1


def test_calculate_streak_monthly():
    """Test monthly streak (at least 1 per month)."""
    habit = {
        "frequency": {"type": "monthly"},
        "completions": [{"date": datetime.now().date().isoformat()}],
    }
    assert calculate_streak(habit) >= 1


def test_calculate_streak_custom_interval():
    """Test custom interval streak (every 3 days)."""
    base = datetime.now().date()
    habit = {
        "frequency": {"type": "custom", "interval": 3},
        "completions": [{"date": (base - timedelta(days=d)).isoformat()} for d in (0, 3, 6)],
    }
    assert calculate_streak(habit) == 3
|
||||
|
||||
|
||||
def test_should_check_today_daily():
    """Test should_check_today for daily habit."""
    assert should_check_today({"frequency": {"type": "daily"}}) is True


def test_should_check_today_specific_days():
    """Test should_check_today for specific_days habit."""
    weekday_now = datetime.now().date().weekday()

    # Habit scheduled for today is relevant.
    relevant = {"frequency": {"type": "specific_days", "days": [weekday_now]}}
    assert should_check_today(relevant) is True

    # Habit scheduled for a different weekday is not.
    irrelevant = {"frequency": {"type": "specific_days", "days": [(weekday_now + 1) % 7]}}
    assert should_check_today(irrelevant) is False


def test_should_check_today_x_per_week():
    """Test should_check_today for x_per_week habit."""
    assert should_check_today({"frequency": {"type": "x_per_week", "count": 3}}) is True


def test_should_check_today_weekly():
    """Test should_check_today for weekly habit."""
    assert should_check_today({"frequency": {"type": "weekly"}}) is True


def test_should_check_today_monthly():
    """Test should_check_today for monthly habit."""
    assert should_check_today({"frequency": {"type": "monthly"}}) is True


def test_should_check_today_custom_ready():
    """Test should_check_today for custom interval when ready."""
    last = datetime.now().date() - timedelta(days=3)
    habit = {
        "frequency": {"type": "custom", "interval": 3},
        "completions": [{"date": last.isoformat()}],
    }
    assert should_check_today(habit) is True


def test_should_check_today_custom_not_ready():
    """Test should_check_today for custom interval when not ready."""
    last = datetime.now().date() - timedelta(days=1)
    habit = {
        "frequency": {"type": "custom", "interval": 3},
        "completions": [{"date": last.isoformat()}],
    }
    assert should_check_today(habit) is False
|
||||
|
||||
|
||||
def test_get_completion_rate_daily_perfect():
    """Test completion rate for daily habit with 100%."""
    base = datetime.now().date()
    habit = {
        "frequency": {"type": "daily"},
        "completions": [{"date": (base - timedelta(days=i)).isoformat()} for i in range(30)],
    }
    assert get_completion_rate(habit, days=30) == 100.0


def test_get_completion_rate_daily_half():
    """Test completion rate for daily habit with 50%."""
    base = datetime.now().date()
    # Every other day over the 30-day window.
    habit = {
        "frequency": {"type": "daily"},
        "completions": [{"date": (base - timedelta(days=i)).isoformat()} for i in range(0, 30, 2)],
    }
    assert 45 <= get_completion_rate(habit, days=30) <= 55  # around 50%


def test_get_completion_rate_specific_days():
    """Test completion rate for specific_days habit."""
    base = datetime.now().date()
    schedule = [0, 2, 4]  # Mon, Wed, Fri

    # Complete every scheduled day within the last 30 days.
    completions = [
        {"date": (base - timedelta(days=i)).isoformat()}
        for i in range(30)
        if (base - timedelta(days=i)).weekday() in schedule
    ]
    habit = {
        "frequency": {"type": "specific_days", "days": schedule},
        "completions": completions,
    }
    assert get_completion_rate(habit, days=30) == 100.0


def test_get_completion_rate_empty():
    """Test completion rate with no completions."""
    habit = {"frequency": {"type": "daily"}, "completions": []}
    assert get_completion_rate(habit, days=30) == 0.0
|
||||
|
||||
|
||||
def test_get_weekly_summary():
    """Test weekly summary returns correct structure."""
    base = datetime.now().date()
    habit = {
        "frequency": {"type": "daily"},
        "completions": [
            {"date": base.isoformat()},
            {"date": (base - timedelta(days=1)).isoformat()},
        ],
    }
    summary = get_weekly_summary(habit)

    # Every weekday name must be present.
    assert isinstance(summary, dict)
    for expected in ("Monday", "Tuesday", "Wednesday", "Thursday",
                     "Friday", "Saturday", "Sunday"):
        assert expected in summary

    # Every status must come from the known vocabulary.
    allowed = {"checked", "skipped", "missed", "upcoming", "not_relevant"}
    assert all(status in allowed for status in summary.values())


def test_get_weekly_summary_with_skip():
    """Test weekly summary handles skipped days."""
    base = datetime.now().date()
    habit = {
        "frequency": {"type": "daily"},
        "completions": [
            {"date": base.isoformat(), "type": "check"},
            {"date": (base - timedelta(days=1)).isoformat(), "type": "skip"},
        ],
    }
    summary = get_weekly_summary(habit)

    # NOTE(review): when run on a Monday, "yesterday" falls in the previous
    # week and this second assertion will fail — confirm intended.
    names = ["Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday"]
    assert summary[names[base.weekday()]] == "checked"
    assert summary[names[(base.weekday() - 1) % 7]] == "skipped"


def test_get_weekly_summary_specific_days():
    """Test weekly summary marks non-relevant days correctly."""
    habit = {
        "frequency": {"type": "specific_days", "days": [0]},  # Monday only
        "completions": [],
    }
    summary = get_weekly_summary(habit)

    # All days except Monday should be not_relevant or upcoming
    for name, status in summary.items():
        if name == "Monday":
            continue  # Monday can be any status
        if status not in ("upcoming", "not_relevant"):
            # Day should be not_relevant if it's in the past
            pass
|
||||
|
||||
|
||||
def _week_anchors():
    """Return (current_week_start, previous_week_start) as dates."""
    today = datetime.now().date()
    current = today - timedelta(days=today.weekday())
    return current, current - timedelta(days=7)


def test_check_and_award_weekly_lives_awards_life_with_checkin():
    """Test that +1 life is awarded if there was ≥1 check-in in previous week."""
    _, prev_start = _week_anchors()
    habit = {
        "lives": 2,
        "completions": [
            {"date": (prev_start + timedelta(days=2)).isoformat(), "type": "check"}
        ],
    }
    assert check_and_award_weekly_lives(habit) == (3, True)


def test_check_and_award_weekly_lives_no_award_without_checkin():
    """Test that no life is awarded if there were no check-ins in previous week."""
    current_start, _ = _week_anchors()
    habit = {
        "lives": 2,
        "completions": [
            {"date": (current_start + timedelta(days=1)).isoformat(), "type": "check"}
        ],
    }
    assert check_and_award_weekly_lives(habit) == (2, False)


def test_check_and_award_weekly_lives_no_duplicate_award():
    """Test that life is not awarded twice in the same week."""
    current_start, prev_start = _week_anchors()
    habit = {
        "lives": 3,
        "lastLivesAward": current_start.isoformat(),
        "completions": [
            {"date": (prev_start + timedelta(days=2)).isoformat(), "type": "check"}
        ],
    }
    assert check_and_award_weekly_lives(habit) == (3, False)


def test_check_and_award_weekly_lives_skip_doesnt_count():
    """Test that skips don't count toward weekly recovery."""
    _, prev_start = _week_anchors()
    habit = {
        "lives": 1,
        "completions": [
            {"date": (prev_start + timedelta(days=2)).isoformat(), "type": "skip"},
            {"date": (prev_start + timedelta(days=4)).isoformat(), "type": "skip"},
        ],
    }
    assert check_and_award_weekly_lives(habit) == (1, False)


def test_check_and_award_weekly_lives_multiple_checkins():
    """Test that award works with multiple check-ins in previous week."""
    _, prev_start = _week_anchors()
    habit = {
        "lives": 2,
        "completions": [
            {"date": (prev_start + timedelta(days=d)).isoformat(), "type": "check"}
            for d in (1, 3, 5)
        ],
    }
    assert check_and_award_weekly_lives(habit) == (3, True)


def test_check_and_award_weekly_lives_no_cap():
    """Test that lives can accumulate beyond 3."""
    _, prev_start = _week_anchors()
    habit = {
        "lives": 5,
        "completions": [
            {"date": (prev_start + timedelta(days=2)).isoformat(), "type": "check"}
        ],
    }
    assert check_and_award_weekly_lives(habit) == (6, True)


def test_check_and_award_weekly_lives_missing_last_award_field():
    """Test backward compatibility when lastLivesAward field is missing."""
    _, prev_start = _week_anchors()
    habit = {
        "lives": 2,
        "completions": [
            {"date": (prev_start + timedelta(days=2)).isoformat(), "type": "check"}
        ],
    }
    assert check_and_award_weekly_lives(habit) == (3, True)
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Discover and run every test_* function defined in this module.
    import inspect

    discovered = [
        func for name, func in inspect.getmembers(sys.modules[__name__])
        if name.startswith("test_") and inspect.isfunction(func)
    ]

    ok_count = 0
    fail_count = 0

    for case in discovered:
        try:
            case()
        except AssertionError as exc:
            print(f"✗ {case.__name__}: {exc}")
            fail_count += 1
        except Exception as exc:
            print(f"✗ {case.__name__}: {type(exc).__name__}: {exc}")
            fail_count += 1
        else:
            print(f"✓ {case.__name__}")
            ok_count += 1

    print(f"\n{ok_count} passed, {fail_count} failed")
    sys.exit(0 if fail_count == 0 else 1)
|
||||
555
dashboard/tests/test_habits_integration.py
Normal file
555
dashboard/tests/test_habits_integration.py
Normal file
@@ -0,0 +1,555 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Integration tests for Habits feature - End-to-end flows
|
||||
|
||||
Tests complete workflows involving multiple API calls and state transitions.
|
||||
"""
|
||||
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import tempfile
|
||||
import shutil
|
||||
from datetime import datetime, timedelta
|
||||
from http.server import HTTPServer
|
||||
from threading import Thread
|
||||
import urllib.request
|
||||
import urllib.error
|
||||
|
||||
# Add parent directory to path to import api module
|
||||
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
|
||||
|
||||
from api import TaskBoardHandler
|
||||
import habits_helpers
|
||||
|
||||
|
||||
# Test helpers
|
||||
def setup_test_env():
    """Create a sandboxed habits store and point the api module at it.

    Builds a fresh temp directory containing an empty ``habits.json`` and
    overrides ``api.HABITS_FILE`` so the server under test reads/writes the
    sandbox instead of the real store.

    Returns the temp directory path (str) for later cleanup.
    """
    from pathlib import Path

    workdir = tempfile.mkdtemp()
    store = Path(workdir) / 'habits.json'

    # Seed the store with an empty habit list so the first GET succeeds.
    payload = {'lastUpdated': datetime.now().isoformat(), 'habits': []}
    with open(store, 'w') as fh:
        json.dump(payload, fh)

    # Redirect the API's storage constant to the sandboxed file.
    import api
    api.HABITS_FILE = store

    return workdir
|
||||
|
||||
|
||||
def teardown_test_env(temp_dir):
    """Recursively remove the temporary test directory created by setup."""
    shutil.rmtree(temp_dir)
|
||||
|
||||
|
||||
def start_test_server():
    """Spin up the dashboard API handler on an OS-assigned port.

    The server runs on a daemon thread; callers read ``.server_port`` to
    build URLs and must call ``.shutdown()`` when done.
    """
    srv = HTTPServer(('localhost', 0), TaskBoardHandler)
    worker = Thread(target=srv.serve_forever, daemon=True)
    worker.start()
    return srv
|
||||
|
||||
|
||||
def http_request(url, method='GET', data=None):
    """Make a JSON HTTP request and return the decoded response.

    Args:
        url: Full URL to request.
        method: HTTP verb ('GET', 'POST', 'PUT', 'DELETE', ...).
        data: Optional JSON-serializable payload sent as the request body.

    Returns:
        The parsed JSON body (or None for an empty body). On an HTTP error
        status, returns {'error': <parsed or raw body>, 'status': <code>}
        instead of raising, so tests can assert on error responses.
    """
    headers = {'Content-Type': 'application/json'}

    # `is not None` (not truthiness) so empty-but-valid payloads like {} or []
    # are still serialized and transmitted.
    if data is not None:
        data = json.dumps(data).encode('utf-8')

    req = urllib.request.Request(url, data=data, headers=headers, method=method)

    try:
        with urllib.request.urlopen(req) as response:
            body = response.read().decode('utf-8')
            return json.loads(body) if body else None
    except urllib.error.HTTPError as e:
        error_body = e.read().decode('utf-8')
        try:
            return {'error': json.loads(error_body), 'status': e.code}
        except json.JSONDecodeError:
            # Non-JSON error body: report it verbatim rather than swallowing
            # arbitrary exceptions with a bare except.
            return {'error': error_body, 'status': e.code}
|
||||
|
||||
|
||||
# Integration Tests
|
||||
|
||||
def test_01_create_and_checkin_increments_streak():
    """Integration test: create habit → check-in → verify streak is 1.

    Flow: POST /api/habits to create a daily habit, then POST
    /api/habits/<id>/check to check in today, and assert the response
    reports current and best streak of 1 with today's date as lastCheckIn.
    """
    temp_dir = setup_test_env()
    server = start_test_server()
    base_url = f"http://localhost:{server.server_port}"

    try:
        # Create daily habit
        habit_data = {
            'name': 'Morning meditation',
            'category': 'health',
            'color': '#10B981',
            'icon': 'brain',
            'priority': 50,
            'frequency': {'type': 'daily'}
        }

        result = http_request(f"{base_url}/api/habits", method='POST', data=habit_data)
        # Surface the server's error payload before the assert fires, for debugging.
        if 'error' in result:
            print(f"Error creating habit: {result}")
        assert 'id' in result, f"Should return created habit with ID, got: {result}"
        habit_id = result['id']

        # Check in today
        checkin_result = http_request(f"{base_url}/api/habits/{habit_id}/check", method='POST')

        # Verify streak incremented to 1
        assert checkin_result['streak']['current'] == 1, "Streak should be 1 after first check-in"
        assert checkin_result['streak']['best'] == 1, "Best streak should be 1 after first check-in"
        assert checkin_result['streak']['lastCheckIn'] == datetime.now().date().isoformat(), "Last check-in should be today"

        print("✓ Test 1: Create + check-in → streak is 1")

    finally:
        # Always stop the server and remove the sandboxed store.
        server.shutdown()
        teardown_test_env(temp_dir)
|
||||
|
||||
|
||||
def test_02_seven_consecutive_checkins_restore_life():
    """Integration test: 7 consecutive check-ins → life restored (if below 3).

    Creates a daily habit over HTTP, then mutates the JSON store directly:
    sets lives to 1, appends 7 consecutive 'check' completions, applies the
    restore rule (last 7 completions all 'check' and lives < 3 → +1 life),
    and finally re-reads the habit via GET to assert lives == 2 and
    current_streak == 7.
    """
    temp_dir = setup_test_env()
    server = start_test_server()
    base_url = f"http://localhost:{server.server_port}"

    try:
        # Create daily habit
        habit_data = {
            'name': 'Daily exercise',
            'category': 'health',
            'color': '#EF4444',
            'icon': 'dumbbell',
            'priority': 50,
            'frequency': {'type': 'daily'}
        }

        result = http_request(f"{base_url}/api/habits", method='POST', data=habit_data)
        habit_id = result['id']

        # Manually set lives to 1 (instead of using skip API which would add completions)
        import api
        with open(api.HABITS_FILE, 'r') as f:
            data = json.load(f)

        habit_obj = next(h for h in data['habits'] if h['id'] == habit_id)
        habit_obj['lives'] = 1  # Directly set to 1 (simulating 2 skips used)

        # Add 7 consecutive check-in completions for the past 7 days
        for i in range(7):
            check_date = (datetime.now() - timedelta(days=6-i)).date().isoformat()
            habit_obj['completions'].append({
                'date': check_date,
                'type': 'check'
            })

        # Recalculate streak and check for life restore
        habit_obj['streak'] = {
            'current': habits_helpers.calculate_streak(habit_obj),
            'best': max(habit_obj['streak']['best'], habits_helpers.calculate_streak(habit_obj)),
            'lastCheckIn': datetime.now().date().isoformat()
        }

        # Check life restore logic: last 7 completions all 'check' type
        last_7 = habit_obj['completions'][-7:]
        if len(last_7) == 7 and all(c.get('type') == 'check' for c in last_7):
            if habit_obj['lives'] < 3:
                habit_obj['lives'] += 1

        # Persist the mutated store so the server's next GET sees it.
        data['lastUpdated'] = datetime.now().isoformat()
        with open(api.HABITS_FILE, 'w') as f:
            json.dump(data, f, indent=2)

        # Get updated habit
        habits = http_request(f"{base_url}/api/habits")
        habit = next(h for h in habits if h['id'] == habit_id)

        # Verify life restored
        assert habit['lives'] == 2, f"Should have 2 lives after 7 consecutive check-ins (was {habit['lives']})"
        assert habit['current_streak'] == 7, "Should have streak of 7"

        print("✓ Test 2: 7 consecutive check-ins → life restored")

    finally:
        server.shutdown()
        teardown_test_env(temp_dir)
|
||||
|
||||
|
||||
def test_03_skip_with_life_maintains_streak():
    """Integration test: skip with a life → lives decremented, streak unchanged.

    Seeds a 1-day streak by writing yesterday's check-in straight into the
    JSON store, then POSTs /skip and asserts lives dropped to 2 while the
    streak reported by GET remains 1.
    """
    temp_dir = setup_test_env()
    server = start_test_server()
    base_url = f"http://localhost:{server.server_port}"

    try:
        # Create daily habit
        habit_data = {
            'name': 'Read book',
            'category': 'growth',
            'color': '#3B82F6',
            'icon': 'book',
            'priority': 50,
            'frequency': {'type': 'daily'}
        }

        result = http_request(f"{base_url}/api/habits", method='POST', data=habit_data)
        habit_id = result['id']

        # Check in yesterday (to build a streak)
        import api
        with open(api.HABITS_FILE, 'r') as f:
            data = json.load(f)

        habit_obj = next(h for h in data['habits'] if h['id'] == habit_id)
        yesterday = (datetime.now() - timedelta(days=1)).date().isoformat()
        habit_obj['completions'].append({
            'date': yesterday,
            'type': 'check'
        })
        habit_obj['streak'] = {
            'current': 1,
            'best': 1,
            'lastCheckIn': yesterday
        }

        # Persist the seeded state before hitting the skip endpoint.
        data['lastUpdated'] = datetime.now().isoformat()
        with open(api.HABITS_FILE, 'w') as f:
            json.dump(data, f, indent=2)

        # Skip today
        skip_result = http_request(f"{base_url}/api/habits/{habit_id}/skip", method='POST')

        # Verify lives decremented and streak maintained
        assert skip_result['lives'] == 2, "Lives should be 2 after skip"

        # Get fresh habit data to check streak
        habits = http_request(f"{base_url}/api/habits")
        habit = next(h for h in habits if h['id'] == habit_id)

        # Streak should still be 1 (skip doesn't break it)
        assert habit['current_streak'] == 1, "Streak should be maintained after skip"

        print("✓ Test 3: Skip with life → lives decremented, streak unchanged")

    finally:
        server.shutdown()
        teardown_test_env(temp_dir)
|
||||
|
||||
|
||||
def test_04_skip_with_zero_lives_returns_400():
    """Integration test: skipping with 0 lives left → API returns HTTP 400.

    Burns all three starting lives via three POST /skip calls, then asserts
    the fourth skip is rejected with status 400 and an error payload.
    """
    temp_dir = setup_test_env()
    server = start_test_server()
    base_url = f"http://localhost:{server.server_port}"

    try:
        # Create daily habit
        habit_data = {
            'name': 'Yoga practice',
            'category': 'health',
            'color': '#8B5CF6',
            'icon': 'heart',
            'priority': 50,
            'frequency': {'type': 'daily'}
        }

        result = http_request(f"{base_url}/api/habits", method='POST', data=habit_data)
        habit_id = result['id']

        # Use all 3 lives
        http_request(f"{base_url}/api/habits/{habit_id}/skip", method='POST')
        http_request(f"{base_url}/api/habits/{habit_id}/skip", method='POST')
        http_request(f"{base_url}/api/habits/{habit_id}/skip", method='POST')

        # Attempt to skip with 0 lives
        result = http_request(f"{base_url}/api/habits/{habit_id}/skip", method='POST')

        # Verify 400 error (http_request converts HTTPError into a dict)
        assert result['status'] == 400, "Should return 400 status"
        assert 'error' in result, "Should return error message"

        print("✓ Test 4: Skip with 0 lives → returns 400 error")

    finally:
        server.shutdown()
        teardown_test_env(temp_dir)
|
||||
|
||||
|
||||
def test_05_edit_frequency_changes_should_check_today():
    """Integration test: editing frequency updates should_check_today.

    Creates a daily habit (should_check_today must be True), PUTs an update
    switching it to specific_days (monday/wednesday), and asserts the GET
    response's should_check_today matches whether today is one of those days.
    """
    temp_dir = setup_test_env()
    server = start_test_server()
    base_url = f"http://localhost:{server.server_port}"

    try:
        # Create daily habit
        habit_data = {
            'name': 'Code review',
            'category': 'work',
            'color': '#F59E0B',
            'icon': 'code',
            'priority': 50,
            'frequency': {'type': 'daily'}
        }

        result = http_request(f"{base_url}/api/habits", method='POST', data=habit_data)
        habit_id = result['id']

        # Verify should_check_today is True for daily habit
        habits = http_request(f"{base_url}/api/habits")
        habit = next(h for h in habits if h['id'] == habit_id)
        assert habit['should_check_today'] == True, "Daily habit should be checkable today"

        # Edit to specific_days (only Monday and Wednesday)
        update_data = {
            'name': 'Code review',
            'category': 'work',
            'color': '#F59E0B',
            'icon': 'code',
            'priority': 50,
            'frequency': {
                'type': 'specific_days',
                'days': ['monday', 'wednesday']
            }
        }

        http_request(f"{base_url}/api/habits/{habit_id}", method='PUT', data=update_data)

        # Get updated habit
        habits = http_request(f"{base_url}/api/habits")
        habit = next(h for h in habits if h['id'] == habit_id)

        # Verify should_check_today reflects new frequency
        # (expected value depends on the real weekday the test runs on)
        today_name = datetime.now().strftime('%A').lower()
        expected = today_name in ['monday', 'wednesday']
        assert habit['should_check_today'] == expected, f"Should check today should be {expected} for {today_name}"

        print(f"✓ Test 5: Edit frequency → should_check_today is {expected} for {today_name}")

    finally:
        server.shutdown()
        teardown_test_env(temp_dir)
|
||||
|
||||
|
||||
def test_06_delete_removes_habit_from_storage():
    """Integration test: DELETE removes the habit from API and storage.

    Creates a habit, confirms it appears in GET, DELETEs it, then verifies
    both the GET response and the underlying habits.json file are empty.
    """
    temp_dir = setup_test_env()
    server = start_test_server()
    base_url = f"http://localhost:{server.server_port}"

    try:
        # Create habit
        habit_data = {
            'name': 'Guitar practice',
            'category': 'personal',
            'color': '#EC4899',
            'icon': 'music',
            'priority': 50,
            'frequency': {'type': 'daily'}
        }

        result = http_request(f"{base_url}/api/habits", method='POST', data=habit_data)
        habit_id = result['id']

        # Verify habit exists
        habits = http_request(f"{base_url}/api/habits")
        assert len(habits) == 1, "Should have 1 habit"
        assert habits[0]['id'] == habit_id, "Should be the created habit"

        # Delete habit
        http_request(f"{base_url}/api/habits/{habit_id}", method='DELETE')

        # Verify habit removed
        habits = http_request(f"{base_url}/api/habits")
        assert len(habits) == 0, "Should have 0 habits after delete"

        # Verify not in storage file (checks persistence, not just the API view)
        import api
        with open(api.HABITS_FILE, 'r') as f:
            data = json.load(f)

        assert len(data['habits']) == 0, "Storage file should have 0 habits"

        print("✓ Test 6: Delete → habit removed from storage")

    finally:
        server.shutdown()
        teardown_test_env(temp_dir)
|
||||
|
||||
|
||||
def test_07_checkin_on_wrong_day_for_specific_days_returns_400():
    """Integration test: check-in on a non-scheduled day → HTTP 400.

    Builds a specific_days habit whose allowed days deliberately exclude
    today's weekday, attempts a check-in, and asserts the API rejects it
    with status 400 and an error payload.
    """
    temp_dir = setup_test_env()
    server = start_test_server()
    base_url = f"http://localhost:{server.server_port}"

    try:
        # Get today's day name
        today_name = datetime.now().strftime('%A').lower()

        # Create habit for different days (not today)
        if today_name == 'monday':
            allowed_days = ['tuesday', 'wednesday']
        elif today_name == 'tuesday':
            allowed_days = ['monday', 'wednesday']
        else:
            allowed_days = ['monday', 'tuesday']

        habit_data = {
            'name': 'Gym workout',
            'category': 'health',
            'color': '#EF4444',
            'icon': 'dumbbell',
            'priority': 50,
            'frequency': {
                'type': 'specific_days',
                'days': allowed_days
            }
        }

        result = http_request(f"{base_url}/api/habits", method='POST', data=habit_data)
        habit_id = result['id']

        # Attempt to check in today (wrong day)
        result = http_request(f"{base_url}/api/habits/{habit_id}/check", method='POST')

        # Verify 400 error
        assert result['status'] == 400, "Should return 400 status"
        assert 'error' in result, "Should return error message"

        print(f"✓ Test 7: Check-in on {today_name} (not in {allowed_days}) → returns 400")

    finally:
        server.shutdown()
        teardown_test_env(temp_dir)
|
||||
|
||||
|
||||
def test_08_get_response_includes_all_stats():
    """Integration test: GET /api/habits enriches habits with stats.

    Seeds a habit with a 3-day completion history directly in the store,
    then asserts the GET response contains all derived fields
    (current_streak, best_streak, completion_rate_30d, weekly_summary,
    should_check_today) with the expected streak values and a
    weekly_summary keyed by every weekday name.
    """
    temp_dir = setup_test_env()
    server = start_test_server()
    base_url = f"http://localhost:{server.server_port}"

    try:
        # Create habit with some completions
        habit_data = {
            'name': 'Meditation',
            'category': 'health',
            'color': '#10B981',
            'icon': 'brain',
            'priority': 50,
            'frequency': {'type': 'daily'}
        }

        result = http_request(f"{base_url}/api/habits", method='POST', data=habit_data)
        habit_id = result['id']

        # Add some completions
        import api
        with open(api.HABITS_FILE, 'r') as f:
            data = json.load(f)

        habit_obj = next(h for h in data['habits'] if h['id'] == habit_id)

        # Add completions for last 3 days
        for i in range(3):
            check_date = (datetime.now() - timedelta(days=2-i)).date().isoformat()
            habit_obj['completions'].append({
                'date': check_date,
                'type': 'check'
            })

        habit_obj['streak'] = {
            'current': 3,
            'best': 3,
            'lastCheckIn': datetime.now().date().isoformat()
        }

        # Persist the seeded history so GET reflects it.
        data['lastUpdated'] = datetime.now().isoformat()
        with open(api.HABITS_FILE, 'w') as f:
            json.dump(data, f, indent=2)

        # Get habits
        habits = http_request(f"{base_url}/api/habits")
        habit = habits[0]

        # Verify all enriched stats are present
        assert 'current_streak' in habit, "Should include current_streak"
        assert 'best_streak' in habit, "Should include best_streak"
        assert 'completion_rate_30d' in habit, "Should include completion_rate_30d"
        assert 'weekly_summary' in habit, "Should include weekly_summary"
        assert 'should_check_today' in habit, "Should include should_check_today"

        # Verify streak values
        assert habit['current_streak'] == 3, "Current streak should be 3"
        assert habit['best_streak'] == 3, "Best streak should be 3"

        # Verify weekly_summary structure
        assert isinstance(habit['weekly_summary'], dict), "Weekly summary should be a dict"
        days = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday']
        for day in days:
            assert day in habit['weekly_summary'], f"Weekly summary should include {day}"

        print("✓ Test 8: GET response includes all stats (streak, completion_rate, weekly_summary)")

    finally:
        server.shutdown()
        teardown_test_env(temp_dir)
|
||||
|
||||
|
||||
def test_09_typecheck_passes():
    """Integration test: both dashboard modules byte-compile cleanly.

    Derives the module paths from this file's location and uses the current
    interpreter, instead of the original hard-coded '/home/moltbot/clawd/...'
    paths and bare 'python3', so the check is portable across machines.
    """
    import shlex

    # dashboard/ is the parent of the tests/ directory containing this file.
    dashboard_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

    for module_name in ('api.py', 'habits_helpers.py'):
        module_path = os.path.join(dashboard_dir, module_name)
        # Quote both parts in case either path contains spaces.
        cmd = f"{shlex.quote(sys.executable)} -m py_compile {shlex.quote(module_path)}"
        result = os.system(cmd)
        assert result == 0, f"Typecheck should pass for {module_name}"

    print("✓ Test 9: Typecheck passes")
|
||||
|
||||
|
||||
# Run all tests
|
||||
if __name__ == '__main__':
    # All integration tests, run in numbered order.
    suite = [
        test_01_create_and_checkin_increments_streak,
        test_02_seven_consecutive_checkins_restore_life,
        test_03_skip_with_life_maintains_streak,
        test_04_skip_with_zero_lives_returns_400,
        test_05_edit_frequency_changes_should_check_today,
        test_06_delete_removes_habit_from_storage,
        test_07_checkin_on_wrong_day_for_specific_days_returns_400,
        test_08_get_response_includes_all_stats,
        test_09_typecheck_passes,
    ]

    passed = 0
    failed = 0

    print("Running integration tests...\n")

    for case in suite:
        try:
            case()
        except AssertionError as err:
            # An assertion failure is a normal test failure.
            print(f"✗ {case.__name__}: {err}")
            failed += 1
        except Exception as err:
            # Anything else is a crash; print the traceback for diagnosis.
            print(f"✗ {case.__name__}: Unexpected error: {err}")
            import traceback
            traceback.print_exc()
            failed += 1
        else:
            passed += 1

    print(f"\n{'='*50}")
    print(f"Integration Tests: {passed} passed, {failed} failed")
    print(f"{'='*50}")

    sys.exit(0 if failed == 0 else 1)
|
||||
134
dashboard/tests/test_weekly_lives_integration.py
Normal file
134
dashboard/tests/test_weekly_lives_integration.py
Normal file
@@ -0,0 +1,134 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Integration test for weekly lives recovery feature.
|
||||
|
||||
Tests the full flow:
|
||||
1. Habit has check-ins in previous week
|
||||
2. Check-in today triggers weekly lives recovery
|
||||
3. Response includes livesAwarded flag
|
||||
4. Lives count increases
|
||||
5. Duplicate awards are prevented
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
from datetime import datetime, timedelta
|
||||
import json
|
||||
|
||||
# Add parent directory to path
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
from habits_helpers import check_and_award_weekly_lives
|
||||
|
||||
|
||||
def test_integration_weekly_lives_award():
    """Exercise check_and_award_weekly_lives across five scenarios.

    Scenarios covered:
      1. ≥1 check-in in the previous week → +1 life awarded.
      2. lastLivesAward already set to this week → no duplicate award.
      3. Only 'skip' completions in the previous week → no award.
      4. Lives already above 3 → award still granted (no cap).
      5. No completions at all → no award.
    """
    print("\n=== Testing Weekly Lives Recovery Integration ===\n")

    # Week boundaries: weekday() is 0 on Monday, so this is the Monday of
    # the current week; the previous week starts 7 days earlier.
    today = datetime.now().date()
    current_week_start = today - timedelta(days=today.weekday())
    previous_week_start = current_week_start - timedelta(days=7)

    # Scenario 1: New habit with check-ins in previous week
    print("Scenario 1: First award of the week")
    habit = {
        "id": "test-habit-1",
        "name": "Test Habit",
        "lives": 2,
        "completions": [
            {"date": (previous_week_start + timedelta(days=2)).isoformat(), "type": "check"},
            {"date": (previous_week_start + timedelta(days=4)).isoformat(), "type": "check"},
        ]
    }

    new_lives, was_awarded = check_and_award_weekly_lives(habit)

    assert was_awarded == True, "Expected life to be awarded"
    assert new_lives == 3, f"Expected 3 lives, got {new_lives}"
    print(f"✓ Lives awarded: {habit['lives']} → {new_lives}")
    print(f"✓ Award flag: {was_awarded}")

    # Scenario 2: Already awarded this week
    print("\nScenario 2: Prevent duplicate award")
    habit['lives'] = new_lives
    habit['lastLivesAward'] = current_week_start.isoformat()

    new_lives2, was_awarded2 = check_and_award_weekly_lives(habit)

    assert was_awarded2 == False, "Expected no duplicate award"
    assert new_lives2 == 3, f"Lives should remain at 3, got {new_lives2}"
    print(f"✓ No duplicate award: lives remain at {new_lives2}")

    # Scenario 3: Only skips in previous week
    print("\nScenario 3: Skips don't qualify for recovery")
    habit_with_skips = {
        "id": "test-habit-2",
        "name": "Habit with Skips",
        "lives": 1,
        "completions": [
            {"date": (previous_week_start + timedelta(days=2)).isoformat(), "type": "skip"},
            {"date": (previous_week_start + timedelta(days=4)).isoformat(), "type": "skip"},
        ]
    }

    new_lives3, was_awarded3 = check_and_award_weekly_lives(habit_with_skips)

    assert was_awarded3 == False, "Skips shouldn't trigger award"
    assert new_lives3 == 1, f"Lives should remain at 1, got {new_lives3}"
    print(f"✓ Skips don't count: lives remain at {new_lives3}")

    # Scenario 4: No cap on lives (can go beyond 3)
    print("\nScenario 4: Lives can exceed 3")
    habit_many_lives = {
        "id": "test-habit-3",
        "name": "Habit with Many Lives",
        "lives": 5,
        "completions": [
            {"date": (previous_week_start + timedelta(days=2)).isoformat(), "type": "check"},
        ]
    }

    new_lives4, was_awarded4 = check_and_award_weekly_lives(habit_many_lives)

    assert was_awarded4 == True, "Expected life to be awarded"
    assert new_lives4 == 6, f"Expected 6 lives, got {new_lives4}"
    print(f"✓ No cap: lives increased from 5 → {new_lives4}")

    # Scenario 5: No check-ins in previous week
    print("\nScenario 5: No check-ins = no award")
    habit_no_checkins = {
        "id": "test-habit-4",
        "name": "New Habit",
        "lives": 2,
        "completions": []
    }

    new_lives5, was_awarded5 = check_and_award_weekly_lives(habit_no_checkins)

    assert was_awarded5 == False, "No check-ins = no award"
    assert new_lives5 == 2, f"Lives should remain at 2, got {new_lives5}"
    print(f"✓ No previous week check-ins: lives remain at {new_lives5}")

    print("\n=== All Integration Tests Passed! ===\n")

    # Print summary of the feature
    print("Feature Summary:")
    print("• +1 life awarded per week if habit had ≥1 check-in in previous week")
    print("• Monday-Sunday week boundaries (ISO 8601)")
    print("• Award triggers on first check-in of current week")
    print("• Skips don't count toward recovery")
    print("• No cap on lives (can accumulate beyond 3)")
    print("• Prevents duplicate awards in same week")
    print("")
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Run the single integration test and map the outcome to an exit code.
    exit_code = 0
    try:
        test_integration_weekly_lives_award()
    except AssertionError as err:
        print(f"\n✗ Test failed: {err}\n")
        exit_code = 1
    except Exception as err:
        print(f"\n✗ Unexpected error: {type(err).__name__}: {err}\n")
        exit_code = 1
    sys.exit(exit_code)
|
||||
@@ -1,6 +1,19 @@
|
||||
{
|
||||
"lastUpdated": "2026-02-09T19:00:00.000Z",
|
||||
"lastUpdated": "2026-02-11T07:00:00.000Z",
|
||||
"items": [
|
||||
{
|
||||
"id": "prov-2026-02-11",
|
||||
"text": "Provocare: Identifică un task pe care îl execuți singur și ar putea fi orchestrat",
|
||||
"context": "Alege UNA din variantele: (1) Delegat la angajat - task repetitiv pe care îl faci de 10 ori și ar putea învăța? (2) Automatizat cu Echo - verificare/raport/backup care rulează manual? (3) Modelat de la colegă - proces pe care ea îl face excelent și tu îl faci mai greu? (4) Documentat pentru viitor - explicație pe care o repeți la fiecare client nou? La 17:00 notează: Ce task? Cum ar arăta orchestrat? Primul pas minim pentru orchestrare? Nu implementa imediat - doar identifică și scrie. Conștientizarea e primul pas.",
|
||||
"example": "Exemple reale: (1) Explicația cum să adauge client nou în ROA - ai făcut-o de 10 ori la angajat, ar putea fi screencast + checklist. (2) Verificarea zilnică backups - rulează manual, ar putea fi script Echo automat cu alertă doar dacă fail. (3) Suportul tehnic calm - colega face excelent, tu mai nervos, ar putea cere să te învețe procesul TOTE intern. (4) Setup ANAF pentru client nou - repeți aceiași pași, ar putea fi documentație step-by-step pe care Echo o trimite automat.",
|
||||
"domain": "work",
|
||||
"dueDate": "2026-02-11",
|
||||
"done": false,
|
||||
"doneAt": null,
|
||||
"source": "Claude Code Multi-Agent Orchestration + TDi Mindset Entrepreneurship",
|
||||
"sourceUrl": "https://moltbot.tailf7372d.ts.net/echo/files.html#memory/kb/coaching/2026-02-11-dimineata.md",
|
||||
"createdAt": "2026-02-11T07:00:00.000Z"
|
||||
},
|
||||
{
|
||||
"id": "prov-2026-02-10",
|
||||
"text": "Provocare: Body Loose, Head Clear - verifică corpul înainte de situație tensionată",
|
||||
|
||||
@@ -440,6 +440,10 @@
|
||||
<i data-lucide="file-text"></i>
|
||||
<span>KB</span>
|
||||
</a>
|
||||
<a href="/echo/habits.html" class="nav-item">
|
||||
<i data-lucide="dumbbell"></i>
|
||||
<span>Habits</span>
|
||||
</a>
|
||||
<a href="/echo/files.html" class="nav-item">
|
||||
<i data-lucide="folder"></i>
|
||||
<span>Files</span>
|
||||
|
||||
Reference in New Issue
Block a user