Initial commit
commit 983cee0320
322 changed files with 57174 additions and 0 deletions

app/routes/admin.py (new file, 110 lines)
@@ -0,0 +1,110 @@
from flask import Blueprint, request, jsonify
from flask_login import login_required, current_user
from app import db, bcrypt
from app.models import User, Expense, Category
from functools import wraps

bp = Blueprint('admin', __name__, url_prefix='/api/admin')


def admin_required(f):
    @wraps(f)
    def decorated_function(*args, **kwargs):
        if not current_user.is_admin:
            return jsonify({'success': False, 'message': 'Admin access required'}), 403
        return f(*args, **kwargs)
    return decorated_function


@bp.route('/users', methods=['GET'])
@login_required
@admin_required
def get_users():
    users = User.query.all()
    return jsonify({
        'users': [{
            'id': user.id,
            'username': user.username,
            'email': user.email,
            'is_admin': user.is_admin,
            'language': user.language,
            'currency': user.currency,
            'two_factor_enabled': user.two_factor_enabled,
            'created_at': user.created_at.isoformat()
        } for user in users]
    })


@bp.route('/users', methods=['POST'])
@login_required
@admin_required
def create_user():
    data = request.get_json()

    if not data.get('username') or not data.get('email') or not data.get('password'):
        return jsonify({'success': False, 'message': 'Missing required fields'}), 400

    # Check if user exists
    if User.query.filter_by(email=data['email']).first():
        return jsonify({'success': False, 'message': 'Email already exists'}), 400

    if User.query.filter_by(username=data['username']).first():
        return jsonify({'success': False, 'message': 'Username already exists'}), 400

    # Create user
    password_hash = bcrypt.generate_password_hash(data['password']).decode('utf-8')
    user = User(
        username=data['username'],
        email=data['email'],
        password_hash=password_hash,
        is_admin=data.get('is_admin', False),
        language=data.get('language', 'en'),
        currency=data.get('currency', 'USD')
    )

    db.session.add(user)
    db.session.commit()

    # Create default categories
    from app.utils import create_default_categories
    create_default_categories(user.id)

    return jsonify({
        'success': True,
        'user': {
            'id': user.id,
            'username': user.username,
            'email': user.email
        }
    }), 201


@bp.route('/users/<int:user_id>', methods=['DELETE'])
@login_required
@admin_required
def delete_user(user_id):
    if user_id == current_user.id:
        return jsonify({'success': False, 'message': 'Cannot delete yourself'}), 400

    user = User.query.get(user_id)
    if not user:
        return jsonify({'success': False, 'message': 'User not found'}), 404

    db.session.delete(user)
    db.session.commit()

    return jsonify({'success': True, 'message': 'User deleted'})


@bp.route('/stats', methods=['GET'])
@login_required
@admin_required
def get_stats():
    total_users = User.query.count()
    total_expenses = Expense.query.count()
    total_categories = Category.query.count()

    return jsonify({
        'total_users': total_users,
        'total_expenses': total_expenses,
        'total_categories': total_categories
    })
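
Note: admin_required only checks the is_admin flag, so it is always stacked inside @login_required, which guarantees an authenticated current_user before the flag is read. A minimal sketch of that decorator order on a hypothetical extra route (not part of the file above):

    # Sketch only: login_required runs first, then the admin flag is checked.
    @bp.route('/example', methods=['GET'])   # hypothetical route for illustration
    @login_required
    @admin_required
    def example_admin_view():
        return jsonify({'success': True})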

app/routes/auth.py (new file, 360 lines)
@@ -0,0 +1,360 @@
from flask import Blueprint, render_template, redirect, url_for, flash, request, session, send_file, make_response
from flask_login import login_user, logout_user, login_required, current_user
from app import db, bcrypt
from app.models import User
import pyotp
import qrcode
import io
import base64
import secrets
import json
from datetime import datetime

bp = Blueprint('auth', __name__, url_prefix='/auth')


def generate_backup_codes(count=10):
    """Generate backup codes for 2FA"""
    codes = []
    for _ in range(count):
        # Generate 8-character alphanumeric code
        code = ''.join(secrets.choice('ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789') for _ in range(8))
        # Format as XXXX-XXXX for readability
        formatted_code = f"{code[:4]}-{code[4:]}"
        codes.append(formatted_code)
    return codes


def hash_backup_codes(codes):
    """Hash backup codes for secure storage"""
    return [bcrypt.generate_password_hash(code).decode('utf-8') for code in codes]


def verify_backup_code(user, code):
    """Verify a backup code and mark it as used"""
    if not user.backup_codes:
        return False

    stored_codes = json.loads(user.backup_codes)

    for i, hashed_code in enumerate(stored_codes):
        if bcrypt.check_password_hash(hashed_code, code):
            # Remove used code
            stored_codes.pop(i)
            user.backup_codes = json.dumps(stored_codes)
            db.session.commit()
            return True

    return False


@bp.route('/login', methods=['GET', 'POST'])
def login():
    if current_user.is_authenticated:
        return redirect(url_for('main.dashboard'))

    if request.method == 'POST':
        data = request.get_json() if request.is_json else request.form
        username = data.get('username')
        password = data.get('password')
        two_factor_code = data.get('two_factor_code')
        remember = data.get('remember', False)

        # Accept both username and email
        user = User.query.filter((User.username == username) | (User.email == username)).first()

        if user and bcrypt.check_password_hash(user.password_hash, password):
            # Check 2FA if enabled
            if user.two_factor_enabled:
                if not two_factor_code:
                    if request.is_json:
                        return {'success': False, 'requires_2fa': True}, 200
                    session['pending_user_id'] = user.id
                    return render_template('auth/two_factor.html')

                # Try TOTP code first
                totp = pyotp.TOTP(user.totp_secret)
                is_valid = totp.verify(two_factor_code)

                # If TOTP fails, try backup code (format: XXXX-XXXX or XXXXXXXX)
                if not is_valid:
                    is_valid = verify_backup_code(user, two_factor_code)

                if not is_valid:
                    if request.is_json:
                        return {'success': False, 'message': 'Invalid 2FA code'}, 401
                    flash('Invalid 2FA code', 'error')
                    return render_template('auth/login.html')

            login_user(user, remember=remember)
            session.permanent = remember

            if request.is_json:
                return {'success': True, 'redirect': url_for('main.dashboard')}

            next_page = request.args.get('next')
            return redirect(next_page if next_page else url_for('main.dashboard'))

        if request.is_json:
            return {'success': False, 'message': 'Invalid username or password'}, 401

        flash('Invalid username or password', 'error')

    return render_template('auth/login.html')


@bp.route('/register', methods=['GET', 'POST'])
def register():
    if current_user.is_authenticated:
        return redirect(url_for('main.dashboard'))

    if request.method == 'POST':
        data = request.get_json() if request.is_json else request.form
        username = data.get('username')
        email = data.get('email')
        password = data.get('password')
        language = data.get('language', 'en')
        currency = data.get('currency', 'USD')

        # Check if user exists
        if User.query.filter_by(email=email).first():
            if request.is_json:
                return {'success': False, 'message': 'Email already registered'}, 400
            flash('Email already registered', 'error')
            return render_template('auth/register.html')

        if User.query.filter_by(username=username).first():
            if request.is_json:
                return {'success': False, 'message': 'Username already taken'}, 400
            flash('Username already taken', 'error')
            return render_template('auth/register.html')

        # Check if this is the first user (make them admin)
        is_first_user = User.query.count() == 0

        # Create user
        password_hash = bcrypt.generate_password_hash(password).decode('utf-8')
        user = User(
            username=username,
            email=email,
            password_hash=password_hash,
            is_admin=is_first_user,
            language=language,
            currency=currency
        )

        db.session.add(user)
        db.session.commit()

        # Create default categories
        from app.utils import create_default_categories
        create_default_categories(user.id)

        login_user(user)

        if request.is_json:
            return {'success': True, 'redirect': url_for('main.dashboard')}

        flash('Registration successful!', 'success')
        return redirect(url_for('main.dashboard'))

    return render_template('auth/register.html')


@bp.route('/logout')
@login_required
def logout():
    logout_user()
    return redirect(url_for('auth.login'))


@bp.route('/setup-2fa', methods=['GET', 'POST'])
@login_required
def setup_2fa():
    if request.method == 'POST':
        data = request.get_json() if request.is_json else request.form
        code = data.get('code')

        if not current_user.totp_secret:
            secret = pyotp.random_base32()
            current_user.totp_secret = secret

        totp = pyotp.TOTP(current_user.totp_secret)

        if totp.verify(code):
            # Generate backup codes
            backup_codes_plain = generate_backup_codes(10)
            backup_codes_hashed = hash_backup_codes(backup_codes_plain)

            current_user.two_factor_enabled = True
            current_user.backup_codes = json.dumps(backup_codes_hashed)
            db.session.commit()

            # Store plain backup codes in session for display
            session['backup_codes'] = backup_codes_plain

            if request.is_json:
                return {'success': True, 'message': '2FA enabled successfully', 'backup_codes': backup_codes_plain}

            flash('2FA enabled successfully', 'success')
            return redirect(url_for('auth.show_backup_codes'))

        if request.is_json:
            return {'success': False, 'message': 'Invalid code'}, 400

        flash('Invalid code', 'error')

    # Generate QR code
    if not current_user.totp_secret:
        current_user.totp_secret = pyotp.random_base32()
        db.session.commit()

    totp = pyotp.TOTP(current_user.totp_secret)
    provisioning_uri = totp.provisioning_uri(
        name=current_user.email,
        issuer_name='FINA'
    )

    # Generate QR code image
    qr = qrcode.QRCode(version=1, box_size=10, border=5)
    qr.add_data(provisioning_uri)
    qr.make(fit=True)
    img = qr.make_image(fill_color="black", back_color="white")

    buf = io.BytesIO()
    img.save(buf, format='PNG')
    buf.seek(0)
    qr_code_base64 = base64.b64encode(buf.getvalue()).decode()

    return render_template('auth/setup_2fa.html',
                           qr_code=qr_code_base64,
                           secret=current_user.totp_secret)


@bp.route('/backup-codes', methods=['GET'])
@login_required
def show_backup_codes():
    """Display backup codes after 2FA setup"""
    backup_codes = session.get('backup_codes', [])

    if not backup_codes:
        flash('No backup codes available', 'error')
        return redirect(url_for('main.settings'))

    return render_template('auth/backup_codes.html',
                           backup_codes=backup_codes,
                           username=current_user.username)


@bp.route('/backup-codes/download', methods=['GET'])
@login_required
def download_backup_codes_pdf():
    """Download backup codes as PDF"""
    backup_codes = session.get('backup_codes', [])

    if not backup_codes:
        flash('No backup codes available', 'error')
        return redirect(url_for('main.settings'))

    try:
        from reportlab.lib.pagesizes import letter
        from reportlab.lib.units import inch
        from reportlab.pdfgen import canvas
        from reportlab.lib import colors

        # Create PDF in memory
        buffer = io.BytesIO()
        c = canvas.Canvas(buffer, pagesize=letter)
        width, height = letter

        # Title
        c.setFont("Helvetica-Bold", 24)
        c.drawCentredString(width/2, height - 1*inch, "FINA")

        c.setFont("Helvetica-Bold", 18)
        c.drawCentredString(width/2, height - 1.5*inch, "Two-Factor Authentication")
        c.drawCentredString(width/2, height - 1.9*inch, "Backup Codes")

        # User info
        c.setFont("Helvetica", 12)
        c.drawString(1*inch, height - 2.5*inch, f"User: {current_user.username}")
        c.drawString(1*inch, height - 2.8*inch, f"Email: {current_user.email}")
        c.drawString(1*inch, height - 3.1*inch, f"Generated: {datetime.utcnow().strftime('%Y-%m-%d %H:%M UTC')}")

        # Warning message
        c.setFillColorRGB(0.8, 0.2, 0.2)
        c.setFont("Helvetica-Bold", 11)
        c.drawString(1*inch, height - 3.7*inch, "IMPORTANT: Store these codes in a secure location!")
        c.setFillColorRGB(0, 0, 0)
        c.setFont("Helvetica", 10)
        c.drawString(1*inch, height - 4.0*inch, "Each code can only be used once. Use them if you lose access to your authenticator app.")

        # Backup codes in two columns
        c.setFont("Courier-Bold", 14)
        y_position = height - 4.8*inch
        x_left = 1.5*inch
        x_right = 4.5*inch

        for i, code in enumerate(backup_codes):
            if i % 2 == 0:
                c.drawString(x_left, y_position, f"{i+1:2d}. {code}")
            else:
                c.drawString(x_right, y_position, f"{i+1:2d}. {code}")
                y_position -= 0.4*inch

        # Footer
        c.setFont("Helvetica", 8)
        c.setFillColorRGB(0.5, 0.5, 0.5)
        c.drawCentredString(width/2, 0.5*inch, "Keep this document secure and do not share these codes with anyone.")

        c.save()
        buffer.seek(0)

        # Clear backup codes from session after download
        session.pop('backup_codes', None)

        # Create response with PDF
        response = make_response(buffer.getvalue())
        response.headers['Content-Type'] = 'application/pdf'
        response.headers['Content-Disposition'] = f'attachment; filename=FINA_BackupCodes_{current_user.username}_{datetime.utcnow().strftime("%Y%m%d")}.pdf'

        return response

    except ImportError:
        # If reportlab is not installed, return codes as text file
        text_content = f"FINA - Two-Factor Authentication Backup Codes\n\n"
        text_content += f"User: {current_user.username}\n"
        text_content += f"Email: {current_user.email}\n"
        text_content += f"Generated: {datetime.utcnow().strftime('%Y-%m-%d %H:%M UTC')}\n\n"
        text_content += "IMPORTANT: Store these codes in a secure location!\n"
        text_content += "Each code can only be used once.\n\n"
        text_content += "Backup Codes:\n"
        text_content += "-" * 40 + "\n"

        for i, code in enumerate(backup_codes, 1):
            text_content += f"{i:2d}. {code}\n"

        text_content += "-" * 40 + "\n"
        text_content += "\nKeep this document secure and do not share these codes with anyone."

        # Clear backup codes from session
        session.pop('backup_codes', None)

        response = make_response(text_content)
        response.headers['Content-Type'] = 'text/plain'
        response.headers['Content-Disposition'] = f'attachment; filename=FINA_BackupCodes_{current_user.username}_{datetime.utcnow().strftime("%Y%m%d")}.txt'

        return response


@bp.route('/disable-2fa', methods=['POST'])
@login_required
def disable_2fa():
    current_user.two_factor_enabled = False
    current_user.backup_codes = None
    db.session.commit()

    if request.is_json:
        return {'success': True, 'message': '2FA disabled'}

    flash('2FA disabled', 'success')
    return redirect(url_for('main.settings'))
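
For reference, the 2FA check in login() and setup_2fa() relies on pyotp's time-based verification. A minimal standalone sketch of that call sequence (the secret value here is illustrative, not a real user's):

    import pyotp

    secret = pyotp.random_base32()   # what gets stored as user.totp_secret
    totp = pyotp.TOTP(secret)
    code = totp.now()                # the 6-digit code the authenticator app shows
    assert totp.verify(code)         # the same verify() call used in login()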

app/routes/budget.py (new file, 198 lines)
@@ -0,0 +1,198 @@
"""
|
||||
Budget Alerts API
|
||||
Provides budget status, alerts, and notification management
|
||||
Security: All queries filtered by user_id
|
||||
"""
|
||||
from flask import Blueprint, request, jsonify
|
||||
from flask_login import login_required, current_user
|
||||
from app.models import Category, Expense
|
||||
from app import db
|
||||
from datetime import datetime, timedelta
|
||||
from sqlalchemy import func
|
||||
|
||||
bp = Blueprint('budget', __name__, url_prefix='/api/budget')
|
||||
|
||||
|
||||
@bp.route('/status', methods=['GET'])
|
||||
@login_required
|
||||
def get_budget_status():
|
||||
"""
|
||||
Get budget status for all user categories and overall monthly budget
|
||||
Security: Only returns current user's data
|
||||
|
||||
Returns:
|
||||
- overall: Total spending vs monthly budget
|
||||
- categories: Per-category budget status
|
||||
- alerts: Active budget alerts
|
||||
"""
|
||||
# Get current month date range
|
||||
now = datetime.utcnow()
|
||||
start_of_month = now.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
|
||||
|
||||
# Calculate overall monthly spending - Security: filter by user_id
|
||||
total_spent = db.session.query(func.sum(Expense.amount)).filter(
|
||||
Expense.user_id == current_user.id,
|
||||
Expense.date >= start_of_month
|
||||
).scalar() or 0.0
|
||||
|
||||
overall_status = {
|
||||
'spent': float(total_spent),
|
||||
'budget': current_user.monthly_budget or 0,
|
||||
'remaining': (current_user.monthly_budget or 0) - float(total_spent),
|
||||
'percentage': 0 if not current_user.monthly_budget else round((float(total_spent) / current_user.monthly_budget) * 100, 1),
|
||||
'alert_level': 'none'
|
||||
}
|
||||
|
||||
# Determine overall alert level
|
||||
if current_user.monthly_budget and current_user.monthly_budget > 0:
|
||||
if overall_status['percentage'] >= 100:
|
||||
overall_status['alert_level'] = 'exceeded'
|
||||
elif overall_status['percentage'] >= 90:
|
||||
overall_status['alert_level'] = 'danger'
|
||||
elif overall_status['percentage'] >= 80:
|
||||
overall_status['alert_level'] = 'warning'
|
||||
|
||||
# Get category budgets - Security: filter by user_id
|
||||
categories = Category.query.filter_by(user_id=current_user.id).all()
|
||||
category_statuses = []
|
||||
active_alerts = []
|
||||
|
||||
for category in categories:
|
||||
if category.monthly_budget and category.monthly_budget > 0:
|
||||
status = category.get_budget_status()
|
||||
category_statuses.append({
|
||||
'category_id': category.id,
|
||||
'category_name': category.name,
|
||||
'category_color': category.color,
|
||||
'category_icon': category.icon,
|
||||
**status
|
||||
})
|
||||
|
||||
# Add to alerts if over threshold
|
||||
if status['alert_level'] in ['warning', 'danger', 'exceeded']:
|
||||
active_alerts.append({
|
||||
'category_id': category.id,
|
||||
'category_name': category.name,
|
||||
'category_color': category.color,
|
||||
'alert_level': status['alert_level'],
|
||||
'percentage': status['percentage'],
|
||||
'spent': status['spent'],
|
||||
'budget': status['budget'],
|
||||
'remaining': status['remaining']
|
||||
})
|
||||
|
||||
# Sort alerts by severity
|
||||
alert_order = {'exceeded': 0, 'danger': 1, 'warning': 2}
|
||||
active_alerts.sort(key=lambda x: (alert_order[x['alert_level']], -x['percentage']))
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'overall': overall_status,
|
||||
'categories': category_statuses,
|
||||
'alerts': active_alerts,
|
||||
'alert_count': len(active_alerts)
|
||||
})
|
||||
|
||||
|
||||
@bp.route('/weekly-summary', methods=['GET'])
|
||||
@login_required
|
||||
def get_weekly_summary():
|
||||
"""
|
||||
Get weekly spending summary for notification
|
||||
Security: Only returns current user's data
|
||||
|
||||
Returns:
|
||||
- week_total: Total spent this week
|
||||
- daily_average: Average per day
|
||||
- top_category: Highest spending category
|
||||
- comparison: vs previous week
|
||||
"""
|
||||
now = datetime.utcnow()
|
||||
week_start = now - timedelta(days=now.weekday()) # Monday
|
||||
week_start = week_start.replace(hour=0, minute=0, second=0, microsecond=0)
|
||||
|
||||
prev_week_start = week_start - timedelta(days=7)
|
||||
|
||||
# Current week spending - Security: filter by user_id
|
||||
current_week_expenses = Expense.query.filter(
|
||||
Expense.user_id == current_user.id,
|
||||
Expense.date >= week_start
|
||||
).all()
|
||||
|
||||
week_total = sum(e.amount for e in current_week_expenses)
|
||||
daily_average = week_total / max(1, (now - week_start).days + 1)
|
||||
|
||||
# Previous week for comparison
|
||||
prev_week_expenses = Expense.query.filter(
|
||||
Expense.user_id == current_user.id,
|
||||
Expense.date >= prev_week_start,
|
||||
Expense.date < week_start
|
||||
).all()
|
||||
|
||||
prev_week_total = sum(e.amount for e in prev_week_expenses)
|
||||
change_percent = 0
|
||||
if prev_week_total > 0:
|
||||
change_percent = ((week_total - prev_week_total) / prev_week_total) * 100
|
||||
|
||||
# Find top category
|
||||
category_totals = {}
|
||||
for expense in current_week_expenses:
|
||||
if expense.category:
|
||||
category_totals[expense.category.name] = category_totals.get(expense.category.name, 0) + expense.amount
|
||||
|
||||
top_category = max(category_totals.items(), key=lambda x: x[1]) if category_totals else (None, 0)
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'week_total': float(week_total),
|
||||
'daily_average': float(daily_average),
|
||||
'previous_week_total': float(prev_week_total),
|
||||
'change_percent': round(change_percent, 1),
|
||||
'top_category': top_category[0] if top_category[0] else 'None',
|
||||
'top_category_amount': float(top_category[1]),
|
||||
'expense_count': len(current_week_expenses),
|
||||
'week_start': week_start.isoformat(),
|
||||
'currency': current_user.currency
|
||||
})
|
||||
|
||||
|
||||
@bp.route('/category/<int:category_id>/budget', methods=['PUT'])
|
||||
@login_required
|
||||
def update_category_budget(category_id):
|
||||
"""
|
||||
Update budget settings for a category
|
||||
Security: Verify category belongs to current user
|
||||
"""
|
||||
# Security check: ensure category belongs to current user
|
||||
category = Category.query.filter_by(id=category_id, user_id=current_user.id).first()
|
||||
|
||||
if not category:
|
||||
return jsonify({'success': False, 'message': 'Category not found'}), 404
|
||||
|
||||
data = request.get_json()
|
||||
|
||||
try:
|
||||
if 'monthly_budget' in data:
|
||||
budget = float(data['monthly_budget']) if data['monthly_budget'] else None
|
||||
if budget is not None and budget < 0:
|
||||
return jsonify({'success': False, 'message': 'Budget cannot be negative'}), 400
|
||||
category.monthly_budget = budget
|
||||
|
||||
if 'budget_alert_threshold' in data:
|
||||
threshold = float(data['budget_alert_threshold'])
|
||||
if threshold < 0.5 or threshold > 2.0:
|
||||
return jsonify({'success': False, 'message': 'Threshold must be between 0.5 (50%) and 2.0 (200%)'}), 400
|
||||
category.budget_alert_threshold = threshold
|
||||
|
||||
db.session.commit()
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'message': 'Budget updated successfully',
|
||||
'category': category.to_dict()
|
||||
})
|
||||
except ValueError as e:
|
||||
return jsonify({'success': False, 'message': f'Invalid data: {str(e)}'}), 400
|
||||
except Exception as e:
|
||||
db.session.rollback()
|
||||
return jsonify({'success': False, 'message': f'Error updating budget: {str(e)}'}), 500
|
||||
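
The overall alert level in get_budget_status() is a simple threshold ladder (80% warning, 90% danger, 100% exceeded). The same logic as a standalone helper, shown only to make the ladder explicit; it is not part of the module:

    def alert_level(spent, budget):
        # Mirrors the ladder used in get_budget_status()
        if not budget or budget <= 0:
            return 'none'
        pct = spent / budget * 100
        if pct >= 100:
            return 'exceeded'
        if pct >= 90:
            return 'danger'
        if pct >= 80:
            return 'warning'
        return 'none'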

app/routes/csv_import.py (new file, 609 lines)
@@ -0,0 +1,609 @@
"""
|
||||
CSV/Bank Statement Import Routes for FINA
|
||||
Handles file upload, parsing, duplicate detection, and category mapping
|
||||
"""
|
||||
from flask import Blueprint, request, jsonify
|
||||
from flask_login import login_required, current_user
|
||||
from werkzeug.utils import secure_filename
|
||||
from app import db
|
||||
from app.models import Expense, Category
|
||||
from datetime import datetime, timedelta
|
||||
from sqlalchemy import and_, or_
|
||||
import csv
|
||||
import io
|
||||
import re
|
||||
import json
|
||||
from decimal import Decimal
|
||||
|
||||
bp = Blueprint('csv_import', __name__, url_prefix='/api/import')
|
||||
|
||||
|
||||
class CSVParser:
|
||||
"""Parse CSV files with auto-detection of format"""
|
||||
|
||||
def __init__(self):
|
||||
self.errors = []
|
||||
|
||||
def detect_delimiter(self, sample):
|
||||
"""Auto-detect CSV delimiter"""
|
||||
delimiters = [',', ';', '\t', '|']
|
||||
counts = {d: sample.count(d) for d in delimiters}
|
||||
return max(counts, key=counts.get)
|
||||
|
||||
def detect_encoding(self, file_bytes):
|
||||
"""Detect file encoding"""
|
||||
encodings = ['utf-8', 'utf-8-sig', 'latin-1', 'cp1252', 'iso-8859-1']
|
||||
for encoding in encodings:
|
||||
try:
|
||||
file_bytes.decode(encoding)
|
||||
return encoding
|
||||
except UnicodeDecodeError:
|
||||
continue
|
||||
return 'utf-8'
|
||||
|
||||
def detect_columns(self, headers):
|
||||
"""Auto-detect which columns contain date, description, amount"""
|
||||
headers_lower = [h.lower().strip() if h else '' for h in headers]
|
||||
|
||||
mapping = {
|
||||
'date': None,
|
||||
'description': None,
|
||||
'amount': None,
|
||||
'debit': None,
|
||||
'credit': None,
|
||||
'category': None
|
||||
}
|
||||
|
||||
# Date column keywords
|
||||
date_keywords = ['date', 'data', 'fecha', 'datum', 'transaction date', 'trans date', 'posting date']
|
||||
for idx, name in enumerate(headers_lower):
|
||||
if any(keyword in name for keyword in date_keywords):
|
||||
mapping['date'] = idx
|
||||
break
|
||||
|
||||
# Description column keywords - prioritize "name" for merchant/payee names
|
||||
# First try to find "name" column (commonly used for merchant/payee)
|
||||
for idx, name in enumerate(headers_lower):
|
||||
if name == 'name' or 'payee' in name or 'merchant name' in name:
|
||||
mapping['description'] = idx
|
||||
break
|
||||
|
||||
# If no "name" column, look for other description columns
|
||||
if mapping['description'] is None:
|
||||
desc_keywords = ['description', 'descriere', 'descripción', 'details', 'detalii', 'merchant',
|
||||
'comerciant', 'narrative', 'memo', 'particulars', 'transaction details']
|
||||
for idx, name in enumerate(headers_lower):
|
||||
if any(keyword in name for keyword in desc_keywords):
|
||||
mapping['description'] = idx
|
||||
break
|
||||
|
||||
# Category column keywords (optional) - avoid generic "type" column that contains payment types
|
||||
# Only use "category" explicitly, not "type" which often contains payment methods
|
||||
for idx, name in enumerate(headers_lower):
|
||||
if name == 'category' or 'categorie' in name or 'categoría' in name:
|
||||
mapping['category'] = idx
|
||||
break
|
||||
|
||||
# Amount columns
|
||||
amount_keywords = ['amount', 'suma', 'monto', 'valoare', 'value']
|
||||
debit_keywords = ['debit', 'withdrawal', 'retragere', 'spent', 'expense', 'cheltuială', 'out']
|
||||
credit_keywords = ['credit', 'deposit', 'depunere', 'income', 'venit', 'in']
|
||||
|
||||
for idx, name in enumerate(headers_lower):
|
||||
if any(keyword in name for keyword in debit_keywords):
|
||||
mapping['debit'] = idx
|
||||
elif any(keyword in name for keyword in credit_keywords):
|
||||
mapping['credit'] = idx
|
||||
elif any(keyword in name for keyword in amount_keywords) and mapping['amount'] is None:
|
||||
mapping['amount'] = idx
|
||||
|
||||
return mapping
|
||||
|
||||
def parse_date(self, date_str):
|
||||
"""Parse date string in various formats"""
|
||||
if not date_str or not isinstance(date_str, str):
|
||||
return None
|
||||
|
||||
date_str = date_str.strip()
|
||||
if not date_str:
|
||||
return None
|
||||
|
||||
# Common date formats
|
||||
formats = [
|
||||
'%d/%m/%Y', '%d-%m-%Y', '%Y-%m-%d', '%Y/%m/%d',
|
||||
'%d.%m.%Y', '%m/%d/%Y', '%d %b %Y', '%d %B %Y',
|
||||
'%Y%m%d', '%d-%b-%Y', '%d-%B-%Y', '%b %d, %Y',
|
||||
'%B %d, %Y', '%Y-%m-%d %H:%M:%S', '%d/%m/%Y %H:%M:%S'
|
||||
]
|
||||
|
||||
for fmt in formats:
|
||||
try:
|
||||
return datetime.strptime(date_str, fmt).date()
|
||||
except ValueError:
|
||||
continue
|
||||
|
||||
return None
|
||||
|
||||
def parse_amount(self, amount_str):
|
||||
"""Parse amount string to float"""
|
||||
if not amount_str:
|
||||
return 0.0
|
||||
|
||||
if isinstance(amount_str, (int, float)):
|
||||
return float(amount_str)
|
||||
|
||||
# Remove currency symbols and spaces
|
||||
amount_str = str(amount_str).strip()
|
||||
amount_str = re.sub(r'[^\d.,\-+]', '', amount_str)
|
||||
|
||||
if not amount_str or amount_str == '-':
|
||||
return 0.0
|
||||
|
||||
try:
|
||||
# Handle European format (1.234,56)
|
||||
if ',' in amount_str and '.' in amount_str:
|
||||
if amount_str.rfind(',') > amount_str.rfind('.'):
|
||||
# European format: 1.234,56
|
||||
amount_str = amount_str.replace('.', '').replace(',', '.')
|
||||
else:
|
||||
# US format: 1,234.56
|
||||
amount_str = amount_str.replace(',', '')
|
||||
elif ',' in amount_str:
|
||||
# Could be European (1,56) or US thousands (1,234)
|
||||
parts = amount_str.split(',')
|
||||
if len(parts[-1]) == 2: # Likely European decimal
|
||||
amount_str = amount_str.replace(',', '.')
|
||||
else: # Likely US thousands
|
||||
amount_str = amount_str.replace(',', '')
|
||||
|
||||
return abs(float(amount_str))
|
||||
except (ValueError, AttributeError):
|
||||
return 0.0
|
||||
|
||||
def parse_csv(self, file_bytes):
|
||||
"""Parse CSV file and extract transactions"""
|
||||
try:
|
||||
# Detect encoding
|
||||
encoding = self.detect_encoding(file_bytes)
|
||||
content = file_bytes.decode(encoding)
|
||||
|
||||
# Detect delimiter
|
||||
first_line = content.split('\n')[0]
|
||||
delimiter = self.detect_delimiter(first_line)
|
||||
|
||||
# Parse CSV
|
||||
stream = io.StringIO(content)
|
||||
reader = csv.reader(stream, delimiter=delimiter)
|
||||
|
||||
# Read headers
|
||||
headers = next(reader, None)
|
||||
if not headers:
|
||||
return {'success': False, 'error': 'CSV file is empty'}
|
||||
|
||||
# Detect column mapping
|
||||
column_map = self.detect_columns(headers)
|
||||
|
||||
if column_map['date'] is None:
|
||||
return {'success': False, 'error': 'Could not detect date column. Please ensure your CSV has a date column.'}
|
||||
|
||||
if column_map['description'] is None:
|
||||
column_map['description'] = 1 if len(headers) > 1 else 0
|
||||
|
||||
# Parse transactions
|
||||
transactions = []
|
||||
row_num = 0
|
||||
|
||||
for row in reader:
|
||||
row_num += 1
|
||||
|
||||
if not row or len(row) == 0:
|
||||
continue
|
||||
|
||||
try:
|
||||
transaction = self.extract_transaction(row, column_map)
|
||||
if transaction:
|
||||
transactions.append(transaction)
|
||||
except Exception as e:
|
||||
self.errors.append(f"Row {row_num}: {str(e)}")
|
||||
|
||||
return {
|
||||
'success': True,
|
||||
'transactions': transactions,
|
||||
'total_found': len(transactions),
|
||||
'column_mapping': {k: headers[v] if v is not None else None for k, v in column_map.items()},
|
||||
'errors': self.errors
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
return {'success': False, 'error': f'Failed to parse CSV: {str(e)}'}
|
||||
|
||||
def extract_transaction(self, row, column_map):
|
||||
"""Extract transaction data from CSV row"""
|
||||
if len(row) <= max(v for v in column_map.values() if v is not None):
|
||||
return None
|
||||
|
||||
# Parse date
|
||||
date_idx = column_map['date']
|
||||
trans_date = self.parse_date(row[date_idx])
|
||||
if not trans_date:
|
||||
return None
|
||||
|
||||
# Parse description
|
||||
desc_idx = column_map['description']
|
||||
description = row[desc_idx].strip() if desc_idx is not None and desc_idx < len(row) else 'Transaction'
|
||||
if not description:
|
||||
description = 'Transaction'
|
||||
|
||||
# Parse amount (handle debit/credit or single amount column)
|
||||
amount = 0.0
|
||||
trans_type = 'expense'
|
||||
|
||||
if column_map['debit'] is not None and column_map['credit'] is not None:
|
||||
debit_val = self.parse_amount(row[column_map['debit']] if column_map['debit'] < len(row) else '0')
|
||||
credit_val = self.parse_amount(row[column_map['credit']] if column_map['credit'] < len(row) else '0')
|
||||
|
||||
if debit_val > 0:
|
||||
amount = debit_val
|
||||
trans_type = 'expense'
|
||||
elif credit_val > 0:
|
||||
amount = credit_val
|
||||
trans_type = 'income'
|
||||
elif column_map['amount'] is not None:
|
||||
amount_val = self.parse_amount(row[column_map['amount']] if column_map['amount'] < len(row) else '0')
|
||||
amount = abs(amount_val)
|
||||
# Negative amounts are expenses, positive are income
|
||||
trans_type = 'expense' if amount_val < 0 or amount_val == 0 else 'income'
|
||||
|
||||
if amount == 0:
|
||||
return None
|
||||
|
||||
# Get bank category if available
|
||||
bank_category = None
|
||||
if column_map['category'] is not None and column_map['category'] < len(row):
|
||||
bank_category = row[column_map['category']].strip()
|
||||
|
||||
return {
|
||||
'date': trans_date.isoformat(),
|
||||
'description': description[:200], # Limit description length
|
||||
'amount': round(amount, 2),
|
||||
'type': trans_type,
|
||||
'bank_category': bank_category
|
||||
}
|
||||
|
||||
|
||||
@bp.route('/parse-csv', methods=['POST'])
|
||||
@login_required
|
||||
def parse_csv():
|
||||
"""
|
||||
Parse uploaded CSV file and return transactions for review
|
||||
Security: User must be authenticated, file size limited
|
||||
"""
|
||||
if 'file' not in request.files:
|
||||
return jsonify({'success': False, 'error': 'No file uploaded'}), 400
|
||||
|
||||
file = request.files['file']
|
||||
|
||||
if not file or not file.filename:
|
||||
return jsonify({'success': False, 'error': 'No file selected'}), 400
|
||||
|
||||
# Security: Validate filename
|
||||
filename = secure_filename(file.filename)
|
||||
if not filename.lower().endswith('.csv'):
|
||||
return jsonify({'success': False, 'error': 'Only CSV files are supported'}), 400
|
||||
|
||||
# Security: Check file size (max 10MB)
|
||||
file_bytes = file.read()
|
||||
if len(file_bytes) > 10 * 1024 * 1024:
|
||||
return jsonify({'success': False, 'error': 'File too large. Maximum size is 10MB'}), 400
|
||||
|
||||
# Parse CSV
|
||||
parser = CSVParser()
|
||||
result = parser.parse_csv(file_bytes)
|
||||
|
||||
if not result['success']:
|
||||
return jsonify(result), 400
|
||||
|
||||
return jsonify(result)
|
||||
|
||||
|
||||
@bp.route('/detect-duplicates', methods=['POST'])
|
||||
@login_required
|
||||
def detect_duplicates():
|
||||
"""
|
||||
Check for duplicate transactions in the database
|
||||
Security: Only checks current user's expenses
|
||||
"""
|
||||
data = request.get_json()
|
||||
transactions = data.get('transactions', [])
|
||||
|
||||
if not transactions:
|
||||
return jsonify({'success': False, 'error': 'No transactions provided'}), 400
|
||||
|
||||
duplicates = []
|
||||
|
||||
for trans in transactions:
|
||||
try:
|
||||
trans_date = datetime.fromisoformat(trans['date']).date()
|
||||
amount = float(trans['amount'])
|
||||
description = trans['description']
|
||||
|
||||
# Look for potential duplicates within ±2 days and exact amount
|
||||
date_start = trans_date - timedelta(days=2)
|
||||
date_end = trans_date + timedelta(days=2)
|
||||
|
||||
# Security: Filter by current user only
|
||||
existing = Expense.query.filter(
|
||||
Expense.user_id == current_user.id,
|
||||
Expense.date >= date_start,
|
||||
Expense.date <= date_end,
|
||||
Expense.amount == amount
|
||||
).all()
|
||||
|
||||
# Check for similar descriptions
|
||||
for exp in existing:
|
||||
# Simple similarity: check if descriptions overlap significantly
|
||||
desc_lower = description.lower()
|
||||
exp_desc_lower = exp.description.lower()
|
||||
|
||||
# Check if at least 50% of words match
|
||||
desc_words = set(desc_lower.split())
|
||||
exp_words = set(exp_desc_lower.split())
|
||||
|
||||
if len(desc_words) > 0:
|
||||
overlap = len(desc_words.intersection(exp_words)) / len(desc_words)
|
||||
if overlap >= 0.5:
|
||||
duplicates.append({
|
||||
'transaction': trans,
|
||||
'existing': {
|
||||
'id': exp.id,
|
||||
'date': exp.date.isoformat(),
|
||||
'description': exp.description,
|
||||
'amount': float(exp.amount),
|
||||
'category': exp.category.name if exp.category else None
|
||||
},
|
||||
'similarity': round(overlap * 100, 0)
|
||||
})
|
||||
break
|
||||
except Exception as e:
|
||||
continue
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'duplicates': duplicates,
|
||||
'duplicate_count': len(duplicates)
|
||||
})
|
||||
|
||||
|
||||
@bp.route('/import', methods=['POST'])
|
||||
@login_required
|
||||
def import_transactions():
|
||||
"""
|
||||
Import selected transactions into the database
|
||||
Security: Only imports to current user's account, validates all data
|
||||
"""
|
||||
data = request.get_json()
|
||||
transactions = data.get('transactions', [])
|
||||
category_mapping = data.get('category_mapping', {})
|
||||
skip_duplicates = data.get('skip_duplicates', False)
|
||||
|
||||
if not transactions:
|
||||
return jsonify({'success': False, 'error': 'No transactions to import'}), 400
|
||||
|
||||
imported = []
|
||||
skipped = []
|
||||
errors = []
|
||||
|
||||
# Security: Get user's categories
|
||||
user_categories = {cat.id: cat for cat in Category.query.filter_by(user_id=current_user.id).all()}
|
||||
|
||||
if not user_categories:
|
||||
return jsonify({'success': False, 'error': 'No categories found. Please create categories first.'}), 400
|
||||
|
||||
# Get default category
|
||||
default_category_id = list(user_categories.keys())[0]
|
||||
|
||||
for idx, trans in enumerate(transactions):
|
||||
try:
|
||||
# Skip if marked as duplicate
|
||||
if skip_duplicates and trans.get('is_duplicate'):
|
||||
skipped.append({'transaction': trans, 'reason': 'Duplicate'})
|
||||
continue
|
||||
|
||||
# Parse and validate data
|
||||
try:
|
||||
trans_date = datetime.fromisoformat(trans['date']).date()
|
||||
except (ValueError, KeyError) as e:
|
||||
errors.append({'transaction': trans, 'error': f'Invalid date: {trans.get("date", "missing")}'})
|
||||
continue
|
||||
|
||||
try:
|
||||
amount = float(trans['amount'])
|
||||
except (ValueError, KeyError, TypeError) as e:
|
||||
errors.append({'transaction': trans, 'error': f'Invalid amount: {trans.get("amount", "missing")}'})
|
||||
continue
|
||||
|
||||
description = trans.get('description', 'Transaction')
|
||||
|
||||
# Validate amount
|
||||
if amount <= 0:
|
||||
errors.append({'transaction': trans, 'error': f'Invalid amount: {amount}'})
|
||||
continue
|
||||
|
||||
# Get category ID from mapping or bank category
|
||||
category_id = None
|
||||
bank_category = trans.get('bank_category')
|
||||
|
||||
# Try to get from explicit mapping
|
||||
if bank_category and bank_category in category_mapping:
|
||||
category_id = int(category_mapping[bank_category])
|
||||
elif str(idx) in category_mapping:
|
||||
category_id = int(category_mapping[str(idx)])
|
||||
else:
|
||||
category_id = default_category_id
|
||||
|
||||
# Security: Verify category belongs to user
|
||||
if category_id not in user_categories:
|
||||
errors.append({'transaction': trans, 'error': f'Invalid category ID: {category_id}'})
|
||||
continue
|
||||
|
||||
# Prepare tags with bank category if available
|
||||
tags = []
|
||||
if bank_category:
|
||||
tags.append(f'Import: {bank_category}')
|
||||
|
||||
# Create expense
|
||||
expense = Expense(
|
||||
user_id=current_user.id,
|
||||
category_id=category_id,
|
||||
amount=amount,
|
||||
description=description,
|
||||
date=trans_date,
|
||||
currency=current_user.currency,
|
||||
tags=json.dumps(tags)
|
||||
)
|
||||
|
||||
db.session.add(expense)
|
||||
imported.append({
|
||||
'date': trans_date.isoformat(),
|
||||
'description': description,
|
||||
'amount': amount,
|
||||
'category': user_categories[category_id].name
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
errors.append({'transaction': trans, 'error': str(e)})
|
||||
|
||||
# Commit all imports
|
||||
try:
|
||||
db.session.commit()
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'imported_count': len(imported),
|
||||
'skipped_count': len(skipped),
|
||||
'error_count': len(errors),
|
||||
'imported': imported,
|
||||
'skipped': skipped,
|
||||
'errors': errors
|
||||
})
|
||||
except Exception as e:
|
||||
db.session.rollback()
|
||||
return jsonify({'success': False, 'error': f'Database error: {str(e)}'}), 500
|
||||
|
||||
|
||||
@bp.route('/create-categories', methods=['POST'])
|
||||
@login_required
|
||||
def create_categories():
|
||||
"""
|
||||
Create missing categories from CSV bank categories
|
||||
Security: Only creates for current user
|
||||
"""
|
||||
data = request.get_json()
|
||||
bank_categories = data.get('bank_categories', [])
|
||||
|
||||
if not bank_categories:
|
||||
return jsonify({'success': False, 'error': 'No categories provided'}), 400
|
||||
|
||||
# Get existing categories for user
|
||||
existing_cats = {cat.name.lower(): cat for cat in Category.query.filter_by(user_id=current_user.id).all()}
|
||||
|
||||
created = []
|
||||
mapping = {}
|
||||
|
||||
for bank_cat in bank_categories:
|
||||
if not bank_cat or not bank_cat.strip():
|
||||
continue
|
||||
|
||||
bank_cat_clean = bank_cat.strip()
|
||||
bank_cat_lower = bank_cat_clean.lower()
|
||||
|
||||
# Check if category already exists
|
||||
if bank_cat_lower in existing_cats:
|
||||
mapping[bank_cat] = existing_cats[bank_cat_lower].id
|
||||
else:
|
||||
# Create new category
|
||||
max_order = db.session.query(db.func.max(Category.display_order)).filter_by(user_id=current_user.id).scalar() or 0
|
||||
new_cat = Category(
|
||||
user_id=current_user.id,
|
||||
name=bank_cat_clean,
|
||||
icon='category',
|
||||
color='#' + format(hash(bank_cat_clean) % 0xFFFFFF, '06x'), # Generate color from name
|
||||
display_order=max_order + 1
|
||||
)
|
||||
db.session.add(new_cat)
|
||||
db.session.flush() # Get ID without committing
|
||||
|
||||
created.append({
|
||||
'name': bank_cat_clean,
|
||||
'id': new_cat.id
|
||||
})
|
||||
mapping[bank_cat] = new_cat.id
|
||||
existing_cats[bank_cat_lower] = new_cat
|
||||
|
||||
try:
|
||||
db.session.commit()
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'created': created,
|
||||
'mapping': mapping,
|
||||
'message': f'Created {len(created)} new categories'
|
||||
})
|
||||
except Exception as e:
|
||||
db.session.rollback()
|
||||
return jsonify({'success': False, 'error': f'Failed to create categories: {str(e)}'}), 500
|
||||
|
||||
|
||||
@bp.route('/suggest-category', methods=['POST'])
|
||||
@login_required
|
||||
def suggest_category():
|
||||
"""
|
||||
Suggest category mapping based on description and existing expenses
|
||||
Uses simple keyword matching and historical patterns
|
||||
"""
|
||||
data = request.get_json()
|
||||
description = data.get('description', '').lower()
|
||||
bank_category = data.get('bank_category', '').lower()
|
||||
|
||||
if not description:
|
||||
return jsonify({'success': False, 'error': 'No description provided'}), 400
|
||||
|
||||
# Security: Get only user's categories
|
||||
user_categories = Category.query.filter_by(user_id=current_user.id).all()
|
||||
|
||||
# Look for similar expenses in user's history
|
||||
similar_expenses = Expense.query.filter(
|
||||
Expense.user_id == current_user.id
|
||||
).order_by(Expense.date.desc()).limit(100).all()
|
||||
|
||||
# Score categories based on keyword matching
|
||||
category_scores = {cat.id: 0 for cat in user_categories}
|
||||
|
||||
for expense in similar_expenses:
|
||||
exp_desc = expense.description.lower()
|
||||
|
||||
# Simple word matching
|
||||
desc_words = set(description.split())
|
||||
exp_words = set(exp_desc.split())
|
||||
overlap = len(desc_words.intersection(exp_words))
|
||||
|
||||
if overlap > 0:
|
||||
category_scores[expense.category_id] += overlap
|
||||
|
||||
# Get best match
|
||||
if max(category_scores.values()) > 0:
|
||||
best_category_id = max(category_scores, key=category_scores.get)
|
||||
best_category = next(cat for cat in user_categories if cat.id == best_category_id)
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'suggested_category_id': best_category.id,
|
||||
'suggested_category_name': best_category.name,
|
||||
'confidence': min(100, category_scores[best_category_id] * 20)
|
||||
})
|
||||
|
||||
# No match found, return first category
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'suggested_category_id': user_categories[0].id,
|
||||
'suggested_category_name': user_categories[0].name,
|
||||
'confidence': 0
|
||||
})
|
||||
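
CSVParser.parse_amount() disambiguates European and US number formats by looking at the right-most separator and the length of the final comma group. A few illustrative calls (values chosen only for the example; the return value is always an absolute float):

    parser = CSVParser()
    parser.parse_amount('1.234,56')    # European grouping      -> 1234.56
    parser.parse_amount('$1,234.56')   # US grouping, $ stripped -> 1234.56
    parser.parse_amount('-12,50')      # comma decimal; abs() drops the sign -> 12.5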

app/routes/documents.py (new file, 262 lines)
@@ -0,0 +1,262 @@
from flask import Blueprint, request, jsonify, send_file, current_app
from flask_login import login_required, current_user
from app import db
from app.models import Document
from werkzeug.utils import secure_filename
import os
import mimetypes
from datetime import datetime
from app.ocr import extract_text_from_file

bp = Blueprint('documents', __name__, url_prefix='/api/documents')

# Max file size: 10MB
MAX_FILE_SIZE = 10 * 1024 * 1024

# Allowed file types for documents
ALLOWED_DOCUMENT_TYPES = {
    'pdf': 'application/pdf',
    'csv': 'text/csv',
    'xlsx': 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
    'xls': 'application/vnd.ms-excel',
    'png': 'image/png',
    'jpg': 'image/jpeg',
    'jpeg': 'image/jpeg'
}

def allowed_document(filename):
    """Check if file type is allowed"""
    return '.' in filename and \
        filename.rsplit('.', 1)[1].lower() in ALLOWED_DOCUMENT_TYPES.keys()

def get_file_type_icon(file_type):
    """Get material icon name for file type"""
    icons = {
        'pdf': 'picture_as_pdf',
        'csv': 'table_view',
        'xlsx': 'table_view',
        'xls': 'table_view',
        'png': 'image',
        'jpg': 'image',
        'jpeg': 'image'
    }
    return icons.get(file_type.lower(), 'description')

@bp.route('/', methods=['GET'])
@login_required
def get_documents():
    """
    Get all documents for current user
    Security: Filters by current_user.id
    """
    page = request.args.get('page', 1, type=int)
    per_page = request.args.get('per_page', 10, type=int)
    search = request.args.get('search', '')

    # Security: Only get documents for current user
    query = Document.query.filter_by(user_id=current_user.id)

    if search:
        query = query.filter(Document.original_filename.ilike(f'%{search}%'))

    pagination = query.order_by(Document.created_at.desc()).paginate(
        page=page, per_page=per_page, error_out=False
    )

    return jsonify({
        'documents': [doc.to_dict() for doc in pagination.items],
        'pagination': {
            'page': page,
            'pages': pagination.pages,
            'total': pagination.total,
            'per_page': per_page
        }
    })


@bp.route('/', methods=['POST'])
@login_required
def upload_document():
    """
    Upload a new document
    Security: Associates document with current_user.id
    """
    if 'file' not in request.files:
        return jsonify({'success': False, 'message': 'No file provided'}), 400

    file = request.files['file']

    if not file or not file.filename:
        return jsonify({'success': False, 'message': 'No file selected'}), 400

    if not allowed_document(file.filename):
        return jsonify({
            'success': False,
            'message': 'Invalid file type. Allowed: PDF, CSV, XLS, XLSX, PNG, JPG'
        }), 400

    # Check file size
    file.seek(0, os.SEEK_END)
    file_size = file.tell()
    file.seek(0)

    if file_size > MAX_FILE_SIZE:
        return jsonify({
            'success': False,
            'message': f'File too large. Maximum size: {MAX_FILE_SIZE // (1024*1024)}MB'
        }), 400

    # Generate secure filename
    original_filename = secure_filename(file.filename)
    file_ext = original_filename.rsplit('.', 1)[1].lower()
    timestamp = datetime.utcnow().timestamp()
    filename = f"{current_user.id}_{timestamp}_{original_filename}"

    # Create documents directory if it doesn't exist
    documents_dir = os.path.join(current_app.config['UPLOAD_FOLDER'], 'documents')
    os.makedirs(documents_dir, exist_ok=True)

    # Save file
    file_path = os.path.join(documents_dir, filename)
    file.save(file_path)

    # Get document category from form data
    document_category = request.form.get('category', 'Other')

    # Process OCR for supported file types (PDF, PNG, JPG, JPEG)
    ocr_text = ""
    if file_ext in ['pdf', 'png', 'jpg', 'jpeg']:
        try:
            # Get absolute path for OCR processing
            abs_file_path = os.path.abspath(file_path)
            ocr_text = extract_text_from_file(abs_file_path, file_ext)
            print(f"OCR extracted {len(ocr_text)} characters from {original_filename}")
        except Exception as e:
            print(f"OCR processing failed for {original_filename}: {str(e)}")
            # Continue without OCR text - non-critical failure

    # Create document record - Security: user_id is current_user.id
    document = Document(
        filename=filename,
        original_filename=original_filename,
        file_path=file_path,
        file_size=file_size,
        file_type=file_ext.upper(),
        mime_type=ALLOWED_DOCUMENT_TYPES.get(file_ext, 'application/octet-stream'),
        document_category=document_category,
        status='uploaded',
        ocr_text=ocr_text,
        user_id=current_user.id
    )

    db.session.add(document)
    db.session.commit()

    return jsonify({
        'success': True,
        'message': 'Document uploaded successfully',
        'document': document.to_dict()
    }), 201


@bp.route('/<int:document_id>/view', methods=['GET'])
@login_required
def view_document(document_id):
    """
    View/preview a document (inline, not download)
    Security: Checks document belongs to current_user
    """
    # Security: Filter by user_id
    document = Document.query.filter_by(id=document_id, user_id=current_user.id).first()

    if not document:
        return jsonify({'success': False, 'message': 'Document not found'}), 404

    if not os.path.exists(document.file_path):
        return jsonify({'success': False, 'message': 'File not found on server'}), 404

    return send_file(
        document.file_path,
        mimetype=document.mime_type,
        as_attachment=False
    )


@bp.route('/<int:document_id>/download', methods=['GET'])
@login_required
def download_document(document_id):
    """
    Download a document
    Security: Checks document belongs to current_user
    """
    # Security: Filter by user_id
    document = Document.query.filter_by(id=document_id, user_id=current_user.id).first()

    if not document:
        return jsonify({'success': False, 'message': 'Document not found'}), 404

    if not os.path.exists(document.file_path):
        return jsonify({'success': False, 'message': 'File not found on server'}), 404

    return send_file(
        document.file_path,
        mimetype=document.mime_type,
        as_attachment=True,
        download_name=document.original_filename
    )


@bp.route('/<int:document_id>', methods=['DELETE'])
@login_required
def delete_document(document_id):
    """
    Delete a document
    Security: Checks document belongs to current_user
    """
    # Security: Filter by user_id
    document = Document.query.filter_by(id=document_id, user_id=current_user.id).first()

    if not document:
        return jsonify({'success': False, 'message': 'Document not found'}), 404

    # Delete physical file
    if os.path.exists(document.file_path):
        try:
            os.remove(document.file_path)
        except Exception as e:
            print(f"Error deleting file: {e}")

    # Delete database record
    db.session.delete(document)
    db.session.commit()

    return jsonify({'success': True, 'message': 'Document deleted successfully'})


@bp.route('/<int:document_id>/status', methods=['PUT'])
@login_required
def update_document_status(document_id):
    """
    Update document status (e.g., mark as analyzed)
    Security: Checks document belongs to current_user
    """
    # Security: Filter by user_id
    document = Document.query.filter_by(id=document_id, user_id=current_user.id).first()

    if not document:
        return jsonify({'success': False, 'message': 'Document not found'}), 404

    data = request.get_json()
    new_status = data.get('status')

    if new_status not in ['uploaded', 'processing', 'analyzed', 'error']:
        return jsonify({'success': False, 'message': 'Invalid status'}), 400

    document.status = new_status
    db.session.commit()

    return jsonify({
        'success': True,
        'message': 'Document status updated',
        'document': document.to_dict()
    })
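
As a usage sketch, upload_document() expects a multipart form with a "file" part and an optional "category" field. Assuming the app is running locally on the default Flask port and the client already holds an authenticated Flask-Login session cookie (both assumptions, not part of the commit):

    import requests  # sketch only

    with open('statement.pdf', 'rb') as f:
        resp = requests.post(
            'http://localhost:5000/api/documents/',        # assumed dev URL
            files={'file': f},
            data={'category': 'Receipts'},
            cookies={'session': '<authenticated session cookie>'},  # placeholder
        )
    print(resp.status_code, resp.json().get('message'))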

app/routes/expenses.py (new file, 570 lines)
@@ -0,0 +1,570 @@
from flask import Blueprint, request, jsonify, send_file, current_app
from flask_login import login_required, current_user
from app import db
from app.models import Expense, Category, Tag
from werkzeug.utils import secure_filename
import os
import csv
import io
from datetime import datetime
from app.ocr import extract_text_from_file
from app.auto_tagger import suggest_tags_for_expense

bp = Blueprint('expenses', __name__, url_prefix='/api/expenses')

ALLOWED_EXTENSIONS = {'png', 'jpg', 'jpeg', 'pdf'}

def allowed_file(filename):
    return '.' in filename and filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS


@bp.route('/', methods=['GET'])
@login_required
def get_expenses():
    page = request.args.get('page', 1, type=int)
    per_page = request.args.get('per_page', 20, type=int)
    category_id = request.args.get('category_id', type=int)
    start_date = request.args.get('start_date')
    end_date = request.args.get('end_date')
    search = request.args.get('search', '')
    tag_ids = request.args.get('tag_ids', '')  # Comma-separated tag IDs

    query = Expense.query.filter_by(user_id=current_user.id)

    if category_id:
        query = query.filter_by(category_id=category_id)

    if start_date:
        query = query.filter(Expense.date >= datetime.fromisoformat(start_date))

    if end_date:
        query = query.filter(Expense.date <= datetime.fromisoformat(end_date))

    if search:
        query = query.filter(Expense.description.ilike(f'%{search}%'))

    # Filter by tags
    if tag_ids:
        try:
            tag_id_list = [int(tid.strip()) for tid in tag_ids.split(',') if tid.strip()]
            if tag_id_list:
                # Join with expense_tags to filter by tag IDs
                # Security: Tags are already filtered by user through Tag.user_id
                from app.models import ExpenseTag
                query = query.join(ExpenseTag).filter(ExpenseTag.tag_id.in_(tag_id_list))
        except ValueError:
            pass  # Invalid tag IDs, ignore filter

    pagination = query.order_by(Expense.date.desc()).paginate(
        page=page, per_page=per_page, error_out=False
    )

    return jsonify({
        'expenses': [expense.to_dict() for expense in pagination.items],
        'total': pagination.total,
        'pages': pagination.pages,
        'current_page': page
    })


@bp.route('/', methods=['POST'])
@login_required
def create_expense():
    # Handle both FormData and JSON requests
    # When FormData is sent (even without files), request.form will have the data
    # When JSON is sent, request.form will be empty
    data = request.form if request.form else request.get_json()

    # Validate required fields
    if not data or not data.get('amount') or not data.get('category_id') or not data.get('description'):
        return jsonify({'success': False, 'message': 'Missing required fields'}), 400

    # Security: Verify category belongs to current user
    category = Category.query.filter_by(id=int(data.get('category_id')), user_id=current_user.id).first()
    if not category:
        return jsonify({'success': False, 'message': 'Invalid category'}), 400

    # Handle receipt upload
    receipt_path = None
    receipt_ocr_text = ""
    if 'receipt' in request.files:
        file = request.files['receipt']
        if file and file.filename and allowed_file(file.filename):
            filename = secure_filename(f"{current_user.id}_{datetime.utcnow().timestamp()}_{file.filename}")
            receipts_dir = os.path.join(current_app.config['UPLOAD_FOLDER'], 'receipts')
            filepath = os.path.join(receipts_dir, filename)
            file.save(filepath)
            receipt_path = f'receipts/{filename}'

            # Process OCR for image receipts
            file_ext = filename.rsplit('.', 1)[1].lower() if '.' in filename else ''
            if file_ext in ['png', 'jpg', 'jpeg', 'pdf']:
                try:
                    abs_filepath = os.path.abspath(filepath)
                    receipt_ocr_text = extract_text_from_file(abs_filepath, file_ext)
                    print(f"OCR extracted {len(receipt_ocr_text)} characters from receipt {filename}")
                except Exception as e:
                    print(f"OCR processing failed for receipt {filename}: {str(e)}")

    # Create expense
    expense = Expense(
        amount=float(data.get('amount')),
        currency=data.get('currency', current_user.currency),
        description=data.get('description'),
        category_id=int(data.get('category_id')),
        user_id=current_user.id,
        receipt_path=receipt_path,
        receipt_ocr_text=receipt_ocr_text,
        date=datetime.fromisoformat(data.get('date')) if data.get('date') else datetime.utcnow()
    )

    # Handle legacy JSON tags
    if data.get('tags'):
        if isinstance(data.get('tags'), str):
            import json
            tags = json.loads(data.get('tags'))
        else:
            tags = data.get('tags')
        expense.set_tags(tags)

    db.session.add(expense)
    db.session.flush()  # Get expense ID before handling tag objects

    # Auto-suggest tags based on description and OCR text
    enable_auto_tags = data.get('enable_auto_tags', True)  # Default to True
    if enable_auto_tags:
        suggested_tags = suggest_tags_for_expense(
            description=data.get('description'),
|
||||
ocr_text=receipt_ocr_text,
|
||||
category_name=category.name
|
||||
)
|
||||
|
||||
# Create or get tags and associate with expense
|
||||
for tag_data in suggested_tags:
|
||||
# Check if tag exists for user
|
||||
tag = Tag.query.filter_by(
|
||||
user_id=current_user.id,
|
||||
name=tag_data['name']
|
||||
).first()
|
||||
|
||||
if not tag:
|
||||
# Create new auto-generated tag
|
||||
tag = Tag(
|
||||
name=tag_data['name'],
|
||||
color=tag_data['color'],
|
||||
icon=tag_data['icon'],
|
||||
user_id=current_user.id,
|
||||
is_auto=True,
|
||||
use_count=0
|
||||
)
|
||||
db.session.add(tag)
|
||||
db.session.flush()
|
||||
|
||||
# Associate tag with expense
|
||||
expense.add_tag(tag)
|
||||
|
||||
# Handle manual tag associations (tag IDs passed from frontend)
|
||||
if data.get('tag_ids'):
|
||||
tag_ids = data.get('tag_ids')
|
||||
if isinstance(tag_ids, str):
|
||||
import json
|
||||
tag_ids = json.loads(tag_ids)
|
||||
|
||||
for tag_id in tag_ids:
|
||||
# Security: Verify tag belongs to user
|
||||
tag = Tag.query.filter_by(id=tag_id, user_id=current_user.id).first()
|
||||
if tag:
|
||||
expense.add_tag(tag)
|
||||
|
||||
db.session.commit()
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'expense': expense.to_dict()
|
||||
}), 201
|
||||
|
||||
|
||||
@bp.route('/<int:expense_id>', methods=['PUT'])
|
||||
@login_required
|
||||
def update_expense(expense_id):
|
||||
expense = Expense.query.filter_by(id=expense_id, user_id=current_user.id).first()
|
||||
|
||||
if not expense:
|
||||
return jsonify({'success': False, 'message': 'Expense not found'}), 404
|
||||
|
||||
# Handle both FormData and JSON requests
|
||||
data = request.form if request.form else request.get_json()
|
||||
|
||||
# Update fields
|
||||
if data.get('amount'):
|
||||
expense.amount = float(data.get('amount'))
|
||||
if data.get('currency'):
|
||||
expense.currency = data.get('currency')
|
||||
if data.get('description'):
|
||||
expense.description = data.get('description')
|
||||
if data.get('category_id'):
|
||||
# Security: Verify category belongs to current user
|
||||
category = Category.query.filter_by(id=int(data.get('category_id')), user_id=current_user.id).first()
|
||||
if not category:
|
||||
return jsonify({'success': False, 'message': 'Invalid category'}), 400
|
||||
expense.category_id = int(data.get('category_id'))
|
||||
if data.get('date'):
|
||||
expense.date = datetime.fromisoformat(data.get('date'))
|
||||
if data.get('tags') is not None:
|
||||
if isinstance(data.get('tags'), str):
|
||||
import json
|
||||
tags = json.loads(data.get('tags'))
|
||||
else:
|
||||
tags = data.get('tags')
|
||||
expense.set_tags(tags)
|
||||
|
||||
# Handle receipt upload
|
||||
if 'receipt' in request.files:
|
||||
file = request.files['receipt']
|
||||
if file and file.filename and allowed_file(file.filename):
|
||||
# Delete old receipt
|
||||
if expense.receipt_path:
|
||||
clean_path = expense.receipt_path.replace('/uploads/', '').lstrip('/')
|
||||
old_path = os.path.join(current_app.config['UPLOAD_FOLDER'], clean_path)
|
||||
if os.path.exists(old_path):
|
||||
os.remove(old_path)
|
||||
|
||||
filename = secure_filename(f"{current_user.id}_{datetime.utcnow().timestamp()}_{file.filename}")
|
||||
receipts_dir = os.path.join(current_app.config['UPLOAD_FOLDER'], 'receipts')
|
||||
filepath = os.path.join(receipts_dir, filename)
|
||||
file.save(filepath)
|
||||
expense.receipt_path = f'receipts/{filename}'
|
||||
|
||||
# Process OCR for new receipt
|
||||
file_ext = filename.rsplit('.', 1)[1].lower() if '.' in filename else ''
|
||||
if file_ext in ['png', 'jpg', 'jpeg', 'pdf']:
|
||||
try:
|
||||
abs_filepath = os.path.abspath(filepath)
|
||||
expense.receipt_ocr_text = extract_text_from_file(abs_filepath, file_ext)
|
||||
print(f"OCR extracted {len(expense.receipt_ocr_text)} characters from receipt {filename}")
|
||||
except Exception as e:
|
||||
print(f"OCR processing failed for receipt {filename}: {str(e)}")
|
||||
|
||||
db.session.commit()
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'expense': expense.to_dict()
|
||||
})
|
||||
|
||||
|
||||
@bp.route('/<int:expense_id>', methods=['DELETE'])
|
||||
@login_required
|
||||
def delete_expense(expense_id):
|
||||
expense = Expense.query.filter_by(id=expense_id, user_id=current_user.id).first()
|
||||
|
||||
if not expense:
|
||||
return jsonify({'success': False, 'message': 'Expense not found'}), 404
|
||||
|
||||
# Delete receipt file
|
||||
if expense.receipt_path:
|
||||
# Remove leading slash and 'uploads/' prefix if present
|
||||
clean_path = expense.receipt_path.replace('/uploads/', '').lstrip('/')
|
||||
receipt_file = os.path.join(current_app.config['UPLOAD_FOLDER'], clean_path)
|
||||
if os.path.exists(receipt_file):
|
||||
os.remove(receipt_file)
|
||||
|
||||
db.session.delete(expense)
|
||||
db.session.commit()
|
||||
|
||||
return jsonify({'success': True, 'message': 'Expense deleted'})
|
||||
|
||||
|
||||
@bp.route('/categories', methods=['GET'])
|
||||
@login_required
|
||||
def get_categories():
|
||||
categories = Category.query.filter_by(user_id=current_user.id).order_by(Category.display_order, Category.created_at).all()
|
||||
|
||||
# Also return popular tags for quick selection
|
||||
popular_tags = Tag.query.filter_by(user_id=current_user.id)\
|
||||
.filter(Tag.use_count > 0)\
|
||||
.order_by(Tag.use_count.desc())\
|
||||
.limit(10)\
|
||||
.all()
|
||||
|
||||
return jsonify({
|
||||
'categories': [cat.to_dict() for cat in categories],
|
||||
'popular_tags': [tag.to_dict() for tag in popular_tags]
|
||||
})
|
||||
|
||||
|
||||
@bp.route('/suggest-tags', methods=['POST'])
|
||||
@login_required
|
||||
def suggest_tags():
|
||||
"""
|
||||
Get tag suggestions for an expense based on description and category
|
||||
"""
|
||||
data = request.get_json()
|
||||
|
||||
description = data.get('description', '')
|
||||
category_id = data.get('category_id')
|
||||
ocr_text = data.get('ocr_text', '')
|
||||
|
||||
category_name = None
|
||||
if category_id:
|
||||
category = Category.query.filter_by(id=category_id, user_id=current_user.id).first()
|
||||
if category:
|
||||
category_name = category.name
|
||||
|
||||
# Get suggestions from auto-tagger
|
||||
suggestions = suggest_tags_for_expense(description, ocr_text, category_name)
|
||||
|
||||
# Check which tags already exist for this user
|
||||
existing_tags = []
|
||||
if suggestions:
|
||||
tag_names = [s['name'] for s in suggestions]
|
||||
existing = Tag.query.filter(
|
||||
Tag.user_id == current_user.id,
|
||||
Tag.name.in_(tag_names)
|
||||
).all()
|
||||
existing_tags = [tag.to_dict() for tag in existing]
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'suggested_tags': suggestions,
|
||||
'existing_tags': existing_tags
|
||||
})
|
||||
|
||||
|
||||
@bp.route('/categories', methods=['POST'])
|
||||
@login_required
|
||||
def create_category():
|
||||
data = request.get_json()
|
||||
|
||||
if not data.get('name'):
|
||||
return jsonify({'success': False, 'message': 'Name is required'}), 400
|
||||
|
||||
# Sanitize inputs
|
||||
name = str(data.get('name')).strip()[:50] # Limit to 50 chars
|
||||
color = str(data.get('color', '#2b8cee')).strip()[:7] # Hex color format
|
||||
icon = str(data.get('icon', 'category')).strip()[:50] # Limit to 50 chars, alphanumeric and underscore only
|
||||
|
||||
# Validate color format (must be hex)
|
||||
if not color.startswith('#') or len(color) != 7:
|
||||
color = '#2b8cee'
|
||||
|
||||
# Validate icon (alphanumeric and underscore only for security)
|
||||
if not all(c.isalnum() or c == '_' for c in icon):
|
||||
icon = 'category'
|
||||
|
||||
# Get max display_order for user's categories
|
||||
max_order = db.session.query(db.func.max(Category.display_order)).filter_by(user_id=current_user.id).scalar() or 0
|
||||
|
||||
category = Category(
|
||||
name=name,
|
||||
color=color,
|
||||
icon=icon,
|
||||
display_order=max_order + 1,
|
||||
user_id=current_user.id
|
||||
)
|
||||
|
||||
db.session.add(category)
|
||||
db.session.commit()
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'category': category.to_dict()
|
||||
}), 201
|
||||
|
||||
|
||||
@bp.route('/categories/<int:category_id>', methods=['PUT'])
|
||||
@login_required
|
||||
def update_category(category_id):
|
||||
category = Category.query.filter_by(id=category_id, user_id=current_user.id).first()
|
||||
|
||||
if not category:
|
||||
return jsonify({'success': False, 'message': 'Category not found'}), 404
|
||||
|
||||
data = request.get_json()
|
||||
|
||||
if data.get('name'):
|
||||
category.name = str(data.get('name')).strip()[:50]
|
||||
if data.get('color'):
|
||||
color = str(data.get('color')).strip()[:7]
|
||||
if color.startswith('#') and len(color) == 7:
|
||||
category.color = color
|
||||
if data.get('icon'):
|
||||
icon = str(data.get('icon')).strip()[:50]
|
||||
# Validate icon (alphanumeric and underscore only for security)
|
||||
if all(c.isalnum() or c == '_' for c in icon):
|
||||
category.icon = icon
|
||||
if 'display_order' in data:
|
||||
category.display_order = int(data.get('display_order'))
|
||||
|
||||
db.session.commit()
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'category': category.to_dict()
|
||||
})
|
||||
|
||||
|
||||
@bp.route('/categories/<int:category_id>', methods=['DELETE'])
|
||||
@login_required
|
||||
def delete_category(category_id):
|
||||
category = Category.query.filter_by(id=category_id, user_id=current_user.id).first()
|
||||
|
||||
if not category:
|
||||
return jsonify({'success': False, 'message': 'Category not found'}), 404
|
||||
|
||||
data = request.get_json(silent=True) or {}
|
||||
move_to_category_id = data.get('move_to_category_id')
|
||||
|
||||
# Count expenses in this category
|
||||
expense_count = category.expenses.count()
|
||||
|
||||
# If category has expenses and no move_to_category_id specified, return error with count
|
||||
if expense_count > 0 and not move_to_category_id:
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'message': 'Category has expenses',
|
||||
'expense_count': expense_count,
|
||||
'requires_reassignment': True
|
||||
}), 400
|
||||
|
||||
# If move_to_category_id specified, reassign expenses
|
||||
if expense_count > 0 and move_to_category_id:
|
||||
move_to_category = Category.query.filter_by(id=move_to_category_id, user_id=current_user.id).first()
|
||||
if not move_to_category:
|
||||
return jsonify({'success': False, 'message': 'Target category not found'}), 404
|
||||
|
||||
# Reassign all expenses to the new category
|
||||
for expense in category.expenses:
|
||||
expense.category_id = move_to_category_id
|
||||
|
||||
db.session.delete(category)
|
||||
db.session.commit()
|
||||
|
||||
return jsonify({'success': True, 'message': 'Category deleted', 'expenses_moved': expense_count})
|
||||
|
||||
|
||||
@bp.route('/categories/reorder', methods=['PUT'])
|
||||
@login_required
|
||||
def reorder_categories():
|
||||
"""
|
||||
Reorder categories for the current user
|
||||
Expects: { "categories": [{"id": 1, "display_order": 0}, {"id": 2, "display_order": 1}, ...] }
|
||||
Security: Only updates categories belonging to current_user
|
||||
"""
|
||||
data = request.get_json()
|
||||
|
||||
if not data or 'categories' not in data:
|
||||
return jsonify({'success': False, 'message': 'Categories array required'}), 400
|
||||
|
||||
try:
|
||||
for cat_data in data['categories']:
|
||||
category = Category.query.filter_by(id=cat_data['id'], user_id=current_user.id).first()
|
||||
if category:
|
||||
category.display_order = cat_data['display_order']
|
||||
|
||||
db.session.commit()
|
||||
return jsonify({'success': True, 'message': 'Categories reordered'})
|
||||
except Exception as e:
|
||||
db.session.rollback()
|
||||
return jsonify({'success': False, 'message': str(e)}), 500
|
||||
|
||||
|
||||
@bp.route('/export/csv', methods=['GET'])
|
||||
@login_required
|
||||
def export_csv():
|
||||
expenses = Expense.query.filter_by(user_id=current_user.id).order_by(Expense.date.desc()).all()
|
||||
|
||||
output = io.StringIO()
|
||||
writer = csv.writer(output)
|
||||
|
||||
# Write header
|
||||
writer.writerow(['Date', 'Description', 'Amount', 'Currency', 'Category', 'Tags'])
|
||||
|
||||
# Write data
|
||||
for expense in expenses:
|
||||
writer.writerow([
|
||||
expense.date.strftime('%Y-%m-%d %H:%M:%S'),
|
||||
expense.description,
|
||||
expense.amount,
|
||||
expense.currency,
|
||||
expense.category.name,
|
||||
', '.join(expense.get_tags())
|
||||
])
|
||||
|
||||
output.seek(0)
|
||||
|
||||
return send_file(
|
||||
io.BytesIO(output.getvalue().encode('utf-8')),
|
||||
mimetype='text/csv',
|
||||
as_attachment=True,
|
||||
download_name=f'fina_expenses_{datetime.utcnow().strftime("%Y%m%d")}.csv'
|
||||
)
|
||||
|
||||
|
||||
@bp.route('/import/csv', methods=['POST'])
|
||||
@login_required
|
||||
def import_csv():
|
||||
if 'file' not in request.files:
|
||||
return jsonify({'success': False, 'message': 'No file provided'}), 400
|
||||
|
||||
file = request.files['file']
|
||||
if file.filename == '':
|
||||
return jsonify({'success': False, 'message': 'No file selected'}), 400
|
||||
|
||||
if not file.filename.endswith('.csv'):
|
||||
return jsonify({'success': False, 'message': 'File must be CSV'}), 400
|
||||
|
||||
try:
|
||||
stream = io.StringIO(file.stream.read().decode('utf-8'))
|
||||
reader = csv.DictReader(stream)
|
||||
|
||||
imported_count = 0
|
||||
errors = []
|
||||
|
||||
for row in reader:
|
||||
try:
|
||||
# Find or create category
|
||||
category_name = row.get('Category', 'Uncategorized')
|
||||
category = Category.query.filter_by(user_id=current_user.id, name=category_name).first()
|
||||
|
||||
if not category:
|
||||
category = Category(name=category_name, user_id=current_user.id)
|
||||
db.session.add(category)
|
||||
db.session.flush()
|
||||
|
||||
# Parse date
|
||||
date_str = row.get('Date', datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S'))
|
||||
expense_date = datetime.strptime(date_str, '%Y-%m-%d %H:%M:%S')
|
||||
|
||||
# Create expense
|
||||
expense = Expense(
|
||||
amount=float(row['Amount']),
|
||||
currency=row.get('Currency', current_user.currency),
|
||||
description=row['Description'],
|
||||
category_id=category.id,
|
||||
user_id=current_user.id,
|
||||
date=expense_date
|
||||
)
|
||||
|
||||
# Handle tags
|
||||
if row.get('Tags'):
|
||||
tags = [tag.strip() for tag in row['Tags'].split(',')]
|
||||
expense.set_tags(tags)
|
||||
|
||||
db.session.add(expense)
|
||||
imported_count += 1
|
||||
|
||||
except Exception as e:
|
||||
errors.append(f"Row error: {str(e)}")
|
||||
|
||||
db.session.commit()
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'imported': imported_count,
|
||||
'errors': errors
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
db.session.rollback()
|
||||
return jsonify({'success': False, 'message': f'Import failed: {str(e)}'}), 500
|
||||
408
app/routes/income.py
Normal file
408
app/routes/income.py
Normal file
|
|
@ -0,0 +1,408 @@
|
|||
from flask import Blueprint, request, jsonify, current_app
|
||||
from flask_login import login_required, current_user
|
||||
from app import db
|
||||
from app.models import Income
|
||||
from datetime import datetime, timedelta
|
||||
from dateutil.relativedelta import relativedelta
|
||||
import json
|
||||
|
||||
bp = Blueprint('income', __name__, url_prefix='/api/income')
|
||||
|
||||
|
||||
def calculate_income_next_due_date(frequency, custom_days=None, from_date=None):
|
||||
"""Calculate next due date for recurring income based on frequency
|
||||
Args:
|
||||
frequency: 'once', 'weekly', 'biweekly', 'every4weeks', 'monthly', 'custom'
|
||||
custom_days: Number of days for custom frequency
|
||||
from_date: Starting date (default: today)
|
||||
Returns:
|
||||
Next due date or None for one-time income
|
||||
"""
|
||||
if frequency == 'once':
|
||||
return None
|
||||
|
||||
if from_date is None:
|
||||
from_date = datetime.utcnow()
|
||||
|
||||
if frequency == 'weekly':
|
||||
return from_date + timedelta(days=7)
|
||||
elif frequency == 'biweekly':
|
||||
return from_date + timedelta(days=14)
|
||||
elif frequency == 'every4weeks':
|
||||
return from_date + timedelta(days=28)
|
||||
elif frequency == 'monthly':
|
||||
return from_date + relativedelta(months=1)
|
||||
elif frequency == 'custom' and custom_days:
|
||||
return from_date + timedelta(days=custom_days)
|
||||
|
||||
return None
|
||||
|
||||
|
||||
@bp.route('/', methods=['GET'])
|
||||
@login_required
|
||||
def get_income():
|
||||
"""Get income entries with filtering and pagination
|
||||
Security: Only returns income for current_user
|
||||
"""
|
||||
current_app.logger.info(f"Getting income for user {current_user.id}")
|
||||
page = request.args.get('page', 1, type=int)
|
||||
per_page = request.args.get('per_page', 20, type=int)
|
||||
start_date = request.args.get('start_date')
|
||||
end_date = request.args.get('end_date')
|
||||
source = request.args.get('source')
|
||||
search = request.args.get('search', '')
|
||||
|
||||
# Security: Filter by current user
|
||||
query = Income.query.filter_by(user_id=current_user.id)
|
||||
|
||||
if source:
|
||||
query = query.filter_by(source=source)
|
||||
|
||||
if start_date:
|
||||
query = query.filter(Income.date >= datetime.fromisoformat(start_date))
|
||||
|
||||
if end_date:
|
||||
query = query.filter(Income.date <= datetime.fromisoformat(end_date))
|
||||
|
||||
if search:
|
||||
query = query.filter(Income.description.ilike(f'%{search}%'))
|
||||
|
||||
pagination = query.order_by(Income.date.desc()).paginate(
|
||||
page=page, per_page=per_page, error_out=False
|
||||
)
|
||||
|
||||
current_app.logger.info(f"Found {pagination.total} income entries for user {current_user.id}")
|
||||
|
||||
return jsonify({
|
||||
'income': [inc.to_dict() for inc in pagination.items],
|
||||
'total': pagination.total,
|
||||
'pages': pagination.pages,
|
||||
'current_page': page
|
||||
})
|
||||
|
||||
|
||||
@bp.route('/', methods=['POST'])
|
||||
@login_required
|
||||
def create_income():
|
||||
"""Create new income entry
|
||||
Security: Only creates income for current_user
|
||||
"""
|
||||
data = request.get_json()
|
||||
current_app.logger.info(f"Creating income for user {current_user.id}, data: {data}")
|
||||
|
||||
# Validate required fields
|
||||
if not data or not data.get('amount') or not data.get('source') or not data.get('description'):
|
||||
current_app.logger.warning(f"Missing required fields: {data}")
|
||||
return jsonify({'success': False, 'message': 'Missing required fields'}), 400
|
||||
|
||||
try:
|
||||
income_date = datetime.fromisoformat(data.get('date')) if data.get('date') else datetime.utcnow()
|
||||
frequency = data.get('frequency', 'once')
|
||||
custom_days = data.get('custom_days')
|
||||
auto_create = data.get('auto_create', False)
|
||||
|
||||
# Calculate next due date for recurring income
|
||||
next_due_date = None
|
||||
if frequency != 'once' and auto_create:
|
||||
next_due_date = calculate_income_next_due_date(frequency, custom_days, income_date)
|
||||
|
||||
# Create income entry
|
||||
income = Income(
|
||||
amount=float(data.get('amount')),
|
||||
currency=data.get('currency', current_user.currency),
|
||||
description=data.get('description'),
|
||||
source=data.get('source'),
|
||||
user_id=current_user.id,
|
||||
tags=json.dumps(data.get('tags', [])) if isinstance(data.get('tags'), list) else data.get('tags', '[]'),
|
||||
frequency=frequency,
|
||||
custom_days=custom_days,
|
||||
next_due_date=next_due_date,
|
||||
is_active=True,
|
||||
auto_create=auto_create,
|
||||
date=income_date
|
||||
)
|
||||
|
||||
current_app.logger.info(f"Adding income to session: {income.description}")
|
||||
db.session.add(income)
|
||||
db.session.commit()
|
||||
current_app.logger.info(f"Income committed with ID: {income.id}")
|
||||
|
||||
# Verify it was saved
|
||||
saved_income = Income.query.filter_by(id=income.id).first()
|
||||
current_app.logger.info(f"Verification - Income exists: {saved_income is not None}")
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'message': 'Income added successfully',
|
||||
'income': income.to_dict()
|
||||
}), 201
|
||||
|
||||
except Exception as e:
|
||||
db.session.rollback()
|
||||
current_app.logger.error(f"Error creating income: {str(e)}", exc_info=True)
|
||||
return jsonify({'success': False, 'message': 'Failed to create income'}), 500
|
||||
|
||||
|
||||
@bp.route('/<int:income_id>', methods=['PUT'])
|
||||
@login_required
|
||||
def update_income(income_id):
|
||||
"""Update income entry
|
||||
Security: Only allows updating user's own income
|
||||
"""
|
||||
# Security check: verify income belongs to current user
|
||||
income = Income.query.filter_by(id=income_id, user_id=current_user.id).first()
|
||||
if not income:
|
||||
return jsonify({'success': False, 'message': 'Income not found'}), 404
|
||||
|
||||
data = request.get_json()
|
||||
|
||||
try:
|
||||
# Update fields
|
||||
if 'amount' in data:
|
||||
income.amount = float(data['amount'])
|
||||
if 'currency' in data:
|
||||
income.currency = data['currency']
|
||||
if 'description' in data:
|
||||
income.description = data['description']
|
||||
if 'source' in data:
|
||||
income.source = data['source']
|
||||
if 'tags' in data:
|
||||
income.tags = json.dumps(data['tags']) if isinstance(data['tags'], list) else data['tags']
|
||||
if 'date' in data:
|
||||
income.date = datetime.fromisoformat(data['date'])
|
||||
|
||||
# Handle frequency changes
|
||||
frequency_changed = False
|
||||
if 'frequency' in data and data['frequency'] != income.frequency:
|
||||
income.frequency = data['frequency']
|
||||
frequency_changed = True
|
||||
|
||||
if 'custom_days' in data:
|
||||
income.custom_days = data['custom_days']
|
||||
frequency_changed = True
|
||||
|
||||
if 'auto_create' in data:
|
||||
income.auto_create = data['auto_create']
|
||||
|
||||
if 'is_active' in data:
|
||||
income.is_active = data['is_active']
|
||||
|
||||
# Recalculate next_due_date if frequency changed or auto_create enabled
|
||||
if (frequency_changed or 'auto_create' in data) and income.auto_create and income.is_active:
|
||||
if income.frequency != 'once':
|
||||
from_date = income.last_created_date if income.last_created_date else income.date
|
||||
income.next_due_date = calculate_income_next_due_date(
|
||||
income.frequency,
|
||||
income.custom_days,
|
||||
from_date
|
||||
)
|
||||
else:
|
||||
income.next_due_date = None
|
||||
elif not income.auto_create or not income.is_active:
|
||||
income.next_due_date = None
|
||||
|
||||
income.updated_at = datetime.utcnow()
|
||||
|
||||
db.session.commit()
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'message': 'Income updated successfully',
|
||||
'income': income.to_dict()
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
db.session.rollback()
|
||||
current_app.logger.error(f"Error updating income: {str(e)}")
|
||||
return jsonify({'success': False, 'message': 'Failed to update income'}), 500
|
||||
|
||||
|
||||
@bp.route('/<int:income_id>', methods=['DELETE'])
|
||||
@login_required
|
||||
def delete_income(income_id):
|
||||
"""Delete income entry
|
||||
Security: Only allows deleting user's own income
|
||||
"""
|
||||
# Security check: verify income belongs to current user
|
||||
income = Income.query.filter_by(id=income_id, user_id=current_user.id).first()
|
||||
if not income:
|
||||
return jsonify({'success': False, 'message': 'Income not found'}), 404
|
||||
|
||||
try:
|
||||
db.session.delete(income)
|
||||
db.session.commit()
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'message': 'Income deleted successfully'
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
db.session.rollback()
|
||||
current_app.logger.error(f"Error deleting income: {str(e)}")
|
||||
return jsonify({'success': False, 'message': 'Failed to delete income'}), 500
|
||||
|
||||
|
||||
@bp.route('/<int:income_id>/toggle', methods=['PUT'])
|
||||
@login_required
|
||||
def toggle_recurring_income(income_id):
|
||||
"""Toggle recurring income active status
|
||||
Security: Only allows toggling user's own income
|
||||
"""
|
||||
# Security check: verify income belongs to current user
|
||||
income = Income.query.filter_by(id=income_id, user_id=current_user.id).first()
|
||||
if not income:
|
||||
return jsonify({'success': False, 'message': 'Income not found'}), 404
|
||||
|
||||
try:
|
||||
income.is_active = not income.is_active
|
||||
|
||||
# Clear next_due_date if deactivated
|
||||
if not income.is_active:
|
||||
income.next_due_date = None
|
||||
elif income.auto_create and income.frequency != 'once':
|
||||
# Recalculate next_due_date when reactivated
|
||||
from_date = income.last_created_date if income.last_created_date else income.date
|
||||
income.next_due_date = calculate_income_next_due_date(
|
||||
income.frequency,
|
||||
income.custom_days,
|
||||
from_date
|
||||
)
|
||||
|
||||
income.updated_at = datetime.utcnow()
|
||||
db.session.commit()
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'message': f'Income {"activated" if income.is_active else "deactivated"}',
|
||||
'income': income.to_dict()
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
db.session.rollback()
|
||||
current_app.logger.error(f"Error toggling income: {str(e)}")
|
||||
return jsonify({'success': False, 'message': 'Failed to toggle income'}), 500
|
||||
|
||||
|
||||
@bp.route('/<int:income_id>/create-now', methods=['POST'])
|
||||
@login_required
|
||||
def create_income_now(income_id):
|
||||
"""Manually create income entry from recurring income
|
||||
Security: Only allows creating from user's own recurring income
|
||||
"""
|
||||
# Security check: verify income belongs to current user
|
||||
recurring_income = Income.query.filter_by(id=income_id, user_id=current_user.id).first()
|
||||
if not recurring_income:
|
||||
return jsonify({'success': False, 'message': 'Recurring income not found'}), 404
|
||||
|
||||
if recurring_income.frequency == 'once':
|
||||
return jsonify({'success': False, 'message': 'This is not a recurring income'}), 400
|
||||
|
||||
try:
|
||||
# Create new income entry based on recurring income
|
||||
new_income = Income(
|
||||
amount=recurring_income.amount,
|
||||
currency=recurring_income.currency,
|
||||
description=recurring_income.description,
|
||||
source=recurring_income.source,
|
||||
user_id=current_user.id,
|
||||
tags=recurring_income.tags,
|
||||
frequency='once', # Created income is one-time
|
||||
date=datetime.utcnow()
|
||||
)
|
||||
|
||||
db.session.add(new_income)
|
||||
|
||||
# Update recurring income's next due date and last created date
|
||||
recurring_income.last_created_date = datetime.utcnow()
|
||||
if recurring_income.auto_create and recurring_income.is_active:
|
||||
recurring_income.next_due_date = calculate_income_next_due_date(
|
||||
recurring_income.frequency,
|
||||
recurring_income.custom_days,
|
||||
recurring_income.last_created_date
|
||||
)
|
||||
|
||||
db.session.commit()
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'message': 'Income created successfully',
|
||||
'income': new_income.to_dict(),
|
||||
'recurring_income': recurring_income.to_dict()
|
||||
}), 201
|
||||
|
||||
except Exception as e:
|
||||
db.session.rollback()
|
||||
current_app.logger.error(f"Error creating income from recurring: {str(e)}")
|
||||
return jsonify({'success': False, 'message': 'Failed to create income'}), 500
|
||||
|
||||
|
||||
@bp.route('/sources', methods=['GET'])
|
||||
@login_required
|
||||
def get_income_sources():
|
||||
"""Get list of income sources
|
||||
Returns predefined sources for consistency
|
||||
"""
|
||||
sources = [
|
||||
{'value': 'Salary', 'label': 'Salary', 'icon': 'payments'},
|
||||
{'value': 'Freelance', 'label': 'Freelance', 'icon': 'work'},
|
||||
{'value': 'Investment', 'label': 'Investment', 'icon': 'trending_up'},
|
||||
{'value': 'Rental', 'label': 'Rental Income', 'icon': 'home'},
|
||||
{'value': 'Gift', 'label': 'Gift', 'icon': 'card_giftcard'},
|
||||
{'value': 'Bonus', 'label': 'Bonus', 'icon': 'star'},
|
||||
{'value': 'Refund', 'label': 'Refund', 'icon': 'refresh'},
|
||||
{'value': 'Other', 'label': 'Other', 'icon': 'category'}
|
||||
]
|
||||
|
||||
return jsonify({'sources': sources})
|
||||
|
||||
|
||||
@bp.route('/summary', methods=['GET'])
|
||||
@login_required
|
||||
def get_income_summary():
|
||||
"""Get income summary for dashboard
|
||||
Security: Only returns data for current_user
|
||||
"""
|
||||
start_date = request.args.get('start_date')
|
||||
end_date = request.args.get('end_date')
|
||||
|
||||
# Security: Filter by current user
|
||||
query = Income.query.filter_by(user_id=current_user.id)
|
||||
|
||||
if start_date:
|
||||
query = query.filter(Income.date >= datetime.fromisoformat(start_date))
|
||||
|
||||
if end_date:
|
||||
query = query.filter(Income.date <= datetime.fromisoformat(end_date))
|
||||
|
||||
# Calculate totals by source
|
||||
income_by_source = db.session.query(
|
||||
Income.source,
|
||||
db.func.sum(Income.amount).label('total'),
|
||||
db.func.count(Income.id).label('count')
|
||||
).filter_by(user_id=current_user.id)
|
||||
|
||||
if start_date:
|
||||
income_by_source = income_by_source.filter(Income.date >= datetime.fromisoformat(start_date))
|
||||
if end_date:
|
||||
income_by_source = income_by_source.filter(Income.date <= datetime.fromisoformat(end_date))
|
||||
|
||||
income_by_source = income_by_source.group_by(Income.source).all()
|
||||
|
||||
total_income = sum(item.total for item in income_by_source)
|
||||
|
||||
breakdown = [
|
||||
{
|
||||
'source': item.source,
|
||||
'total': float(item.total),
|
||||
'count': item.count,
|
||||
'percentage': (float(item.total) / total_income * 100) if total_income > 0 else 0
|
||||
}
|
||||
for item in income_by_source
|
||||
]
|
||||
|
||||
return jsonify({
|
||||
'total_income': total_income,
|
||||
'count': sum(item.count for item in income_by_source),
|
||||
'breakdown': breakdown
|
||||
})
|
||||
581
app/routes/main.py
Normal file
581
app/routes/main.py
Normal file
|
|
@ -0,0 +1,581 @@
|
|||
from flask import Blueprint, render_template, request, jsonify
|
||||
from flask_login import login_required, current_user
|
||||
from app import db
|
||||
from app.models import Expense, Category, Income
|
||||
from sqlalchemy import func, extract
|
||||
from datetime import datetime, timedelta
|
||||
from collections import defaultdict
|
||||
|
||||
bp = Blueprint('main', __name__)
|
||||
|
||||
@bp.route('/')
|
||||
def index():
|
||||
if current_user.is_authenticated:
|
||||
return render_template('dashboard.html')
|
||||
return render_template('landing.html')
|
||||
|
||||
|
||||
@bp.route('/dashboard')
|
||||
@login_required
|
||||
def dashboard():
|
||||
return render_template('dashboard.html')
|
||||
|
||||
|
||||
@bp.route('/transactions')
|
||||
@login_required
|
||||
def transactions():
|
||||
return render_template('transactions.html')
|
||||
|
||||
|
||||
@bp.route('/reports')
|
||||
@login_required
|
||||
def reports():
|
||||
return render_template('reports.html')
|
||||
|
||||
|
||||
@bp.route('/settings')
|
||||
@login_required
|
||||
def settings():
|
||||
return render_template('settings.html')
|
||||
|
||||
|
||||
@bp.route('/documents')
|
||||
@login_required
|
||||
def documents():
|
||||
return render_template('documents.html')
|
||||
|
||||
|
||||
@bp.route('/recurring')
|
||||
@login_required
|
||||
def recurring():
|
||||
return render_template('recurring.html')
|
||||
|
||||
|
||||
@bp.route('/import')
|
||||
@login_required
|
||||
def import_page():
|
||||
return render_template('import.html')
|
||||
|
||||
|
||||
@bp.route('/income')
|
||||
@login_required
|
||||
def income():
|
||||
return render_template('income.html')
|
||||
|
||||
|
||||
@bp.route('/admin')
|
||||
@login_required
|
||||
def admin():
|
||||
if not current_user.is_admin:
|
||||
return render_template('404.html'), 404
|
||||
return render_template('admin.html')
|
||||
|
||||
|
||||
@bp.route('/api/dashboard-stats')
|
||||
@login_required
|
||||
def dashboard_stats():
|
||||
now = datetime.utcnow()
|
||||
|
||||
# Current month stats
|
||||
current_month_start = now.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
|
||||
|
||||
# Previous month stats
|
||||
if now.month == 1:
|
||||
prev_month_start = now.replace(year=now.year-1, month=12, day=1)
|
||||
else:
|
||||
prev_month_start = current_month_start.replace(month=current_month_start.month-1)
|
||||
|
||||
# Total spent this month (all currencies - show user's preferred currency)
|
||||
current_month_expenses = Expense.query.filter(
|
||||
Expense.user_id == current_user.id,
|
||||
Expense.date >= current_month_start
|
||||
).all()
|
||||
current_month_total = sum(exp.amount for exp in current_month_expenses)
|
||||
|
||||
# Previous month total
|
||||
prev_month_expenses = Expense.query.filter(
|
||||
Expense.user_id == current_user.id,
|
||||
Expense.date >= prev_month_start,
|
||||
Expense.date < current_month_start
|
||||
).all()
|
||||
prev_month_total = sum(exp.amount for exp in prev_month_expenses)
|
||||
|
||||
# Current month income
|
||||
current_month_income = Income.query.filter(
|
||||
Income.user_id == current_user.id,
|
||||
Income.date >= current_month_start
|
||||
).all()
|
||||
current_income_total = sum(inc.amount for inc in current_month_income)
|
||||
|
||||
# Previous month income
|
||||
prev_month_income = Income.query.filter(
|
||||
Income.user_id == current_user.id,
|
||||
Income.date >= prev_month_start,
|
||||
Income.date < current_month_start
|
||||
).all()
|
||||
prev_income_total = sum(inc.amount for inc in prev_month_income)
|
||||
|
||||
# Calculate profit/loss
|
||||
current_profit = current_income_total - current_month_total
|
||||
prev_profit = prev_income_total - prev_month_total
|
||||
|
||||
# Calculate percentage change
|
||||
if prev_month_total > 0:
|
||||
percent_change = ((current_month_total - prev_month_total) / prev_month_total) * 100
|
||||
else:
|
||||
percent_change = 100 if current_month_total > 0 else 0
|
||||
|
||||
# Active categories
|
||||
active_categories = Category.query.filter_by(user_id=current_user.id).count()
|
||||
|
||||
# Total transactions this month
|
||||
total_transactions = Expense.query.filter(
|
||||
Expense.user_id == current_user.id,
|
||||
Expense.date >= current_month_start
|
||||
).count()
|
||||
|
||||
# Category breakdown for entire current year (all currencies)
|
||||
current_year_start = now.replace(month=1, day=1, hour=0, minute=0, second=0, microsecond=0)
|
||||
category_stats = db.session.query(
|
||||
Category.id,
|
||||
Category.name,
|
||||
Category.color,
|
||||
Category.icon,
|
||||
func.sum(Expense.amount).label('total'),
|
||||
func.count(Expense.id).label('count')
|
||||
).join(Expense).filter(
|
||||
Expense.user_id == current_user.id,
|
||||
Expense.date >= current_year_start
|
||||
).group_by(Category.id).order_by(Category.display_order, Category.created_at).all()
|
||||
|
||||
# Monthly breakdown (all 12 months of current year) - including income
|
||||
monthly_data = []
|
||||
for month_num in range(1, 13):
|
||||
month_start = now.replace(month=month_num, day=1, hour=0, minute=0, second=0, microsecond=0)
|
||||
if month_num == 12:
|
||||
month_end = now.replace(year=now.year+1, month=1, day=1, hour=0, minute=0, second=0, microsecond=0)
|
||||
else:
|
||||
month_end = now.replace(month=month_num+1, day=1, hour=0, minute=0, second=0, microsecond=0)
|
||||
|
||||
month_expenses = Expense.query.filter(
|
||||
Expense.user_id == current_user.id,
|
||||
Expense.date >= month_start,
|
||||
Expense.date < month_end
|
||||
).all()
|
||||
month_total = sum(exp.amount for exp in month_expenses)
|
||||
|
||||
month_income_list = Income.query.filter(
|
||||
Income.user_id == current_user.id,
|
||||
Income.date >= month_start,
|
||||
Income.date < month_end
|
||||
).all()
|
||||
month_income = sum(inc.amount for inc in month_income_list)
|
||||
|
||||
monthly_data.append({
|
||||
'month': month_start.strftime('%b'),
|
||||
'expenses': float(month_total),
|
||||
'income': float(month_income),
|
||||
'profit': float(month_income - month_total)
|
||||
})
|
||||
|
||||
# Add budget status to category breakdown
|
||||
category_breakdown = []
|
||||
for stat in category_stats:
|
||||
cat = Category.query.get(stat[0])
|
||||
cat_data = {
|
||||
'id': stat[0],
|
||||
'name': stat[1],
|
||||
'color': stat[2],
|
||||
'icon': stat[3],
|
||||
'total': float(stat[4]),
|
||||
'count': stat[5]
|
||||
}
|
||||
if cat:
|
||||
cat_data['budget_status'] = cat.get_budget_status()
|
||||
cat_data['monthly_budget'] = cat.monthly_budget
|
||||
cat_data['budget_alert_threshold'] = cat.budget_alert_threshold
|
||||
category_breakdown.append(cat_data)
|
||||
|
||||
return jsonify({
|
||||
'total_spent': float(current_month_total),
|
||||
'total_income': float(current_income_total),
|
||||
'profit_loss': float(current_profit),
|
||||
'percent_change': round(percent_change, 1),
|
||||
'active_categories': active_categories,
|
||||
'total_transactions': total_transactions,
|
||||
'currency': current_user.currency,
|
||||
'category_breakdown': category_breakdown,
|
||||
'monthly_data': monthly_data
|
||||
})
|
||||
|
||||
|
||||
@bp.route('/api/recent-transactions')
|
||||
@login_required
|
||||
def recent_transactions():
|
||||
limit = request.args.get('limit', 10, type=int)
|
||||
|
||||
expenses = Expense.query.filter_by(user_id=current_user.id)\
|
||||
.order_by(Expense.date.desc())\
|
||||
.limit(limit)\
|
||||
.all()
|
||||
|
||||
return jsonify({
|
||||
'transactions': [expense.to_dict() for expense in expenses]
|
||||
})
|
||||
|
||||
|
||||
@bp.route('/api/reports-stats')
|
||||
@login_required
|
||||
def reports_stats():
|
||||
"""
|
||||
Generate comprehensive financial reports including income tracking
|
||||
Security: Only returns data for current_user (enforced by user_id filter)
|
||||
"""
|
||||
period = request.args.get('period', '30') # days
|
||||
category_filter = request.args.get('category_id', type=int)
|
||||
|
||||
try:
|
||||
days = int(period)
|
||||
except ValueError:
|
||||
days = 30
|
||||
|
||||
now = datetime.utcnow()
|
||||
period_start = now - timedelta(days=days)
|
||||
|
||||
# Query expenses with security filter
|
||||
query = Expense.query.filter(
|
||||
Expense.user_id == current_user.id,
|
||||
Expense.date >= period_start
|
||||
)
|
||||
|
||||
if category_filter:
|
||||
query = query.filter_by(category_id=category_filter)
|
||||
|
||||
expenses = query.all()
|
||||
|
||||
# Query income for the same period
|
||||
income_query = Income.query.filter(
|
||||
Income.user_id == current_user.id,
|
||||
Income.date >= period_start
|
||||
)
|
||||
incomes = income_query.all()
|
||||
|
||||
# Total spent and earned in period
|
||||
total_spent = sum(exp.amount for exp in expenses)
|
||||
total_income = sum(inc.amount for inc in incomes)
|
||||
|
||||
# Previous period comparison for expenses and income
|
||||
prev_period_start = period_start - timedelta(days=days)
|
||||
prev_expenses = Expense.query.filter(
|
||||
Expense.user_id == current_user.id,
|
||||
Expense.date >= prev_period_start,
|
||||
Expense.date < period_start
|
||||
).all()
|
||||
prev_total = sum(exp.amount for exp in prev_expenses)
|
||||
|
||||
prev_incomes = Income.query.filter(
|
||||
Income.user_id == current_user.id,
|
||||
Income.date >= prev_period_start,
|
||||
Income.date < period_start
|
||||
).all()
|
||||
prev_income_total = sum(inc.amount for inc in prev_incomes)
|
||||
|
||||
# Calculate profit/loss
|
||||
current_profit = total_income - total_spent
|
||||
prev_profit = prev_income_total - prev_total
|
||||
|
||||
percent_change = 0
|
||||
if prev_total > 0:
|
||||
percent_change = ((total_spent - prev_total) / prev_total) * 100
|
||||
elif total_spent > 0:
|
||||
percent_change = 100
|
||||
|
||||
# Income change percentage
|
||||
income_percent_change = 0
|
||||
if prev_income_total > 0:
|
||||
income_percent_change = ((total_income - prev_income_total) / prev_income_total) * 100
|
||||
elif total_income > 0:
|
||||
income_percent_change = 100
|
||||
|
||||
# Profit/loss change percentage
|
||||
profit_percent_change = 0
|
||||
if prev_profit != 0:
|
||||
profit_percent_change = ((current_profit - prev_profit) / abs(prev_profit)) * 100
|
||||
elif current_profit != 0:
|
||||
profit_percent_change = 100
|
||||
|
||||
# Top category (all currencies)
|
||||
category_totals = {}
|
||||
for exp in expenses:
|
||||
cat_name = exp.category.name
|
||||
category_totals[cat_name] = category_totals.get(cat_name, 0) + exp.amount
|
||||
|
||||
top_category = max(category_totals.items(), key=lambda x: x[1]) if category_totals else ('None', 0)
|
||||
|
||||
# Average daily spending
|
||||
avg_daily = total_spent / days if days > 0 else 0
|
||||
prev_avg_daily = prev_total / days if days > 0 else 0
|
||||
avg_change = 0
|
||||
if prev_avg_daily > 0:
|
||||
avg_change = ((avg_daily - prev_avg_daily) / prev_avg_daily) * 100
|
||||
elif avg_daily > 0:
|
||||
avg_change = 100
|
||||
|
||||
# Savings rate calculation based on income (more accurate than budget)
|
||||
if total_income > 0:
|
||||
savings_amount = total_income - total_spent
|
||||
savings_rate = (savings_amount / total_income) * 100
|
||||
savings_rate = max(-100, min(100, savings_rate)) # Clamp between -100% and 100%
|
||||
else:
|
||||
# Fallback to budget if no income data
|
||||
if current_user.monthly_budget and current_user.monthly_budget > 0:
|
||||
savings_amount = current_user.monthly_budget - total_spent
|
||||
savings_rate = (savings_amount / current_user.monthly_budget) * 100
|
||||
savings_rate = max(0, min(100, savings_rate))
|
||||
else:
|
||||
savings_rate = 0
|
||||
|
||||
# Previous period savings rate
|
||||
if prev_income_total > 0:
|
||||
prev_savings_amount = prev_income_total - prev_total
|
||||
prev_savings_rate = (prev_savings_amount / prev_income_total) * 100
|
||||
prev_savings_rate = max(-100, min(100, prev_savings_rate))
|
||||
else:
|
||||
if current_user.monthly_budget and current_user.monthly_budget > 0:
|
||||
prev_savings_amount = current_user.monthly_budget - prev_total
|
||||
prev_savings_rate = (prev_savings_amount / current_user.monthly_budget) * 100
|
||||
prev_savings_rate = max(0, min(100, prev_savings_rate))
|
||||
else:
|
||||
prev_savings_rate = 0
|
||||
|
||||
savings_rate_change = savings_rate - prev_savings_rate
|
||||
|
||||
# Category breakdown for pie chart
|
||||
category_breakdown = []
|
||||
for cat_name, amount in sorted(category_totals.items(), key=lambda x: x[1], reverse=True):
|
||||
category = Category.query.filter_by(user_id=current_user.id, name=cat_name).first()
|
||||
if category:
|
||||
percentage = (amount / total_spent * 100) if total_spent > 0 else 0
|
||||
category_breakdown.append({
|
||||
'name': cat_name,
|
||||
'color': category.color,
|
||||
'amount': float(amount),
|
||||
'percentage': round(percentage, 1)
|
||||
})
|
||||
|
||||
# Daily spending and income trend (last 30 days)
|
||||
daily_trend = []
|
||||
for i in range(min(30, days)):
|
||||
day_date = now - timedelta(days=i)
|
||||
day_start = day_date.replace(hour=0, minute=0, second=0, microsecond=0)
|
||||
day_end = day_start + timedelta(days=1)
|
||||
|
||||
day_expenses = Expense.query.filter(
|
||||
Expense.user_id == current_user.id,
|
||||
Expense.date >= day_start,
|
||||
Expense.date < day_end
|
||||
).all()
|
||||
day_total = sum(exp.amount for exp in day_expenses)
|
||||
|
||||
day_incomes = Income.query.filter(
|
||||
Income.user_id == current_user.id,
|
||||
Income.date >= day_start,
|
||||
Income.date < day_end
|
||||
).all()
|
||||
day_income = sum(inc.amount for inc in day_incomes)
|
||||
|
||||
daily_trend.insert(0, {
|
||||
'date': day_date.strftime('%d %b'),
|
||||
'expenses': float(day_total),
|
||||
'income': float(day_income),
|
||||
'profit': float(day_income - day_total)
|
||||
})
|
||||
|
||||
# Monthly comparison with income (all 12 months of current year)
|
||||
monthly_comparison = []
|
||||
current_year = now.year
|
||||
for month in range(1, 13):
|
||||
month_start = datetime(current_year, month, 1)
|
||||
if month == 12:
|
||||
month_end = datetime(current_year + 1, 1, 1)
|
||||
else:
|
||||
month_end = datetime(current_year, month + 1, 1)
|
||||
|
||||
month_expenses = Expense.query.filter(
|
||||
Expense.user_id == current_user.id,
|
||||
Expense.date >= month_start,
|
||||
Expense.date < month_end
|
||||
).all()
|
||||
month_total = sum(exp.amount for exp in month_expenses)
|
||||
|
||||
month_incomes = Income.query.filter(
|
||||
Income.user_id == current_user.id,
|
||||
Income.date >= month_start,
|
||||
Income.date < month_end
|
||||
).all()
|
||||
month_income = sum(inc.amount for inc in month_incomes)
|
||||
|
||||
monthly_comparison.append({
|
||||
'month': month_start.strftime('%b'),
|
||||
'expenses': float(month_total),
|
||||
'income': float(month_income),
|
||||
'profit': float(month_income - month_total)
|
||||
})
|
||||
|
||||
# Income sources breakdown
|
||||
income_by_source = {}
|
||||
for inc in incomes:
|
||||
source = inc.source
|
||||
income_by_source[source] = income_by_source.get(source, 0) + inc.amount
|
||||
|
||||
income_breakdown = [{
|
||||
'source': source,
|
||||
'amount': float(amount),
|
||||
'percentage': round((amount / total_income * 100) if total_income > 0 else 0, 1)
|
||||
} for source, amount in sorted(income_by_source.items(), key=lambda x: x[1], reverse=True)]
|
||||
|
||||
return jsonify({
|
||||
'total_spent': float(total_spent),
|
||||
'total_income': float(total_income),
|
||||
'profit_loss': float(current_profit),
|
||||
'percent_change': round(percent_change, 1),
|
||||
'income_percent_change': round(income_percent_change, 1),
|
||||
'profit_percent_change': round(profit_percent_change, 1),
|
||||
'top_category': {'name': top_category[0], 'amount': float(top_category[1])},
|
||||
'avg_daily': float(avg_daily),
|
||||
'avg_daily_change': round(avg_change, 1),
|
||||
'savings_rate': round(savings_rate, 1),
|
||||
'savings_rate_change': round(savings_rate_change, 1),
|
||||
'category_breakdown': category_breakdown,
|
||||
'income_breakdown': income_breakdown,
|
||||
'daily_trend': daily_trend,
|
||||
'monthly_comparison': monthly_comparison,
|
||||
'currency': current_user.currency,
|
||||
'period_days': days
|
||||
})
|
||||
|
||||
|
||||
@bp.route('/api/smart-recommendations')
|
||||
@login_required
|
||||
def smart_recommendations():
|
||||
"""
|
||||
Generate smart financial recommendations based on user spending patterns
|
||||
Security: Only returns recommendations for current_user
|
||||
"""
|
||||
now = datetime.utcnow()
|
||||
|
||||
# Get data for last 30 and 60 days for comparison
|
||||
period_30 = now - timedelta(days=30)
|
||||
period_60 = now - timedelta(days=60)
|
||||
period_30_start = period_60
|
||||
|
||||
# Current period expenses (all currencies)
|
||||
current_expenses = Expense.query.filter(
|
||||
Expense.user_id == current_user.id,
|
||||
Expense.date >= period_30
|
||||
).all()
|
||||
|
||||
# Previous period expenses (all currencies)
|
||||
previous_expenses = Expense.query.filter(
|
||||
Expense.user_id == current_user.id,
|
||||
Expense.date >= period_60,
|
||||
Expense.date < period_30
|
||||
).all()
|
||||
|
||||
current_total = sum(exp.amount for exp in current_expenses)
|
||||
previous_total = sum(exp.amount for exp in previous_expenses)
|
||||
|
||||
# Category analysis
|
||||
current_by_category = defaultdict(float)
|
||||
previous_by_category = defaultdict(float)
|
||||
|
||||
for exp in current_expenses:
|
||||
current_by_category[exp.category.name] += exp.amount
|
||||
|
||||
for exp in previous_expenses:
|
||||
previous_by_category[exp.category.name] += exp.amount
|
||||
|
||||
recommendations = []
|
||||
|
||||
# Recommendation 1: Budget vs Spending
|
||||
if current_user.monthly_budget and current_user.monthly_budget > 0:
|
||||
budget_used_percent = (current_total / current_user.monthly_budget) * 100
|
||||
remaining = current_user.monthly_budget - current_total
|
||||
|
||||
if budget_used_percent > 90:
|
||||
recommendations.append({
|
||||
'type': 'warning',
|
||||
'icon': 'warning',
|
||||
'color': 'red',
|
||||
'title': 'Budget Alert' if current_user.language == 'en' else 'Alertă Buget',
|
||||
'description': f'You\'ve used {budget_used_percent:.1f}% of your monthly budget. Only {abs(remaining):.2f} {current_user.currency} remaining.' if current_user.language == 'en' else f'Ai folosit {budget_used_percent:.1f}% din bugetul lunar. Mai rămân doar {abs(remaining):.2f} {current_user.currency}.'
|
||||
})
|
||||
elif budget_used_percent < 70 and remaining > 0:
|
||||
recommendations.append({
|
||||
'type': 'success',
|
||||
'icon': 'trending_up',
|
||||
'color': 'green',
|
||||
'title': 'Great Savings Opportunity' if current_user.language == 'en' else 'Oportunitate de Economisire',
|
||||
'description': f'You have {remaining:.2f} {current_user.currency} remaining from your budget. Consider saving or investing it.' if current_user.language == 'en' else f'Mai ai {remaining:.2f} {current_user.currency} din buget. Consideră să economisești sau să investești.'
|
||||
})
|
||||
|
||||
# Recommendation 2: Category spending changes
|
||||
for category_name, current_amount in current_by_category.items():
|
||||
if category_name in previous_by_category:
|
||||
previous_amount = previous_by_category[category_name]
|
||||
if previous_amount > 0:
|
||||
change_percent = ((current_amount - previous_amount) / previous_amount) * 100
|
||||
|
||||
if change_percent > 50: # 50% increase
|
||||
recommendations.append({
|
||||
'type': 'warning',
|
||||
'icon': 'trending_up',
|
||||
'color': 'yellow',
|
||||
'title': f'{category_name} Spending Up' if current_user.language == 'en' else f'Cheltuieli {category_name} în Creștere',
|
||||
'description': f'Your {category_name} spending increased by {change_percent:.0f}%. Review recent transactions.' if current_user.language == 'en' else f'Cheltuielile pentru {category_name} au crescut cu {change_percent:.0f}%. Revizuiește tranzacțiile recente.'
|
||||
})
|
||||
elif change_percent < -30: # 30% decrease
|
||||
recommendations.append({
|
||||
'type': 'success',
|
||||
'icon': 'trending_down',
|
||||
'color': 'green',
|
||||
'title': f'{category_name} Savings' if current_user.language == 'en' else f'Economii {category_name}',
|
||||
'description': f'Great job! You reduced {category_name} spending by {abs(change_percent):.0f}%.' if current_user.language == 'en' else f'Foarte bine! Ai redus cheltuielile pentru {category_name} cu {abs(change_percent):.0f}%.'
|
||||
})
|
||||
|
||||
# Recommendation 3: Unusual transactions
|
||||
if current_expenses:
|
||||
category_averages = {}
|
||||
for category_name, amount in current_by_category.items():
|
||||
count = sum(1 for exp in current_expenses if exp.category.name == category_name)
|
||||
category_averages[category_name] = amount / count if count > 0 else 0
|
||||
|
||||
for exp in current_expenses[-10:]: # Check last 10 transactions
|
||||
category_avg = category_averages.get(exp.category.name, 0)
|
||||
if category_avg > 0 and exp.amount > category_avg * 2: # 200% of average
|
||||
recommendations.append({
|
||||
'type': 'info',
|
||||
'icon': 'info',
|
||||
'color': 'blue',
|
||||
'title': 'Unusual Transaction' if current_user.language == 'en' else 'Tranzacție Neobișnuită',
|
||||
'description': f'A transaction of {exp.amount:.2f} {current_user.currency} in {exp.category.name} is higher than usual.' if current_user.language == 'en' else f'O tranzacție de {exp.amount:.2f} {current_user.currency} în {exp.category.name} este mai mare decât de obicei.'
|
||||
})
|
||||
break # Only show one unusual transaction warning
|
||||
|
||||
# Limit to top 3 recommendations
|
||||
recommendations = recommendations[:3]
|
||||
|
||||
# If no recommendations, add a positive message
|
||||
if not recommendations:
|
||||
recommendations.append({
|
||||
'type': 'success',
|
||||
'icon': 'check_circle',
|
||||
'color': 'green',
|
||||
'title': 'Spending on Track' if current_user.language == 'en' else 'Cheltuieli sub Control',
|
||||
'description': 'Your spending patterns look healthy. Keep up the good work!' if current_user.language == 'en' else 'Obiceiurile tale de cheltuieli arată bine. Continuă așa!'
|
||||
})
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'recommendations': recommendations
|
||||
})
|
||||
438
app/routes/recurring.py
Normal file
438
app/routes/recurring.py
Normal file
|
|
@ -0,0 +1,438 @@
|
|||
from flask import Blueprint, request, jsonify, current_app
|
||||
from flask_login import login_required, current_user
|
||||
from app import db
|
||||
from app.models import RecurringExpense, Expense, Category
|
||||
from datetime import datetime, timedelta
|
||||
from dateutil.relativedelta import relativedelta
|
||||
from collections import defaultdict
|
||||
import re
|
||||
|
||||
bp = Blueprint('recurring', __name__, url_prefix='/api/recurring')
|
||||
|
||||
|
||||
def calculate_next_due_date(frequency, day_of_period=None, from_date=None):
|
||||
"""Calculate next due date based on frequency"""
|
||||
base_date = from_date or datetime.utcnow()
|
||||
|
||||
if frequency == 'daily':
|
||||
return base_date + timedelta(days=1)
|
||||
elif frequency == 'weekly':
|
||||
# day_of_period is day of week (0=Monday, 6=Sunday)
|
||||
target_day = day_of_period if day_of_period is not None else base_date.weekday()
|
||||
days_ahead = target_day - base_date.weekday()
|
||||
if days_ahead <= 0:
|
||||
days_ahead += 7
|
||||
return base_date + timedelta(days=days_ahead)
|
||||
elif frequency == 'monthly':
# day_of_period is day of month (1-31)
target_day = day_of_period if day_of_period is not None else base_date.day
next_month = base_date + relativedelta(months=1)
# Clamp to the last day of the next month so days 29-31 still work in shorter months
last_day = (base_date + relativedelta(months=1, day=31)).day
return next_month.replace(day=min(target_day, last_day))
||||
elif frequency == 'yearly':
|
||||
return base_date + relativedelta(years=1)
|
||||
else:
|
||||
return base_date + timedelta(days=30)
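A short usage sketch of calculate_next_due_date. The dates are arbitrary examples, and the February result assumes the month-end clamping behavior noted above.

from datetime import datetime

# Monthly bill anchored on the 31st: from 2025-01-15 the next due date
# falls on the last day of February.
print(calculate_next_due_date('monthly', day_of_period=31,
                              from_date=datetime(2025, 1, 15)))   # 2025-02-28

# Weekly bill due on Mondays (0 = Monday), computed from a Wednesday.
print(calculate_next_due_date('weekly', day_of_period=0,
                              from_date=datetime(2025, 1, 15)))   # 2025-01-20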
|
||||
|
||||
|
||||
@bp.route('/', methods=['GET'])
|
||||
@login_required
|
||||
def get_recurring_expenses():
|
||||
"""Get all recurring expenses for current user"""
|
||||
# Security: Filter by user_id
|
||||
recurring = RecurringExpense.query.filter_by(user_id=current_user.id).order_by(
|
||||
RecurringExpense.is_active.desc(),
|
||||
RecurringExpense.next_due_date.asc()
|
||||
).all()
|
||||
|
||||
return jsonify({
|
||||
'recurring_expenses': [r.to_dict() for r in recurring]
|
||||
})
|
||||
|
||||
|
||||
@bp.route('/', methods=['POST'])
|
||||
@login_required
|
||||
def create_recurring_expense():
|
||||
"""Create a new recurring expense"""
|
||||
data = request.get_json()
|
||||
|
||||
# Validate required fields
|
||||
if not data or not data.get('name') or not data.get('amount') or not data.get('category_id') or not data.get('frequency'):
|
||||
return jsonify({'success': False, 'message': 'Missing required fields'}), 400
|
||||
|
||||
# Security: Verify category belongs to current user
|
||||
category = Category.query.filter_by(id=int(data.get('category_id')), user_id=current_user.id).first()
|
||||
if not category:
|
||||
return jsonify({'success': False, 'message': 'Invalid category'}), 400
|
||||
|
||||
# Validate frequency
|
||||
valid_frequencies = ['daily', 'weekly', 'monthly', 'yearly']
|
||||
frequency = data.get('frequency')
|
||||
if frequency not in valid_frequencies:
|
||||
return jsonify({'success': False, 'message': 'Invalid frequency'}), 400
|
||||
|
||||
# Calculate next due date
|
||||
day_of_period = data.get('day_of_period')
|
||||
next_due_date = data.get('next_due_date')
|
||||
|
||||
if next_due_date:
|
||||
next_due_date = datetime.fromisoformat(next_due_date)
|
||||
else:
|
||||
next_due_date = calculate_next_due_date(frequency, day_of_period)
|
||||
|
||||
# Create recurring expense
|
||||
recurring = RecurringExpense(
|
||||
name=data.get('name'),
|
||||
amount=float(data.get('amount')),
|
||||
currency=data.get('currency', current_user.currency),
|
||||
category_id=int(data.get('category_id')),
|
||||
frequency=frequency,
|
||||
day_of_period=day_of_period,
|
||||
next_due_date=next_due_date,
|
||||
auto_create=data.get('auto_create', False),
|
||||
is_active=data.get('is_active', True),
|
||||
notes=data.get('notes'),
|
||||
detected=False, # Manually created
|
||||
user_id=current_user.id
|
||||
)
|
||||
|
||||
db.session.add(recurring)
|
||||
db.session.commit()
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'recurring_expense': recurring.to_dict()
|
||||
}), 201
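An example request body a client might send to this endpoint. The field names follow the handler above; the base URL, the authenticated session, and the category_id value are assumptions for illustration only.

import requests

# Hypothetical client call; assumes an authenticated session cookie from the login flow.
session = requests.Session()
payload = {
    'name': 'Netflix',
    'amount': 15.99,
    'category_id': 3,          # must belong to the logged-in user
    'frequency': 'monthly',    # daily | weekly | monthly | yearly
    'day_of_period': 15,       # due on the 15th of each month
    'auto_create': True,
}
resp = session.post('http://localhost:5000/api/recurring/', json=payload)
print(resp.status_code, resp.json())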
|
||||
|
||||
|
||||
@bp.route('/<int:recurring_id>', methods=['PUT'])
|
||||
@login_required
|
||||
def update_recurring_expense(recurring_id):
|
||||
"""Update a recurring expense"""
|
||||
# Security: Filter by user_id
|
||||
recurring = RecurringExpense.query.filter_by(id=recurring_id, user_id=current_user.id).first()
|
||||
|
||||
if not recurring:
|
||||
return jsonify({'success': False, 'message': 'Recurring expense not found'}), 404
|
||||
|
||||
data = request.get_json()
|
||||
|
||||
# Update fields
|
||||
if data.get('name'):
|
||||
recurring.name = data.get('name')
|
||||
if data.get('amount'):
|
||||
recurring.amount = float(data.get('amount'))
|
||||
if data.get('currency'):
|
||||
recurring.currency = data.get('currency')
|
||||
if data.get('category_id'):
|
||||
# Security: Verify category belongs to current user
|
||||
category = Category.query.filter_by(id=int(data.get('category_id')), user_id=current_user.id).first()
|
||||
if not category:
|
||||
return jsonify({'success': False, 'message': 'Invalid category'}), 400
|
||||
recurring.category_id = int(data.get('category_id'))
|
||||
if data.get('frequency'):
|
||||
valid_frequencies = ['daily', 'weekly', 'monthly', 'yearly']
|
||||
if data.get('frequency') not in valid_frequencies:
|
||||
return jsonify({'success': False, 'message': 'Invalid frequency'}), 400
|
||||
recurring.frequency = data.get('frequency')
|
||||
if 'day_of_period' in data:
|
||||
recurring.day_of_period = data.get('day_of_period')
|
||||
if data.get('next_due_date'):
|
||||
recurring.next_due_date = datetime.fromisoformat(data.get('next_due_date'))
|
||||
if 'auto_create' in data:
|
||||
recurring.auto_create = data.get('auto_create')
|
||||
if 'is_active' in data:
|
||||
recurring.is_active = data.get('is_active')
|
||||
if 'notes' in data:
|
||||
recurring.notes = data.get('notes')
|
||||
|
||||
db.session.commit()
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'recurring_expense': recurring.to_dict()
|
||||
})
|
||||
|
||||
|
||||
@bp.route('/<int:recurring_id>', methods=['DELETE'])
|
||||
@login_required
|
||||
def delete_recurring_expense(recurring_id):
|
||||
"""Delete a recurring expense"""
|
||||
# Security: Filter by user_id
|
||||
recurring = RecurringExpense.query.filter_by(id=recurring_id, user_id=current_user.id).first()
|
||||
|
||||
if not recurring:
|
||||
return jsonify({'success': False, 'message': 'Recurring expense not found'}), 404
|
||||
|
||||
db.session.delete(recurring)
|
||||
db.session.commit()
|
||||
|
||||
return jsonify({'success': True, 'message': 'Recurring expense deleted'})
|
||||
|
||||
|
||||
@bp.route('/<int:recurring_id>/create-expense', methods=['POST'])
|
||||
@login_required
|
||||
def create_expense_from_recurring(recurring_id):
|
||||
"""Manually create an expense from a recurring expense"""
|
||||
# Security: Filter by user_id
|
||||
recurring = RecurringExpense.query.filter_by(id=recurring_id, user_id=current_user.id).first()
|
||||
|
||||
if not recurring:
|
||||
return jsonify({'success': False, 'message': 'Recurring expense not found'}), 404
|
||||
|
||||
# Create expense
expense = Expense(
amount=recurring.amount,
currency=recurring.currency,
description=recurring.name,
category_id=recurring.category_id,
user_id=current_user.id,
date=datetime.utcnow()
)
# Tag the expense once via the model helper (previously the same tags were
# also passed to the constructor, which duplicated this call)
expense.set_tags(['recurring', recurring.frequency])
||||
|
||||
# Update recurring expense
|
||||
recurring.last_created_date = datetime.utcnow()
|
||||
recurring.next_due_date = calculate_next_due_date(
|
||||
recurring.frequency,
|
||||
recurring.day_of_period,
|
||||
recurring.next_due_date
|
||||
)
|
||||
|
||||
db.session.add(expense)
|
||||
db.session.commit()
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'expense': expense.to_dict(),
|
||||
'recurring_expense': recurring.to_dict()
|
||||
}), 201
|
||||
|
||||
|
||||
@bp.route('/detect', methods=['POST'])
|
||||
@login_required
|
||||
def detect_recurring_patterns():
|
||||
"""
|
||||
Detect recurring expense patterns from historical expenses
|
||||
Returns suggestions for potential recurring expenses
|
||||
"""
|
||||
# Get user's expenses from last 6 months
|
||||
six_months_ago = datetime.utcnow() - relativedelta(months=6)
|
||||
expenses = Expense.query.filter(
|
||||
Expense.user_id == current_user.id,
|
||||
Expense.date >= six_months_ago
|
||||
).order_by(Expense.date.asc()).all()
|
||||
|
||||
if len(expenses) < 10:
|
||||
return jsonify({
|
||||
'suggestions': [],
|
||||
'message': 'Not enough expense history to detect patterns'
|
||||
})
|
||||
|
||||
# Group expenses by similar descriptions and amounts
|
||||
patterns = defaultdict(list)
|
||||
|
||||
for expense in expenses:
|
||||
# Normalize description (lowercase, remove numbers/special chars)
|
||||
normalized_desc = re.sub(r'[^a-z\s]', '', expense.description.lower()).strip()
|
||||
|
||||
# Create a key based on normalized description and approximate amount
|
||||
amount_bucket = round(expense.amount / 10) * 10 # Group by 10 currency units
|
||||
key = f"{normalized_desc}_{amount_bucket}_{expense.category_id}"
|
||||
|
||||
patterns[key].append(expense)
|
||||
|
||||
suggestions = []
|
||||
|
||||
# Analyze patterns
|
||||
for key, expense_list in patterns.items():
|
||||
if len(expense_list) < 3: # Need at least 3 occurrences
|
||||
continue
|
||||
|
||||
# Calculate intervals between expenses
|
||||
intervals = []
|
||||
for i in range(1, len(expense_list)):
|
||||
days_diff = (expense_list[i].date - expense_list[i-1].date).days
|
||||
intervals.append(days_diff)
|
||||
|
||||
if not intervals:
|
||||
continue
|
||||
|
||||
avg_interval = sum(intervals) / len(intervals)
|
||||
# Check variance to ensure consistency
|
||||
variance = sum((x - avg_interval) ** 2 for x in intervals) / len(intervals)
|
||||
std_dev = variance ** 0.5
|
||||
|
||||
# Determine if pattern is consistent
|
||||
if std_dev / avg_interval > 0.3: # More than 30% variance
|
||||
continue
|
||||
|
||||
# Determine frequency
|
||||
frequency = None
|
||||
day_of_period = None
|
||||
confidence = 0
|
||||
|
||||
if 25 <= avg_interval <= 35: # Monthly
|
||||
frequency = 'monthly'
|
||||
# Get most common day of month
|
||||
days = [e.date.day for e in expense_list]
|
||||
day_of_period = max(set(days), key=days.count)
|
||||
confidence = 90 - (std_dev / avg_interval * 100)
|
||||
elif 6 <= avg_interval <= 8: # Weekly
|
||||
frequency = 'weekly'
|
||||
days = [e.date.weekday() for e in expense_list]
|
||||
day_of_period = max(set(days), key=days.count)
|
||||
confidence = 85 - (std_dev / avg_interval * 100)
|
||||
elif 360 <= avg_interval <= 370: # Yearly
|
||||
frequency = 'yearly'
|
||||
confidence = 80 - (std_dev / avg_interval * 100)
|
||||
|
||||
if frequency and confidence > 60: # Only suggest if confidence > 60%
|
||||
# Use most recent expense data
|
||||
latest = expense_list[-1]
|
||||
avg_amount = sum(e.amount for e in expense_list) / len(expense_list)
|
||||
|
||||
# Check if already exists as recurring expense
|
||||
existing = RecurringExpense.query.filter_by(
|
||||
user_id=current_user.id,
|
||||
name=latest.description,
|
||||
category_id=latest.category_id
|
||||
).first()
|
||||
|
||||
if not existing:
|
||||
suggestions.append({
|
||||
'name': latest.description,
|
||||
'amount': round(avg_amount, 2),
|
||||
'currency': latest.currency,
|
||||
'category_id': latest.category_id,
|
||||
'category_name': latest.category.name,
|
||||
'category_color': latest.category.color,
|
||||
'frequency': frequency,
|
||||
'day_of_period': day_of_period,
|
||||
'confidence_score': round(confidence, 1),
|
||||
'occurrences': len(expense_list),
|
||||
'detected': True
|
||||
})
|
||||
|
||||
# Sort by confidence score
|
||||
suggestions.sort(key=lambda x: x['confidence_score'], reverse=True)
|
||||
|
||||
return jsonify({
|
||||
'suggestions': suggestions[:10], # Return top 10
|
||||
'message': f'Found {len(suggestions)} potential recurring expenses'
|
||||
})
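The detection above keys on the mean gap between occurrences and its spread. A minimal standalone sketch of the same idea, outside the route, with thresholds mirroring the ones used above:

def classify_interval(dates):
    """Guess a frequency from a sorted list of datetimes, or return None."""
    if len(dates) < 3:
        return None
    gaps = [(b - a).days for a, b in zip(dates, dates[1:])]
    avg = sum(gaps) / len(gaps)
    std = (sum((g - avg) ** 2 for g in gaps) / len(gaps)) ** 0.5
    if avg == 0 or std / avg > 0.3:      # too irregular to be a recurring expense
        return None
    if 25 <= avg <= 35:
        return 'monthly'
    if 6 <= avg <= 8:
        return 'weekly'
    if 360 <= avg <= 370:
        return 'yearly'
    return None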
|
||||
|
||||
|
||||
@bp.route('/accept-suggestion', methods=['POST'])
|
||||
@login_required
|
||||
def accept_suggestion():
|
||||
"""Accept a detected recurring expense suggestion and create it"""
|
||||
data = request.get_json()
|
||||
|
||||
if not data or not data.get('name') or not data.get('amount') or not data.get('category_id') or not data.get('frequency'):
|
||||
return jsonify({'success': False, 'message': 'Missing required fields'}), 400
|
||||
|
||||
# Security: Verify category belongs to current user
|
||||
category = Category.query.filter_by(id=int(data.get('category_id')), user_id=current_user.id).first()
|
||||
if not category:
|
||||
return jsonify({'success': False, 'message': 'Invalid category'}), 400
|
||||
|
||||
# Calculate next due date
|
||||
day_of_period = data.get('day_of_period')
|
||||
next_due_date = calculate_next_due_date(data.get('frequency'), day_of_period)
|
||||
|
||||
# Create recurring expense
|
||||
recurring = RecurringExpense(
|
||||
name=data.get('name'),
|
||||
amount=float(data.get('amount')),
|
||||
currency=data.get('currency', current_user.currency),
|
||||
category_id=int(data.get('category_id')),
|
||||
frequency=data.get('frequency'),
|
||||
day_of_period=day_of_period,
|
||||
next_due_date=next_due_date,
|
||||
auto_create=data.get('auto_create', False),
|
||||
is_active=True,
|
||||
detected=True, # Auto-detected
|
||||
confidence_score=data.get('confidence_score', 0),
|
||||
user_id=current_user.id
|
||||
)
|
||||
|
||||
db.session.add(recurring)
|
||||
db.session.commit()
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'recurring_expense': recurring.to_dict()
|
||||
}), 201
|
||||
|
||||
|
||||
@bp.route('/upcoming', methods=['GET'])
|
||||
@login_required
|
||||
def get_upcoming_recurring():
|
||||
"""Get upcoming recurring expenses (next 30 days)"""
|
||||
# Security: Filter by user_id
|
||||
thirty_days_later = datetime.utcnow() + timedelta(days=30)
|
||||
|
||||
recurring = RecurringExpense.query.filter(
|
||||
RecurringExpense.user_id == current_user.id,
|
||||
RecurringExpense.is_active == True,
|
||||
RecurringExpense.next_due_date <= thirty_days_later
|
||||
).order_by(RecurringExpense.next_due_date.asc()).all()
|
||||
|
||||
return jsonify({
|
||||
'upcoming': [r.to_dict() for r in recurring]
|
||||
})
|
||||
|
||||
|
||||
@bp.route('/process-due', methods=['POST'])
|
||||
@login_required
|
||||
def process_due_manual():
|
||||
"""
|
||||
Manually trigger processing of due recurring expenses
|
||||
Admin only for security - prevents users from spamming expense creation
|
||||
"""
|
||||
if not current_user.is_admin:
|
||||
return jsonify({'success': False, 'message': 'Unauthorized'}), 403
|
||||
|
||||
try:
|
||||
from app.scheduler import process_due_recurring_expenses
|
||||
process_due_recurring_expenses()
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'message': 'Recurring expenses processed successfully'
|
||||
})
|
||||
except Exception as e:
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'message': f'Error processing recurring expenses: {str(e)}'
|
||||
}), 500
|
||||
|
||||
|
||||
@bp.route('/sync-currency', methods=['POST'])
|
||||
@login_required
|
||||
def sync_currency():
|
||||
"""
|
||||
Sync all user's recurring expenses to use their current profile currency
|
||||
Security: Only updates current user's recurring expenses
|
||||
"""
|
||||
try:
|
||||
# Update all recurring expenses to match user's current currency
|
||||
RecurringExpense.query.filter_by(user_id=current_user.id).update(
|
||||
{'currency': current_user.currency}
|
||||
)
|
||||
db.session.commit()
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'message': 'All recurring expenses synced to your current currency'
|
||||
})
|
||||
except Exception as e:
|
||||
db.session.rollback()
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'message': f'Error syncing currency: {str(e)}'
|
||||
}), 500
|
||||
285
app/routes/search.py
Normal file
@@ -0,0 +1,285 @@
"""
|
||||
Global Search API
|
||||
Provides unified search across all app content and features
|
||||
Security: All searches filtered by user_id to prevent data leakage
|
||||
"""
|
||||
from flask import Blueprint, request, jsonify
|
||||
from flask_login import login_required, current_user
|
||||
from app.models import Expense, Document, Category, RecurringExpense, Tag
|
||||
from sqlalchemy import or_, func
|
||||
from datetime import datetime
|
||||
|
||||
bp = Blueprint('search', __name__, url_prefix='/api/search')
|
||||
|
||||
# App features/pages for navigation
|
||||
APP_FEATURES = [
|
||||
{
|
||||
'id': 'dashboard',
|
||||
'name': 'Dashboard',
|
||||
'name_ro': 'Tablou de bord',
|
||||
'description': 'View your financial overview',
|
||||
'description_ro': 'Vezi prezentarea generală financiară',
|
||||
'icon': 'dashboard',
|
||||
'url': '/dashboard',
|
||||
'keywords': ['dashboard', 'tablou', 'bord', 'overview', 'home', 'start']
|
||||
},
|
||||
{
|
||||
'id': 'transactions',
|
||||
'name': 'Transactions',
|
||||
'name_ro': 'Tranzacții',
|
||||
'description': 'Manage your expenses and transactions',
|
||||
'description_ro': 'Gestionează cheltuielile și tranzacțiile',
|
||||
'icon': 'receipt_long',
|
||||
'url': '/transactions',
|
||||
'keywords': ['transactions', 'tranzactii', 'expenses', 'cheltuieli', 'spending']
|
||||
},
|
||||
{
|
||||
'id': 'recurring',
|
||||
'name': 'Recurring Expenses',
|
||||
'name_ro': 'Cheltuieli recurente',
|
||||
'description': 'Manage subscriptions and recurring bills',
|
||||
'description_ro': 'Gestionează abonamente și facturi recurente',
|
||||
'icon': 'repeat',
|
||||
'url': '/recurring',
|
||||
'keywords': ['recurring', 'recurente', 'subscriptions', 'abonamente', 'bills', 'facturi', 'monthly']
|
||||
},
|
||||
{
|
||||
'id': 'reports',
|
||||
'name': 'Reports',
|
||||
'name_ro': 'Rapoarte',
|
||||
'description': 'View detailed financial reports',
|
||||
'description_ro': 'Vezi rapoarte financiare detaliate',
|
||||
'icon': 'analytics',
|
||||
'url': '/reports',
|
||||
'keywords': ['reports', 'rapoarte', 'analytics', 'analize', 'statistics', 'statistici']
|
||||
},
|
||||
{
|
||||
'id': 'documents',
|
||||
'name': 'Documents',
|
||||
'name_ro': 'Documente',
|
||||
'description': 'Upload and manage your documents',
|
||||
'description_ro': 'Încarcă și gestionează documentele',
|
||||
'icon': 'description',
|
||||
'url': '/documents',
|
||||
'keywords': ['documents', 'documente', 'files', 'fisiere', 'upload', 'receipts', 'chitante']
|
||||
},
|
||||
{
|
||||
'id': 'settings',
|
||||
'name': 'Settings',
|
||||
'name_ro': 'Setări',
|
||||
'description': 'Configure your account settings',
|
||||
'description_ro': 'Configurează setările contului',
|
||||
'icon': 'settings',
|
||||
'url': '/settings',
|
||||
'keywords': ['settings', 'setari', 'preferences', 'preferinte', 'account', 'cont', 'profile', 'profil']
|
||||
}
|
||||
]
|
||||
|
||||
# Admin-only features
|
||||
ADMIN_FEATURES = [
|
||||
{
|
||||
'id': 'admin',
|
||||
'name': 'Admin Panel',
|
||||
'name_ro': 'Panou Admin',
|
||||
'description': 'Manage users and system settings',
|
||||
'description_ro': 'Gestionează utilizatori și setări sistem',
|
||||
'icon': 'admin_panel_settings',
|
||||
'url': '/admin',
|
||||
'keywords': ['admin', 'administration', 'users', 'utilizatori', 'system', 'sistem']
|
||||
}
|
||||
]
|
||||
|
||||
|
||||
@bp.route('/', methods=['GET'])
|
||||
@login_required
|
||||
def global_search():
|
||||
"""
|
||||
Global search across all content and app features
|
||||
Security: All data searches filtered by current_user.id
|
||||
|
||||
Query params:
|
||||
- q: Search query string
|
||||
- limit: Max results per category (default 5)
|
||||
|
||||
Returns:
|
||||
- features: Matching app features/pages
|
||||
- expenses: Matching expenses (by description or OCR text)
|
||||
- documents: Matching documents (by filename or OCR text)
|
||||
- categories: Matching categories
|
||||
- recurring: Matching recurring expenses
|
||||
"""
|
||||
query = request.args.get('q', '').strip()
|
||||
limit = request.args.get('limit', 5, type=int)
|
||||
|
||||
if not query or len(query) < 2:
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'message': 'Query must be at least 2 characters'
|
||||
}), 400
|
||||
|
||||
results = {
|
||||
'features': [],
|
||||
'expenses': [],
|
||||
'documents': [],
|
||||
'categories': [],
|
||||
'recurring': [],
|
||||
'tags': []
|
||||
}
|
||||
|
||||
# Search app features
|
||||
query_lower = query.lower()
|
||||
for feature in APP_FEATURES:
|
||||
# Check if query matches any keyword
|
||||
if any(query_lower in keyword.lower() for keyword in feature['keywords']):
|
||||
results['features'].append({
|
||||
'id': feature['id'],
|
||||
'type': 'feature',
|
||||
'name': feature['name'],
|
||||
'name_ro': feature['name_ro'],
|
||||
'description': feature['description'],
|
||||
'description_ro': feature['description_ro'],
|
||||
'icon': feature['icon'],
|
||||
'url': feature['url']
|
||||
})
|
||||
|
||||
# Add admin features if user is admin
|
||||
if current_user.is_admin:
|
||||
for feature in ADMIN_FEATURES:
|
||||
if any(query_lower in keyword.lower() for keyword in feature['keywords']):
|
||||
results['features'].append({
|
||||
'id': feature['id'],
|
||||
'type': 'feature',
|
||||
'name': feature['name'],
|
||||
'name_ro': feature['name_ro'],
|
||||
'description': feature['description'],
|
||||
'description_ro': feature['description_ro'],
|
||||
'icon': feature['icon'],
|
||||
'url': feature['url']
|
||||
})
|
||||
|
||||
# Search expenses - Security: filter by user_id
|
||||
expense_query = Expense.query.filter_by(user_id=current_user.id)
|
||||
expense_query = expense_query.filter(
|
||||
or_(
|
||||
Expense.description.ilike(f'%{query}%'),
|
||||
Expense.receipt_ocr_text.ilike(f'%{query}%')
|
||||
)
|
||||
)
|
||||
expenses = expense_query.order_by(Expense.date.desc()).limit(limit).all()
|
||||
|
||||
for expense in expenses:
|
||||
# Check if match is from OCR text
|
||||
ocr_match = expense.receipt_ocr_text and query_lower in expense.receipt_ocr_text.lower()
|
||||
|
||||
results['expenses'].append({
|
||||
'id': expense.id,
|
||||
'type': 'expense',
|
||||
'description': expense.description,
|
||||
'amount': expense.amount,
|
||||
'currency': expense.currency,
|
||||
'category_name': expense.category.name if expense.category else None,
|
||||
'category_color': expense.category.color if expense.category else None,
|
||||
'date': expense.date.isoformat(),
|
||||
'has_receipt': bool(expense.receipt_path),
|
||||
'ocr_match': ocr_match,
|
||||
'url': '/transactions'
|
||||
})
|
||||
|
||||
# Search documents - Security: filter by user_id
|
||||
doc_query = Document.query.filter_by(user_id=current_user.id)
|
||||
doc_query = doc_query.filter(
|
||||
or_(
|
||||
Document.original_filename.ilike(f'%{query}%'),
|
||||
Document.ocr_text.ilike(f'%{query}%')
|
||||
)
|
||||
)
|
||||
documents = doc_query.order_by(Document.created_at.desc()).limit(limit).all()
|
||||
|
||||
for doc in documents:
|
||||
# Check if match is from OCR text
|
||||
ocr_match = doc.ocr_text and query_lower in doc.ocr_text.lower()
|
||||
|
||||
results['documents'].append({
|
||||
'id': doc.id,
|
||||
'type': 'document',
|
||||
'filename': doc.original_filename,
|
||||
'file_type': doc.file_type,
|
||||
'file_size': doc.file_size,
|
||||
'category': doc.document_category,
|
||||
'created_at': doc.created_at.isoformat(),
|
||||
'ocr_match': ocr_match,
|
||||
'url': '/documents'
|
||||
})
|
||||
|
||||
# Search categories - Security: filter by user_id
|
||||
categories = Category.query.filter_by(user_id=current_user.id).filter(
|
||||
Category.name.ilike(f'%{query}%')
|
||||
).order_by(Category.display_order).limit(limit).all()
|
||||
|
||||
for category in categories:
|
||||
results['categories'].append({
|
||||
'id': category.id,
|
||||
'type': 'category',
|
||||
'name': category.name,
|
||||
'color': category.color,
|
||||
'icon': category.icon,
|
||||
'url': '/transactions'
|
||||
})
|
||||
|
||||
# Search recurring expenses - Security: filter by user_id
|
||||
recurring = RecurringExpense.query.filter_by(user_id=current_user.id).filter(
|
||||
or_(
|
||||
RecurringExpense.name.ilike(f'%{query}%'),
|
||||
RecurringExpense.notes.ilike(f'%{query}%')
|
||||
)
|
||||
).order_by(RecurringExpense.next_due_date).limit(limit).all()
|
||||
|
||||
for rec in recurring:
|
||||
results['recurring'].append({
|
||||
'id': rec.id,
|
||||
'type': 'recurring',
|
||||
'name': rec.name,
|
||||
'amount': rec.amount,
|
||||
'currency': rec.currency,
|
||||
'frequency': rec.frequency,
|
||||
'category_name': rec.category.name if rec.category else None,
|
||||
'category_color': rec.category.color if rec.category else None,
|
||||
'next_due_date': rec.next_due_date.isoformat(),
|
||||
'is_active': rec.is_active,
|
||||
'url': '/recurring'
|
||||
})
|
||||
|
||||
# Search tags
|
||||
# Security: Filtered by user_id
|
||||
tags = Tag.query.filter(
|
||||
Tag.user_id == current_user.id,
|
||||
Tag.name.ilike(f'%{query}%')
|
||||
).limit(limit).all()
|
||||
|
||||
for tag in tags:
|
||||
results['tags'].append({
|
||||
'id': tag.id,
|
||||
'type': 'tag',
|
||||
'name': tag.name,
|
||||
'color': tag.color,
|
||||
'icon': tag.icon,
|
||||
'use_count': tag.use_count,
|
||||
'is_auto': tag.is_auto
|
||||
})
|
||||
|
||||
# Calculate total results
|
||||
total_results = sum([
|
||||
len(results['features']),
|
||||
len(results['expenses']),
|
||||
len(results['documents']),
|
||||
len(results['categories']),
|
||||
len(results['recurring']),
|
||||
len(results['tags'])
|
||||
])
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'query': query,
|
||||
'total_results': total_results,
|
||||
'results': results
|
||||
})
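A sketch of how a client might query this endpoint; the base URL and the authenticated session are assumptions, the query parameters and response keys come from the handler above.

import requests

session = requests.Session()  # assumes the login cookie has already been set
resp = session.get('http://localhost:5000/api/search/',
                   params={'q': 'netflix', 'limit': 5})
data = resp.json()
print(data['total_results'])
for hit in data['results']['expenses']:
    print(hit['date'], hit['amount'], hit['currency'], hit['description'])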
|
||||
253
app/routes/settings.py
Normal file
@@ -0,0 +1,253 @@
from flask import Blueprint, request, jsonify, current_app
|
||||
from flask_login import login_required, current_user
|
||||
from werkzeug.utils import secure_filename
|
||||
from app import db, bcrypt
|
||||
from app.models import User
|
||||
import os
|
||||
from datetime import datetime
|
||||
|
||||
bp = Blueprint('settings', __name__, url_prefix='/api/settings')
|
||||
|
||||
# Allowed avatar image types
|
||||
ALLOWED_AVATAR_TYPES = {'png', 'jpg', 'jpeg', 'gif', 'webp'}
|
||||
MAX_AVATAR_SIZE = 20 * 1024 * 1024 # 20MB
|
||||
|
||||
def allowed_avatar(filename):
|
||||
"""Check if file extension is allowed for avatars"""
|
||||
return '.' in filename and filename.rsplit('.', 1)[1].lower() in ALLOWED_AVATAR_TYPES
|
||||
|
||||
|
||||
@bp.route('/profile', methods=['GET'])
|
||||
@login_required
|
||||
def get_profile():
|
||||
"""
|
||||
Get current user profile information
|
||||
Security: Returns only current user's data
|
||||
"""
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'profile': {
|
||||
'username': current_user.username,
|
||||
'email': current_user.email,
|
||||
'language': current_user.language,
|
||||
'currency': current_user.currency,
|
||||
'monthly_budget': current_user.monthly_budget or 0,
|
||||
'avatar': current_user.avatar,
|
||||
'is_admin': current_user.is_admin,
|
||||
'two_factor_enabled': current_user.two_factor_enabled,
|
||||
'created_at': current_user.created_at.isoformat()
|
||||
}
|
||||
})
|
||||
|
||||
|
||||
@bp.route('/profile', methods=['PUT'])
|
||||
@login_required
|
||||
def update_profile():
|
||||
"""
|
||||
Update user profile information
|
||||
Security: Updates only current user's profile
|
||||
"""
|
||||
data = request.get_json()
|
||||
|
||||
if not data:
|
||||
return jsonify({'success': False, 'error': 'No data provided'}), 400
|
||||
|
||||
try:
|
||||
# Update language
|
||||
if 'language' in data:
|
||||
if data['language'] in ['en', 'ro']:
|
||||
current_user.language = data['language']
|
||||
else:
|
||||
return jsonify({'success': False, 'error': 'Invalid language'}), 400
|
||||
|
||||
# Update currency
|
||||
if 'currency' in data:
|
||||
current_user.currency = data['currency']
|
||||
|
||||
# Update monthly budget
|
||||
if 'monthly_budget' in data:
|
||||
try:
|
||||
budget = float(data['monthly_budget'])
|
||||
if budget < 0:
return jsonify({'success': False, 'error': 'Budget cannot be negative'}), 400
|
||||
current_user.monthly_budget = budget
|
||||
except (ValueError, TypeError):
|
||||
return jsonify({'success': False, 'error': 'Invalid budget value'}), 400
|
||||
|
||||
# Update username (check uniqueness)
|
||||
if 'username' in data and data['username'] != current_user.username:
|
||||
existing = User.query.filter_by(username=data['username']).first()
|
||||
if existing:
|
||||
return jsonify({'success': False, 'error': 'Username already taken'}), 400
|
||||
current_user.username = data['username']
|
||||
|
||||
# Update email (check uniqueness)
|
||||
if 'email' in data and data['email'] != current_user.email:
|
||||
existing = User.query.filter_by(email=data['email']).first()
|
||||
if existing:
|
||||
return jsonify({'success': False, 'error': 'Email already taken'}), 400
|
||||
current_user.email = data['email']
|
||||
|
||||
db.session.commit()
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'message': 'Profile updated successfully',
|
||||
'profile': {
|
||||
'username': current_user.username,
|
||||
'email': current_user.email,
|
||||
'language': current_user.language,
|
||||
'currency': current_user.currency,
|
||||
'monthly_budget': current_user.monthly_budget,
|
||||
'avatar': current_user.avatar
|
||||
}
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
db.session.rollback()
|
||||
return jsonify({'success': False, 'error': str(e)}), 500
|
||||
|
||||
|
||||
@bp.route('/avatar', methods=['POST'])
|
||||
@login_required
|
||||
def upload_avatar():
|
||||
"""
|
||||
Upload custom avatar image
|
||||
Security: Associates avatar with current_user.id, validates file type and size
|
||||
"""
|
||||
if 'avatar' not in request.files:
|
||||
return jsonify({'success': False, 'error': 'No file provided'}), 400
|
||||
|
||||
file = request.files['avatar']
|
||||
|
||||
if not file or not file.filename:
|
||||
return jsonify({'success': False, 'error': 'No file selected'}), 400
|
||||
|
||||
if not allowed_avatar(file.filename):
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': 'Invalid file type. Allowed: PNG, JPG, JPEG, GIF, WEBP'
|
||||
}), 400
|
||||
|
||||
# Check file size
|
||||
file.seek(0, os.SEEK_END)
|
||||
file_size = file.tell()
|
||||
file.seek(0)
|
||||
|
||||
if file_size > MAX_AVATAR_SIZE:
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': f'File too large. Maximum size: {MAX_AVATAR_SIZE // (1024*1024)}MB'
|
||||
}), 400
|
||||
|
||||
try:
|
||||
# Delete old custom avatar if exists (not default avatars)
|
||||
if current_user.avatar and not current_user.avatar.startswith('icons/avatars/'):
|
||||
old_path = os.path.join(current_app.root_path, 'static', current_user.avatar)
|
||||
if os.path.exists(old_path):
|
||||
os.remove(old_path)
|
||||
|
||||
# Generate secure filename
|
||||
file_ext = file.filename.rsplit('.', 1)[1].lower()
|
||||
timestamp = int(datetime.utcnow().timestamp())
|
||||
filename = f"user_{current_user.id}_{timestamp}.{file_ext}"
|
||||
|
||||
# Create avatars directory in uploads
|
||||
avatars_dir = os.path.join(current_app.config['UPLOAD_FOLDER'], 'avatars')
|
||||
os.makedirs(avatars_dir, exist_ok=True)
|
||||
|
||||
# Save file
|
||||
file_path = os.path.join(avatars_dir, filename)
|
||||
file.save(file_path)
|
||||
|
||||
# Update user avatar (store relative path from static folder)
|
||||
current_user.avatar = f"uploads/avatars/{filename}"
|
||||
db.session.commit()
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'message': 'Avatar uploaded successfully',
|
||||
'avatar': current_user.avatar
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
db.session.rollback()
|
||||
return jsonify({'success': False, 'error': str(e)}), 500
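A sketch of the corresponding multipart upload from a client. The form field name 'avatar' matches the handler above; the base URL, file name, and session handling are assumptions.

import requests

session = requests.Session()  # assumes an authenticated session cookie
with open('me.png', 'rb') as fh:
    resp = session.post('http://localhost:5000/api/settings/avatar',
                        files={'avatar': ('me.png', fh, 'image/png')})
print(resp.json())  # on success: avatar path like 'uploads/avatars/user_<id>_<ts>.png'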
|
||||
|
||||
|
||||
@bp.route('/avatar/default', methods=['PUT'])
|
||||
@login_required
|
||||
def set_default_avatar():
|
||||
"""
|
||||
Set avatar to one of the default avatars
|
||||
Security: Updates only current user's avatar
|
||||
"""
|
||||
data = request.get_json()
|
||||
|
||||
if not data or 'avatar' not in data:
|
||||
return jsonify({'success': False, 'error': 'Avatar path required'}), 400
|
||||
|
||||
avatar_path = data['avatar']
|
||||
|
||||
# Validate it's a default avatar
|
||||
if not avatar_path.startswith('icons/avatars/avatar-'):
|
||||
return jsonify({'success': False, 'error': 'Invalid avatar selection'}), 400
|
||||
|
||||
try:
|
||||
# Delete old custom avatar if exists (not default avatars)
|
||||
if current_user.avatar and not current_user.avatar.startswith('icons/avatars/'):
|
||||
old_path = os.path.join(current_app.root_path, 'static', current_user.avatar)
|
||||
if os.path.exists(old_path):
|
||||
os.remove(old_path)
|
||||
|
||||
current_user.avatar = avatar_path
|
||||
db.session.commit()
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'message': 'Avatar updated successfully',
|
||||
'avatar': current_user.avatar
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
db.session.rollback()
|
||||
return jsonify({'success': False, 'error': str(e)}), 500
|
||||
|
||||
|
||||
@bp.route('/password', methods=['PUT'])
|
||||
@login_required
|
||||
def change_password():
|
||||
"""
|
||||
Change user password
|
||||
Security: Requires current password verification
|
||||
"""
|
||||
data = request.get_json()
|
||||
|
||||
if not data:
|
||||
return jsonify({'success': False, 'error': 'No data provided'}), 400
|
||||
|
||||
current_password = data.get('current_password')
|
||||
new_password = data.get('new_password')
|
||||
|
||||
if not current_password or not new_password:
|
||||
return jsonify({'success': False, 'error': 'Current and new password required'}), 400
|
||||
|
||||
# Verify current password
|
||||
if not bcrypt.check_password_hash(current_user.password_hash, current_password):
|
||||
return jsonify({'success': False, 'error': 'Current password is incorrect'}), 400
|
||||
|
||||
if len(new_password) < 6:
|
||||
return jsonify({'success': False, 'error': 'Password must be at least 6 characters'}), 400
|
||||
|
||||
try:
|
||||
current_user.password_hash = bcrypt.generate_password_hash(new_password).decode('utf-8')
|
||||
db.session.commit()
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'message': 'Password changed successfully'
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
db.session.rollback()
|
||||
return jsonify({'success': False, 'error': str(e)}), 500
|
||||
322
app/routes/tags.py
Normal file
@@ -0,0 +1,322 @@
"""
|
||||
Tags API Routes
|
||||
Manage smart tags for expenses with auto-tagging capabilities
|
||||
Security: All operations filtered by user_id
|
||||
"""
|
||||
from flask import Blueprint, request, jsonify
|
||||
from flask_login import login_required, current_user
|
||||
from app import db
|
||||
from app.models import Tag, Expense, ExpenseTag
|
||||
from sqlalchemy import func, desc
|
||||
import re
|
||||
|
||||
bp = Blueprint('tags', __name__, url_prefix='/api/tags')
|
||||
|
||||
|
||||
@bp.route('/', methods=['GET'])
|
||||
@login_required
|
||||
def get_tags():
|
||||
"""
|
||||
Get all tags for current user
|
||||
Security: Filtered by user_id
|
||||
"""
|
||||
# Get sort and filter parameters
|
||||
sort_by = request.args.get('sort_by', 'use_count') # use_count, name, created_at
|
||||
order = request.args.get('order', 'desc') # asc, desc
|
||||
|
||||
# Base query filtered by user
|
||||
query = Tag.query.filter_by(user_id=current_user.id)
|
||||
|
||||
# Apply sorting
|
||||
if sort_by == 'use_count':
|
||||
query = query.order_by(Tag.use_count.desc() if order == 'desc' else Tag.use_count.asc())
|
||||
elif sort_by == 'name':
|
||||
query = query.order_by(Tag.name.asc() if order == 'asc' else Tag.name.desc())
|
||||
else: # created_at
|
||||
query = query.order_by(Tag.created_at.desc() if order == 'desc' else Tag.created_at.asc())
|
||||
|
||||
tags = query.all()
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'tags': [tag.to_dict() for tag in tags]
|
||||
})
|
||||
|
||||
|
||||
@bp.route('/', methods=['POST'])
|
||||
@login_required
|
||||
def create_tag():
|
||||
"""
|
||||
Create a new tag
|
||||
Security: Only creates for current_user
|
||||
"""
|
||||
data = request.get_json()
|
||||
|
||||
if not data or not data.get('name'):
|
||||
return jsonify({'success': False, 'message': 'Tag name is required'}), 400
|
||||
|
||||
# Sanitize and validate input
|
||||
name = str(data.get('name')).strip().lower()[:50]
|
||||
|
||||
# Validate name format (alphanumeric, hyphens, underscores only)
|
||||
if not re.match(r'^[a-z0-9\-_]+$', name):
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'message': 'Tag name can only contain letters, numbers, hyphens, and underscores'
|
||||
}), 400
|
||||
|
||||
# Check if tag already exists for this user
|
||||
existing_tag = Tag.query.filter_by(user_id=current_user.id, name=name).first()
|
||||
if existing_tag:
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'message': 'Tag already exists',
|
||||
'tag': existing_tag.to_dict()
|
||||
}), 409
|
||||
|
||||
# Sanitize color and icon
|
||||
color = str(data.get('color', '#6366f1')).strip()[:7]
|
||||
if not re.match(r'^#[0-9a-fA-F]{6}$', color):
|
||||
color = '#6366f1'
|
||||
|
||||
icon = str(data.get('icon', 'label')).strip()[:50]
|
||||
if not re.match(r'^[a-z0-9_]+$', icon):
|
||||
icon = 'label'
|
||||
|
||||
# Create tag
|
||||
tag = Tag(
|
||||
name=name,
|
||||
color=color,
|
||||
icon=icon,
|
||||
user_id=current_user.id,
|
||||
is_auto=False
|
||||
)
|
||||
|
||||
db.session.add(tag)
|
||||
db.session.commit()
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'tag': tag.to_dict()
|
||||
}), 201
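The name handling above (strip, lowercase, 50-character cap, restricted character set) can be read as a small standalone helper; this sketch mirrors that logic for illustration.

import re

def normalize_tag_name(raw):
    """Lowercase, max 50 chars, letters/digits/hyphens/underscores only."""
    name = str(raw).strip().lower()[:50]
    if not re.match(r'^[a-z0-9\-_]+$', name):
        raise ValueError('Tag name can only contain letters, numbers, hyphens, and underscores')
    return name

print(normalize_tag_name('Coffee-Shops'))   # 'coffee-shops'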
|
||||
|
||||
|
||||
@bp.route('/<int:tag_id>', methods=['PUT'])
|
||||
@login_required
|
||||
def update_tag(tag_id):
|
||||
"""
|
||||
Update a tag
|
||||
Security: Only owner can update
|
||||
"""
|
||||
tag = Tag.query.filter_by(id=tag_id, user_id=current_user.id).first()
|
||||
|
||||
if not tag:
|
||||
return jsonify({'success': False, 'message': 'Tag not found'}), 404
|
||||
|
||||
data = request.get_json()
|
||||
|
||||
# Update name if provided
|
||||
if data.get('name'):
|
||||
name = str(data.get('name')).strip().lower()[:50]
|
||||
if not re.match(r'^[a-z0-9\-_]+$', name):
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'message': 'Tag name can only contain letters, numbers, hyphens, and underscores'
|
||||
}), 400
|
||||
|
||||
# Check for duplicate name (excluding current tag)
|
||||
existing = Tag.query.filter(
|
||||
Tag.user_id == current_user.id,
|
||||
Tag.name == name,
|
||||
Tag.id != tag_id
|
||||
).first()
|
||||
|
||||
if existing:
|
||||
return jsonify({'success': False, 'message': 'Tag name already exists'}), 409
|
||||
|
||||
tag.name = name
|
||||
|
||||
# Update color if provided
|
||||
if data.get('color'):
|
||||
color = str(data.get('color')).strip()[:7]
|
||||
if re.match(r'^#[0-9a-fA-F]{6}$', color):
|
||||
tag.color = color
|
||||
|
||||
# Update icon if provided
|
||||
if data.get('icon'):
|
||||
icon = str(data.get('icon')).strip()[:50]
|
||||
if re.match(r'^[a-z0-9_]+$', icon):
|
||||
tag.icon = icon
|
||||
|
||||
db.session.commit()
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'tag': tag.to_dict()
|
||||
})
|
||||
|
||||
|
||||
@bp.route('/<int:tag_id>', methods=['DELETE'])
|
||||
@login_required
|
||||
def delete_tag(tag_id):
|
||||
"""
|
||||
Delete a tag
|
||||
Security: Only owner can delete
|
||||
Note: This will also remove all associations with expenses (CASCADE)
|
||||
"""
|
||||
tag = Tag.query.filter_by(id=tag_id, user_id=current_user.id).first()
|
||||
|
||||
if not tag:
|
||||
return jsonify({'success': False, 'message': 'Tag not found'}), 404
|
||||
|
||||
db.session.delete(tag)
|
||||
db.session.commit()
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'message': 'Tag deleted successfully'
|
||||
})
|
||||
|
||||
|
||||
@bp.route('/suggest', methods=['POST'])
|
||||
@login_required
|
||||
def suggest_tags():
|
||||
"""
|
||||
Suggest tags based on text (description, OCR, etc.)
|
||||
Security: Only processes for current user
|
||||
"""
|
||||
data = request.get_json()
|
||||
|
||||
if not data or not data.get('text'):
|
||||
return jsonify({'success': False, 'message': 'Text is required'}), 400
|
||||
|
||||
from app.utils.auto_tagger import extract_tags_from_text
|
||||
|
||||
text = str(data.get('text'))
|
||||
max_tags = data.get('max_tags', 5)
|
||||
|
||||
suggested_tags = extract_tags_from_text(text, max_tags=max_tags)
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'suggested_tags': suggested_tags
|
||||
})
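This route delegates to app.utils.auto_tagger.extract_tags_from_text, which is not shown in this commit excerpt. Purely as an assumption about its behavior, a keyword-frequency approach might look like the sketch below; the real implementation may differ.

import re
from collections import Counter

STOPWORDS = {'the', 'and', 'for', 'with', 'from', 'this', 'that'}

def extract_tags_from_text(text, max_tags=5):
    """Illustrative only: pick the most frequent meaningful words as tag candidates."""
    words = re.findall(r'[a-z]{3,}', text.lower())
    counts = Counter(w for w in words if w not in STOPWORDS)
    return [word for word, _ in counts.most_common(max_tags)]

print(extract_tags_from_text('Monthly Netflix subscription payment'))
# e.g. ['monthly', 'netflix', 'subscription', 'payment']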
|
||||
|
||||
|
||||
@bp.route('/popular', methods=['GET'])
|
||||
@login_required
|
||||
def get_popular_tags():
|
||||
"""
|
||||
Get most popular tags for current user
|
||||
Security: Filtered by user_id
|
||||
"""
|
||||
limit = request.args.get('limit', 10, type=int)
|
||||
|
||||
tags = Tag.query.filter_by(user_id=current_user.id)\
|
||||
.filter(Tag.use_count > 0)\
|
||||
.order_by(Tag.use_count.desc())\
|
||||
.limit(limit)\
|
||||
.all()
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'tags': [tag.to_dict() for tag in tags]
|
||||
})
|
||||
|
||||
|
||||
@bp.route('/stats', methods=['GET'])
|
||||
@login_required
|
||||
def get_tag_stats():
|
||||
"""
|
||||
Get tag usage statistics
|
||||
Security: Filtered by user_id
|
||||
"""
|
||||
# Total tags count
|
||||
total_tags = Tag.query.filter_by(user_id=current_user.id).count()
|
||||
|
||||
# Auto-generated tags count
|
||||
auto_tags = Tag.query.filter_by(user_id=current_user.id, is_auto=True).count()
|
||||
|
||||
# Total tag uses across all expenses
|
||||
total_uses = db.session.query(func.sum(Tag.use_count))\
|
||||
.filter(Tag.user_id == current_user.id)\
|
||||
.scalar() or 0
|
||||
|
||||
# Most used tag
|
||||
most_used_tag = Tag.query.filter_by(user_id=current_user.id)\
|
||||
.filter(Tag.use_count > 0)\
|
||||
.order_by(Tag.use_count.desc())\
|
||||
.first()
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'stats': {
|
||||
'total_tags': total_tags,
|
||||
'auto_generated_tags': auto_tags,
|
||||
'manual_tags': total_tags - auto_tags,
|
||||
'total_uses': int(total_uses),
|
||||
'most_used_tag': most_used_tag.to_dict() if most_used_tag else None
|
||||
}
|
||||
})
|
||||
|
||||
|
||||
@bp.route('/bulk-create', methods=['POST'])
|
||||
@login_required
|
||||
def bulk_create_tags():
|
||||
"""
|
||||
Create multiple tags at once
|
||||
Security: Only creates for current_user
|
||||
"""
|
||||
data = request.get_json()
|
||||
|
||||
if not data or not data.get('tags') or not isinstance(data.get('tags'), list):
|
||||
return jsonify({'success': False, 'message': 'Tags array is required'}), 400
|
||||
|
||||
created_tags = []
|
||||
errors = []
|
||||
|
||||
for tag_data in data.get('tags'):
|
||||
try:
|
||||
name = str(tag_data.get('name', '')).strip().lower()[:50]
|
||||
|
||||
if not name or not re.match(r'^[a-z0-9\-_]+$', name):
|
||||
errors.append(f"Invalid tag name: {tag_data.get('name')}")
|
||||
continue
|
||||
|
||||
# Check if already exists
|
||||
existing = Tag.query.filter_by(user_id=current_user.id, name=name).first()
|
||||
if existing:
|
||||
created_tags.append(existing.to_dict())
|
||||
continue
|
||||
|
||||
# Validate color and icon
|
||||
color = str(tag_data.get('color', '#6366f1')).strip()[:7]
|
||||
if not re.match(r'^#[0-9a-fA-F]{6}$', color):
|
||||
color = '#6366f1'
|
||||
|
||||
icon = str(tag_data.get('icon', 'label')).strip()[:50]
|
||||
if not re.match(r'^[a-z0-9_]+$', icon):
|
||||
icon = 'label'
|
||||
|
||||
tag = Tag(
|
||||
name=name,
|
||||
color=color,
|
||||
icon=icon,
|
||||
user_id=current_user.id,
|
||||
is_auto=tag_data.get('is_auto', False)
|
||||
)
|
||||
|
||||
db.session.add(tag)
|
||||
created_tags.append(tag.to_dict())
|
||||
|
||||
except Exception as e:
|
||||
errors.append(f"Error creating tag {tag_data.get('name')}: {str(e)}")
|
||||
|
||||
db.session.commit()
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'created': len(created_tags),
|
||||
'tags': created_tags,
|
||||
'errors': errors
|
||||
})
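An example bulk-create payload a client might send; field names follow the handler above, while the base URL and session handling are assumptions.

import requests

session = requests.Session()  # assumes an authenticated session cookie
payload = {'tags': [
    {'name': 'coffee', 'color': '#8d6e63', 'icon': 'local_cafe'},
    {'name': 'work-lunch'},        # color/icon fall back to defaults
    {'name': 'Bad Name!'},         # fails validation, reported in "errors"
]}
resp = session.post('http://localhost:5000/api/tags/bulk-create', json=payload)
print(resp.json()['created'], resp.json()['errors'])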