diff --git a/.env.example b/.env.example index d7b1933..376d5dd 100644 --- a/.env.example +++ b/.env.example @@ -18,3 +18,9 @@ SQL_PORT=5432 # Gunicorn WEB_CONCURRENCY=4 + +# App Configs +# Enable this if you want to keep deleted transactions in the database +ENABLE_SOFT_DELETE=false +# If ENABLE_SOFT_DELETE is true, transactions deleted for more than KEEP_DELETED_TRANSACTIONS_FOR days will be truly deleted. Set to 0 to keep all. +KEEP_DELETED_TRANSACTIONS_FOR=365 diff --git a/app/WYGIWYH/settings.py b/app/WYGIWYH/settings.py index b2eeba1..9b0c7a8 100644 --- a/app/WYGIWYH/settings.py +++ b/app/WYGIWYH/settings.py @@ -64,6 +64,7 @@ INSTALLED_APPS = [ "apps.accounts.apps.AccountsConfig", "apps.common.apps.CommonConfig", "apps.net_worth.apps.NetWorthConfig", + "apps.import_app.apps.ImportConfig", "apps.api.apps.ApiConfig", "cachalot", "rest_framework", @@ -72,6 +73,7 @@ INSTALLED_APPS = [ "apps.rules.apps.RulesConfig", "apps.calendar_view.apps.CalendarViewConfig", "apps.dca.apps.DcaConfig", + "pwa", ] MIDDLEWARE = [ @@ -335,3 +337,47 @@ else: } CACHALOT_UNCACHABLE_TABLES = ("django_migrations", "procrastinate_jobs") + + +# PWA +PWA_APP_NAME = SITE_TITLE +PWA_APP_DESCRIPTION = "A simple and powerful finance tracker" +PWA_APP_THEME_COLOR = "#fbb700" +PWA_APP_BACKGROUND_COLOR = "#222222" +PWA_APP_DISPLAY = "standalone" +PWA_APP_SCOPE = "/" +PWA_APP_ORIENTATION = "any" +PWA_APP_START_URL = "/" +PWA_APP_STATUS_BAR_COLOR = "default" +PWA_APP_ICONS = [ + {"src": "/static/img/favicon/android-icon-192x192.png", "sizes": "192x192"} +] +PWA_APP_ICONS_APPLE = [ + {"src": "/static/img/favicon/apple-icon-180x180.png", "sizes": "180x180"} +] +PWA_APP_SPLASH_SCREEN = [ + { + "src": "/static/img/pwa/splash-640x1136.png", + "media": "(device-width: 320px) and (device-height: 568px) and (-webkit-device-pixel-ratio: 2)", + } +] +PWA_APP_DIR = "ltr" +PWA_APP_LANG = "en-US" +PWA_APP_SHORTCUTS = [] +PWA_APP_SCREENSHOTS = [ + { + "src": "/static/img/pwa/splash-750x1334.png", + "sizes": 
"750x1334", + "type": "image/png", + "form_factor": "wide", + }, + { + "src": "/static/img/pwa/splash-750x1334.png", + "sizes": "750x1334", + "type": "image/png", + }, +] +PWA_SERVICE_WORKER_PATH = BASE_DIR / "templates" / "pwa" / "serviceworker.js" + +ENABLE_SOFT_DELETE = os.getenv("ENABLE_SOFT_DELETE", "false").lower() == "true" +KEEP_DELETED_TRANSACTIONS_FOR = int(os.getenv("KEEP_DELETED_ENTRIES_FOR", "365")) diff --git a/app/WYGIWYH/urls.py b/app/WYGIWYH/urls.py index 5a465a5..d2d2d5d 100644 --- a/app/WYGIWYH/urls.py +++ b/app/WYGIWYH/urls.py @@ -27,6 +27,7 @@ urlpatterns = [ path("hijack/", include("hijack.urls")), path("__debug__/", include("debug_toolbar.urls")), path("__reload__/", include("django_browser_reload.urls")), + path("", include("pwa.urls")), # path("api/", include("rest_framework.urls")), path("api/", include("apps.api.urls")), path("api/schema/", SpectacularAPIView.as_view(), name="schema"), @@ -47,4 +48,5 @@ urlpatterns = [ path("", include("apps.calendar_view.urls")), path("", include("apps.dca.urls")), path("", include("apps.mini_tools.urls")), + path("", include("apps.import_app.urls")), ] diff --git a/app/apps/accounts/migrations/0007_make_account_names_unique.py b/app/apps/accounts/migrations/0007_make_account_names_unique.py new file mode 100644 index 0000000..e570246 --- /dev/null +++ b/app/apps/accounts/migrations/0007_make_account_names_unique.py @@ -0,0 +1,38 @@ +from django.db import migrations, models + + +def make_names_unique(apps, schema_editor): + Account = apps.get_model("accounts", "Account") + + # Get all accounts ordered by id + accounts = Account.objects.all().order_by("id") + + # Track seen names + seen_names = {} + + for account in accounts: + original_name = account.name + counter = seen_names.get(original_name, 0) + + while account.name in seen_names: + counter += 1 + account.name = f"{original_name} ({counter})" + + seen_names[account.name] = counter + account.save() + + +def reverse_migration(apps, schema_editor): + 
# Can't restore original names, so do nothing + pass + + +class Migration(migrations.Migration): + + dependencies = [ + ("accounts", "0006_rename_archived_account_is_archived_and_more"), + ] + + operations = [ + migrations.RunPython(make_names_unique, reverse_migration), + ] diff --git a/app/apps/accounts/migrations/0008_alter_account_name.py b/app/apps/accounts/migrations/0008_alter_account_name.py new file mode 100644 index 0000000..a6a5cfc --- /dev/null +++ b/app/apps/accounts/migrations/0008_alter_account_name.py @@ -0,0 +1,18 @@ +# Generated by Django 5.1.5 on 2025-01-24 00:54 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('accounts', '0007_make_account_names_unique'), + ] + + operations = [ + migrations.AlterField( + model_name='account', + name='name', + field=models.CharField(max_length=255, unique=True, verbose_name='Name'), + ), + ] diff --git a/app/apps/accounts/models.py b/app/apps/accounts/models.py index 7e0c824..eed0cd5 100644 --- a/app/apps/accounts/models.py +++ b/app/apps/accounts/models.py @@ -18,7 +18,7 @@ class AccountGroup(models.Model): class Account(models.Model): - name = models.CharField(max_length=255, verbose_name=_("Name")) + name = models.CharField(max_length=255, verbose_name=_("Name"), unique=True) group = models.ForeignKey( AccountGroup, on_delete=models.SET_NULL, diff --git a/app/apps/common/templatetags/json.py b/app/apps/common/templatetags/json.py new file mode 100644 index 0000000..8fb45e2 --- /dev/null +++ b/app/apps/common/templatetags/json.py @@ -0,0 +1,11 @@ +import json + +from django import template + + +register = template.Library() + + +@register.filter("json") +def convert_to_json(value): + return json.dumps(value) diff --git a/app/apps/import_app/__init__.py b/app/apps/import_app/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/apps/import_app/admin.py b/app/apps/import_app/admin.py new file mode 100644 index 0000000..cbccf2b --- 
/dev/null +++ b/app/apps/import_app/admin.py @@ -0,0 +1,6 @@ +from django.contrib import admin +from apps.import_app import models + +# Register your models here. +admin.site.register(models.ImportRun) +admin.site.register(models.ImportProfile) diff --git a/app/apps/import_app/apps.py b/app/apps/import_app/apps.py new file mode 100644 index 0000000..4dbe90c --- /dev/null +++ b/app/apps/import_app/apps.py @@ -0,0 +1,6 @@ +from django.apps import AppConfig + + +class ImportConfig(AppConfig): + default_auto_field = "django.db.models.BigAutoField" + name = "apps.import_app" diff --git a/app/apps/import_app/forms.py b/app/apps/import_app/forms.py new file mode 100644 index 0000000..83eb6c4 --- /dev/null +++ b/app/apps/import_app/forms.py @@ -0,0 +1,64 @@ +from crispy_forms.bootstrap import FormActions +from crispy_forms.helper import FormHelper +from crispy_forms.layout import ( + Layout, +) +from django import forms +from django.utils.translation import gettext_lazy as _ + +from apps.import_app.models import ImportProfile +from apps.common.widgets.crispy.submit import NoClassSubmit + + +class ImportProfileForm(forms.ModelForm): + class Meta: + model = ImportProfile + fields = [ + "name", + "version", + "yaml_config", + ] + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + self.helper = FormHelper() + self.helper.form_tag = False + self.helper.form_method = "post" + self.helper.layout = Layout("name", "version", "yaml_config") + + if self.instance and self.instance.pk: + self.helper.layout.append( + FormActions( + NoClassSubmit( + "submit", _("Update"), css_class="btn btn-outline-primary w-100" + ), + ), + ) + else: + self.helper.layout.append( + FormActions( + NoClassSubmit( + "submit", _("Add"), css_class="btn btn-outline-primary w-100" + ), + ), + ) + + +class ImportRunFileUploadForm(forms.Form): + file = forms.FileField(label=_("Select a file")) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + self.helper 
= FormHelper() + self.helper.form_tag = False + self.helper.form_method = "post" + self.helper.layout = Layout( + "file", + FormActions( + NoClassSubmit( + "submit", _("Import"), css_class="btn btn-outline-primary w-100" + ), + ), + ) diff --git a/app/apps/import_app/migrations/0001_initial.py b/app/apps/import_app/migrations/0001_initial.py new file mode 100644 index 0000000..bcce0fe --- /dev/null +++ b/app/apps/import_app/migrations/0001_initial.py @@ -0,0 +1,51 @@ +# Generated by Django 5.1.5 on 2025-01-19 00:44 + +import django.db.models.deletion +from django.db import migrations, models + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ('currencies', '0006_currency_exchange_currency'), + ('transactions', '0028_transaction_internal_note'), + ] + + operations = [ + migrations.CreateModel( + name='ImportProfile', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('name', models.CharField(max_length=100)), + ('yaml_config', models.TextField(help_text='YAML configuration')), + ('version', models.IntegerField(choices=[(1, 'Version 1')], default=1, verbose_name='Version')), + ], + options={ + 'ordering': ['name'], + }, + ), + migrations.CreateModel( + name='ImportRun', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('status', models.CharField(choices=[('QUEUED', 'Queued'), ('PROCESSING', 'Processing'), ('FAILED', 'Failed'), ('FINISHED', 'Finished')], default='QUEUED', max_length=10, verbose_name='Status')), + ('file_name', models.CharField(help_text='File name', max_length=10000)), + ('logs', models.TextField(blank=True)), + ('processed_rows', models.IntegerField(default=0)), + ('total_rows', models.IntegerField(default=0)), + ('successful_rows', models.IntegerField(default=0)), + ('skipped_rows', models.IntegerField(default=0)), + ('failed_rows', models.IntegerField(default=0)), + ('started_at', 
models.DateTimeField(null=True)), + ('finished_at', models.DateTimeField(null=True)), + ('categories', models.ManyToManyField(related_name='import_runs', to='transactions.transactioncategory')), + ('currencies', models.ManyToManyField(related_name='import_runs', to='currencies.currency')), + ('entities', models.ManyToManyField(related_name='import_runs', to='transactions.transactionentity')), + ('profile', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='import_app.importprofile')), + ('tags', models.ManyToManyField(related_name='import_runs', to='transactions.transactiontag')), + ('transactions', models.ManyToManyField(related_name='import_runs', to='transactions.transaction')), + ], + ), + ] diff --git a/app/apps/import_app/migrations/0002_alter_importprofile_name_and_more.py b/app/apps/import_app/migrations/0002_alter_importprofile_name_and_more.py new file mode 100644 index 0000000..efa1ee3 --- /dev/null +++ b/app/apps/import_app/migrations/0002_alter_importprofile_name_and_more.py @@ -0,0 +1,23 @@ +# Generated by Django 5.1.5 on 2025-01-23 03:03 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('import_app', '0001_initial'), + ] + + operations = [ + migrations.AlterField( + model_name='importprofile', + name='name', + field=models.CharField(max_length=100, unique=True, verbose_name='Name'), + ), + migrations.AlterField( + model_name='importprofile', + name='yaml_config', + field=models.TextField(verbose_name='YAML Configuration'), + ), + ] diff --git a/app/apps/import_app/migrations/__init__.py b/app/apps/import_app/migrations/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/apps/import_app/models.py b/app/apps/import_app/models.py new file mode 100644 index 0000000..170b431 --- /dev/null +++ b/app/apps/import_app/models.py @@ -0,0 +1,83 @@ +import yaml + +from django.core.exceptions import ValidationError +from django.db import models +from 
django.utils.translation import gettext_lazy as _ + +from apps.import_app.schemas import version_1 + + +class ImportProfile(models.Model): + class Versions(models.IntegerChoices): + VERSION_1 = 1, _("Version") + " 1" + + name = models.CharField(max_length=100, verbose_name=_("Name"), unique=True) + yaml_config = models.TextField(verbose_name=_("YAML Configuration")) + version = models.IntegerField( + choices=Versions, + default=Versions.VERSION_1, + verbose_name=_("Version"), + ) + + def __str__(self): + return self.name + + class Meta: + ordering = ["name"] + + def clean(self): + if self.version and self.version == self.Versions.VERSION_1: + try: + yaml_data = yaml.safe_load(self.yaml_config) + version_1.ImportProfileSchema(**yaml_data) + except Exception as e: + raise ValidationError( + {"yaml_config": _("Invalid YAML Configuration: ") + str(e)} + ) + + +class ImportRun(models.Model): + class Status(models.TextChoices): + QUEUED = "QUEUED", _("Queued") + PROCESSING = "PROCESSING", _("Processing") + FAILED = "FAILED", _("Failed") + FINISHED = "FINISHED", _("Finished") + + status = models.CharField( + max_length=10, + choices=Status, + default=Status.QUEUED, + verbose_name=_("Status"), + ) + profile = models.ForeignKey( + ImportProfile, + on_delete=models.CASCADE, + ) + file_name = models.CharField( + max_length=10000, + help_text=_("File name"), + ) + transactions = models.ManyToManyField( + "transactions.Transaction", related_name="import_runs" + ) + tags = models.ManyToManyField( + "transactions.TransactionTag", related_name="import_runs" + ) + categories = models.ManyToManyField( + "transactions.TransactionCategory", related_name="import_runs" + ) + entities = models.ManyToManyField( + "transactions.TransactionEntity", related_name="import_runs" + ) + currencies = models.ManyToManyField( + "currencies.Currency", related_name="import_runs" + ) + + logs = models.TextField(blank=True) + processed_rows = models.IntegerField(default=0) + total_rows = 
models.IntegerField(default=0) + successful_rows = models.IntegerField(default=0) + skipped_rows = models.IntegerField(default=0) + failed_rows = models.IntegerField(default=0) + started_at = models.DateTimeField(null=True) + finished_at = models.DateTimeField(null=True) diff --git a/app/apps/import_app/schemas/__init__.py b/app/apps/import_app/schemas/__init__.py new file mode 100644 index 0000000..530268d --- /dev/null +++ b/app/apps/import_app/schemas/__init__.py @@ -0,0 +1 @@ +import apps.import_app.schemas.v1 as version_1 diff --git a/app/apps/import_app/schemas/v1.py b/app/apps/import_app/schemas/v1.py new file mode 100644 index 0000000..01ae643 --- /dev/null +++ b/app/apps/import_app/schemas/v1.py @@ -0,0 +1,400 @@ +from typing import Dict, List, Optional, Literal +from pydantic import BaseModel, Field, model_validator, field_validator + + +class CompareDeduplicationRule(BaseModel): + type: Literal["compare"] + fields: list[str] = Field(..., description="Compare fields for deduplication") + match_type: Literal["lax", "strict"] = "lax" + + +class ReplaceTransformationRule(BaseModel): + type: Literal["replace", "regex"] = Field( + ..., description="Type of transformation: replace or regex" + ) + pattern: str = Field(..., description="Pattern to match") + replacement: str = Field(..., description="Value to replace with") + exclusive: bool = Field( + default=False, + description="If it should match against the last transformation or the original value", + ) + + +class DateFormatTransformationRule(BaseModel): + type: Literal["date_format"] = Field( + ..., description="Type of transformation: date_format" + ) + original_format: str = Field(..., description="Original date format") + new_format: str = Field(..., description="New date format to use") + + +class HashTransformationRule(BaseModel): + fields: List[str] + type: Literal["hash"] + + +class MergeTransformationRule(BaseModel): + fields: List[str] + type: Literal["merge"] + separator: str = Field(default=" ", 
description="Separator to use when merging") + + +class SplitTransformationRule(BaseModel): + type: Literal["split"] + separator: str = Field(default=",", description="Separator to use when splitting") + index: int | None = Field( + default=0, description="Index to return as value. Empty to return all." + ) + + +class CSVImportSettings(BaseModel): + skip_errors: bool = Field( + default=False, + description="If True, errors during import will be logged and skipped", + ) + file_type: Literal["csv"] = "csv" + delimiter: str = Field(default=",", description="CSV delimiter character") + encoding: str = Field(default="utf-8", description="File encoding") + skip_lines: int = Field( + default=0, description="Number of rows to skip at the beginning of the file" + ) + trigger_transaction_rules: bool = True + importing: Literal[ + "transactions", "accounts", "currencies", "categories", "tags", "entities" + ] + + +class ColumnMapping(BaseModel): + source: Optional[str] = Field( + default=None, + description="CSV column header. 
If None, the field will be generated from transformations", + ) + default: Optional[str] = None + required: bool = False + transformations: Optional[ + List[ + ReplaceTransformationRule + | DateFormatTransformationRule + | HashTransformationRule + | MergeTransformationRule + | SplitTransformationRule + ] + ] = Field(default_factory=list) + + +class TransactionAccountMapping(ColumnMapping): + target: Literal["account"] = Field(..., description="Transaction field to map to") + type: Literal["id", "name"] = "name" + coerce_to: Literal["str|int"] = Field("str|int", frozen=True) + required: bool = Field(True, frozen=True) + + +class TransactionTypeMapping(ColumnMapping): + target: Literal["type"] = Field(..., description="Transaction field to map to") + detection_method: Literal["sign", "always_income", "always_expense"] = "sign" + coerce_to: Literal["transaction_type"] = Field("transaction_type", frozen=True) + + +class TransactionIsPaidMapping(ColumnMapping): + target: Literal["is_paid"] = Field(..., description="Transaction field to map to") + detection_method: Literal["boolean", "always_paid", "always_unpaid"] + coerce_to: Literal["is_paid"] = Field("is_paid", frozen=True) + + +class TransactionDateMapping(ColumnMapping): + target: Literal["date"] = Field(..., description="Transaction field to map to") + format: List[str] | str + coerce_to: Literal["date"] = Field("date", frozen=True) + required: bool = Field(True, frozen=True) + + +class TransactionReferenceDateMapping(ColumnMapping): + target: Literal["reference_date"] = Field( + ..., description="Transaction field to map to" + ) + format: List[str] | str + coerce_to: Literal["date"] = Field("date", frozen=True) + + +class TransactionAmountMapping(ColumnMapping): + target: Literal["amount"] = Field(..., description="Transaction field to map to") + coerce_to: Literal["positive_decimal"] = Field("positive_decimal", frozen=True) + required: bool = Field(True, frozen=True) + + +class 
TransactionDescriptionMapping(ColumnMapping): + target: Literal["description"] = Field( + ..., description="Transaction field to map to" + ) + coerce_to: Literal["str"] = Field("str", frozen=True) + + +class TransactionNotesMapping(ColumnMapping): + target: Literal["notes"] = Field(..., description="Transaction field to map to") + coerce_to: Literal["str"] = Field("str", frozen=True) + + +class TransactionTagsMapping(ColumnMapping): + target: Literal["tags"] = Field(..., description="Transaction field to map to") + type: Literal["id", "name"] = "name" + create: bool = Field( + default=True, description="Create new tags if they doesn't exist" + ) + coerce_to: Literal["list"] = Field("list", frozen=True) + + +class TransactionEntitiesMapping(ColumnMapping): + target: Literal["entities"] = Field(..., description="Transaction field to map to") + type: Literal["id", "name"] = "name" + create: bool = Field( + default=True, description="Create new entities if they doesn't exist" + ) + coerce_to: Literal["list"] = Field("list", frozen=True) + + +class TransactionCategoryMapping(ColumnMapping): + target: Literal["category"] = Field(..., description="Transaction field to map to") + create: bool = Field( + default=True, description="Create category if it doesn't exist" + ) + type: Literal["id", "name"] = "name" + coerce_to: Literal["str|int"] = Field("str|int", frozen=True) + + +class TransactionInternalNoteMapping(ColumnMapping): + target: Literal["internal_note"] = Field( + ..., description="Transaction field to map to" + ) + coerce_to: Literal["str"] = Field("str", frozen=True) + + +class TransactionInternalIDMapping(ColumnMapping): + target: Literal["internal_id"] = Field( + ..., description="Transaction field to map to" + ) + coerce_to: Literal["str"] = Field("str", frozen=True) + + +class CategoryNameMapping(ColumnMapping): + target: Literal["category_name"] = Field( + ..., description="Category field to map to" + ) + coerce_to: Literal["str"] = Field("str", 
frozen=True) + + +class CategoryMuteMapping(ColumnMapping): + target: Literal["category_mute"] = Field( + ..., description="Category field to map to" + ) + coerce_to: Literal["bool"] = Field("bool", frozen=True) + + +class CategoryActiveMapping(ColumnMapping): + target: Literal["category_active"] = Field( + ..., description="Category field to map to" + ) + coerce_to: Literal["bool"] = Field("bool", frozen=True) + + +class TagNameMapping(ColumnMapping): + target: Literal["tag_name"] = Field(..., description="Tag field to map to") + coerce_to: Literal["str"] = Field("str", frozen=True) + + +class TagActiveMapping(ColumnMapping): + target: Literal["tag_active"] = Field(..., description="Tag field to map to") + coerce_to: Literal["bool"] = Field("bool", frozen=True) + + +class EntityNameMapping(ColumnMapping): + target: Literal["entity_name"] = Field(..., description="Entity field to map to") + coerce_to: Literal["str"] = Field("str", frozen=True) + + +class EntityActiveMapping(ColumnMapping): + target: Literal["entity_active"] = Field(..., description="Entity field to map to") + coerce_to: Literal["bool"] = Field("bool", frozen=True) + + +class AccountNameMapping(ColumnMapping): + target: Literal["account_name"] = Field(..., description="Account field to map to") + coerce_to: Literal["str"] = Field("str", frozen=True) + + +class AccountGroupMapping(ColumnMapping): + target: Literal["account_group"] = Field(..., description="Account field to map to") + type: Literal["id", "name"] + coerce_to: Literal["str|int"] = Field("str|int", frozen=True) + + +class AccountCurrencyMapping(ColumnMapping): + target: Literal["account_currency"] = Field( + ..., description="Account field to map to" + ) + type: Literal["id", "name", "code"] + coerce_to: Literal["str|int"] = Field("str|int", frozen=True) + + +class AccountExchangeCurrencyMapping(ColumnMapping): + target: Literal["account_exchange_currency"] = Field( + ..., description="Account field to map to" + ) + type: Literal["id", 
"name", "code"] + coerce_to: Literal["str|int"] = Field("str|int", frozen=True) + + +class AccountIsAssetMapping(ColumnMapping): + target: Literal["account_is_asset"] = Field( + ..., description="Account field to map to" + ) + coerce_to: Literal["bool"] = Field("bool", frozen=True) + + +class AccountIsArchivedMapping(ColumnMapping): + target: Literal["account_is_archived"] = Field( + ..., description="Account field to map to" + ) + coerce_to: Literal["bool"] = Field("bool", frozen=True) + + +class CurrencyCodeMapping(ColumnMapping): + target: Literal["currency_code"] = Field( + ..., description="Currency field to map to" + ) + coerce_to: Literal["str"] = Field("str", frozen=True) + + +class CurrencyNameMapping(ColumnMapping): + target: Literal["currency_name"] = Field( + ..., description="Currency field to map to" + ) + coerce_to: Literal["str"] = Field("str", frozen=True) + + +class CurrencyDecimalPlacesMapping(ColumnMapping): + target: Literal["currency_decimal_places"] = Field( + ..., description="Currency field to map to" + ) + coerce_to: Literal["int"] = Field("int", frozen=True) + + +class CurrencyPrefixMapping(ColumnMapping): + target: Literal["currency_prefix"] = Field( + ..., description="Currency field to map to" + ) + coerce_to: Literal["str"] = Field("str", frozen=True) + + +class CurrencySuffixMapping(ColumnMapping): + target: Literal["currency_suffix"] = Field( + ..., description="Currency field to map to" + ) + coerce_to: Literal["str"] = Field("str", frozen=True) + + +class CurrencyExchangeMapping(ColumnMapping): + target: Literal["currency_exchange"] = Field( + ..., description="Currency field to map to" + ) + type: Literal["id", "name", "code"] + coerce_to: Literal["str|int"] = Field("str|int", frozen=True) + + +class ImportProfileSchema(BaseModel): + settings: CSVImportSettings + mapping: Dict[ + str, + TransactionAccountMapping + | TransactionTypeMapping + | TransactionIsPaidMapping + | TransactionDateMapping + | TransactionReferenceDateMapping 
+ | TransactionAmountMapping + | TransactionDescriptionMapping + | TransactionNotesMapping + | TransactionTagsMapping + | TransactionEntitiesMapping + | TransactionCategoryMapping + | TransactionInternalNoteMapping + | TransactionInternalIDMapping + | CategoryNameMapping + | CategoryMuteMapping + | CategoryActiveMapping + | TagNameMapping + | TagActiveMapping + | EntityNameMapping + | EntityActiveMapping + | AccountNameMapping + | AccountGroupMapping + | AccountCurrencyMapping + | AccountExchangeCurrencyMapping + | AccountIsAssetMapping + | AccountIsArchivedMapping + | CurrencyCodeMapping + | CurrencyNameMapping + | CurrencyDecimalPlacesMapping + | CurrencyPrefixMapping + | CurrencySuffixMapping + | CurrencyExchangeMapping, + ] + deduplication: List[CompareDeduplicationRule] = Field( + default_factory=list, + description="Rules for deduplicating records during import", + ) + + @model_validator(mode="after") + def validate_mappings(self) -> "ImportProfileSchema": + import_type = self.settings.importing + + # Define allowed mapping types for each import type + allowed_mappings = { + "transactions": ( + TransactionAccountMapping, + TransactionTypeMapping, + TransactionIsPaidMapping, + TransactionDateMapping, + TransactionReferenceDateMapping, + TransactionAmountMapping, + TransactionDescriptionMapping, + TransactionNotesMapping, + TransactionTagsMapping, + TransactionEntitiesMapping, + TransactionCategoryMapping, + TransactionInternalNoteMapping, + TransactionInternalIDMapping, + ), + "accounts": ( + AccountNameMapping, + AccountGroupMapping, + AccountCurrencyMapping, + AccountExchangeCurrencyMapping, + AccountIsAssetMapping, + AccountIsArchivedMapping, + ), + "currencies": ( + CurrencyCodeMapping, + CurrencyNameMapping, + CurrencyDecimalPlacesMapping, + CurrencyPrefixMapping, + CurrencySuffixMapping, + CurrencyExchangeMapping, + ), + "categories": ( + CategoryNameMapping, + CategoryMuteMapping, + CategoryActiveMapping, + ), + "tags": (TagNameMapping, 
TagActiveMapping), + "entities": (EntityNameMapping, EntityActiveMapping), + } + + allowed_types = allowed_mappings[import_type] + + for field_name, mapping in self.mapping.items(): + if not isinstance(mapping, allowed_types): + raise ValueError( + f"Mapping type '{type(mapping).__name__}' is not allowed when importing {import_type}. " + f"Allowed types are: {', '.join(t.__name__ for t in allowed_types)}" + ) + + return self diff --git a/app/apps/import_app/services/__init__.py b/app/apps/import_app/services/__init__.py new file mode 100644 index 0000000..88aa4e8 --- /dev/null +++ b/app/apps/import_app/services/__init__.py @@ -0,0 +1,3 @@ +from apps.import_app.services.v1 import ImportService as ImportServiceV1 + +from apps.import_app.services.presets import PresetService diff --git a/app/apps/import_app/services/presets.py b/app/apps/import_app/services/presets.py new file mode 100644 index 0000000..824a246 --- /dev/null +++ b/app/apps/import_app/services/presets.py @@ -0,0 +1,45 @@ +import json +from pathlib import Path + +from apps.import_app.models import ImportProfile + + +class PresetService: + PRESET_PATH = "/usr/src/app/import_presets" + + @classmethod + def get_all_presets(cls): + presets = [] + + for folder in Path(cls.PRESET_PATH).iterdir(): + if folder.is_dir(): + manifest_path = folder / "manifest.json" + config_path = folder / "config.yml" + + if manifest_path.exists() and config_path.exists(): + with open(manifest_path) as f: + manifest = json.load(f) + + with open(config_path) as f: + config = json.dumps(f.read()) + + try: + preset = { + "name": manifest.get("name", folder.name), + "description": manifest.get("description", ""), + "message": json.dumps(manifest.get("message", "")), + "authors": manifest.get("author", "").split(","), + "schema_version": (int(manifest.get("schema_version", 1))), + "folder_name": folder.name, + "config": config, + } + + ImportProfile.Versions( + preset["schema_version"] + ) # Check if schema version is valid + except 
Exception as e: + pass + else: + presets.append(preset) + + return presets diff --git a/app/apps/import_app/services/v1.py b/app/apps/import_app/services/v1.py new file mode 100644 index 0000000..d84935e --- /dev/null +++ b/app/apps/import_app/services/v1.py @@ -0,0 +1,632 @@ +import csv +import hashlib +import logging +import os +import re +from datetime import datetime +from decimal import Decimal +from typing import Dict, Any, Literal, Union + +import cachalot.api +import yaml +from cachalot.api import cachalot_disabled +from django.utils import timezone + +from apps.accounts.models import Account, AccountGroup +from apps.currencies.models import Currency +from apps.import_app.models import ImportRun, ImportProfile +from apps.import_app.schemas import version_1 +from apps.transactions.models import ( + Transaction, + TransactionCategory, + TransactionTag, + TransactionEntity, +) +from apps.rules.signals import transaction_created +from apps.import_app.schemas.v1 import ( + TransactionCategoryMapping, + TransactionAccountMapping, + TransactionTagsMapping, + TransactionEntitiesMapping, +) + +logger = logging.getLogger(__name__) + + +class ImportService: + TEMP_DIR = "/usr/src/app/temp" + + def __init__(self, import_run: ImportRun): + self.import_run: ImportRun = import_run + self.profile: ImportProfile = import_run.profile + self.config: version_1.ImportProfileSchema = self._load_config() + self.settings: version_1.CSVImportSettings = self.config.settings + self.deduplication: list[version_1.CompareDeduplicationRule] = ( + self.config.deduplication + ) + self.mapping: Dict[str, version_1.ColumnMapping] = self.config.mapping + + # Ensure temp directory exists + os.makedirs(self.TEMP_DIR, exist_ok=True) + + def _load_config(self) -> version_1.ImportProfileSchema: + yaml_data = yaml.safe_load(self.profile.yaml_config) + try: + config = version_1.ImportProfileSchema(**yaml_data) + except Exception as e: + self._log("error", f"Fatal error processing YAML config: 
{str(e)}") + self._update_status("FAILED") + raise e + else: + return config + + def _log(self, level: str, message: str, **kwargs) -> None: + """Add a log entry to the import run logs""" + timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S") + + # Format additional context if present + context = "" + if kwargs: + context = " - " + ", ".join(f"{k}={v}" for k, v in kwargs.items()) + + log_line = f"[{timestamp}] {level.upper()}: {message}{context}\n" + + # Append to existing logs + self.import_run.logs += log_line + self.import_run.save(update_fields=["logs"]) + + def _update_totals( + self, + field: Literal["total", "processed", "successful", "skipped", "failed"], + value: int, + ) -> None: + if field == "total": + self.import_run.total_rows = value + self.import_run.save(update_fields=["total_rows"]) + elif field == "processed": + self.import_run.processed_rows = value + self.import_run.save(update_fields=["processed_rows"]) + elif field == "successful": + self.import_run.successful_rows = value + self.import_run.save(update_fields=["successful_rows"]) + elif field == "skipped": + self.import_run.skipped_rows = value + self.import_run.save(update_fields=["skipped_rows"]) + elif field == "failed": + self.import_run.failed_rows = value + self.import_run.save(update_fields=["failed_rows"]) + + def _increment_totals( + self, + field: Literal["total", "processed", "successful", "skipped", "failed"], + value: int, + ) -> None: + if field == "total": + self.import_run.total_rows = self.import_run.total_rows + value + self.import_run.save(update_fields=["total_rows"]) + elif field == "processed": + self.import_run.processed_rows = self.import_run.processed_rows + value + self.import_run.save(update_fields=["processed_rows"]) + elif field == "successful": + self.import_run.successful_rows = self.import_run.successful_rows + value + self.import_run.save(update_fields=["successful_rows"]) + elif field == "skipped": + self.import_run.skipped_rows = 
self.import_run.skipped_rows + value + self.import_run.save(update_fields=["skipped_rows"]) + elif field == "failed": + self.import_run.failed_rows = self.import_run.failed_rows + value + self.import_run.save(update_fields=["failed_rows"]) + + def _update_status( + self, new_status: Literal["PROCESSING", "FAILED", "FINISHED"] + ) -> None: + if new_status == "PROCESSING": + self.import_run.status = ImportRun.Status.PROCESSING + elif new_status == "FAILED": + self.import_run.status = ImportRun.Status.FAILED + elif new_status == "FINISHED": + self.import_run.status = ImportRun.Status.FINISHED + + self.import_run.save(update_fields=["status"]) + + @staticmethod + def _transform_value( + value: str, mapping: version_1.ColumnMapping, row: Dict[str, str] = None + ) -> Any: + transformed = value + + for transform in mapping.transformations: + if transform.type == "hash": + # Collect all values to be hashed + values_to_hash = [] + for field in transform.fields: + if field in row: + values_to_hash.append(str(row[field])) + + # Create hash from concatenated values + if values_to_hash: + concatenated = "|".join(values_to_hash) + transformed = hashlib.sha256(concatenated.encode()).hexdigest() + + elif transform.type == "replace": + if transform.exclusive: + transformed = value.replace( + transform.pattern, transform.replacement + ) + else: + transformed = transformed.replace( + transform.pattern, transform.replacement + ) + elif transform.type == "regex": + if transform.exclusive: + transformed = re.sub( + transform.pattern, transform.replacement, value + ) + else: + transformed = re.sub( + transform.pattern, transform.replacement, transformed + ) + elif transform.type == "date_format": + transformed = datetime.strptime( + transformed, transform.original_format + ).strftime(transform.new_format) + elif transform.type == "merge": + values_to_merge = [] + for field in transform.fields: + if field in row: + values_to_merge.append(str(row[field])) + transformed = 
transform.separator.join(values_to_merge) + elif transform.type == "split": + parts = transformed.split(transform.separator) + if transform.index is not None: + transformed = parts[transform.index] if parts else "" + else: + transformed = parts + + return transformed + + def _create_transaction(self, data: Dict[str, Any]) -> Transaction: + tags = [] + entities = [] + # Handle related objects first + if "category" in data: + if "category" in data: + category_name = data.pop("category") + category_mapping = next( + ( + m + for m in self.mapping.values() + if isinstance(m, TransactionCategoryMapping) + and m.target == "category" + ), + None, + ) + + try: + if category_mapping: + if category_mapping.type == "id": + category = TransactionCategory.objects.get(id=category_name) + else: # name + if getattr(category_mapping, "create", False): + category, _ = TransactionCategory.objects.get_or_create( + name=category_name + ) + else: + category = TransactionCategory.objects.filter( + name=category_name + ).first() + + if category: + data["category"] = category + self.import_run.categories.add(category) + except (TransactionCategory.DoesNotExist, ValueError): + # Ignore if category doesn't exist and create is False or not set + data["category"] = None + + if "account" in data: + account_id = data.pop("account") + account_mapping = next( + ( + m + for m in self.mapping.values() + if isinstance(m, TransactionAccountMapping) + and m.target == "account" + ), + None, + ) + + try: + if account_mapping and account_mapping.type == "id": + account = Account.objects.filter(id=account_id).first() + else: # name + account = Account.objects.filter(name=account_id).first() + + if account: + data["account"] = account + except ValueError: + # Ignore if account doesn't exist + pass + + if "tags" in data: + tag_names = data.pop("tags") + tags_mapping = next( + ( + m + for m in self.mapping.values() + if isinstance(m, TransactionTagsMapping) and m.target == "tags" + ), + None, + ) + + for 
tag_name in tag_names:
+                try:
+                    if tags_mapping:
+                        if tags_mapping.type == "id":
+                            tag = TransactionTag.objects.filter(id=tag_name).first()
+                        else:  # name
+                            if getattr(tags_mapping, "create", False):
+                                tag, _ = TransactionTag.objects.get_or_create(
+                                    name=tag_name.strip()
+                                )
+                            else:
+                                tag = TransactionTag.objects.filter(
+                                    name=tag_name.strip()
+                                ).first()
+
+                    if tag:
+                        tags.append(tag)
+                        self.import_run.tags.add(tag)
+                except ValueError:
+                    # Ignore if tag doesn't exist and create is False or not set
+                    continue
+
+        if "entities" in data:
+            entity_names = data.pop("entities")
+            entities_mapping = next(
+                (
+                    m
+                    for m in self.mapping.values()
+                    if isinstance(m, TransactionEntitiesMapping)
+                    and m.target == "entities"
+                ),
+                None,
+            )
+
+            for entity_name in entity_names:
+                try:
+                    if entities_mapping:
+                        if entities_mapping.type == "id":
+                            entity = TransactionEntity.objects.filter(
+                                id=entity_name
+                            ).first()
+                        else:  # name
+                            if getattr(entities_mapping, "create", False):
+                                entity, _ = TransactionEntity.objects.get_or_create(
+                                    name=entity_name.strip()
+                                )
+                            else:
+                                entity = TransactionEntity.objects.filter(
+                                    name=entity_name.strip()
+                                ).first()
+
+                    if entity:
+                        entities.append(entity)
+                        self.import_run.entities.add(entity)
+                except ValueError:
+                    # Ignore if entity doesn't exist and create is False or not set
+                    continue
+
+        # Create the transaction
+        new_transaction = Transaction.objects.create(**data)
+        self.import_run.transactions.add(new_transaction)
+
+        # Add many-to-many relationships
+        if tags:
+            new_transaction.tags.set(tags)
+        if entities:
+            new_transaction.entities.set(entities)
+
+        if self.settings.trigger_transaction_rules:
+            transaction_created.send(sender=new_transaction)
+
+        return new_transaction
+
+    def _create_account(self, data: Dict[str, Any]) -> Account:
+        if "group" in data:
+            group_name = data.pop("group")
+            group, _ = AccountGroup.objects.get_or_create(name=group_name)
+            data["group"] = group
+
+        # Handle currency references
+        if "currency" in 
data: + currency = Currency.objects.get(code=data["currency"]) + data["currency"] = currency + self.import_run.currencies.add(currency) + + if "exchange_currency" in data: + exchange_currency = Currency.objects.get(code=data["exchange_currency"]) + data["exchange_currency"] = exchange_currency + self.import_run.currencies.add(exchange_currency) + + return Account.objects.create(**data) + + def _create_currency(self, data: Dict[str, Any]) -> Currency: + # Handle exchange currency reference + if "exchange_currency" in data: + exchange_currency = Currency.objects.get(code=data["exchange_currency"]) + data["exchange_currency"] = exchange_currency + self.import_run.currencies.add(exchange_currency) + + currency = Currency.objects.create(**data) + self.import_run.currencies.add(currency) + return currency + + def _create_category(self, data: Dict[str, Any]) -> TransactionCategory: + category = TransactionCategory.objects.create(**data) + self.import_run.categories.add(category) + return category + + def _create_tag(self, data: Dict[str, Any]) -> TransactionTag: + tag = TransactionTag.objects.create(**data) + self.import_run.tags.add(tag) + return tag + + def _create_entity(self, data: Dict[str, Any]) -> TransactionEntity: + entity = TransactionEntity.objects.create(**data) + self.import_run.entities.add(entity) + return entity + + def _check_duplicate_transaction(self, transaction_data: Dict[str, Any]) -> bool: + for rule in self.deduplication: + if rule.type == "compare": + query = Transaction.all_objects.all().values("id") + + # Build query conditions for each field in the rule + for field in rule.fields: + if field in transaction_data: + if rule.match_type == "strict": + query = query.filter(**{field: transaction_data[field]}) + else: # lax matching + query = query.filter( + **{f"{field}__iexact": transaction_data[field]} + ) + + # If we found any matching transaction, it's a duplicate + if query.exists(): + return True + + return False + + def _coerce_type( + self, 
value: str, mapping: version_1.ColumnMapping + ) -> Union[str, int, bool, Decimal, datetime, list]: + if not value: + return None + + coerce_to = mapping.coerce_to + + return self._coerce_single_type(value, coerce_to, mapping) + + @staticmethod + def _coerce_single_type( + value: str, coerce_to: str, mapping: version_1.ColumnMapping + ) -> Union[str, int, bool, Decimal, datetime.date, list]: + if coerce_to == "str": + return str(value) + elif coerce_to == "int": + return int(value) + elif coerce_to == "str|int": + if hasattr(mapping, "type") and mapping.type == "id": + return int(value) + elif hasattr(mapping, "type") and mapping.type in ["name", "code"]: + return str(value) + else: + return str(value) + elif coerce_to == "bool": + return value.lower() in ["true", "1", "yes", "y", "on"] + elif coerce_to == "positive_decimal": + return abs(Decimal(value)) + elif coerce_to == "date": + if isinstance( + mapping, + ( + version_1.TransactionDateMapping, + version_1.TransactionReferenceDateMapping, + ), + ): + formats = ( + mapping.format + if isinstance(mapping.format, list) + else [mapping.format] + ) + for fmt in formats: + try: + return datetime.strptime(value, fmt).date() + except ValueError: + continue + raise ValueError( + f"Could not parse date '{value}' with any of the provided formats" + ) + else: + raise ValueError( + "Date coercion is only supported for TransactionDateMapping and TransactionReferenceDateMapping" + ) + elif coerce_to == "list": + return ( + value + if isinstance(value, list) + else [item.strip() for item in value.split(",") if item.strip()] + ) + elif coerce_to == "transaction_type": + if isinstance(mapping, version_1.TransactionTypeMapping): + if mapping.detection_method == "sign": + return ( + Transaction.Type.EXPENSE + if value.startswith("-") + else Transaction.Type.INCOME + ) + elif mapping.detection_method == "always_income": + return Transaction.Type.INCOME + elif mapping.detection_method == "always_expense": + return 
Transaction.Type.EXPENSE + raise ValueError("Invalid transaction type detection method") + elif coerce_to == "is_paid": + if isinstance(mapping, version_1.TransactionIsPaidMapping): + if mapping.detection_method == "boolean": + return value.lower() in ["true", "1", "yes", "y", "on"] + elif mapping.detection_method == "always_paid": + return True + elif mapping.detection_method == "always_unpaid": + return False + raise ValueError("Invalid is_paid detection method") + else: + raise ValueError(f"Unsupported coercion type: {coerce_to}") + + def _map_row(self, row: Dict[str, str]) -> Dict[str, Any]: + mapped_data = {} + + for field, mapping in self.mapping.items(): + # If source is None, use None as the initial value + value = row.get(mapping.source) if mapping.source else None + + # Use default_value if value is None + if value is None: + value = mapping.default + + # Apply transformations + if mapping.transformations: + value = self._transform_value(value, mapping, row) + + value = self._coerce_type(value, mapping) + + if mapping.required and value is None: + raise ValueError(f"Required field {field} is missing") + + if value is not None: + # Remove the prefix from the target field + target = mapping.target + if self.settings.importing == "transactions": + mapped_data[target] = value + else: + # Remove the model prefix (e.g., "account_" from "account_name") + field_name = target.split("_", 1)[1] + mapped_data[field_name] = value + + return mapped_data + + def _process_row(self, row: Dict[str, str], row_number: int) -> None: + try: + mapped_data = self._map_row(row) + + if mapped_data: + # Handle different import types + if self.settings.importing == "transactions": + if self.deduplication and self._check_duplicate_transaction( + mapped_data + ): + self._increment_totals("skipped", 1) + self._log("info", f"Skipped duplicate row {row_number}") + return + self._create_transaction(mapped_data) + elif self.settings.importing == "accounts": + 
self._create_account(mapped_data) + elif self.settings.importing == "currencies": + self._create_currency(mapped_data) + elif self.settings.importing == "categories": + self._create_category(mapped_data) + elif self.settings.importing == "tags": + self._create_tag(mapped_data) + elif self.settings.importing == "entities": + self._create_entity(mapped_data) + + self._increment_totals("successful", value=1) + self._log("info", f"Successfully processed row {row_number}") + + self._increment_totals("processed", value=1) + + except Exception as e: + if not self.settings.skip_errors: + self._log("error", f"Fatal error processing row {row_number}: {str(e)}") + self._update_status("FAILED") + raise + else: + self._log("warning", f"Error processing row {row_number}: {str(e)}") + self._increment_totals("failed", value=1) + + logger.error(f"Fatal error processing row {row_number}", exc_info=e) + + def _process_csv(self, file_path): + # First pass: count rows + with open(file_path, "r", encoding=self.settings.encoding) as csv_file: + # Skip specified number of rows + for _ in range(self.settings.skip_lines): + next(csv_file) + + reader = csv.DictReader(csv_file, delimiter=self.settings.delimiter) + self._update_totals("total", value=sum(1 for _ in reader)) + + with open(file_path, "r", encoding=self.settings.encoding) as csv_file: + # Skip specified number of rows + for _ in range(self.settings.skip_lines): + next(csv_file) + if self.settings.skip_lines: + self._log("info", f"Skipped {self.settings.skip_lines} initial lines") + + reader = csv.DictReader(csv_file, delimiter=self.settings.delimiter) + + self._log("info", f"Starting import with {self.import_run.total_rows} rows") + + for row_number, row in enumerate(reader, start=1): + self._process_row(row, row_number) + + def _validate_file_path(self, file_path: str) -> str: + """ + Validates that the file path is within the allowed temporary directory. + Returns the absolute path. 
+ """ + abs_path = os.path.abspath(file_path) + if not abs_path.startswith(self.TEMP_DIR): + raise ValueError(f"Invalid file path. File must be in {self.TEMP_DIR}") + return abs_path + + def process_file(self, file_path: str): + with cachalot_disabled(): + # Validate and get absolute path + file_path = self._validate_file_path(file_path) + + self._update_status("PROCESSING") + self.import_run.started_at = timezone.now() + self.import_run.save(update_fields=["started_at"]) + + self._log("info", "Starting import process") + + try: + if self.settings.file_type == "csv": + self._process_csv(file_path) + + self._update_status("FINISHED") + self._log( + "info", + f"Import completed successfully. " + f"Successful: {self.import_run.successful_rows}, " + f"Failed: {self.import_run.failed_rows}, " + f"Skipped: {self.import_run.skipped_rows}", + ) + + except Exception as e: + self._update_status("FAILED") + self._log("error", f"Import failed: {str(e)}") + raise Exception("Import failed") + + finally: + self._log("info", "Cleaning up temporary files") + try: + if os.path.exists(file_path): + os.remove(file_path) + self._log("info", f"Deleted temporary file: {file_path}") + except OSError as e: + self._log("warning", f"Failed to delete temporary file: {str(e)}") + + self.import_run.finished_at = timezone.now() + self.import_run.save(update_fields=["finished_at"]) + cachalot.api.invalidate() diff --git a/app/apps/import_app/tasks.py b/app/apps/import_app/tasks.py new file mode 100644 index 0000000..44d63b4 --- /dev/null +++ b/app/apps/import_app/tasks.py @@ -0,0 +1,21 @@ +import logging + +import cachalot.api +from procrastinate.contrib.django import app + +from apps.import_app.models import ImportRun +from apps.import_app.services import ImportServiceV1 + +logger = logging.getLogger(__name__) + + +@app.task +def process_import(import_run_id: int, file_path: str): + try: + import_run = ImportRun.objects.get(id=import_run_id) + import_service = ImportServiceV1(import_run) + 
import_service.process_file(file_path)
+        cachalot.api.invalidate()
+    except ImportRun.DoesNotExist:
+        cachalot.api.invalidate()
+        raise ValueError(f"ImportRun with id {import_run_id} not found")
diff --git a/app/apps/import_app/tests.py b/app/apps/import_app/tests.py
new file mode 100644
index 0000000..7ce503c
--- /dev/null
+++ b/app/apps/import_app/tests.py
@@ -0,0 +1,3 @@
+from django.test import TestCase
+
+# Create your tests here.
diff --git a/app/apps/import_app/urls.py b/app/apps/import_app/urls.py
new file mode 100644
index 0000000..eae9851
--- /dev/null
+++ b/app/apps/import_app/urls.py
@@ -0,0 +1,56 @@
+from django.urls import path
+import apps.import_app.views as views
+
+urlpatterns = [
+    path("import/", views.import_view, name="import"),
+    path(
+        "import/presets/",
+        views.import_presets_list,
+        name="import_presets_list",
+    ),
+    path(
+        "import/profiles/",
+        views.import_profile_index,
+        name="import_profiles_index",
+    ),
+    path(
+        "import/profiles/list/",
+        views.import_profile_list,
+        name="import_profiles_list",
+    ),
+    path(
+        "import/profiles/<int:profile_id>/delete/",
+        views.import_profile_delete,
+        name="import_profile_delete",
+    ),
+    path(
+        "import/profiles/add/",
+        views.import_profile_add,
+        name="import_profiles_add",
+    ),
+    path(
+        "import/profiles/<int:profile_id>/edit/",
+        views.import_profile_edit,
+        name="import_profile_edit",
+    ),
+    path(
+        "import/profiles/<int:profile_id>/runs/list/",
+        views.import_runs_list,
+        name="import_profile_runs_list",
+    ),
+    path(
+        "import/profiles/<int:profile_id>/runs/<int:run_id>/log/",
+        views.import_run_log,
+        name="import_run_log",
+    ),
+    path(
+        "import/profiles/<int:profile_id>/runs/<int:run_id>/delete/",
+        views.import_run_delete,
+        name="import_run_delete",
+    ),
+    path(
+        "import/profiles/<int:profile_id>/runs/add/",
+        views.import_run_add,
+        name="import_run_add",
+    ),
+]
diff --git a/app/apps/import_app/views.py b/app/apps/import_app/views.py
new file mode 100644
index 0000000..1069eca
--- /dev/null
+++ b/app/apps/import_app/views.py
@@ -0,0 +1,230 @@
+import shutil
+
+from django.contrib import messages
+from django.contrib.auth.decorators import login_required +from django.core.files.storage import FileSystemStorage +from django.http import HttpResponse +from django.shortcuts import render, get_object_or_404 +from django.views.decorators.csrf import csrf_exempt +from django.views.decorators.http import require_http_methods +from django.utils.translation import gettext_lazy as _ + +from apps.common.decorators.htmx import only_htmx +from apps.import_app.forms import ImportRunFileUploadForm, ImportProfileForm +from apps.import_app.models import ImportRun, ImportProfile +from apps.import_app.tasks import process_import +from apps.import_app.services import PresetService + + +def import_view(request): + import_profile = ImportProfile.objects.get(id=2) + shutil.copyfile( + "/usr/src/app/apps/import_app/teste2.csv", "/usr/src/app/temp/teste2.csv" + ) + ir = ImportRun.objects.create(profile=import_profile, file_name="teste.csv") + process_import.defer( + import_run_id=ir.id, + file_path="/usr/src/app/temp/teste2.csv", + ) + return HttpResponse("Hello, world. 
You're at the polls page.") + + +@login_required +@require_http_methods(["GET"]) +def import_presets_list(request): + presets = PresetService.get_all_presets() + return render( + request, + "import_app/fragments/profiles/list_presets.html", + {"presets": presets}, + ) + + +@login_required +@require_http_methods(["GET", "POST"]) +def import_profile_index(request): + return render( + request, + "import_app/pages/profiles_index.html", + ) + + +@only_htmx +@login_required +@require_http_methods(["GET", "POST"]) +def import_profile_list(request): + profiles = ImportProfile.objects.all() + + return render( + request, + "import_app/fragments/profiles/list.html", + {"profiles": profiles}, + ) + + +@only_htmx +@login_required +@require_http_methods(["GET", "POST"]) +def import_profile_add(request): + message = request.GET.get("message", None) or request.POST.get("message", None) + + if request.method == "POST": + form = ImportProfileForm(request.POST) + + if form.is_valid(): + form.save() + messages.success(request, _("Import Profile added successfully")) + + return HttpResponse( + status=204, + headers={ + "HX-Trigger": "updated, hide_offcanvas", + }, + ) + else: + form = ImportProfileForm( + initial={ + "name": request.GET.get("name"), + "version": int(request.GET.get("version", 1)), + "yaml_config": request.GET.get("yaml_config"), + } + ) + + return render( + request, + "import_app/fragments/profiles/add.html", + {"form": form, "message": message}, + ) + + +@only_htmx +@login_required +@require_http_methods(["GET", "POST"]) +def import_profile_edit(request, profile_id): + profile = get_object_or_404(ImportProfile, id=profile_id) + + if request.method == "POST": + form = ImportProfileForm(request.POST, instance=profile) + + if form.is_valid(): + form.save() + messages.success(request, _("Import Profile update successfully")) + + return HttpResponse( + status=204, + headers={ + "HX-Trigger": "updated, hide_offcanvas", + }, + ) + else: + form = 
ImportProfileForm(instance=profile) + + return render( + request, + "import_app/fragments/profiles/edit.html", + {"form": form, "profile": profile}, + ) + + +@only_htmx +@login_required +@csrf_exempt +@require_http_methods(["DELETE"]) +def import_profile_delete(request, profile_id): + profile = ImportProfile.objects.get(id=profile_id) + + profile.delete() + + messages.success(request, _("Import Profile deleted successfully")) + + return HttpResponse( + status=204, + headers={ + "HX-Trigger": "updated", + }, + ) + + +@only_htmx +@login_required +@require_http_methods(["GET", "POST"]) +def import_runs_list(request, profile_id): + profile = ImportProfile.objects.get(id=profile_id) + + runs = ImportRun.objects.filter(profile=profile).order_by("-id") + + return render( + request, + "import_app/fragments/runs/list.html", + {"profile": profile, "runs": runs}, + ) + + +@only_htmx +@login_required +@require_http_methods(["GET", "POST"]) +def import_run_log(request, profile_id, run_id): + run = ImportRun.objects.get(profile__id=profile_id, id=run_id) + + return render( + request, + "import_app/fragments/runs/log.html", + {"run": run}, + ) + + +@only_htmx +@login_required +@require_http_methods(["GET", "POST"]) +def import_run_add(request, profile_id): + profile = ImportProfile.objects.get(id=profile_id) + + if request.method == "POST": + form = ImportRunFileUploadForm(request.POST, request.FILES) + + if form.is_valid(): + uploaded_file = request.FILES["file"] + fs = FileSystemStorage(location="/usr/src/app/temp") + filename = fs.save(uploaded_file.name, uploaded_file) + file_path = fs.path(filename) + + import_run = ImportRun.objects.create(profile=profile, file_name=filename) + + # Defer the procrastinate task + process_import.defer(import_run_id=import_run.id, file_path=file_path) + + messages.success(request, _("Import Run queued successfully")) + + return HttpResponse( + status=204, + headers={ + "HX-Trigger": "updated, hide_offcanvas", + }, + ) + else: + form = 
ImportRunFileUploadForm() + + return render( + request, + "import_app/fragments/runs/add.html", + {"form": form, "profile": profile}, + ) + + +@only_htmx +@login_required +@csrf_exempt +@require_http_methods(["DELETE"]) +def import_run_delete(request, profile_id, run_id): + run = ImportRun.objects.get(profile__id=profile_id, id=run_id) + + run.delete() + + messages.success(request, _("Run deleted successfully")) + + return HttpResponse( + status=204, + headers={ + "HX-Trigger": "updated", + }, + ) diff --git a/app/apps/transactions/admin.py b/app/apps/transactions/admin.py index 5a4ef15..8f37317 100644 --- a/app/apps/transactions/admin.py +++ b/app/apps/transactions/admin.py @@ -12,15 +12,34 @@ from apps.transactions.models import ( @admin.register(Transaction) class TransactionModelAdmin(admin.ModelAdmin): + def get_queryset(self, request): + # Use the all_objects manager to show all transactions, including deleted ones + return self.model.all_objects.all() + + list_filter = ["deleted", "type", "is_paid", "date", "account"] + list_display = [ + "date", "description", "type", "account__name", "amount", "account__currency__code", - "date", "reference_date", + "deleted", ] + readonly_fields = ["deleted_at"] + + actions = ["hard_delete_selected"] + + def hard_delete_selected(self, request, queryset): + for obj in queryset: + obj.hard_delete() + self.message_user( + request, f"Successfully hard deleted {queryset.count()} transactions." 
+ ) + + hard_delete_selected.short_description = "Hard delete selected transactions" class TransactionInline(admin.TabularInline): diff --git a/app/apps/transactions/migrations/0028_transaction_internal_note.py b/app/apps/transactions/migrations/0028_transaction_internal_note.py new file mode 100644 index 0000000..c88c11d --- /dev/null +++ b/app/apps/transactions/migrations/0028_transaction_internal_note.py @@ -0,0 +1,18 @@ +# Generated by Django 5.1.5 on 2025-01-19 00:44 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('transactions', '0027_alter_transaction_description'), + ] + + operations = [ + migrations.AddField( + model_name='transaction', + name='internal_note', + field=models.TextField(blank=True, verbose_name='Internal Note'), + ), + ] diff --git a/app/apps/transactions/migrations/0029_alter_transaction_options.py b/app/apps/transactions/migrations/0029_alter_transaction_options.py new file mode 100644 index 0000000..c06b7cd --- /dev/null +++ b/app/apps/transactions/migrations/0029_alter_transaction_options.py @@ -0,0 +1,17 @@ +# Generated by Django 5.1.5 on 2025-01-19 14:59 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('transactions', '0028_transaction_internal_note'), + ] + + operations = [ + migrations.AlterModelOptions( + name='transaction', + options={'default_manager_name': 'objects', 'verbose_name': 'Transaction', 'verbose_name_plural': 'Transactions'}, + ), + ] diff --git a/app/apps/transactions/migrations/0030_transaction_deleted_transaction_deleted_at.py b/app/apps/transactions/migrations/0030_transaction_deleted_transaction_deleted_at.py new file mode 100644 index 0000000..35f4c91 --- /dev/null +++ b/app/apps/transactions/migrations/0030_transaction_deleted_transaction_deleted_at.py @@ -0,0 +1,23 @@ +# Generated by Django 5.1.5 on 2025-01-19 14:59 + +from django.db import migrations, models + + +class 
Migration(migrations.Migration): + + dependencies = [ + ('transactions', '0029_alter_transaction_options'), + ] + + operations = [ + migrations.AddField( + model_name='transaction', + name='deleted', + field=models.BooleanField(default=False, verbose_name='Deleted'), + ), + migrations.AddField( + model_name='transaction', + name='deleted_at', + field=models.DateTimeField(blank=True, null=True, verbose_name='Deleted At'), + ), + ] diff --git a/app/apps/transactions/migrations/0031_alter_transaction_deleted.py b/app/apps/transactions/migrations/0031_alter_transaction_deleted.py new file mode 100644 index 0000000..b5d2dc4 --- /dev/null +++ b/app/apps/transactions/migrations/0031_alter_transaction_deleted.py @@ -0,0 +1,18 @@ +# Generated by Django 5.1.5 on 2025-01-19 15:14 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('transactions', '0030_transaction_deleted_transaction_deleted_at'), + ] + + operations = [ + migrations.AlterField( + model_name='transaction', + name='deleted', + field=models.BooleanField(db_index=True, default=False, verbose_name='Deleted'), + ), + ] diff --git a/app/apps/transactions/migrations/0032_transaction_created_at_transaction_updated_at.py b/app/apps/transactions/migrations/0032_transaction_created_at_transaction_updated_at.py new file mode 100644 index 0000000..46e76ae --- /dev/null +++ b/app/apps/transactions/migrations/0032_transaction_created_at_transaction_updated_at.py @@ -0,0 +1,25 @@ +# Generated by Django 5.1.5 on 2025-01-19 16:48 + +import django.utils.timezone +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('transactions', '0031_alter_transaction_deleted'), + ] + + operations = [ + migrations.AddField( + model_name='transaction', + name='created_at', + field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now), + preserve_default=False, + ), + migrations.AddField( + 
model_name='transaction', + name='updated_at', + field=models.DateTimeField(auto_now=True), + ), + ] diff --git a/app/apps/transactions/migrations/0033_transaction_internal_id.py b/app/apps/transactions/migrations/0033_transaction_internal_id.py new file mode 100644 index 0000000..b7d578c --- /dev/null +++ b/app/apps/transactions/migrations/0033_transaction_internal_id.py @@ -0,0 +1,20 @@ +# Generated by Django 5.1.5 on 2025-01-21 01:56 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("transactions", "0032_transaction_created_at_transaction_updated_at"), + ] + + operations = [ + migrations.AddField( + model_name="transaction", + name="internal_id", + field=models.TextField( + blank=True, null=True, unique=True, verbose_name="Internal ID" + ), + ), + ] diff --git a/app/apps/transactions/models.py b/app/apps/transactions/models.py index 70bbc94..4b21019 100644 --- a/app/apps/transactions/models.py +++ b/app/apps/transactions/models.py @@ -6,6 +6,7 @@ from django.db import models, transaction from django.db.models import Q from django.utils import timezone from django.utils.translation import gettext_lazy as _ +from django.conf import settings from apps.common.fields.month_year import MonthYearModelField from apps.common.functions.decimals import truncate_decimal @@ -15,6 +16,53 @@ from apps.transactions.validators import validate_decimal_places, validate_non_n logger = logging.getLogger() +class SoftDeleteQuerySet(models.QuerySet): + def delete(self): + if not settings.ENABLE_SOFT_DELETE: + # If soft deletion is disabled, perform a normal delete + return super().delete() + + # Separate the queryset into already deleted and not deleted objects + already_deleted = self.filter(deleted=True) + not_deleted = self.filter(deleted=False) + + # Use a transaction to ensure atomicity + with transaction.atomic(): + # Perform hard delete on already deleted objects + hard_deleted_count = 
already_deleted._raw_delete(already_deleted.db)
+
+            # Perform soft delete on not deleted objects
+            soft_deleted_count = not_deleted.update(
+                deleted=True, deleted_at=timezone.now()
+            )
+
+            # Return a tuple of counts as expected by Django's delete method
+            return (
+                hard_deleted_count + soft_deleted_count,
+                {"Transaction": hard_deleted_count + soft_deleted_count},
+            )
+
+    def hard_delete(self):
+        return super().delete()
+
+
+class SoftDeleteManager(models.Manager):
+    def get_queryset(self):
+        qs = SoftDeleteQuerySet(self.model, using=self._db)
+        return qs if not settings.ENABLE_SOFT_DELETE else qs.filter(deleted=False)
+
+
+class AllObjectsManager(models.Manager):
+    def get_queryset(self):
+        qs = SoftDeleteQuerySet(self.model, using=self._db)
+        return qs.filter(deleted=False) if False else qs
+
+
+class DeletedObjectsManager(models.Manager):
+    def get_queryset(self):
+        qs = SoftDeleteQuerySet(self.model, using=self._db)
+        return qs.filter(deleted=True)
+
+
 class TransactionCategory(models.Model):
     name = models.CharField(max_length=255, verbose_name=_("Name"), unique=True)
     mute = models.BooleanField(default=False, verbose_name=_("Mute"))
@@ -141,11 +189,29 @@ class Transaction(models.Model):
         related_name="transactions",
         verbose_name=_("Recurring Transaction"),
     )
+    internal_note = models.TextField(blank=True, verbose_name=_("Internal Note"))
+    internal_id = models.TextField(
+        blank=True, null=True, unique=True, verbose_name=_("Internal ID")
+    )
+
+    deleted = models.BooleanField(
+        default=False, verbose_name=_("Deleted"), db_index=True
+    )
+    created_at = models.DateTimeField(auto_now_add=True)
+    updated_at = models.DateTimeField(auto_now=True)
+    deleted_at = models.DateTimeField(
+        null=True, blank=True, verbose_name=_("Deleted At")
+    )
+
+    objects = SoftDeleteManager.from_queryset(SoftDeleteQuerySet)()
+    all_objects = AllObjectsManager.from_queryset(SoftDeleteQuerySet)()
+    deleted_objects = DeletedObjectsManager.from_queryset(SoftDeleteQuerySet)()
 
     class Meta:
verbose_name = _("Transaction") verbose_name_plural = _("Transactions") db_table = "transactions" + default_manager_name = "objects" def save(self, *args, **kwargs): self.amount = truncate_decimal( @@ -160,6 +226,17 @@ class Transaction(models.Model): self.full_clean() super().save(*args, **kwargs) + def delete(self, *args, **kwargs): + if settings.ENABLE_SOFT_DELETE: + self.deleted = True + self.deleted_at = timezone.now() + self.save() + else: + super().delete(*args, **kwargs) + + def hard_delete(self, *args, **kwargs): + super().delete(*args, **kwargs) + def exchanged_amount(self): if self.account.exchange_currency: converted_amount, prefix, suffix, decimal_places = convert( @@ -178,6 +255,10 @@ class Transaction(models.Model): return None + def __str__(self): + type_display = self.get_type_display() + return f"{self.description} - {type_display} - {self.account} - {self.date}" + class InstallmentPlan(models.Model): class Recurrence(models.TextChoices): diff --git a/app/apps/transactions/tasks.py b/app/apps/transactions/tasks.py index e0bfafc..0833f4e 100644 --- a/app/apps/transactions/tasks.py +++ b/app/apps/transactions/tasks.py @@ -1,9 +1,13 @@ import logging +from datetime import timedelta + +from cachalot.api import cachalot_disabled, invalidate +from django.utils import timezone +from django.conf import settings from procrastinate.contrib.django import app -from apps.transactions.models import RecurringTransaction - +from apps.transactions.models import RecurringTransaction, Transaction logger = logging.getLogger(__name__) @@ -19,3 +23,31 @@ def generate_recurring_transactions(timestamp=None): exc_info=True, ) raise e + + +@app.periodic(cron="10 1 * * *") +@app.task +def cleanup_deleted_transactions(): + with cachalot_disabled(): + if settings.ENABLE_SOFT_DELETE and settings.KEEP_DELETED_TRANSACTIONS_FOR == 0: + return "KEEP_DELETED_TRANSACTIONS_FOR is 0, no cleanup performed." 
+ + if not settings.ENABLE_SOFT_DELETE: + # Hard delete only rows previously soft-deleted (select them + # explicitly: `deleted_objects` is unfiltered when soft deletion + # is disabled, so using it here would erase ALL transactions). + deleted_count, _ = Transaction.all_objects.filter( + deleted=True + ).hard_delete() + return ( + f"Hard deleted {deleted_count} transactions (soft deletion disabled)." + ) + + # Calculate the cutoff date + cutoff_date = timezone.now() - timedelta( + days=settings.KEEP_DELETED_TRANSACTIONS_FOR + ) + + invalidate("transactions.Transaction") + + # Hard delete soft-deleted transactions older than the cutoff date + old_transactions = Transaction.deleted_objects.filter(deleted_at__lt=cutoff_date) + deleted_count, _ = old_transactions.hard_delete() + + return f"Hard deleted {deleted_count} objects older than {settings.KEEP_DELETED_TRANSACTIONS_FOR} days." diff --git a/app/apps/users/migrations/0014_alter_usersettings_date_format_and_more.py b/app/apps/users/migrations/0014_alter_usersettings_date_format_and_more.py new file mode 100644 index 0000000..e38b096 --- /dev/null +++ b/app/apps/users/migrations/0014_alter_usersettings_date_format_and_more.py @@ -0,0 +1,28 @@ +# Generated by Django 5.1.5 on 2025-01-23 03:05 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('users', '0013_usersettings_date_format_and_more'), + ] + + operations = [ + migrations.AlterField( + model_name='usersettings', + name='date_format', + field=models.CharField(default='SHORT_DATE_FORMAT', max_length=100, verbose_name='Date Format'), + ), + migrations.AlterField( + model_name='usersettings', + name='datetime_format', + field=models.CharField(default='SHORT_DATETIME_FORMAT', max_length=100, verbose_name='Datetime Format'), + ), + migrations.AlterField( + model_name='usersettings', + name='language', + field=models.CharField(choices=[('auto', 'Auto'), ('en', 'English'), ('nl', 'Nederlands'), ('pt-br', 'Português (Brasil)')], default='auto', max_length=10, verbose_name='Language'), + ), + ] diff --git a/app/import_presets/.gitkeep b/app/import_presets/.gitkeep new file
mode 100644 index 0000000..e69de29 diff --git a/app/import_presets/nuconta/config.yml b/app/import_presets/nuconta/config.yml new file mode 100644 index 0000000..20fbdc3 --- /dev/null +++ b/app/import_presets/nuconta/config.yml @@ -0,0 +1,54 @@ +settings: + file_type: csv + delimiter: "," + encoding: utf-8 + skip_lines: 0 + importing: transactions + trigger_transaction_rules: true + skip_errors: true + +mapping: + account: + target: account + default: + type: name + + date: + target: date + source: Data + format: "%d/%m/%Y" + + amount: + target: amount + source: Valor + + description: + target: description + source: Descrição + transformations: + - type: split + separator: " - " + index: 0 + + type: + source: "Valor" + target: "type" + detection_method: sign + + notes: + target: notes + source: Notes + + internal_id: + target: internal_id + source: Identificador + + is_paid: + target: is_paid + detection_method: always_paid + +deduplicate: + - type: compare + fields: + - internal_id + match_type: lax diff --git a/app/import_presets/nuconta/manifest.json b/app/import_presets/nuconta/manifest.json new file mode 100644 index 0000000..a9d025f --- /dev/null +++ b/app/import_presets/nuconta/manifest.json @@ -0,0 +1,7 @@ +{ + "author": "eitchtee", + "description": "Importe suas transações da conta corrente do Nubank", + "schema_version": 1, + "name": "Nubank - Conta Corrente", + "message": "Mude '' para o nome da sua Nuconta dentro do WYGIWYH" +} diff --git a/app/static/img/favicon/manifest.json b/app/static/img/favicon/manifest.json deleted file mode 100644 index 9d5c19d..0000000 --- a/app/static/img/favicon/manifest.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "name": "WYGIWYH", - "icons": [ - { - "src": "\/static\/img\/favicon\/android-icon-36x36.png", - "sizes": "36x36", - "type": "image\/png", - "density": "0.75" - }, - { - "src": "\/static\/img\/favicon\/android-icon-48x48.png", - "sizes": "48x48", - "type": "image\/png", - "density": "1.0" - }, - { - "src": 
"\/static\/img\/favicon\/android-icon-72x72.png", - "sizes": "72x72", - "type": "image\/png", - "density": "1.5" - }, - { - "src": "\/static\/img\/favicon\/android-icon-96x96.png", - "sizes": "96x96", - "type": "image\/png", - "density": "2.0" - }, - { - "src": "\/static\/img\/favicon\/android-icon-144x144.png", - "sizes": "144x144", - "type": "image\/png", - "density": "3.0" - }, - { - "src": "\/static\/img\/favicon\/android-icon-192x192.png", - "sizes": "192x192", - "type": "image\/png", - "density": "4.0" - } - ] -} diff --git a/app/static/img/pwa/splash-640x1136.png b/app/static/img/pwa/splash-640x1136.png new file mode 100644 index 0000000..8f5db6b Binary files /dev/null and b/app/static/img/pwa/splash-640x1136.png differ diff --git a/app/static/img/pwa/splash-750x1334.png b/app/static/img/pwa/splash-750x1334.png new file mode 100644 index 0000000..93098b3 Binary files /dev/null and b/app/static/img/pwa/splash-750x1334.png differ diff --git a/app/templates/import_app/fragments/profiles/add.html b/app/templates/import_app/fragments/profiles/add.html new file mode 100644 index 0000000..03eb9a5 --- /dev/null +++ b/app/templates/import_app/fragments/profiles/add.html @@ -0,0 +1,19 @@ +{% extends 'extends/offcanvas.html' %} +{% load json %} +{% load i18n %} +{% load crispy_forms_tags %} + +{% block title %}{% translate 'Add new import profile' %}{% endblock %} + +{% block body %} +{% if message %} + +{% endif %} +
+ {% crispy form %} +
+{% endblock %} diff --git a/app/templates/import_app/fragments/profiles/edit.html b/app/templates/import_app/fragments/profiles/edit.html new file mode 100644 index 0000000..fa94bef --- /dev/null +++ b/app/templates/import_app/fragments/profiles/edit.html @@ -0,0 +1,11 @@ +{% extends 'extends/offcanvas.html' %} +{% load i18n %} +{% load crispy_forms_tags %} + +{% block title %}{% translate 'Edit import profile' %}{% endblock %} + +{% block body %} +
+ {% crispy form %} +
+{% endblock %} diff --git a/app/templates/import_app/fragments/profiles/list.html b/app/templates/import_app/fragments/profiles/list.html new file mode 100644 index 0000000..9c973b2 --- /dev/null +++ b/app/templates/import_app/fragments/profiles/list.html @@ -0,0 +1,90 @@ +{% load i18n %} +
+
+ {% spaceless %} +
{% translate 'Import Profiles' %} + + + + +
+ {% endspaceless %} +
+ +
+
+ {% if profiles %} + + + + + + + + + + + {% for profile in profiles %} + + + + + + {% endfor %} + +
{% translate 'Name' %}{% translate 'Version' %}
+
+ + + + + + + +
+
{{ profile.name }}{{ profile.get_version_display }}
+ {% else %} + + {% endif %} +
+
+
diff --git a/app/templates/import_app/fragments/profiles/list_presets.html b/app/templates/import_app/fragments/profiles/list_presets.html new file mode 100644 index 0000000..0b64342 --- /dev/null +++ b/app/templates/import_app/fragments/profiles/list_presets.html @@ -0,0 +1,43 @@ +{% extends 'extends/offcanvas.html' %} +{% load i18n %} +{% load crispy_forms_tags %} + +{% block title %}{% translate 'Import Presets' %}{% endblock %} + +{% block body %} + {% if presets %} + + +{% endblock %} diff --git a/app/templates/import_app/fragments/runs/add.html b/app/templates/import_app/fragments/runs/add.html new file mode 100644 index 0000000..9997044 --- /dev/null +++ b/app/templates/import_app/fragments/runs/add.html @@ -0,0 +1,11 @@ +{% extends 'extends/offcanvas.html' %} +{% load i18n %} +{% load crispy_forms_tags %} + +{% block title %}{% translate 'Import file with profile' %} {{ profile.name }}{% endblock %} + +{% block body %} +
+ {% crispy form %} +
+{% endblock %} diff --git a/app/templates/import_app/fragments/runs/list.html b/app/templates/import_app/fragments/runs/list.html new file mode 100644 index 0000000..f67054c --- /dev/null +++ b/app/templates/import_app/fragments/runs/list.html @@ -0,0 +1,120 @@ +{% extends 'extends/offcanvas.html' %} +{% load i18n %} +{% load crispy_forms_tags %} + +{% block title %}{% translate 'Runs for ' %}{{ profile.name }}{% endblock %} + +{% block body %} +
+ {% if runs %} +
+ {% for run in runs %} +
+
+
+ {{ run.get_status_display }} +
+
+
{{ run.id }}({{ run.file_name }})
+
+
+
+
+
+
+ {% trans 'Total Items' %} +
+
+ {{ run.total_rows }} +
+
+
+
+ +
+
+
+
+ {% trans 'Processed Items' %} +
+
+ {{ run.processed_rows }} +
+
+
+
+ +
+
+
+
+ {% trans 'Skipped Items' %} +
+
+ {{ run.skipped_rows }} +
+
+
+
+ +
+
+
+
+ {% trans 'Failed Items' %} +
+
+ {{ run.failed_rows }} +
+
+
+
+ +
+
+
+
+ {% trans 'Successful Items' %} +
+
+ {{ run.successful_rows }} +
+
+
+
+ +
+
+ +
+
+ {% endfor %} + {% else %} + + {% endif %} +
+
+{% endblock %} diff --git a/app/templates/import_app/fragments/runs/log.html b/app/templates/import_app/fragments/runs/log.html new file mode 100644 index 0000000..a7445a4 --- /dev/null +++ b/app/templates/import_app/fragments/runs/log.html @@ -0,0 +1,13 @@ +{% extends 'extends/offcanvas.html' %} +{% load i18n %} +{% load crispy_forms_tags %} + +{% block title %}{% translate 'Logs for' %} #{{ run.id }}{% endblock %} + +{% block body %} +
+
+ {{ run.logs|linebreaks }} +
+
+{% endblock %} diff --git a/app/templates/import_app/pages/profiles_index.html b/app/templates/import_app/pages/profiles_index.html new file mode 100644 index 0000000..a5c59ee --- /dev/null +++ b/app/templates/import_app/pages/profiles_index.html @@ -0,0 +1,8 @@ +{% extends "layouts/base.html" %} +{% load i18n %} + +{% block title %}{% translate 'Import Profiles' %}{% endblock %} + +{% block content %} +
+{% endblock %} diff --git a/app/templates/import_app/pages/runs_index.html b/app/templates/import_app/pages/runs_index.html new file mode 100644 index 0000000..38a48a6 --- /dev/null +++ b/app/templates/import_app/pages/runs_index.html @@ -0,0 +1,8 @@ +{% extends "layouts/base.html" %} +{% load i18n %} + +{% block title %}{% translate 'Import Runs' %}{% endblock %} + +{% block content %} +
+{% endblock %} diff --git a/app/templates/includes/head/favicons.html b/app/templates/includes/head/favicons.html index 52a872b..458ad45 100644 --- a/app/templates/includes/head/favicons.html +++ b/app/templates/includes/head/favicons.html @@ -12,7 +12,6 @@ - - \ No newline at end of file + diff --git a/app/templates/includes/navbar.html b/app/templates/includes/navbar.html index 66aafa4..a4ea8cc 100644 --- a/app/templates/includes/navbar.html +++ b/app/templates/includes/navbar.html @@ -120,6 +120,8 @@
  • {% translate 'Rules' %}
  • +
  • {% translate 'Import' %} beta
  • diff --git a/app/templates/layouts/base.html b/app/templates/layouts/base.html index b55ceba..6545aa9 100644 --- a/app/templates/layouts/base.html +++ b/app/templates/layouts/base.html @@ -1,3 +1,4 @@ +{% load pwa %} {% load formats %} {% load i18n %} {% load title %} @@ -15,6 +16,7 @@ {% include 'includes/head/favicons.html' %} + {% progressive_web_app_meta %} {% include 'includes/styles.html' %} {% block extra_styles %}{% endblock %} diff --git a/app/templates/layouts/base_auth.html b/app/templates/layouts/base_auth.html index c38d0d3..6cd9a16 100644 --- a/app/templates/layouts/base_auth.html +++ b/app/templates/layouts/base_auth.html @@ -1,3 +1,4 @@ +{% load pwa %} {% load title %} {% load webpack_loader %} @@ -11,8 +12,9 @@ {% endblock title %} {% endfilter %} - + {% include 'includes/head/favicons.html' %} + {% progressive_web_app_meta %} {% include 'includes/styles.html' %} {% block extra_styles %}{% endblock %} diff --git a/app/templates/offline.html b/app/templates/offline.html new file mode 100644 index 0000000..4e738ac --- /dev/null +++ b/app/templates/offline.html @@ -0,0 +1,79 @@ + + + + + + Offline + + + +
    + + + + +

    Either you or your WYGIWYH instance is offline.

    +
    +
    + + + + diff --git a/app/templates/pwa/serviceworker.js b/app/templates/pwa/serviceworker.js new file mode 100644 index 0000000..3dfdfba --- /dev/null +++ b/app/templates/pwa/serviceworker.js @@ -0,0 +1,74 @@ +// Base Service Worker implementation. To use your own Service Worker, set the PWA_SERVICE_WORKER_PATH variable in settings.py + +var staticCacheName = "django-pwa-v" + new Date().getTime(); +var filesToCache = [ + '/offline/', + '/static/css/django-pwa-app.css', + '/static/img/favicon/android-icon-192x192.png', + '/static/img/favicon/apple-icon-180x180.png', + '/static/img/pwa/splash-640x1136.png', + '/static/img/pwa/splash-750x1334.png', +]; + +// Cache on install +self.addEventListener("install", event => { + this.skipWaiting(); + event.waitUntil( + caches.open(staticCacheName) + .then(cache => { + return cache.addAll(filesToCache); + }) + ); +}); + +// Clear cache on activate +self.addEventListener('activate', event => { + event.waitUntil( + caches.keys().then(cacheNames => { + return Promise.all( + cacheNames + .filter(cacheName => (cacheName.startsWith("django-pwa-"))) + .filter(cacheName => (cacheName !== staticCacheName)) + .map(cacheName => caches.delete(cacheName)) + ); + }) + ); +}); + +// Serve from Cache +self.addEventListener("fetch", event => { + event.respondWith( + caches.match(event.request) + .then(response => { + if (response) { + return response; + } + return fetch(event.request).catch(() => { + const isHtmxRequest = event.request.headers.get('HX-Request') === 'true'; + const isHtmxBoosted = event.request.headers.get('HX-Boosted') === 'true'; + + if (!isHtmxRequest || isHtmxBoosted) { + // Serve offline content without changing URL + return caches.match('/offline/').then(offlineResponse => { + if (offlineResponse) { + return offlineResponse.text().then(offlineText => { + return new Response(offlineText, { + status: 200, + headers: {'Content-Type': 'text/html'} + }); + }); + } + // If offline page is not in cache, return a simple 
offline message + return new Response('

    Offline

    The page is not available offline.

    ', { + status: 200, + headers: {'Content-Type': 'text/html'} + }); + }); + } else { + // For non-boosted HTMX requests, let it fail normally + throw new Error('Network request failed'); + } + }); + }) + ); +}); diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml index c06c0fd..133d522 100644 --- a/docker-compose.dev.yml +++ b/docker-compose.dev.yml @@ -1,6 +1,6 @@ volumes: wygiwyh_dev_postgres_data: {} - temp: + wygiwyh_temp: services: web: &django @@ -13,6 +13,7 @@ services: volumes: - ./app/:/usr/src/app/:z - ./frontend/:/usr/src/frontend:z + - wygiwyh_temp:/usr/src/app/temp/ ports: - "${OUTBOUND_PORT}:8000" env_file: diff --git a/docker-compose.prod.yml b/docker-compose.prod.yml index a12b4ed..b840e46 100644 --- a/docker-compose.prod.yml +++ b/docker-compose.prod.yml @@ -9,6 +9,8 @@ services: - .env depends_on: - db + volumes: + - wygiwyh_temp:/usr/src/app/temp/ restart: unless-stopped db: @@ -29,5 +31,10 @@ services: - db env_file: - .env + volumes: + - wygiwyh_temp:/usr/src/app/temp/ command: /start-procrastinate restart: unless-stopped + +volumes: + wygiwyh_temp: diff --git a/frontend/src/styles/_animations.scss b/frontend/src/styles/_animations.scss index 765a921..5b9c135 100644 --- a/frontend/src/styles/_animations.scss +++ b/frontend/src/styles/_animations.scss @@ -58,13 +58,21 @@ // HTMX Loading @keyframes spin { - 0% { transform: rotate(0deg); } - 100% { transform: rotate(360deg); } + 0% { + transform: rotate(0deg); + } + 100% { + transform: rotate(360deg); + } } @keyframes fade-in { - 0% { opacity: 0; } - 100% { opacity: 1; } + 0% { + opacity: 0; + } + 100% { + opacity: 1; + } } .show-loading.htmx-request { @@ -103,7 +111,7 @@ } .swing-out-top-bck { - animation: swing-out-top-bck 0.45s cubic-bezier(0.600, -0.280, 0.735, 0.045) both; + animation: swing-out-top-bck 0.45s cubic-bezier(0.600, -0.280, 0.735, 0.045) both; } /* ---------------------------------------------- @@ -155,7 +163,7 @@ } .scale-in-center { - animation: scale-in-center 0.5s 
cubic-bezier(0.250, 0.460, 0.450, 0.940) both; + animation: scale-in-center 0.5s cubic-bezier(0.250, 0.460, 0.450, 0.940) both; } /* ---------------------------------------------- @@ -182,5 +190,18 @@ } .scale-out-center { - animation: scale-out-center 0.5s cubic-bezier(0.550, 0.085, 0.680, 0.530) both; + animation: scale-out-center 0.5s cubic-bezier(0.550, 0.085, 0.680, 0.530) both; +} + +@keyframes flash { + 0%, 100% { + opacity: 1; + } + 50% { + opacity: 0.3; + } +} + +.flashing { + animation: flash 1s infinite; } diff --git a/frontend/src/styles/style.scss b/frontend/src/styles/style.scss index 1bb7f8a..7fc980b 100644 --- a/frontend/src/styles/style.scss +++ b/frontend/src/styles/style.scss @@ -53,3 +53,27 @@ select[multiple] { .transaction:has(input[type="checkbox"]:checked) > .transaction-item { background-color: $primary-bg-subtle-dark; } + +.offline { + text-align: center; + margin: 0; + padding: 0; + display: flex; + flex-direction: column; + justify-content: center; + align-items: center; + height: 100vh; + background-color: #222; + color: #fbb700; + font-family: Arial, sans-serif; +} + +.wifi-icon { + width: 100px; + height: 100px; +} + +#offline-countdown { + margin-top: 20px; + font-size: 14px; +} diff --git a/requirements.txt b/requirements.txt index b4e4f02..1fc5d50 100644 --- a/requirements.txt +++ b/requirements.txt @@ -9,7 +9,7 @@ django-filter==24.3 django-debug-toolbar==4.3.0 django-cachalot~=2.6.3 django-cotton~=1.2.1 - +django-pwa~=2.0.1 djangorestframework~=3.15.2 drf-spectacular~=0.27.2 @@ -24,3 +24,5 @@ requests~=2.32.3 pytz~=2024.2 python-dateutil~=2.9.0.post0 simpleeval~=1.0.0 +pydantic~=2.10.5 +PyYAML~=6.0.2