Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 4 additions & 3 deletions backend/justfile
Original file line number Diff line number Diff line change
Expand Up @@ -81,9 +81,10 @@ uv_run := "uv run --active"
# Repo root (parent of backend/) - clean() normalizes path (removes ..)
repo_root := clean(justfile_directory() / "..")

# Absolute-path PYTHONPATH covering core/premium/enterprise src and tests;
# preserves any pre-existing PYTHONPATH via ${PYTHONPATH:+:$PYTHONPATH}.
_set_pythonpath := 'export PYTHONPATH="' + repo_root / 'backend/src:' + repo_root / 'premium/backend/src:' + repo_root / 'enterprise/backend/src:' + repo_root / 'backend/tests:' + repo_root / 'premium/backend/tests:' + repo_root / 'enterprise/backend/tests${PYTHONPATH:+:$PYTHONPATH}"'
# Helper to load .env.local if present and set PYTHONPATH with absolute paths
# Include this at the start of bash recipes that need env vars
_load_env := 'if [ -f "../.env.local" ]; then set -a; source "../.env.local"; set +a; fi; ' + _set_pythonpath

# Source directories
backend_source_dirs := "src/ ../premium/backend/src/ ../enterprise/backend/src/"
Expand Down Expand Up @@ -228,14 +229,14 @@ alias f := fix

# PYTHONPATH for test fixtures across all test directories
test_pythonpath := "tests:../premium/backend/tests:../enterprise/backend/tests"
# Pin pytest to this package's pytest.ini so marker/config discovery does not
# drift when invoked from another working directory.
_pytest := 'PYTHONPATH="' + test_pythonpath + ':${PYTHONPATH:-}" ' + uv_run + ' pytest -c pytest.ini'

# Run tests. Pass -n=auto to run in parallel with pytest-xdist
[group('3 - testing')]
test *ARGS: _check-dev
    #!/usr/bin/env bash
    set -euo pipefail
    {{ _set_pythonpath }}
    {{ _pytest }} {{ ARGS }}

# Run tests with coverage report
Expand Down
7 changes: 4 additions & 3 deletions backend/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -89,8 +89,8 @@ dependencies = [
"langchain==0.3.28",
"langchain-openai==0.3.35",
"openai==2.14.0",
"anthropic==0.77.0",
"mistralai==1.1.0",
"anthropic==0.84.0",
"mistralai==2.0.0",
"icalendar==6.3.2",
"jira2markdown==0.5",
"openpyxl==3.1.5",
Expand All @@ -100,7 +100,8 @@ dependencies = [
"genson==1.3.0",
"pyotp==2.9.0",
"qrcode==8.2",
"udspy==0.1.8",
"pydantic-ai-slim[anthropic,bedrock,google,groq,openai]==1.66.0",
"opentelemetry-sdk>=1.20.0",
"netifaces==0.11.0",
"requests-futures>=1.0.2",
]
Expand Down
1 change: 1 addition & 0 deletions backend/pytest.ini
Original file line number Diff line number Diff line change
Expand Up @@ -56,3 +56,4 @@ markers =
workspace_search: All tests related to workspace search functionality
enable_all_signals: Disables signal deferral for this test (all signals enabled)
enable_signals: Enables specific signals for this test (accepts dotted callable paths)
eval: mark test as an eval test (requires LLM API key)
10 changes: 5 additions & 5 deletions backend/src/baserow/config/settings/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -1332,15 +1332,15 @@ def __setitem__(self, key, value):
from sentry_sdk.integrations.django import DjangoIntegration
from sentry_sdk.scrubber import DEFAULT_DENYLIST, EventScrubber

# Exclude integrations whose module-level imports are incompatible:
# - langchain: Python 3.14 type evaluation crash
# - pydantic_ai: sentry-sdk patches ToolManager._call_tool which was
#   removed in pydantic-ai >= 1.x (now execute_tool_call)
# NOTE(review): filtering the private _AUTO_ENABLING_INTEGRATIONS list in
# place is relied upon because the incompatible imports happen during
# auto-discovery, before a disabled_integrations option could take effect.
_sentry_integrations._AUTO_ENABLING_INTEGRATIONS[:] = [
    entry
    for entry in _sentry_integrations._AUTO_ENABLING_INTEGRATIONS
    if "langchain" not in entry and "pydantic_ai" not in entry
]

# Strip PII fields from events in addition to sentry-sdk's default denylist.
SENTRY_DENYLIST = DEFAULT_DENYLIST + ["username", "email", "name"]
Expand Down
18 changes: 18 additions & 0 deletions backend/src/baserow/config/settings/dev.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,24 @@
post_migrate.connect(setup_dev_e2e, dispatch_uid="setup_dev_e2e")


# Mirror logs to a file when BASEROW_LOG_FILE is set (e.g. for AI access when
# running locally). Truncated on each restart.
BASEROW_LOG_FILE = os.getenv("BASEROW_LOG_FILE", "")
if BASEROW_LOG_FILE:
    # mode="w" truncates the previous run's output when Django configures
    # logging; all later writes (including loguru's below) append after that.
    LOGGING["handlers"]["file"] = {  # noqa: F405
        "class": "logging.FileHandler",
        "filename": BASEROW_LOG_FILE,
        "formatter": "console",
        "mode": "w",
    }
    LOGGING["root"]["handlers"].append("file")  # noqa: F405

    # Also route loguru to the same file so modules using loguru (e.g.
    # the assistant telemetry) appear alongside stdlib log output.
    # mode="a" so loguru does not re-truncate the file the stdlib handler
    # just opened. NOTE(review): if this settings module is imported more
    # than once, loguru would gain duplicate sinks — presumed single import.
    from loguru import logger as _loguru_logger

    _loguru_logger.add(BASEROW_LOG_FILE, mode="a")

try:
from .local import * # noqa: F403, F401
except ImportError:
Expand Down
10 changes: 5 additions & 5 deletions backend/src/baserow/config/settings/test.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,13 +26,13 @@
TEST_ENV_VARS = {}

# Prefixes for vars that can be overridden via env vars (for DB/Redis configuration).
# NOTE(review): "BASEROW_EMBEDDINGS_API_URL" is a full variable name rather
# than a prefix — presumably matched with startswith(), which makes an exact
# name work as a degenerate prefix; confirm against getenv_for_tests().
ALLOWED_ENV_PREFIXES = ("DATABASE_", "BASEROW_EMBEDDINGS_API_URL")


def getenv_for_tests(key: str, default: str = "") -> str:
"""
Get env var for tests:
- DATABASE_* vars: check real env first, then TEST_ENV_FILE, then default
- ALLOWED_ENV_PREFIXES vars: use real env var if set, else TEST_ENV_FILE, else default
- Other vars: only use TEST_ENV_FILE or default (never real env)
"""

Expand Down Expand Up @@ -65,9 +65,9 @@ def getenv_for_tests(key: str, default: str = "") -> str:
BASEROW_TESTS_SETUP_DB_FIXTURE = str_to_bool(
    os.getenv("BASEROW_TESTS_SETUP_DB_FIXTURE", "on")
)
# setdefault() keeps any keys an earlier settings layer already put in
# DATABASES["default"]["TEST"]; only MIGRATE is forced here. When the DB
# fixture is used, migrations are skipped entirely.
DATABASES["default"].setdefault("TEST", {})[
    "MIGRATE"
] = not BASEROW_TESTS_SETUP_DB_FIXTURE

# Open a second database connection that can be used to test transactions.
DATABASES["default-copy"] = deepcopy(DATABASES["default"])
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
# Generated by Django 5.2.12 on 2026-03-17 09:16

from django.db import migrations, models
from django.contrib.postgres.operations import AddIndexConcurrently


class Migration(migrations.Migration):
    # AddIndexConcurrently issues CREATE INDEX CONCURRENTLY, which cannot run
    # inside a transaction, so the migration must be non-atomic.
    atomic = False

    dependencies = [
        ('database', '0205_formvieweditrowfield'),
    ]

    operations = [
        # Build the action_timestamp index without the table lock a plain
        # AddIndex would take, so row history writes continue during the build.
        AddIndexConcurrently(
            model_name='rowhistory',
            index=models.Index(fields=['action_timestamp'], name='database_ro_action__6ea699_idx'),
        ),
    ]
29 changes: 24 additions & 5 deletions backend/src/baserow/contrib/database/rows/history.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
from itertools import groupby

from django.conf import settings
from django.db import router
from django.db import connection
from django.db.models import QuerySet
from django.dispatch import receiver

Expand All @@ -18,6 +18,7 @@
from baserow.contrib.database.rows.types import ActionData
from baserow.core.action.signals import action_done
from baserow.core.models import Workspace
from baserow.core.psycopg import sql
from baserow.core.telemetry.utils import baserow_trace
from baserow.core.types import AnyUser

Expand Down Expand Up @@ -68,15 +69,33 @@ def list_row_history(
return queryset

@classmethod
def delete_entries_older_than(cls, cutoff: datetime, batch_size: int = 20_000):
    """
    Deletes all row history entries that are older than the given cutoff date
    in batches to avoid long-running transactions.

    :param cutoff: The date and time before which all entries will be deleted.
    :param batch_size: The number of rows to delete per batch.
    """

    # Composed SQL keeps the table name safely quoted as an identifier.
    table = sql.Identifier(RowHistory._meta.db_table)
    # The CTE caps each DELETE at batch_size rows so every statement holds
    # its locks only briefly instead of one long-running delete.
    query = sql.SQL(
        """
        WITH to_delete AS (
            SELECT id FROM {table}
            WHERE action_timestamp < %s
            LIMIT %s
        )
        DELETE FROM {table}
        USING to_delete
        WHERE {table}.id = to_delete.id
        """
    ).format(table=table)
    # Repeat until a batch deletes nothing, i.e. no rows older than the
    # cutoff remain.
    while True:
        with connection.cursor() as cursor:
            cursor.execute(query, [cutoff, batch_size])
            if cursor.rowcount == 0:
                break


@receiver(action_done)
Expand Down
7 changes: 6 additions & 1 deletion backend/src/baserow/contrib/database/rows/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,4 +59,9 @@ class RowHistory(models.Model):

class Meta:
    ordering = ("-action_timestamp", "-id")
    indexes = [
        # For deleting history entries by action timestamp.
        models.Index(fields=["action_timestamp"]),
        # For listing the history of a row.
        models.Index(fields=["table", "row_id", "-action_timestamp", "-id"]),
    ]
Loading
Loading