Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions .jules/sentinel.md
Original file line number Diff line number Diff line change
Expand Up @@ -10,3 +10,7 @@
**Vulnerability:** The `_legacy_export_to_video_bytes` function in `src/nodetool/media/video/video_utils.py` used a hardcoded path (`/tmp/temp_video.mp4`) for temporary file storage during video encoding. This allowed for symlink attacks, race conditions, and cross-tenant data leakage if multiple processes or users executed the function concurrently.
**Learning:** Hardcoded temporary paths are a common anti-pattern that can easily be overlooked in fallback or legacy code paths. They violate the principle of isolation and can lead to severe vulnerabilities in multi-user environments.
**Prevention:** Always use secure temporary file creation mechanisms like `tempfile.NamedTemporaryFile` with appropriate cleanup logic (e.g., `delete=False` followed by a `try...finally` block for manual removal) to guarantee uniqueness and prevent race conditions.
## 2024-05-18 - Parameterized DDL Operations in Database Adapters
**Vulnerability:** SQL Injection in Data Definition Language (DDL) queries. Methods like `create_table`, `migrate_table`, and `create_index` in `src/nodetool/models/postgres_adapter.py` used unsafe Python f-string concatenation for table names, column names, and types.
**Learning:** Because DDL statements modify database structure, identifiers such as table, column, and index names cannot go through the normal value parameterization (`$1` or `%s`) that DB drivers provide — it only works for values, not identifiers. As a result, developers sometimes mistakenly fall back to standard string formatting for these names.
**Prevention:** Always use safe query composition objects specifically designed for DDL. In psycopg (PostgreSQL), strictly use `psycopg.sql.Identifier` for dynamic table, column, and schema names, reserving `psycopg.sql.SQL` for trusted, literal SQL fragments only — `SQL(...)` performs no escaping, so it must never wrap user-supplied input.
2 changes: 1 addition & 1 deletion Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -152,4 +152,4 @@ HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
CMD curl -f http://localhost:7777/health || exit 1

# Run the NodeTool server
CMD ["python", "-m", "nodetool.api.run_server"]
CMD ["python", "-m", "nodetool.worker"]
2 changes: 1 addition & 1 deletion docker-compose.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ services:
command:
- python
- -m
- nodetool.api.run_server
- nodetool.worker
- --host
- 0.0.0.0
- --port
Expand Down
43 changes: 34 additions & 9 deletions src/nodetool/models/postgres_adapter.py
Original file line number Diff line number Diff line change
Expand Up @@ -281,11 +281,19 @@ async def create_table(self, suffix: str = "") -> None:
table_name = f"{self.table_name}{suffix}"
fields = self.fields
primary_key = self.get_primary_key()
sql = f"CREATE TABLE IF NOT EXISTS {table_name} ("

elements = []
for field_name, field in fields.items():
field_type = field.annotation
sql += f"{field_name} {get_postgres_type(field_type)}, "
sql += f"PRIMARY KEY ({primary_key}))"
pg_type = get_postgres_type(field_type)
elements.append(SQL("{} {}").format(Identifier(field_name), SQL(pg_type)))

elements.append(SQL("PRIMARY KEY ({})").format(Identifier(primary_key)))

sql = SQL("CREATE TABLE IF NOT EXISTS {} ({})").format(
Identifier(table_name),
SQL(", ").join(elements)
)

try:
pool = await self._get_pool()
Expand All @@ -299,7 +307,7 @@ async def create_table(self, suffix: str = "") -> None:

async def drop_table(self) -> None:
"""Drops the database table associated with this adapter."""
sql = f"DROP TABLE IF EXISTS {self.table_name}"
sql = SQL("DROP TABLE IF EXISTS {}").format(Identifier(self.table_name))
pool = await self._get_pool()
async with pool.connection() as conn:
async with conn.cursor() as cursor:
Expand Down Expand Up @@ -330,11 +338,20 @@ async def migrate_table(self) -> None:
# Alter table to add new fields
for field_name in fields_to_add:
field_type = get_postgres_type(self.fields[field_name].annotation)
await cursor.execute(f"ALTER TABLE {self.table_name} ADD COLUMN {field_name} {field_type}") # type: ignore[arg-type]
add_col_sql = SQL("ALTER TABLE {} ADD COLUMN {} {}").format(
Identifier(self.table_name),
Identifier(field_name),
SQL(field_type)
)
await cursor.execute(add_col_sql) # type: ignore[arg-type]

# Alter table to remove fields
for field_name in fields_to_remove:
await cursor.execute(f"ALTER TABLE {self.table_name} DROP COLUMN {field_name}") # type: ignore[arg-type]
drop_col_sql = SQL("ALTER TABLE {} DROP COLUMN {}").format(
Identifier(self.table_name),
Identifier(field_name)
)
await cursor.execute(drop_col_sql) # type: ignore[arg-type]

await conn.commit()

Expand Down Expand Up @@ -530,8 +547,16 @@ async def execute_sql(self, sql: str, params: Optional[dict[str, Any]] = None) -

async def create_index(self, index_name: str, columns: list[str], unique: bool = False) -> None:
unique_str = "UNIQUE" if unique else ""
columns_str = ", ".join(columns)
sql = f"CREATE {unique_str} INDEX IF NOT EXISTS {index_name} ON {self.table_name} ({columns_str})"

# Build list of identifiers for columns
col_identifiers = [Identifier(col) for col in columns]

sql = SQL("CREATE {} INDEX IF NOT EXISTS {} ON {} ({})").format(
SQL(unique_str),
Identifier(index_name),
Identifier(self.table_name),
SQL(", ").join(col_identifiers)
)

try:
pool = await self._get_pool()
Expand All @@ -544,7 +569,7 @@ async def create_index(self, index_name: str, columns: list[str], unique: bool =
raise e

async def drop_index(self, index_name: str) -> None:
sql = f"DROP INDEX IF EXISTS {index_name}"
sql = SQL("DROP INDEX IF EXISTS {}").format(Identifier(index_name))

try:
pool = await self._get_pool()
Expand Down
Loading