Jelajahi Sumber

-dev:后端接口开发:任务相关

LuoChinWen 1 bulan lalu
induk
melakukan
e495e45135

+ 2 - 0
.gitignore

@@ -18,6 +18,8 @@ dist
 .settings/
 *.sublime-workspace
 
+.kiro/specs/
+
 # IDE - VSCode
 .vscode/*
 !.vscode/settings.json

+ 5 - 4
.kiro/specs/annotation-platform/tasks.md

@@ -24,19 +24,20 @@
   - 实现数据库初始化逻辑
   - _Requirements: 5.1, 5.2, 5.8, 6.1, 6.2, 6.3, 9.1, 9.2_
 
-- [ ]* 2.1 编写后端数据库初始化的单元测试
+- [x] 2.1 编写后端数据库初始化的单元测试
+
   - 在 backend/test/ 目录创建测试文件
   - 测试数据库表创建
   - 测试连接管理
   - _Requirements: 9.1, 9.2_
 
-- [ ] 3. 实现后端 Project API
-  - [ ] 3.1 创建 Project 数据模型和 Pydantic schemas
+- [x] 3. 实现后端 Project API
+  - [x] 3.1 创建 Project 数据模型和 Pydantic schemas
     - 定义 ProjectCreate, ProjectUpdate, ProjectResponse schemas
     - 创建数据库模型
     - _Requirements: 6.1_
 
-  - [ ] 3.2 实现 Project CRUD 端点
+  - [x] 3.2 实现 Project CRUD 端点
     - GET /api/projects (列表)
     - POST /api/projects (创建)
     - GET /api/projects/{id} (详情)

+ 2 - 0
.kiro/steering/python-后端规范.md

@@ -2,6 +2,8 @@
 description: Python 后端项目的编码标准和最佳实践
 ---
 
+**使用中文与用户对话**
+
 # Python 后端开发规范
 
 ## 项目结构

TEMPAT SAMPAH
backend/__pycache__/database.cpython-311.pyc


TEMPAT SAMPAH
backend/__pycache__/main.cpython-311.pyc


TEMPAT SAMPAH
backend/__pycache__/models.cpython-311.pyc


TEMPAT SAMPAH
backend/annotation_platform.db


+ 1 - 0
backend/database.py

@@ -19,6 +19,7 @@ def get_db_connection() -> Generator[sqlite3.Connection, None, None]:
     """
     conn = sqlite3.connect(DB_PATH)
     conn.row_factory = sqlite3.Row  # Enable column access by name
+    conn.execute("PRAGMA foreign_keys = ON")  # Enable foreign key constraints
     try:
         yield conn
         conn.commit()

+ 4 - 0
backend/main.py

@@ -6,6 +6,7 @@ from fastapi import FastAPI
 from fastapi.middleware.cors import CORSMiddleware
 from contextlib import asynccontextmanager
 from database import init_database
+from routers import project
 
 
 @asynccontextmanager
@@ -40,6 +41,9 @@ app.add_middleware(
     allow_headers=["*"],
 )
 
+# Include routers
+app.include_router(project.router)
+
 
 @app.get("/")
 async def root():

+ 23 - 0
backend/pytest.ini

@@ -0,0 +1,23 @@
+[pytest]
+# Pytest configuration for backend tests
+
+# Test discovery patterns
+python_files = test_*.py
+python_classes = Test*
+python_functions = test_*
+
+# Test paths
+testpaths = test
+
+# Output options
+addopts = 
+    -v
+    --strict-markers
+    --tb=short
+    --disable-warnings
+
+# Markers
+markers =
+    unit: Unit tests
+    integration: Integration tests
+    slow: Slow running tests

+ 3 - 0
backend/requirements.txt

@@ -2,3 +2,6 @@ fastapi==0.109.0
 uvicorn[standard]==0.27.0
 pydantic==2.5.3
 python-multipart==0.0.6
+pytest==7.4.3
+pytest-cov==4.1.0
+httpx==0.26.0

TEMPAT SAMPAH
backend/routers/__pycache__/__init__.cpython-311.pyc


TEMPAT SAMPAH
backend/routers/__pycache__/project.cpython-311.pyc


+ 281 - 0
backend/routers/project.py

@@ -0,0 +1,281 @@
+"""
+Project API router.
+Provides CRUD endpoints for project management.
+"""
+import uuid
+from typing import List
+from fastapi import APIRouter, HTTPException, status
+from database import get_db_connection
+from schemas.project import ProjectCreate, ProjectUpdate, ProjectResponse
+from models import Project
+
# Router for all /api/projects endpoints; mounted by main.py via include_router.
router = APIRouter(prefix="/api/projects", tags=["projects"])
+
+
@router.get("", response_model=List[ProjectResponse])
async def list_projects():
    """
    List all projects.

    Returns every project together with its task count (LEFT JOIN, so
    projects without tasks report 0), ordered newest first.
    """
    query = """
        SELECT 
            p.id,
            p.name,
            p.description,
            p.config,
            p.created_at,
            COUNT(t.id) as task_count
        FROM projects p
        LEFT JOIN tasks t ON p.id = t.project_id
        GROUP BY p.id
        ORDER BY p.created_at DESC
    """
    with get_db_connection() as conn:
        rows = conn.cursor().execute(query).fetchall()
        # Map each row to the response schema; NULL description -> "".
        return [
            ProjectResponse(
                id=row["id"],
                name=row["name"],
                description=row["description"] or "",
                config=row["config"],
                created_at=row["created_at"],
                task_count=row["task_count"],
            )
            for row in rows
        ]
+
+
@router.post("", response_model=ProjectResponse, status_code=status.HTTP_201_CREATED)
async def create_project(project: ProjectCreate):
    """
    Create a new project.

    A unique id of the form ``proj_<12 hex chars>`` is generated server
    side; the row is inserted and then read back so the response carries
    the database-assigned created_at timestamp.

    Args:
        project: Project creation data

    Returns:
        The created project (task_count is always 0 for a new project)
    """
    new_id = f"proj_{uuid.uuid4().hex[:12]}"

    with get_db_connection() as conn:
        cursor = conn.cursor()

        cursor.execute(
            "INSERT INTO projects (id, name, description, config) VALUES (?, ?, ?, ?)",
            (new_id, project.name, project.description, project.config),
        )

        # Read the row back to pick up the DB-generated created_at.
        cursor.execute(
            "SELECT id, name, description, config, created_at FROM projects WHERE id = ?",
            (new_id,),
        )
        row = cursor.fetchone()

    return ProjectResponse(
        id=row["id"],
        name=row["name"],
        description=row["description"] or "",
        config=row["config"],
        created_at=row["created_at"],
        task_count=0,
    )
+
+
@router.get("/{project_id}", response_model=ProjectResponse)
async def get_project(project_id: str):
    """
    Get project by ID.

    Args:
        project_id: Project unique identifier

    Returns:
        Project details, including the number of attached tasks

    Raises:
        HTTPException: 404 if project not found
    """
    with get_db_connection() as conn:
        cursor = conn.cursor()
        cursor.execute(
            """
            SELECT 
                p.id,
                p.name,
                p.description,
                p.config,
                p.created_at,
                COUNT(t.id) as task_count
            FROM projects p
            LEFT JOIN tasks t ON p.id = t.project_id
            WHERE p.id = ?
            GROUP BY p.id
            """,
            (project_id,),
        )
        row = cursor.fetchone()

    if row is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Project with id '{project_id}' not found"
        )

    return ProjectResponse(
        id=row["id"],
        name=row["name"],
        description=row["description"] or "",
        config=row["config"],
        created_at=row["created_at"],
        task_count=row["task_count"],
    )
+
+
@router.put("/{project_id}", response_model=ProjectResponse)
async def update_project(project_id: str, project: ProjectUpdate):
    """
    Update an existing project.

    Only the fields present (non-None) in the request body are changed;
    omitted fields keep their current values. A request with no fields
    is a no-op that simply returns the current project state.

    Args:
        project_id: Project unique identifier
        project: Partial update data

    Returns:
        The project as stored after the update, including task count

    Raises:
        HTTPException: 404 if project not found
    """
    def _fetch_with_task_count(cursor):
        # Single place for the project + task_count read used by every
        # return path (this query was previously duplicated three times).
        cursor.execute("""
            SELECT 
                p.id,
                p.name,
                p.description,
                p.config,
                p.created_at,
                COUNT(t.id) as task_count
            FROM projects p
            LEFT JOIN tasks t ON p.id = t.project_id
            WHERE p.id = ?
            GROUP BY p.id
        """, (project_id,))
        row = cursor.fetchone()
        return ProjectResponse(
            id=row["id"],
            name=row["name"],
            description=row["description"] or "",
            config=row["config"],
            created_at=row["created_at"],
            task_count=row["task_count"],
        )

    with get_db_connection() as conn:
        cursor = conn.cursor()

        # 404 before attempting any write.
        cursor.execute("SELECT id FROM projects WHERE id = ?", (project_id,))
        if not cursor.fetchone():
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail=f"Project with id '{project_id}' not found"
            )

        # Build the SET clause only from fields the client actually sent.
        # Column names are hard-coded here, so the f-string below is safe.
        assignments = []
        values = []
        for column, value in (
            ("name", project.name),
            ("description", project.description),
            ("config", project.config),
        ):
            if value is not None:
                assignments.append(f"{column} = ?")
                values.append(value)

        if assignments:
            values.append(project_id)
            cursor.execute(
                f"UPDATE projects SET {', '.join(assignments)} WHERE id = ?",
                values,
            )

        return _fetch_with_task_count(cursor)
+
+
@router.delete("/{project_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_project(project_id: str):
    """
    Delete a project and all associated tasks.

    Deleting the project row relies on cascading deletes to remove the
    project's tasks and annotations.

    Args:
        project_id: Project unique identifier

    Raises:
        HTTPException: 404 if project not found
    """
    with get_db_connection() as conn:
        cursor = conn.cursor()

        cursor.execute("SELECT id FROM projects WHERE id = ?", (project_id,))
        if cursor.fetchone() is None:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail=f"Project with id '{project_id}' not found"
            )

        cursor.execute("DELETE FROM projects WHERE id = ?", (project_id,))

    return None

+ 7 - 0
backend/schemas/__init__.py

@@ -1,3 +1,10 @@
 """
 Pydantic schemas package.
 """
+from .project import ProjectCreate, ProjectUpdate, ProjectResponse
+
+__all__ = [
+    "ProjectCreate",
+    "ProjectUpdate",
+    "ProjectResponse",
+]

TEMPAT SAMPAH
backend/schemas/__pycache__/__init__.cpython-311.pyc


TEMPAT SAMPAH
backend/schemas/__pycache__/project.cpython-311.pyc


+ 63 - 0
backend/schemas/project.py

@@ -0,0 +1,63 @@
+"""
+Pydantic schemas for Project API.
+Defines request and response models for project operations.
+"""
+from datetime import datetime
+from typing import Optional
+from pydantic import BaseModel, Field
+
+
class ProjectCreate(BaseModel):
    """Request body for POST /api/projects."""

    # Human-readable project name; must be non-empty.
    name: str = Field(..., min_length=1, description="Project name")
    # Free-form description; defaults to the empty string.
    description: str = Field(default="", description="Project description")
    # Label Studio XML labeling configuration; must be non-empty.
    config: str = Field(..., min_length=1, description="Label Studio configuration")

    class Config:
        json_schema_extra = {
            "example": {
                "name": "Image Classification Project",
                "description": "Classify images into categories",
                "config": "<View><Image name='image' value='$image'/><Choices name='choice' toName='image'><Choice value='Cat'/><Choice value='Dog'/></Choices></View>"
            }
        }
+
+
class ProjectUpdate(BaseModel):
    """Request body for PUT /api/projects/{id}; all fields optional (partial update)."""

    # None means "leave unchanged"; when provided, must be non-empty.
    name: Optional[str] = Field(None, min_length=1, description="Project name")
    # None means "leave unchanged".
    description: Optional[str] = Field(None, description="Project description")
    # None means "leave unchanged"; when provided, must be non-empty.
    config: Optional[str] = Field(None, min_length=1, description="Label Studio configuration")

    class Config:
        json_schema_extra = {
            "example": {
                "name": "Updated Project Name",
                "description": "Updated description"
            }
        }
+
+
class ProjectResponse(BaseModel):
    """Response body for project endpoints."""

    id: str = Field(..., description="Project unique identifier")
    name: str = Field(..., description="Project name")
    description: str = Field(..., description="Project description")
    config: str = Field(..., description="Label Studio configuration")
    created_at: datetime = Field(..., description="Project creation timestamp")
    # Number of tasks attached to the project; 0 for a brand-new project.
    task_count: int = Field(default=0, description="Number of tasks in project")

    class Config:
        json_schema_extra = {
            "example": {
                "id": "proj_123abc",
                "name": "Image Classification Project",
                "description": "Classify images into categories",
                "config": "<View><Image name='image' value='$image'/></View>",
                "created_at": "2024-01-12T10:30:00",
                "task_count": 5
            }
        }

+ 69 - 0
backend/test/README.md

@@ -0,0 +1,69 @@
+# Backend Tests
+
+This directory contains all unit and integration tests for the backend application.
+
+## Test Structure
+
+- `test_database.py` - Database initialization and connection management tests
+- `test_project_api.py` - Project API CRUD endpoint tests
+
+## Running Tests
+
+### Run all tests
+```bash
+cd backend
+python -m pytest
+```
+
+### Run specific test file
+```bash
+python -m pytest test/test_database.py
+```
+
+### Run with coverage
+```bash
+python -m pytest --cov=. --cov-report=html
+```
+
+### Run specific test class or function
+```bash
+python -m pytest test/test_database.py::TestDatabaseInitialization
+python -m pytest test/test_database.py::TestDatabaseInitialization::test_init_database_creates_tables
+```
+
+## Test Organization
+
+Tests are organized into classes by functionality:
+
+- **TestDatabaseInitialization**: Tests for database table creation and schema validation
+- **TestConnectionManagement**: Tests for database connection handling and lifecycle
+- **TestDatabaseIntegrity**: Tests for database constraints and referential integrity
+
+## Writing Tests
+
+When adding new tests:
+
+1. Create test files with `test_` prefix (e.g., `test_routers_project.py`)
+2. Organize tests into classes with `Test` prefix
+3. Name test functions with `test_` prefix
+4. Use descriptive names that explain what is being tested
+5. Add docstrings explaining the test purpose and requirements validated
+6. Use fixtures for common setup/teardown
+7. Follow the Arrange-Act-Assert pattern
+
+## Test Coverage
+
+Current test coverage focuses on:
+
+- ✅ Database initialization (Requirements 9.1)
+- ✅ Connection management (Requirements 9.2)
+- ✅ Table schema validation
+- ✅ Foreign key constraints
+- ✅ Cascade deletes
+- ✅ Transaction handling (commit/rollback)
+
+## Dependencies
+
+- pytest==7.4.3
+- pytest-cov==4.1.0
+
+See `requirements.txt` for full dependency list.

+ 3 - 0
backend/test/__init__.py

@@ -0,0 +1,3 @@
+"""
+Test package for backend application.
+"""

TEMPAT SAMPAH
backend/test/__pycache__/__init__.cpython-311.pyc


TEMPAT SAMPAH
backend/test/__pycache__/test_database.cpython-311-pytest-7.4.3.pyc


TEMPAT SAMPAH
backend/test/__pycache__/test_project_api.cpython-311-pytest-7.4.3.pyc


+ 476 - 0
backend/test/test_database.py

@@ -0,0 +1,476 @@
+"""
+Unit tests for database initialization and connection management.
+Tests database table creation and connection handling.
+
+Requirements: 9.1, 9.2
+"""
+import os
+import sqlite3
+import tempfile
+import pytest
+from contextlib import contextmanager
+
+# Import database functions
+import sys
+sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
+from database import init_database, get_db_connection, get_db, DB_PATH
+
+
@pytest.fixture
def temp_db():
    """
    Yield the path of a fresh temporary SQLite database.

    Points the already-imported ``database`` module at a throwaway file
    so every test starts from a clean database, then restores both the
    DATABASE_PATH environment variable and the module's original
    ``DB_PATH`` on teardown.
    """
    # Create a temporary database file.
    fd, temp_db_path = tempfile.mkstemp(suffix='.db')
    os.close(fd)

    original_env = os.environ.get('DATABASE_PATH')
    os.environ['DATABASE_PATH'] = temp_db_path

    # Patch the module-level path directly: the module has already read
    # its configuration at import time, so changing the env var alone
    # would not redirect it.
    import database
    # Capture the real pre-test value so teardown restores exactly what
    # was there before (the previous code reset it to a hard-coded
    # default, which could differ from the actual original DB_PATH).
    original_module_path = database.DB_PATH
    database.DB_PATH = temp_db_path

    yield temp_db_path

    # Cleanup: restore env var and module path, remove the temp file.
    if original_env is not None:
        os.environ['DATABASE_PATH'] = original_env
    else:
        os.environ.pop('DATABASE_PATH', None)

    database.DB_PATH = original_module_path

    try:
        os.unlink(temp_db_path)
    except OSError:
        pass
+
+
class TestDatabaseInitialization:
    """Test suite for database initialization functionality."""

    @staticmethod
    def _table_names(db_path):
        # Helper: list all table names in the given SQLite file.
        conn = sqlite3.connect(db_path)
        try:
            cursor = conn.execute(
                "SELECT name FROM sqlite_master WHERE type='table' ORDER BY name"
            )
            return [row[0] for row in cursor.fetchall()]
        finally:
            conn.close()

    def test_init_database_creates_tables(self, temp_db):
        """
        Test that init_database creates all required tables.
        Validates: Requirements 9.1
        """
        init_database()

        tables = self._table_names(temp_db)

        assert 'projects' in tables, "projects table should exist"
        assert 'tasks' in tables, "tasks table should exist"
        assert 'annotations' in tables, "annotations table should exist"

    def test_projects_table_schema(self, temp_db):
        """
        Test that projects table has correct schema.
        Validates: Requirements 9.1
        """
        init_database()

        conn = sqlite3.connect(temp_db)
        try:
            cursor = conn.cursor()

            # column name -> declared type
            cursor.execute("PRAGMA table_info(projects)")
            columns = {row[1]: row[2] for row in cursor.fetchall()}

            for col in ('id', 'name', 'description', 'config', 'created_at'):
                assert col in columns, f"projects should have {col} column"

            # The CREATE TABLE statement should declare a primary key.
            cursor.execute(
                "SELECT sql FROM sqlite_master WHERE type='table' AND name='projects'"
            )
            schema = cursor.fetchone()[0]
            assert 'PRIMARY KEY' in schema, "id should be primary key"
        finally:
            conn.close()

    def test_tasks_table_schema(self, temp_db):
        """
        Test that tasks table has correct schema.
        Validates: Requirements 9.1
        """
        init_database()

        conn = sqlite3.connect(temp_db)
        try:
            cursor = conn.cursor()

            cursor.execute("PRAGMA table_info(tasks)")
            columns = {row[1]: row[2] for row in cursor.fetchall()}

            for col in ('id', 'project_id', 'name', 'data',
                        'status', 'assigned_to', 'created_at'):
                assert col in columns, f"tasks should have {col} column"

            # tasks.project_id must reference projects.
            cursor.execute("PRAGMA foreign_key_list(tasks)")
            foreign_keys = cursor.fetchall()
            assert len(foreign_keys) > 0, "tasks should have foreign key constraint"
            assert foreign_keys[0][2] == 'projects', "foreign key should reference projects"
        finally:
            conn.close()

    def test_annotations_table_schema(self, temp_db):
        """
        Test that annotations table has correct schema.
        Validates: Requirements 9.1
        """
        init_database()

        conn = sqlite3.connect(temp_db)
        try:
            cursor = conn.cursor()

            cursor.execute("PRAGMA table_info(annotations)")
            columns = {row[1]: row[2] for row in cursor.fetchall()}

            for col in ('id', 'task_id', 'user_id', 'result',
                        'created_at', 'updated_at'):
                assert col in columns, f"annotations should have {col} column"

            # annotations.task_id must reference tasks.
            cursor.execute("PRAGMA foreign_key_list(annotations)")
            foreign_keys = cursor.fetchall()
            assert len(foreign_keys) > 0, "annotations should have foreign key constraint"
            assert foreign_keys[0][2] == 'tasks', "foreign key should reference tasks"
        finally:
            conn.close()

    def test_foreign_key_constraints_enabled(self, temp_db):
        """
        Test that connections handed out by the application have foreign
        key enforcement turned on.
        Validates: Requirements 9.1

        NOTE: SQLite enables foreign keys per connection, so the check
        must be made on a connection produced by the application's own
        factory. (The previous version of this test set PRAGMA
        foreign_keys = ON itself and then asserted it was on, which
        passes unconditionally and verified nothing.)
        """
        init_database()

        with get_db_connection() as conn:
            cursor = conn.cursor()
            cursor.execute("PRAGMA foreign_keys")
            result = cursor.fetchone()
            assert result[0] == 1, "foreign key constraints should be enabled"

    def test_init_database_idempotent(self, temp_db):
        """
        Test that calling init_database multiple times is safe.
        Validates: Requirements 9.1
        """
        # Repeated initialization must neither fail nor duplicate tables.
        for _ in range(3):
            init_database()

        tables = self._table_names(temp_db)

        assert len(tables) == 3, "should have exactly 3 tables"
        assert 'projects' in tables
        assert 'tasks' in tables
        assert 'annotations' in tables
+
+
class TestConnectionManagement:
    """Test suite for database connection management."""

    def test_get_db_connection_context_manager(self, temp_db):
        """
        Test that get_db_connection works as context manager.
        Validates: Requirements 9.2
        """
        init_database()

        with get_db_connection() as conn:
            assert conn is not None, "connection should not be None"
            assert isinstance(conn, sqlite3.Connection), "should return Connection object"

            # The connection must be usable for queries inside the block.
            rows = conn.cursor().execute(
                "SELECT name FROM sqlite_master WHERE type='table'"
            ).fetchall()
            assert len(rows) > 0, "should be able to query tables"

    def test_get_db_connection_commits_on_success(self, temp_db):
        """
        Test that get_db_connection commits changes on success.
        Validates: Requirements 9.2
        """
        init_database()

        # Write through the context manager without an explicit commit.
        with get_db_connection() as conn:
            conn.cursor().execute("""
                INSERT INTO projects (id, name, description, config)
                VALUES ('test-1', 'Test Project', 'Description', '{}')
            """)

        # Confirm via an independent raw connection that the row persisted.
        check = sqlite3.connect(temp_db)
        try:
            row = check.execute(
                "SELECT * FROM projects WHERE id='test-1'"
            ).fetchone()
            assert row is not None, "data should be committed"
        finally:
            check.close()

    def test_get_db_connection_rolls_back_on_error(self, temp_db):
        """
        Test that get_db_connection rolls back on error.
        Validates: Requirements 9.2
        """
        init_database()

        # Raise inside the context manager after writing, before commit.
        with pytest.raises(ValueError):
            with get_db_connection() as conn:
                conn.cursor().execute("""
                    INSERT INTO projects (id, name, description, config)
                    VALUES ('test-2', 'Test Project', 'Description', '{}')
                """)
                raise ValueError("Test error")

        # The insert must not be visible afterwards.
        check = sqlite3.connect(temp_db)
        try:
            row = check.execute(
                "SELECT * FROM projects WHERE id='test-2'"
            ).fetchone()
            assert row is None, "data should be rolled back on error"
        finally:
            check.close()

    def test_get_db_connection_closes_connection(self, temp_db):
        """
        Test that get_db_connection closes connection after use.
        Validates: Requirements 9.2
        """
        init_database()

        leaked = None
        with get_db_connection() as conn:
            assert conn is not None
            leaked = conn

        # Using the connection after the block must fail: it is closed.
        with pytest.raises(sqlite3.ProgrammingError):
            leaked.cursor().execute("SELECT 1")

    def test_get_db_connection_enables_row_factory(self, temp_db):
        """
        Test that get_db_connection enables row factory for column access.
        Validates: Requirements 9.2
        """
        init_database()

        with get_db_connection() as conn:
            conn.cursor().execute("""
                INSERT INTO projects (id, name, description, config)
                VALUES ('test-3', 'Test Project', 'Description', '{}')
            """)

        with get_db_connection() as conn:
            row = conn.cursor().execute(
                "SELECT * FROM projects WHERE id='test-3'"
            ).fetchone()

            # Rows must support name-based access, not just indexes.
            assert row['id'] == 'test-3', "should access column by name"
            assert row['name'] == 'Test Project', "should access column by name"

    def test_get_db_function(self, temp_db):
        """
        Test that get_db function returns a valid connection.
        Validates: Requirements 9.2
        """
        init_database()

        conn = get_db()
        try:
            assert conn is not None, "connection should not be None"
            assert isinstance(conn, sqlite3.Connection), "should return Connection object"

            cursor = conn.cursor()
            cursor.execute("""
                INSERT INTO projects (id, name, description, config)
                VALUES ('test-4', 'Test Project', 'Description', '{}')
            """)
            conn.commit()

            row = cursor.execute(
                "SELECT * FROM projects WHERE id='test-4'"
            ).fetchone()
            assert row['id'] == 'test-4', "row factory should be enabled"
        finally:
            # get_db leaves closing to the caller.
            conn.close()

    def test_get_db_enables_foreign_keys(self, temp_db):
        """
        Test that get_db enables foreign key constraints.
        Validates: Requirements 9.2
        """
        init_database()

        conn = get_db()
        try:
            result = conn.execute("PRAGMA foreign_keys").fetchone()
            assert result[0] == 1, "foreign keys should be enabled"
        finally:
            conn.close()
+
+
class TestDatabaseIntegrity:
    """Test suite for database integrity and constraints."""

    @staticmethod
    def _exists(table, row_id):
        # Helper: True if a row with the given id exists in `table`.
        # Table names are fixed strings supplied by the tests below.
        with get_db_connection() as conn:
            row = conn.cursor().execute(
                f"SELECT * FROM {table} WHERE id=?", (row_id,)
            ).fetchone()
            return row is not None

    def test_cascade_delete_tasks_on_project_delete(self, temp_db):
        """
        Test that deleting a project cascades to delete tasks.
        Validates: Requirements 9.1
        """
        init_database()

        # Arrange: a project with one task.
        with get_db_connection() as conn:
            cursor = conn.cursor()
            cursor.execute("""
                INSERT INTO projects (id, name, description, config)
                VALUES ('proj-1', 'Project 1', 'Description', '{}')
            """)
            cursor.execute("""
                INSERT INTO tasks (id, project_id, name, data, status)
                VALUES ('task-1', 'proj-1', 'Task 1', '{}', 'pending')
            """)

        assert self._exists('tasks', 'task-1'), "task should exist"

        # Act: delete the parent project.
        with get_db_connection() as conn:
            conn.cursor().execute("DELETE FROM projects WHERE id='proj-1'")

        # Assert: the child task went with it.
        assert not self._exists('tasks', 'task-1'), \
            "task should be deleted with project"

    def test_cascade_delete_annotations_on_task_delete(self, temp_db):
        """
        Test that deleting a task cascades to delete annotations.
        Validates: Requirements 9.1
        """
        init_database()

        # Arrange: project -> task -> annotation chain.
        with get_db_connection() as conn:
            cursor = conn.cursor()
            cursor.execute("""
                INSERT INTO projects (id, name, description, config)
                VALUES ('proj-2', 'Project 2', 'Description', '{}')
            """)
            cursor.execute("""
                INSERT INTO tasks (id, project_id, name, data, status)
                VALUES ('task-2', 'proj-2', 'Task 2', '{}', 'pending')
            """)
            cursor.execute("""
                INSERT INTO annotations (id, task_id, user_id, result)
                VALUES ('ann-1', 'task-2', 'user-1', '{}')
            """)

        assert self._exists('annotations', 'ann-1'), "annotation should exist"

        # Act: delete the parent task.
        with get_db_connection() as conn:
            conn.cursor().execute("DELETE FROM tasks WHERE id='task-2'")

        # Assert: the child annotation went with it.
        assert not self._exists('annotations', 'ann-1'), \
            "annotation should be deleted with task"

+ 192 - 0
backend/test/test_project_api.py

@@ -0,0 +1,192 @@
+"""
+Unit tests for Project API endpoints.
+Tests CRUD operations for projects.
+"""
+import pytest
+import os
+import sqlite3
+from fastapi.testclient import TestClient
+
+# Use a test database
+TEST_DB_PATH = "test_annotation_platform.db"
+
+
+@pytest.fixture(scope="function", autouse=True)
+def setup_test_db():
+    """Setup test database before each test and cleanup after.
+
+    Points DATABASE_PATH at a throwaway SQLite file, initializes the schema,
+    yields to the test, then removes the file and restores the env var.
+    Autouse, so every test in this module gets a fresh database.
+    """
+    # Set test database path
+    original_db_path = os.environ.get("DATABASE_PATH")
+    os.environ["DATABASE_PATH"] = TEST_DB_PATH
+    
+    # Remove existing test database (stale file from a crashed previous run)
+    if os.path.exists(TEST_DB_PATH):
+        os.remove(TEST_DB_PATH)
+    
+    # Import after setting env var
+    # NOTE(review): this only has an effect if the database module reads
+    # DATABASE_PATH at import time AND has not already been imported earlier
+    # in the test session (Python caches modules) — confirm against database.py.
+    from database import init_database
+    init_database()
+    
+    yield
+    
+    # Cleanup: drop the test database file
+    if os.path.exists(TEST_DB_PATH):
+        os.remove(TEST_DB_PATH)
+    
+    # Restore original path (or unset the var we introduced)
+    if original_db_path:
+        os.environ["DATABASE_PATH"] = original_db_path
+    elif "DATABASE_PATH" in os.environ:
+        del os.environ["DATABASE_PATH"]
+
+
+@pytest.fixture(scope="function")
+def test_client():
+    """Create a test client for the FastAPI app.
+
+    NOTE(review): returned without a `with` block, so the app lifespan
+    (which calls init_database on startup in main.py) does not run here;
+    the autouse setup_test_db fixture initializes the schema instead.
+    """
+    from main import app
+    return TestClient(app)
+
+
+def test_list_projects_empty(test_client):
+    """Test listing projects when database is empty."""
+    # Fresh per-test DB (autouse setup_test_db), so the listing must be [].
+    response = test_client.get("/api/projects")
+    assert response.status_code == 200
+    assert response.json() == []
+
+
+def test_create_project(test_client):
+    """Test creating a new project.
+
+    Verifies the request fields are echoed back and that the server fills
+    in its own fields: a 'proj_'-prefixed id, a zero task_count, and a
+    created_at timestamp.
+    """
+    project_data = {
+        "name": "Test Project",
+        "description": "Test Description",
+        "config": "<View><Image name='img' value='$image'/></View>"
+    }
+    
+    response = test_client.post("/api/projects", json=project_data)
+    assert response.status_code == 201
+    
+    data = response.json()
+    # Client-supplied fields are stored as-is
+    assert data["name"] == project_data["name"]
+    assert data["description"] == project_data["description"]
+    assert data["config"] == project_data["config"]
+    # Server-generated fields
+    assert "id" in data
+    assert data["id"].startswith("proj_")
+    assert data["task_count"] == 0
+    assert "created_at" in data
+
+
+def test_create_project_empty_name(test_client):
+    """Test creating a project with empty name fails validation.
+
+    Expects the ProjectCreate schema to reject an empty name, which FastAPI
+    surfaces as a 422 Unprocessable Entity.
+    """
+    project_data = {
+        "name": "",
+        "description": "Test Description",
+        "config": "<View></View>"
+    }
+    
+    response = test_client.post("/api/projects", json=project_data)
+    assert response.status_code == 422  # Validation error
+
+
+def test_get_project(test_client):
+    """Test getting a project by ID (round-trips a freshly created project)."""
+    # Create a project first so we have a known id to fetch
+    project_data = {
+        "name": "Test Project",
+        "description": "Test Description",
+        "config": "<View></View>"
+    }
+    create_response = test_client.post("/api/projects", json=project_data)
+    project_id = create_response.json()["id"]
+    
+    # Get the project back by the server-assigned id
+    response = test_client.get(f"/api/projects/{project_id}")
+    assert response.status_code == 200
+    
+    data = response.json()
+    assert data["id"] == project_id
+    assert data["name"] == project_data["name"]
+
+
+def test_get_project_not_found(test_client):
+    """Test getting a non-existent project returns 404."""
+    response = test_client.get("/api/projects/nonexistent_id")
+    assert response.status_code == 404
+    # Error detail should mention the project was not found (case-insensitive)
+    assert "not found" in response.json()["detail"].lower()
+
+
+def test_update_project(test_client):
+    """Test updating a project.
+
+    Sends a partial update (name + description only) and checks that the
+    omitted field (config) keeps its original value.
+    """
+    # Create a project first
+    project_data = {
+        "name": "Original Name",
+        "description": "Original Description",
+        "config": "<View></View>"
+    }
+    create_response = test_client.post("/api/projects", json=project_data)
+    project_id = create_response.json()["id"]
+    
+    # Update the project with a payload that omits 'config'
+    update_data = {
+        "name": "Updated Name",
+        "description": "Updated Description"
+    }
+    response = test_client.put(f"/api/projects/{project_id}", json=update_data)
+    assert response.status_code == 200
+    
+    data = response.json()
+    assert data["name"] == update_data["name"]
+    assert data["description"] == update_data["description"]
+    assert data["config"] == project_data["config"]  # Config unchanged
+
+
+def test_update_project_not_found(test_client):
+    """Test updating a non-existent project returns 404."""
+    update_data = {"name": "Updated Name"}
+    response = test_client.put("/api/projects/nonexistent_id", json=update_data)
+    assert response.status_code == 404
+
+
+def test_delete_project(test_client):
+    """Test deleting a project.
+
+    Expects 204 No Content on delete and 404 on a subsequent GET.
+    """
+    # Create a project first
+    project_data = {
+        "name": "Test Project",
+        "description": "Test Description",
+        "config": "<View></View>"
+    }
+    create_response = test_client.post("/api/projects", json=project_data)
+    project_id = create_response.json()["id"]
+    
+    # Delete the project
+    response = test_client.delete(f"/api/projects/{project_id}")
+    assert response.status_code == 204
+    
+    # Verify project is deleted (GET now 404s)
+    get_response = test_client.get(f"/api/projects/{project_id}")
+    assert get_response.status_code == 404
+
+
+def test_delete_project_not_found(test_client):
+    """Test deleting a non-existent project returns 404."""
+    response = test_client.delete("/api/projects/nonexistent_id")
+    assert response.status_code == 404
+
+
+def test_list_projects_after_creation(test_client):
+    """Test listing projects after creating some.
+
+    Creates three projects and checks the listing returns all of them with
+    the expected fields; does not assert any particular ordering.
+    """
+    # Create multiple projects
+    for i in range(3):
+        project_data = {
+            "name": f"Project {i}",
+            "description": f"Description {i}",
+            "config": "<View></View>"
+        }
+        test_client.post("/api/projects", json=project_data)
+    
+    # List projects
+    response = test_client.get("/api/projects")
+    assert response.status_code == 200
+    
+    data = response.json()
+    assert len(data) == 3
+    # Every item carries the server-generated fields
+    assert all("id" in project for project in data)
+    assert all("task_count" in project for project in data)