13 Commits

92 changed files with 7953 additions and 1267 deletions

156
.gitignore vendored
View File

@@ -216,3 +216,159 @@ __marimo__/
# Streamlit # Streamlit
.streamlit/secrets.toml .streamlit/secrets.toml
### react ###
.DS_*
*.log
logs
**/*.backup.*
**/*.back.*
node_modules
bower_components
*.sublime*
psd
thumb
sketch
### Node ###
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*
# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
*.lcov
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
jspm_packages/
# Snowpack dependency directory (https://snowpack.dev/)
web_modules/
# TypeScript cache
*.tsbuildinfo
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional stylelint cache
.stylelintcache
# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variable files
.env
.env.development.local
.env.test.local
.env.production.local
.env.local
# parcel-bundler cache (https://parceljs.org/)
.cache
.parcel-cache
# Next.js build output
.next
out
# Nuxt.js build / generate output
.nuxt
dist
# Gatsby files
.cache/
# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public
# vuepress build output
.vuepress/dist
# vuepress v2.x temp and cache directory
.temp
# Docusaurus cache and generated files
.docusaurus
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
# TernJS port file
.tern-port
# Stores VSCode versions used for testing VSCode extensions
.vscode-test
# yarn v2
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*
### Node Patch ###
# Serverless Webpack directories
.webpack/
# Optional stylelint cache
# SvelteKit build / generate output
.svelte-kit

32
Makefile Normal file
View File

@@ -0,0 +1,32 @@
# All targets are phony: "rebuild" was missing from the original list, so a
# file named "rebuild" would have silently shadowed the target.
.PHONY: init up down restart logs clean rebuild

init:
	@echo "Creating directories and setting permissions..."
	@mkdir -p ./volumes/watched_files ./volumes/objects ./volumes/errors ./volumes/ignored
	@chown -R 1002:1002 ./volumes/watched_files ./volumes/objects ./volumes/errors ./volumes/ignored
	@echo "✓ Directories initialized"

up: init
	@echo "Starting services..."
	@docker-compose up -d
	@echo "✓ Services started"

down:
	@docker-compose down

restart:
	@docker-compose restart

logs:
	@docker-compose logs -f

clean: down
	@echo "Cleaning volumes..."
	@sudo rm -rf ./volumes
	@echo "✓ Volumes cleaned"

rebuild: clean init
	@echo "Rebuilding images..."
	@docker-compose build --no-cache
	@docker-compose up -d
	@echo "✓ Services rebuilt and started"

141
Readme.md
View File

@@ -13,7 +13,7 @@ architecture with Redis for task queuing and MongoDB for data persistence.
- **Backend API**: FastAPI (Python 3.12) - **Backend API**: FastAPI (Python 3.12)
- **Task Processing**: Celery with Redis broker - **Task Processing**: Celery with Redis broker
- **Document Processing**: EasyOCR, PyMuPDF, python-docx, pdfplumber - **Document Processing**: EasyOCR, PyMuPDF, python-docx, pdfplumber
- **Database**: MongoDB - **Database**: MongoDB (pymongo)
- **Frontend**: React - **Frontend**: React
- **Containerization**: Docker & Docker Compose - **Containerization**: Docker & Docker Compose
- **File Monitoring**: Python watchdog library - **File Monitoring**: Python watchdog library
@@ -109,16 +109,18 @@ MyDocManager/
│ │ │ │ └── types.py # PyObjectId and other useful types │ │ │ │ └── types.py # PyObjectId and other useful types
│ │ │ ├── database/ │ │ │ ├── database/
│ │ │ │ ├── __init__.py │ │ │ │ ├── __init__.py
│ │ │ │ ├── connection.py # MongoDB connection │ │ │ │ ├── connection.py # MongoDB connection (pymongo)
│ │ │ │ └── repositories/ │ │ │ │ └── repositories/
│ │ │ │ ├── __init__.py │ │ │ │ ├── __init__.py
│ │ │ │ ├── user_repository.py # User CRUD operations │ │ │ │ ├── user_repository.py # User CRUD operations (synchronous)
│ │ │ │ ── document_repository.py # User CRUD operations │ │ │ │ ── document_repository.py # Document CRUD operations (synchronous)
│ │ │ │ └── job_repository.py # Job CRUD operations (synchronous)
│ │ │ ├── services/ │ │ │ ├── services/
│ │ │ │ ├── __init__.py │ │ │ │ ├── __init__.py
│ │ │ │ ├── auth_service.py # JWT & password logic │ │ │ │ ├── auth_service.py # JWT & password logic (synchronous)
│ │ │ │ ├── user_service.py # User business logic │ │ │ │ ├── user_service.py # User business logic (synchronous)
│ │ │ │ ├── document_service.py # Document business logic │ │ │ │ ├── document_service.py # Document business logic (synchronous)
│ │ │ │ ├── job_service.py # Job processing logic (synchronous)
│ │ │ │ └── init_service.py # Admin creation at startup │ │ │ │ └── init_service.py # Admin creation at startup
│ │ │ ├── api/ │ │ │ ├── api/
│ │ │ │ ├── __init__.py │ │ │ │ ├── __init__.py
@@ -138,7 +140,13 @@ MyDocManager/
│ └── frontend/ │ └── frontend/
│ ├── Dockerfile │ ├── Dockerfile
│ ├── package.json │ ├── package.json
│ ├── index.html
│ └── src/ │ └── src/
│ ├── assets/
│ ├── App.css
│ ├── App.jsx
│ ├── main.css
│ └── main.jsx
├── tests/ ├── tests/
│ ├── file-processor/ │ ├── file-processor/
│ │ ├── test_auth/ │ │ ├── test_auth/
@@ -334,13 +342,12 @@ class ProcessingJob(BaseModel):
- **Rationale**: MongoDB is not meant for large files, better performance. Files remain in the file system for easy - **Rationale**: MongoDB is not meant for large files, better performance. Files remain in the file system for easy
access. access.
### Implementation Order #### Repository and Services Implementation
- **Choice**: Synchronous implementation using pymongo
- **Rationale**: Full compatibility with Celery workers and simplified workflow
- **Implementation**: All repositories and services operate synchronously for seamless integration
1. ✅ Pydantic models for MongoDB collections
2. UNDER PROGRESS : Repository layer for data access (files + processing_jobs)
3. TODO : Celery tasks for document processing
4. TODO : Watchdog file monitoring implementation
5. TODO : FastAPI integration and startup coordination
## Job Management Layer ## Job Management Layer
@@ -350,7 +357,7 @@ The job management system follows the repository pattern for clean separation be
#### JobRepository #### JobRepository
Handles direct MongoDB operations for processing jobs: Handles direct MongoDB operations for processing jobs using synchronous pymongo:
**CRUD Operations:** **CRUD Operations:**
- `create_job()` - Create new processing job with automatic `created_at` timestamp - `create_job()` - Create new processing job with automatic `created_at` timestamp
@@ -367,7 +374,7 @@ Handles direct MongoDB operations for processing jobs:
#### JobService #### JobService
Provides business logic layer with strict status transition validation: Provides synchronous business logic layer with strict status transition validation:
**Status Transition Methods:** **Status Transition Methods:**
- `mark_job_as_started()` - PENDING → PROCESSING - `mark_job_as_started()` - PENDING → PROCESSING
@@ -381,7 +388,6 @@ Provides business logic layer with strict status transition validation:
#### Custom Exceptions #### Custom Exceptions
**JobNotFoundError**: Raised when job ID doesn't exist
**InvalidStatusTransitionError**: Raised for invalid status transitions **InvalidStatusTransitionError**: Raised for invalid status transitions
**JobRepositoryError**: Raised for MongoDB operation failures **JobRepositoryError**: Raised for MongoDB operation failures
@@ -400,9 +406,15 @@ All other transitions are forbidden and will raise `InvalidStatusTransitionError
``` ```
src/file-processor/app/ src/file-processor/app/
├── database/repositories/ ├── database/repositories/
── job_repository.py # JobRepository class ── job_repository.py # JobRepository class (synchronous)
│ ├── user_repository.py # UserRepository class (synchronous)
│ ├── document_repository.py # DocumentRepository class (synchronous)
│ └── file_repository.py # FileRepository class (synchronous)
├── services/ ├── services/
── job_service.py # JobService class ── job_service.py # JobService class (synchronous)
│ ├── auth_service.py # AuthService class (synchronous)
│ ├── user_service.py # UserService class (synchronous)
│ └── document_service.py # DocumentService class (synchronous)
└── exceptions/ └── exceptions/
└── job_exceptions.py # Custom exceptions └── job_exceptions.py # Custom exceptions
``` ```
@@ -414,6 +426,7 @@ src/file-processor/app/
- **Status Tracking**: Real-time processing status via `processing_jobs` collection - **Status Tracking**: Real-time processing status via `processing_jobs` collection
- **Extensible Metadata**: Flexible metadata storage per file type - **Extensible Metadata**: Flexible metadata storage per file type
- **Multiple Extraction Methods**: Support for direct text, OCR, and hybrid approaches - **Multiple Extraction Methods**: Support for direct text, OCR, and hybrid approaches
- **Synchronous Operations**: All database operations use pymongo for Celery compatibility
## Key Implementation Notes ## Key Implementation Notes
@@ -436,6 +449,7 @@ src/file-processor/app/
- **Package Manager**: pip (standard) - **Package Manager**: pip (standard)
- **External Dependencies**: Listed in each service's requirements.txt - **External Dependencies**: Listed in each service's requirements.txt
- **Standard Library First**: Prefer standard library when possible - **Standard Library First**: Prefer standard library when possible
- **Database Driver**: pymongo for synchronous MongoDB operations
### Testing Strategy ### Testing Strategy
@@ -460,6 +474,7 @@ src/file-processor/app/
12. **Content in Files Collection**: Extracted content stored with file metadata 12. **Content in Files Collection**: Extracted content stored with file metadata
13. **Direct Task Dispatch**: File watcher directly creates Celery tasks 13. **Direct Task Dispatch**: File watcher directly creates Celery tasks
14. **SHA256 Duplicate Detection**: Prevents reprocessing identical files 14. **SHA256 Duplicate Detection**: Prevents reprocessing identical files
15. **Synchronous Implementation**: All repositories and services use pymongo for Celery compatibility
### Development Process Requirements ### Development Process Requirements
@@ -470,14 +485,88 @@ src/file-processor/app/
### Next Implementation Steps ### Next Implementation Steps
1. **IN PROGRESS**: Implement file processing pipeline => 1. Build React Login Page
1. Create Pydantic models for files and processing_jobs collections 2. Build React Registration Page
2. Implement repository layer for file and processing job data access 3. Build React Default Dashboard
3. Create Celery tasks for document processing (.txt, .pdf, .docx) 4. Build React User Management Pages
4. Implement Watchdog file monitoring with dedicated observer
5. Integrate file watcher with FastAPI startup #### Validated Folders and files
2. Create protected API routes for user management ```
3. Build React monitoring interface with authentication src/frontend/src/
├── components/
│ ├── auth/
│ │ ├── LoginForm.jsx # Composant formulaire de login => Done
│ │ └── AuthLayout.jsx # Layout pour les pages d'auth => Done
│ └── common/
│ ├── Header.jsx # Header commun => TODO
│   │   ├── Layout.jsx              # Layout commun => TODO
│ └── ProtectedRoutes.jsx # Done
├── contexts/
│ └── AuthContext.jsx # Done
├── pages/
│ ├── LoginPage.jsx # Page complète de login => Done
│ └── DashboardPage.jsx # Page tableau de bord (exemple) => TODO
├── services/
│ └── authService.js # Service API pour auth => Done
├── hooks/
│ └── useAuth.js # Hook React pour gestion auth => TODO
├── utils/
│ └── api.js # Configuration axios/fetch => Done
├── App.jsx # Needs to be updated => TODO
```
#### Choices already made
* Pour la gestion des requêtes API et de l'état d'authentification, je propose
* axios (plus de fonctionnalités) :
* Installation d'axios pour les requêtes HTTP
* Intercepteurs pour gestion automatique du token
* Gestion d'erreurs centralisée
* Pour la gestion de l'état d'authentification et la navigation : Option A + C en même temps
* Option A - Context React + React Router :
* React Context pour l'état global d'auth (user, token, isAuthenticated)
* React Router pour la navigation entre pages
* Routes protégées automatiques
* Option C - Context + localStorage pour persistance :
* Token sauvegardé en localStorage pour rester connecté
* Context qui se recharge au démarrage de l'app
* CSS : Utilisation de daisyUI
#### Package.json
```
{
"name": "frontend",
"private": true,
"version": "0.0.0",
"type": "module",
"scripts": {
"dev": "vite",
"build": "vite build",
"lint": "eslint .",
"preview": "vite preview"
},
"dependencies": {
"@tailwindcss/vite": "^4.1.13",
"axios": "^1.12.2",
"react": "^19.1.1",
"react-dom": "^19.1.1",
"react-router-dom": "^7.9.3"
},
"devDependencies": {
"@eslint/js": "^9.33.0",
"@types/react": "^19.1.10",
"@types/react-dom": "^19.1.7",
"@vitejs/plugin-react": "^5.0.0",
"autoprefixer": "^10.4.21",
"daisyui": "^5.1.23",
"eslint": "^9.33.0",
"eslint-plugin-react-hooks": "^5.2.0",
"eslint-plugin-react-refresh": "^0.4.20",
"globals": "^16.3.0",
"postcss": "^8.5.6",
"tailwindcss": "^4.1.13",
"vite": "^7.1.2"
}
}
```
## Annexes ## Annexes

View File

@@ -40,6 +40,8 @@ services:
- ./src/worker/tasks:/app/tasks # <- Added: shared access to worker tasks - ./src/worker/tasks:/app/tasks # <- Added: shared access to worker tasks
- ./volumes/watched_files:/watched_files - ./volumes/watched_files:/watched_files
- ./volumes/objects:/objects - ./volumes/objects:/objects
- ./volumes/errors:/errors
- ./volumes/ignored:/ignored
depends_on: depends_on:
- redis - redis
- mongodb - mongodb
@@ -59,14 +61,31 @@ services:
- PYTHONPATH=/app - PYTHONPATH=/app
volumes: volumes:
- ./src/worker:/app - ./src/worker:/app
- ./src/file-processor/app:/app/app # <- Added: shared access file-processor app
- ./volumes/watched_files:/watched_files - ./volumes/watched_files:/watched_files
- ./volumes/objects:/objects
- ./volumes/errors:/errors
- ./volumes/ignored:/ignored
depends_on: depends_on:
- redis - redis
- mongodb - mongodb
networks: networks:
- mydocmanager-network - mydocmanager-network
command: celery -A tasks.main worker --loglevel=info command: celery -A tasks.main worker --loglevel=info
#command: celery -A main --loglevel=info # pour la production
# Frontend - React application with Vite
frontend:
build:
context: ./src/frontend
dockerfile: Dockerfile
container_name: mydocmanager-frontend
ports:
- "5173:5173"
volumes:
- ./src/frontend:/app
- /app/node_modules # Anonymous volume to prevent node_modules override
networks:
- mydocmanager-network
volumes: volumes:
mongodb-data: mongodb-data:

31
package-lock.json generated Normal file
View File

@@ -0,0 +1,31 @@
{
"name": "MyDocManager",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"dependencies": {
"react-icons": "^5.5.0"
}
},
"node_modules/react": {
"version": "19.1.1",
"resolved": "https://registry.npmjs.org/react/-/react-19.1.1.tgz",
"integrity": "sha512-w8nqGImo45dmMIfljjMwOGtbmC/mk4CMYhWIicdSflH91J9TyCyczcPFXJzrZ/ZXcgGRFeP6BU0BEJTw6tZdfQ==",
"license": "MIT",
"peer": true,
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/react-icons": {
"version": "5.5.0",
"resolved": "https://registry.npmjs.org/react-icons/-/react-icons-5.5.0.tgz",
"integrity": "sha512-MEFcXdkP3dLo8uumGI5xN3lDFNsRtrjbOEKDLD7yv76v4wpnEq2Lt2qeHaQOr34I/wPN3s3+N08WkQ+CW37Xiw==",
"license": "MIT",
"peerDependencies": {
"react": "*"
}
}
}
}

5
package.json Normal file
View File

@@ -0,0 +1,5 @@
{
"dependencies": {
"react-icons": "^5.5.0"
}
}

View File

@@ -1,40 +1,66 @@
amqp==5.3.1 amqp==5.3.1
annotated-types==0.7.0 annotated-types==0.7.0
anyio==4.10.0 anyio==4.10.0
asgiref==3.9.1
bcrypt==4.3.0 bcrypt==4.3.0
billiard==4.2.1 billiard==4.2.1
celery==5.5.3 celery==5.5.3
certifi==2025.8.3
cffi==2.0.0
charset-normalizer==3.4.3
click==8.2.1 click==8.2.1
click-didyoumean==0.3.1 click-didyoumean==0.3.1
click-plugins==1.1.1.2 click-plugins==1.1.1.2
click-repl==0.3.0 click-repl==0.3.0
cryptography==46.0.1
Deprecated==1.2.18
dnspython==2.8.0 dnspython==2.8.0
ecdsa==0.19.1
email-validator==2.3.0 email-validator==2.3.0
fastapi==0.116.1 fastapi==0.116.1
h11==0.16.0 h11==0.16.0
hiredis==3.2.1
httpcore==1.0.9
httptools==0.6.4 httptools==0.6.4
httpx==0.28.1
idna==3.10 idna==3.10
importlib_metadata==8.7.0
iniconfig==2.1.0 iniconfig==2.1.0
izulu==0.50.0
kombu==5.5.4 kombu==5.5.4
lxml==6.0.2
mongomock==4.3.0 mongomock==4.3.0
mongomock-motor==0.0.36 mongomock-motor==0.0.36
motor==3.7.1 motor==3.7.1
packaging==25.0 packaging==25.0
pikepdf==9.11.0
pillow==11.3.0
pipdeptree==2.28.0 pipdeptree==2.28.0
pluggy==1.6.0 pluggy==1.6.0
prompt_toolkit==3.0.52 prompt_toolkit==3.0.52
pyasn1==0.6.1
pycparser==2.23
pycron==3.2.0
pydantic==2.11.9 pydantic==2.11.9
pydantic_core==2.33.2 pydantic_core==2.33.2
Pygments==2.19.2 Pygments==2.19.2
PyJWT==2.10.1
pymongo==4.15.1 pymongo==4.15.1
PyMuPDF==1.26.4
pypandoc==1.15
pytest==8.4.2 pytest==8.4.2
pytest-asyncio==1.2.0 pytest-asyncio==1.2.0
pytest-mock==3.15.1 pytest-mock==3.15.1
python-dateutil==2.9.0.post0 python-dateutil==2.9.0.post0
python-docx==1.2.0
python-dotenv==1.1.1 python-dotenv==1.1.1
python-magic==0.4.27 python-magic==0.4.27
python-multipart==0.0.20
pytz==2025.2 pytz==2025.2
PyYAML==6.0.2 PyYAML==6.0.2
redis==6.4.0
reportlab==4.4.4
rsa==4.9.1
sentinels==1.1.1 sentinels==1.1.1
six==1.17.0 six==1.17.0
sniffio==1.3.1 sniffio==1.3.1
@@ -49,3 +75,5 @@ watchdog==6.0.0
watchfiles==1.1.0 watchfiles==1.1.0
wcwidth==0.2.13 wcwidth==0.2.13
websockets==15.0.1 websockets==15.0.1
wrapt==1.17.3
zipp==3.23.0

View File

@@ -7,12 +7,19 @@ WORKDIR /app
RUN apt-get update && apt-get install -y --no-install-recommends \ RUN apt-get update && apt-get install -y --no-install-recommends \
libmagic1 \ libmagic1 \
file \ file \
pandoc \
ghostscript \
texlive-xetex \
&& rm -rf /var/lib/apt/lists/* && rm -rf /var/lib/apt/lists/*
# Copy requirements and install dependencies # Copy requirements and install dependencies
COPY requirements.txt . COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt RUN pip install --no-cache-dir -r requirements.txt
# Change the user
USER 1002:1002
# Copy application code # Copy application code
COPY . . COPY . .
@@ -21,5 +28,6 @@ ENV PYTHONPATH=/app
# Expose port # Expose port
EXPOSE 8000 EXPOSE 8000
# Command will be overridden by docker-compose # Command will be overridden by docker-compose
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"] CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]

View File

View File

@@ -0,0 +1,107 @@
import jwt
from fastapi import Depends, HTTPException
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
from jwt import InvalidTokenError
from starlette import status
from app.config import settings
from app.database.connection import get_database
from app.models.auth import UserRole
from app.models.user import UserInDB
from app.services.auth_service import AuthService
from app.services.document_service import DocumentService
from app.services.user_service import UserService
security = HTTPBearer()
def get_auth_service() -> AuthService:
    """Provide an AuthService instance for FastAPI dependency injection."""
    service = AuthService()
    return service
def get_user_service() -> UserService:
    """Provide a UserService bound to the application database."""
    return UserService(get_database())
def get_document_service() -> DocumentService:
    """Provide a DocumentService bound to the application database."""
    return DocumentService(get_database())
def get_current_user(
    credentials: HTTPAuthorizationCredentials = Depends(security),
    user_service: UserService = Depends(get_user_service)
) -> UserInDB:
    """
    Dependency to get current authenticated user from JWT token.

    Args:
        credentials: HTTP Bearer credentials
        user_service: User service instance

    Returns:
        UserInDB: Current authenticated user

    Raises:
        HTTPException: 401 if the token is invalid or the user is unknown,
            400 if the user account is inactive
    """
    # A single 401 response reused for every credential failure, so callers
    # cannot distinguish bad token / missing claim / unknown user.
    credentials_exception = HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,
        detail="Could not validate credentials",
        headers={"WWW-Authenticate": "Bearer"},
    )
    try:
        payload = jwt.decode(
            credentials.credentials,
            settings.get_jwt_secret_key(),
            algorithms=[settings.get_jwt_algorithm()]
        )
        # "sub" carries the username; it may be absent from a malformed token.
        username: str | None = payload.get("sub")
        if username is None:
            raise credentials_exception
    except InvalidTokenError:
        raise credentials_exception
    user = user_service.get_user_by_username(username)
    if user is None:
        raise credentials_exception
    if not user.is_active:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Inactive user"
        )
    return user
def get_admin_user(current_user: UserInDB = Depends(get_current_user)) -> UserInDB:
    """
    Dependency that restricts an endpoint to admin users.

    Args:
        current_user: Current authenticated user

    Returns:
        UserInDB: The same user, when they hold the ADMIN role

    Raises:
        HTTPException: 403 when the user is not an admin
    """
    if current_user.role == UserRole.ADMIN:
        return current_user
    raise HTTPException(
        status_code=status.HTTP_403_FORBIDDEN,
        detail="Not enough permissions"
    )

View File

@@ -0,0 +1,80 @@
from fastapi import APIRouter, Depends, HTTPException, status
from fastapi.security import OAuth2PasswordRequestForm
from app.api.dependencies import get_auth_service, get_current_user, get_user_service
from app.models.auth import LoginResponse, UserResponse
from app.models.user import UserInDB
from app.services.auth_service import AuthService
from app.services.user_service import UserService
router = APIRouter(tags=["authentication"])
@router.post("/login", response_model=LoginResponse)
def login(
    form_data: OAuth2PasswordRequestForm = Depends(),
    auth_service: AuthService = Depends(get_auth_service),
    user_service: UserService = Depends(get_user_service)
):
    """
    Authenticate user and return JWT token.

    Args:
        form_data: OAuth2 password form data
        auth_service: Auth service instance
        user_service: User service instance

    Returns:
        LoginResponse: JWT token and user info

    Raises:
        HTTPException: 401 if authentication fails
    """
    account = user_service.get_user_by_username(form_data.username)
    authenticated = (
        account is not None
        and account.is_active
        and auth_service.verify_user_password(form_data.password, account.hashed_password)
    )
    if not authenticated:
        # Same 401 for unknown user, inactive account, and wrong password,
        # so the response does not leak which usernames exist.
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Incorrect username or password",
            headers={"WWW-Authenticate": "Bearer"},
        )
    token = auth_service.create_access_token(data={"sub": account.username})
    profile = UserResponse(
        _id=account.id,
        username=account.username,
        email=account.email,
        role=account.role,
        is_active=account.is_active,
        created_at=account.created_at,
        updated_at=account.updated_at
    )
    return LoginResponse(access_token=token, user=profile)
@router.get("/me", response_model=UserResponse)
def get_current_user_profile(current_user: UserInDB = Depends(get_current_user)):
    """
    Get current user profile.

    Args:
        current_user: Current authenticated user

    Returns:
        UserResponse: Current user profile without sensitive data
    """
    public_fields = {
        "_id": current_user.id,
        "username": current_user.username,
        "email": current_user.email,
        "role": current_user.role,
        "is_active": current_user.is_active,
        "created_at": current_user.created_at,
        "updated_at": current_user.updated_at,
    }
    return UserResponse(**public_fields)

View File

@@ -0,0 +1,243 @@
"""
Document API routes.
This module provides REST endpoints for document management operations.
"""
import logging
import os
from typing import List, Optional
import fitz # PyMuPDF
from fastapi import APIRouter, Depends, HTTPException, Query, status, Path
from starlette.responses import Response
from app.api.dependencies import get_document_service, get_current_user
from app.models.document import DocumentResponse, FileDocument
from app.models.user import UserInDB
from app.services.document_service import DocumentService
logger = logging.getLogger(__name__)
router = APIRouter(tags=["Documents"])
def _count_pdf_pages(pdf_file_path: str) -> int:
    """
    Count the number of pages in a PDF file using PyMuPDF.

    Args:
        pdf_file_path: Path to the PDF file

    Returns:
        Number of pages in the PDF, or 0 if the file cannot be read
    """
    try:
        with fitz.open(pdf_file_path) as pdf_doc:
            page_total = pdf_doc.page_count
    except Exception as e:
        # Best-effort: a broken or unreadable PDF is reported as 0 pages.
        logger.warning(f"Could not count pages for PDF {pdf_file_path}: {e}")
        return 0
    return page_total
def _build_object_url(file_hash: Optional[str]) -> Optional[str]:
"""
Build object URL from file hash.
Args:
file_hash: SHA256 hash of the file
Returns:
URL string or None if hash is not provided
"""
if not file_hash:
return None
return f"/api/objects/{file_hash}"
def _extract_metadata_field(metadata: dict, field_name: str) -> List[str]:
"""
Extract a list field from metadata dictionary.
Args:
metadata: Document metadata dictionary
field_name: Name of the field to extract
Returns:
List of strings, empty list if field doesn't exist or is not a list
"""
field_value = metadata.get(field_name, [])
if isinstance(field_value, list):
return [str(item) for item in field_value]
return []
def _map_file_document_to_response(
    document: FileDocument,
    document_service: DocumentService
) -> DocumentResponse:
    """
    Map FileDocument to DocumentResponse format.

    Args:
        document: FileDocument instance from database
        document_service: Document service for file operations

    Returns:
        DocumentResponse instance ready for API response
    """
    # Page count is only computed when the PDF rendition actually exists.
    page_count = 0
    if document.pdf_file_hash and document_service.exists(document.pdf_file_hash):
        pdf_path = document_service.get_document_path(document.pdf_file_hash)
        page_count = _count_pdf_pages(pdf_path)
    # Build URLs for the stored objects (None when no hash is recorded).
    thumbnail_url = _build_object_url(document.thumbnail_file_hash)
    pdf_url = _build_object_url(document.pdf_file_hash)
    # Extract tags and categories from metadata
    tags = _extract_metadata_field(document.metadata, "tags")
    categories = _extract_metadata_field(document.metadata, "categories")
    # Format created_at timestamp
    created_at = document.detected_at.isoformat() if document.detected_at else ""
    as_dict = {
        "id": str(document.id),
        "name": document.filename,
        "original_file_type": document.file_type.value.upper(),
        "created_at": created_at,
        "file_size": document.file_size,
        "page_count": page_count,
        "thumbnail_url": thumbnail_url,
        "pdf_url": pdf_url,
        "tags": tags,
        "categories": categories
    }
    # Demoted from info to debug: this runs once per document on every list
    # request and dumps the full payload into the logs. Lazy %s formatting
    # avoids building the string when debug logging is disabled.
    logger.debug("Document: %s", as_dict)
    return DocumentResponse(**as_dict)
@router.get("/documents", response_model=List[DocumentResponse])
def list_documents(
    skip: int = Query(0, ge=0, description="Number of documents to skip"),
    limit: int = Query(100, ge=1, le=1000, description="Maximum number of documents to return"),
    user: UserInDB = Depends(get_current_user),
    document_service: DocumentService = Depends(get_document_service)
) -> List[DocumentResponse]:
    """
    Retrieve a paginated list of documents.

    Args:
        skip: Number of documents to skip for pagination
        limit: Maximum number of documents to return
        user: Authenticated user (acts as an authorization gate only)
        document_service: Document service instance

    Returns:
        List of documents in API response format

    Raises:
        HTTPException: If database operation fails
    """
    try:
        documents = document_service.list_documents(skip=skip, limit=limit)
        return [
            _map_file_document_to_response(doc, document_service)
            for doc in documents
        ]
    except Exception as e:
        # logger.exception preserves the full traceback (logger.error dropped
        # it); chaining with "from e" keeps the original cause attached.
        logger.exception("Failed to list documents")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to retrieve documents"
        ) from e
@router.get("/objects/{file_hash}")
async def get_object_by_hash(
    file_hash: str = Path(..., description="SHA256 hash of the object to retrieve"),
    document_service: DocumentService = Depends(get_document_service),
    user: UserInDB = Depends(get_current_user),
):
    """
    Serve object content by its hash.

    This endpoint serves files (original documents, PDFs, thumbnails) by their
    SHA256 hash. It supports all file types stored in the objects folder.

    Args:
        file_hash: SHA256 hash of the object
        document_service: Document service dependency
        user: Authenticated user (acts as an authorization gate only)

    Returns:
        Response with the requested object content and detected MIME type

    Raises:
        HTTPException: If object not found (404) or server error (500)
    """
    try:
        # Check if object exists
        if not document_service.exists(file_hash):
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Object not found"
            )
        # Verify the registered file is actually present on disk
        file_path = document_service.get_document_path(file_hash)
        if not os.path.exists(file_path):
            logger.error(f"Object {file_hash} registered but file not found at {file_path}")
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Object file not found on disk"
            )
        try:
            file_content = document_service.get_document_content_by_hash(file_hash)
            if not file_content:
                raise HTTPException(
                    status_code=status.HTTP_404_NOT_FOUND,
                    detail="Object content not available"
                )
            # Detect MIME type from the content itself
            import magic
            mime_type = magic.from_buffer(file_content, mime=True)
            return Response(
                content=file_content,
                media_type=mime_type,
                headers={
                    "Content-Length": str(len(file_content)),
                    "Cache-Control": "public, max-age=3600"  # Cache for 1 hour
                }
            )
        except HTTPException:
            # BUG FIX: the broad handler below used to catch the 404 raised
            # for missing content and convert it into a 500. Re-raise HTTP
            # errors unchanged instead.
            raise
        except Exception as e:
            logger.error(f"Error reading object content for hash {file_hash}: {str(e)}")
            raise HTTPException(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                detail="Failed to read object content"
            )
    except HTTPException:
        # Re-raise HTTP exceptions as-is
        raise
    except Exception as e:
        logger.error(f"Unexpected error serving object {file_hash}: {str(e)}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Internal server error while serving object"
        )

View File

@@ -0,0 +1,172 @@
from fastapi import APIRouter, Depends, HTTPException
from starlette import status
from app.api.dependencies import get_admin_user, get_user_service
from app.models.auth import UserResponse, MessageResponse
from app.models.types import PyObjectId
from app.models.user import UserInDB, UserCreate, UserUpdate
from app.services.user_service import UserService
router = APIRouter(tags=["users"])
@router.get("", response_model=list[UserResponse])
def list_users(
    admin_user: UserInDB = Depends(get_admin_user),
    user_service: UserService = Depends(get_user_service)
):
    """
    List all users (admin only).

    Args:
        admin_user: Current admin user
        user_service: User service instance

    Returns:
        list[UserResponse]: List of all users without sensitive data
    """
    # SECURITY FIX: the response model was list[UserInDB], which serialized
    # hashed_password into the API response. UserResponse matches the
    # docstring and the other user endpoints.
    return [
        UserResponse(
            _id=user.id,
            username=user.username,
            email=user.email,
            role=user.role,
            is_active=user.is_active,
            created_at=user.created_at,
            updated_at=user.updated_at
        )
        for user in user_service.list_users()
    ]
@router.get("/{user_id}", response_model=UserResponse)
def get_user_by_id(
    user_id: PyObjectId,
    admin_user: UserInDB = Depends(get_admin_user),
    user_service: UserService = Depends(get_user_service)
):
    """
    Get specific user by ID (admin only).

    Args:
        user_id: User ID to retrieve
        admin_user: Current admin user
        user_service: User service instance

    Returns:
        UserResponse: User information without sensitive data

    Raises:
        HTTPException: If user not found
    """
    found = user_service.get_user_by_id(str(user_id))
    if found:
        return found
    raise HTTPException(
        status_code=status.HTTP_404_NOT_FOUND,
        detail="User not found"
    )
@router.post("", response_model=UserResponse, status_code=status.HTTP_201_CREATED)
def create_user(
    user_data: UserCreate,
    admin_user: UserInDB = Depends(get_admin_user),
    user_service: UserService = Depends(get_user_service)
):
    """
    Create new user (admin only).

    Args:
        user_data: User creation data
        admin_user: Current admin user
        user_service: User service instance

    Returns:
        UserResponse: Created user information without sensitive data

    Raises:
        HTTPException: If user creation fails
    """
    try:
        created = user_service.create_user(user_data)
        payload = {
            "_id": created.id,
            "username": created.username,
            "email": created.email,
            "role": created.role,
            "is_active": created.is_active,
            "created_at": created.created_at,
            "updated_at": created.updated_at,
        }
        return UserResponse(**payload)
    except ValueError as e:
        # Service-level validation failures surface as 400 to the client.
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e)
        )
@router.put("/{user_id}", response_model=UserResponse)
def update_user(
    user_id: PyObjectId,
    user_data: UserUpdate,
    admin_user: UserInDB = Depends(get_admin_user),
    user_service: UserService = Depends(get_user_service)
):
    """
    Update an existing user (admin only).

    Args:
        user_id: Identifier of the user to update
        user_data: Fields to change
        admin_user: Current admin user (authorization dependency)
        user_service: User service instance

    Returns:
        UserResponse: Public view of the updated user

    Raises:
        HTTPException: 404 when the user does not exist,
            400 when the service rejects the data (ValueError)
    """
    try:
        updated = user_service.update_user(str(user_id), user_data)
        if updated is None:
            # HTTPException is not a ValueError, so the handler below
            # lets this 404 propagate unchanged.
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="User not found"
            )
        public_fields = {
            "_id": updated.id,
            "username": updated.username,
            "email": updated.email,
            "role": updated.role,
            "is_active": updated.is_active,
            "created_at": updated.created_at,
            "updated_at": updated.updated_at,
        }
        return UserResponse(**public_fields)
    except ValueError as exc:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(exc)
        )
@router.delete("/{user_id}", response_model=MessageResponse)
def delete_user(
    user_id: PyObjectId,
    admin_user: UserInDB = Depends(get_admin_user),
    user_service: UserService = Depends(get_user_service)
):
    """
    Delete a user by ID (admin only).

    Args:
        user_id: Identifier of the user to delete
        admin_user: Current admin user (authorization dependency)
        user_service: User service instance

    Returns:
        MessageResponse: Confirmation message

    Raises:
        HTTPException: 404 when the user does not exist
    """
    if not user_service.delete_user(str(user_id)):
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="User not found"
        )
    return MessageResponse(message="User successfully deleted")

View File

@@ -30,6 +30,10 @@ def get_mongodb_database_name() -> str:
return os.getenv("MONGODB_DATABASE", "mydocmanager") return os.getenv("MONGODB_DATABASE", "mydocmanager")
def get_redis_url() -> str:
    """Return the Redis connection URL (env REDIS_URL, defaults to local db 0)."""
    return os.environ.get("REDIS_URL", "redis://localhost:6379/0")
def get_jwt_secret_key() -> str: def get_jwt_secret_key() -> str:
""" """
Get JWT secret key from environment variables. Get JWT secret key from environment variables.
@@ -94,6 +98,21 @@ def get_objects_folder() -> str:
return os.getenv("OBJECTS_FOLDER", "/objects") return os.getenv("OBJECTS_FOLDER", "/objects")
def watch_directory() -> str: def get_watch_folder() -> str:
"""Directory to monitor for new files""" """Directory to monitor for new files"""
return os.getenv("WATCH_DIRECTORY", "/watched_files") return os.getenv("WATCH_DIRECTORY", "/watched_files")
def get_temp_folder() -> str:
    """Return the directory used for temporary files (env TEMP_DIRECTORY)."""
    return os.environ.get("TEMP_DIRECTORY", "/tmp")
def get_errors_folder() -> str:
    """Directory to store files that failed processing (env ERRORS_DIRECTORY)."""
    # Docstring previously said "temporary files" — a copy-paste from
    # get_temp_folder; this getter is for the errors directory.
    return os.getenv("ERRORS_DIRECTORY", "/errors")
def get_ignored_folder() -> str:
    """Directory to store ignored/unsupported files (env IGNORED_DIRECTORY)."""
    # Docstring previously said "temporary files" — a copy-paste from
    # get_temp_folder; this getter is for the ignored-files directory.
    return os.getenv("IGNORED_DIRECTORY", "/ignored")

View File

@@ -4,21 +4,25 @@ MongoDB database connection management.
This module handles MongoDB connection with fail-fast approach. This module handles MongoDB connection with fail-fast approach.
The application will terminate if MongoDB is not accessible at startup. The application will terminate if MongoDB is not accessible at startup.
""" """
import logging
import sys import sys
from typing import Optional from typing import Optional
from pymongo import MongoClient from pymongo import MongoClient
from pymongo.database import Database from pymongo.database import Database
from pymongo.errors import ConnectionFailure, ServerSelectionTimeoutError from pymongo.errors import ConnectionFailure, ServerSelectionTimeoutError
from motor.motor_asyncio import AsyncIOMotorClient, AsyncIOMotorDatabase
from app.config.settings import get_mongodb_url, get_mongodb_database_name from app.config.settings import get_mongodb_url, get_mongodb_database_name
from app.utils.security import safe_connection_string
# Global variables for singleton pattern # Global variables for singleton pattern
_client: Optional[MongoClient] = None _client: Optional[MongoClient] = None
_database: Optional[Database] = None _database: Optional[Database] = None
logger = logging.getLogger(__name__)
def create_mongodb_client() -> AsyncIOMotorClient:
def create_mongodb_client() -> MongoClient:
""" """
Create MongoDB client with connection validation. Create MongoDB client with connection validation.
@@ -32,7 +36,7 @@ def create_mongodb_client() -> AsyncIOMotorClient:
try: try:
# Create client with short timeout for fail-fast behavior # Create client with short timeout for fail-fast behavior
client = AsyncIOMotorClient( client = MongoClient(
mongodb_url, mongodb_url,
serverSelectionTimeoutMS=5000, # 5 seconds timeout serverSelectionTimeoutMS=5000, # 5 seconds timeout
connectTimeoutMS=5000, connectTimeoutMS=5000,
@@ -42,16 +46,16 @@ def create_mongodb_client() -> AsyncIOMotorClient:
# Test connection by running admin command # Test connection by running admin command
client.admin.command('ping') client.admin.command('ping')
print(f"Successfully connected to MongoDB at {mongodb_url}") logger.info(f"Successfully connected to MongoDB at {safe_connection_string(mongodb_url)}")
return client return client
except (ConnectionFailure, ServerSelectionTimeoutError) as e: except (ConnectionFailure, ServerSelectionTimeoutError) as e:
print(f"ERROR: Failed to connect to MongoDB at {mongodb_url}") logger.error(f"ERROR: Failed to connect to MongoDB at {safe_connection_string(mongodb_url)}")
print(f"Connection error: {str(e)}") logger.error(f"Connection error: {str(e)}")
print("MongoDB is required for this application. Please ensure MongoDB is running and accessible.") logger.error("MongoDB is required for this application. Please ensure MongoDB is running and accessible.")
sys.exit(1) sys.exit(1)
except Exception as e: except Exception as e:
print(f"ERROR: Unexpected error connecting to MongoDB: {str(e)}") logger.error(f"ERROR: Unexpected error connecting to MongoDB: {str(e)}")
sys.exit(1) sys.exit(1)
@@ -73,7 +77,7 @@ def get_database() -> Database:
database_name = get_mongodb_database_name() database_name = get_mongodb_database_name()
_database = _client[database_name] _database = _client[database_name]
print(f"Connected to database: {database_name}") logger.info(f"Connected to database: {database_name}")
return _database return _database
@@ -91,7 +95,7 @@ def close_database_connection():
_client.close() _client.close()
_client = None _client = None
_database = None _database = None
print("MongoDB connection closed") logger.info("MongoDB connection closed")
def get_mongodb_client() -> Optional[MongoClient]: def get_mongodb_client() -> Optional[MongoClient]:

View File

@@ -6,9 +6,11 @@ in MongoDB with proper error handling and type safety.
""" """
from typing import Optional, List from typing import Optional, List
from bson import ObjectId from bson import ObjectId
from pymongo.collection import Collection
from pymongo.database import Database
from pymongo.errors import DuplicateKeyError, PyMongoError from pymongo.errors import DuplicateKeyError, PyMongoError
from motor.motor_asyncio import AsyncIOMotorCollection, AsyncIOMotorDatabase
from app.database.connection import get_extra_args from app.database.connection import get_extra_args
from app.models.document import FileDocument from app.models.document import FileDocument
@@ -36,34 +38,29 @@ class FileDocumentRepository:
with proper error handling and data validation. with proper error handling and data validation.
""" """
def __init__(self, database: AsyncIOMotorDatabase): def __init__(self, database: Database):
"""Initialize file repository with database connection.""" """Initialize file repository with database connection."""
self.db = database self.db = database
self.collection: AsyncIOMotorCollection = self.db.documents self.collection: Collection = self.db.documents
self._ensure_indexes()
async def initialize(self): def initialize(self):
""" """
Initialize repository by ensuring required indexes exist. Initialize repository by ensuring required indexes exist.
Should be called after repository instantiation to setup database indexes. Should be called after repository instantiation to setup database indexes.
""" """
await self._ensure_indexes() self._ensure_indexes()
return self return self
async def _ensure_indexes(self): def _ensure_indexes(self):
""" """
Ensure required database indexes exist. Ensure required database indexes exist.
Creates unique index on username field to prevent duplicates. Creates unique index on username field to prevent duplicates.
""" """
try:
await self.collection.create_index("filepath", unique=True)
except PyMongoError:
# Index might already exist, ignore error
pass pass
async def create_document(self, file_data: FileDocument, session=None) -> FileDocument: def create_document(self, file_data: FileDocument, session=None) -> FileDocument:
""" """
Create a new file document in database. Create a new file document in database.
@@ -83,7 +80,7 @@ class FileDocumentRepository:
if "_id" in file_dict and file_dict["_id"] is None: if "_id" in file_dict and file_dict["_id"] is None:
del file_dict["_id"] del file_dict["_id"]
result = await self.collection.insert_one(file_dict, **get_extra_args(session)) result = self.collection.insert_one(file_dict, **get_extra_args(session))
file_data.id = result.inserted_id file_data.id = result.inserted_id
return file_data return file_data
@@ -92,7 +89,7 @@ class FileDocumentRepository:
except PyMongoError as e: except PyMongoError as e:
raise ValueError(f"Failed to create file document: {e}") raise ValueError(f"Failed to create file document: {e}")
async def find_document_by_id(self, file_id: str) -> Optional[FileDocument]: def find_document_by_id(self, file_id: str) -> Optional[FileDocument]:
""" """
Find file document by ID. Find file document by ID.
@@ -106,7 +103,7 @@ class FileDocumentRepository:
if not ObjectId.is_valid(file_id): if not ObjectId.is_valid(file_id):
return None return None
file_doc = await self.collection.find_one({"_id": ObjectId(file_id)}) file_doc = self.collection.find_one({"_id": ObjectId(file_id)})
if file_doc: if file_doc:
return FileDocument(**file_doc) return FileDocument(**file_doc)
return None return None
@@ -114,7 +111,7 @@ class FileDocumentRepository:
except PyMongoError: except PyMongoError:
return None return None
async def find_document_by_hash(self, file_hash: str) -> Optional[FileDocument]: def find_document_by_hash(self, file_hash: str) -> Optional[FileDocument]:
""" """
Find file document by file hash to detect duplicates. Find file document by file hash to detect duplicates.
@@ -125,7 +122,7 @@ class FileDocumentRepository:
FileDocument or None: File document if found, None otherwise FileDocument or None: File document if found, None otherwise
""" """
try: try:
file_doc = await self.collection.find_one({"file_hash": file_hash}) file_doc = self.collection.find_one({"file_hash": file_hash})
if file_doc: if file_doc:
return FileDocument(**file_doc) return FileDocument(**file_doc)
return None return None
@@ -133,7 +130,48 @@ class FileDocumentRepository:
except PyMongoError: except PyMongoError:
return None return None
async def find_document_by_filepath(self, filepath: str) -> Optional[FileDocument]: def find_document_with_pdf_hash(self, file_hash: str) -> Optional[FileDocument]:
"""
Find file document by file hash with a pdf_file_hash set (not None).
Args:
file_hash (str): SHA256 hash of file content
Returns:
FileDocument or None: File document if found, None otherwise
"""
try:
file_doc = self.collection.find_one({"file_hash": file_hash,
"pdf_file_hash": {"$ne": None}})
if file_doc:
return FileDocument(**file_doc)
return None
except PyMongoError:
return None
def find_same_document(self, filename: str, file_hash: str):
"""
Find document with the same file_name and the same file hash
Args:
filename (str):
file_hash (str): SHA256 hash of file content
Returns:
FileDocument or None: File document if found, None otherwise
"""
try:
file_doc = self.collection.find_one({"file_hash": file_hash,
"filename": filename})
if file_doc:
return FileDocument(**file_doc)
return None
except PyMongoError:
return None
def find_document_by_filepath(self, filepath: str) -> Optional[FileDocument]:
""" """
Find file document by exact filepath. Find file document by exact filepath.
@@ -144,7 +182,7 @@ class FileDocumentRepository:
FileDocument or None: File document if found, None otherwise FileDocument or None: File document if found, None otherwise
""" """
try: try:
file_doc = await self.collection.find_one({"filepath": filepath}) file_doc = self.collection.find_one({"filepath": filepath})
if file_doc: if file_doc:
return FileDocument(**file_doc) return FileDocument(**file_doc)
return None return None
@@ -152,7 +190,7 @@ class FileDocumentRepository:
except PyMongoError: except PyMongoError:
return None return None
async def find_document_by_name(self, filename: str, matching_method: MatchMethodBase = None) -> List[FileDocument]: def find_document_by_name(self, filename: str, matching_method: MatchMethodBase = None) -> List[FileDocument]:
""" """
Find file documents by filename using fuzzy matching. Find file documents by filename using fuzzy matching.
@@ -166,8 +204,7 @@ class FileDocumentRepository:
try: try:
# Get all files from database # Get all files from database
cursor = self.collection.find({}) cursor = self.collection.find({})
all_files = await cursor.to_list(length=None) all_documents = [FileDocument(**file_doc) for file_doc in cursor]
all_documents = [FileDocument(**file_doc) for file_doc in all_files]
if isinstance(matching_method, FuzzyMatching): if isinstance(matching_method, FuzzyMatching):
return fuzzy_matching(filename, all_documents, matching_method.threshold) return fuzzy_matching(filename, all_documents, matching_method.threshold)
@@ -177,7 +214,7 @@ class FileDocumentRepository:
except PyMongoError: except PyMongoError:
return [] return []
async def list_documents(self, skip: int = 0, limit: int = 100) -> List[FileDocument]: def list_documents(self, skip: int = 0, limit: int = 100) -> List[FileDocument]:
""" """
List file documents with pagination. List file documents with pagination.
@@ -190,13 +227,12 @@ class FileDocumentRepository:
""" """
try: try:
cursor = self.collection.find({}).skip(skip).limit(limit).sort("detected_at", -1) cursor = self.collection.find({}).skip(skip).limit(limit).sort("detected_at", -1)
file_docs = await cursor.to_list(length=limit) return [FileDocument(**doc) for doc in cursor]
return [FileDocument(**doc) for doc in file_docs]
except PyMongoError: except PyMongoError:
return [] return []
async def count_documents(self) -> int: def count_documents(self) -> int:
""" """
Count total number of file documents. Count total number of file documents.
@@ -204,11 +240,11 @@ class FileDocumentRepository:
int: Total number of file documents in collection int: Total number of file documents in collection
""" """
try: try:
return await self.collection.count_documents({}) return self.collection.count_documents({})
except PyMongoError: except PyMongoError:
return 0 return 0
async def update_document(self, file_id: str, update_data: dict, session=None) -> Optional[FileDocument]: def update_document(self, file_id: str, update_data: dict, session=None) -> Optional[FileDocument]:
""" """
Update file document with new data. Update file document with new data.
@@ -228,9 +264,9 @@ class FileDocumentRepository:
clean_update_data = {k: v for k, v in update_data.items() if v is not None} clean_update_data = {k: v for k, v in update_data.items() if v is not None}
if not clean_update_data: if not clean_update_data:
return await self.find_document_by_id(file_id) return self.find_document_by_id(file_id)
result = await self.collection.find_one_and_update( result = self.collection.find_one_and_update(
{"_id": ObjectId(file_id)}, {"_id": ObjectId(file_id)},
{"$set": clean_update_data}, {"$set": clean_update_data},
return_document=True, return_document=True,
@@ -244,7 +280,7 @@ class FileDocumentRepository:
except PyMongoError: except PyMongoError:
return None return None
async def delete_document(self, file_id: str, session=None) -> bool: def delete_document(self, file_id: str, session=None) -> bool:
""" """
Delete file document from database. Delete file document from database.
@@ -259,7 +295,7 @@ class FileDocumentRepository:
if not ObjectId.is_valid(file_id): if not ObjectId.is_valid(file_id):
return False return False
result = await self.collection.delete_one({"_id": ObjectId(file_id)}, **get_extra_args(session)) result = self.collection.delete_one({"_id": ObjectId(file_id)}, **get_extra_args(session))
return result.deleted_count > 0 return result.deleted_count > 0
except PyMongoError: except PyMongoError:

View File

@@ -8,7 +8,8 @@ with automatic timestamp management and error handling.
from datetime import datetime from datetime import datetime
from typing import List, Optional from typing import List, Optional
from motor.motor_asyncio import AsyncIOMotorCollection, AsyncIOMotorDatabase from pymongo.collection import Collection
from pymongo.database import Database
from pymongo.errors import PyMongoError from pymongo.errors import PyMongoError
from app.exceptions.job_exceptions import JobRepositoryError from app.exceptions.job_exceptions import JobRepositoryError
@@ -24,33 +25,33 @@ class JobRepository:
timestamp management and proper error handling. timestamp management and proper error handling.
""" """
def __init__(self, database: AsyncIOMotorDatabase): def __init__(self, database: Database):
"""Initialize repository with MongoDB collection reference.""" """Initialize repository with MongoDB collection reference."""
self.db = database self.db = database
self.collection: AsyncIOMotorCollection = self.db.processing_jobs self.collection: Collection = self.db.processing_jobs
async def _ensure_indexes(self): def _ensure_indexes(self):
""" """
Ensure required database indexes exist. Ensure required database indexes exist.
Creates unique index on username field to prevent duplicates. Creates unique index on username field to prevent duplicates.
""" """
try: try:
await self.collection.create_index("document_id", unique=True) self.collection.create_index("document_id", unique=True)
except PyMongoError: except PyMongoError:
# Index might already exist, ignore error # Index might already exist, ignore error
pass pass
async def initialize(self): def initialize(self):
""" """
Initialize repository by ensuring required indexes exist. Initialize repository by ensuring required indexes exist.
Should be called after repository instantiation to setup database indexes. Should be called after repository instantiation to setup database indexes.
""" """
await self._ensure_indexes() self._ensure_indexes()
return self return self
async def create_job(self, document_id: PyObjectId, task_id: Optional[str] = None) -> ProcessingJob: def create_job(self, document_id: PyObjectId, task_id: Optional[str] = None) -> ProcessingJob:
""" """
Create a new processing job. Create a new processing job.
@@ -75,7 +76,7 @@ class JobRepository:
"error_message": None "error_message": None
} }
result = await self.collection.insert_one(job_data) result = self.collection.insert_one(job_data)
job_data["_id"] = result.inserted_id job_data["_id"] = result.inserted_id
return ProcessingJob(**job_data) return ProcessingJob(**job_data)
@@ -83,7 +84,7 @@ class JobRepository:
except PyMongoError as e: except PyMongoError as e:
raise JobRepositoryError("create_job", e) raise JobRepositoryError("create_job", e)
async def find_job_by_id(self, job_id: PyObjectId) -> Optional[ProcessingJob]: def find_job_by_id(self, job_id: PyObjectId) -> Optional[ProcessingJob]:
""" """
Retrieve a job by its ID. Retrieve a job by its ID.
@@ -98,7 +99,7 @@ class JobRepository:
JobRepositoryError: If database operation fails JobRepositoryError: If database operation fails
""" """
try: try:
job_data = await self.collection.find_one({"_id": job_id}) job_data = self.collection.find_one({"_id": job_id})
if job_data: if job_data:
return ProcessingJob(**job_data) return ProcessingJob(**job_data)
@@ -107,7 +108,7 @@ class JobRepository:
except PyMongoError as e: except PyMongoError as e:
raise JobRepositoryError("get_job_by_id", e) raise JobRepositoryError("get_job_by_id", e)
async def update_job_status( def update_job_status(
self, self,
job_id: PyObjectId, job_id: PyObjectId,
status: ProcessingStatus, status: ProcessingStatus,
@@ -143,7 +144,7 @@ class JobRepository:
if error_message is not None: if error_message is not None:
update_data["error_message"] = error_message update_data["error_message"] = error_message
result = await self.collection.find_one_and_update( result = self.collection.find_one_and_update(
{"_id": job_id}, {"_id": job_id},
{"$set": update_data}, {"$set": update_data},
return_document=True return_document=True
@@ -157,7 +158,7 @@ class JobRepository:
except PyMongoError as e: except PyMongoError as e:
raise JobRepositoryError("update_job_status", e) raise JobRepositoryError("update_job_status", e)
async def delete_job(self, job_id: PyObjectId) -> bool: def delete_job(self, job_id: PyObjectId) -> bool:
""" """
Delete a job from the database. Delete a job from the database.
@@ -171,14 +172,14 @@ class JobRepository:
JobRepositoryError: If database operation fails JobRepositoryError: If database operation fails
""" """
try: try:
result = await self.collection.delete_one({"_id": job_id}) result = self.collection.delete_one({"_id": job_id})
return result.deleted_count > 0 return result.deleted_count > 0
except PyMongoError as e: except PyMongoError as e:
raise JobRepositoryError("delete_job", e) raise JobRepositoryError("delete_job", e)
async def find_jobs_by_document_id(self, document_id: PyObjectId) -> List[ProcessingJob]: def find_jobs_by_document_id(self, document_id: PyObjectId) -> List[ProcessingJob]:
""" """
Retrieve all jobs for a specific file. Retrieve all jobs for a specific file.
@@ -195,7 +196,7 @@ class JobRepository:
cursor = self.collection.find({"document_id": document_id}) cursor = self.collection.find({"document_id": document_id})
jobs = [] jobs = []
async for job_data in cursor: for job_data in cursor:
jobs.append(ProcessingJob(**job_data)) jobs.append(ProcessingJob(**job_data))
return jobs return jobs
@@ -203,7 +204,7 @@ class JobRepository:
except PyMongoError as e: except PyMongoError as e:
raise JobRepositoryError("get_jobs_by_file_id", e) raise JobRepositoryError("get_jobs_by_file_id", e)
async def get_jobs_by_status(self, status: ProcessingStatus) -> List[ProcessingJob]: def get_jobs_by_status(self, status: ProcessingStatus) -> List[ProcessingJob]:
""" """
Retrieve all jobs with a specific status. Retrieve all jobs with a specific status.
@@ -220,7 +221,7 @@ class JobRepository:
cursor = self.collection.find({"status": status}) cursor = self.collection.find({"status": status})
jobs = [] jobs = []
async for job_data in cursor: for job_data in cursor:
jobs.append(ProcessingJob(**job_data)) jobs.append(ProcessingJob(**job_data))
return jobs return jobs

View File

@@ -5,10 +5,12 @@ This module implements the repository pattern for user CRUD operations
with dependency injection of the database connection using async/await. with dependency injection of the database connection using async/await.
""" """
from typing import Optional, List
from datetime import datetime from datetime import datetime
from typing import Optional, List
from bson import ObjectId from bson import ObjectId
from motor.motor_asyncio import AsyncIOMotorDatabase, AsyncIOMotorCollection from pymongo.collection import Collection
from pymongo.database import Database
from pymongo.errors import DuplicateKeyError, PyMongoError from pymongo.errors import DuplicateKeyError, PyMongoError
from app.models.user import UserCreate, UserInDB, UserUpdate from app.models.user import UserCreate, UserInDB, UserUpdate
@@ -23,7 +25,7 @@ class UserRepository:
following the repository pattern with dependency injection and async/await. following the repository pattern with dependency injection and async/await.
""" """
def __init__(self, database: AsyncIOMotorDatabase): def __init__(self, database: Database):
""" """
Initialize repository with database dependency. Initialize repository with database dependency.
@@ -31,30 +33,30 @@ class UserRepository:
database (AsyncIOMotorDatabase): MongoDB database instance database (AsyncIOMotorDatabase): MongoDB database instance
""" """
self.db = database self.db = database
self.collection: AsyncIOMotorCollection = database.users self.collection: Collection = database.users
async def initialize(self): def initialize(self):
""" """
Initialize repository by ensuring required indexes exist. Initialize repository by ensuring required indexes exist.
Should be called after repository instantiation to setup database indexes. Should be called after repository instantiation to setup database indexes.
""" """
await self._ensure_indexes() self._ensure_indexes()
return self return self
async def _ensure_indexes(self): def _ensure_indexes(self):
""" """
Ensure required database indexes exist. Ensure required database indexes exist.
Creates unique index on username field to prevent duplicates. Creates unique index on username field to prevent duplicates.
""" """
try: try:
await self.collection.create_index("username", unique=True) self.collection.create_index("username", unique=True)
except PyMongoError: except PyMongoError:
# Index might already exist, ignore error # Index might already exist, ignore error
pass pass
async def create_user(self, user_data: UserCreate) -> UserInDB: def create_user(self, user_data: UserCreate) -> UserInDB:
""" """
Create a new user in the database. Create a new user in the database.
@@ -79,7 +81,7 @@ class UserRepository:
} }
try: try:
result = await self.collection.insert_one(user_dict) result = self.collection.insert_one(user_dict)
user_dict["_id"] = result.inserted_id user_dict["_id"] = result.inserted_id
return UserInDB(**user_dict) return UserInDB(**user_dict)
except DuplicateKeyError as e: except DuplicateKeyError as e:
@@ -87,7 +89,7 @@ class UserRepository:
except PyMongoError as e: except PyMongoError as e:
raise ValueError(f"Failed to create user: {e}") raise ValueError(f"Failed to create user: {e}")
async def find_user_by_username(self, username: str) -> Optional[UserInDB]: def find_user_by_username(self, username: str) -> Optional[UserInDB]:
""" """
Find user by username. Find user by username.
@@ -98,14 +100,14 @@ class UserRepository:
UserInDB or None: User if found, None otherwise UserInDB or None: User if found, None otherwise
""" """
try: try:
user_doc = await self.collection.find_one({"username": username}) user_doc = self.collection.find_one({"username": username})
if user_doc: if user_doc:
return UserInDB(**user_doc) return UserInDB(**user_doc)
return None return None
except PyMongoError: except PyMongoError:
return None return None
async def find_user_by_id(self, user_id: str) -> Optional[UserInDB]: def find_user_by_id(self, user_id: str) -> Optional[UserInDB]:
""" """
Find user by ID. Find user by ID.
@@ -119,14 +121,14 @@ class UserRepository:
if not ObjectId.is_valid(user_id): if not ObjectId.is_valid(user_id):
return None return None
user_doc = await self.collection.find_one({"_id": ObjectId(user_id)}) user_doc = self.collection.find_one({"_id": ObjectId(user_id)})
if user_doc: if user_doc:
return UserInDB(**user_doc) return UserInDB(**user_doc)
return None return None
except PyMongoError: except PyMongoError:
return None return None
async def find_user_by_email(self, email: str) -> Optional[UserInDB]: def find_user_by_email(self, email: str) -> Optional[UserInDB]:
""" """
Find user by email address. Find user by email address.
@@ -137,14 +139,14 @@ class UserRepository:
UserInDB or None: User if found, None otherwise UserInDB or None: User if found, None otherwise
""" """
try: try:
user_doc = await self.collection.find_one({"email": email}) user_doc = self.collection.find_one({"email": email})
if user_doc: if user_doc:
return UserInDB(**user_doc) return UserInDB(**user_doc)
return None return None
except PyMongoError: except PyMongoError:
return None return None
async def update_user(self, user_id: str, user_update: UserUpdate) -> Optional[UserInDB]: def update_user(self, user_id: str, user_update: UserUpdate) -> Optional[UserInDB]:
""" """
Update user information. Update user information.
@@ -172,14 +174,16 @@ class UserRepository:
update_data["role"] = user_update.role update_data["role"] = user_update.role
if user_update.is_active is not None: if user_update.is_active is not None:
update_data["is_active"] = user_update.is_active update_data["is_active"] = user_update.is_active
if user_update.preferences is not None:
update_data["preferences"] = user_update.preferences
# Remove None values from update data # Remove None values from update data
clean_update_data = {k: v for k, v in update_data.items() if v is not None} clean_update_data = {k: v for k, v in update_data.items() if v is not None}
if not clean_update_data: if not clean_update_data:
return await self.find_user_by_id(user_id) return self.find_user_by_id(user_id)
result = await self.collection.find_one_and_update( result = self.collection.find_one_and_update(
{"_id": ObjectId(user_id)}, {"_id": ObjectId(user_id)},
{"$set": clean_update_data}, {"$set": clean_update_data},
return_document=True return_document=True
@@ -192,7 +196,7 @@ class UserRepository:
except PyMongoError: except PyMongoError:
return None return None
async def delete_user(self, user_id: str) -> bool: def delete_user(self, user_id: str) -> bool:
""" """
Delete user from database. Delete user from database.
@@ -206,12 +210,12 @@ class UserRepository:
if not ObjectId.is_valid(user_id): if not ObjectId.is_valid(user_id):
return False return False
result = await self.collection.delete_one({"_id": ObjectId(user_id)}) result = self.collection.delete_one({"_id": ObjectId(user_id)})
return result.deleted_count > 0 return result.deleted_count > 0
except PyMongoError: except PyMongoError:
return False return False
async def list_users(self, skip: int = 0, limit: int = 100) -> List[UserInDB]: def list_users(self, skip: int = 0, limit: int = 100) -> List[UserInDB]:
""" """
List users with pagination. List users with pagination.
@@ -224,12 +228,12 @@ class UserRepository:
""" """
try: try:
cursor = self.collection.find({}).skip(skip).limit(limit).sort("created_at", -1) cursor = self.collection.find({}).skip(skip).limit(limit).sort("created_at", -1)
user_docs = await cursor.to_list(length=limit) user_docs = cursor.to_list(length=limit)
return [UserInDB(**user_doc) for user_doc in user_docs] return [UserInDB(**user_doc) for user_doc in user_docs]
except PyMongoError: except PyMongoError:
return [] return []
async def count_users(self) -> int: def count_users(self) -> int:
""" """
Count total number of users. Count total number of users.
@@ -237,11 +241,11 @@ class UserRepository:
int: Total number of users in database int: Total number of users in database
""" """
try: try:
return await self.collection.count_documents({}) return self.collection.count_documents({})
except PyMongoError: except PyMongoError:
return 0 return 0
async def user_exists(self, username: str) -> bool: def user_exists(self, username: str) -> bool:
""" """
Check if user exists by username. Check if user exists by username.
@@ -252,7 +256,7 @@ class UserRepository:
bool: True if user exists, False otherwise bool: True if user exists, False otherwise
""" """
try: try:
count = await self.collection.count_documents({"username": username}) count = self.collection.count_documents({"username": username})
return count > 0 return count > 0
except PyMongoError: except PyMongoError:
return False return False

View File

@@ -30,7 +30,7 @@ class DocumentFileEventHandler(FileSystemEventHandler):
dispatching Celery tasks, and managing processing jobs. dispatching Celery tasks, and managing processing jobs.
""" """
SUPPORTED_EXTENSIONS = {'.txt', '.pdf', '.docx'} SUPPORTED_EXTENSIONS = {'.txt', '.pdf', '.docx', '.jpg', '.png', '.jpeg'}
def __init__(self, document_service: DocumentService, job_service: JobService): def __init__(self, document_service: DocumentService, job_service: JobService):
""" """
@@ -59,15 +59,16 @@ class DocumentFileEventHandler(FileSystemEventHandler):
if file_extension not in self.SUPPORTED_EXTENSIONS: if file_extension not in self.SUPPORTED_EXTENSIONS:
logger.info(f"Ignoring unsupported file type: {filepath}") logger.info(f"Ignoring unsupported file type: {filepath}")
self.document_service.move_to_ignored(filepath, "unsupported file type")
return return
logger.info(f"Processing new file: {filepath}") logger.info(f"Processing new file: {filepath}")
try: try:
from tasks.document_processing import process_document from tasks.document_processing import process_document
celery_result = process_document.delay(filepath) task_result = process_document.delay(filepath)
celery_task_id = celery_result.id task_id = task_result.task_id
logger.info(f"Dispatched Celery task with ID: {celery_task_id}") logger.info(f"Dispatched Celery task with ID: {task_id}")
except Exception as e: except Exception as e:
logger.error(f"Failed to process file {filepath}: {str(e)}") logger.error(f"Failed to process file {filepath}: {str(e)}")

View File

@@ -15,6 +15,9 @@ from typing import AsyncGenerator
from fastapi import FastAPI from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware from fastapi.middleware.cors import CORSMiddleware
from app.api.routes.auth import router as auth_router
from app.api.routes.users import router as users_router
from app.api.routes.document import router as documents_router
from app.config import settings from app.config import settings
from app.database.connection import get_database from app.database.connection import get_database
from app.file_watcher import create_file_watcher, FileWatcher from app.file_watcher import create_file_watcher, FileWatcher
@@ -23,12 +26,6 @@ from app.services.init_service import InitializationService
from app.services.job_service import JobService from app.services.job_service import JobService
from app.services.user_service import UserService from app.services.user_service import UserService
# from api.routes.auth import router as auth_router
# from api.routes.users import router as users_router
# from api.routes.documents import router as documents_router
# from api.routes.jobs import router as jobs_router
# Configure logging # Configure logging
logging.basicConfig(level=logging.INFO) logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -64,17 +61,17 @@ async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]:
# Create default admin user # Create default admin user
init_service = InitializationService(user_service) init_service = InitializationService(user_service)
await init_service.initialize_application() init_service.initialize_application()
logger.info("Default admin user initialization completed") logger.info("Default admin user initialization completed")
# Create and start file watcher # Create and start file watcher
file_watcher = create_file_watcher( file_watcher = create_file_watcher(
watch_directory=settings.watch_directory(), watch_directory=settings.get_watch_folder(),
document_service=document_service, document_service=document_service,
job_service=job_service job_service=job_service
) )
file_watcher.start() file_watcher.start()
logger.info(f"FileWatcher started for directory: {settings.watch_directory()}") logger.info(f"FileWatcher started for directory: {settings.get_watch_folder()}")
logger.info("Application startup completed successfully") logger.info("Application startup completed successfully")
@@ -106,17 +103,16 @@ app = FastAPI(
# Configure CORS # Configure CORS
app.add_middleware( app.add_middleware(
CORSMiddleware, CORSMiddleware,
allow_origins=["http://localhost:3000"], # React frontend allow_origins=["http://localhost:5173", "http://localhost:5174"], # React frontend
allow_credentials=True, allow_credentials=True,
allow_methods=["*"], allow_methods=["*"],
allow_headers=["*"], allow_headers=["*"],
) )
# Include routers # Include routers
# app.include_router(auth_router, prefix="/auth", tags=["Authentication"]) app.include_router(auth_router, prefix="/auth", tags=["Authentication"])
# app.include_router(users_router, prefix="/users", tags=["User Management"]) app.include_router(users_router, prefix="/users", tags=["User Management"])
# app.include_router(documents_router, prefix="/documents", tags=["Documents"]) app.include_router(documents_router, prefix="/api", tags=["Documents"])
# app.include_router(jobs_router, prefix="/jobs", tags=["Processing Jobs"]) # app.include_router(jobs_router, prefix="/jobs", tags=["Processing Jobs"])

View File

@@ -3,12 +3,45 @@ Authentication models and enums for user management.
Contains user roles enumeration and authentication-related Pydantic models. Contains user roles enumeration and authentication-related Pydantic models.
""" """
from datetime import datetime
from enum import Enum from enum import Enum
from pydantic import BaseModel, Field
from app.models.types import PyObjectId
class UserRole(str, Enum): class UserRole(str, Enum):
"""User roles enumeration with string values.""" """User roles enumeration with string values."""
USER = "user" USER = "user"
ADMIN = "admin" ADMIN = "admin"
class UserResponse(BaseModel):
"""Model for user data in API responses (excludes password_hash)."""
id: PyObjectId = Field(alias="_id")
username: str
email: str
role: UserRole
is_active: bool
created_at: datetime
updated_at: datetime
model_config = {
"populate_by_name": True,
"arbitrary_types_allowed": True,
}
class LoginResponse(BaseModel):
"""Response model for successful login."""
access_token: str
token_type: str = "bearer"
user: UserResponse
class MessageResponse(BaseModel):
"""Generic message response."""
message: str

View File

@@ -7,10 +7,9 @@ stored in MongoDB collections.
from datetime import datetime from datetime import datetime
from enum import Enum from enum import Enum
from typing import Any, Dict, Optional from typing import Any, Dict, List, Optional
from bson import ObjectId from pydantic import BaseModel, Field, field_validator, ConfigDict
from pydantic import BaseModel, Field, field_validator
from app.models.types import PyObjectId from app.models.types import PyObjectId
@@ -49,6 +48,8 @@ class FileDocument(BaseModel):
metadata: Dict[str, Any] = Field(default_factory=dict, description="File-specific metadata") metadata: Dict[str, Any] = Field(default_factory=dict, description="File-specific metadata")
detected_at: Optional[datetime] = Field(default=None, description="Timestamp when file was detected") detected_at: Optional[datetime] = Field(default=None, description="Timestamp when file was detected")
file_hash: Optional[str] = Field(default=None, description="SHA256 hash of file content") file_hash: Optional[str] = Field(default=None, description="SHA256 hash of file content")
pdf_file_hash: Optional[str] = Field(default=None, description="SHA256 hash of the associated pdf file content")
thumbnail_file_hash: Optional[str] = Field(default=None, description="SHA256 hash of the thumbnail")
encoding: str = Field(default="utf-8", description="Character encoding for text files") encoding: str = Field(default="utf-8", description="Character encoding for text files")
file_size: int = Field(..., ge=0, description="File size in bytes") file_size: int = Field(..., ge=0, description="File size in bytes")
mime_type: str = Field(..., description="MIME type detected") mime_type: str = Field(..., description="MIME type detected")
@@ -68,3 +69,28 @@ class FileDocument(BaseModel):
if not v.strip(): if not v.strip():
raise ValueError("Filename cannot be empty") raise ValueError("Filename cannot be empty")
return v.strip() return v.strip()
class DocumentResponse(BaseModel):
"""
Response model for document API endpoints.
Represents a document in the format expected by the frontend application.
Field names are automatically converted from snake_case to camelCase.
"""
model_config = ConfigDict(alias_generator=lambda field_name: ''.join(
word.capitalize() if i > 0 else word
for i, word in enumerate(field_name.split('_'))
), populate_by_name=True)
id: str = Field(..., description="Document unique identifier")
name: str = Field(..., description="Document filename")
original_file_type: str = Field(..., description="Original file type before conversion")
created_at: str = Field(..., description="ISO timestamp when document was created")
file_size: int = Field(..., description="File size in bytes")
page_count: int = Field(..., description="Number of pages in the document")
thumbnail_url: Optional[str] = Field(default=None, description="URL to document thumbnail")
pdf_url: Optional[str] = Field(default=None, description="URL to PDF version of document")
tags: List[str] = Field(default_factory=list, description="Document tags")
categories: List[str] = Field(default_factory=list, description="Document categories")

View File

@@ -14,6 +14,9 @@ class ProcessingStatus(str, Enum):
PENDING = "pending" PENDING = "pending"
PROCESSING = "processing" PROCESSING = "processing"
COMPLETED = "completed" COMPLETED = "completed"
SAVING_OBJECT = "saving_object"
SAVING_PDF = "saving_pdf"
CREATING_THUMBNAIL = "creating_thumbnail"
FAILED = "failed" FAILED = "failed"

View File

@@ -7,10 +7,10 @@ and API responses with proper validation and type safety.
import re import re
from datetime import datetime from datetime import datetime
from typing import Optional, Any from typing import Optional
from bson import ObjectId from bson import ObjectId
from pydantic import BaseModel, Field, field_validator, EmailStr from pydantic import BaseModel, Field, field_validator, EmailStr
from pydantic_core import core_schema
from app.models.auth import UserRole from app.models.auth import UserRole
from app.models.types import PyObjectId from app.models.types import PyObjectId
@@ -105,6 +105,7 @@ class UserUpdate(BaseModel):
password: Optional[str] = None password: Optional[str] = None
role: Optional[UserRole] = None role: Optional[UserRole] = None
is_active: Optional[bool] = None is_active: Optional[bool] = None
preferences: Optional[dict] = None
@field_validator('username') @field_validator('username')
@classmethod @classmethod
@@ -130,24 +131,7 @@ class UserInDB(BaseModel):
hashed_password: str hashed_password: str
role: UserRole role: UserRole
is_active: bool = True is_active: bool = True
created_at: datetime preferences: dict = Field(default_factory=dict)
updated_at: datetime
model_config = {
"populate_by_name": True,
"arbitrary_types_allowed": True,
"json_encoders": {ObjectId: str}
}
class UserResponse(BaseModel):
"""Model for user data in API responses (excludes password_hash)."""
id: PyObjectId = Field(alias="_id")
username: str
email: str
role: UserRole
is_active: bool
created_at: datetime created_at: datetime
updated_at: datetime updated_at: datetime

View File

@@ -4,7 +4,11 @@ Authentication service for password hashing and verification.
This module provides authentication-related functionality including This module provides authentication-related functionality including
password hashing, verification, and JWT token management. password hashing, verification, and JWT token management.
""" """
from datetime import datetime, timedelta
import jwt
from app.config import settings
from app.utils.security import hash_password, verify_password from app.utils.security import hash_password, verify_password
@@ -56,3 +60,25 @@ class AuthService:
False False
""" """
return verify_password(password, hashed_password) return verify_password(password, hashed_password)
@staticmethod
def create_access_token(data=dict) -> str:
"""
Create a JWT access token.
Args:
data (dict): Payload data to include in the token.
Returns:
str: Encoded JWT token.
"""
# Copy data to avoid modifying the original dict
to_encode = data.copy()
# Add expiration time
expire = datetime.now() + timedelta(hours=settings.get_jwt_expire_hours())
to_encode.update({"exp": expire})
# Encode JWT
encoded_jwt = jwt.encode(to_encode, settings.get_jwt_secret_key(), algorithm=settings.get_jwt_algorithm())
return encoded_jwt

View File

@@ -6,7 +6,9 @@ while maintaining data consistency through MongoDB transactions.
""" """
import hashlib import hashlib
import logging
import os import os
import shutil
from datetime import datetime from datetime import datetime
from pathlib import Path from pathlib import Path
from typing import List, Optional, Dict, Any from typing import List, Optional, Dict, Any
@@ -14,13 +16,28 @@ from typing import List, Optional, Dict, Any
import magic import magic
from pymongo.errors import PyMongoError from pymongo.errors import PyMongoError
from app.config.settings import get_objects_folder from app.config.settings import get_objects_folder, get_temp_folder, get_errors_folder, get_ignored_folder
from app.database.repositories.document_repository import FileDocumentRepository from app.database.repositories.document_repository import FileDocumentRepository
from app.models.document import ( from app.models.document import (
FileDocument, FileDocument,
FileType, FileType,
) )
from app.models.types import PyObjectId from app.models.types import PyObjectId
from app.utils.pdf_converter import convert_to_pdf
from app.utils.pdf_thumbmail import PDFThumbnailGenerator
from app.utils.security import generate_uuid_filename
logger = logging.getLogger(__name__)
class DocumentAlreadyExists(Exception):
def __init__(self, message):
self.message = message
class DocumentProcessingError(Exception):
def __init__(self, message):
self.message = message
class DocumentService: class DocumentService:
@@ -31,7 +48,11 @@ class DocumentService:
and their content while ensuring data consistency through transactions. and their content while ensuring data consistency through transactions.
""" """
def __init__(self, database, objects_folder: str = None): def __init__(self, database,
objects_folder: str = None,
temp_folder: str = None,
errors_folder: str = None,
ignored_folder: str = None):
""" """
Initialize the document service with repository dependencies. Initialize the document service with repository dependencies.
@@ -43,9 +64,12 @@ class DocumentService:
self.db = database self.db = database
self.document_repository = FileDocumentRepository(self.db) self.document_repository = FileDocumentRepository(self.db)
self.objects_folder = objects_folder or get_objects_folder() self.objects_folder = objects_folder or get_objects_folder()
self.temp_folder = temp_folder or get_temp_folder()
self.errors_folder = errors_folder or get_errors_folder()
self.ignored_folder = ignored_folder or get_ignored_folder()
async def initialize(self): def initialize(self):
await self.document_repository.initialize() self.document_repository.initialize()
return self return self
@staticmethod @staticmethod
@@ -117,7 +141,40 @@ class DocumentService:
return path.read_bytes() return path.read_bytes()
def _get_document_path(self, file_hash): @staticmethod
def _get_safe_path(file_path):
"""
If the path already exists, add a suffix to the filename.
Increment the suffix until a safe path is found.
:param file_path:
:return:
"""
path = Path(file_path)
# If the path doesn't exist, return it as is
if not path.exists():
return file_path
# Split the filename and extension
stem = path.stem
suffix = path.suffix
directory = path.parent
# Try incrementing numbers until a unique path is found
counter = 1
while True:
# Create new filename with counter
new_filename = f"{stem}_{counter}{suffix}"
new_path = os.path.join(directory, new_filename)
# Check if this new path exists
if not os.path.exists(new_path):
return new_path
# Increment counter for next attempt
counter += 1
def get_document_path(self, file_hash):
""" """
:param file_hash: :param file_hash:
@@ -125,8 +182,13 @@ class DocumentService:
""" """
return os.path.join(self.objects_folder, file_hash[:24], file_hash) return os.path.join(self.objects_folder, file_hash[:24], file_hash)
def exists(self, file_hash):
if file_hash is None:
return False
return os.path.exists(self.get_document_path(file_hash))
def save_content_if_needed(self, file_hash, content: bytes): def save_content_if_needed(self, file_hash, content: bytes):
target_path = self._get_document_path(file_hash) target_path = self.get_document_path(file_hash)
if os.path.exists(target_path): if os.path.exists(target_path):
return return
@@ -136,7 +198,20 @@ class DocumentService:
with open(target_path, "wb") as f: with open(target_path, "wb") as f:
f.write(content) f.write(content)
async def create_document( def move_to_errors(self, document_id, file_path):
logger.info(f"Moving file {file_path} to error folder")
error_file_name = f"{document_id}_{os.path.basename(file_path)}"
error_file_path = self._get_safe_path(os.path.join(self.errors_folder, error_file_name))
shutil.move(file_path, error_file_path)
def move_to_ignored(self, file_path, reason="Unknown"):
logger.info(f"Moving file {file_path} to ignored folder")
ignored_file_name = datetime.now().strftime("%Y-%m-%d_%H-%M-%S") + f"_### {reason} ###_" + os.path.basename(
file_path)
ignored_file_path = self._get_safe_path(os.path.join(self.ignored_folder, ignored_file_name))
shutil.move(file_path, ignored_file_path)
def create_document(
self, self,
file_path: str, file_path: str,
file_bytes: bytes | None = None, file_bytes: bytes | None = None,
@@ -162,7 +237,7 @@ class DocumentService:
PyMongoError: If database operation fails PyMongoError: If database operation fails
""" """
# Calculate automatic attributes # Calculate automatic attributes
file_bytes = file_bytes or self._read_file_bytes(file_path) file_bytes = file_bytes if file_bytes is not None else self._read_file_bytes(file_path)
file_hash = self._calculate_file_hash(file_bytes) file_hash = self._calculate_file_hash(file_bytes)
file_type = self._detect_file_type(file_path) file_type = self._detect_file_type(file_path)
mime_type = self._detect_mime_type(file_bytes) mime_type = self._detect_mime_type(file_bytes)
@@ -171,7 +246,16 @@ class DocumentService:
detected_at = datetime.now() detected_at = datetime.now()
try: try:
logger.info(f'Creating Document for "{file_path}"')
# Skip the document if it already exists
same_document = self.document_repository.find_same_document(filename, file_hash)
if same_document is not None:
logger.info(f" Document with same hash already exists. Skipping...")
self.move_to_ignored(file_path, f"already exists ({same_document.id})")
raise DocumentAlreadyExists(f"Document with same hash already exists ({same_document.id})")
self.save_content_if_needed(file_hash, file_bytes) self.save_content_if_needed(file_hash, file_bytes)
logger.info(f" Saved content to {self.get_document_path(file_hash)}")
# Create FileDocument # Create FileDocument
file_data = FileDocument( file_data = FileDocument(
@@ -187,15 +271,91 @@ class DocumentService:
mime_type=mime_type mime_type=mime_type
) )
created_file = await self.document_repository.create_document(file_data) created_document = self.document_repository.create_document(file_data)
logger.info(f" Created document with id '{created_document.id}'")
return created_file return created_document
except DocumentAlreadyExists as e:
raise e
except Exception as e: except Exception as e:
# Transaction will automatically rollback if supported # Transaction will automatically rollback if supported
raise PyMongoError(f"Failed to create document: {str(e)}") raise PyMongoError(f"Failed to create document: {str(e)}")
async def get_document_by_id(self, document_id: PyObjectId) -> Optional[FileDocument]: def create_pdf(self, document_id: PyObjectId):
"""
For all files, a controlled pdf version will be created for standard visualization and action
:return:
"""
logger.info(f"Creating PDF document for {document_id}")
document = self.get_document_by_id(document_id)
if document is None:
logger.error(f" Document not found")
raise DocumentProcessingError(f"Document {document_id} not found.")
# try to find another document that has the same hash
document_with_same_hash = self.get_document_with_pdf_hash(document.file_hash)
# the pdf will be created only if it does not exist yet
if document_with_same_hash and self.exists(document_with_same_hash.pdf_file_hash):
logger.info(f'Found document with same hash. Will use pdf "{document_with_same_hash.pdf_file_hash}".')
self.update_document(document_id, {"pdf_file_hash": document_with_same_hash.pdf_file_hash})
return
# get the content of the file
logger.info(f" No document with same hash and valid pdf found. Will create new pdf content.")
file_bytes = self.get_document_content_by_hash(document.file_hash)
if file_bytes is None:
logger.error(f'Content for document "{document_id}" not found. hash = "{document.file_hash}".')
raise DocumentProcessingError(f'Content for document "{document_id}" not found. hash = "{document.file_hash}".')
# create the pdf file
temp_pdf_file = convert_to_pdf(self.get_document_path(document.file_hash), self.temp_folder)
pdf_file_hash = self._calculate_file_hash(self._read_file_bytes(temp_pdf_file))
self.save_content_if_needed(pdf_file_hash, self._read_file_bytes(temp_pdf_file))
os.remove(temp_pdf_file) # remove the temporary file
logger.info(f' Created new pdf file with hash "{pdf_file_hash}"')
# update the document
self.update_document(document_id, {"pdf_file_hash": pdf_file_hash})
def create_thumbnail(self, document_id: PyObjectId):
logger.info(f'Creating thumbnail document for "{document_id}"')
document = self.get_document_by_id(document_id)
if document is None:
logger.error(f" Document not found !")
raise DocumentProcessingError(f"Document {document_id} not found.")
# try to find another document that has the same hash
document_with_same_hash = self.get_document_with_pdf_hash(document.file_hash)
# We will use the thumbnail of the pdf if it exists
if document_with_same_hash and self.exists(document_with_same_hash.thumbnail_file_hash):
logger.info(f" Found document with same hash. Will use thumbnail {document_with_same_hash.thumbnail_file_hash}")
self.update_document(document_id, {"thumbnail_file_hash": document_with_same_hash.thumbnail_file_hash})
return
logger.info(f" No document with same hash and valid thumbnail found. Will create new thumbnail")
if not self.exists(document.pdf_file_hash):
logger.error(f" PDF file not found.")
raise DocumentProcessingError(f"PDF file for document {document_id} not found")
tmp_thumbnail_path = os.path.join(self.temp_folder, f"{generate_uuid_filename()}.png")
with PDFThumbnailGenerator(self.get_document_path(document.pdf_file_hash)) as gen:
# create the thumbnail
gen.create_thumbnail(tmp_thumbnail_path, page_num=0, width=200)
thumbnail_file_hash = self._calculate_file_hash(self._read_file_bytes(tmp_thumbnail_path))
# save the thumbnail to the objects folder
self.save_content_if_needed(thumbnail_file_hash, self._read_file_bytes(tmp_thumbnail_path))
os.remove(tmp_thumbnail_path)
# update the document
self.update_document(document_id, {"thumbnail_file_hash": thumbnail_file_hash})
logger.info(f" Created thumbnail {thumbnail_file_hash}")
def get_document_by_id(self, document_id: PyObjectId) -> Optional[FileDocument]:
""" """
Retrieve a document by its ID. Retrieve a document by its ID.
@@ -205,9 +365,9 @@ class DocumentService:
Returns: Returns:
FileDocument if found, None otherwise FileDocument if found, None otherwise
""" """
return await self.document_repository.find_document_by_id(str(document_id)) return self.document_repository.find_document_by_id(str(document_id))
async def get_document_by_hash(self, file_hash: str) -> Optional[FileDocument]: def get_document_by_hash(self, file_hash: str) -> Optional[FileDocument]:
""" """
Retrieve a document by its file hash. Retrieve a document by its file hash.
@@ -217,9 +377,12 @@ class DocumentService:
Returns: Returns:
FileDocument if found, None otherwise FileDocument if found, None otherwise
""" """
return await self.document_repository.find_document_by_hash(file_hash) return self.document_repository.find_document_by_hash(file_hash)
async def get_document_by_filepath(self, filepath: str) -> Optional[FileDocument]: def get_document_with_pdf_hash(self, file_hash) -> Optional[FileDocument]:
return self.document_repository.find_document_with_pdf_hash(file_hash)
def get_document_by_filepath(self, filepath: str) -> Optional[FileDocument]:
""" """
Retrieve a document by its file path. Retrieve a document by its file path.
@@ -229,17 +392,17 @@ class DocumentService:
Returns: Returns:
FileDocument if found, None otherwise FileDocument if found, None otherwise
""" """
return await self.document_repository.find_document_by_filepath(filepath) return self.document_repository.find_document_by_filepath(filepath)
async def get_document_content_by_hash(self, file_hash): def get_document_content_by_hash(self, file_hash):
target_path = self._get_document_path(file_hash) target_path = self.get_document_path(file_hash)
if not os.path.exists(target_path): if not os.path.exists(target_path):
return None return None
with open(target_path, "rb") as f: with open(target_path, "rb") as f:
return f.read() return f.read()
async def list_documents( def list_documents(
self, self,
skip: int = 0, skip: int = 0,
limit: int = 100 limit: int = 100
@@ -254,18 +417,18 @@ class DocumentService:
Returns: Returns:
List of FileDocument instances List of FileDocument instances
""" """
return await self.document_repository.list_documents(skip=skip, limit=limit) return self.document_repository.list_documents(skip=skip, limit=limit)
async def count_documents(self) -> int: def count_documents(self) -> int:
""" """
Get total number of documents. Get total number of documents.
Returns: Returns:
Total document count Total document count
""" """
return await self.document_repository.count_documents() return self.document_repository.count_documents()
async def update_document( def update_document(
self, self,
document_id: PyObjectId, document_id: PyObjectId,
update_data: Dict[str, Any] update_data: Dict[str, Any]
@@ -285,9 +448,9 @@ class DocumentService:
update_data["file_hash"] = file_hash update_data["file_hash"] = file_hash
self.save_content_if_needed(file_hash, update_data["file_bytes"]) self.save_content_if_needed(file_hash, update_data["file_bytes"])
return await self.document_repository.update_document(document_id, update_data) return self.document_repository.update_document(document_id, update_data)
async def delete_document(self, document_id: PyObjectId) -> bool: def delete_document(self, document_id: PyObjectId) -> bool:
""" """
Delete a document and its orphaned content. Delete a document and its orphaned content.
@@ -308,22 +471,22 @@ class DocumentService:
try: try:
# Get document to find its hash # Get document to find its hash
document = await self.document_repository.find_document_by_id(document_id) document = self.document_repository.find_document_by_id(document_id)
if not document: if not document:
return False return False
# Delete the document # Delete the document
deleted = await self.document_repository.delete_document(document_id) deleted = self.document_repository.delete_document(document_id)
if not deleted: if not deleted:
return False return False
# Check if content is orphaned # Check if content is orphaned
remaining_files = await self.document_repository.find_document_by_hash(document.file_hash) remaining_files = self.document_repository.find_document_by_hash(document.file_hash)
# If no other files reference this content, delete it # If no other files reference this content, delete it
if not remaining_files: if not remaining_files:
try: try:
os.remove(self._get_document_path(document.file_hash)) os.remove(self.get_document_path(document.file_hash))
except Exception: except Exception:
pass pass

View File

@@ -32,7 +32,7 @@ class InitializationService:
""" """
self.user_service = user_service self.user_service = user_service
async def ensure_admin_user_exists(self) -> Optional[UserInDB]: def ensure_admin_user_exists(self) -> Optional[UserInDB]:
""" """
Ensure default admin user exists in the system. Ensure default admin user exists in the system.
@@ -48,7 +48,7 @@ class InitializationService:
logger.info("Checking if admin user exists...") logger.info("Checking if admin user exists...")
# Check if any admin user already exists # Check if any admin user already exists
if await self._admin_user_exists(): if self._admin_user_exists():
logger.info("Admin user already exists, skipping creation") logger.info("Admin user already exists, skipping creation")
return None return None
@@ -63,7 +63,7 @@ class InitializationService:
role=UserRole.ADMIN role=UserRole.ADMIN
) )
created_user = await self.user_service.create_user(admin_data) created_user = self.user_service.create_user(admin_data)
logger.info(f"Default admin user created successfully with ID: {created_user.id}") logger.info(f"Default admin user created successfully with ID: {created_user.id}")
logger.warning( logger.warning(
"Default admin user created with username 'admin' and password 'admin'. " "Default admin user created with username 'admin' and password 'admin'. "
@@ -76,7 +76,7 @@ class InitializationService:
logger.error(f"Failed to create default admin user: {str(e)}") logger.error(f"Failed to create default admin user: {str(e)}")
raise Exception(f"Admin user creation failed: {str(e)}") raise Exception(f"Admin user creation failed: {str(e)}")
async def _admin_user_exists(self) -> bool: def _admin_user_exists(self) -> bool:
""" """
Check if any admin user exists in the system. Check if any admin user exists in the system.
@@ -85,7 +85,7 @@ class InitializationService:
""" """
try: try:
# Get all users and check if any have admin role # Get all users and check if any have admin role
users = await self.user_service.list_users(limit=1000) # Reasonable limit for admin check users = self.user_service.list_users(limit=1000) # Reasonable limit for admin check
for user in users: for user in users:
if user.role == UserRole.ADMIN and user.is_active: if user.role == UserRole.ADMIN and user.is_active:
@@ -98,7 +98,7 @@ class InitializationService:
# In case of error, assume admin exists to avoid creating duplicates # In case of error, assume admin exists to avoid creating duplicates
return True return True
async def initialize_application(self) -> dict: def initialize_application(self) -> dict:
""" """
Perform all application initialization tasks. Perform all application initialization tasks.
@@ -118,7 +118,7 @@ class InitializationService:
try: try:
# Ensure admin user exists # Ensure admin user exists
created_admin = await self.ensure_admin_user_exists() created_admin = self.ensure_admin_user_exists()
if created_admin: if created_admin:
initialization_summary["admin_user_created"] = True initialization_summary["admin_user_created"] = True

View File

@@ -31,16 +31,16 @@ class JobService:
self.db = database self.db = database
self.repository = JobRepository(database) self.repository = JobRepository(database)
async def initialize(self): def initialize(self):
await self.repository.initialize() self.repository.initialize()
return self return self
async def create_job(self, file_id: PyObjectId, task_id: Optional[str] = None) -> ProcessingJob: def create_job(self, document_id: PyObjectId, task_id: Optional[str] = None) -> ProcessingJob:
""" """
Create a new processing job. Create a new processing job.
Args: Args:
file_id: Reference to the file document document_id: Reference to the file document
task_id: Optional Celery task UUID task_id: Optional Celery task UUID
Returns: Returns:
@@ -49,9 +49,9 @@ class JobService:
Raises: Raises:
JobRepositoryError: If database operation fails JobRepositoryError: If database operation fails
""" """
return await self.repository.create_job(file_id, task_id) return self.repository.create_job(document_id, task_id)
async def get_job_by_id(self, job_id: PyObjectId) -> ProcessingJob: def get_job_by_id(self, job_id: PyObjectId) -> ProcessingJob:
""" """
Retrieve a job by its ID. Retrieve a job by its ID.
@@ -65,9 +65,9 @@ class JobService:
JobNotFoundError: If job doesn't exist JobNotFoundError: If job doesn't exist
JobRepositoryError: If database operation fails JobRepositoryError: If database operation fails
""" """
return await self.repository.find_job_by_id(job_id) return self.repository.find_job_by_id(job_id)
async def mark_job_as_started(self, job_id: PyObjectId) -> ProcessingJob: def mark_job_as_started(self, job_id: PyObjectId) -> ProcessingJob:
""" """
Mark a job as started (PENDING → PROCESSING). Mark a job as started (PENDING → PROCESSING).
@@ -83,16 +83,16 @@ class JobService:
JobRepositoryError: If database operation fails JobRepositoryError: If database operation fails
""" """
# Get current job to validate transition # Get current job to validate transition
current_job = await self.repository.find_job_by_id(job_id) current_job = self.repository.find_job_by_id(job_id)
# Validate status transition # Validate status transition
if current_job.status != ProcessingStatus.PENDING: if current_job.status != ProcessingStatus.PENDING:
raise InvalidStatusTransitionError(current_job.status, ProcessingStatus.PROCESSING) raise InvalidStatusTransitionError(current_job.status, ProcessingStatus.PROCESSING)
# Update status # Update status
return await self.repository.update_job_status(job_id, ProcessingStatus.PROCESSING) return self.repository.update_job_status(job_id, ProcessingStatus.PROCESSING)
async def mark_job_as_completed(self, job_id: PyObjectId) -> ProcessingJob: def mark_job_as_completed(self, job_id: PyObjectId) -> ProcessingJob:
""" """
Mark a job as completed (PROCESSING → COMPLETED). Mark a job as completed (PROCESSING → COMPLETED).
@@ -108,16 +108,18 @@ class JobService:
JobRepositoryError: If database operation fails JobRepositoryError: If database operation fails
""" """
# Get current job to validate transition # Get current job to validate transition
current_job = await self.repository.find_job_by_id(job_id) current_job = self.repository.find_job_by_id(job_id)
# Validate status transition # Validate status transition
if current_job.status != ProcessingStatus.PROCESSING: if current_job.status in (ProcessingStatus.PENDING,
ProcessingStatus.COMPLETED,
ProcessingStatus.FAILED):
raise InvalidStatusTransitionError(current_job.status, ProcessingStatus.COMPLETED) raise InvalidStatusTransitionError(current_job.status, ProcessingStatus.COMPLETED)
# Update status # Update status
return await self.repository.update_job_status(job_id, ProcessingStatus.COMPLETED) return self.repository.update_job_status(job_id, ProcessingStatus.COMPLETED)
async def mark_job_as_failed( def mark_job_as_failed(
self, self,
job_id: PyObjectId, job_id: PyObjectId,
error_message: Optional[str] = None error_message: Optional[str] = None
@@ -138,20 +140,25 @@ class JobService:
JobRepositoryError: If database operation fails JobRepositoryError: If database operation fails
""" """
# Get current job to validate transition # Get current job to validate transition
current_job = await self.repository.find_job_by_id(job_id) current_job = self.repository.find_job_by_id(job_id)
# Validate status transition # Validate status transition
if current_job.status != ProcessingStatus.PROCESSING: if current_job.status in (ProcessingStatus.PENDING, ProcessingStatus.COMPLETED, ProcessingStatus.FAILED):
raise InvalidStatusTransitionError(current_job.status, ProcessingStatus.FAILED) raise InvalidStatusTransitionError(current_job.status, ProcessingStatus.FAILED)
# Update status with error message # Update status with error message
return await self.repository.update_job_status( return self.repository.update_job_status(
job_id, job_id,
ProcessingStatus.FAILED, ProcessingStatus.FAILED,
error_message error_message
) )
async def delete_job(self, job_id: PyObjectId) -> bool: def update_job_status(self, job_id: PyObjectId,
status: ProcessingStatus,
error_message: str = None) -> ProcessingJob:
return self.repository.update_job_status(job_id, status, error_message)
def delete_job(self, job_id: PyObjectId) -> bool:
""" """
Delete a job from the database. Delete a job from the database.
@@ -164,9 +171,9 @@ class JobService:
Raises: Raises:
JobRepositoryError: If database operation fails JobRepositoryError: If database operation fails
""" """
return await self.repository.delete_job(job_id) return self.repository.delete_job(job_id)
async def get_jobs_by_status(self, status: ProcessingStatus) -> list[ProcessingJob]: def get_jobs_by_status(self, status: ProcessingStatus) -> list[ProcessingJob]:
""" """
Retrieve all jobs with a specific status. Retrieve all jobs with a specific status.
@@ -179,4 +186,4 @@ class JobService:
Raises: Raises:
JobRepositoryError: If database operation fails JobRepositoryError: If database operation fails
""" """
return await self.repository.get_jobs_by_status(status) return self.repository.get_jobs_by_status(status)

View File

@@ -33,11 +33,11 @@ class UserService:
self.user_repository = UserRepository(self.db) self.user_repository = UserRepository(self.db)
self.auth_service = AuthService() self.auth_service = AuthService()
async def initialize(self): def initialize(self):
await self.user_repository.initialize() self.user_repository.initialize()
return self return self
async def create_user(self, user_data: UserCreate | UserCreateNoValidation) -> UserInDB: def create_user(self, user_data: UserCreate | UserCreateNoValidation) -> UserInDB:
""" """
Create a new user with business logic validation. Create a new user with business logic validation.
@@ -60,11 +60,11 @@ class UserService:
raise ValueError(f"User with email '{user_data.email}' already exists") raise ValueError(f"User with email '{user_data.email}' already exists")
try: try:
return await self.user_repository.create_user(user_data) return self.user_repository.create_user(user_data)
except DuplicateKeyError: except DuplicateKeyError:
raise ValueError(f"User with username '{user_data.username}' already exists") raise ValueError(f"User with username '{user_data.username}' already exists")
async def get_user_by_username(self, username: str) -> Optional[UserInDB]: def get_user_by_username(self, username: str) -> Optional[UserInDB]:
""" """
Retrieve user by username. Retrieve user by username.
@@ -74,9 +74,9 @@ class UserService:
Returns: Returns:
UserInDB or None: User if found, None otherwise UserInDB or None: User if found, None otherwise
""" """
return await self.user_repository.find_user_by_username(username) return self.user_repository.find_user_by_username(username)
async def get_user_by_id(self, user_id: str) -> Optional[UserInDB]: def get_user_by_id(self, user_id: str) -> Optional[UserInDB]:
""" """
Retrieve user by ID. Retrieve user by ID.
@@ -86,9 +86,9 @@ class UserService:
Returns: Returns:
UserInDB or None: User if found, None otherwise UserInDB or None: User if found, None otherwise
""" """
return await self.user_repository.find_user_by_id(user_id) return self.user_repository.find_user_by_id(user_id)
async def authenticate_user(self, username: str, password: str) -> Optional[UserInDB]: def authenticate_user(self, username: str, password: str) -> Optional[UserInDB]:
""" """
Authenticate user with username and password. Authenticate user with username and password.
@@ -111,7 +111,7 @@ class UserService:
return user return user
async def update_user(self, user_id: str, user_update: UserUpdate) -> Optional[UserInDB]: def update_user(self, user_id: str, user_update: UserUpdate) -> Optional[UserInDB]:
""" """
Update user information. Update user information.
@@ -137,9 +137,9 @@ class UserService:
if existing_user and str(existing_user.id) != user_id: if existing_user and str(existing_user.id) != user_id:
raise ValueError(f"Email '{user_update.email}' is already taken") raise ValueError(f"Email '{user_update.email}' is already taken")
return await self.user_repository.update_user(user_id, user_update) return self.user_repository.update_user(user_id, user_update)
async def delete_user(self, user_id: str) -> bool: def delete_user(self, user_id: str) -> bool:
""" """
Delete user from system. Delete user from system.
@@ -151,7 +151,7 @@ class UserService:
""" """
return self.user_repository.delete_user(user_id) return self.user_repository.delete_user(user_id)
async def list_users(self, skip: int = 0, limit: int = 100) -> List[UserInDB]: def list_users(self, skip: int = 0, limit: int = 100) -> List[UserInDB]:
""" """
List users with pagination. List users with pagination.
@@ -162,18 +162,18 @@ class UserService:
Returns: Returns:
List[UserInDB]: List of users List[UserInDB]: List of users
""" """
return await self.user_repository.list_users(skip=skip, limit=limit) return self.user_repository.list_users(skip=skip, limit=limit)
async def count_users(self) -> int: def count_users(self) -> int:
""" """
Count total number of users. Count total number of users.
Returns: Returns:
int: Total number of users in system int: Total number of users in system
""" """
return await self.user_repository.count_users() return self.user_repository.count_users()
async def user_exists(self, username: str) -> bool: def user_exists(self, username: str) -> bool:
""" """
Check if user exists by username. Check if user exists by username.
@@ -183,4 +183,19 @@ class UserService:
Returns: Returns:
bool: True if user exists, False otherwise bool: True if user exists, False otherwise
""" """
return await self.user_repository.user_exists(username) return self.user_repository.user_exists(username)
def get_preference(self, user_id: str, preference):
    """Return the stored value of *preference* for the user, or None.

    None is returned both when the user does not exist and when the
    preference has never been set.
    """
    user = self.get_user_by_id(user_id)
    return None if user is None else user.preferences.get(preference)
def set_preference(self, user_id: str, preference, value):
    """Set *preference* to *value* for the user and return the refreshed user.

    Returns None when the user does not exist.
    """
    user = self.get_user_by_id(user_id)
    if user is None:
        return None
    updated_prefs = user.preferences
    updated_prefs[preference] = value
    # Persist the whole preferences mapping, then re-read the user so the
    # caller sees the stored state.
    self.user_repository.update_user(user_id, UserUpdate(preferences=updated_prefs))
    return self.get_user_by_id(user_id)

View File

@@ -0,0 +1,241 @@
import fitz # PyMuPDF
class PDFAnnotator:
    """Create, inspect, and delete annotations on a PDF using PyMuPDF (fitz)."""

    def __init__(self, pdf_path):
        # The document stays open for the object's lifetime; close() releases it.
        self.doc = fitz.open(pdf_path)

    def add_highlight(self, rect, page_num=0, color=(1, 1, 0)):
        """
        Add highlight annotation

        Args:
            rect: (x0, y0, x1, y1) coordinates or fitz.Rect object
            page_num: Page number (0-indexed), default first page
            color: RGB tuple (0-1 range), default yellow

        Returns:
            The created annotation object.
        """
        page = self.doc[page_num]
        annot = page.add_highlight_annot(rect)
        annot.set_colors(stroke=color)
        # update() commits appearance changes to the annotation.
        annot.update()
        return annot

    def add_rectangle(self, rect, page_num=0, color=(1, 0, 0), width=2):
        """
        Add rectangle annotation (border only)

        Args:
            rect: (x0, y0, x1, y1) coordinates or fitz.Rect object
            page_num: Page number (0-indexed), default first page
            color: RGB tuple (0-1 range), default red
            width: Line width in points

        Returns:
            The created annotation object.
        """
        page = self.doc[page_num]
        annot = page.add_rect_annot(rect)
        annot.set_colors(stroke=color)
        annot.set_border(width=width)
        annot.update()
        return annot

    def add_text_note(self, point, text, page_num=0, icon="Note"):
        """
        Add sticky note annotation

        Args:
            point: (x, y) position tuple
            text: Note content string
            page_num: Page number (0-indexed), default first page
            icon: "Note", "Comment", "Help", "Insert", "Key", etc.

        Returns:
            The created annotation object.
        """
        page = self.doc[page_num]
        annot = page.add_text_annot(point, text, icon=icon)
        annot.update()
        return annot

    def add_free_text(self, rect, text, page_num=0, fontsize=12,
                      color=(0, 0, 0)):
        """
        Add free text annotation (visible text box)

        Args:
            rect: (x0, y0, x1, y1) bounding box tuple or fitz.Rect
            text: Text content string
            page_num: Page number (0-indexed), default first page
            fontsize: Font size in points
            color: Text color RGB tuple (0-1 range)

        Returns:
            The created annotation object.
        """
        page = self.doc[page_num]
        annot = page.add_freetext_annot(
            rect,
            text,
            fontsize=fontsize,
            text_color=color
        )
        annot.update()
        return annot

    def add_arrow(self, start_point, end_point, page_num=0,
                  color=(1, 0, 0), width=2):
        """
        Add arrow annotation

        Args:
            start_point: (x, y) tuple for arrow start
            end_point: (x, y) tuple for arrow end
            page_num: Page number (0-indexed), default first page
            color: Arrow color RGB tuple (0-1 range), default red
            width: Line width in points

        Returns:
            The created annotation object.
        """
        page = self.doc[page_num]
        annot = page.add_line_annot(start_point, end_point)
        annot.set_colors(stroke=color)
        annot.set_border(width=width)
        # Line-end styles: 0 = none at start, 1 = ClosedArrow at end.
        annot.set_line_ends(0, 1)
        annot.update()
        return annot

    def add_stamp(self, rect, page_num=0, stamp_type=0):
        """
        Add stamp annotation

        Args:
            rect: (x0, y0, x1, y1) bounding box tuple or fitz.Rect
            page_num: Page number (0-indexed), default first page
            stamp_type: Integer for stamp type:
                        0=Approved, 1=AsIs, 2=Confidential,
                        3=Departmental, 4=Draft, 5=Experimental,
                        6=Expired, 7=Final, 8=ForComment,
                        9=ForPublicRelease, 10=NotApproved, etc.

        Returns:
            The created annotation object.
        """
        page = self.doc[page_num]
        annot = page.add_stamp_annot(rect, stamp=stamp_type)
        annot.update()
        return annot

    def add_redaction(self, rect, page_num=0, fill_color=(0, 0, 0)):
        """
        Add redaction annotation (marks area for redaction)

        Note: Use apply_redactions() to permanently remove content.

        Args:
            rect: (x0, y0, x1, y1) area to redact, tuple or fitz.Rect
            page_num: Page number (0-indexed), default first page
            fill_color: RGB tuple (0-1 range) for redacted area, default black

        Returns:
            The created annotation object.
        """
        page = self.doc[page_num]
        annot = page.add_redact_annot(rect, fill=fill_color)
        annot.update()
        return annot

    def apply_redactions(self, page_num=0, images=2, graphics=2, text=2):
        """
        Apply all redaction annotations on a page (permanent removal)

        Args:
            page_num: Page number (0-indexed), default first page
            images: 2=remove, 1=blank, 0=ignore
            graphics: 2=remove, 1=blank, 0=ignore
            text: 2=remove, 1=blank, 0=ignore

        Returns:
            True if redactions were applied, False otherwise
        """
        page = self.doc[page_num]
        # Type code 12 is PyMuPDF's redaction annotation (PDF_ANNOT_REDACT).
        has_redactions = any(annot.type[0] == 12 for annot in page.annots())
        if has_redactions:
            page.apply_redactions(images=images, graphics=graphics, text=text)
            return True
        return False

    def get_all_annotations(self, page_num=0):
        """
        Retrieve all annotations from a page

        Args:
            page_num: Page number (0-indexed), default first page

        Returns:
            List of dicts with annotation information
        """
        page = self.doc[page_num]
        annotations = []
        for annot in page.annots():
            info = {
                'type': annot.type[1],  # Annotation type name
                'rect': annot.rect,
                'content': annot.info.get('content', ''),
                'author': annot.info.get('title', ''),
                'created': annot.info.get('creationDate', ''),
                'colors': annot.colors
            }
            annotations.append(info)
        return annotations

    def remove_all_annotations(self, page_num=0):
        """
        Remove all annotations from a page

        Args:
            page_num: Page number (0-indexed), default first page
        """
        page = self.doc[page_num]
        # Deleting inside a `for annot in page.annots()` loop mutates the
        # annotation list while iterating and skips entries. Walk the linked
        # list instead: delete_annot returns the next annotation (or None).
        annot = page.first_annot
        while annot:
            annot = page.delete_annot(annot)

    def save(self, output_path):
        """Save the annotated PDF to *output_path*."""
        self.doc.save(output_path)

    def close(self):
        """Close the underlying document and free resources."""
        self.doc.close()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()
# Example usage
if __name__ == "__main__":
    with PDFAnnotator("input.pdf") as pdf:
        # Highlight a text region in yellow
        pdf.add_highlight((100, 100, 300, 120), page_num=0, color=(1, 1, 0))

        # Outline an area with a thick red border
        pdf.add_rectangle((100, 150, 300, 250), page_num=0,
                          color=(1, 0, 0), width=3)

        # Attach a sticky note
        pdf.add_text_note((400, 100), "This is important!",
                          page_num=0, icon="Comment")

        # Stamp a visible text box on the page
        pdf.add_free_text((100, 300, 400, 350), "DRAFT VERSION",
                          page_num=0, fontsize=20, color=(1, 0, 0))

        # Draw a blue arrow
        pdf.add_arrow((450, 100), (500, 200), page_num=0,
                      color=(0, 0, 1), width=2)

        # Apply an "Approved" stamp
        pdf.add_stamp((450, 300, 550, 350), page_num=0, stamp_type=0)

        # Black out sensitive content, then make the removal permanent
        pdf.add_redaction((100, 400, 300, 420), page_num=0)
        pdf.apply_redactions(page_num=0)

        # Summarize every annotation left on the page
        annots = pdf.get_all_annotations(page_num=0)
        print(f"Found {len(annots)} annotations:")
        for a in annots:
            print(f"  - {a['type']} at {a['rect']}")

        # Persist the result
        pdf.save("output_annotated.pdf")

View File

@@ -0,0 +1,210 @@
import datetime
import hashlib
import os
import shutil
import uuid
from abc import ABC, abstractmethod
from pathlib import Path
from typing import Self

import pikepdf
import pypandoc
from PIL import Image
from reportlab.lib.pagesizes import A4
from reportlab.pdfgen import canvas

from tasks.common.converter_utils import detect_file_type
class BaseConverter(ABC):
"""Abstract base class for file converters to PDF."""
def __init__(self, input_path: str, output_dir: str = ".") -> None:
self.input_path = Path(input_path)
self.output_dir = Path(output_dir)
self.output_path = self.output_dir / f"{self.generate_uuid_filename()}.pdf"
def convert(self) -> Self:
"""Convert input file to PDF and return the output path."""
pass
@staticmethod
def generate_uuid_filename() -> str:
"""Generate a unique filename using UUID4."""
return str(uuid.uuid4())
def get_deterministic_date(self) -> str:
"""
Generate a deterministic date based on file content.
This ensures the same file always produces the same PDF.
"""
# Option 1: Use a fixed date
# return "D:20000101000000"
# Option 2: Generate date from content hash (recommended)
with open(self.input_path, 'rb') as f:
content = f.read()
content_hash = hashlib.sha256(content).hexdigest()
# Use first 14 characters of hash to create a valid date
# Format: D:YYYYMMDDHHmmss
hash_int = int(content_hash[:14], 16)
# Create a date between 2000-2099 to keep it reasonable
year = 2000 + (hash_int % 100)
month = 1 + (hash_int % 12)
day = 1 + (hash_int % 28) # Stay safe with 28 days
hour = hash_int % 24
minute = hash_int % 60
second = hash_int % 60
return f"D:{year:04d}{month:02d}{day:02d}{hour:02d}{minute:02d}{second:02d}"
def get_file_creation_date(self):
# Get file creation time (or modification time)
ts = os.path.getctime(self.input_path) # getmtime(self.input_path) for last modification
dt = datetime.datetime.fromtimestamp(ts)
# PDF expects format D:YYYYMMDDHHmmss
creation_date = dt.strftime("D:%Y%m%d%H%M%S")
return creation_date
def clean_pdf(self) -> Self:
"""Remove all non-deterministic metadata from PDF."""
with pikepdf.open(self.output_path, allow_overwriting_input=True) as pdf:
# Remove XMP metadata if it exists
if hasattr(pdf.Root, 'Metadata'):
del pdf.Root.Metadata
# Clear all document info by deleting each key
for key in list(pdf.docinfo.keys()):
del pdf.docinfo[key]
# Set deterministic metadata
pdf.docinfo["/Producer"] = "MyConverter"
pdf.docinfo["/Creator"] = "MyConverter"
pdf.docinfo["/CreationDate"] = self.get_deterministic_date()
pdf.docinfo["/ModDate"] = self.get_deterministic_date()
pdf.docinfo["/Title"] = self.input_path.name
# Save with deterministic IDs
# compress=True ensures consistent compression
# deterministic_id=True (if available) or static_id=True
pdf.save(
self.output_path,
fix_metadata_version=True,
compress_streams=True,
stream_decode_level=pikepdf.StreamDecodeLevel.generalized,
object_stream_mode=pikepdf.ObjectStreamMode.disable,
deterministic_id=True # Use this if pikepdf >= 8.0.0, otherwise use static_id=True
)
return self
class TextToPdfConverter(BaseConverter):
    """Converter for text files to PDF."""

    def convert(self) -> Self:
        """Lay the text file out line by line on A4 pages and return self."""
        pdf_canvas = canvas.Canvas(str(self.output_path), pagesize=A4)

        # Pin the document metadata to deterministic values.
        doc_info = pdf_canvas._doc.info
        doc_info.producer = "MyConverter"
        doc_info.creationDate = self.get_file_creation_date()
        doc_info.title = os.path.basename(self.input_path)

        page_width, page_height = A4
        margin = 50
        line_step = 15

        with open(self.input_path, "r", encoding="utf-8") as source:
            cursor_y = page_height - margin
            for raw_line in source:
                pdf_canvas.drawString(margin, cursor_y, raw_line.strip())
                cursor_y -= line_step
                # Start a fresh page once the cursor reaches the bottom margin.
                if cursor_y < margin:
                    pdf_canvas.showPage()
                    cursor_y = page_height - margin

        pdf_canvas.save()
        return self
class PdfToPdfConverter(BaseConverter):
    """Converter for PDF files to PDF (identity copy)."""

    def convert(self) -> Self:
        """Copy the input PDF to the output path unchanged and return self.

        Uses shutil.copyfile instead of ``os.system("cp ...")``: it is
        portable, raises on failure, and is safe with spaces or shell
        metacharacters in file names (the shell version was injectable).
        """
        shutil.copyfile(self.input_path, self.output_path)
        return self
class ImageToPdfConverter(BaseConverter):
    """Converter for image files to PDF."""

    def convert(self) -> Self:
        """Render the image as a single-page PDF and return self.

        The image is opened in a context manager so the underlying file
        handle is closed promptly (the original leaked it).
        """
        with Image.open(self.input_path) as image:
            # PDF output cannot carry an alpha channel, so force RGB first.
            rgb_image = image.convert("RGB")
            rgb_image.save(self.output_path)
        return self
class WordToPdfConverter(BaseConverter):
    """Converter for Word files (.docx) to PDF using pypandoc."""

    def convert(self) -> Self:
        """Render the Word document to PDF via pandoc and return self."""
        source_file = str(self.input_path)
        target_file = str(self.output_path)
        pypandoc.convert_file(source_file, "pdf", outputfile=target_file)
        return self
# Placeholders for future extensions
class HtmlToPdfConverter(BaseConverter):
    """Placeholder for HTML to PDF converter."""

    def convert(self) -> Self:
        # Not implemented yet; callers should be prepared to catch this.
        raise NotImplementedError("HTML to PDF conversion not implemented.")
class ExcelToPdfConverter(BaseConverter):
    """Placeholder for Excel to PDF converter."""

    def convert(self) -> Self:
        # Not implemented yet; callers should be prepared to catch this.
        raise NotImplementedError("Excel to PDF conversion not implemented.")
class MarkdownToPdfConverter(BaseConverter):
    """Placeholder for Markdown to PDF converter."""

    def convert(self) -> Self:
        # Not implemented yet; callers should be prepared to catch this.
        raise NotImplementedError("Markdown to PDF conversion not implemented.")
def convert_to_pdf(filepath: str, output_dir: str = ".") -> str:
    """
    Convert any supported file to PDF.

    Args:
        filepath (str): Path to the input file.
        output_dir (str): Directory to save the output PDF.

    Returns:
        str: Path to the generated PDF.

    Raises:
        ValueError: If the input file type is not supported.
            (The original docstring claimed ``UnsupportedFileTypeError``,
            but the code raises ``ValueError``.)
    """
    # Map detected file types to their converter classes.
    converter_classes = {
        "text": TextToPdfConverter,
        "image": ImageToPdfConverter,
        "word": WordToPdfConverter,
        "pdf": PdfToPdfConverter,
    }
    file_type = detect_file_type(filepath)
    converter_cls = converter_classes.get(file_type)
    if converter_cls is None:
        raise ValueError(f"Unsupported file type: {file_type}")
    converter = converter_cls(filepath, output_dir=output_dir)
    converter.convert()
    # Normalize metadata so identical inputs yield identical PDFs.
    converter.clean_pdf()
    return str(converter.output_path)

View File

@@ -0,0 +1,167 @@
from pathlib import Path
import fitz # PyMuPDF
class PDFThumbnailGenerator:
    def __init__(self, pdf_path):
        """
        Initialize PDF thumbnail generator

        Args:
            pdf_path: Path to the PDF file (string or Path object)
        """
        self.pdf_path = pdf_path
        self.doc = fitz.open(pdf_path)

    def create_thumbnail(self, output_path, page_num=0, width=200, rotation=0, zoom_factor=1.0):
        """
        Create a thumbnail with zoom and rotation

        Args:
            output_path: Path to save the thumbnail (string or Path)
            page_num: Page number (0-indexed), default first page
            width: Desired width in pixels, default 200
            rotation: Rotation angle in degrees (0, 90, 180, 270), default 0
            zoom_factor: Additional zoom multiplier (1.0 = normal, 2.0 = 2x), default 1.0

        Returns:
            Dict with thumbnail info (width, height, rotation, zoom)
        """
        page = self.doc[page_num]
        # set_rotation mutates the in-memory page; remember the original so we
        # can restore it afterwards. The original code left the rotation
        # applied, silently affecting every subsequent render of this page.
        original_rotation = page.rotation
        page.set_rotation(rotation)
        try:
            # Scale so the rendered page matches the requested pixel width.
            base_zoom = width / page.rect.width
            final_zoom = base_zoom * zoom_factor
            mat = fitz.Matrix(final_zoom, final_zoom)
            # Render page to pixmap (alpha disabled: thumbnails are opaque).
            pix = page.get_pixmap(matrix=mat, alpha=False)
            pix.save(output_path)
        finally:
            page.set_rotation(original_rotation)
        return {
            'width': pix.width,
            'height': pix.height,
            'rotation': rotation,
            'zoom': zoom_factor
        }

    def create_cropped_thumbnail(self, output_path, crop_rect=None, page_num=0, width=200):
        """
        Create a thumbnail of a specific region (zoom on area)

        Args:
            output_path: Path to save the thumbnail (string or Path)
            crop_rect: Tuple (x0, y0, x1, y1) in PDF coordinates for cropping,
                       or None for full page, default None
            page_num: Page number (0-indexed), default first page
            width: Desired width in pixels, default 200

        Returns:
            Tuple (width, height) of the generated thumbnail
        """
        page = self.doc[page_num]
        if crop_rect:
            # Create rectangle for cropping; zoom relative to the crop width.
            rect = fitz.Rect(crop_rect)
            zoom = width / rect.width
        else:
            rect = page.rect
            zoom = width / page.rect.width
        mat = fitz.Matrix(zoom, zoom)
        # Render only the specified rectangle
        pix = page.get_pixmap(matrix=mat, clip=rect)
        pix.save(output_path)
        return pix.width, pix.height

    def get_page_info(self, page_num=0):
        """
        Get information about a specific page

        Args:
            page_num: Page number (0-indexed), default first page

        Returns:
            Dict with page information (width, height, rotation, number, total_pages)
        """
        page = self.doc[page_num]
        return {
            'width': page.rect.width,
            'height': page.rect.height,
            'rotation': page.rotation,
            'number': page_num + 1,  # 1-based for display
            'total_pages': len(self.doc)
        }

    def create_multi_resolution_thumbnails(self, output_folder, page_num=0, sizes=(150, 300, 600)):
        """
        Create multiple thumbnails at different resolutions

        Args:
            output_folder: Folder path to save thumbnails (string or Path)
            page_num: Page number (0-indexed), default first page
            sizes: Iterable of widths in pixels, default (150, 300, 600)

        Returns:
            Dict mapping each size to thumbnail info
        """
        output_folder = Path(output_folder)
        output_folder.mkdir(exist_ok=True, parents=True)
        results = {}
        for size in sizes:
            output_path = output_folder / f"thumb_{size}px.png"
            info = self.create_thumbnail(output_path, page_num=page_num, width=size)
            results[size] = info
        return results

    def close(self):
        """Close the PDF document and free resources"""
        self.doc.close()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()
# Example usage
if __name__ == "__main__":
    # Basic usage with context manager
    with PDFThumbnailGenerator("example.pdf") as gen:
        # Plain 200px-wide thumbnail of the first page
        gen.create_thumbnail("thumb_standard.png", page_num=0, width=200)

        # Same page, rotated a quarter turn
        gen.create_thumbnail("thumb_rotated.png", page_num=0,
                             width=200, rotation=90)

        # Rendered at double zoom
        gen.create_thumbnail("thumb_zoomed.png", page_num=0,
                             width=200, zoom_factor=2.0)

        # Zoomed in on a sub-region (x0, y0, x1, y1)
        gen.create_cropped_thumbnail("thumb_crop.png",
                                     crop_rect=(100, 100, 400, 400),
                                     page_num=0, width=300)

        # Small / medium / large variants in one call
        gen.create_multi_resolution_thumbnails("thumbnails/", page_num=0,
                                               sizes=[150, 300, 600])

        # Inspect the source page
        info = gen.get_page_info(page_num=0)
        print(f"Page info: {info}")

View File

@@ -4,9 +4,10 @@ Password security utilities using bcrypt for secure password hashing.
This module provides secure password hashing and verification functions This module provides secure password hashing and verification functions
using the bcrypt algorithm with automatic salt generation. using the bcrypt algorithm with automatic salt generation.
""" """
import re
import uuid
import bcrypt import bcrypt
from typing import Union
def hash_password(password: str) -> str: def hash_password(password: str) -> str:
@@ -72,3 +73,32 @@ def verify_password(password: str, hashed_password: str) -> bool:
raise RuntimeError(f"Invalid hash format: {str(e)}") raise RuntimeError(f"Invalid hash format: {str(e)}")
except Exception as e: except Exception as e:
raise RuntimeError(f"Failed to verify password: {str(e)}") raise RuntimeError(f"Failed to verify password: {str(e)}")
def generate_uuid_filename() -> str:
    """Generate a unique filename stem from a freshly drawn UUID4."""
    fresh_id = uuid.uuid4()
    return str(fresh_id)
def safe_connection_string(connection_string: str) -> str:
    """
    Mask the password in a MongoDB connection string.

    Args:
        connection_string (str): The complete MongoDB connection string

    Returns:
        str: The connection string with the password replaced by asterisks;
        strings without a ``user:password@`` section are returned unchanged.

    Example:
        >>> safe_connection_string("mongodb://admin:password123@mongodb:27017/mydocmanager?authSource=admin")
        'mongodb://admin:*****@mongodb:27017/mydocmanager?authSource=admin'
    """
    # Format: mongodb://username:password@host:port/database
    # Also matches mongodb+srv:// URLs, which the original pattern missed
    # (leaving those passwords unmasked). Group 2 is the password.
    pattern = r'(mongodb(?:\+srv)?://[^:]+:)([^@]+)(@.*)'
    # Replace password with asterisks
    return re.sub(pattern, r'\1*****\3', connection_string)

View File

@@ -1,12 +1,20 @@
asgiref==3.9.1
bcrypt==4.3.0 bcrypt==4.3.0
celery==5.5.3 celery==5.5.3
email-validator==2.3.0 email-validator==2.3.0
fastapi==0.116.1 fastapi==0.116.1
httptools==0.6.4 httptools==0.6.4
motor==3.7.1 motor==3.7.1
pymongo==4.15.0 pikepdf==9.11.0
pillow==11.3.0
pydantic==2.11.9 pydantic==2.11.9
PyJWT==2.10.1
pymongo==4.15.0
PyMuPDF==1.26.4
pypandoc==1.15
python-multipart==0.0.20
redis==6.4.0 redis==6.4.0
reportlab==4.4.4
uvicorn==0.35.0 uvicorn==0.35.0
python-magic==0.4.27 python-magic==0.4.27
watchdog==6.0.0 watchdog==6.0.0

View File

@@ -0,0 +1,41 @@
# Dependencies
node_modules
npm-debug.log*
yarn-debug.log*
yarn-error.log*
# Build outputs
dist
build
# Environment files
.env.local
.env.development.local
.env.test.local
.env.production.local
# IDE files
.vscode
.idea
*.swp
*.swo
# OS generated files
.DS_Store
.DS_Store?
._*
.Spotlight-V100
.Trashes
ehthumbs.db
Thumbs.db
# Git
.git
.gitignore
# Docker
Dockerfile
.dockerignore
# Logs
*.log

20
src/frontend/Dockerfile Normal file
View File

@@ -0,0 +1,20 @@
# Use Node.js 20 Alpine for lightweight container
FROM node:20-alpine
# Set working directory
WORKDIR /app
# Copy package.json and package-lock.json (if available)
COPY package*.json ./
# Install dependencies
RUN npm install
# Copy source code
COPY . .
# Expose Vite default port
EXPOSE 5173
# Start development server with host 0.0.0.0 to accept external connections
CMD ["npm", "run", "dev", "--", "--host", "0.0.0.0", "--port", "5173"]

View File

@@ -1,12 +1,93 @@
# React + Vite
This template provides a minimal setup to get React working in Vite with HMR and some ESLint rules. # MyDocManager Frontend
Currently, two official plugins are available: ## Overview
MyDocManager Frontend is a modern web application built with React and Vite that serves as the user interface for the MyDocManager document management system. The application provides a seamless experience for users to manage, process, and organize their documents with an intuitive and responsive interface.
- [@vitejs/plugin-react](https://github.com/vitejs/vite-plugin-react/blob/main/packages/plugin-react) uses [Babel](https://babeljs.io/) for Fast Refresh ## Project Structure
- [@vitejs/plugin-react-swc](https://github.com/vitejs/vite-plugin-react/blob/main/packages/plugin-react-swc) uses [SWC](https://swc.rs/) for Fast Refresh frontend/
├── public/ # Public assets and static files
├── src/ # Source code
│ ├── assets/ # Icons, images, and other static assets
│ ├── components/ # Reusable UI components
│ │ ├── auth/ # Authentication-related components
│ │ └── common/ # Shared components (Header, Layout, etc.)
│ ├── contexts/ # React contexts for state management
│ ├── hooks/ # Custom React hooks
│ ├── pages/ # Page components representing full views
│ ├── services/ # API service interfaces
│ └── utils/ # Utility functions and helpers
├── Dockerfile # Container configuration for deployment
├── package.json # Dependencies and scripts
├── tailwind.config.js # Tailwind CSS configuration
└── vite.config.js # Vite bundler configuration
## Expanding the ESLint configuration
If you are developing a production application, we recommend using TypeScript with type-aware lint rules enabled. Check out the [TS template](https://github.com/vitejs/vite/tree/main/packages/create-vite/template-react-ts) for information on how to integrate TypeScript and [`typescript-eslint`](https://typescript-eslint.io) in your project.
## Key Components
### Authentication
- **AuthContext**: Provides authentication state and methods throughout the application
- **AuthLayout**: Layout wrapper specifically for authentication screens
- **LoginForm**: Form component for user authentication
- **ProtectedRoute**: Route guard that ensures authenticated access to protected pages
### UI Components
- **Layout**: Main application layout structure with menu and content areas
- **Header**: Application header with navigation and user controls
- **Menu**: Side navigation menu with application links
- **ThemeSwitcher**: Toggle for switching between light and dark themes
### Pages
- **LoginPage**: User authentication page
- **DashboardPage**: Main dashboard view for authenticated users
### Services
- **authService**: Handles API communication for authentication operations
- **api**: Base API utility for making HTTP requests to the backend
## Getting Started
### Prerequisites
- Node.js (latest LTS version)
- npm or yarn package manager
### Installation
1. Clone the repository
2. Navigate to the frontend directory
3. Install dependencies:
```
npm install
```
### Development
Run the development server:
```
npm run dev
```
This will start the application in development mode at http://localhost:5173
### Building for Production
Create a production build:
```
npm run build
```
## Technologies
- React 19.1.1
- Vite 7.1.2
- Tailwind CSS 4.1.13
- DaisyUI 5.1.24
- React Router 7.9.3
- Axios for API requests
## Features
- Responsive design with Tailwind CSS
- Authentication and authorization
- Light/dark theme support
- Document management interface
- Secure API communication
## Project Integration
This frontend application works in conjunction with the backend services and workers defined in other parts of the MyDocManager project to provide a complete document management solution.

View File

@@ -1,10 +1,10 @@
<!doctype html> <!doctype html>
<html lang="en"> <html lang="en" data-theme="dark">
<head> <head>
<meta charset="UTF-8" /> <meta charset="UTF-8" />
<link rel="icon" type="image/svg+xml" href="/vite.svg" /> <link rel="icon" type="image/svg+xml" href="/vite.svg" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" /> <meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Vite + React</title> <title>My Documents Manager</title>
</head> </head>
<body> <body>
<div id="root"></div> <div id="root"></div>

File diff suppressed because it is too large Load Diff

View File

@@ -10,18 +10,26 @@
"preview": "vite preview" "preview": "vite preview"
}, },
"dependencies": { "dependencies": {
"@tailwindcss/vite": "^4.1.13",
"axios": "^1.12.2",
"react": "^19.1.1", "react": "^19.1.1",
"react-dom": "^19.1.1" "react-dom": "^19.1.1",
"react-icons": "^5.5.0",
"react-router-dom": "^7.9.3"
}, },
"devDependencies": { "devDependencies": {
"@eslint/js": "^9.33.0", "@eslint/js": "^9.33.0",
"@types/react": "^19.1.10", "@types/react": "^19.1.10",
"@types/react-dom": "^19.1.7", "@types/react-dom": "^19.1.7",
"@vitejs/plugin-react": "^5.0.0", "@vitejs/plugin-react": "^5.0.0",
"autoprefixer": "^10.4.21",
"daisyui": "^5.1.24",
"eslint": "^9.33.0", "eslint": "^9.33.0",
"eslint-plugin-react-hooks": "^5.2.0", "eslint-plugin-react-hooks": "^5.2.0",
"eslint-plugin-react-refresh": "^0.4.20", "eslint-plugin-react-refresh": "^0.4.20",
"globals": "^16.3.0", "globals": "^16.3.0",
"postcss": "^8.5.6",
"tailwindcss": "^4.1.13",
"vite": "^7.1.2" "vite": "^7.1.2"
} }
} }

View File

@@ -1,42 +1,6 @@
@import "tailwindcss";
#root { #root {
max-width: 1280px; max-width: 1280px;
margin: 0 auto; margin: 0 auto;
padding: 2rem;
text-align: center;
}
.logo {
height: 6em;
padding: 1.5em;
will-change: filter;
transition: filter 300ms;
}
.logo:hover {
filter: drop-shadow(0 0 2em #646cffaa);
}
.logo.react:hover {
filter: drop-shadow(0 0 2em #61dafbaa);
}
@keyframes logo-spin {
from {
transform: rotate(0deg);
}
to {
transform: rotate(360deg);
}
}
@media (prefers-reduced-motion: no-preference) {
a:nth-of-type(2) .logo {
animation: logo-spin infinite 20s linear;
}
}
.card {
padding: 2em;
}
.read-the-docs {
color: #888;
} }

View File

@@ -1,35 +1,36 @@
import { useState } from 'react' import { BrowserRouter as Router, Routes, Route, Navigate } from 'react-router-dom';
import reactLogo from './assets/react.svg' import { AuthProvider } from './contexts/AuthContext';
import viteLogo from '/vite.svg' import ProtectedRoute from './components/common/ProtectedRoute';
import './App.css' import Layout from './components/common/Layout';
import LoginPage from './pages/LoginPage';
import DashboardPage from './pages/DashboardPage';
import DocumentsPage from './pages/DocumentsPage';
function App() { function App() {
const [count, setCount] = useState(0)
return ( return (
<> <AuthProvider>
<div> <Router>
<a href="https://vite.dev" target="_blank"> <div className="App">
<img src={viteLogo} className="logo" alt="Vite logo" /> <Routes>
</a> {/* Public Routes */}
<a href="https://react.dev" target="_blank"> <Route path="/login" element={<LoginPage />} />
<img src={reactLogo} className="logo react" alt="React logo" />
</a> {/* Protected Routes */}
<Route path="/" element={<ProtectedRoute><Layout /></ProtectedRoute>}>
<Route index element={<Navigate to="/documents" replace />} />
<Route path="documents" element={<DocumentsPage />} />
<Route path="dashboard" element={<DashboardPage />} />
<Route path="documents" element={<div>Documents Page - Coming Soon</div>} />
<Route path="users" element={<div>User Management - Coming Soon</div>} />
</Route>
{/* Catch all route */}
<Route path="*" element={<Navigate to="/dashboard" replace />} />
</Routes>
</div> </div>
<h1>Vite + React</h1> </Router>
<div className="card"> </AuthProvider>
<button onClick={() => setCount((count) => count + 1)}> );
count is {count}
</button>
<p>
Edit <code>src/App.jsx</code> and save to test HMR
</p>
</div>
<p className="read-the-docs">
Click on the Vite and React logos to learn more
</p>
</>
)
} }
export default App export default App;

View File

@@ -0,0 +1,35 @@
// src/assets/icons.jsx
// Shared inline SVG icon elements for the theme switcher.
// NOTE: these are pre-built JSX *elements*, not components — render them as
// `{SunIcon}` / `{MoonIcon}`, not `<SunIcon />`.

// Sun icon — rendered by ThemeSwitcher while the current theme is "dark"
// (clicking it switches back to light).
export const SunIcon = (
    <svg
        xmlns="http://www.w3.org/2000/svg"
        className="h-6 w-6"
        fill="none"
        viewBox="0 0 24 24"
        stroke="currentColor"
    >
        <path
            strokeLinecap="round"
            strokeLinejoin="round"
            strokeWidth="2"
            d="M12 3v1m0 16v1m8.66-9h-1M4.34 12h-1m15.36 6.36l-.7-.7M6.34 6.34l-.7-.7m12.02 12.02l-.7-.7M6.34 17.66l-.7-.7M16 12a4 4 0 11-8 0 4 4 0 018 0z"
        />
    </svg>
);

// Crescent-moon icon — rendered by ThemeSwitcher while the theme is "light".
export const MoonIcon = (
    <svg
        xmlns="http://www.w3.org/2000/svg"
        className="h-6 w-6"
        fill="none"
        viewBox="0 0 24 24"
        stroke="currentColor"
    >
        <path
            strokeLinecap="round"
            strokeLinejoin="round"
            strokeWidth="2"
            d="M21 12.79A9 9 0 1111.21 3a7 7 0 0010.79 9.79z"
        />
    </svg>
);

View File

@@ -0,0 +1,41 @@
import React from 'react';
import ThemeSwitcher from "../common/ThemeSwither.jsx";
/**
 * AuthHeader — top navbar shown on authentication pages.
 * Displays the application title and a ThemeSwitcher toggle.
 * (The previous comment here described AuthLayout; corrected.)
 */
const AuthHeader = () => {
    return (
        <div className="navbar bg-base-100 shadow-lg">
            <div className="navbar-start">
                <h1 className="text-xl font-bold">MyDocManager</h1>
            </div>
            <div className="navbar-end">
                <ThemeSwitcher/>
            </div>
        </div>
    )
}
/**
 * AuthLayout component for authentication pages.
 * Renders the AuthHeader navbar and centers its children over a gradient
 * background (used to wrap the login form).
 *
 * @param {Object} props - Component props
 * @param {React.ReactNode} props.children - Child components to render
 */
function AuthLayout({children}) {
    return (
        <div className="min-h-screen bg-gradient-to-br from-primary/10 via-base-200 to-secondary/10">
            <AuthHeader/>
            {/* Main container with flex centering */}
            <div className="min-h-screen flex items-center justify-center p-4">
                {/* Content wrapper for responsive spacing */}
                <div>
                    {children}
                </div>
            </div>
        </div>
    );
}
export default AuthLayout;

View File

@@ -0,0 +1,202 @@
import React, {useEffect, useState} from 'react';
import {useAuth} from '../../contexts/AuthContext';
/**
* LoginForm component with DaisyUI styling
* Handles user authentication with form validation and error display
*/
function LoginForm() {
const {login, loading, error, clearError} = useAuth();
const [formData, setFormData] = useState({
username: '',
password: '',
});
const [formErrors, setFormErrors] = useState({});
// Clear errors when component mounts or form data changes
useEffect(() => {
if (error) {
const timer = setTimeout(() => {
clearError();
}, 5000); // Clear error after 5 seconds
return () => clearTimeout(timer);
}
}, [error, clearError]);
/**
* Handle input changes and clear related errors
* @param {Event} e - Input change event
*/
const handleInputChange = (e) => {
const {name, value} = e.target;
setFormData(prev => ({
...prev,
[name]: value,
}));
// Clear field error when user starts typing
if (formErrors[name]) {
setFormErrors(prev => ({
...prev,
[name]: '',
}));
}
// Clear global error when user modifies form
if (error) {
clearError();
}
};
/**
* Validate form data before submission
* @returns {boolean} True if form is valid
*/
const validateForm = () => {
const errors = {};
if (!formData.username.trim()) {
errors.username = 'Username is required';
}
if (!formData.password.trim()) {
errors.password = 'Password is required';
} else if (formData.password.length < 3) {
errors.password = 'Password must be at least 3 characters';
}
setFormErrors(errors);
return Object.keys(errors).length === 0;
};
/**
* Handle form submission
* @param {Event} e - Form submission event
*/
const handleSubmit = async (e) => {
e.preventDefault();
if (!validateForm()) {
return;
}
const success = await login(formData.username, formData.password);
if (success) {
// Reset form on successful login
setFormData({username: '', password: ''});
setFormErrors({});
}
};
return (
<div className="card max-w-md shadow-xl bg-base-100">
<div className="card-body">
{/* Card Header */}
<div className="text-center mb-6">
<p className="text-base-content/70 mt-2">Sign in to your account</p>
</div>
{/* Global Error Alert */}
{error && (
<div className="alert alert-error mb-4">
<svg
xmlns="http://www.w3.org/2000/svg"
className="stroke-current shrink-0 h-6 w-6"
fill="none"
viewBox="0 0 24 24"
>
<path
strokeLinecap="round"
strokeLinejoin="round"
strokeWidth="2"
d="M10 14l2-2m0 0l2-2m-2 2l-2-2m2 2l2 2m7-2a9 9 0 11-18 0 9 9 0 0118 0z"
/>
</svg>
<span>{error}</span>
</div>
)}
{/* Login Form */}
<form onSubmit={handleSubmit}>
{/* Username Field */}
<div id="username">
<label className="label">
<span className="label-text font-medium">Username</span>
</label>
<input
type="text"
name="username"
value={formData.username}
onChange={handleInputChange}
placeholder="Enter your username"
className={`input input-bordered w-full${
formErrors.username ? 'input-error' : ''
}`}
disabled={loading}
autoComplete="username"
/>
{formErrors.username && (
<label className="label">
<span className="label-text-alt text-error">{formErrors.username}</span>
</label>
)}
</div>
{/* Password Field */}
<div id="password">
<label className="label">
<span className="label-text font-medium">Password</span>
</label>
<input
type="password"
name="password"
value={formData.password}
onChange={handleInputChange}
placeholder="Enter your password"
className={`input input-bordered ${
formErrors.password ? 'input-error' : ''
}`}
disabled={loading}
autoComplete="current-password"
/>
{formErrors.password && (
<label className="label">
<span className="label-text-alt text-error">{formErrors.password}</span>
</label>
)}
</div>
{/* Submit Button */}
<div className="form-control mt-6">
<button
type="submit"
className={`btn btn-primary w-1/3 btn-hover-effect ${loading ? 'loading' : ''}`}
disabled={loading}
>
{loading ? (
<>
<span className="loading loading-spinner loading-sm"></span>
Signing in...
</>
) : (
'Sign In'
)}
</button>
</div>
</form>
{/* Additional Info */}
<div className="text-center mt-4">
<p className="text-sm text-base-content/60">
Don't have an account? Contact your administrator.
</p>
</div>
</div>
</div>
);
}
export default LoginForm;

View File

@@ -0,0 +1,49 @@
import {useAuth} from '../../hooks/useAuth';
import {useNavigate} from 'react-router-dom';
import ThemeSwitcher from "./ThemeSwither.jsx";
import React from "react";
/**
 * Header — main application navbar for authenticated pages.
 * Shows the app title plus an avatar dropdown with profile info, settings,
 * a theme switcher, and logout.
 *
 * NOTE(review): this file imports useAuth from '../../hooks/useAuth' while
 * sibling components import it from '../../contexts/AuthContext' — confirm
 * both paths resolve to the same hook.
 */
const Header = () => {
    const {user, logout} = useAuth();
    const navigate = useNavigate();

    // Log out through the auth context, then send the user to the login page.
    const handleLogout = async () => {
        await logout();
        navigate('/login');
    };

    return (
        <div className="navbar bg-base-100">
            <div className="navbar-start">
                <h1 className="text-xl font-bold">MyDocManager</h1>
            </div>
            <div className="navbar-end">
                <div className="dropdown dropdown-end">
                    {/* Avatar button showing the user's first initial */}
                    <div tabIndex={0} role="button" className="btn btn-ghost btn-circle avatar">
                        <div className="w-10 rounded-full bg-primary text-primary-content flex items-center justify-center">
                            <span className="text-sm font-medium">
                                {user?.username?.charAt(0).toUpperCase()}
                            </span>
                        </div>
                    </div>
                    <ul tabIndex={0} className="menu menu-sm dropdown-content bg-base-100 rounded-box z-[1] mt-3 w-52 p-2 shadow">
                        <li>
                            <div className="justify-between">
                                Profile
                                <span className="badge badge-sm">{user?.role}</span>
                            </div>
                        </li>
                        <li><a>Settings</a></li>
                        <li><ThemeSwitcher/></li>
                        <li>
                            <button onClick={handleLogout}>Logout</button>
                        </li>
                    </ul>
                </div>
            </div>
        </div>
    );
};
export default Header;

View File

@@ -0,0 +1,24 @@
import Header from './Header';
import {Outlet} from 'react-router-dom';
import Menu from "./Menu.jsx";
import styles from './Layout.module.css';
/**
 * Layout — top-level frame for authenticated pages: Header on top, Menu in a
 * fixed-width sidebar, and the routed page rendered through <Outlet/>.
 * Sizing and scroll behaviour come from Layout.module.css.
 */
const Layout = () => {
    return (
        <div className={styles.layoutContainer}>
            <Header/>
            <div className="flex flex-1 overflow-hidden">
                {/* Sidebar navigation */}
                <aside className={styles.sidebar}>
                    <Menu/>
                </aside>
                {/* Scrollable main content area */}
                <main className={styles.mainContent}>
                    <div className={styles.mainContentInner}>
                        <Outlet/>
                    </div>
                </main>
            </div>
        </div>
    );
};
export default Layout;

View File

@@ -0,0 +1,36 @@
/* Layout Container — full-viewport column: header on top, content below */
.layoutContainer {
    height: 100vh;
    display: flex;
    flex-direction: column;
    background-color: var(--color-base-200);
}

/* Sidebar — fixed width with its own vertical scrollbar */
.sidebar {
    width: 16rem; /* 16rem = 256px, the equivalent of Tailwind's w-64 */
    background-color: var(--color-base-100);
    box-shadow: 0 10px 15px -3px rgb(0 0 0 / 0.1), 0 4px 6px -4px rgb(0 0 0 / 0.1);
    overflow-y: auto;
}

/* Main Content Area */
.mainContent {
    flex: 1;
    display: flex;
    flex-direction: column;
    min-height: 0; /* Important for flex to work properly with scrolling */
}

/* Main Content Inner Container — horizontally centered, capped at 80rem */
.mainContentInner {
    max-width: 80rem; /* container max-width */
    margin-left: auto;
    margin-right: auto;
    padding: 0.5rem 1rem;
    flex: 1;
    display: flex;
    flex-direction: column;
    min-height: 0; /* Important for flex to work properly with scrolling */
}

View File

@@ -0,0 +1,18 @@
import {FaBuffer, FaPlus} from "react-icons/fa6";
import { Link } from "react-router-dom";
const Menu = () => {
return (
<div className="p-4">
<ul className="menu">
<li className="menu-title">Exploration</li>
<li><Link to="/dashboard"><FaBuffer/>Dashboard</Link></li>
<li><Link to="/documents"><FaBuffer/>To Review</Link></li>
<li className="menu-title mt-4">Catégories</li>
<li><a><i className="fas fa-plus"></i>Item</a></li>
</ul>
</div>
)
}
export default Menu;

View File

@@ -0,0 +1,69 @@
import React from 'react';
import {Navigate, useLocation} from 'react-router-dom';
import {useAuth} from '../../contexts/AuthContext';
/**
 * ProtectedRoute component to guard routes that require authentication.
 * Redirects to login if user is not authenticated, preserving the intended
 * destination in location state so the login flow can navigate back to it.
 *
 * @param {Object} props - Component props
 * @param {React.ReactNode} props.children - Child components to render if authenticated
 * @param {string[]} props.allowedRoles - Roles allowed to access this route
 *        (optional; an empty array means "any authenticated user")
 */
function ProtectedRoute({children, allowedRoles = []}) {
    const {isAuthenticated, loading, user} = useAuth();
    const location = useLocation();

    // Show loading spinner while checking authentication
    if (loading) {
        return (
            <div className="min-h-screen flex items-center justify-center bg-base-200">
                <div className="text-center">
                    <span className="loading loading-spinner loading-lg text-primary"></span>
                    <p className="text-base-content/70 mt-4">Checking authentication...</p>
                </div>
            </div>
        );
    }

    // Redirect to login if not authenticated; `from` carries the originally
    // requested location for a post-login redirect.
    if (!isAuthenticated) {
        return (
            <Navigate
                to="/login"
                state={{from: location}}
                replace
            />
        );
    }

    // Check role-based access if allowedRoles is specified
    if (allowedRoles.length > 0 && user && !allowedRoles.includes(user.role)) {
        return (
            <div className="min-h-screen flex items-center justify-center bg-base-200">
                <div className="card w-full max-w-md shadow-xl bg-base-100">
                    <div className="card-body text-center">
                        <div className="text-6xl mb-4">🚫</div>
                        <h2 className="card-title justify-center text-error">Access Denied</h2>
                        <p className="text-base-content/70 mb-4">
                            You don't have permission to access this page.
                        </p>
                        <div className="card-actions justify-center">
                            <button
                                className="btn btn-primary"
                                onClick={() => window.history.back()}
                            >
                                Go Back
                            </button>
                        </div>
                    </div>
                </div>
            </div>
        );
    }

    // User is authenticated and authorized, render children
    return children;
}
export default ProtectedRoute;

View File

@@ -0,0 +1,29 @@
import {useEffect, useState} from "react";
import {MoonIcon, SunIcon} from "../../assets/icons.jsx";
function ThemeSwitcher() {
// State to store current theme
const [theme, setTheme] = useState("light");
// When theme changes, apply it to <html data-theme="">
useEffect(() => {
document.querySelector("html").setAttribute("data-theme", theme);
}, [theme]);
// Toggle between light and dark
const toggleTheme = () => {
setTheme(theme === "light" ? "dark" : "light");
};
return (
<button
onClick={toggleTheme}
className="btn btn-ghost btn-circle"
>
{theme === "light" ? MoonIcon : SunIcon}
</button>
);
}
export default ThemeSwitcher;

View File

@@ -0,0 +1,68 @@
/**
* DeleteConfirmModal Component
* Modal dialog to confirm document deletion
*/
import React from 'react';
/**
 * DeleteConfirmModal component
 * Modal dialog to confirm document deletion.
 *
 * NOTE(review): the `document` prop shadows the global DOM `document` inside
 * this component; here it is a plain data object with a `name` field.
 *
 * @param {Object} props
 * @param {boolean} props.isOpen - Whether the modal is open
 * @param {Object|null} props.document - Document to delete
 * @param {function(): void} props.onClose - Callback when modal is closed
 * @param {function(): void} props.onConfirm - Callback when deletion is confirmed
 * @param {boolean} props.isDeleting - Whether deletion is in progress
 * @returns {JSX.Element}
 */
const DeleteConfirmModal = ({
    isOpen,
    document,
    onClose,
    onConfirm,
    isDeleting = false
}) => {
    // Render nothing unless open and a target document is provided.
    if (!isOpen || !document) return null;

    return (
        <dialog className="modal modal-open">
            <div className="modal-box">
                <h3 className="font-bold text-lg">Confirm Deletion</h3>
                <p className="py-4">
                    Are you sure you want to delete <span className="font-semibold">"{document.name}"</span>?
                </p>
                <p className="text-sm text-gray-500">
                    This action cannot be undone.
                </p>
                <div className="modal-action">
                    <button
                        className="btn btn-ghost"
                        onClick={onClose}
                        disabled={isDeleting}
                    >
                        Cancel
                    </button>
                    <button
                        className="btn btn-error"
                        onClick={onConfirm}
                        disabled={isDeleting}
                    >
                        {isDeleting ? (
                            <>
                                <span className="loading loading-spinner loading-sm"></span>
                                Deleting...
                            </>
                        ) : (
                            'Delete'
                        )}
                    </button>
                </div>
            </div>
            {/* Clicking the backdrop also closes the modal (unless deleting) */}
            <form method="dialog" className="modal-backdrop" onClick={onClose}>
                <button disabled={isDeleting}>close</button>
            </form>
        </dialog>
    );
};
export default DeleteConfirmModal;

View File

@@ -0,0 +1,285 @@
/**
* DocumentCard Component
* Displays a document as a DaisyUI card with thumbnail and metadata
* Supports different view modes: small, large, and detail
*/
import React, {memo, useState, useEffect} from 'react';
import {API_BASE_URL} from "../../utils/api.js";
/**
 * Formats file size to human-readable format.
 * @param {number} bytes - File size in bytes (non-negative)
 * @returns {string} Formatted file size, e.g. "1.5 KB"
 */
const formatFileSize = (bytes) => {
    if (bytes === 0) return '0 Bytes';
    const k = 1024;
    const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB'];
    // FIX: clamp the unit index — sizes >= 1024 GB used to index past the
    // end of the table and render "undefined" as the unit.
    const i = Math.min(Math.floor(Math.log(bytes) / Math.log(k)), sizes.length - 1);
    return Math.round((bytes / Math.pow(k, i)) * 100) / 100 + ' ' + sizes[i];
};
/**
 * Formats an ISO date string as a short, en-US localized date.
 * @param {string} dateString - ISO date string
 * @returns {string} Formatted date, e.g. "Jan 15, 2024"
 */
const formatDate = (dateString) => {
    const shortDateOptions = {
        year: 'numeric',
        month: 'short',
        day: 'numeric'
    };
    const parsed = new Date(dateString);
    return parsed.toLocaleDateString('en-US', shortDateOptions);
};
/**
 * Builds full URL from relative path.
 * @param {string} relativePath - Relative API path (e.g. "/api/thumbnails/1")
 * @returns {string} Full URL, or '' when relativePath is falsy
 */
const buildFullUrl = (relativePath) => {
    if (!relativePath) return '';
    // Prefer the Vite env override, fall back to the app-wide constant.
    // NOTE(review): API_BASE_URL may itself derive from the same env var —
    // confirm this fallback chain is intentional and not redundant.
    const baseUrl = import.meta.env.VITE_API_BASE_URL || API_BASE_URL;
    return `${baseUrl}${relativePath}`;
};
/**
 * Hook to load protected images with a bearer token.
 * Fetches the image with an Authorization header, exposes it as an object
 * URL, and revokes that URL on cleanup.
 *
 * NOTE(review): assumes the JWT is stored under localStorage "access_token" —
 * confirm against the auth service.
 *
 * @param {string} url - Image URL (relative API path)
 * @returns {Object} { imageSrc, loading, error }
 */
const useProtectedImage = (url) => {
    const [imageSrc, setImageSrc] = useState(null);
    const [loading, setLoading] = useState(true);
    const [error, setError] = useState(false);

    useEffect(() => {
        // FIX: reset state whenever the URL changes — previously a stale
        // image/error from the old URL stayed visible and `loading` was
        // never set back to true.
        setImageSrc(null);
        setError(false);
        if (!url) {
            setLoading(false);
            return;
        }
        setLoading(true);

        let objectUrl;
        // FIX: guard against state updates after unmount or URL change
        // (the async fetch could otherwise race the effect cleanup).
        let cancelled = false;

        const fetchImage = async () => {
            try {
                const token = localStorage.getItem('access_token');
                const fullUrl = buildFullUrl(url);
                const response = await fetch(fullUrl, {
                    headers: {
                        'Authorization': `Bearer ${token}`
                    }
                });
                if (!response.ok) {
                    throw new Error('Failed to load image');
                }
                const blob = await response.blob();
                if (cancelled) return;
                objectUrl = URL.createObjectURL(blob);
                setImageSrc(objectUrl);
                setLoading(false);
            } catch (err) {
                if (cancelled) return;
                console.error('Error loading thumbnail:', err);
                setError(true);
                setLoading(false);
            }
        };
        fetchImage();

        // Cleanup: stop pending updates and revoke the object URL
        return () => {
            cancelled = true;
            if (objectUrl) {
                URL.revokeObjectURL(objectUrl);
            }
        };
    }, [url]);

    return { imageSrc, loading, error };
};
/**
 * DocumentCard component — a DaisyUI card for one document: protected
 * thumbnail, hover edit/delete actions, and metadata that grows richer with
 * the view mode (small → large → detail).
 *
 * NOTE(review): the `document` prop shadows the global DOM `document` inside
 * this component; here it is a plain data object.
 * NOTE(review): `tags` and `categories` are used with .slice/.map/.length and
 * are assumed to always be arrays — confirm the API never omits them.
 *
 * @param {Object} props
 * @param {Object} props.document - Document object
 * @param {'small'|'large'|'detail'} props.viewMode - Current view mode
 * @param {function(): void} props.onEdit - Callback when edit is clicked
 * @param {function(): void} props.onDelete - Callback when delete is clicked
 * @returns {JSX.Element}
 */
const DocumentCard = memo(({document, viewMode, onEdit, onDelete}) => {
    const {name, originalFileType, thumbnailUrl, pageCount, fileSize, createdAt, tags, categories} = document;

    // Load protected image (fetched with bearer token, exposed as object URL)
    const { imageSrc, loading, error } = useProtectedImage(thumbnailUrl);

    // Determine card classes based on view mode
    // NOTE(review): all three cases currently return the same classes —
    // placeholder for future per-mode styling?
    const getCardClasses = () => {
        const baseClasses = 'card bg-base-100 shadow-xl hover:shadow-2xl transition-shadow group relative';
        switch (viewMode) {
            case 'small':
                return `${baseClasses} w-full`;
            case 'large':
                return `${baseClasses} w-full`;
            case 'detail':
                return `${baseClasses} w-full`;
            default:
                return baseClasses;
        }
    };

    // Render thumbnail with hover actions (skeleton while loading, fallback
    // on error, otherwise the fetched image)
    const renderThumbnail = () => {
        const heightClass = viewMode === 'small' ? 'h-48' : viewMode === 'large' ? 'h-64' : 'h-64';
        return (
            <figure className="relative overflow-hidden">
                {loading ? (
                    <div className={`w-[200px] ${heightClass} bg-gray-200 animate-pulse flex items-center justify-center`}>
                        <svg className="w-8 h-8 text-gray-400" fill="none" stroke="currentColor" viewBox="0 0 24 24">
                            <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M4 16l4.586-4.586a2 2 0 012.828 0L16 16m-2-2l1.586-1.586a2 2 0 012.828 0L20 14m-6-6h.01M6 20h12a2 2 0 002-2V6a2 2 0 00-2-2H6a2 2 0 00-2 2v12a2 2 0 002 2z" />
                        </svg>
                    </div>
                ) : error ? (
                    <div className={`w-[200px] ${heightClass} bg-gray-300 flex flex-col items-center justify-center`}>
                        <svg className="w-8 h-8 text-gray-500 mb-2" fill="none" stroke="currentColor" viewBox="0 0 24 24">
                            <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M12 8v4m0 4h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z" />
                        </svg>
                        <span className="text-gray-500 text-xs">Failed to load</span>
                    </div>
                ) : (
                    <img
                        src={imageSrc}
                        alt={`${name}`}
                        className={`object-cover ${heightClass}`}
                    />
                )}
                {/* Hover overlay with actions */}
                <div className="absolute top-2 right-2 flex gap-2 opacity-0 group-hover:opacity-100 transition-opacity">
                    <button
                        className="btn btn-sm btn-circle btn-primary"
                        onClick={onEdit}
                        aria-label="Edit document"
                        title="Edit"
                    >
                        <svg xmlns="http://www.w3.org/2000/svg" className="h-4 w-4" fill="none" viewBox="0 0 24 24"
                             stroke="currentColor">
                            <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2}
                                  d="M11 5H6a2 2 0 00-2 2v11a2 2 0 002 2h11a2 2 0 002-2v-5m-1.414-9.414a2 2 0 112.828 2.828L11.828 15H9v-2.828l8.586-8.586z"/>
                        </svg>
                    </button>
                    <button
                        className="btn btn-sm btn-circle btn-error"
                        onClick={onDelete}
                        aria-label="Delete document"
                        title="Delete"
                    >
                        <svg xmlns="http://www.w3.org/2000/svg" className="h-4 w-4" fill="none" viewBox="0 0 24 24"
                             stroke="currentColor">
                            <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2}
                                  d="M19 7l-.867 12.142A2 2 0 0116.138 21H7.862a2 2 0 01-1.995-1.858L5 7m5 4v6m4-6v6m1-10V4a1 1 0 00-1-1h-4a1 1 0 00-1 1v3M4 7h16"/>
                        </svg>
                    </button>
                </div>
                {/* File type badge */}
                <div className="absolute bottom-2 left-2">
                    <span className="badge badge-accent badge-sm">{originalFileType}</span>
                </div>
            </figure>
        );
    };

    // Render card body based on view mode:
    // small = name + page count; large = + first 3 tags + size;
    // detail = all tags/categories + full metadata grid
    const renderCardBody = () => {
        if (viewMode === 'small') {
            return (
                <div className="card-body p-3">
                    <h3 className="card-title text-sm truncate" title={name}>{name}</h3>
                    <p className="text-xs text-gray-500">{pageCount} page{pageCount > 1 ? 's' : ''}</p>
                </div>
            );
        }
        if (viewMode === 'large') {
            return (
                <div className="card-body p-4">
                    <h3 className="card-title text-base truncate" title={name}>{name}</h3>
                    <div className="flex flex-wrap gap-1 mb-2">
                        {tags.slice(0, 3).map(tag => (
                            <span key={tag} className="badge badge-primary badge-xs">{tag}</span>
                        ))}
                        {tags.length > 3 && (
                            <span className="badge badge-ghost badge-xs">+{tags.length - 3}</span>
                        )}
                    </div>
                    <div className="text-sm space-y-1">
                        <p className="text-gray-500">{pageCount} page{pageCount > 1 ? 's' : ''}</p>
                        <p className="text-gray-500">{formatFileSize(fileSize)}</p>
                    </div>
                </div>
            );
        }
        // Detail mode
        return (
            <div className="card-body">
                <h3 className="card-title text-lg" title={name}>{name}</h3>
                {/* Tags */}
                {tags.length > 0 && (
                    <div className="flex flex-wrap gap-1 mb-2">
                        {tags.map(tag => (
                            <span key={tag} className="badge badge-primary badge-sm">{tag}</span>
                        ))}
                    </div>
                )}
                {/* Categories */}
                {categories.length > 0 && (
                    <div className="flex flex-wrap gap-1 mb-3">
                        {categories.map(category => (
                            <span key={category} className="badge badge-secondary badge-sm">{category}</span>
                        ))}
                    </div>
                )}
                {/* Metadata */}
                <div className="grid grid-cols-2 gap-2 text-sm">
                    <div>
                        <span className="font-semibold">Pages:</span>
                        <span className="ml-2 text-gray-500">{pageCount}</span>
                    </div>
                    <div>
                        <span className="font-semibold">Size:</span>
                        <span className="ml-2 text-gray-500">{formatFileSize(fileSize)}</span>
                    </div>
                    <div>
                        <span className="font-semibold">Type:</span>
                        <span className="ml-2 text-gray-500">{originalFileType}</span>
                    </div>
                    <div>
                        <span className="font-semibold">Date:</span>
                        <span className="ml-2 text-gray-500">{formatDate(createdAt)}</span>
                    </div>
                </div>
            </div>
        );
    };

    return (
        <div className={getCardClasses()}>
            {renderThumbnail()}
            {renderCardBody()}
        </div>
    );
});
DocumentCard.displayName = 'DocumentCard';
export default DocumentCard;

View File

@@ -0,0 +1,164 @@
/**
* DocumentDetailView Component
* Displays a document in detail mode with all pages visible
* This is a placeholder that shows multiple page thumbnails
* When real PDF backend is ready, this can be replaced with actual PDF rendering
*/
import React from 'react';
/**
 * Formats file size to human-readable format.
 * NOTE(review): duplicated in DocumentCard.jsx — consider extracting both
 * copies into a shared utils module.
 * @param {number} bytes - File size in bytes (non-negative)
 * @returns {string} Formatted file size, e.g. "1.5 KB"
 */
const formatFileSize = (bytes) => {
    if (bytes === 0) return '0 Bytes';
    const k = 1024;
    const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB'];
    // FIX: clamp the unit index — sizes >= 1024 GB used to index past the
    // end of the table and render "undefined" as the unit.
    const i = Math.min(Math.floor(Math.log(bytes) / Math.log(k)), sizes.length - 1);
    return Math.round((bytes / Math.pow(k, i)) * 100) / 100 + ' ' + sizes[i];
};
/**
* Formats date to localized string
* @param {string} dateString - ISO date string
* @returns {string} Formatted date
*/
const formatDate = (dateString) => {
return new Date(dateString).toLocaleDateString('en-US', {
year: 'numeric',
month: 'long',
day: 'numeric',
hour: '2-digit',
minute: '2-digit'
});
};
/**
* DocumentDetailView component
* @param {Object} props
* @param {Object} props.document - Document object
* @param {function(): void} props.onEdit - Callback when edit is clicked
* @param {function(): void} props.onDelete - Callback when delete is clicked
* @returns {JSX.Element}
*/
const DocumentDetailView = ({ document, onEdit, onDelete }) => {
const {
name,
originalFileType,
thumbnailUrl,
pageCount,
fileSize,
createdAt,
tags,
categories
} = document;
// Generate placeholder pages (in real implementation, these would be actual PDF pages)
const pages = Array.from({ length: pageCount }, (_, i) => ({
pageNumber: i + 1,
thumbnailUrl: thumbnailUrl.replace('Page+1', `Page+${i + 1}`)
}));
return (
<div className="card bg-base-100 shadow-xl">
{/* Header with actions */}
<div className="card-body">
<div className="flex justify-between items-start mb-4">
<div className="flex-1">
<h2 className="card-title text-2xl mb-2">{name}</h2>
{/* Tags */}
{tags.length > 0 && (
<div className="flex flex-wrap gap-2 mb-2">
<span className="text-sm font-semibold text-gray-600">Tags:</span>
{tags.map(tag => (
<span key={tag} className="badge badge-primary">{tag}</span>
))}
</div>
)}
{/* Categories */}
{categories.length > 0 && (
<div className="flex flex-wrap gap-2 mb-3">
<span className="text-sm font-semibold text-gray-600">Categories:</span>
{categories.map(category => (
<span key={category} className="badge badge-secondary">{category}</span>
))}
</div>
)}
</div>
{/* Action buttons */}
<div className="flex gap-2">
<button
className="btn btn-primary btn-sm"
onClick={onEdit}
aria-label="Edit document"
>
<svg xmlns="http://www.w3.org/2000/svg" className="h-4 w-4" fill="none" viewBox="0 0 24 24" stroke="currentColor">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M11 5H6a2 2 0 00-2 2v11a2 2 0 002 2h11a2 2 0 002-2v-5m-1.414-9.414a2 2 0 112.828 2.828L11.828 15H9v-2.828l8.586-8.586z" />
</svg>
Edit
</button>
<button
className="btn btn-error btn-sm"
onClick={onDelete}
aria-label="Delete document"
>
<svg xmlns="http://www.w3.org/2000/svg" className="h-4 w-4" fill="none" viewBox="0 0 24 24" stroke="currentColor">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M19 7l-.867 12.142A2 2 0 0116.138 21H7.862a2 2 0 01-1.995-1.858L5 7m5 4v6m4-6v6m1-10V4a1 1 0 00-1-1h-4a1 1 0 00-1 1v3M4 7h16" />
</svg>
Delete
</button>
</div>
</div>
{/* Metadata grid */}
<div className="grid grid-cols-2 md:grid-cols-4 gap-4 mb-6 p-4 bg-base-200 rounded-lg">
<div>
<span className="text-sm font-semibold text-gray-600">Original Type</span>
<p className="text-lg">{originalFileType}</p>
</div>
<div>
<span className="text-sm font-semibold text-gray-600">Pages</span>
<p className="text-lg">{pageCount}</p>
</div>
<div>
<span className="text-sm font-semibold text-gray-600">File Size</span>
<p className="text-lg">{formatFileSize(fileSize)}</p>
</div>
<div>
<span className="text-sm font-semibold text-gray-600">Created</span>
<p className="text-lg">{formatDate(createdAt)}</p>
</div>
</div>
{/* Pages preview */}
<div>
<h3 className="text-lg font-semibold mb-4">Document Pages ({pageCount})</h3>
<div className="grid grid-cols-2 md:grid-cols-3 lg:grid-cols-4 gap-4">
{pages.map((page) => (
<div key={page.pageNumber} className="relative group">
<div className="aspect-[3/4] bg-base-200 rounded-lg overflow-hidden shadow-md hover:shadow-xl transition-shadow">
<img
src={page.thumbnailUrl}
alt={`Page ${page.pageNumber}`}
className="w-full h-full object-cover"
loading="lazy"
/>
</div>
<div className="text-center mt-2">
<span className="text-sm text-gray-600">Page {page.pageNumber}</span>
</div>
</div>
))}
</div>
</div>
</div>
</div>
);
};
export default DocumentDetailView;

View File

@@ -0,0 +1,181 @@
/**
* DocumentGallery Component
* Main container for displaying documents in different view modes
*/
import React, { useState } from 'react';
import DocumentCard from './DocumentCard';
import DocumentDetailView from './DocumentDetailView';
import ViewModeSwitcher from './ViewModeSwitcher';
import EditDocumentModal from './EditDocumentModal';
import DeleteConfirmModal from './DeleteConfirmModal';
import { useDocuments } from '../../hooks/useDocuments';
/**
* DocumentGallery component
* @returns {JSX.Element}
*/
const DocumentGallery = () => {
  const { documents, loading, error, updateDocument, deleteDocument } = useDocuments();
  // 'small' | 'large' | 'detail' — drives both the grid classes and which
  // component renders each document (card vs. detail view).
  const [viewMode, setViewMode] = useState('large');
  // The document currently being edited/deleted. Also doubles as the
  // open/closed flag for the corresponding modal (null means closed).
  const [editingDocument, setEditingDocument] = useState(null);
  const [deletingDocument, setDeletingDocument] = useState(null);
  const [isSaving, setIsSaving] = useState(false);
  const [isDeleting, setIsDeleting] = useState(false);
  /**
   * Handles opening the edit modal
   * @param {Object} document - Document to edit
   */
  const handleEditClick = (document) => {
    setEditingDocument(document);
  };
  /**
   * Handles opening the delete confirmation modal
   * @param {Object} document - Document to delete
   */
  const handleDeleteClick = (document) => {
    setDeletingDocument(document);
  };
  /**
   * Handles saving document changes
   * @param {Object} updates - Updates object with tags and categories
   */
  const handleSaveEdit = async (updates) => {
    if (!editingDocument) return;
    setIsSaving(true);
    const success = await updateDocument(editingDocument.id, updates);
    setIsSaving(false);
    // On failure the modal stays open so the user can retry; the hook has
    // already recorded the error.
    if (success) {
      setEditingDocument(null);
    }
  };
  /**
   * Handles confirming document deletion
   */
  const handleConfirmDelete = async () => {
    if (!deletingDocument) return;
    setIsDeleting(true);
    const success = await deleteDocument(deletingDocument.id);
    setIsDeleting(false);
    // Keep the confirmation dialog open on failure, mirroring handleSaveEdit.
    if (success) {
      setDeletingDocument(null);
    }
  };
  /**
   * Gets grid classes based on view mode
   * @returns {string} Tailwind CSS classes
   */
  const getGridClasses = () => {
    switch (viewMode) {
      case 'small':
        return 'grid grid-cols-2 sm:grid-cols-3 md:grid-cols-4 lg:grid-cols-5 xl:grid-cols-6 gap-4';
      case 'large':
        return 'grid grid-cols-1 sm:grid-cols-2 lg:grid-cols-3 gap-6';
      case 'detail':
        // Detail mode stacks full-width detail views instead of a grid.
        return 'flex flex-col gap-6';
      default:
        return 'grid grid-cols-1 gap-4';
    }
  };
  // Loading state
  if (loading) {
    return (
      <div className="flex justify-center items-center min-h-[400px] ">
        <span className="loading loading-spinner loading-lg"></span>
      </div>
    );
  }
  // Error state
  if (error) {
    return (
      <div className="alert alert-error">
        <svg xmlns="http://www.w3.org/2000/svg" className="stroke-current shrink-0 h-6 w-6" fill="none" viewBox="0 0 24 24">
          <path strokeLinecap="round" strokeLinejoin="round" strokeWidth="2" d="M10 14l2-2m0 0l2-2m-2 2l-2-2m2 2l2 2m7-2a9 9 0 11-18 0 9 9 0 0118 0z" />
        </svg>
        <span>Error loading documents: {error}</span>
      </div>
    );
  }
  // Empty state
  if (documents.length === 0) {
    return (
      <div className="flex flex-col items-center justify-center min-h-[400px] text-center">
        <svg xmlns="http://www.w3.org/2000/svg" className="h-24 w-24 text-gray-300 mb-4" fill="none" viewBox="0 0 24 24" stroke="currentColor">
          <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={1.5} d="M9 12h6m-6 4h6m2 5H7a2 2 0 01-2-2V5a2 2 0 012-2h5.586a1 1 0 01.707.293l5.414 5.414a1 1 0 01.293.707V19a2 2 0 01-2 2z" />
        </svg>
        <h3 className="text-xl font-semibold mb-2">No documents yet</h3>
        <p className="text-gray-500">Upload your first document to get started</p>
      </div>
    );
  }
  return (
    <div className="h-full flex flex-col">
      {/* Header with view mode switcher - Always visible */}
      <div className="flex justify-between items-center mb-6 flex-shrink-0">
        <div>
          <p className="text-gray-500">{documents.length} document{documents.length !== 1 ? 's' : ''}</p>
        </div>
        <ViewModeSwitcher
          currentMode={viewMode}
          onModeChange={setViewMode}
        />
      </div>
      {/* Document grid/list - Scrollable */}
      <div className="flex-1 overflow-y-auto">
        <div className={getGridClasses()}>
          {documents.map(document => (
            viewMode === 'detail' ? (
              <DocumentDetailView
                key={document.id}
                document={document}
                onEdit={() => handleEditClick(document)}
                onDelete={() => handleDeleteClick(document)}
              />
            ) : (
              <DocumentCard
                key={document.id}
                document={document}
                viewMode={viewMode}
                onEdit={() => handleEditClick(document)}
                onDelete={() => handleDeleteClick(document)}
              />
            )
          ))}
        </div>
      </div>
      {/* Modals */}
      <EditDocumentModal
        isOpen={!!editingDocument}
        document={editingDocument}
        onClose={() => setEditingDocument(null)}
        onSave={handleSaveEdit}
        isSaving={isSaving}
      />
      <DeleteConfirmModal
        isOpen={!!deletingDocument}
        document={deletingDocument}
        onClose={() => setDeletingDocument(null)}
        onConfirm={handleConfirmDelete}
        isDeleting={isDeleting}
      />
    </div>
  );
};
export default DocumentGallery;

View File

@@ -0,0 +1,225 @@
/**
* EditDocumentModal Component
* Modal dialog for editing document tags and categories
*/
import React, { useState, useEffect } from 'react';
import { getAvailableTags, getAvailableCategories } from '../../services/documentService';
/**
* EditDocumentModal component
* @param {Object} props
* @param {boolean} props.isOpen - Whether the modal is open
* @param {Object|null} props.document - Document to edit
* @param {function(): void} props.onClose - Callback when modal is closed
* @param {function(Object): void} props.onSave - Callback when changes are saved
* @param {boolean} props.isSaving - Whether save is in progress
* @returns {JSX.Element}
*/
const EditDocumentModal = ({
isOpen,
document,
onClose,
onSave,
isSaving = false
}) => {
const [selectedTags, setSelectedTags] = useState([]);
const [selectedCategories, setSelectedCategories] = useState([]);
const [availableTags, setAvailableTags] = useState([]);
const [availableCategories, setAvailableCategories] = useState([]);
const [newTag, setNewTag] = useState('');
const [newCategory, setNewCategory] = useState('');
// Load available tags and categories
useEffect(() => {
const loadOptions = async () => {
const [tags, categories] = await Promise.all([
getAvailableTags(),
getAvailableCategories()
]);
setAvailableTags(tags);
setAvailableCategories(categories);
};
loadOptions();
}, []);
// Initialize selected values when document changes
useEffect(() => {
if (document) {
setSelectedTags(document.tags || []);
setSelectedCategories(document.categories || []);
}
}, [document]);
const handleAddTag = (tag) => {
if (tag && !selectedTags.includes(tag)) {
setSelectedTags([...selectedTags, tag]);
}
setNewTag('');
};
const handleRemoveTag = (tag) => {
setSelectedTags(selectedTags.filter(t => t !== tag));
};
const handleAddCategory = (category) => {
if (category && !selectedCategories.includes(category)) {
setSelectedCategories([...selectedCategories, category]);
}
setNewCategory('');
};
const handleRemoveCategory = (category) => {
setSelectedCategories(selectedCategories.filter(c => c !== category));
};
const handleSave = () => {
onSave({
tags: selectedTags,
categories: selectedCategories
});
};
if (!isOpen || !document) return null;
return (
<dialog className="modal modal-open">
<div className="modal-box max-w-2xl">
<h3 className="font-bold text-lg mb-4">Edit Document</h3>
<div className="mb-4">
<p className="text-sm text-gray-500">
Document: <span className="font-semibold">{document.name}</span>
</p>
</div>
{/* Tags Section */}
<div className="mb-6">
<label className="label">
<span className="label-text font-semibold">Tags</span>
</label>
{/* Selected Tags */}
<div className="flex flex-wrap gap-2 mb-3">
{selectedTags.map(tag => (
<div key={tag} className="badge badge-primary gap-2">
{tag}
<button
type="button"
className="btn btn-ghost btn-xs"
onClick={() => handleRemoveTag(tag)}
disabled={isSaving}
>
</button>
</div>
))}
</div>
{/* Add Tag */}
<div className="flex gap-2">
<select
className="select select-bordered flex-1"
value={newTag}
onChange={(e) => setNewTag(e.target.value)}
disabled={isSaving}
>
<option value="">Select a tag...</option>
{availableTags
.filter(tag => !selectedTags.includes(tag))
.map(tag => (
<option key={tag} value={tag}>{tag}</option>
))
}
</select>
<button
className="btn btn-primary"
onClick={() => handleAddTag(newTag)}
disabled={!newTag || isSaving}
>
Add
</button>
</div>
</div>
{/* Categories Section */}
<div className="mb-6">
<label className="label">
<span className="label-text font-semibold">Categories</span>
</label>
{/* Selected Categories */}
<div className="flex flex-wrap gap-2 mb-3">
{selectedCategories.map(category => (
<div key={category} className="badge badge-secondary gap-2">
{category}
<button
type="button"
className="btn btn-ghost btn-xs"
onClick={() => handleRemoveCategory(category)}
disabled={isSaving}
>
</button>
</div>
))}
</div>
{/* Add Category */}
<div className="flex gap-2">
<select
className="select select-bordered flex-1"
value={newCategory}
onChange={(e) => setNewCategory(e.target.value)}
disabled={isSaving}
>
<option value="">Select a category...</option>
{availableCategories
.filter(cat => !selectedCategories.includes(cat))
.map(cat => (
<option key={cat} value={cat}>{cat}</option>
))
}
</select>
<button
className="btn btn-secondary"
onClick={() => handleAddCategory(newCategory)}
disabled={!newCategory || isSaving}
>
Add
</button>
</div>
</div>
<div className="modal-action">
<button
className="btn btn-ghost"
onClick={onClose}
disabled={isSaving}
>
Cancel
</button>
<button
className="btn btn-primary"
onClick={handleSave}
disabled={isSaving}
>
{isSaving ? (
<>
<span className="loading loading-spinner loading-sm"></span>
Saving...
</>
) : (
'Save Changes'
)}
</button>
</div>
</div>
<form method="dialog" className="modal-backdrop" onClick={onClose}>
<button disabled={isSaving}>close</button>
</form>
</dialog>
);
};
export default EditDocumentModal;

View File

@@ -0,0 +1,51 @@
/**
* ViewModeSwitcher Component
* Allows users to switch between different view modes (small, large, detail)
*/
import React from 'react';
import {FaList} from "react-icons/fa6";
import {FaTh, FaThLarge} from "react-icons/fa";
/**
* @typedef {'small' | 'large' | 'detail'} ViewMode
*/
/**
* ViewModeSwitcher component
* @param {Object} props
* @param {ViewMode} props.currentMode - Current active view mode
* @param {function(ViewMode): void} props.onModeChange - Callback when mode changes
* @returns {JSX.Element}
*/
const ViewModeSwitcher = ({ currentMode, onModeChange }) => {
const modes = [
{ id: 'small', label: 'Small', icon: FaTh },
{ id: 'large', label: 'Large', icon: FaThLarge },
{ id: 'detail', label: 'Detail', icon: FaList }
];
return (
<div className="flex gap-2">
{modes.map(mode => {
const IconComponent = mode.icon;
return (
<button
key={mode.id}
onClick={() => onModeChange(mode.id)}
className={`btn btn-sm ${
currentMode === mode.id ? 'btn-primary' : 'btn-ghost'
}`}
aria-label={`Switch to ${mode.label} view`}
title={`${mode.label} view`}
>
<IconComponent />
<span className="hidden sm:inline ml-1">{mode.label}</span>
</button>
);
})}
</div>
);
};
export default ViewModeSwitcher;

View File

@@ -0,0 +1,205 @@
import React, {createContext, useContext, useEffect, useReducer} from 'react';
import authService from '../services/authService';
// Auth state actions — the full set of action types authReducer understands.
const AUTH_ACTIONS = {
  LOGIN_START: 'LOGIN_START',
  LOGIN_SUCCESS: 'LOGIN_SUCCESS',
  LOGIN_FAILURE: 'LOGIN_FAILURE',
  LOGOUT: 'LOGOUT',
  LOAD_USER: 'LOAD_USER',
  CLEAR_ERROR: 'CLEAR_ERROR',
};
// Initial state — user/token are hydrated from storage after mount.
const initialState = {
  user: null,
  token: null,
  isAuthenticated: false,
  loading: true, // Loading true initially to check stored auth
  error: null,
};
/**
 * Auth reducer to manage state transitions.
 * Pure function: returns a new state object for known action types and the
 * unchanged state for anything else.
 * @param {Object} state - Current auth state
 * @param {{type: string, payload?: Object}} action - Dispatched action
 * @returns {Object} Next auth state
 */
function authReducer(state, action) {
  const { type, payload } = action;
  if (type === AUTH_ACTIONS.LOGIN_START) {
    return { ...state, loading: true, error: null };
  }
  if (type === AUTH_ACTIONS.LOGIN_SUCCESS) {
    return {
      ...state,
      user: payload.user,
      token: payload.token,
      isAuthenticated: true,
      loading: false,
      error: null,
    };
  }
  if (type === AUTH_ACTIONS.LOGIN_FAILURE) {
    return {
      ...state,
      user: null,
      token: null,
      isAuthenticated: false,
      loading: false,
      error: payload.error,
    };
  }
  if (type === AUTH_ACTIONS.LOGOUT) {
    return {
      ...state,
      user: null,
      token: null,
      isAuthenticated: false,
      loading: false,
      error: null,
    };
  }
  if (type === AUTH_ACTIONS.LOAD_USER) {
    return {
      ...state,
      user: payload.user,
      token: payload.token,
      // Presence of a stored token is what marks the session authenticated.
      isAuthenticated: !!payload.token,
      loading: false,
      error: null,
    };
  }
  if (type === AUTH_ACTIONS.CLEAR_ERROR) {
    return { ...state, error: null };
  }
  return state;
}
// Create context
const AuthContext = createContext(null);
/**
* AuthProvider component to wrap the app and provide authentication state
* @param {Object} props - Component props
* @param {React.ReactNode} props.children - Child components
*/
export function AuthProvider({children}) {
const [state, dispatch] = useReducer(authReducer, initialState);
// Load stored authentication data on app startup
useEffect(() => {
const loadStoredAuth = () => {
const token = authService.getStoredToken();
const user = authService.getStoredUser();
dispatch({
type: AUTH_ACTIONS.LOAD_USER,
payload: {user, token},
});
};
loadStoredAuth();
}, []);
/**
* Login function to authenticate user
* @param {string} username - User's username
* @param {string} password - User's password
* @returns {Promise<boolean>} True if login successful
*/
const login = async (username, password) => {
try {
dispatch({type: AUTH_ACTIONS.LOGIN_START});
const {access_token, user} = await authService.login(username, password);
dispatch({
type: AUTH_ACTIONS.LOGIN_SUCCESS,
payload: {user, token: access_token},
});
return true;
} catch (error) {
dispatch({
type: AUTH_ACTIONS.LOGIN_FAILURE,
payload: {error: error.message},
});
return false;
}
};
/**
* Logout function to clear authentication state
*/
const logout = () => {
authService.logout();
dispatch({type: AUTH_ACTIONS.LOGOUT});
};
/**
* Clear error message from state
*/
const clearError = () => {
dispatch({type: AUTH_ACTIONS.CLEAR_ERROR});
};
/**
* Refresh user data from API
*/
const refreshUser = async () => {
try {
const user = await authService.getCurrentUser();
dispatch({
type: AUTH_ACTIONS.LOGIN_SUCCESS,
payload: {user, token: state.token},
});
} catch (error) {
console.error('Failed to refresh user data:', error);
// Don't logout on refresh failure, just log error
}
};
// Context value object
const value = {
// State
user: state.user,
token: state.token,
isAuthenticated: state.isAuthenticated,
loading: state.loading,
error: state.error,
// Actions
login,
logout,
clearError,
refreshUser,
};
return (
<AuthContext.Provider value={value}>
{children}
</AuthContext.Provider>
);
}
/**
 * Custom hook to use authentication context.
 * @returns {Object} Auth context value
 * @throws {Error} If used outside AuthProvider
 */
export function useAuth() {
  const ctx = useContext(AuthContext);
  if (ctx) {
    return ctx;
  }
  throw new Error('useAuth must be used within an AuthProvider');
}
export { AuthContext };

View File

@@ -0,0 +1,12 @@
import {useContext} from 'react';
import {AuthContext} from '../contexts/AuthContext';
/**
 * Accessor for the authentication context.
 * Must be called from a component rendered inside an AuthProvider.
 * @throws {Error} If no AuthProvider is present in the tree
 */
export const useAuth = () => {
  const authContext = useContext(AuthContext);
  if (!authContext) {
    throw new Error('useAuth must be used within an AuthProvider');
  }
  return authContext;
};

View File

@@ -0,0 +1,85 @@
/**
* Custom hook for managing documents
* Handles fetching, updating, and deleting documents
*/
import { useState, useEffect, useCallback } from 'react';
import * as documentService from '../services/documentService';
/**
 * Hook for managing documents state and operations.
 * Fetches the document list on mount and exposes update/delete helpers
 * that keep local state in sync with the service layer.
 * @returns {Object} Documents state and operations
 */
export const useDocuments = () => {
  const [documents, setDocuments] = useState([]);
  const [loading, setLoading] = useState(true);
  const [error, setError] = useState(null);

  /**
   * Loads the full document list from the service.
   */
  const fetchDocuments = useCallback(async () => {
    setLoading(true);
    setError(null);
    try {
      const fetched = await documentService.getAllDocuments();
      setDocuments(fetched);
    } catch (err) {
      setError(err.message);
      console.error('Error fetching documents:', err);
    } finally {
      setLoading(false);
    }
  }, []);

  /**
   * Updates a document's tags and categories.
   * @param {string} id - Document ID
   * @param {Object} updates - Updates object
   * @returns {Promise<boolean>} Success status
   */
  const updateDocument = useCallback(async (id, updates) => {
    try {
      const saved = await documentService.updateDocument(id, updates);
      setDocuments(current =>
        current.map(doc => (doc.id === id ? saved : doc))
      );
      return true;
    } catch (err) {
      setError(err.message);
      console.error('Error updating document:', err);
      return false;
    }
  }, []);

  /**
   * Deletes a document.
   * @param {string} id - Document ID
   * @returns {Promise<boolean>} Success status
   */
  const deleteDocument = useCallback(async (id) => {
    try {
      await documentService.deleteDocument(id);
      setDocuments(current => current.filter(doc => doc.id !== id));
      return true;
    } catch (err) {
      setError(err.message);
      console.error('Error deleting document:', err);
      return false;
    }
  }, []);

  // Initial load on mount.
  useEffect(() => {
    fetchDocuments();
  }, [fetchDocuments]);

  return {
    documents,
    loading,
    error,
    fetchDocuments,
    updateDocument,
    deleteDocument
  };
};

View File

@@ -1,68 +1,11 @@
:root { @tailwind base;
font-family: system-ui, Avenir, Helvetica, Arial, sans-serif; @tailwind components;
line-height: 1.5; @tailwind utilities;
font-weight: 400; @plugin "daisyui";
color-scheme: light dark;
color: rgba(255, 255, 255, 0.87);
background-color: #242424;
font-synthesis: none;
text-rendering: optimizeLegibility;
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
}
a {
font-weight: 500;
color: #646cff;
text-decoration: inherit;
}
a:hover {
color: #535bf2;
}
/* Custom styles for the application */
body { body {
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, 'Helvetica Neue', Arial, sans-serif;
margin: 0; margin: 0;
display: flex;
place-items: center;
min-width: 320px;
min-height: 100vh;
} }
h1 {
font-size: 3.2em;
line-height: 1.1;
}
button {
border-radius: 8px;
border: 1px solid transparent;
padding: 0.6em 1.2em;
font-size: 1em;
font-weight: 500;
font-family: inherit;
background-color: #1a1a1a;
cursor: pointer;
transition: border-color 0.25s;
}
button:hover {
border-color: #646cff;
}
button:focus,
button:focus-visible {
outline: 4px auto -webkit-focus-ring-color;
}
@media (prefers-color-scheme: light) {
:root {
color: #213547;
background-color: #ffffff;
}
a:hover {
color: #747bff;
}
button {
background-color: #f9f9f9;
}
}

View File

@@ -1,6 +1,7 @@
import { StrictMode } from 'react' import { StrictMode } from 'react'
import { createRoot } from 'react-dom/client' import { createRoot } from 'react-dom/client'
import './index.css' import './index.css'
import './App.css'
import App from './App.jsx' import App from './App.jsx'
createRoot(document.getElementById('root')).render( createRoot(document.getElementById('root')).render(

View File

@@ -0,0 +1,239 @@
import {useEffect, useState} from 'react';
import {useAuth} from '../hooks/useAuth';
const DashboardPage = () => {
const {user} = useAuth();
const [stats, setStats] = useState({
totalDocuments: 0,
processingJobs: 0,
completedJobs: 0,
failedJobs: 0
});
const [recentFiles, setRecentFiles] = useState([]);
const [loading, setLoading] = useState(true);
useEffect(() => {
// Simulate API calls for dashboard data
const fetchDashboardData = async () => {
try {
// TODO: Replace with actual API calls
setTimeout(() => {
setStats({
totalDocuments: 42,
processingJobs: 3,
completedJobs: 38,
failedJobs: 1
});
setRecentFiles([
{
id: 1,
filename: 'invoice_2024.pdf',
status: 'completed',
processedAt: '2024-01-15 14:30:00',
fileType: 'pdf'
},
{
id: 2,
filename: 'contract_draft.docx',
status: 'processing',
processedAt: '2024-01-15 14:25:00',
fileType: 'docx'
},
{
id: 3,
filename: 'receipt_scan.jpg',
status: 'completed',
processedAt: '2024-01-15 14:20:00',
fileType: 'image'
}
]);
setLoading(false);
}, 1000);
} catch (error) {
console.error('Error fetching dashboard data:', error);
setLoading(false);
}
};
fetchDashboardData();
}, []);
const getStatusBadge = (status) => {
const statusColors = {
completed: 'badge-success',
processing: 'badge-warning',
failed: 'badge-error',
pending: 'badge-info'
};
return `badge ${statusColors[status] || 'badge-neutral'}`;
};
const getFileTypeIcon = (fileType) => {
const icons = {
pdf: '📄',
docx: '📝',
image: '🖼️',
txt: '📄'
};
return icons[fileType] || '📄';
};
if (loading) {
return (
<div className="flex justify-center items-center h-64">
<span className="loading loading-spinner loading-lg"></span>
</div>
);
}
return (
<div className="space-y-6">
{/* Welcome Header */}
<div className="bg-base-100 rounded-lg shadow p-6">
<h1 className="text-3xl font-bold text-base-content">
Welcome back, {user?.username}!
</h1>
<p className="text-base-content/60 mt-2">
Here's your document processing overview
</p>
</div>
{/* Stats Cards */}
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-4 gap-4">
<div className="stat bg-base-100 rounded-lg shadow">
<div className="stat-figure text-primary">
<svg className="w-8 h-8" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth="2"
d="M9 12h6m-6 4h6m2 5H7a2 2 0 01-2-2V5a2 2 0 012-2h5.586a1 1 0 01.707.293l5.414 5.414a1 1 0 01.293.707V19a2 2 0 01-2 2z"/>
</svg>
</div>
<div className="stat-title">Total Documents</div>
<div className="stat-value text-primary">{stats.totalDocuments}</div>
</div>
<div className="stat bg-base-100 rounded-lg shadow">
<div className="stat-figure text-warning">
<svg className="w-8 h-8" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth="2"
d="M12 8v4l3 3m6-3a9 9 0 11-18 0 9 9 0 0118 0z"/>
</svg>
</div>
<div className="stat-title">Processing</div>
<div className="stat-value text-warning">{stats.processingJobs}</div>
</div>
<div className="stat bg-base-100 rounded-lg shadow">
<div className="stat-figure text-success">
<svg className="w-8 h-8" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth="2"
d="M9 12l2 2 4-4m6 2a9 9 0 11-18 0 9 9 0 0118 0z"/>
</svg>
</div>
<div className="stat-title">Completed</div>
<div className="stat-value text-success">{stats.completedJobs}</div>
</div>
<div className="stat bg-base-100 rounded-lg shadow">
<div className="stat-figure text-error">
<svg className="w-8 h-8" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth="2"
d="M12 8v4m0 4h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z"/>
</svg>
</div>
<div className="stat-title">Failed</div>
<div className="stat-value text-error">{stats.failedJobs}</div>
</div>
</div>
{/* Recent Files */}
<div className="bg-base-100 rounded-lg shadow">
<div className="p-6 border-b border-base-300">
<h2 className="text-xl font-semibold">Recent Files</h2>
</div>
<div className="overflow-x-auto">
<table className="table table-zebra">
<thead>
<tr>
<th>File</th>
<th>Type</th>
<th>Status</th>
<th>Processed At</th>
<th>Actions</th>
</tr>
</thead>
<tbody>
{recentFiles.map((file) => (
<tr key={file.id}>
<td>
<div className="flex items-center space-x-3">
<div className="text-2xl">
{getFileTypeIcon(file.fileType)}
</div>
<div className="font-medium">{file.filename}</div>
</div>
</td>
<td>
<span className="badge badge-outline">
{file.fileType.toUpperCase()}
</span>
</td>
<td>
<span className={getStatusBadge(file.status)}>
{file.status.charAt(0).toUpperCase() + file.status.slice(1)}
</span>
</td>
<td>{file.processedAt}</td>
<td>
<div className="flex space-x-2">
<button className="btn btn-sm btn-ghost">View</button>
<button className="btn btn-sm btn-ghost">Download</button>
</div>
</td>
</tr>
))}
</tbody>
</table>
</div>
</div>
{/* Quick Actions */}
<div className="bg-base-100 rounded-lg shadow p-6">
<h2 className="text-xl font-semibold mb-4">Quick Actions</h2>
<div className="flex flex-wrap gap-4">
<button className="btn btn-primary">
<svg className="w-5 h-5 mr-2" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth="2"
d="M7 16a4 4 0 01-.88-7.903A5 5 0 1115.9 6L16 6a5 5 0 011 9.9M15 13l-3-3m0 0l-3 3m3-3v12"/>
</svg>
Upload Documents
</button>
<button className="btn btn-outline">
<svg className="w-5 h-5 mr-2" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth="2"
d="M9 17v-2m3 2v-4m3 4v-6m2 10H7a2 2 0 01-2-2V5a2 2 0 012-2h5.586a1 1 0 01.707.293l5.414 5.414a1 1 0 01.293.707V19a2 2 0 01-2 2z"/>
</svg>
View Reports
</button>
{user?.role === 'admin' && (
<button className="btn btn-outline">
<svg className="w-5 h-5 mr-2" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth="2"
d="M12 4.354a4 4 0 110 5.292M15 21H3v-1a6 6 0 0112 0v1zm0 0h6v-1a6 6 0 00-9-5.197m13.5-9a2.5 2.5 0 11-5 0 2.5 2.5 0 015 0z"/>
</svg>
Manage Users
</button>
)}
</div>
</div>
</div>
);
};
export default DashboardPage;

View File

@@ -0,0 +1,21 @@
/**
* DocumentsPage Component
* Main page for displaying and managing documents
*/
import React from 'react';
import DocumentGallery from '../components/documents/DocumentGallery';
/**
* DocumentsPage component
* @returns {JSX.Element}
*/
const DocumentsPage = () => {
return (
<div className="h-full flex flex-col">
<DocumentGallery />
</div>
);
};
export default DocumentsPage;

View File

@@ -0,0 +1,48 @@
import React, {useEffect} from 'react';
import {useNavigate} from 'react-router-dom';
import {useAuth} from '../contexts/AuthContext';
import AuthLayout from '../components/auth/AuthLayout';
import LoginForm from '../components/auth/LoginForm';
/**
* LoginPage component
* Full page component that handles login functionality and redirects
*/
function LoginPage() {
const {isAuthenticated, loading} = useAuth();
const navigate = useNavigate();
// Redirect to dashboard if already authenticated
useEffect(() => {
if (!loading && isAuthenticated) {
navigate('/dashboard', {replace: true});
}
}, [isAuthenticated, loading, navigate]);
// Show loading spinner while checking authentication
if (loading) {
return (
<AuthLayout>
<div className="card w-full max-w-md shadow-xl bg-base-100">
<div className="card-body items-center">
<span className="loading loading-spinner loading-lg text-primary"></span>
<p className="text-base-content/70 mt-4">Loading...</p>
</div>
</div>
</AuthLayout>
);
}
// Don't render login form if user is authenticated (prevents flash)
if (isAuthenticated) {
return null;
}
return (
<AuthLayout>
<LoginForm/>
</AuthLayout>
);
}
export default LoginPage;

View File

@@ -0,0 +1,101 @@
import api from '../utils/api';
/**
* Authentication service for handling login, logout, and user profile operations
*/
/**
 * Authentication service for handling login, logout, and user profile
 * operations. Persists the session (token + user) in localStorage.
 */
class AuthService {
  /**
   * Login user with username and password.
   * @param {string} username - User's username
   * @param {string} password - User's password
   * @returns {Promise<{access_token: string, user: Object}>} Login response with token and user data
   * @throws {Error} With the server-provided detail, or a generic message
   */
  async login(username, password) {
    try {
      // FastAPI expects form data for OAuth2PasswordRequestForm
      const formData = new FormData();
      formData.append('username', username);
      formData.append('password', password);
      const {data} = await api.post('/auth/login', formData, {
        headers: {
          'Content-Type': 'multipart/form-data',
        },
      });
      // Persist the session so it survives a page reload
      localStorage.setItem('access_token', data.access_token);
      localStorage.setItem('user', JSON.stringify(data.user));
      return {access_token: data.access_token, user: data.user};
    } catch (error) {
      // Surface the server's error detail when available
      throw new Error(error.response?.data?.detail || 'Login failed');
    }
  }

  /**
   * Logout user by clearing stored data.
   */
  logout() {
    localStorage.removeItem('access_token');
    localStorage.removeItem('user');
  }

  /**
   * Get current user profile from API.
   * @returns {Promise<Object>} Current user profile
   * @throws {Error} When the profile request fails
   */
  async getCurrentUser() {
    try {
      const {data: user} = await api.get('/auth/me');
      // Keep the cached copy in sync with the server
      localStorage.setItem('user', JSON.stringify(user));
      return user;
    } catch (error) {
      throw new Error(error.response?.data?.detail || 'Failed to get user profile');
    }
  }

  /**
   * Check if user is authenticated by verifying token existence.
   * @returns {boolean} True if user has valid token
   */
  isAuthenticated() {
    return Boolean(localStorage.getItem('access_token'));
  }

  /**
   * Get stored user data from localStorage.
   * @returns {Object|null} User data or null if not found/unparseable
   */
  getStoredUser() {
    try {
      const raw = localStorage.getItem('user');
      return raw ? JSON.parse(raw) : null;
    } catch (error) {
      console.error('Error parsing stored user data:', error);
      return null;
    }
  }

  /**
   * Get stored access token from localStorage.
   * @returns {string|null} Access token or null if not found
   */
  getStoredToken() {
    return localStorage.getItem('access_token');
  }
}
// Export singleton instance
// A single shared AuthService keeps all token/user storage handling in one place.
const authService = new AuthService();
export default authService;

View File

@@ -0,0 +1,97 @@
/**
* Document Service
* Handles all API calls related to documents
* Currently using mock data for development
*/
import { mockDocuments, availableTags, availableCategories } from '../utils/mockData';
import api from '../utils/api';
// Simulate network delay
const delay = (ms) => new Promise(resolve => setTimeout(resolve, ms));
/**
* Fetches all documents from the API
* @returns {Promise<Array>} Array of document objects
*/
/**
 * Fetches all documents from the API.
 *
 * Falls back to the bundled mock data when the API call fails, so the UI
 * keeps working during development without a running backend.
 *
 * @returns {Promise<Array>} Array of document objects
 */
export const getAllDocuments = async () => {
  try {
    const response = await api.get('/api/documents');
    return response.data;
  } catch (error) {
    console.error('Failed to fetch documents:', error);
    // Fallback to mock data in case of API error during development.
    // BUG FIX: the fallback was previously logged but never returned,
    // so callers received `undefined` whenever the API call failed.
    console.warn('Falling back to mock data');
    return mockDocuments;
  }
};
/**
* Fetches a single document by ID
* @param {string} id - Document ID
* @returns {Promise<Object|null>} Document object or null if not found
*/
/**
 * Fetches a single document by ID (mock-backed).
 * @param {string} id - Document ID
 * @returns {Promise<Object|null>} Document object or null if not found
 */
export const getDocumentById = async (id) => {
  await delay(300);
  const match = mockDocuments.find((doc) => doc.id === id);
  return match ?? null;
};
/**
* Updates a document's tags and categories
* @param {string} id - Document ID
* @param {Object} updates - Object containing tags and/or categories
* @param {Array<string>} updates.tags - New tags array
* @param {Array<string>} updates.categories - New categories array
* @returns {Promise<Object>} Updated document object
*/
/**
 * Updates a document's tags and categories (mock-backed).
 * @param {string} id - Document ID
 * @param {Object} updates - Object containing tags and/or categories
 * @returns {Promise<Object>} Updated document object
 */
export const updateDocument = async (id, updates) => {
  await delay(400);
  const position = mockDocuments.findIndex((doc) => doc.id === id);
  if (position === -1) {
    throw new Error('Document not found');
  }
  // Shallow-merge the incoming fields over the stored record.
  const merged = {...mockDocuments[position], ...updates};
  mockDocuments[position] = merged;
  return merged;
};
/**
* Deletes a document
* @param {string} id - Document ID
* @returns {Promise<boolean>} True if deletion was successful
*/
/**
 * Deletes a document (mock-backed).
 * @param {string} id - Document ID
 * @returns {Promise<boolean>} True if deletion was successful
 */
export const deleteDocument = async (id) => {
  await delay(300);
  const position = mockDocuments.findIndex((doc) => doc.id === id);
  if (position < 0) {
    throw new Error('Document not found');
  }
  mockDocuments.splice(position, 1);
  return true;
};
/**
* Gets all available tags
* @returns {Promise<Array<string>>} Array of tag strings
*/
/**
 * Gets all available tags (returns a defensive copy).
 * @returns {Promise<Array<string>>} Array of tag strings
 */
export const getAvailableTags = async () => {
  await delay(200);
  return availableTags.slice();
};
/**
* Gets all available categories
* @returns {Promise<Array<string>>} Array of category strings
*/
/**
 * Gets all available categories (returns a defensive copy).
 * @returns {Promise<Array<string>>} Array of category strings
 */
export const getAvailableCategories = async () => {
  await delay(200);
  return availableCategories.slice();
};

View File

@@ -0,0 +1,57 @@
import axios from 'axios';
// Base API configuration
// NOTE(review): hardcoded dev URL — consider sourcing from an env variable
// (e.g. import.meta.env) before deploying.
const API_BASE_URL = 'http://localhost:8000';
// Create axios instance with default configuration
const api = axios.create({
  baseURL: API_BASE_URL,
  timeout: 10000, // 10 seconds timeout
  headers: {
    'Content-Type': 'application/json',
  },
});
export { API_BASE_URL };
// Request interceptor: attach the stored JWT, when present, as a Bearer credential.
api.interceptors.request.use(
  (config) => {
    const storedToken = localStorage.getItem('access_token');
    if (storedToken) {
      config.headers.Authorization = `Bearer ${storedToken}`;
    }
    return config;
  },
  (error) => Promise.reject(error)
);
// Response interceptor to handle common errors
api.interceptors.response.use(
  (response) => {
    return response;
  },
  (error) => {
    const status = error.response?.status;
    const requestUrl = error.config?.url || '';
    // Handle 401 errors (unauthorized).
    // BUG FIX: skip the redirect when the 401 comes from the login call
    // itself — redirecting/reloading there swallowed the "invalid
    // credentials" error before the UI could display it.
    if (status === 401 && !requestUrl.includes('/auth/login')) {
      // Clear session data and send the user back to the login page.
      localStorage.removeItem('access_token');
      localStorage.removeItem('user');
      window.location.href = '/login';
    }
    // Log server-side failures for easier debugging.
    if (status >= 500) {
      console.error('Server error:', error.response.data);
    }
    return Promise.reject(error);
  }
);
export default api;

View File

@@ -0,0 +1,155 @@
/**
* Mock data for PDF documents
* This file provides sample data for development and testing purposes
*/
/**
* Generates a placeholder thumbnail URL
* @param {number} index - Document index for unique colors
* @returns {string} Placeholder image URL
*/
/**
 * Generates a placeholder thumbnail URL.
 * The color cycles through a fixed palette based on the document index.
 * @param {number} index - Document index for unique colors
 * @returns {string} Placeholder image URL
 */
const generateThumbnailUrl = (index) => {
  const palette = ['3B82F6', '10B981', 'F59E0B', 'EF4444', '8B5CF6', 'EC4899'];
  return `https://via.placeholder.com/300x400/${palette[index % palette.length]}/FFFFFF?text=Page+1`;
};
/**
* Mock documents data
* @type {Array<Object>}
*/
// NOTE: dates, sizes and URLs below are illustrative fixtures only;
// pdfUrl paths under /mock/ are not served by the real backend.
export const mockDocuments = [
  {
    id: 'doc-001',
    name: 'Contrat-2025.pdf',
    originalFileType: 'DOCX',
    createdAt: '2025-10-01T10:30:00Z',
    fileSize: 2048576, // 2 MB
    pageCount: 12,
    thumbnailUrl: generateThumbnailUrl(0),
    pdfUrl: '/mock/contrat-2025.pdf',
    tags: ['contrat', '2025'],
    categories: ['legal']
  },
  {
    id: 'doc-002',
    name: 'Facture-Janvier.pdf',
    originalFileType: 'XLSX',
    createdAt: '2025-09-15T14:20:00Z',
    fileSize: 512000, // 512 KB
    pageCount: 3,
    thumbnailUrl: generateThumbnailUrl(1),
    pdfUrl: '/mock/facture-janvier.pdf',
    tags: ['facture', 'comptabilité'],
    categories: ['finance']
  },
  {
    id: 'doc-003',
    name: 'Présentation-Projet.pdf',
    originalFileType: 'PPTX',
    createdAt: '2025-09-28T09:15:00Z',
    fileSize: 5242880, // 5 MB
    pageCount: 24,
    thumbnailUrl: generateThumbnailUrl(2),
    pdfUrl: '/mock/presentation-projet.pdf',
    tags: ['présentation', 'projet'],
    categories: ['marketing']
  },
  {
    id: 'doc-004',
    name: 'Photo-Identité.pdf',
    originalFileType: 'JPG',
    createdAt: '2025-10-05T16:45:00Z',
    fileSize: 204800, // 200 KB
    pageCount: 1,
    thumbnailUrl: generateThumbnailUrl(3),
    pdfUrl: '/mock/photo-identite.pdf',
    tags: ['photo', 'identité'],
    categories: ['personnel']
  },
  {
    id: 'doc-005',
    name: 'Manuel-Utilisateur.pdf',
    originalFileType: 'PDF',
    createdAt: '2025-09-20T11:00:00Z',
    fileSize: 3145728, // 3 MB
    pageCount: 45,
    thumbnailUrl: generateThumbnailUrl(4),
    pdfUrl: '/mock/manuel-utilisateur.pdf',
    tags: ['manuel', 'documentation'],
    categories: ['technique']
  },
  {
    id: 'doc-006',
    name: 'Rapport-Annuel.pdf',
    originalFileType: 'DOCX',
    createdAt: '2025-08-30T13:30:00Z',
    fileSize: 4194304, // 4 MB
    pageCount: 67,
    thumbnailUrl: generateThumbnailUrl(5),
    pdfUrl: '/mock/rapport-annuel.pdf',
    tags: ['rapport', 'annuel'],
    categories: ['finance', 'management']
  },
  {
    id: 'doc-007',
    name: 'CV-Candidat.pdf',
    originalFileType: 'DOCX',
    createdAt: '2025-10-02T08:00:00Z',
    fileSize: 153600, // 150 KB
    pageCount: 2,
    thumbnailUrl: generateThumbnailUrl(0),
    pdfUrl: '/mock/cv-candidat.pdf',
    tags: ['cv', 'recrutement'],
    categories: ['rh']
  },
  {
    id: 'doc-008',
    name: 'Devis-Travaux.pdf',
    originalFileType: 'XLSX',
    createdAt: '2025-09-25T15:20:00Z',
    fileSize: 409600, // 400 KB
    pageCount: 5,
    thumbnailUrl: generateThumbnailUrl(1),
    pdfUrl: '/mock/devis-travaux.pdf',
    tags: ['devis', 'travaux'],
    categories: ['finance']
  }
];
/**
* Available tags for documents
* @type {Array<string>}
*/
// Union of every tag used in mockDocuments, plus none extra.
export const availableTags = [
  'contrat',
  'facture',
  'présentation',
  'photo',
  'manuel',
  'rapport',
  'cv',
  'devis',
  'comptabilité',
  'projet',
  'identité',
  'documentation',
  'annuel',
  'recrutement',
  'travaux',
  '2025'
];
/**
* Available categories for documents
* @type {Array<string>}
*/
// Union of every category used in mockDocuments.
export const availableCategories = [
  'legal',
  'finance',
  'marketing',
  'personnel',
  'technique',
  'management',
  'rh'
];

View File

@@ -0,0 +1,15 @@
/** @type {import('tailwindcss').Config} */
export default {
  // Files scanned for class names; anything not matched here is purged.
  content: [
    "./index.html",
    "./src/**/*.{js,ts,jsx,tsx}",
  ],
  theme: {
    extend: {},
  },
  // DaisyUI component library; the themes below are user-selectable.
  plugins: [require("daisyui")],
  daisyui: {
    themes: ["light", "dark", "cupcake"],
    darkTheme: "dark",
  },
}

View File

@@ -1,7 +1,8 @@
import { defineConfig } from 'vite' import {defineConfig} from 'vite'
import tailwindcss from '@tailwindcss/vite'
import react from '@vitejs/plugin-react' import react from '@vitejs/plugin-react'
// https://vite.dev/config/ // https://vite.dev/config/
export default defineConfig({ export default defineConfig({
plugins: [react()], plugins: [tailwindcss(), react()],
}) })

View File

@@ -3,12 +3,26 @@ FROM python:3.12-slim
# Set working directory # Set working directory
WORKDIR /app WORKDIR /app
# Install libmagic
RUN apt-get update && apt-get install -y --no-install-recommends \
libmagic1 \
file \
pandoc \
ghostscript \
texlive-xetex \
&& rm -rf /var/lib/apt/lists/*
# Copy requirements and install dependencies # Copy requirements and install dependencies
COPY requirements.txt . COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt RUN pip install --no-cache-dir -r requirements.txt
# Change the user
USER 1002:1002
# Copy application code # Copy application code
COPY . . COPY . .
# Command will be overridden by docker-compose # Command will be overridden by docker-compose
CMD ["celery", "-A", "main", "worker", "--loglevel=info"] CMD ["celery", "-A", "main", "worker", "--loglevel=info"]

View File

@@ -1,4 +1,20 @@
asgiref==3.9.1
bcrypt==4.3.0
celery==5.5.3 celery==5.5.3
redis==6.4.0 email-validator==2.3.0
fastapi==0.116.1
httptools==0.6.4
motor==3.7.1
pikepdf==9.11.0
pillow==11.3.0
pydantic==2.11.9
PyJWT==2.10.1
pymongo==4.15.0 pymongo==4.15.0
PyMuPDF==1.26.4
pypandoc==1.15
python-multipart==0.0.20
redis==6.4.0
reportlab==4.4.4
uvicorn==0.35.0
python-magic==0.4.27
watchdog==6.0.0

View File

View File

@@ -0,0 +1,73 @@
import subprocess
from pathlib import Path
import magic # python-magic
class UnsupportedFileTypeError(Exception):
    """Raised by detect_file_type when a file's MIME type is not supported."""
    pass
def detect_file_type(file_path: str) -> str:
    """
    Detect the broad type of a file using libmagic (python-magic).

    Args:
        file_path: Path to the file to inspect.

    Returns:
        One of 'text', 'image', 'word', 'pdf' or 'powerpoint'.

    Raises:
        UnsupportedFileTypeError: If the MIME type is not supported.
    """
    mime = magic.from_file(file_path, mime=True)
    # BUG FIX: normalise the extension so ".JPG" behaves like ".jpg" in the
    # octet-stream fallback below.
    extension = Path(file_path).suffix.lower()
    if mime.startswith("text/"):
        return "text"
    elif mime.startswith("image/"):
        return "image"
    elif mime in ("application/vnd.openxmlformats-officedocument.wordprocessingml.document",):
        return "word"
    elif mime == "application/pdf":
        return "pdf"
    elif mime == "application/vnd.ms-powerpoint":
        return "powerpoint"
    elif mime == "application/octet-stream" and extension in (".jpg", ".jpeg", ".png", ".gif"):
        # Some images are reported as generic binary; trust the extension then.
        return "image"
    else:
        raise UnsupportedFileTypeError(f"Unsupported file type: {mime}")
def compress_pdf(input_pdf: str, output_pdf: str, quality: str = "ebook") -> None:
    """
    Compress a PDF using Ghostscript.

    Args:
        input_pdf (str): Path to the input PDF.
        output_pdf (str): Path to save the compressed PDF.
        quality (str): Ghostscript PDFSETTINGS option: screen, ebook, printer, prepress.

    Raises:
        FileNotFoundError: If input PDF does not exist.
        RuntimeError: If Ghostscript returns an error.
    """
    input_path = Path(input_pdf)
    output_path = Path(output_pdf)
    if not input_path.exists():
        raise FileNotFoundError(f"Input PDF not found: {input_pdf}")
    cmd = [
        "gs",
        "-sDEVICE=pdfwrite",
        "-dCompatibilityLevel=1.4",
        f"-dPDFSETTINGS=/{quality}",
        "-dNOPAUSE",
        "-dQUIET",
        "-dBATCH",
        f"-sOutputFile={output_path}",
        str(input_path),
    ]
    # Capture stderr so a failure reports Ghostscript's own diagnostics
    # rather than just an opaque return code.
    result = subprocess.run(cmd, capture_output=True, text=True)
    if result.returncode != 0:
        raise RuntimeError(
            f"Ghostscript failed with return code {result.returncode}: "
            f"{result.stderr.strip()}"
        )

View File

@@ -0,0 +1,64 @@
import hashlib
import logging
import os
from pathlib import Path
from app.config import settings
logger = logging.getLogger(__name__)
def get_file_hash(file_bytes: bytes) -> str:
    """
    Calculate the SHA256 hash of file content.

    Args:
        file_bytes: Raw file content as bytes.

    Returns:
        Hexadecimal SHA256 digest string.
    """
    digest = hashlib.sha256()
    digest.update(file_bytes)
    return digest.hexdigest()
def get_object_path(file_hash: str) -> str:
    """
    Build the on-disk path for a content-addressed object.

    Objects are sharded into a subdirectory named after the first 24 hex
    characters of the hash, which keeps directory sizes manageable.

    :param file_hash: SHA256 hex digest identifying the object.
    :return: Path under the configured objects folder.
    """
    root = settings.get_objects_folder()
    return os.path.join(root, file_hash[:24], file_hash)
def save_as_object(file_path, remove_on_success=True) -> str:
    """
    Read a file, hash its content and store it under the hash (content-addressed).

    :param file_path: Path of the source file to ingest.
    :param remove_on_success: When True, delete the source file once stored.
    :return: SHA256 hex digest of the file content.
    """
    logger.info(f"Saving file {file_path} as object")
    path = Path(file_path)
    as_bytes = path.read_bytes()
    file_hash = get_file_hash(as_bytes)
    logger.info(f"File hash: {file_hash}")
    object_path = get_object_path(file_hash)
    if os.path.exists(object_path):
        # Content already stored: de-duplicated by hash.
        # NOTE(review): the source file is intentionally NOT removed in this
        # branch — confirm that is the desired behaviour.
        logger.info(f"Object already exists: {object_path}")
        return file_hash
    # exist_ok=True avoids a crash when two workers create the shard
    # directory concurrently (the previous exists()/makedirs() pair was racy).
    os.makedirs(os.path.dirname(object_path), exist_ok=True)
    logger.info(f"Saving object to: {object_path}")
    with open(object_path, "wb") as f:
        f.write(as_bytes)
    if remove_on_success:
        logger.info(f"Removing file: {file_path}")
        path.unlink()
    return file_hash

View File

@@ -6,82 +6,28 @@ and update processing job statuses throughout the task lifecycle.
""" """
import logging import logging
import os
from typing import Any, Dict from typing import Any, Dict
from tasks.main import app as celery_app from app.config import settings
from app.database.connection import get_database
from app.models.job import ProcessingStatus
from app.services.document_service import DocumentService, DocumentAlreadyExists
from app.services.job_service import JobService
from tasks.main import celery_app
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
def get_services():
database = get_database()
document_service = DocumentService(database=database, objects_folder=settings.get_objects_folder())
job_service = JobService(database=database)
return document_service, job_service
# @celery_app.task(bind=True, autoretry_for=(Exception,), retry_kwargs={'max_retries': 3, 'countdown': 60}) # @celery_app.task(bind=True, autoretry_for=(Exception,), retry_kwargs={'max_retries': 3, 'countdown': 60})
# def process_document(self, document_service, job_service, filepath: str) -> Dict[str, Any]: @celery_app.task(bind=True)
# """
# Process a document file and extract its content.
#
# This task:
# 1. Updates the processing job status to PROCESSING
# 2. Performs document content extraction
# 3. Updates job status to COMPLETED or FAILED based on result
#
# Args:
# self : Celery task instance
# job_service : Instance of JobService
# document_service : Instance of DocumentService
# filepath: Full path to the document file to process
#
# Returns:
# Dictionary containing processing results
#
# Raises:
# Exception: Any processing error (will trigger retry)
# """
# task_id = self.request.id
# logger.info(f"Starting document processing task {task_id} for file: {filepath}")
#
# try:
# # Step 1: Mark job as started
# await job_service.mark_job_as_started(task_id=task_id)
# logger.info(f"Job {task_id} marked as PROCESSING")
#
# # Step 2: Process the document (extract content, OCR, etc.)
# document = await self.document_service.create_document(filepath)
# logger.info(f"Created document record with ID: {document.id}")
#
# result = document_service.extract_document_content(filepath)
# logger.info(f"Document content extracted successfully for task {task_id}")
#
# # Step 3: Mark job as completed
# await job_service.mark_job_as_completed(task_id=task_id)
# logger.info(f"Job {task_id} marked as COMPLETED")
#
# return {
# "task_id": task_id,
# "filepath": filepath,
# "status": "completed",
# "content_length": len(result.get("content", "")),
# "extraction_method": result.get("extraction_method"),
# "processing_time": result.get("processing_time")
# }
#
# except Exception as e:
# error_message = f"Document processing failed: {str(e)}"
# logger.error(f"Task {task_id} failed: {error_message}")
#
# try:
# # Mark job as failed
# job_service.mark_job_as_failed(task_id=task_id, error_message=error_message)
# logger.info(f"Job {task_id} marked as FAILED")
# except Exception as job_error:
# logger.error(f"Failed to update job status for task {task_id}: {str(job_error)}")
#
# # Re-raise the exception to trigger Celery retry mechanism
# raise
@celery_app.task(name="tasks.document_processing.process_document",
bind=True,
autoretry_for=(Exception,),
retry_kwargs={'max_retries': 3, 'countdown': 60})
def process_document(self, filepath: str) -> Dict[str, Any]: def process_document(self, filepath: str) -> Dict[str, Any]:
""" """
Process a document file and extract its content. Process a document file and extract its content.
@@ -93,8 +39,6 @@ def process_document(self, filepath: str) -> Dict[str, Any]:
Args: Args:
self : Celery task instance self : Celery task instance
job_service : Instance of JobService
document_service : Instance of DocumentService
filepath: Full path to the document file to process filepath: Full path to the document file to process
Returns: Returns:
@@ -104,76 +48,72 @@ def process_document(self, filepath: str) -> Dict[str, Any]:
Exception: Any processing error (will trigger retry) Exception: Any processing error (will trigger retry)
""" """
task_id = self.request.id task_id = self.request.id
logger.info(f"Starting document processing task {task_id} for file: {filepath}") logger.info(f'Task {task_id} : Starting document processing for file: "{filepath}"')
# get services
document_service, job_service = get_services()
@celery_app.task(bind=True) job = None
def cleanup_old_processing_jobs(self, days_old: int = 30) -> Dict[str, Any]: document = None
"""
Clean up old processing jobs from the database.
This maintenance task removes completed and failed jobs older than
the specified number of days.
Args:
days_old: Number of days after which to clean up jobs
Returns:
Dictionary containing cleanup statistics
"""
task_id = self.request.id
logger.info(f"Starting cleanup task {task_id} for jobs older than {days_old} days")
job_service = JobService()
try: try:
# Perform cleanup # Step 1: Create the document and a new job record for the document
cleanup_result = job_service.cleanup_old_jobs(days_old=days_old) document = document_service.create_document(filepath)
job = job_service.create_job(task_id=task_id, document_id=document.id)
job_service.mark_job_as_started(job_id=job.id)
logger.info(f'Task {task_id} : Created document "{document.id}". Started job "{job.id}"')
logger.info( logger.info(f"Task {task_id} : Creating associated PDF")
f"Cleanup task {task_id} completed: " job_service.update_job_status(job_id=job.id, status=ProcessingStatus.SAVING_PDF)
f"deleted {cleanup_result['deleted_count']} jobs" document_service.create_pdf(document.id)
)
logger.info(f"Task {task_id} : Creating thumbnail")
job_service.update_job_status(job_id=job.id, status=ProcessingStatus.CREATING_THUMBNAIL)
document_service.create_thumbnail(document.id)
# remove the file from the watch folder
os.remove(filepath)
# Step x: Mark job as completed
job_service.mark_job_as_completed(job_id=job.id)
logger.info(f"Task {task_id} marked as COMPLETED")
return { return {
"task_id": task_id, "task_id": task_id,
"filepath": filepath,
"status": "completed", "status": "completed",
"deleted_count": cleanup_result["deleted_count"],
"days_old": days_old
} }
except Exception as e: except DocumentAlreadyExists as e:
error_message = f"Cleanup task failed: {str(e)}" logger.info(f"Task {task_id} completed: {str(e)}")
logger.error(f"Cleanup task {task_id} failed: {error_message}") if job is not None:
raise job_service.mark_job_as_completed(job_id=job.id)
logger.info(f"Job {task_id} marked as COMPLETED")
@celery_app.task(bind=True)
def get_processing_statistics(self) -> Dict[str, Any]:
"""
Generate processing statistics for monitoring.
Returns:
Dictionary containing current processing statistics
"""
task_id = self.request.id
logger.info(f"Generating processing statistics for task {task_id}")
job_service = JobService()
try:
stats = job_service.get_processing_statistics()
logger.info(f"Statistics generated for task {task_id}")
return { return {
"task_id": task_id, "task_id": task_id,
"filepath": filepath,
"status": "completed", "status": "completed",
"statistics": stats, "message": str(e),
"timestamp": stats.get("generated_at")
} }
except Exception as e: except Exception as e:
error_message = f"Statistics generation failed: {str(e)}" error_message = f"Document processing failed: {str(e)}"
logger.error(f"Statistics task {task_id} failed: {error_message}") logger.error(f"Task {task_id} failed: {error_message}")
try:
# Mark job as failed
if job is not None:
job_service.mark_job_as_failed(job_id=job.id, error_message=error_message)
logger.info(f"Job {task_id} marked as FAILED")
else:
logger.error(f"Failed to process {filepath}. error = {str(e)}")
if document is not None:
document_service.move_to_errors(document.id, filepath)
logger.info(f"Moved file {filepath} to errors/{document.id}")
except Exception as job_error:
logger.error(f"Failed to update job status for task {task_id}: {str(job_error)}")
# Re-raise the exception to trigger Celery retry mechanism
raise raise

View File

@@ -3,25 +3,31 @@ Celery worker for MyDocManager document processing tasks.
This module contains all Celery tasks for processing documents. This module contains all Celery tasks for processing documents.
""" """
import logging
import os import os
import time
from celery import Celery from celery import Celery
from celery.signals import worker_process_init
from app.config import settings
# Environment variables # Environment variables
REDIS_URL = os.getenv("REDIS_URL", "redis://localhost:6379/0") REDIS_URL = settings.get_redis_url()
MONGODB_URL = os.getenv("MONGODB_URL", "mongodb://localhost:27017") MONGODB_URL = settings.get_mongodb_url()
logger = logging.getLogger(__name__)
# Initialize Celery app # Initialize Celery app
app = Celery( celery_app = Celery(
"mydocmanager_worker", "mydocmanager_worker",
broker=REDIS_URL, broker=REDIS_URL,
backend=REDIS_URL backend=REDIS_URL,
) )
celery_app.autodiscover_tasks(["tasks.document_processing"])
# Celery configuration # Celery configuration
app.conf.update( celery_app.conf.update(
task_serializer="json", task_serializer="json",
accept_content=["json"], accept_content=["json"],
result_serializer="json", result_serializer="json",
@@ -33,82 +39,15 @@ app.conf.update(
) )
@app.task(bind=True) def global_init(**kwargs):
def test_task(self, message: str): """Initialize global variables."""
""" logger.info(f"{'*' * 45}")
Test task for validating worker functionality. logger.info(f"{'--' * 5}" + " Starting MyDocManager worker " + f"{'--' * 5}")
logger.info(f"{'*' * 45}")
Args:
message: Test message to process
Returns:
dict: Task result with processing information
"""
try:
print(f"[WORKER] Starting test task with message: {message}")
# Simulate some work
for i in range(5):
print(f"[WORKER] Processing step {i + 1}/5...")
time.sleep(1)
# Update task progress
self.update_state(
state="PROGRESS",
meta={
"current": i + 1,
"total": 5,
"message": f"Processing step {i + 1}"
}
)
result = {
"status": "completed",
"message": f"Successfully processed: {message}",
"processed_at": time.time(),
"worker_id": self.request.id
}
print(f"[WORKER] Test task completed successfully: {result}")
return result
except Exception as exc:
print(f"[WORKER] Test task failed: {str(exc)}")
raise self.retry(exc=exc, countdown=60, max_retries=3)
@app.task(bind=True) global_init()
def process_document_task(self, file_path: str):
"""
Placeholder task for document processing.
Args:
file_path: Path to the document to process
Returns:
dict: Processing result
"""
try:
print(f"[WORKER] Starting document processing for: {file_path}")
# Placeholder for document processing logic
time.sleep(2) # Simulate processing time
result = {
"status": "completed",
"file_path": file_path,
"processed_at": time.time(),
"content": f"Placeholder content for {file_path}",
"worker_id": self.request.id
}
print(f"[WORKER] Document processing completed: {file_path}")
return result
except Exception as exc:
print(f"[WORKER] Document processing failed for {file_path}: {str(exc)}")
raise self.retry(exc=exc, countdown=60, max_retries=3)
if __name__ == "__main__": if __name__ == "__main__":
app.start() global_init()
celery_app.start()

0
tests/api/__init__.py Normal file
View File

View File

@@ -0,0 +1,149 @@
from datetime import datetime
from unittest.mock import MagicMock
import pytest
from fastapi import status, HTTPException
from fastapi.testclient import TestClient
from mongomock.mongo_client import MongoClient
from app.api.dependencies import get_auth_service, get_user_service, get_current_user
from app.main import app # Assuming you have FastAPI app defined in app/main.py
from app.models.auth import UserRole
from app.models.types import PyObjectId
from app.models.user import UserInDB
from app.services.auth_service import AuthService
from app.services.user_service import UserService
@pytest.fixture
def client():
    """Fresh TestClient bound to the application under test."""
    test_client = TestClient(app)
    return test_client
@pytest.fixture
def fake_user():
    """Active regular user record as stored in the DB (password pre-hashed)."""
    # NOTE(review): `_id` is assumed to be aliased to `id` by UserInDB
    # (Mongo-style primary key) — confirm against the model definition.
    return UserInDB(
        _id=PyObjectId(),
        username="testuser",
        email="test@example.com",
        role=UserRole.USER,
        is_active=True,
        hashed_password="hashed-secret",
        created_at=datetime(2025, 1, 1),
        updated_at=datetime(2025, 1, 2),
    )
def override_auth_service():
    """Auth-service stub that accepts any password and issues a fixed token."""
    stub = MagicMock(spec=AuthService)
    stub.verify_user_password.return_value = True
    stub.create_access_token.return_value = "fake-jwt-token"
    return stub
def override_user_service(fake_user):
    """User-service stub whose username lookup always finds *fake_user*."""
    stub = MagicMock(spec=UserService)
    stub.get_user_by_username.return_value = fake_user
    return stub
def override_get_current_user(fake_user):
    """Dependency override returning *fake_user* as the authenticated user."""
    return lambda: fake_user
def override_get_database():
    """Dependency override producing an in-memory mongomock database."""
    def _override():
        return MongoClient().test_database
    return _override
# ---------------------- TESTS FOR /auth/login ----------------------
class TestLogin:
    """Endpoint tests for POST /auth/login with mocked auth/user services."""

    # NOTE(review): dependency_overrides installed below are never cleared
    # after each test, so they leak across tests on the shared app instance —
    # consider resetting them in fixture teardown.

    def test_i_can_login_with_valid_credentials(self, client, fake_user):
        auth_service = override_auth_service()
        user_service = override_user_service(fake_user)
        client.app.dependency_overrides[get_auth_service] = lambda: auth_service
        client.app.dependency_overrides[get_user_service] = lambda: user_service
        # OAuth2PasswordRequestForm reads credentials from form data.
        response = client.post(
            "/auth/login",
            data={"username": "testuser", "password": "secret"},
        )
        assert response.status_code == status.HTTP_200_OK
        data = response.json()
        assert "access_token" in data
        assert data["user"]["username"] == "testuser"

    def test_i_cannot_login_with_invalid_username(self, client):
        auth_service = override_auth_service()
        # Unknown username: the user lookup returns None.
        user_service = MagicMock(spec=UserService)
        user_service.get_user_by_username.return_value = None
        client.app.dependency_overrides[get_auth_service] = lambda: auth_service
        client.app.dependency_overrides[get_user_service] = lambda: user_service
        response = client.post(
            "/auth/login",
            data={"username": "unknown", "password": "secret"},
        )
        assert response.status_code == status.HTTP_401_UNAUTHORIZED

    def test_i_cannot_login_with_inactive_user(self, client, fake_user):
        # Deactivate the otherwise valid user before attempting login.
        fake_user.is_active = False
        auth_service = override_auth_service()
        user_service = override_user_service(fake_user)
        client.app.dependency_overrides[get_auth_service] = lambda: auth_service
        client.app.dependency_overrides[get_user_service] = lambda: user_service
        response = client.post(
            "/auth/login",
            data={"username": "testuser", "password": "secret"},
        )
        assert response.status_code == status.HTTP_401_UNAUTHORIZED

    def test_i_cannot_login_with_wrong_password(self, client, fake_user):
        auth_service = override_auth_service()
        # Password verification fails while the user itself exists.
        auth_service.verify_user_password.return_value = False
        user_service = override_user_service(fake_user)
        client.app.dependency_overrides[get_auth_service] = lambda: auth_service
        client.app.dependency_overrides[get_user_service] = lambda: user_service
        response = client.post(
            "/auth/login",
            data={"username": "testuser", "password": "wrong"},
        )
        assert response.status_code == status.HTTP_401_UNAUTHORIZED
# ---------------------- TESTS FOR /auth/me ----------------------
class TesteMe:
    # NOTE(review): class name looks like a typo for "TestMe"; pytest still
    # collects it because of the "Test" prefix, so renaming is safe but
    # should be coordinated.
    """Endpoint tests for GET /auth/me."""

    def test_i_can_get_current_user_profile(self, client, fake_user):
        client.app.dependency_overrides[get_current_user] = override_get_current_user(fake_user)
        response = client.get("/auth/me")
        assert response.status_code == status.HTTP_200_OK
        data = response.json()
        assert data["username"] == fake_user.username
        assert data["email"] == fake_user.email

    def test_i_cannot_get_profile_without_authentication(self, client, monkeypatch):
        # The override itself raises, simulating missing/invalid credentials.
        # NOTE(review): the `monkeypatch` fixture is unused here.
        def raise_http_exception():
            raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED)
        client.app.dependency_overrides[get_current_user] = raise_http_exception
        response = client.get("/auth/me")
        assert response.status_code == status.HTTP_401_UNAUTHORIZED

167
tests/api/test_users.py Normal file
View File

@@ -0,0 +1,167 @@
# File: tests/api/test_users.py
from datetime import datetime
from unittest.mock import MagicMock
import pytest
from fastapi import status
from fastapi.testclient import TestClient
from app.api.dependencies import get_admin_user, get_user_service
from app.main import app
from app.models.auth import UserRole
from app.models.types import PyObjectId
from app.models.user import UserInDB, UserCreate
from app.services.user_service import UserService
# -----------------------
# Fixtures
# -----------------------
@pytest.fixture
def fake_user_admin():
    """Active admin user record (password pre-hashed)."""
    return UserInDB(
        _id=PyObjectId(),
        username="admin",
        email="admin@example.com",
        role=UserRole.ADMIN,
        is_active=True,
        hashed_password="hashed-secret",
        created_at=datetime(2025, 1, 1),
        updated_at=datetime(2025, 1, 2),
    )
@pytest.fixture
def fake_user_response():
    """Active regular user record used as the service-layer return value."""
    return UserInDB(
        _id=PyObjectId(),
        username="other",
        email="other@example.com",
        role=UserRole.USER,
        is_active=True,
        hashed_password="hashed-secret-2",
        created_at=datetime(2025, 1, 1),
        updated_at=datetime(2025, 1, 2),
    )
@pytest.fixture
def client(fake_user_admin):
    """TestClient with admin auth and a mocked user service wired in."""
    # Fake admin dependency
    def get_admin_user_override():
        return fake_user_admin
    # Fake user service
    user_service_mock = MagicMock(spec=UserService)
    def get_user_service_override():
        return user_service_mock
    client = TestClient(app)
    # NOTE(review): assigning a new dict wipes any overrides installed by
    # other test modules on the shared app and is never restored afterwards.
    client.app.dependency_overrides = {
        get_admin_user: get_admin_user_override,
        get_user_service: get_user_service_override
    }
    # Expose the mock so each test can program its return values.
    client.user_service_mock = user_service_mock
    return client
# -----------------------
# Tests
# -----------------------
class TestListUsers:
    """Endpoint tests for GET /users."""

    def test_i_can_list_users(self, client, fake_user_admin, fake_user_response):
        client.user_service_mock.list_users.return_value = [fake_user_admin, fake_user_response]
        resp = client.get("/users")
        assert resp.status_code == status.HTTP_200_OK
        payload = resp.json()
        assert len(payload) == 2
        assert payload[0]["username"] == "admin"

    def test_i_can_list_users_when_empty(self, client):
        client.user_service_mock.list_users.return_value = []
        resp = client.get("/users")
        assert resp.status_code == status.HTTP_200_OK
        assert resp.json() == []
class TestGetUserById:
    """Endpoint tests for GET /users/{id}."""

    def test_i_can_get_user_by_id(self, client, fake_user_response):
        client.user_service_mock.get_user_by_id.return_value = fake_user_response
        resp = client.get(f"/users/{fake_user_response.id}")
        assert resp.status_code == status.HTTP_200_OK
        assert resp.json()["username"] == fake_user_response.username

    def test_i_cannot_get_user_by_id_not_found(self, client):
        client.user_service_mock.get_user_by_id.return_value = None
        resp = client.get("/users/64f0c9f4b0d1c8b7b8e1f0a2")
        assert resp.status_code == status.HTTP_404_NOT_FOUND
        assert resp.json()["detail"] == "User not found"
class TestCreateUser:
    """Endpoint tests for POST /users."""

    def test_i_can_create_user(self, client, fake_user_response):
        new_user = UserCreate(
            username="newuser",
            email="new@example.com",
            password="#Passw0rd!",
            role=UserRole.USER,
        )
        client.user_service_mock.create_user.return_value = fake_user_response
        resp = client.post("/users", json=new_user.model_dump(mode="json"))
        assert resp.status_code == status.HTTP_201_CREATED
        assert resp.json()["username"] == fake_user_response.username

    def test_i_cannot_create_user_when_service_raises_value_error(self, client):
        bad_payload = {"username": "baduser", "email": "bad@example.com", "role": "user", "password": "password"}
        client.user_service_mock.create_user.side_effect = ValueError("Invalid data")
        resp = client.post("/users", json=bad_payload)
        assert resp.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY
class TestUpdateUser:
    """Endpoint tests for PUT /users/{id}."""

    def test_i_can_update_user(self, client, fake_user_response):
        changes = {"username": "updateduser", "email": "updated@example.com"}
        client.user_service_mock.update_user.return_value = fake_user_response
        resp = client.put(f"/users/{fake_user_response.id}", json=changes)
        assert resp.status_code == status.HTTP_200_OK
        assert resp.json()["username"] == fake_user_response.username

    def test_i_cannot_update_user_not_found(self, client):
        client.user_service_mock.update_user.return_value = None
        resp = client.put("/users/64f0c9f4b0d1c8b7b8e1f0a2", json={"username": "updateduser"})
        assert resp.status_code == status.HTTP_404_NOT_FOUND
        assert resp.json()["detail"] == "User not found"

    def test_i_cannot_update_user_when_service_raises_value_error(self, client):
        client.user_service_mock.update_user.side_effect = ValueError("Invalid update")
        resp = client.put("/users/64f0c9f4b0d1c8b7b8e1f0a2", json={"username": "badupdate"})
        assert resp.status_code == status.HTTP_400_BAD_REQUEST
        assert resp.json()["detail"] == "Invalid update"
class TestDeleteUser:
    """Tests for the DELETE /users/{id} endpoint."""

    def test_i_can_delete_user(self, client):
        # Service confirms deletion; route should reply 200 with a message.
        client.user_service_mock.delete_user.return_value = True

        response = client.delete("/users/64f0c9f4b0d1c8b7b8e1f0a1")

        assert response.status_code == status.HTTP_200_OK
        body = response.json()
        assert body["message"] == "User successfully deleted"

    def test_i_cannot_delete_user_not_found(self, client):
        # Deleting an unknown id (service returns False) must yield 404.
        client.user_service_mock.delete_user.return_value = False

        response = client.delete("/users/64f0c9f4b0d1c8b7b8e1f0a2")

        assert response.status_code == status.HTTP_404_NOT_FOUND
        assert response.json()["detail"] == "User not found"

0
tests/common/__init__.py Normal file
View File

View File

@@ -0,0 +1,52 @@
import shutil
import tempfile
from pathlib import Path
import pytest
from tasks.common.converter_utils import detect_file_type, UnsupportedFileTypeError
@pytest.fixture
def temp_dir():
    """Yield a temporary directory path for output files, removed afterwards.

    Uses ``tempfile.TemporaryDirectory`` instead of a manual
    ``mkdtemp``/``rmtree`` pair so cleanup is handled by the context
    manager even if fixture teardown is interrupted.
    """
    with tempfile.TemporaryDirectory() as dir_path:
        yield dir_path
def test_i_can_detect_text_file(temp_dir):
    """A plain .txt file is classified as "text"."""
    sample = Path(temp_dir) / "sample.txt"
    sample.write_text("Sample text content")

    assert detect_file_type(str(sample)) == "text"
def test_i_can_detect_image_file(temp_dir):
    """A JPEG written by Pillow is classified as "image"."""
    from PIL import Image

    img_path = Path(temp_dir) / "sample.jpg"
    Image.new("RGB", (50, 50), color="blue").save(img_path)

    assert detect_file_type(str(img_path)) == "image"
def test_i_can_detect_word_file(temp_dir):
    """A .docx produced via python-docx is classified as "word"."""
    import docx

    doc_path = Path(temp_dir) / "sample.docx"
    document = docx.Document()
    document.add_paragraph("Sample content")
    document.save(doc_path)

    assert detect_file_type(str(doc_path)) == "word"
def test_i_cannot_detect_unsupported_file(temp_dir):
    """A Windows executable (MZ magic bytes) raises UnsupportedFileTypeError."""
    exe_path = Path(temp_dir) / "sample.exe"
    exe_path.write_bytes(b'\x4D\x5A\x90\x00\x03\x00\x00\x00')

    with pytest.raises(UnsupportedFileTypeError):
        detect_file_type(str(exe_path))

View File

@@ -10,8 +10,8 @@ from pydantic import ValidationError
from datetime import datetime from datetime import datetime
from bson import ObjectId from bson import ObjectId
from app.models.user import UserCreate, UserUpdate, UserInDB, UserResponse from app.models.user import UserCreate, UserUpdate, UserInDB
from app.models.auth import UserRole from app.models.auth import UserRole, UserResponse
class TestUserCreateModel: class TestUserCreateModel:
@@ -349,7 +349,7 @@ class TestUserResponseModel:
# Convert to response model (excluding password_hash) # Convert to response model (excluding password_hash)
user_response = UserResponse( user_response = UserResponse(
id=user_in_db.id, _id=user_in_db.id,
username=user_in_db.username, username=user_in_db.username,
email=user_in_db.email, email=user_in_db.email,
role=user_in_db.role, role=user_in_db.role,

View File

@@ -1,18 +1,16 @@
""" """
Test suite for FileDocumentRepository with async/await support. Test suite for FileDocumentRepository with async/support.
This module contains comprehensive tests for all FileDocumentRepository methods This module contains comprehensive tests for all FileDocumentRepository methods
using mongomock-motor for in-memory MongoDB testing. using mongomock-motor for in-memory MongoDB testing.
""" """
import pytest
from datetime import datetime from datetime import datetime
from typing import Dict, Any
import pytest_asyncio import pytest
from bson import ObjectId from bson import ObjectId
from pymongo.errors import DuplicateKeyError, PyMongoError from mongomock.mongo_client import MongoClient
from mongomock_motor import AsyncMongoMockClient from pymongo.errors import PyMongoError
from app.database.repositories.document_repository import ( from app.database.repositories.document_repository import (
FileDocumentRepository, FileDocumentRepository,
@@ -23,13 +21,13 @@ from app.database.repositories.document_repository import (
from app.models.document import FileDocument, FileType, ExtractionMethod from app.models.document import FileDocument, FileType, ExtractionMethod
@pytest_asyncio.fixture @pytest.fixture
async def in_memory_repository(): def in_memory_repository():
"""Create an in-memory FileDocumentRepository for testing.""" """Create an in-memory FileDocumentRepository for testing."""
client = AsyncMongoMockClient() client = MongoClient()
db = client.test_database db = client.test_database
repo = FileDocumentRepository(db) repo = FileDocumentRepository(db)
await repo.initialize() repo.initialize()
return repo return repo
@@ -107,14 +105,13 @@ def multiple_sample_files():
class TestFileDocumentRepositoryInitialization: class TestFileDocumentRepositoryInitialization:
"""Tests for repository initialization.""" """Tests for repository initialization."""
@pytest.mark.asyncio def test_i_can_initialize_repository(self):
async def test_i_can_initialize_repository(self):
"""Test repository initialization.""" """Test repository initialization."""
# Arrange # Arrange
client = AsyncMongoMockClient() client = MongoClient()
db = client.test_database db = client.test_database
repo = FileDocumentRepository(db) repo = FileDocumentRepository(db)
await repo.initialize() repo.initialize()
# Act & Assert (should not raise any exception) # Act & Assert (should not raise any exception)
assert repo.db is not None assert repo.db is not None
@@ -125,11 +122,10 @@ class TestFileDocumentRepositoryInitialization:
class TestFileDocumentRepositoryCreation: class TestFileDocumentRepositoryCreation:
"""Tests for file document creation functionality.""" """Tests for file document creation functionality."""
@pytest.mark.asyncio def test_i_can_create_file_document(self, in_memory_repository, sample_file_document):
async def test_i_can_create_file_document(self, in_memory_repository, sample_file_document):
"""Test successful file document creation.""" """Test successful file document creation."""
# Act # Act
created_file = await in_memory_repository.create_document(sample_file_document) created_file = in_memory_repository.create_document(sample_file_document)
# Assert # Assert
assert created_file is not None assert created_file is not None
@@ -144,46 +140,20 @@ class TestFileDocumentRepositoryCreation:
assert created_file.id is not None assert created_file.id is not None
assert isinstance(created_file.id, ObjectId) assert isinstance(created_file.id, ObjectId)
@pytest.mark.asyncio def test_i_can_create_file_document_without_id(self, in_memory_repository, sample_file_document):
async def test_i_can_create_file_document_without_id(self, in_memory_repository, sample_file_document):
"""Test creating file document with _id set to None (should be removed).""" """Test creating file document with _id set to None (should be removed)."""
# Arrange # Arrange
sample_file_document.id = None sample_file_document.id = None
# Act # Act
created_file = await in_memory_repository.create_document(sample_file_document) created_file = in_memory_repository.create_document(sample_file_document)
# Assert # Assert
assert created_file is not None assert created_file is not None
assert created_file.id is not None assert created_file.id is not None
assert isinstance(created_file.id, ObjectId) assert isinstance(created_file.id, ObjectId)
@pytest.mark.asyncio def test_i_cannot_create_file_document_with_pymongo_error(self, in_memory_repository,
async def test_i_cannot_create_duplicate_file_document(self, in_memory_repository, sample_file_document):
"""Test that creating file document with duplicate filepath raises DuplicateKeyError."""
# Arrange
await in_memory_repository.create_document(sample_file_document)
duplicate_file = FileDocument(
filename="different_name.pdf",
filepath=sample_file_document.filepath, # Same filepath
file_type=FileType.PDF,
extraction_method=ExtractionMethod.OCR,
metadata={"different": "metadata"},
detected_at=datetime.now(),
file_hash="different_hash_123456789012345678901234567890123456789012345678",
encoding="utf-8",
file_size=2000,
mime_type="application/pdf"
)
# Act & Assert
with pytest.raises(DuplicateKeyError) as exc_info:
await in_memory_repository.create_document(duplicate_file)
assert "already exists" in str(exc_info.value)
@pytest.mark.asyncio
async def test_i_cannot_create_file_document_with_pymongo_error(self, in_memory_repository,
sample_file_document, mocker): sample_file_document, mocker):
"""Test handling of PyMongo errors during file document creation.""" """Test handling of PyMongo errors during file document creation."""
# Arrange # Arrange
@@ -191,7 +161,7 @@ class TestFileDocumentRepositoryCreation:
# Act & Assert # Act & Assert
with pytest.raises(ValueError) as exc_info: with pytest.raises(ValueError) as exc_info:
await in_memory_repository.create_document(sample_file_document) in_memory_repository.create_document(sample_file_document)
assert "Failed to create file document" in str(exc_info.value) assert "Failed to create file document" in str(exc_info.value)
@@ -199,14 +169,13 @@ class TestFileDocumentRepositoryCreation:
class TestFileDocumentRepositoryFinding: class TestFileDocumentRepositoryFinding:
"""Tests for file document finding functionality.""" """Tests for file document finding functionality."""
@pytest.mark.asyncio def test_i_can_find_document_by_valid_id(self, in_memory_repository, sample_file_document):
async def test_i_can_find_document_by_valid_id(self, in_memory_repository, sample_file_document):
"""Test finding file document by valid ObjectId.""" """Test finding file document by valid ObjectId."""
# Arrange # Arrange
created_file = await in_memory_repository.create_document(sample_file_document) created_file = in_memory_repository.create_document(sample_file_document)
# Act # Act
found_file = await in_memory_repository.find_document_by_id(str(created_file.id)) found_file = in_memory_repository.find_document_by_id(str(created_file.id))
# Assert # Assert
assert found_file is not None assert found_file is not None
@@ -214,81 +183,74 @@ class TestFileDocumentRepositoryFinding:
assert found_file.filename == created_file.filename assert found_file.filename == created_file.filename
assert found_file.filepath == created_file.filepath assert found_file.filepath == created_file.filepath
@pytest.mark.asyncio def test_i_cannot_find_document_with_invalid_id(self, in_memory_repository):
async def test_i_cannot_find_document_with_invalid_id(self, in_memory_repository):
"""Test that invalid ObjectId returns None.""" """Test that invalid ObjectId returns None."""
# Act # Act
found_file = await in_memory_repository.find_document_by_id("invalid_id") found_file = in_memory_repository.find_document_by_id("invalid_id")
# Assert # Assert
assert found_file is None assert found_file is None
@pytest.mark.asyncio def test_i_cannot_find_document_by_nonexistent_id(self, in_memory_repository):
async def test_i_cannot_find_document_by_nonexistent_id(self, in_memory_repository):
"""Test that nonexistent but valid ObjectId returns None.""" """Test that nonexistent but valid ObjectId returns None."""
# Arrange # Arrange
nonexistent_id = str(ObjectId()) nonexistent_id = str(ObjectId())
# Act # Act
found_file = await in_memory_repository.find_document_by_id(nonexistent_id) found_file = in_memory_repository.find_document_by_id(nonexistent_id)
# Assert # Assert
assert found_file is None assert found_file is None
@pytest.mark.asyncio def test_i_can_find_document_by_file_hash(self, in_memory_repository, sample_file_document):
async def test_i_can_find_document_by_file_hash(self, in_memory_repository, sample_file_document):
"""Test finding file document by file hash.""" """Test finding file document by file hash."""
# Arrange # Arrange
created_file = await in_memory_repository.create_document(sample_file_document) created_file = in_memory_repository.create_document(sample_file_document)
# Act # Act
found_file = await in_memory_repository.find_document_by_hash(sample_file_document.file_hash) found_file = in_memory_repository.find_document_by_hash(sample_file_document.file_hash)
# Assert # Assert
assert found_file is not None assert found_file is not None
assert found_file.file_hash == created_file.file_hash assert found_file.file_hash == created_file.file_hash
assert found_file.id == created_file.id assert found_file.id == created_file.id
@pytest.mark.asyncio def test_i_cannot_find_document_with_nonexistent_file_hash(self, in_memory_repository):
async def test_i_cannot_find_document_with_nonexistent_file_hash(self, in_memory_repository):
"""Test that nonexistent file hash returns None.""" """Test that nonexistent file hash returns None."""
# Act # Act
found_file = await in_memory_repository.find_document_by_hash("nonexistent_hash") found_file = in_memory_repository.find_document_by_hash("nonexistent_hash")
# Assert # Assert
assert found_file is None assert found_file is None
@pytest.mark.asyncio def test_i_can_find_document_by_filepath(self, in_memory_repository, sample_file_document):
async def test_i_can_find_document_by_filepath(self, in_memory_repository, sample_file_document):
"""Test finding file document by filepath.""" """Test finding file document by filepath."""
# Arrange # Arrange
created_file = await in_memory_repository.create_document(sample_file_document) created_file = in_memory_repository.create_document(sample_file_document)
# Act # Act
found_file = await in_memory_repository.find_document_by_filepath(sample_file_document.filepath) found_file = in_memory_repository.find_document_by_filepath(sample_file_document.filepath)
# Assert # Assert
assert found_file is not None assert found_file is not None
assert found_file.filepath == created_file.filepath assert found_file.filepath == created_file.filepath
assert found_file.id == created_file.id assert found_file.id == created_file.id
@pytest.mark.asyncio def test_i_cannot_find_document_with_nonexistent_filepath(self, in_memory_repository):
async def test_i_cannot_find_document_with_nonexistent_filepath(self, in_memory_repository):
"""Test that nonexistent filepath returns None.""" """Test that nonexistent filepath returns None."""
# Act # Act
found_file = await in_memory_repository.find_document_by_filepath("/nonexistent/path/file.pdf") found_file = in_memory_repository.find_document_by_filepath("/nonexistent/path/file.pdf")
# Assert # Assert
assert found_file is None assert found_file is None
@pytest.mark.asyncio def test_i_cannot_find_document_with_pymongo_error(self, in_memory_repository, mocker):
async def test_i_cannot_find_document_with_pymongo_error(self, in_memory_repository, mocker):
"""Test handling of PyMongo errors during file document finding.""" """Test handling of PyMongo errors during file document finding."""
# Arrange # Arrange
mocker.patch.object(in_memory_repository.collection, 'find_one', side_effect=PyMongoError("Database error")) mocker.patch.object(in_memory_repository.collection, 'find_one', side_effect=PyMongoError("Database error"))
# Act # Act
found_file = await in_memory_repository.find_document_by_hash("test_hash") found_file = in_memory_repository.find_document_by_hash("test_hash")
# Assert # Assert
assert found_file is None assert found_file is None
@@ -297,16 +259,15 @@ class TestFileDocumentRepositoryFinding:
class TestFileDocumentRepositoryNameMatching: class TestFileDocumentRepositoryNameMatching:
"""Tests for file document name matching functionality.""" """Tests for file document name matching functionality."""
@pytest.mark.asyncio def test_i_can_find_documents_by_name_with_fuzzy_matching(self, in_memory_repository, multiple_sample_files):
async def test_i_can_find_documents_by_name_with_fuzzy_matching(self, in_memory_repository, multiple_sample_files):
"""Test finding file documents by filename using fuzzy matching.""" """Test finding file documents by filename using fuzzy matching."""
# Arrange # Arrange
for file_doc in multiple_sample_files: for file_doc in multiple_sample_files:
await in_memory_repository.create_document(file_doc) in_memory_repository.create_document(file_doc)
# Act # Act
fuzzy_method = FuzzyMatching(threshold=0.5) fuzzy_method = FuzzyMatching(threshold=0.5)
found_files = await in_memory_repository.find_document_by_name("document", fuzzy_method) found_files = in_memory_repository.find_document_by_name("document", fuzzy_method)
# Assert # Assert
assert len(found_files) >= 1 assert len(found_files) >= 1
@@ -315,44 +276,41 @@ class TestFileDocumentRepositoryNameMatching:
found_filenames = [f.filename for f in found_files] found_filenames = [f.filename for f in found_files]
assert any("document" in fname.lower() for fname in found_filenames) assert any("document" in fname.lower() for fname in found_filenames)
@pytest.mark.asyncio def test_i_can_find_documents_by_name_with_subsequence_matching(self, in_memory_repository,
async def test_i_can_find_documents_by_name_with_subsequence_matching(self, in_memory_repository,
multiple_sample_files): multiple_sample_files):
"""Test finding file documents by filename using subsequence matching.""" """Test finding file documents by filename using subsequence matching."""
# Arrange # Arrange
for file_doc in multiple_sample_files: for file_doc in multiple_sample_files:
await in_memory_repository.create_document(file_doc) in_memory_repository.create_document(file_doc)
# Act # Act
subsequence_method = SubsequenceMatching() subsequence_method = SubsequenceMatching()
found_files = await in_memory_repository.find_document_by_name("doc", subsequence_method) found_files = in_memory_repository.find_document_by_name("doc", subsequence_method)
# Assert # Assert
assert len(found_files) >= 1 assert len(found_files) >= 1
assert all(isinstance(file_doc, FileDocument) for file_doc in found_files) assert all(isinstance(file_doc, FileDocument) for file_doc in found_files)
@pytest.mark.asyncio def test_i_can_find_documents_by_name_with_default_method(self, in_memory_repository, multiple_sample_files):
async def test_i_can_find_documents_by_name_with_default_method(self, in_memory_repository, multiple_sample_files):
"""Test finding file documents by filename with default matching method.""" """Test finding file documents by filename with default matching method."""
# Arrange # Arrange
for file_doc in multiple_sample_files: for file_doc in multiple_sample_files:
await in_memory_repository.create_document(file_doc) in_memory_repository.create_document(file_doc)
# Act # Act
found_files = await in_memory_repository.find_document_by_name("first") found_files = in_memory_repository.find_document_by_name("first")
# Assert # Assert
assert len(found_files) >= 0 assert len(found_files) >= 0
assert all(isinstance(file_doc, FileDocument) for file_doc in found_files) assert all(isinstance(file_doc, FileDocument) for file_doc in found_files)
@pytest.mark.asyncio def test_i_cannot_find_documents_by_name_with_pymongo_error(self, in_memory_repository, mocker):
async def test_i_cannot_find_documents_by_name_with_pymongo_error(self, in_memory_repository, mocker):
"""Test handling of PyMongo errors during document name matching.""" """Test handling of PyMongo errors during document name matching."""
# Arrange # Arrange
mocker.patch.object(in_memory_repository.collection, 'find', side_effect=PyMongoError("Database error")) mocker.patch.object(in_memory_repository.collection, 'find', side_effect=PyMongoError("Database error"))
# Act # Act
found_files = await in_memory_repository.find_document_by_name("test") found_files = in_memory_repository.find_document_by_name("test")
# Assert # Assert
assert found_files == [] assert found_files == []
@@ -361,30 +319,28 @@ class TestFileDocumentRepositoryNameMatching:
class TestFileDocumentRepositoryListing: class TestFileDocumentRepositoryListing:
"""Tests for file document listing functionality.""" """Tests for file document listing functionality."""
@pytest.mark.asyncio def test_i_can_list_documents_with_default_pagination(self, in_memory_repository, multiple_sample_files):
async def test_i_can_list_documents_with_default_pagination(self, in_memory_repository, multiple_sample_files):
"""Test listing file documents with default pagination.""" """Test listing file documents with default pagination."""
# Arrange # Arrange
for file_doc in multiple_sample_files: for file_doc in multiple_sample_files:
await in_memory_repository.create_document(file_doc) in_memory_repository.create_document(file_doc)
# Act # Act
files = await in_memory_repository.list_documents() files = in_memory_repository.list_documents()
# Assert # Assert
assert len(files) == len(multiple_sample_files) assert len(files) == len(multiple_sample_files)
assert all(isinstance(file_doc, FileDocument) for file_doc in files) assert all(isinstance(file_doc, FileDocument) for file_doc in files)
@pytest.mark.asyncio def test_i_can_list_documents_with_custom_pagination(self, in_memory_repository, multiple_sample_files):
async def test_i_can_list_documents_with_custom_pagination(self, in_memory_repository, multiple_sample_files):
"""Test listing file documents with custom pagination.""" """Test listing file documents with custom pagination."""
# Arrange # Arrange
for file_doc in multiple_sample_files: for file_doc in multiple_sample_files:
await in_memory_repository.create_document(file_doc) in_memory_repository.create_document(file_doc)
# Act # Act
files_page1 = await in_memory_repository.list_documents(skip=0, limit=2) files_page1 = in_memory_repository.list_documents(skip=0, limit=2)
files_page2 = await in_memory_repository.list_documents(skip=2, limit=2) files_page2 = in_memory_repository.list_documents(skip=2, limit=2)
# Assert # Assert
assert len(files_page1) == 2 assert len(files_page1) == 2
@@ -395,8 +351,7 @@ class TestFileDocumentRepositoryListing:
page2_ids = [file_doc.id for file_doc in files_page2] page2_ids = [file_doc.id for file_doc in files_page2]
assert len(set(page1_ids).intersection(set(page2_ids))) == 0 assert len(set(page1_ids).intersection(set(page2_ids))) == 0
@pytest.mark.asyncio def test_i_can_list_documents_sorted_by_detected_at(self, in_memory_repository, sample_file_document):
async def test_i_can_list_documents_sorted_by_detected_at(self, in_memory_repository, sample_file_document):
"""Test that file documents are sorted by detected_at in descending order.""" """Test that file documents are sorted by detected_at in descending order."""
# Arrange # Arrange
file1 = sample_file_document.model_copy() file1 = sample_file_document.model_copy()
@@ -411,11 +366,11 @@ class TestFileDocumentRepositoryListing:
file2.file_hash = "hash2" + "0" * 58 file2.file_hash = "hash2" + "0" * 58
file2.detected_at = datetime(2024, 1, 2, 10, 0, 0) # Later date file2.detected_at = datetime(2024, 1, 2, 10, 0, 0) # Later date
created_file1 = await in_memory_repository.create_document(file1) created_file1 = in_memory_repository.create_document(file1)
created_file2 = await in_memory_repository.create_document(file2) created_file2 = in_memory_repository.create_document(file2)
# Act # Act
files = await in_memory_repository.list_documents() files = in_memory_repository.list_documents()
# Assert # Assert
assert len(files) == 2 assert len(files) == 2
@@ -423,23 +378,21 @@ class TestFileDocumentRepositoryListing:
assert files[0].id == created_file2.id assert files[0].id == created_file2.id
assert files[1].id == created_file1.id assert files[1].id == created_file1.id
@pytest.mark.asyncio def test_i_can_list_empty_documents(self, in_memory_repository):
async def test_i_can_list_empty_documents(self, in_memory_repository):
"""Test listing file documents from empty collection.""" """Test listing file documents from empty collection."""
# Act # Act
files = await in_memory_repository.list_documents() files = in_memory_repository.list_documents()
# Assert # Assert
assert files == [] assert files == []
@pytest.mark.asyncio def test_i_cannot_list_documents_with_pymongo_error(self, in_memory_repository, mocker):
async def test_i_cannot_list_documents_with_pymongo_error(self, in_memory_repository, mocker):
"""Test handling of PyMongo errors during file document listing.""" """Test handling of PyMongo errors during file document listing."""
# Arrange # Arrange
mocker.patch.object(in_memory_repository.collection, 'find', side_effect=PyMongoError("Database error")) mocker.patch.object(in_memory_repository.collection, 'find', side_effect=PyMongoError("Database error"))
# Act # Act
files = await in_memory_repository.list_documents() files = in_memory_repository.list_documents()
# Assert # Assert
assert files == [] assert files == []
@@ -448,15 +401,14 @@ class TestFileDocumentRepositoryListing:
class TestFileDocumentRepositoryUpdate: class TestFileDocumentRepositoryUpdate:
"""Tests for file document update functionality.""" """Tests for file document update functionality."""
@pytest.mark.asyncio def test_i_can_update_document_successfully(self, in_memory_repository, sample_file_document,
async def test_i_can_update_document_successfully(self, in_memory_repository, sample_file_document,
sample_update_data): sample_update_data):
"""Test successful file document update.""" """Test successful file document update."""
# Arrange # Arrange
created_file = await in_memory_repository.create_document(sample_file_document) created_file = in_memory_repository.create_document(sample_file_document)
# Act # Act
updated_file = await in_memory_repository.update_document(str(created_file.id), sample_update_data) updated_file = in_memory_repository.update_document(str(created_file.id), sample_update_data)
# Assert # Assert
assert updated_file is not None assert updated_file is not None
@@ -467,15 +419,14 @@ class TestFileDocumentRepositoryUpdate:
assert updated_file.filename == created_file.filename # Unchanged fields remain assert updated_file.filename == created_file.filename # Unchanged fields remain
assert updated_file.filepath == created_file.filepath assert updated_file.filepath == created_file.filepath
@pytest.mark.asyncio def test_i_can_update_document_with_partial_data(self, in_memory_repository, sample_file_document):
async def test_i_can_update_document_with_partial_data(self, in_memory_repository, sample_file_document):
"""Test updating file document with partial data.""" """Test updating file document with partial data."""
# Arrange # Arrange
created_file = await in_memory_repository.create_document(sample_file_document) created_file = in_memory_repository.create_document(sample_file_document)
partial_update = {"file_size": 999999} partial_update = {"file_size": 999999}
# Act # Act
updated_file = await in_memory_repository.update_document(str(created_file.id), partial_update) updated_file = in_memory_repository.update_document(str(created_file.id), partial_update)
# Assert # Assert
assert updated_file is not None assert updated_file is not None
@@ -483,30 +434,28 @@ class TestFileDocumentRepositoryUpdate:
assert updated_file.filename == created_file.filename # Should remain unchanged assert updated_file.filename == created_file.filename # Should remain unchanged
assert updated_file.metadata == created_file.metadata # Should remain unchanged assert updated_file.metadata == created_file.metadata # Should remain unchanged
@pytest.mark.asyncio def test_i_can_update_document_filtering_none_values(self, in_memory_repository, sample_file_document):
async def test_i_can_update_document_filtering_none_values(self, in_memory_repository, sample_file_document):
"""Test that None values are filtered out from update data.""" """Test that None values are filtered out from update data."""
# Arrange # Arrange
created_file = await in_memory_repository.create_document(sample_file_document) created_file = in_memory_repository.create_document(sample_file_document)
update_with_none = {"file_size": 777777, "metadata": None} update_with_none = {"file_size": 777777, "metadata": None}
# Act # Act
updated_file = await in_memory_repository.update_document(str(created_file.id), update_with_none) updated_file = in_memory_repository.update_document(str(created_file.id), update_with_none)
# Assert # Assert
assert updated_file is not None assert updated_file is not None
assert updated_file.file_size == 777777 assert updated_file.file_size == 777777
assert updated_file.metadata == created_file.metadata # Should remain unchanged (None filtered out) assert updated_file.metadata == created_file.metadata # Should remain unchanged (None filtered out)
@pytest.mark.asyncio def test_i_can_update_document_with_empty_data(self, in_memory_repository, sample_file_document):
async def test_i_can_update_document_with_empty_data(self, in_memory_repository, sample_file_document):
"""Test updating file document with empty data returns current document.""" """Test updating file document with empty data returns current document."""
# Arrange # Arrange
created_file = await in_memory_repository.create_document(sample_file_document) created_file = in_memory_repository.create_document(sample_file_document)
empty_update = {} empty_update = {}
# Act # Act
result = await in_memory_repository.update_document(str(created_file.id), empty_update) result = in_memory_repository.update_document(str(created_file.id), empty_update)
# Assert # Assert
assert result is not None assert result is not None
@@ -514,38 +463,35 @@ class TestFileDocumentRepositoryUpdate:
assert result.filepath == created_file.filepath assert result.filepath == created_file.filepath
assert result.metadata == created_file.metadata assert result.metadata == created_file.metadata
@pytest.mark.asyncio def test_i_cannot_update_document_with_invalid_id(self, in_memory_repository, sample_update_data):
async def test_i_cannot_update_document_with_invalid_id(self, in_memory_repository, sample_update_data):
"""Test that updating with invalid ID returns None.""" """Test that updating with invalid ID returns None."""
# Act # Act
result = await in_memory_repository.update_document("invalid_id", sample_update_data) result = in_memory_repository.update_document("invalid_id", sample_update_data)
# Assert # Assert
assert result is None assert result is None
@pytest.mark.asyncio def test_i_cannot_update_nonexistent_document(self, in_memory_repository, sample_update_data):
async def test_i_cannot_update_nonexistent_document(self, in_memory_repository, sample_update_data):
"""Test that updating nonexistent file document returns None.""" """Test that updating nonexistent file document returns None."""
# Arrange # Arrange
nonexistent_id = str(ObjectId()) nonexistent_id = str(ObjectId())
# Act # Act
result = await in_memory_repository.update_document(nonexistent_id, sample_update_data) result = in_memory_repository.update_document(nonexistent_id, sample_update_data)
# Assert # Assert
assert result is None assert result is None
@pytest.mark.asyncio def test_i_cannot_update_document_with_pymongo_error(self, in_memory_repository, sample_file_document,
async def test_i_cannot_update_document_with_pymongo_error(self, in_memory_repository, sample_file_document,
sample_update_data, mocker): sample_update_data, mocker):
"""Test handling of PyMongo errors during file document update.""" """Test handling of PyMongo errors during file document update."""
# Arrange # Arrange
created_file = await in_memory_repository.create_document(sample_file_document) created_file = in_memory_repository.create_document(sample_file_document)
mocker.patch.object(in_memory_repository.collection, 'find_one_and_update', mocker.patch.object(in_memory_repository.collection, 'find_one_and_update',
side_effect=PyMongoError("Database error")) side_effect=PyMongoError("Database error"))
# Act # Act
result = await in_memory_repository.update_document(str(created_file.id), sample_update_data) result = in_memory_repository.update_document(str(created_file.id), sample_update_data)
# Assert # Assert
assert result is None assert result is None
@@ -554,52 +500,48 @@ class TestFileDocumentRepositoryUpdate:
class TestFileDocumentRepositoryDeletion: class TestFileDocumentRepositoryDeletion:
"""Tests for file document deletion functionality.""" """Tests for file document deletion functionality."""
@pytest.mark.asyncio def test_i_can_delete_existing_document(self, in_memory_repository, sample_file_document):
async def test_i_can_delete_existing_document(self, in_memory_repository, sample_file_document):
"""Test successful file document deletion.""" """Test successful file document deletion."""
# Arrange # Arrange
created_file = await in_memory_repository.create_document(sample_file_document) created_file = in_memory_repository.create_document(sample_file_document)
# Act # Act
deletion_result = await in_memory_repository.delete_document(str(created_file.id)) deletion_result = in_memory_repository.delete_document(str(created_file.id))
# Assert # Assert
assert deletion_result is True assert deletion_result is True
# Verify document is actually deleted # Verify document is actually deleted
found_file = await in_memory_repository.find_document_by_id(str(created_file.id)) found_file = in_memory_repository.find_document_by_id(str(created_file.id))
assert found_file is None assert found_file is None
@pytest.mark.asyncio def test_i_cannot_delete_document_with_invalid_id(self, in_memory_repository):
async def test_i_cannot_delete_document_with_invalid_id(self, in_memory_repository):
"""Test that deleting with invalid ID returns False.""" """Test that deleting with invalid ID returns False."""
# Act # Act
result = await in_memory_repository.delete_document("invalid_id") result = in_memory_repository.delete_document("invalid_id")
# Assert # Assert
assert result is False assert result is False
@pytest.mark.asyncio def test_i_cannot_delete_nonexistent_document(self, in_memory_repository):
async def test_i_cannot_delete_nonexistent_document(self, in_memory_repository):
"""Test that deleting nonexistent file document returns False.""" """Test that deleting nonexistent file document returns False."""
# Arrange # Arrange
nonexistent_id = str(ObjectId()) nonexistent_id = str(ObjectId())
# Act # Act
result = await in_memory_repository.delete_document(nonexistent_id) result = in_memory_repository.delete_document(nonexistent_id)
# Assert # Assert
assert result is False assert result is False
@pytest.mark.asyncio def test_i_cannot_delete_document_with_pymongo_error(self, in_memory_repository, sample_file_document, mocker):
async def test_i_cannot_delete_document_with_pymongo_error(self, in_memory_repository, sample_file_document, mocker):
"""Test handling of PyMongo errors during file document deletion.""" """Test handling of PyMongo errors during file document deletion."""
# Arrange # Arrange
created_file = await in_memory_repository.create_document(sample_file_document) created_file = in_memory_repository.create_document(sample_file_document)
mocker.patch.object(in_memory_repository.collection, 'delete_one', side_effect=PyMongoError("Database error")) mocker.patch.object(in_memory_repository.collection, 'delete_one', side_effect=PyMongoError("Database error"))
# Act # Act
result = await in_memory_repository.delete_document(str(created_file.id)) result = in_memory_repository.delete_document(str(created_file.id))
# Assert # Assert
assert result is False assert result is False
@@ -608,36 +550,33 @@ class TestFileDocumentRepositoryDeletion:
class TestFileDocumentRepositoryUtilities: class TestFileDocumentRepositoryUtilities:
"""Tests for utility methods.""" """Tests for utility methods."""
@pytest.mark.asyncio def test_i_can_count_documents(self, in_memory_repository, sample_file_document):
async def test_i_can_count_documents(self, in_memory_repository, sample_file_document):
"""Test counting file documents.""" """Test counting file documents."""
# Arrange # Arrange
initial_count = await in_memory_repository.count_documents() initial_count = in_memory_repository.count_documents()
await in_memory_repository.create_document(sample_file_document) in_memory_repository.create_document(sample_file_document)
# Act # Act
final_count = await in_memory_repository.count_documents() final_count = in_memory_repository.count_documents()
# Assert # Assert
assert final_count == initial_count + 1 assert final_count == initial_count + 1
@pytest.mark.asyncio def test_i_can_count_zero_documents(self, in_memory_repository):
async def test_i_can_count_zero_documents(self, in_memory_repository):
"""Test counting file documents in empty collection.""" """Test counting file documents in empty collection."""
# Act # Act
count = await in_memory_repository.count_documents() count = in_memory_repository.count_documents()
# Assert # Assert
assert count == 0 assert count == 0
@pytest.mark.asyncio def test_i_cannot_count_documents_with_pymongo_error(self, in_memory_repository, mocker):
async def test_i_cannot_count_documents_with_pymongo_error(self, in_memory_repository, mocker):
"""Test handling of PyMongo errors during file document counting.""" """Test handling of PyMongo errors during file document counting."""
# Arrange # Arrange
mocker.patch.object(in_memory_repository.collection, 'count_documents', side_effect=PyMongoError("Database error")) mocker.patch.object(in_memory_repository.collection, 'count_documents', side_effect=PyMongoError("Database error"))
# Act # Act
count = await in_memory_repository.count_documents() count = in_memory_repository.count_documents()
# Assert # Assert
assert count == 0 assert count == 0

View File

@@ -1,5 +1,5 @@
""" """
Test suite for JobRepository with async/await support. Test suite for JobRepository with async/support.
This module contains comprehensive tests for all JobRepository methods This module contains comprehensive tests for all JobRepository methods
using mongomock-motor for in-memory MongoDB testing. using mongomock-motor for in-memory MongoDB testing.
@@ -8,8 +8,8 @@ using mongomock-motor for in-memory MongoDB testing.
from datetime import datetime from datetime import datetime
import pytest import pytest
import pytest_asyncio
from bson import ObjectId from bson import ObjectId
from mongomock.mongo_client import MongoClient
from mongomock_motor import AsyncMongoMockClient from mongomock_motor import AsyncMongoMockClient
from pymongo.errors import PyMongoError from pymongo.errors import PyMongoError
@@ -19,13 +19,13 @@ from app.models.job import ProcessingJob, ProcessingStatus
from app.models.types import PyObjectId from app.models.types import PyObjectId
@pytest_asyncio.fixture @pytest.fixture
async def in_memory_repository(): def in_memory_repository():
"""Create an in-memory JobRepository for testing.""" """Create an in-memory JobRepository for testing."""
client = AsyncMongoMockClient() client = MongoClient()
db = client.test_database db = client.test_database
repo = JobRepository(db) repo = JobRepository(db)
await repo.initialize() repo.initialize()
return repo return repo
@@ -82,8 +82,7 @@ def multiple_sample_jobs():
class TestJobRepositoryInitialization: class TestJobRepositoryInitialization:
"""Tests for repository initialization.""" """Tests for repository initialization."""
@pytest.mark.asyncio def test_i_can_initialize_repository(self):
async def test_i_can_initialize_repository(self):
"""Test repository initialization.""" """Test repository initialization."""
# Arrange # Arrange
client = AsyncMongoMockClient() client = AsyncMongoMockClient()
@@ -91,7 +90,7 @@ class TestJobRepositoryInitialization:
repo = JobRepository(db) repo = JobRepository(db)
# Act # Act
initialized_repo = await repo.initialize() initialized_repo = repo.initialize()
# Assert # Assert
assert initialized_repo is repo assert initialized_repo is repo
@@ -102,11 +101,10 @@ class TestJobRepositoryInitialization:
class TestJobRepositoryCreation: class TestJobRepositoryCreation:
"""Tests for job creation functionality.""" """Tests for job creation functionality."""
@pytest.mark.asyncio def test_i_can_create_job_with_task_id(self, in_memory_repository, sample_document_id, sample_task_id):
async def test_i_can_create_job_with_task_id(self, in_memory_repository, sample_document_id, sample_task_id):
"""Test successful job creation with task ID.""" """Test successful job creation with task ID."""
# Act # Act
created_job = await in_memory_repository.create_job(sample_document_id, sample_task_id) created_job = in_memory_repository.create_job(sample_document_id, sample_task_id)
# Assert # Assert
assert created_job is not None assert created_job is not None
@@ -120,11 +118,10 @@ class TestJobRepositoryCreation:
assert created_job.id is not None assert created_job.id is not None
assert isinstance(created_job.id, ObjectId) assert isinstance(created_job.id, ObjectId)
@pytest.mark.asyncio def test_i_can_create_job_without_task_id(self, in_memory_repository, sample_document_id):
async def test_i_can_create_job_without_task_id(self, in_memory_repository, sample_document_id):
"""Test successful job creation without task ID.""" """Test successful job creation without task ID."""
# Act # Act
created_job = await in_memory_repository.create_job(sample_document_id) created_job = in_memory_repository.create_job(sample_document_id)
# Assert # Assert
assert created_job is not None assert created_job is not None
@@ -138,28 +135,26 @@ class TestJobRepositoryCreation:
assert created_job.id is not None assert created_job.id is not None
assert isinstance(created_job.id, ObjectId) assert isinstance(created_job.id, ObjectId)
@pytest.mark.asyncio def test_i_cannot_create_duplicate_job_for_document(self, in_memory_repository, sample_document_id,
async def test_i_cannot_create_duplicate_job_for_document(self, in_memory_repository, sample_document_id,
sample_task_id): sample_task_id):
"""Test that creating job with duplicate document_id raises DuplicateKeyError.""" """Test that creating job with duplicate document_id raises DuplicateKeyError."""
# Arrange # Arrange
await in_memory_repository.create_job(sample_document_id, sample_task_id) in_memory_repository.create_job(sample_document_id, sample_task_id)
# Act & Assert # Act & Assert
with pytest.raises(JobRepositoryError) as exc_info: with pytest.raises(JobRepositoryError) as exc_info:
await in_memory_repository.create_job(sample_document_id, "different-task-id") in_memory_repository.create_job(sample_document_id, "different-task-id")
assert "create_job" in str(exc_info.value) assert "create_job" in str(exc_info.value)
@pytest.mark.asyncio def test_i_cannot_create_job_with_pymongo_error(self, in_memory_repository, sample_document_id, mocker):
async def test_i_cannot_create_job_with_pymongo_error(self, in_memory_repository, sample_document_id, mocker):
"""Test handling of PyMongo errors during job creation.""" """Test handling of PyMongo errors during job creation."""
# Arrange # Arrange
mocker.patch.object(in_memory_repository.collection, 'insert_one', side_effect=PyMongoError("Database error")) mocker.patch.object(in_memory_repository.collection, 'insert_one', side_effect=PyMongoError("Database error"))
# Act & Assert # Act & Assert
with pytest.raises(JobRepositoryError) as exc_info: with pytest.raises(JobRepositoryError) as exc_info:
await in_memory_repository.create_job(sample_document_id) in_memory_repository.create_job(sample_document_id)
assert "create_job" in str(exc_info.value) assert "create_job" in str(exc_info.value)
@@ -167,14 +162,13 @@ class TestJobRepositoryCreation:
class TestJobRepositoryFinding: class TestJobRepositoryFinding:
"""Tests for job finding functionality.""" """Tests for job finding functionality."""
@pytest.mark.asyncio def test_i_can_find_job_by_valid_id(self, in_memory_repository, sample_document_id, sample_task_id):
async def test_i_can_find_job_by_valid_id(self, in_memory_repository, sample_document_id, sample_task_id):
"""Test finding job by valid ObjectId.""" """Test finding job by valid ObjectId."""
# Arrange # Arrange
created_job = await in_memory_repository.create_job(sample_document_id, sample_task_id) created_job = in_memory_repository.create_job(sample_document_id, sample_task_id)
# Act # Act
found_job = await in_memory_repository.find_job_by_id(created_job.id) found_job = in_memory_repository.find_job_by_id(created_job.id)
# Assert # Assert
assert found_job is not None assert found_job is not None
@@ -183,97 +177,90 @@ class TestJobRepositoryFinding:
assert found_job.task_id == created_job.task_id assert found_job.task_id == created_job.task_id
assert found_job.status == created_job.status assert found_job.status == created_job.status
@pytest.mark.asyncio def test_i_cannot_find_job_by_nonexistent_id(self, in_memory_repository):
async def test_i_cannot_find_job_by_nonexistent_id(self, in_memory_repository):
"""Test that nonexistent ObjectId returns None.""" """Test that nonexistent ObjectId returns None."""
# Arrange # Arrange
nonexistent_id = PyObjectId() nonexistent_id = PyObjectId()
# Act # Act
found_job = await in_memory_repository.find_job_by_id(nonexistent_id) found_job = in_memory_repository.find_job_by_id(nonexistent_id)
# Assert # Assert
assert found_job is None assert found_job is None
@pytest.mark.asyncio def test_i_cannot_find_job_with_pymongo_error(self, in_memory_repository, mocker):
async def test_i_cannot_find_job_with_pymongo_error(self, in_memory_repository, mocker):
"""Test handling of PyMongo errors during job finding.""" """Test handling of PyMongo errors during job finding."""
# Arrange # Arrange
mocker.patch.object(in_memory_repository.collection, 'find_one', side_effect=PyMongoError("Database error")) mocker.patch.object(in_memory_repository.collection, 'find_one', side_effect=PyMongoError("Database error"))
# Act & Assert # Act & Assert
with pytest.raises(JobRepositoryError) as exc_info: with pytest.raises(JobRepositoryError) as exc_info:
await in_memory_repository.find_job_by_id(PyObjectId()) in_memory_repository.find_job_by_id(PyObjectId())
assert "get_job_by_id" in str(exc_info.value) assert "get_job_by_id" in str(exc_info.value)
@pytest.mark.asyncio def test_i_can_find_jobs_by_document_id(self, in_memory_repository, sample_document_id, sample_task_id):
async def test_i_can_find_jobs_by_document_id(self, in_memory_repository, sample_document_id, sample_task_id):
"""Test finding jobs by document ID.""" """Test finding jobs by document ID."""
# Arrange # Arrange
created_job = await in_memory_repository.create_job(sample_document_id, sample_task_id) created_job = in_memory_repository.create_job(sample_document_id, sample_task_id)
# Act # Act
found_jobs = await in_memory_repository.find_jobs_by_document_id(sample_document_id) found_jobs = in_memory_repository.find_jobs_by_document_id(sample_document_id)
# Assert # Assert
assert len(found_jobs) == 1 assert len(found_jobs) == 1
assert found_jobs[0].id == created_job.id assert found_jobs[0].id == created_job.id
assert found_jobs[0].document_id == sample_document_id assert found_jobs[0].document_id == sample_document_id
@pytest.mark.asyncio def test_i_can_find_empty_jobs_list_for_nonexistent_document(self, in_memory_repository):
async def test_i_can_find_empty_jobs_list_for_nonexistent_document(self, in_memory_repository):
"""Test that nonexistent document ID returns empty list.""" """Test that nonexistent document ID returns empty list."""
# Arrange # Arrange
nonexistent_id = ObjectId() nonexistent_id = ObjectId()
# Act # Act
found_jobs = await in_memory_repository.find_jobs_by_document_id(nonexistent_id) found_jobs = in_memory_repository.find_jobs_by_document_id(nonexistent_id)
# Assert # Assert
assert found_jobs == [] assert found_jobs == []
@pytest.mark.asyncio def test_i_cannot_find_jobs_by_document_with_pymongo_error(self, in_memory_repository, mocker):
async def test_i_cannot_find_jobs_by_document_with_pymongo_error(self, in_memory_repository, mocker):
"""Test handling of PyMongo errors during finding jobs by document ID.""" """Test handling of PyMongo errors during finding jobs by document ID."""
# Arrange # Arrange
mocker.patch.object(in_memory_repository.collection, 'find', side_effect=PyMongoError("Database error")) mocker.patch.object(in_memory_repository.collection, 'find', side_effect=PyMongoError("Database error"))
# Act & Assert # Act & Assert
with pytest.raises(JobRepositoryError) as exc_info: with pytest.raises(JobRepositoryError) as exc_info:
await in_memory_repository.find_jobs_by_document_id(PyObjectId()) in_memory_repository.find_jobs_by_document_id(PyObjectId())
assert "get_jobs_by_file_id" in str(exc_info.value) assert "get_jobs_by_file_id" in str(exc_info.value)
@pytest.mark.asyncio
@pytest.mark.parametrize("status", [ @pytest.mark.parametrize("status", [
ProcessingStatus.PENDING, ProcessingStatus.PENDING,
ProcessingStatus.PROCESSING, ProcessingStatus.PROCESSING,
ProcessingStatus.COMPLETED ProcessingStatus.COMPLETED
]) ])
async def test_i_can_find_jobs_by_pending_status(self, in_memory_repository, sample_document_id, status): def test_i_can_find_jobs_by_pending_status(self, in_memory_repository, sample_document_id, status):
"""Test finding jobs by PENDING status.""" """Test finding jobs by PENDING status."""
# Arrange # Arrange
created_job = await in_memory_repository.create_job(sample_document_id) created_job = in_memory_repository.create_job(sample_document_id)
await in_memory_repository.update_job_status(created_job.id, status) in_memory_repository.update_job_status(created_job.id, status)
# Act # Act
found_jobs = await in_memory_repository.get_jobs_by_status(status) found_jobs = in_memory_repository.get_jobs_by_status(status)
# Assert # Assert
assert len(found_jobs) == 1 assert len(found_jobs) == 1
assert found_jobs[0].id == created_job.id assert found_jobs[0].id == created_job.id
assert found_jobs[0].status == status assert found_jobs[0].status == status
@pytest.mark.asyncio def test_i_can_find_jobs_by_failed_status(self, in_memory_repository, sample_document_id):
async def test_i_can_find_jobs_by_failed_status(self, in_memory_repository, sample_document_id):
"""Test finding jobs by FAILED status.""" """Test finding jobs by FAILED status."""
# Arrange # Arrange
created_job = await in_memory_repository.create_job(sample_document_id) created_job = in_memory_repository.create_job(sample_document_id)
await in_memory_repository.update_job_status(created_job.id, ProcessingStatus.FAILED, "Test error") in_memory_repository.update_job_status(created_job.id, ProcessingStatus.FAILED, "Test error")
# Act # Act
found_jobs = await in_memory_repository.get_jobs_by_status(ProcessingStatus.FAILED) found_jobs = in_memory_repository.get_jobs_by_status(ProcessingStatus.FAILED)
# Assert # Assert
assert len(found_jobs) == 1 assert len(found_jobs) == 1
@@ -281,24 +268,22 @@ class TestJobRepositoryFinding:
assert found_jobs[0].status == ProcessingStatus.FAILED assert found_jobs[0].status == ProcessingStatus.FAILED
assert found_jobs[0].error_message == "Test error" assert found_jobs[0].error_message == "Test error"
@pytest.mark.asyncio def test_i_can_find_empty_jobs_list_for_unused_status(self, in_memory_repository):
async def test_i_can_find_empty_jobs_list_for_unused_status(self, in_memory_repository):
"""Test that unused status returns empty list.""" """Test that unused status returns empty list."""
# Act # Act
found_jobs = await in_memory_repository.get_jobs_by_status(ProcessingStatus.COMPLETED) found_jobs = in_memory_repository.get_jobs_by_status(ProcessingStatus.COMPLETED)
# Assert # Assert
assert found_jobs == [] assert found_jobs == []
@pytest.mark.asyncio def test_i_cannot_find_jobs_by_status_with_pymongo_error(self, in_memory_repository, mocker):
async def test_i_cannot_find_jobs_by_status_with_pymongo_error(self, in_memory_repository, mocker):
"""Test handling of PyMongo errors during finding jobs by status.""" """Test handling of PyMongo errors during finding jobs by status."""
# Arrange # Arrange
mocker.patch.object(in_memory_repository.collection, 'find', side_effect=PyMongoError("Database error")) mocker.patch.object(in_memory_repository.collection, 'find', side_effect=PyMongoError("Database error"))
# Act & Assert # Act & Assert
with pytest.raises(JobRepositoryError) as exc_info: with pytest.raises(JobRepositoryError) as exc_info:
await in_memory_repository.get_jobs_by_status(ProcessingStatus.PENDING) in_memory_repository.get_jobs_by_status(ProcessingStatus.PENDING)
assert "get_jobs_by_status" in str(exc_info.value) assert "get_jobs_by_status" in str(exc_info.value)
@@ -306,14 +291,13 @@ class TestJobRepositoryFinding:
class TestJobRepositoryStatusUpdate: class TestJobRepositoryStatusUpdate:
"""Tests for job status update functionality.""" """Tests for job status update functionality."""
@pytest.mark.asyncio def test_i_can_update_job_status_to_processing(self, in_memory_repository, sample_document_id):
async def test_i_can_update_job_status_to_processing(self, in_memory_repository, sample_document_id):
"""Test updating job status to PROCESSING with started_at timestamp.""" """Test updating job status to PROCESSING with started_at timestamp."""
# Arrange # Arrange
created_job = await in_memory_repository.create_job(sample_document_id) created_job = in_memory_repository.create_job(sample_document_id)
# Act # Act
updated_job = await in_memory_repository.update_job_status(created_job.id, ProcessingStatus.PROCESSING) updated_job = in_memory_repository.update_job_status(created_job.id, ProcessingStatus.PROCESSING)
# Assert # Assert
assert updated_job is not None assert updated_job is not None
@@ -323,15 +307,14 @@ class TestJobRepositoryStatusUpdate:
assert updated_job.completed_at is None assert updated_job.completed_at is None
assert updated_job.error_message is None assert updated_job.error_message is None
@pytest.mark.asyncio def test_i_can_update_job_status_to_completed(self, in_memory_repository, sample_document_id):
async def test_i_can_update_job_status_to_completed(self, in_memory_repository, sample_document_id):
"""Test updating job status to COMPLETED with completed_at timestamp.""" """Test updating job status to COMPLETED with completed_at timestamp."""
# Arrange # Arrange
created_job = await in_memory_repository.create_job(sample_document_id) created_job = in_memory_repository.create_job(sample_document_id)
await in_memory_repository.update_job_status(created_job.id, ProcessingStatus.PROCESSING) in_memory_repository.update_job_status(created_job.id, ProcessingStatus.PROCESSING)
# Act # Act
updated_job = await in_memory_repository.update_job_status(created_job.id, ProcessingStatus.COMPLETED) updated_job = in_memory_repository.update_job_status(created_job.id, ProcessingStatus.COMPLETED)
# Assert # Assert
assert updated_job is not None assert updated_job is not None
@@ -341,15 +324,14 @@ class TestJobRepositoryStatusUpdate:
assert updated_job.completed_at is not None assert updated_job.completed_at is not None
assert updated_job.error_message is None assert updated_job.error_message is None
@pytest.mark.asyncio def test_i_can_update_job_status_to_failed_with_error(self, in_memory_repository, sample_document_id):
async def test_i_can_update_job_status_to_failed_with_error(self, in_memory_repository, sample_document_id):
"""Test updating job status to FAILED with error message and completed_at timestamp.""" """Test updating job status to FAILED with error message and completed_at timestamp."""
# Arrange # Arrange
created_job = await in_memory_repository.create_job(sample_document_id) created_job = in_memory_repository.create_job(sample_document_id)
error_message = "Processing failed due to invalid format" error_message = "Processing failed due to invalid format"
# Act # Act
updated_job = await in_memory_repository.update_job_status( updated_job = in_memory_repository.update_job_status(
created_job.id, ProcessingStatus.FAILED, error_message created_job.id, ProcessingStatus.FAILED, error_message
) )
@@ -360,14 +342,13 @@ class TestJobRepositoryStatusUpdate:
assert updated_job.completed_at is not None assert updated_job.completed_at is not None
assert updated_job.error_message == error_message assert updated_job.error_message == error_message
@pytest.mark.asyncio def test_i_can_update_job_status_to_failed_without_error(self, in_memory_repository, sample_document_id):
async def test_i_can_update_job_status_to_failed_without_error(self, in_memory_repository, sample_document_id):
"""Test updating job status to FAILED without error message.""" """Test updating job status to FAILED without error message."""
# Arrange # Arrange
created_job = await in_memory_repository.create_job(sample_document_id) created_job = in_memory_repository.create_job(sample_document_id)
# Act # Act
updated_job = await in_memory_repository.update_job_status(created_job.id, ProcessingStatus.FAILED) updated_job = in_memory_repository.update_job_status(created_job.id, ProcessingStatus.FAILED)
# Assert # Assert
assert updated_job is not None assert updated_job is not None
@@ -376,29 +357,27 @@ class TestJobRepositoryStatusUpdate:
assert updated_job.completed_at is not None assert updated_job.completed_at is not None
assert updated_job.error_message is None assert updated_job.error_message is None
@pytest.mark.asyncio def test_i_cannot_update_nonexistent_job_status(self, in_memory_repository):
async def test_i_cannot_update_nonexistent_job_status(self, in_memory_repository):
"""Test that updating nonexistent job returns None.""" """Test that updating nonexistent job returns None."""
# Arrange # Arrange
nonexistent_id = ObjectId() nonexistent_id = ObjectId()
# Act # Act
result = await in_memory_repository.update_job_status(nonexistent_id, ProcessingStatus.COMPLETED) result = in_memory_repository.update_job_status(nonexistent_id, ProcessingStatus.COMPLETED)
# Assert # Assert
assert result is None assert result is None
@pytest.mark.asyncio def test_i_cannot_update_job_status_with_pymongo_error(self, in_memory_repository, sample_document_id, mocker):
async def test_i_cannot_update_job_status_with_pymongo_error(self, in_memory_repository, sample_document_id, mocker):
"""Test handling of PyMongo errors during job status update.""" """Test handling of PyMongo errors during job status update."""
# Arrange # Arrange
created_job = await in_memory_repository.create_job(sample_document_id) created_job = in_memory_repository.create_job(sample_document_id)
mocker.patch.object(in_memory_repository.collection, 'find_one_and_update', mocker.patch.object(in_memory_repository.collection, 'find_one_and_update',
side_effect=PyMongoError("Database error")) side_effect=PyMongoError("Database error"))
# Act & Assert # Act & Assert
with pytest.raises(JobRepositoryError) as exc_info: with pytest.raises(JobRepositoryError) as exc_info:
await in_memory_repository.update_job_status(created_job.id, ProcessingStatus.COMPLETED) in_memory_repository.update_job_status(created_job.id, ProcessingStatus.COMPLETED)
assert "update_job_status" in str(exc_info.value) assert "update_job_status" in str(exc_info.value)
@@ -406,44 +385,41 @@ class TestJobRepositoryStatusUpdate:
class TestJobRepositoryDeletion: class TestJobRepositoryDeletion:
"""Tests for job deletion functionality.""" """Tests for job deletion functionality."""
@pytest.mark.asyncio def test_i_can_delete_existing_job(self, in_memory_repository, sample_document_id):
async def test_i_can_delete_existing_job(self, in_memory_repository, sample_document_id):
"""Test successful job deletion.""" """Test successful job deletion."""
# Arrange # Arrange
created_job = await in_memory_repository.create_job(sample_document_id) created_job = in_memory_repository.create_job(sample_document_id)
# Act # Act
deletion_result = await in_memory_repository.delete_job(created_job.id) deletion_result = in_memory_repository.delete_job(created_job.id)
# Assert # Assert
assert deletion_result is True assert deletion_result is True
# Verify job is actually deleted # Verify job is actually deleted
found_job = await in_memory_repository.find_job_by_id(created_job.id) found_job = in_memory_repository.find_job_by_id(created_job.id)
assert found_job is None assert found_job is None
@pytest.mark.asyncio def test_i_cannot_delete_nonexistent_job(self, in_memory_repository):
async def test_i_cannot_delete_nonexistent_job(self, in_memory_repository):
"""Test that deleting nonexistent job returns False.""" """Test that deleting nonexistent job returns False."""
# Arrange # Arrange
nonexistent_id = ObjectId() nonexistent_id = ObjectId()
# Act # Act
result = await in_memory_repository.delete_job(nonexistent_id) result = in_memory_repository.delete_job(nonexistent_id)
# Assert # Assert
assert result is False assert result is False
@pytest.mark.asyncio def test_i_cannot_delete_job_with_pymongo_error(self, in_memory_repository, sample_document_id, mocker):
async def test_i_cannot_delete_job_with_pymongo_error(self, in_memory_repository, sample_document_id, mocker):
"""Test handling of PyMongo errors during job deletion.""" """Test handling of PyMongo errors during job deletion."""
# Arrange # Arrange
created_job = await in_memory_repository.create_job(sample_document_id) created_job = in_memory_repository.create_job(sample_document_id)
mocker.patch.object(in_memory_repository.collection, 'delete_one', side_effect=PyMongoError("Database error")) mocker.patch.object(in_memory_repository.collection, 'delete_one', side_effect=PyMongoError("Database error"))
# Act & Assert # Act & Assert
with pytest.raises(JobRepositoryError) as exc_info: with pytest.raises(JobRepositoryError) as exc_info:
await in_memory_repository.delete_job(created_job.id) in_memory_repository.delete_job(created_job.id)
assert "delete_job" in str(exc_info.value) assert "delete_job" in str(exc_info.value)
@@ -451,38 +427,36 @@ class TestJobRepositoryDeletion:
class TestJobRepositoryComplexScenarios: class TestJobRepositoryComplexScenarios:
"""Tests for complex job repository scenarios.""" """Tests for complex job repository scenarios."""
@pytest.mark.asyncio def test_i_can_handle_complete_job_lifecycle(self, in_memory_repository, sample_document_id, sample_task_id):
async def test_i_can_handle_complete_job_lifecycle(self, in_memory_repository, sample_document_id, sample_task_id):
"""Test complete job lifecycle from creation to completion.""" """Test complete job lifecycle from creation to completion."""
# Create job # Create job
job = await in_memory_repository.create_job(sample_document_id, sample_task_id) job = in_memory_repository.create_job(sample_document_id, sample_task_id)
assert job.status == ProcessingStatus.PENDING assert job.status == ProcessingStatus.PENDING
assert job.started_at is None assert job.started_at is None
assert job.completed_at is None assert job.completed_at is None
# Start processing # Start processing
job = await in_memory_repository.update_job_status(job.id, ProcessingStatus.PROCESSING) job = in_memory_repository.update_job_status(job.id, ProcessingStatus.PROCESSING)
assert job.status == ProcessingStatus.PROCESSING assert job.status == ProcessingStatus.PROCESSING
assert job.started_at is not None assert job.started_at is not None
assert job.completed_at is None assert job.completed_at is None
# Complete job # Complete job
job = await in_memory_repository.update_job_status(job.id, ProcessingStatus.COMPLETED) job = in_memory_repository.update_job_status(job.id, ProcessingStatus.COMPLETED)
assert job.status == ProcessingStatus.COMPLETED assert job.status == ProcessingStatus.COMPLETED
assert job.started_at is not None assert job.started_at is not None
assert job.completed_at is not None assert job.completed_at is not None
assert job.error_message is None assert job.error_message is None
@pytest.mark.asyncio def test_i_can_handle_job_failure_scenario(self, in_memory_repository, sample_document_id, sample_task_id):
async def test_i_can_handle_job_failure_scenario(self, in_memory_repository, sample_document_id, sample_task_id):
"""Test job failure scenario with error message.""" """Test job failure scenario with error message."""
# Create and start job # Create and start job
job = await in_memory_repository.create_job(sample_document_id, sample_task_id) job = in_memory_repository.create_job(sample_document_id, sample_task_id)
job = await in_memory_repository.update_job_status(job.id, ProcessingStatus.PROCESSING) job = in_memory_repository.update_job_status(job.id, ProcessingStatus.PROCESSING)
# Fail job with error # Fail job with error
error_msg = "File format not supported" error_msg = "File format not supported"
job = await in_memory_repository.update_job_status(job.id, ProcessingStatus.FAILED, error_msg) job = in_memory_repository.update_job_status(job.id, ProcessingStatus.FAILED, error_msg)
# Assert failure state # Assert failure state
assert job.status == ProcessingStatus.FAILED assert job.status == ProcessingStatus.FAILED
@@ -490,28 +464,27 @@ class TestJobRepositoryComplexScenarios:
assert job.completed_at is not None assert job.completed_at is not None
assert job.error_message == error_msg assert job.error_message == error_msg
@pytest.mark.asyncio def test_i_can_handle_multiple_documents_with_different_statuses(self, in_memory_repository):
async def test_i_can_handle_multiple_documents_with_different_statuses(self, in_memory_repository):
"""Test managing multiple jobs for different documents with various statuses.""" """Test managing multiple jobs for different documents with various statuses."""
# Create jobs for different documents # Create jobs for different documents
doc1 = PyObjectId() doc1 = PyObjectId()
doc2 = PyObjectId() doc2 = PyObjectId()
doc3 = PyObjectId() doc3 = PyObjectId()
job1 = await in_memory_repository.create_job(doc1, "task-1") job1 = in_memory_repository.create_job(doc1, "task-1")
job2 = await in_memory_repository.create_job(doc2, "task-2") job2 = in_memory_repository.create_job(doc2, "task-2")
job3 = await in_memory_repository.create_job(doc3, "task-3") job3 = in_memory_repository.create_job(doc3, "task-3")
# Update to different statuses # Update to different statuses
await in_memory_repository.update_job_status(job1.id, ProcessingStatus.PROCESSING) in_memory_repository.update_job_status(job1.id, ProcessingStatus.PROCESSING)
await in_memory_repository.update_job_status(job2.id, ProcessingStatus.COMPLETED) in_memory_repository.update_job_status(job2.id, ProcessingStatus.COMPLETED)
await in_memory_repository.update_job_status(job3.id, ProcessingStatus.FAILED, "Error occurred") in_memory_repository.update_job_status(job3.id, ProcessingStatus.FAILED, "Error occurred")
# Verify status queries # Verify status queries
pending_jobs = await in_memory_repository.get_jobs_by_status(ProcessingStatus.PENDING) pending_jobs = in_memory_repository.get_jobs_by_status(ProcessingStatus.PENDING)
processing_jobs = await in_memory_repository.get_jobs_by_status(ProcessingStatus.PROCESSING) processing_jobs = in_memory_repository.get_jobs_by_status(ProcessingStatus.PROCESSING)
completed_jobs = await in_memory_repository.get_jobs_by_status(ProcessingStatus.COMPLETED) completed_jobs = in_memory_repository.get_jobs_by_status(ProcessingStatus.COMPLETED)
failed_jobs = await in_memory_repository.get_jobs_by_status(ProcessingStatus.FAILED) failed_jobs = in_memory_repository.get_jobs_by_status(ProcessingStatus.FAILED)
assert len(pending_jobs) == 0 assert len(pending_jobs) == 0
assert len(processing_jobs) == 1 assert len(processing_jobs) == 1

View File

@@ -1,29 +1,26 @@
""" """
Test suite for UserRepository with async/await support. Test suite for UserRepository with async/support.
This module contains comprehensive tests for all UserRepository methods This module contains comprehensive tests for all UserRepository methods
using mongomock-motor for in-memory MongoDB testing. using mongomock-motor for in-memory MongoDB testing.
""" """
import pytest import pytest
from datetime import datetime
import pytest_asyncio
from bson import ObjectId from bson import ObjectId
from mongomock.mongo_client import MongoClient
from pymongo.errors import DuplicateKeyError from pymongo.errors import DuplicateKeyError
from mongomock_motor import AsyncMongoMockClient
from app.database.repositories.user_repository import UserRepository from app.database.repositories.user_repository import UserRepository
from app.models.user import UserCreate, UserUpdate, UserInDB from app.models.user import UserCreate, UserUpdate
@pytest_asyncio.fixture @pytest.fixture
async def in_memory_repository(): def in_memory_repository():
"""Create an in-memory UserRepository for testing.""" """Create an in-memory UserRepository for testing."""
client = AsyncMongoMockClient() client = MongoClient()
db = client.test_database db = client.test_database
repo = UserRepository(db) repo = UserRepository(db)
await repo.initialize() repo.initialize()
return repo return repo
@@ -51,11 +48,10 @@ def sample_user_update():
class TestUserRepositoryCreation: class TestUserRepositoryCreation:
"""Tests for user creation functionality.""" """Tests for user creation functionality."""
@pytest.mark.asyncio def test_i_can_create_user(self, in_memory_repository, sample_user_create):
async def test_i_can_create_user(self, in_memory_repository, sample_user_create):
"""Test successful user creation.""" """Test successful user creation."""
# Act # Act
created_user = await in_memory_repository.create_user(sample_user_create) created_user = in_memory_repository.create_user(sample_user_create)
# Assert # Assert
assert created_user is not None assert created_user is not None
@@ -68,15 +64,14 @@ class TestUserRepositoryCreation:
assert created_user.updated_at is not None assert created_user.updated_at is not None
assert created_user.hashed_password != sample_user_create.password # Should be hashed assert created_user.hashed_password != sample_user_create.password # Should be hashed
@pytest.mark.asyncio def test_i_cannot_create_user_with_duplicate_username(self, in_memory_repository, sample_user_create):
async def test_i_cannot_create_user_with_duplicate_username(self, in_memory_repository, sample_user_create):
"""Test that creating user with duplicate username raises DuplicateKeyError.""" """Test that creating user with duplicate username raises DuplicateKeyError."""
# Arrange # Arrange
await in_memory_repository.create_user(sample_user_create) in_memory_repository.create_user(sample_user_create)
# Act & Assert # Act & Assert
with pytest.raises(DuplicateKeyError) as exc_info: with pytest.raises(DuplicateKeyError) as exc_info:
await in_memory_repository.create_user(sample_user_create) in_memory_repository.create_user(sample_user_create)
assert "already exists" in str(exc_info.value) assert "already exists" in str(exc_info.value)
@@ -84,14 +79,13 @@ class TestUserRepositoryCreation:
class TestUserRepositoryFinding: class TestUserRepositoryFinding:
"""Tests for user finding functionality.""" """Tests for user finding functionality."""
@pytest.mark.asyncio def test_i_can_find_user_by_id(self, in_memory_repository, sample_user_create):
async def test_i_can_find_user_by_id(self, in_memory_repository, sample_user_create):
"""Test finding user by valid ID.""" """Test finding user by valid ID."""
# Arrange # Arrange
created_user = await in_memory_repository.create_user(sample_user_create) created_user = in_memory_repository.create_user(sample_user_create)
# Act # Act
found_user = await in_memory_repository.find_user_by_id(str(created_user.id)) found_user = in_memory_repository.find_user_by_id(str(created_user.id))
# Assert # Assert
assert found_user is not None assert found_user is not None
@@ -99,69 +93,63 @@ class TestUserRepositoryFinding:
assert found_user.username == created_user.username assert found_user.username == created_user.username
assert found_user.email == created_user.email assert found_user.email == created_user.email
@pytest.mark.asyncio def test_i_cannot_find_user_by_invalid_id(self, in_memory_repository):
async def test_i_cannot_find_user_by_invalid_id(self, in_memory_repository):
"""Test that invalid ObjectId returns None.""" """Test that invalid ObjectId returns None."""
# Act # Act
found_user = await in_memory_repository.find_user_by_id("invalid_id") found_user = in_memory_repository.find_user_by_id("invalid_id")
# Assert # Assert
assert found_user is None assert found_user is None
@pytest.mark.asyncio def test_i_cannot_find_user_by_nonexistent_id(self, in_memory_repository):
async def test_i_cannot_find_user_by_nonexistent_id(self, in_memory_repository):
"""Test that nonexistent but valid ObjectId returns None.""" """Test that nonexistent but valid ObjectId returns None."""
# Arrange # Arrange
nonexistent_id = str(ObjectId()) nonexistent_id = str(ObjectId())
# Act # Act
found_user = await in_memory_repository.find_user_by_id(nonexistent_id) found_user = in_memory_repository.find_user_by_id(nonexistent_id)
# Assert # Assert
assert found_user is None assert found_user is None
@pytest.mark.asyncio def test_i_can_find_user_by_username(self, in_memory_repository, sample_user_create):
async def test_i_can_find_user_by_username(self, in_memory_repository, sample_user_create):
"""Test finding user by username.""" """Test finding user by username."""
# Arrange # Arrange
created_user = await in_memory_repository.create_user(sample_user_create) created_user = in_memory_repository.create_user(sample_user_create)
# Act # Act
found_user = await in_memory_repository.find_user_by_username(sample_user_create.username) found_user = in_memory_repository.find_user_by_username(sample_user_create.username)
# Assert # Assert
assert found_user is not None assert found_user is not None
assert found_user.username == created_user.username assert found_user.username == created_user.username
assert found_user.id == created_user.id assert found_user.id == created_user.id
@pytest.mark.asyncio def test_i_cannot_find_user_by_nonexistent_username(self, in_memory_repository):
async def test_i_cannot_find_user_by_nonexistent_username(self, in_memory_repository):
"""Test that nonexistent username returns None.""" """Test that nonexistent username returns None."""
# Act # Act
found_user = await in_memory_repository.find_user_by_username("nonexistent") found_user = in_memory_repository.find_user_by_username("nonexistent")
# Assert # Assert
assert found_user is None assert found_user is None
@pytest.mark.asyncio def test_i_can_find_user_by_email(self, in_memory_repository, sample_user_create):
async def test_i_can_find_user_by_email(self, in_memory_repository, sample_user_create):
"""Test finding user by email.""" """Test finding user by email."""
# Arrange # Arrange
created_user = await in_memory_repository.create_user(sample_user_create) created_user = in_memory_repository.create_user(sample_user_create)
# Act # Act
found_user = await in_memory_repository.find_user_by_email(str(sample_user_create.email)) found_user = in_memory_repository.find_user_by_email(str(sample_user_create.email))
# Assert # Assert
assert found_user is not None assert found_user is not None
assert found_user.email == created_user.email assert found_user.email == created_user.email
assert found_user.id == created_user.id assert found_user.id == created_user.id
@pytest.mark.asyncio def test_i_cannot_find_user_by_nonexistent_email(self, in_memory_repository):
async def test_i_cannot_find_user_by_nonexistent_email(self, in_memory_repository):
"""Test that nonexistent email returns None.""" """Test that nonexistent email returns None."""
# Act # Act
found_user = await in_memory_repository.find_user_by_email("nonexistent@example.com") found_user = in_memory_repository.find_user_by_email("nonexistent@example.com")
# Assert # Assert
assert found_user is None assert found_user is None
@@ -170,15 +158,14 @@ class TestUserRepositoryFinding:
class TestUserRepositoryUpdate: class TestUserRepositoryUpdate:
"""Tests for user update functionality.""" """Tests for user update functionality."""
@pytest.mark.asyncio def test_i_can_update_user(self, in_memory_repository, sample_user_create, sample_user_update):
async def test_i_can_update_user(self, in_memory_repository, sample_user_create, sample_user_update):
"""Test successful user update.""" """Test successful user update."""
# Arrange # Arrange
created_user = await in_memory_repository.create_user(sample_user_create) created_user = in_memory_repository.create_user(sample_user_create)
original_updated_at = created_user.updated_at original_updated_at = created_user.updated_at
# Act # Act
updated_user = await in_memory_repository.update_user(str(created_user.id), sample_user_update) updated_user = in_memory_repository.update_user(str(created_user.id), sample_user_update)
# Assert # Assert
assert updated_user is not None assert updated_user is not None
@@ -187,24 +174,22 @@ class TestUserRepositoryUpdate:
assert updated_user.role == sample_user_update.role assert updated_user.role == sample_user_update.role
assert updated_user.id == created_user.id assert updated_user.id == created_user.id
@pytest.mark.asyncio def test_i_cannot_update_user_with_invalid_id(self, in_memory_repository, sample_user_update):
async def test_i_cannot_update_user_with_invalid_id(self, in_memory_repository, sample_user_update):
"""Test that updating with invalid ID returns None.""" """Test that updating with invalid ID returns None."""
# Act # Act
result = await in_memory_repository.update_user("invalid_id", sample_user_update) result = in_memory_repository.update_user("invalid_id", sample_user_update)
# Assert # Assert
assert result is None assert result is None
@pytest.mark.asyncio def test_i_can_update_user_with_partial_data(self, in_memory_repository, sample_user_create):
async def test_i_can_update_user_with_partial_data(self, in_memory_repository, sample_user_create):
"""Test updating user with partial data.""" """Test updating user with partial data."""
# Arrange # Arrange
created_user = await in_memory_repository.create_user(sample_user_create) created_user = in_memory_repository.create_user(sample_user_create)
partial_update = UserUpdate(username="newusername") partial_update = UserUpdate(username="newusername")
# Act # Act
updated_user = await in_memory_repository.update_user(str(created_user.id), partial_update) updated_user = in_memory_repository.update_user(str(created_user.id), partial_update)
# Assert # Assert
assert updated_user is not None assert updated_user is not None
@@ -212,15 +197,14 @@ class TestUserRepositoryUpdate:
assert updated_user.email == created_user.email # Should remain unchanged assert updated_user.email == created_user.email # Should remain unchanged
assert updated_user.role == created_user.role # Should remain unchanged assert updated_user.role == created_user.role # Should remain unchanged
@pytest.mark.asyncio def test_i_can_update_user_with_empty_data(self, in_memory_repository, sample_user_create):
async def test_i_can_update_user_with_empty_data(self, in_memory_repository, sample_user_create):
"""Test updating user with empty data returns current user.""" """Test updating user with empty data returns current user."""
# Arrange # Arrange
created_user = await in_memory_repository.create_user(sample_user_create) created_user = in_memory_repository.create_user(sample_user_create)
empty_update = UserUpdate() empty_update = UserUpdate()
# Act # Act
result = await in_memory_repository.update_user(str(created_user.id), empty_update) result = in_memory_repository.update_user(str(created_user.id), empty_update)
# Assert # Assert
assert result is not None assert result is not None
@@ -231,39 +215,36 @@ class TestUserRepositoryUpdate:
class TestUserRepositoryDeletion: class TestUserRepositoryDeletion:
"""Tests for user deletion functionality.""" """Tests for user deletion functionality."""
@pytest.mark.asyncio def test_i_can_delete_user(self, in_memory_repository, sample_user_create):
async def test_i_can_delete_user(self, in_memory_repository, sample_user_create):
"""Test successful user deletion.""" """Test successful user deletion."""
# Arrange # Arrange
created_user = await in_memory_repository.create_user(sample_user_create) created_user = in_memory_repository.create_user(sample_user_create)
# Act # Act
deletion_result = await in_memory_repository.delete_user(str(created_user.id)) deletion_result = in_memory_repository.delete_user(str(created_user.id))
# Assert # Assert
assert deletion_result is True assert deletion_result is True
# Verify user is actually deleted # Verify user is actually deleted
found_user = await in_memory_repository.find_user_by_id(str(created_user.id)) found_user = in_memory_repository.find_user_by_id(str(created_user.id))
assert found_user is None assert found_user is None
@pytest.mark.asyncio def test_i_cannot_delete_user_with_invalid_id(self, in_memory_repository):
async def test_i_cannot_delete_user_with_invalid_id(self, in_memory_repository):
"""Test that deleting with invalid ID returns False.""" """Test that deleting with invalid ID returns False."""
# Act # Act
result = await in_memory_repository.delete_user("invalid_id") result = in_memory_repository.delete_user("invalid_id")
# Assert # Assert
assert result is False assert result is False
@pytest.mark.asyncio def test_i_cannot_delete_nonexistent_user(self, in_memory_repository):
async def test_i_cannot_delete_nonexistent_user(self, in_memory_repository):
"""Test that deleting nonexistent user returns False.""" """Test that deleting nonexistent user returns False."""
# Arrange # Arrange
nonexistent_id = str(ObjectId()) nonexistent_id = str(ObjectId())
# Act # Act
result = await in_memory_repository.delete_user(nonexistent_id) result = in_memory_repository.delete_user(nonexistent_id)
# Assert # Assert
assert result is False assert result is False
@@ -272,30 +253,27 @@ class TestUserRepositoryDeletion:
class TestUserRepositoryUtilities: class TestUserRepositoryUtilities:
"""Tests for utility methods.""" """Tests for utility methods."""
@pytest.mark.asyncio def test_i_can_count_users(self, in_memory_repository, sample_user_create):
async def test_i_can_count_users(self, in_memory_repository, sample_user_create):
"""Test counting users.""" """Test counting users."""
# Arrange # Arrange
initial_count = await in_memory_repository.count_users() initial_count = in_memory_repository.count_users()
await in_memory_repository.create_user(sample_user_create) in_memory_repository.create_user(sample_user_create)
# Act # Act
final_count = await in_memory_repository.count_users() final_count = in_memory_repository.count_users()
# Assert # Assert
assert final_count == initial_count + 1 assert final_count == initial_count + 1
@pytest.mark.asyncio def test_i_can_check_user_exists(self, in_memory_repository, sample_user_create):
async def test_i_can_check_user_exists(self, in_memory_repository, sample_user_create):
"""Test checking if user exists.""" """Test checking if user exists."""
# Arrange # Arrange
await in_memory_repository.create_user(sample_user_create) in_memory_repository.create_user(sample_user_create)
# Act # Act
exists = await in_memory_repository.user_exists(sample_user_create.username) exists = in_memory_repository.user_exists(sample_user_create.username)
not_exists = await in_memory_repository.user_exists("nonexistent") not_exists = in_memory_repository.user_exists("nonexistent")
# Assert # Assert
assert exists is True assert exists is True
assert not_exists is False assert not_exists is False

View File

@@ -11,7 +11,7 @@ from unittest.mock import patch
import pytest import pytest
import pytest_asyncio import pytest_asyncio
from bson import ObjectId from bson import ObjectId
from mongomock_motor import AsyncMongoMockClient from mongomock.mongo_client import MongoClient
from app.models.document import FileType from app.models.document import FileType
from app.services.document_service import DocumentService from app.services.document_service import DocumentService
@@ -24,15 +24,15 @@ def cleanup_test_folder():
shutil.rmtree("test_folder", ignore_errors=True) shutil.rmtree("test_folder", ignore_errors=True)
@pytest_asyncio.fixture @pytest.fixture
async def in_memory_database(): def in_memory_database():
"""Create an in-memory database for testing.""" """Create an in-memory database for testing."""
client = AsyncMongoMockClient() client = MongoClient()
return client.test_database return client.test_database
@pytest_asyncio.fixture @pytest_asyncio.fixture
async def document_service(in_memory_database): def document_service(in_memory_database):
"""Create DocumentService with in-memory repositories.""" """Create DocumentService with in-memory repositories."""
service = DocumentService(in_memory_database, objects_folder="test_folder") service = DocumentService(in_memory_database, objects_folder="test_folder")
return service return service
@@ -72,8 +72,7 @@ class TestCreateDocument:
@patch('app.services.document_service.magic.from_buffer') @patch('app.services.document_service.magic.from_buffer')
@patch('app.services.document_service.datetime') @patch('app.services.document_service.datetime')
@pytest.mark.asyncio def test_i_can_create_document_with_new_content(
async def test_i_can_create_document_with_new_content(
self, self,
mock_datetime, mock_datetime,
mock_magic, mock_magic,
@@ -87,7 +86,7 @@ class TestCreateDocument:
mock_magic.return_value = "application/pdf" mock_magic.return_value = "application/pdf"
# Execute # Execute
result = await document_service.create_document( result = document_service.create_document(
"/test/test.pdf", "/test/test.pdf",
sample_file_bytes, sample_file_bytes,
"utf-8" "utf-8"
@@ -102,7 +101,7 @@ class TestCreateDocument:
assert result.file_hash == document_service._calculate_file_hash(sample_file_bytes) assert result.file_hash == document_service._calculate_file_hash(sample_file_bytes)
# Verify document created in database # Verify document created in database
doc_in_db = await document_service.document_repository.find_document_by_id(result.id) doc_in_db = document_service.document_repository.find_document_by_id(result.id)
assert doc_in_db is not None assert doc_in_db is not None
assert doc_in_db.id == result.id assert doc_in_db.id == result.id
assert doc_in_db.filename == result.filename assert doc_in_db.filename == result.filename
@@ -116,8 +115,7 @@ class TestCreateDocument:
@patch('app.services.document_service.magic.from_buffer') @patch('app.services.document_service.magic.from_buffer')
@patch('app.services.document_service.datetime') @patch('app.services.document_service.datetime')
@pytest.mark.asyncio def test_i_can_create_document_with_existing_content(
async def test_i_can_create_document_with_existing_content(
self, self,
mock_datetime, mock_datetime,
mock_magic, mock_magic,
@@ -131,14 +129,14 @@ class TestCreateDocument:
mock_magic.return_value = "application/pdf" mock_magic.return_value = "application/pdf"
# Create first document # Create first document
first_doc = await document_service.create_document( first_doc = document_service.create_document(
"/test/first.pdf", "/test/first.pdf",
sample_file_bytes, sample_file_bytes,
"utf-8" "utf-8"
) )
# Create second document with same content # Create second document with same content
second_doc = await document_service.create_document( second_doc = document_service.create_document(
"/test/second.pdf", "/test/second.pdf",
sample_file_bytes, sample_file_bytes,
"utf-8" "utf-8"
@@ -149,37 +147,34 @@ class TestCreateDocument:
assert first_doc.filename != second_doc.filename assert first_doc.filename != second_doc.filename
assert first_doc.filepath != second_doc.filepath assert first_doc.filepath != second_doc.filepath
@pytest.mark.asyncio def test_i_cannot_create_document_with_unsupported_file_type(
async def test_i_cannot_create_document_with_unsupported_file_type(
self, self,
document_service, document_service,
sample_file_bytes sample_file_bytes
): ):
"""Test that unsupported file types raise ValueError.""" """Test that unsupported file types raise ValueError."""
with pytest.raises(ValueError, match="Unsupported file type"): with pytest.raises(ValueError, match="Unsupported file type"):
await document_service.create_document( document_service.create_document(
"/test/test.xyz", # Unsupported extension "/test/test.xyz", # Unsupported extension
sample_file_bytes, sample_file_bytes,
"utf-8" "utf-8"
) )
@pytest.mark.asyncio def test_i_cannot_create_document_with_empty_file_path(
async def test_i_cannot_create_document_with_empty_file_path(
self, self,
document_service, document_service,
sample_file_bytes sample_file_bytes
): ):
"""Test that empty file path raises ValueError.""" """Test that empty file path raises ValueError."""
with pytest.raises(ValueError): with pytest.raises(ValueError):
await document_service.create_document( document_service.create_document(
"", # Empty path "", # Empty path
sample_file_bytes, sample_file_bytes,
"utf-8" "utf-8"
) )
@patch('app.services.document_service.magic.from_buffer') @patch('app.services.document_service.magic.from_buffer')
@pytest.mark.asyncio def test_i_can_create_document_with_empty_bytes(
async def test_i_can_create_document_with_empty_bytes(
self, self,
mock_magic, mock_magic,
document_service document_service
@@ -189,7 +184,7 @@ class TestCreateDocument:
mock_magic.return_value = "text/plain" mock_magic.return_value = "text/plain"
# Execute with empty bytes # Execute with empty bytes
result = await document_service.create_document( result = document_service.create_document(
"/test/empty.txt", "/test/empty.txt",
b"", # Empty bytes b"", # Empty bytes
"utf-8" "utf-8"
@@ -203,8 +198,7 @@ class TestGetMethods:
"""Tests for document retrieval methods.""" """Tests for document retrieval methods."""
@patch('app.services.document_service.magic.from_buffer') @patch('app.services.document_service.magic.from_buffer')
@pytest.mark.asyncio def test_i_can_get_document_by_id(
async def test_i_can_get_document_by_id(
self, self,
mock_magic, mock_magic,
document_service, document_service,
@@ -215,14 +209,14 @@ class TestGetMethods:
mock_magic.return_value = "application/pdf" mock_magic.return_value = "application/pdf"
# Create a document first # Create a document first
created_doc = await document_service.create_document( created_doc = document_service.create_document(
"/test/test.pdf", "/test/test.pdf",
sample_file_bytes, sample_file_bytes,
"utf-8" "utf-8"
) )
# Execute # Execute
result = await document_service.get_document_by_id(created_doc.id) result = document_service.get_document_by_id(created_doc.id)
# Verify # Verify
assert result is not None assert result is not None
@@ -230,8 +224,7 @@ class TestGetMethods:
assert result.filename == created_doc.filename assert result.filename == created_doc.filename
@patch('app.services.document_service.magic.from_buffer') @patch('app.services.document_service.magic.from_buffer')
@pytest.mark.asyncio def test_i_can_get_document_by_hash(
async def test_i_can_get_document_by_hash(
self, self,
mock_magic, mock_magic,
document_service, document_service,
@@ -242,14 +235,14 @@ class TestGetMethods:
mock_magic.return_value = "application/pdf" mock_magic.return_value = "application/pdf"
# Create a document first # Create a document first
created_doc = await document_service.create_document( created_doc = document_service.create_document(
"/test/test.pdf", "/test/test.pdf",
sample_file_bytes, sample_file_bytes,
"utf-8" "utf-8"
) )
# Execute # Execute
result = await document_service.get_document_by_hash(created_doc.file_hash) result = document_service.get_document_by_hash(created_doc.file_hash)
# Verify # Verify
assert result is not None assert result is not None
@@ -257,8 +250,7 @@ class TestGetMethods:
assert result.filename == created_doc.filename assert result.filename == created_doc.filename
@patch('app.services.document_service.magic.from_buffer') @patch('app.services.document_service.magic.from_buffer')
@pytest.mark.asyncio def test_i_can_get_document_by_filepath(
async def test_i_can_get_document_by_filepath(
self, self,
mock_magic, mock_magic,
document_service, document_service,
@@ -270,14 +262,14 @@ class TestGetMethods:
test_path = "/test/unique_test.pdf" test_path = "/test/unique_test.pdf"
# Create a document first # Create a document first
created_doc = await document_service.create_document( created_doc = document_service.create_document(
test_path, test_path,
sample_file_bytes, sample_file_bytes,
"utf-8" "utf-8"
) )
# Execute # Execute
result = await document_service.get_document_by_filepath(test_path) result = document_service.get_document_by_filepath(test_path)
# Verify # Verify
assert result is not None assert result is not None
@@ -285,8 +277,7 @@ class TestGetMethods:
assert result.id == created_doc.id assert result.id == created_doc.id
@patch('app.services.document_service.magic.from_buffer') @patch('app.services.document_service.magic.from_buffer')
@pytest.mark.asyncio def test_i_can_get_document_content(
async def test_i_can_get_document_content(
self, self,
mock_magic, mock_magic,
document_service, document_service,
@@ -297,38 +288,36 @@ class TestGetMethods:
mock_magic.return_value = "application/pdf" mock_magic.return_value = "application/pdf"
# Create a document first # Create a document first
created_doc = await document_service.create_document( created_doc = document_service.create_document(
"/test/test.pdf", "/test/test.pdf",
sample_file_bytes, sample_file_bytes,
"utf-8" "utf-8"
) )
# Execute # Execute
result = await document_service.get_document_content_by_hash(created_doc.file_hash) result = document_service.get_document_content_by_hash(created_doc.file_hash)
# Verify # Verify
assert result == sample_file_bytes assert result == sample_file_bytes
@pytest.mark.asyncio def test_i_cannot_get_nonexistent_document_by_id(
async def test_i_cannot_get_nonexistent_document_by_id(
self, self,
document_service document_service
): ):
"""Test that nonexistent document returns None.""" """Test that nonexistent document returns None."""
# Execute with random ObjectId # Execute with random ObjectId
result = await document_service.get_document_by_id(ObjectId()) result = document_service.get_document_by_id(ObjectId())
# Verify # Verify
assert result is None assert result is None
@pytest.mark.asyncio def test_i_cannot_get_nonexistent_document_by_hash(
async def test_i_cannot_get_nonexistent_document_by_hash(
self, self,
document_service document_service
): ):
"""Test that nonexistent document hash returns None.""" """Test that nonexistent document hash returns None."""
# Execute # Execute
result = await document_service.get_document_by_hash("nonexistent_hash") result = document_service.get_document_by_hash("nonexistent_hash")
# Verify # Verify
assert result is None assert result is None
@@ -338,8 +327,7 @@ class TestPaginationAndCounting:
"""Tests for document listing and counting.""" """Tests for document listing and counting."""
@patch('app.services.document_service.magic.from_buffer') @patch('app.services.document_service.magic.from_buffer')
@pytest.mark.asyncio def test_i_can_list_documents_with_pagination(
async def test_i_can_list_documents_with_pagination(
self, self,
mock_magic, mock_magic,
document_service, document_service,
@@ -351,25 +339,24 @@ class TestPaginationAndCounting:
# Create multiple documents # Create multiple documents
for i in range(5): for i in range(5):
await document_service.create_document( document_service.create_document(
f"/test/test{i}.pdf", f"/test/test{i}.pdf",
sample_file_bytes + bytes(str(i), 'utf-8'), # Make each file unique sample_file_bytes + bytes(str(i), 'utf-8'), # Make each file unique
"utf-8" "utf-8"
) )
# Execute with pagination # Execute with pagination
result = await document_service.list_documents(skip=1, limit=2) result = document_service.list_documents(skip=1, limit=2)
# Verify # Verify
assert len(result) == 2 assert len(result) == 2
# Test counting # Test counting
total_count = await document_service.count_documents() total_count = document_service.count_documents()
assert total_count == 5 assert total_count == 5
@patch('app.services.document_service.magic.from_buffer') @patch('app.services.document_service.magic.from_buffer')
@pytest.mark.asyncio def test_i_can_count_documents(
async def test_i_can_count_documents(
self, self,
mock_magic, mock_magic,
document_service, document_service,
@@ -380,19 +367,19 @@ class TestPaginationAndCounting:
mock_magic.return_value = "text/plain" mock_magic.return_value = "text/plain"
# Initially should be 0 # Initially should be 0
initial_count = await document_service.count_documents() initial_count = document_service.count_documents()
assert initial_count == 0 assert initial_count == 0
# Create some documents # Create some documents
for i in range(3): for i in range(3):
await document_service.create_document( document_service.create_document(
f"/test/test{i}.txt", f"/test/test{i}.txt",
sample_file_bytes + bytes(str(i), 'utf-8'), sample_file_bytes + bytes(str(i), 'utf-8'),
"utf-8" "utf-8"
) )
# Execute # Execute
final_count = await document_service.count_documents() final_count = document_service.count_documents()
# Verify # Verify
assert final_count == 3 assert final_count == 3
@@ -402,8 +389,7 @@ class TestUpdateAndDelete:
"""Tests for document update and deletion operations.""" """Tests for document update and deletion operations."""
@patch('app.services.document_service.magic.from_buffer') @patch('app.services.document_service.magic.from_buffer')
@pytest.mark.asyncio def test_i_can_update_document_metadata(
async def test_i_can_update_document_metadata(
self, self,
mock_magic, mock_magic,
document_service, document_service,
@@ -414,7 +400,7 @@ class TestUpdateAndDelete:
mock_magic.return_value = "application/pdf" mock_magic.return_value = "application/pdf"
# Create a document first # Create a document first
created_doc = await document_service.create_document( created_doc = document_service.create_document(
"/test/test.pdf", "/test/test.pdf",
sample_file_bytes, sample_file_bytes,
"utf-8" "utf-8"
@@ -422,7 +408,7 @@ class TestUpdateAndDelete:
# Execute update # Execute update
update_data = {"metadata": {"page_count": 5}} update_data = {"metadata": {"page_count": 5}}
result = await document_service.update_document(created_doc.id, update_data) result = document_service.update_document(created_doc.id, update_data)
# Verify # Verify
assert result is not None assert result is not None
@@ -433,14 +419,13 @@ class TestUpdateAndDelete:
assert result.file_type == created_doc.file_type assert result.file_type == created_doc.file_type
assert result.metadata == update_data['metadata'] assert result.metadata == update_data['metadata']
@pytest.mark.asyncio def test_i_can_update_document_content(
async def test_i_can_update_document_content(
self, self,
document_service, document_service,
sample_file_bytes sample_file_bytes
): ):
# Create a document first # Create a document first
created_doc = await document_service.create_document( created_doc = document_service.create_document(
"/test/test.pdf", "/test/test.pdf",
sample_file_bytes, sample_file_bytes,
"utf-8" "utf-8"
@@ -448,7 +433,7 @@ class TestUpdateAndDelete:
# Execute update # Execute update
update_data = {"file_bytes": b"this is an updated file content"} update_data = {"file_bytes": b"this is an updated file content"}
result = await document_service.update_document(created_doc.id, update_data) result = document_service.update_document(created_doc.id, update_data)
assert result.filename == created_doc.filename assert result.filename == created_doc.filename
assert result.filepath == created_doc.filepath assert result.filepath == created_doc.filepath
@@ -460,8 +445,7 @@ class TestUpdateAndDelete:
validate_file_saved(document_service, result.file_hash, b"this is an updated file content") validate_file_saved(document_service, result.file_hash, b"this is an updated file content")
@patch('app.services.document_service.magic.from_buffer') @patch('app.services.document_service.magic.from_buffer')
@pytest.mark.asyncio def test_i_can_delete_document_and_orphaned_content(
async def test_i_can_delete_document_and_orphaned_content(
self, self,
mock_magic, mock_magic,
document_service, document_service,
@@ -472,7 +456,7 @@ class TestUpdateAndDelete:
mock_magic.return_value = "application/pdf" mock_magic.return_value = "application/pdf"
# Create a document # Create a document
created_doc = await document_service.create_document( created_doc = document_service.create_document(
"/test/test.pdf", "/test/test.pdf",
sample_file_bytes, sample_file_bytes,
"utf-8" "utf-8"
@@ -482,12 +466,12 @@ class TestUpdateAndDelete:
validate_file_saved(document_service, created_doc.file_hash, sample_file_bytes) validate_file_saved(document_service, created_doc.file_hash, sample_file_bytes)
# Execute deletion # Execute deletion
result = await document_service.delete_document(created_doc.id) result = document_service.delete_document(created_doc.id)
# Verify document and content are deleted # Verify document and content are deleted
assert result is True assert result is True
deleted_doc = await document_service.get_document_by_id(created_doc.id) deleted_doc = document_service.get_document_by_id(created_doc.id)
assert deleted_doc is None assert deleted_doc is None
# validate content is deleted # validate content is deleted
@@ -496,8 +480,7 @@ class TestUpdateAndDelete:
assert not os.path.exists(target_file_path) assert not os.path.exists(target_file_path)
@patch('app.services.document_service.magic.from_buffer') @patch('app.services.document_service.magic.from_buffer')
@pytest.mark.asyncio def test_i_can_delete_document_without_affecting_shared_content(
async def test_i_can_delete_document_without_affecting_shared_content(
self, self,
mock_magic, mock_magic,
document_service, document_service,
@@ -508,13 +491,13 @@ class TestUpdateAndDelete:
mock_magic.return_value = "application/pdf" mock_magic.return_value = "application/pdf"
# Create two documents with same content # Create two documents with same content
doc1 = await document_service.create_document( doc1 = document_service.create_document(
"/test/test1.pdf", "/test/test1.pdf",
sample_file_bytes, sample_file_bytes,
"utf-8" "utf-8"
) )
doc2 = await document_service.create_document( doc2 = document_service.create_document(
"/test/test2.pdf", "/test/test2.pdf",
sample_file_bytes, sample_file_bytes,
"utf-8" "utf-8"
@@ -524,14 +507,14 @@ class TestUpdateAndDelete:
assert doc1.file_hash == doc2.file_hash assert doc1.file_hash == doc2.file_hash
# Delete first document # Delete first document
result = await document_service.delete_document(doc1.id) result = document_service.delete_document(doc1.id)
assert result is True assert result is True
# Verify first document is deleted but content still exists # Verify first document is deleted but content still exists
deleted_doc = await document_service.get_document_by_id(doc1.id) deleted_doc = document_service.get_document_by_id(doc1.id)
assert deleted_doc is None assert deleted_doc is None
remaining_doc = await document_service.get_document_by_id(doc2.id) remaining_doc = document_service.get_document_by_id(doc2.id)
assert remaining_doc is not None assert remaining_doc is not None
validate_file_saved(document_service, doc2.file_hash, sample_file_bytes) validate_file_saved(document_service, doc2.file_hash, sample_file_bytes)
@@ -585,3 +568,137 @@ class TestFileTypeDetection:
"""Test unsupported file type raises ValueError.""" """Test unsupported file type raises ValueError."""
with pytest.raises(ValueError, match="Unsupported file type"): with pytest.raises(ValueError, match="Unsupported file type"):
document_service._detect_file_type("/path/to/document.xyz") document_service._detect_file_type("/path/to/document.xyz")
class TestCreatePdf:
"""Tests for create_pdf method."""
@patch('app.services.document_service.convert_to_pdf')
@patch('app.services.document_service.magic.from_buffer')
def test_i_can_create_pdf_successfully(
self,
mock_magic,
mock_convert_to_pdf,
document_service,
sample_file_bytes
):
"""Test creating PDF from an existing document."""
# Setup
mock_magic.return_value = "text/plain"
# Create a document first
created_doc = document_service.create_document(
"/test/test.txt",
sample_file_bytes,
"utf-8"
)
# Mock the PDF conversion
pdf_path = os.path.join(document_service.temp_folder, "converted.pdf")
mock_convert_to_pdf.return_value = pdf_path
# Write a sample PDF file that the conversion would create
pdf_content = b"This is PDF content"
os.makedirs(os.path.dirname(pdf_path), exist_ok=True)
with open(pdf_path, "wb") as f:
f.write(pdf_content)
# Execute
result = document_service.create_pdf(created_doc.id)
# Verify
assert result is True
# Get the updated document
updated_doc = document_service.get_document_by_id(created_doc.id)
assert updated_doc.pdf_file_hash is not None
# Verify the PDF content was saved
pdf_hash = document_service._calculate_file_hash(pdf_content)
assert updated_doc.pdf_file_hash == pdf_hash
# Verify convert_to_pdf was called with correct arguments
doc_path = document_service.get_document_path(created_doc.file_hash)
mock_convert_to_pdf.assert_called_once_with(doc_path, document_service.temp_folder)
# Verify content exists on disk
validate_file_saved(document_service, pdf_hash, pdf_content)
# Verify PDF hash was added to document
updated_doc = document_service.get_document_by_id(created_doc.id)
pdf_hash = document_service._calculate_file_hash(pdf_content)
assert updated_doc.pdf_file_hash == pdf_hash
@patch('app.services.document_service.convert_to_pdf')
@patch('app.services.document_service.magic.from_buffer')
def test_i_can_reuse_existing_pdf(
self,
mock_magic,
mock_convert_to_pdf,
document_service,
sample_file_bytes
):
"""Test that if PDF already exists, it doesn't recreate it."""
# Setup
mock_magic.return_value = "text/plain"
# Create a document first
created_doc = document_service.create_document(
"/test/test.txt",
sample_file_bytes,
"utf-8"
)
# Create a fake PDF file and update the document
pdf_content = b"This is PDF content"
pdf_hash = document_service._calculate_file_hash(pdf_content)
document_service.save_content_if_needed(pdf_hash, pdf_content)
document_service.update_document(created_doc.id, {"pdf_file_hash": pdf_hash})
# Execute
result = document_service.create_pdf(created_doc.id)
# Verify
assert result is True
# Verify convert_to_pdf was NOT called
mock_convert_to_pdf.assert_not_called()
def test_i_cannot_create_pdf_for_nonexistent_document(
self,
document_service
):
"""Test behavior when document ID doesn't exist."""
# Execute with random ObjectId
result = document_service.create_pdf(ObjectId())
# Verify
assert result is False
@patch('app.services.document_service.magic.from_buffer')
def test_i_cannot_create_pdf_when_file_content_missing(
self,
mock_magic,
document_service,
sample_file_bytes
):
"""Test behavior when file content doesn't exist."""
# Setup
mock_magic.return_value = "text/plain"
# Create a document
created_doc = document_service.create_document(
"/test/test.txt",
sample_file_bytes,
"utf-8"
)
# Simulate missing content by removing file
file_path = document_service.get_document_path(created_doc.file_hash)
os.remove(file_path)
# Execute
result = document_service.create_pdf(created_doc.id)
# Verify
assert result is False

View File

@@ -6,9 +6,8 @@ using mongomock for better integration testing.
""" """
import pytest import pytest
import pytest_asyncio
from bson import ObjectId from bson import ObjectId
from mongomock_motor import AsyncMongoMockClient from mongomock.mongo_client import MongoClient
from app.exceptions.job_exceptions import InvalidStatusTransitionError from app.exceptions.job_exceptions import InvalidStatusTransitionError
from app.models.job import ProcessingStatus from app.models.job import ProcessingStatus
@@ -16,17 +15,17 @@ from app.models.types import PyObjectId
from app.services.job_service import JobService from app.services.job_service import JobService
@pytest_asyncio.fixture @pytest.fixture
async def in_memory_database(): def in_memory_database():
"""Create an in-memory database for testing.""" """Create an in-memory database for testing."""
client = AsyncMongoMockClient() client = MongoClient()
return client.test_database return client.test_database
@pytest_asyncio.fixture @pytest.fixture
async def job_service(in_memory_database): def job_service(in_memory_database):
"""Create JobService with in-memory repositories.""" """Create JobService with in-memory repositories."""
service = await JobService(in_memory_database).initialize() service = JobService(in_memory_database).initialize()
return service return service
@@ -45,8 +44,7 @@ def sample_task_id():
class TestCreateJob: class TestCreateJob:
"""Tests for create_job method.""" """Tests for create_job method."""
@pytest.mark.asyncio def test_i_can_create_job_with_task_id(
async def test_i_can_create_job_with_task_id(
self, self,
job_service, job_service,
sample_document_id, sample_document_id,
@@ -54,7 +52,7 @@ class TestCreateJob:
): ):
"""Test creating job with task ID.""" """Test creating job with task ID."""
# Execute # Execute
result = await job_service.create_job(sample_document_id, sample_task_id) result = job_service.create_job(sample_document_id, sample_task_id)
# Verify job creation # Verify job creation
assert result is not None assert result is not None
@@ -66,22 +64,21 @@ class TestCreateJob:
assert result.error_message is None assert result.error_message is None
# Verify job exists in database # Verify job exists in database
job_in_db = await job_service.get_job_by_id(result.id) job_in_db = job_service.get_job_by_id(result.id)
assert job_in_db is not None assert job_in_db is not None
assert job_in_db.id == result.id assert job_in_db.id == result.id
assert job_in_db.document_id == sample_document_id assert job_in_db.document_id == sample_document_id
assert job_in_db.task_id == sample_task_id assert job_in_db.task_id == sample_task_id
assert job_in_db.status == ProcessingStatus.PENDING assert job_in_db.status == ProcessingStatus.PENDING
@pytest.mark.asyncio def test_i_can_create_job_without_task_id(
async def test_i_can_create_job_without_task_id(
self, self,
job_service, job_service,
sample_document_id sample_document_id
): ):
"""Test creating job without task ID.""" """Test creating job without task ID."""
# Execute # Execute
result = await job_service.create_job(sample_document_id) result = job_service.create_job(sample_document_id)
# Verify job creation # Verify job creation
assert result is not None assert result is not None
@@ -96,8 +93,7 @@ class TestCreateJob:
class TestGetJobMethods: class TestGetJobMethods:
"""Tests for job retrieval methods.""" """Tests for job retrieval methods."""
@pytest.mark.asyncio def test_i_can_get_job_by_id(
async def test_i_can_get_job_by_id(
self, self,
job_service, job_service,
sample_document_id, sample_document_id,
@@ -105,10 +101,10 @@ class TestGetJobMethods:
): ):
"""Test retrieving job by ID.""" """Test retrieving job by ID."""
# Create a job first # Create a job first
created_job = await job_service.create_job(sample_document_id, sample_task_id) created_job = job_service.create_job(sample_document_id, sample_task_id)
# Execute # Execute
result = await job_service.get_job_by_id(created_job.id) result = job_service.get_job_by_id(created_job.id)
# Verify # Verify
assert result is not None assert result is not None
@@ -117,25 +113,24 @@ class TestGetJobMethods:
assert result.task_id == created_job.task_id assert result.task_id == created_job.task_id
assert result.status == created_job.status assert result.status == created_job.status
@pytest.mark.asyncio def test_i_can_get_jobs_by_status(
async def test_i_can_get_jobs_by_status(
self, self,
job_service, job_service,
sample_document_id sample_document_id
): ):
"""Test retrieving jobs by status.""" """Test retrieving jobs by status."""
# Create jobs with different statuses # Create jobs with different statuses
pending_job = await job_service.create_job(sample_document_id, "pending-task") pending_job = job_service.create_job(sample_document_id, "pending-task")
processing_job = await job_service.create_job(ObjectId(), "processing-task") processing_job = job_service.create_job(ObjectId(), "processing-task")
await job_service.mark_job_as_started(processing_job.id) job_service.mark_job_as_started(processing_job.id)
completed_job = await job_service.create_job(ObjectId(), "completed-task") completed_job = job_service.create_job(ObjectId(), "completed-task")
await job_service.mark_job_as_started(completed_job.id) job_service.mark_job_as_started(completed_job.id)
await job_service.mark_job_as_completed(completed_job.id) job_service.mark_job_as_completed(completed_job.id)
# Execute - get pending jobs # Execute - get pending jobs
pending_results = await job_service.get_jobs_by_status(ProcessingStatus.PENDING) pending_results = job_service.get_jobs_by_status(ProcessingStatus.PENDING)
# Verify # Verify
assert len(pending_results) == 1 assert len(pending_results) == 1
@@ -143,12 +138,12 @@ class TestGetJobMethods:
assert pending_results[0].status == ProcessingStatus.PENDING assert pending_results[0].status == ProcessingStatus.PENDING
# Execute - get processing jobs # Execute - get processing jobs
processing_results = await job_service.get_jobs_by_status(ProcessingStatus.PROCESSING) processing_results = job_service.get_jobs_by_status(ProcessingStatus.PROCESSING)
assert len(processing_results) == 1 assert len(processing_results) == 1
assert processing_results[0].status == ProcessingStatus.PROCESSING assert processing_results[0].status == ProcessingStatus.PROCESSING
# Execute - get completed jobs # Execute - get completed jobs
completed_results = await job_service.get_jobs_by_status(ProcessingStatus.COMPLETED) completed_results = job_service.get_jobs_by_status(ProcessingStatus.COMPLETED)
assert len(completed_results) == 1 assert len(completed_results) == 1
assert completed_results[0].status == ProcessingStatus.COMPLETED assert completed_results[0].status == ProcessingStatus.COMPLETED
@@ -156,8 +151,7 @@ class TestGetJobMethods:
class TestUpdateStatus: class TestUpdateStatus:
"""Tests for mark_job_as_started method.""" """Tests for mark_job_as_started method."""
@pytest.mark.asyncio def test_i_can_mark_pending_job_as_started(
async def test_i_can_mark_pending_job_as_started(
self, self,
job_service, job_service,
sample_document_id, sample_document_id,
@@ -165,11 +159,11 @@ class TestUpdateStatus:
): ):
"""Test marking pending job as started (PENDING → PROCESSING).""" """Test marking pending job as started (PENDING → PROCESSING)."""
# Create a pending job # Create a pending job
created_job = await job_service.create_job(sample_document_id, sample_task_id) created_job = job_service.create_job(sample_document_id, sample_task_id)
assert created_job.status == ProcessingStatus.PENDING assert created_job.status == ProcessingStatus.PENDING
# Execute # Execute
result = await job_service.mark_job_as_started(created_job.id) result = job_service.mark_job_as_started(created_job.id)
# Verify status transition # Verify status transition
assert result is not None assert result is not None
@@ -177,11 +171,10 @@ class TestUpdateStatus:
assert result.status == ProcessingStatus.PROCESSING assert result.status == ProcessingStatus.PROCESSING
# Verify in database # Verify in database
updated_job = await job_service.get_job_by_id(created_job.id) updated_job = job_service.get_job_by_id(created_job.id)
assert updated_job.status == ProcessingStatus.PROCESSING assert updated_job.status == ProcessingStatus.PROCESSING
@pytest.mark.asyncio def test_i_cannot_mark_processing_job_as_started(
async def test_i_cannot_mark_processing_job_as_started(
self, self,
job_service, job_service,
sample_document_id, sample_document_id,
@@ -189,19 +182,18 @@ class TestUpdateStatus:
): ):
"""Test that processing job cannot be marked as started.""" """Test that processing job cannot be marked as started."""
# Create and start a job # Create and start a job
created_job = await job_service.create_job(sample_document_id, sample_task_id) created_job = job_service.create_job(sample_document_id, sample_task_id)
await job_service.mark_job_as_started(created_job.id) job_service.mark_job_as_started(created_job.id)
# Try to start it again # Try to start it again
with pytest.raises(InvalidStatusTransitionError) as exc_info: with pytest.raises(InvalidStatusTransitionError) as exc_info:
await job_service.mark_job_as_started(created_job.id) job_service.mark_job_as_started(created_job.id)
# Verify exception details # Verify exception details
assert exc_info.value.current_status == ProcessingStatus.PROCESSING assert exc_info.value.current_status == ProcessingStatus.PROCESSING
assert exc_info.value.target_status == ProcessingStatus.PROCESSING assert exc_info.value.target_status == ProcessingStatus.PROCESSING
@pytest.mark.asyncio def test_i_cannot_mark_completed_job_as_started(
async def test_i_cannot_mark_completed_job_as_started(
self, self,
job_service, job_service,
sample_document_id, sample_document_id,
@@ -209,20 +201,19 @@ class TestUpdateStatus:
): ):
"""Test that completed job cannot be marked as started.""" """Test that completed job cannot be marked as started."""
# Create, start, and complete a job # Create, start, and complete a job
created_job = await job_service.create_job(sample_document_id, sample_task_id) created_job = job_service.create_job(sample_document_id, sample_task_id)
await job_service.mark_job_as_started(created_job.id) job_service.mark_job_as_started(created_job.id)
await job_service.mark_job_as_completed(created_job.id) job_service.mark_job_as_completed(created_job.id)
# Try to start it again # Try to start it again
with pytest.raises(InvalidStatusTransitionError) as exc_info: with pytest.raises(InvalidStatusTransitionError) as exc_info:
await job_service.mark_job_as_started(created_job.id) job_service.mark_job_as_started(created_job.id)
# Verify exception details # Verify exception details
assert exc_info.value.current_status == ProcessingStatus.COMPLETED assert exc_info.value.current_status == ProcessingStatus.COMPLETED
assert exc_info.value.target_status == ProcessingStatus.PROCESSING assert exc_info.value.target_status == ProcessingStatus.PROCESSING
@pytest.mark.asyncio def test_i_cannot_mark_failed_job_as_started(
async def test_i_cannot_mark_failed_job_as_started(
self, self,
job_service, job_service,
sample_document_id, sample_document_id,
@@ -230,20 +221,19 @@ class TestUpdateStatus:
): ):
"""Test that failed job cannot be marked as started.""" """Test that failed job cannot be marked as started."""
# Create, start, and fail a job # Create, start, and fail a job
created_job = await job_service.create_job(sample_document_id, sample_task_id) created_job = job_service.create_job(sample_document_id, sample_task_id)
await job_service.mark_job_as_started(created_job.id) job_service.mark_job_as_started(created_job.id)
await job_service.mark_job_as_failed(created_job.id, "Test error") job_service.mark_job_as_failed(created_job.id, "Test error")
# Try to start it again # Try to start it again
with pytest.raises(InvalidStatusTransitionError) as exc_info: with pytest.raises(InvalidStatusTransitionError) as exc_info:
await job_service.mark_job_as_started(created_job.id) job_service.mark_job_as_started(created_job.id)
# Verify exception details # Verify exception details
assert exc_info.value.current_status == ProcessingStatus.FAILED assert exc_info.value.current_status == ProcessingStatus.FAILED
assert exc_info.value.target_status == ProcessingStatus.PROCESSING assert exc_info.value.target_status == ProcessingStatus.PROCESSING
@pytest.mark.asyncio def test_i_can_mark_processing_job_as_completed(
async def test_i_can_mark_processing_job_as_completed(
self, self,
job_service, job_service,
sample_document_id, sample_document_id,
@@ -251,11 +241,11 @@ class TestUpdateStatus:
): ):
"""Test marking processing job as completed (PROCESSING → COMPLETED).""" """Test marking processing job as completed (PROCESSING → COMPLETED)."""
# Create and start a job # Create and start a job
created_job = await job_service.create_job(sample_document_id, sample_task_id) created_job = job_service.create_job(sample_document_id, sample_task_id)
started_job = await job_service.mark_job_as_started(created_job.id) started_job = job_service.mark_job_as_started(created_job.id)
# Execute # Execute
result = await job_service.mark_job_as_completed(created_job.id) result = job_service.mark_job_as_completed(created_job.id)
# Verify status transition # Verify status transition
assert result is not None assert result is not None
@@ -263,11 +253,10 @@ class TestUpdateStatus:
assert result.status == ProcessingStatus.COMPLETED assert result.status == ProcessingStatus.COMPLETED
# Verify in database # Verify in database
updated_job = await job_service.get_job_by_id(created_job.id) updated_job = job_service.get_job_by_id(created_job.id)
assert updated_job.status == ProcessingStatus.COMPLETED assert updated_job.status == ProcessingStatus.COMPLETED
@pytest.mark.asyncio def test_i_cannot_mark_pending_job_as_completed(
async def test_i_cannot_mark_pending_job_as_completed(
self, self,
job_service, job_service,
sample_document_id, sample_document_id,
@@ -275,18 +264,17 @@ class TestUpdateStatus:
): ):
"""Test that pending job cannot be marked as completed.""" """Test that pending job cannot be marked as completed."""
# Create a pending job # Create a pending job
created_job = await job_service.create_job(sample_document_id, sample_task_id) created_job = job_service.create_job(sample_document_id, sample_task_id)
# Try to complete it directly # Try to complete it directly
with pytest.raises(InvalidStatusTransitionError) as exc_info: with pytest.raises(InvalidStatusTransitionError) as exc_info:
await job_service.mark_job_as_completed(created_job.id) job_service.mark_job_as_completed(created_job.id)
# Verify exception details # Verify exception details
assert exc_info.value.current_status == ProcessingStatus.PENDING assert exc_info.value.current_status == ProcessingStatus.PENDING
assert exc_info.value.target_status == ProcessingStatus.COMPLETED assert exc_info.value.target_status == ProcessingStatus.COMPLETED
@pytest.mark.asyncio def test_i_cannot_mark_completed_job_as_completed(
async def test_i_cannot_mark_completed_job_as_completed(
self, self,
job_service, job_service,
sample_document_id, sample_document_id,
@@ -294,20 +282,19 @@ class TestUpdateStatus:
): ):
"""Test that completed job cannot be marked as completed again.""" """Test that completed job cannot be marked as completed again."""
# Create, start, and complete a job # Create, start, and complete a job
created_job = await job_service.create_job(sample_document_id, sample_task_id) created_job = job_service.create_job(sample_document_id, sample_task_id)
await job_service.mark_job_as_started(created_job.id) job_service.mark_job_as_started(created_job.id)
await job_service.mark_job_as_completed(created_job.id) job_service.mark_job_as_completed(created_job.id)
# Try to complete it again # Try to complete it again
with pytest.raises(InvalidStatusTransitionError) as exc_info: with pytest.raises(InvalidStatusTransitionError) as exc_info:
await job_service.mark_job_as_completed(created_job.id) job_service.mark_job_as_completed(created_job.id)
# Verify exception details # Verify exception details
assert exc_info.value.current_status == ProcessingStatus.COMPLETED assert exc_info.value.current_status == ProcessingStatus.COMPLETED
assert exc_info.value.target_status == ProcessingStatus.COMPLETED assert exc_info.value.target_status == ProcessingStatus.COMPLETED
@pytest.mark.asyncio def test_i_cannot_mark_failed_job_as_completed(
async def test_i_cannot_mark_failed_job_as_completed(
self, self,
job_service, job_service,
sample_document_id, sample_document_id,
@@ -315,20 +302,19 @@ class TestUpdateStatus:
): ):
"""Test that failed job cannot be marked as completed.""" """Test that failed job cannot be marked as completed."""
# Create, start, and fail a job # Create, start, and fail a job
created_job = await job_service.create_job(sample_document_id, sample_task_id) created_job = job_service.create_job(sample_document_id, sample_task_id)
await job_service.mark_job_as_started(created_job.id) job_service.mark_job_as_started(created_job.id)
await job_service.mark_job_as_failed(created_job.id, "Test error") job_service.mark_job_as_failed(created_job.id, "Test error")
# Try to complete it # Try to complete it
with pytest.raises(InvalidStatusTransitionError) as exc_info: with pytest.raises(InvalidStatusTransitionError) as exc_info:
await job_service.mark_job_as_completed(created_job.id) job_service.mark_job_as_completed(created_job.id)
# Verify exception details # Verify exception details
assert exc_info.value.current_status == ProcessingStatus.FAILED assert exc_info.value.current_status == ProcessingStatus.FAILED
assert exc_info.value.target_status == ProcessingStatus.COMPLETED assert exc_info.value.target_status == ProcessingStatus.COMPLETED
@pytest.mark.asyncio def test_i_can_mark_processing_job_as_failed_with_error_message(
async def test_i_can_mark_processing_job_as_failed_with_error_message(
self, self,
job_service, job_service,
sample_document_id, sample_document_id,
@@ -336,13 +322,13 @@ class TestUpdateStatus:
): ):
"""Test marking processing job as failed with error message.""" """Test marking processing job as failed with error message."""
# Create and start a job # Create and start a job
created_job = await job_service.create_job(sample_document_id, sample_task_id) created_job = job_service.create_job(sample_document_id, sample_task_id)
started_job = await job_service.mark_job_as_started(created_job.id) started_job = job_service.mark_job_as_started(created_job.id)
error_message = "Processing failed due to invalid file format" error_message = "Processing failed due to invalid file format"
# Execute # Execute
result = await job_service.mark_job_as_failed(created_job.id, error_message) result = job_service.mark_job_as_failed(created_job.id, error_message)
# Verify status transition # Verify status transition
assert result is not None assert result is not None
@@ -351,12 +337,11 @@ class TestUpdateStatus:
assert result.error_message == error_message assert result.error_message == error_message
# Verify in database # Verify in database
updated_job = await job_service.get_job_by_id(created_job.id) updated_job = job_service.get_job_by_id(created_job.id)
assert updated_job.status == ProcessingStatus.FAILED assert updated_job.status == ProcessingStatus.FAILED
assert updated_job.error_message == error_message assert updated_job.error_message == error_message
@pytest.mark.asyncio def test_i_can_mark_processing_job_as_failed_without_error_message(
async def test_i_can_mark_processing_job_as_failed_without_error_message(
self, self,
job_service, job_service,
sample_document_id, sample_document_id,
@@ -364,19 +349,18 @@ class TestUpdateStatus:
): ):
"""Test marking processing job as failed without error message.""" """Test marking processing job as failed without error message."""
# Create and start a job # Create and start a job
created_job = await job_service.create_job(sample_document_id, sample_task_id) created_job = job_service.create_job(sample_document_id, sample_task_id)
await job_service.mark_job_as_started(created_job.id) job_service.mark_job_as_started(created_job.id)
# Execute without error message # Execute without error message
result = await job_service.mark_job_as_failed(created_job.id) result = job_service.mark_job_as_failed(created_job.id)
# Verify status transition # Verify status transition
assert result is not None assert result is not None
assert result.status == ProcessingStatus.FAILED assert result.status == ProcessingStatus.FAILED
assert result.error_message is None assert result.error_message is None
@pytest.mark.asyncio def test_i_cannot_mark_pending_job_as_failed(
async def test_i_cannot_mark_pending_job_as_failed(
self, self,
job_service, job_service,
sample_document_id, sample_document_id,
@@ -384,18 +368,17 @@ class TestUpdateStatus:
): ):
"""Test that pending job cannot be marked as failed.""" """Test that pending job cannot be marked as failed."""
# Create a pending job # Create a pending job
created_job = await job_service.create_job(sample_document_id, sample_task_id) created_job = job_service.create_job(sample_document_id, sample_task_id)
# Try to fail it directly # Try to fail it directly
with pytest.raises(InvalidStatusTransitionError) as exc_info: with pytest.raises(InvalidStatusTransitionError) as exc_info:
await job_service.mark_job_as_failed(created_job.id, "Test error") job_service.mark_job_as_failed(created_job.id, "Test error")
# Verify exception details # Verify exception details
assert exc_info.value.current_status == ProcessingStatus.PENDING assert exc_info.value.current_status == ProcessingStatus.PENDING
assert exc_info.value.target_status == ProcessingStatus.FAILED assert exc_info.value.target_status == ProcessingStatus.FAILED
@pytest.mark.asyncio def test_i_cannot_mark_completed_job_as_failed(
async def test_i_cannot_mark_completed_job_as_failed(
self, self,
job_service, job_service,
sample_document_id, sample_document_id,
@@ -403,20 +386,19 @@ class TestUpdateStatus:
): ):
"""Test that completed job cannot be marked as failed.""" """Test that completed job cannot be marked as failed."""
# Create, start, and complete a job # Create, start, and complete a job
created_job = await job_service.create_job(sample_document_id, sample_task_id) created_job = job_service.create_job(sample_document_id, sample_task_id)
await job_service.mark_job_as_started(created_job.id) job_service.mark_job_as_started(created_job.id)
await job_service.mark_job_as_completed(created_job.id) job_service.mark_job_as_completed(created_job.id)
# Try to fail it # Try to fail it
with pytest.raises(InvalidStatusTransitionError) as exc_info: with pytest.raises(InvalidStatusTransitionError) as exc_info:
await job_service.mark_job_as_failed(created_job.id, "Test error") job_service.mark_job_as_failed(created_job.id, "Test error")
# Verify exception details # Verify exception details
assert exc_info.value.current_status == ProcessingStatus.COMPLETED assert exc_info.value.current_status == ProcessingStatus.COMPLETED
assert exc_info.value.target_status == ProcessingStatus.FAILED assert exc_info.value.target_status == ProcessingStatus.FAILED
@pytest.mark.asyncio def test_i_cannot_mark_failed_job_as_failed(
async def test_i_cannot_mark_failed_job_as_failed(
self, self,
job_service, job_service,
sample_document_id, sample_document_id,
@@ -424,24 +406,42 @@ class TestUpdateStatus:
): ):
"""Test that failed job cannot be marked as failed again.""" """Test that failed job cannot be marked as failed again."""
# Create, start, and fail a job # Create, start, and fail a job
created_job = await job_service.create_job(sample_document_id, sample_task_id) created_job = job_service.create_job(sample_document_id, sample_task_id)
await job_service.mark_job_as_started(created_job.id) job_service.mark_job_as_started(created_job.id)
await job_service.mark_job_as_failed(created_job.id, "First error") job_service.mark_job_as_failed(created_job.id, "First error")
# Try to fail it again # Try to fail it again
with pytest.raises(InvalidStatusTransitionError) as exc_info: with pytest.raises(InvalidStatusTransitionError) as exc_info:
await job_service.mark_job_as_failed(created_job.id, "Second error") job_service.mark_job_as_failed(created_job.id, "Second error")
# Verify exception details # Verify exception details
assert exc_info.value.current_status == ProcessingStatus.FAILED assert exc_info.value.current_status == ProcessingStatus.FAILED
assert exc_info.value.target_status == ProcessingStatus.FAILED assert exc_info.value.target_status == ProcessingStatus.FAILED
def test_i_can_update_job_status(
    self,
    job_service,
    sample_document_id,
    sample_task_id
):
    """Test updating a started job to an arbitrary valid status (SAVING_OBJECT)."""
    # Create and start a job so it is in PROCESSING state
    created_job = job_service.create_job(sample_document_id, sample_task_id)
    job_service.mark_job_as_started(created_job.id)
    # Execute the explicit status update
    result = job_service.update_job_status(created_job.id, ProcessingStatus.SAVING_OBJECT)
    # Verify status transition and that no error message was set
    assert result is not None
    assert result.status == ProcessingStatus.SAVING_OBJECT
    assert result.error_message is None
class TestDeleteJob: class TestDeleteJob:
"""Tests for delete_job method.""" """Tests for delete_job method."""
@pytest.mark.asyncio def test_i_can_delete_existing_job(
async def test_i_can_delete_existing_job(
self, self,
job_service, job_service,
sample_document_id, sample_document_id,
@@ -449,30 +449,29 @@ class TestDeleteJob:
): ):
"""Test deleting an existing job.""" """Test deleting an existing job."""
# Create a job # Create a job
created_job = await job_service.create_job(sample_document_id, sample_task_id) created_job = job_service.create_job(sample_document_id, sample_task_id)
# Verify job exists # Verify job exists
job_before_delete = await job_service.get_job_by_id(created_job.id) job_before_delete = job_service.get_job_by_id(created_job.id)
assert job_before_delete is not None assert job_before_delete is not None
# Execute deletion # Execute deletion
result = await job_service.delete_job(created_job.id) result = job_service.delete_job(created_job.id)
# Verify deletion # Verify deletion
assert result is True assert result is True
# Verify job no longer exists # Verify job no longer exists
deleted_job = await job_service.get_job_by_id(created_job.id) deleted_job = job_service.get_job_by_id(created_job.id)
assert deleted_job is None assert deleted_job is None
@pytest.mark.asyncio def test_i_cannot_delete_nonexistent_job(
async def test_i_cannot_delete_nonexistent_job(
self, self,
job_service job_service
): ):
"""Test deleting a nonexistent job returns False.""" """Test deleting a nonexistent job returns False."""
# Execute deletion with random ObjectId # Execute deletion with random ObjectId
result = await job_service.delete_job(ObjectId()) result = job_service.delete_job(ObjectId())
# Verify # Verify
assert result is False assert result is False
@@ -481,8 +480,7 @@ class TestDeleteJob:
class TestStatusTransitionValidation: class TestStatusTransitionValidation:
"""Tests for status transition validation across different scenarios.""" """Tests for status transition validation across different scenarios."""
@pytest.mark.asyncio def test_valid_job_lifecycle_flow(
async def test_valid_job_lifecycle_flow(
self, self,
job_service, job_service,
sample_document_id, sample_document_id,
@@ -490,19 +488,18 @@ class TestStatusTransitionValidation:
): ):
"""Test complete valid job lifecycle: PENDING → PROCESSING → COMPLETED.""" """Test complete valid job lifecycle: PENDING → PROCESSING → COMPLETED."""
# Create job (PENDING) # Create job (PENDING)
job = await job_service.create_job(sample_document_id, sample_task_id) job = job_service.create_job(sample_document_id, sample_task_id)
assert job.status == ProcessingStatus.PENDING assert job.status == ProcessingStatus.PENDING
# Start job (PENDING → PROCESSING) # Start job (PENDING → PROCESSING)
started_job = await job_service.mark_job_as_started(job.id) started_job = job_service.mark_job_as_started(job.id)
assert started_job.status == ProcessingStatus.PROCESSING assert started_job.status == ProcessingStatus.PROCESSING
# Complete job (PROCESSING → COMPLETED) # Complete job (PROCESSING → COMPLETED)
completed_job = await job_service.mark_job_as_completed(job.id) completed_job = job_service.mark_job_as_completed(job.id)
assert completed_job.status == ProcessingStatus.COMPLETED assert completed_job.status == ProcessingStatus.COMPLETED
@pytest.mark.asyncio def test_valid_job_failure_flow(
async def test_valid_job_failure_flow(
self, self,
job_service, job_service,
sample_document_id, sample_document_id,
@@ -510,69 +507,31 @@ class TestStatusTransitionValidation:
): ):
"""Test valid job failure: PENDING → PROCESSING → FAILED.""" """Test valid job failure: PENDING → PROCESSING → FAILED."""
# Create job (PENDING) # Create job (PENDING)
job = await job_service.create_job(sample_document_id, sample_task_id) job = job_service.create_job(sample_document_id, sample_task_id)
assert job.status == ProcessingStatus.PENDING assert job.status == ProcessingStatus.PENDING
# Start job (PENDING → PROCESSING) # Start job (PENDING → PROCESSING)
started_job = await job_service.mark_job_as_started(job.id) started_job = job_service.mark_job_as_started(job.id)
assert started_job.status == ProcessingStatus.PROCESSING assert started_job.status == ProcessingStatus.PROCESSING
# Fail job (PROCESSING → FAILED) # Fail job (PROCESSING → FAILED)
failed_job = await job_service.mark_job_as_failed(job.id, "Test failure") failed_job = job_service.mark_job_as_failed(job.id, "Test failure")
assert failed_job.status == ProcessingStatus.FAILED assert failed_job.status == ProcessingStatus.FAILED
assert failed_job.error_message == "Test failure" assert failed_job.error_message == "Test failure"
def test_job_operations_with_empty_database(
class TestEdgeCases:
"""Tests for edge cases and error conditions."""
#
# @pytest.mark.asyncio
# async def test_multiple_jobs_for_same_file(
# self,
# job_service,
# sample_document_id
# ):
# """Test handling multiple jobs for the same file."""
# # Create multiple jobs for same file
# job1 = await job_service.create_job(sample_document_id, "task-1")
# job2 = await job_service.create_job(sample_document_id, "task-2")
# job3 = await job_service.create_job(sample_document_id, "task-3")
#
# # Verify all jobs exist and are independent
# jobs_for_file = await job_service.get_jobs_by_file_id(sample_document_id)
# assert len(jobs_for_file) == 3
#
# job_ids = [job.id for job in jobs_for_file]
# assert job1.id in job_ids
# assert job2.id in job_ids
# assert job3.id in job_ids
#
# # Verify status transitions work independently
# await job_service.mark_job_as_started(job1.id)
# await job_service.mark_job_as_completed(job1.id)
#
# # Other jobs should still be pending
# updated_job2 = await job_service.get_job_by_id(job2.id)
# updated_job3 = await job_service.get_job_by_id(job3.id)
#
# assert updated_job2.status == ProcessingStatus.PENDING
# assert updated_job3.status == ProcessingStatus.PENDING
@pytest.mark.asyncio
async def test_job_operations_with_empty_database(
self, self,
job_service job_service
): ):
"""Test job operations when database is empty.""" """Test job operations when database is empty."""
# Try to get nonexistent job # Try to get nonexistent job
result = await job_service.get_job_by_id(ObjectId()) result = job_service.get_job_by_id(ObjectId())
assert result is None assert result is None
# Try to get jobs by status when none exist # Try to get jobs by status when none exist
pending_jobs = await job_service.get_jobs_by_status(ProcessingStatus.PENDING) pending_jobs = job_service.get_jobs_by_status(ProcessingStatus.PENDING)
assert pending_jobs == [] assert pending_jobs == []
# Try to delete nonexistent job # Try to delete nonexistent job
delete_result = await job_service.delete_job(ObjectId()) delete_result = job_service.delete_job(ObjectId())
assert delete_result is False assert delete_result is False

View File

@@ -0,0 +1,739 @@
"""
Unit tests for UserService using in-memory MongoDB.
Tests the business logic operations with real MongoDB operations
using mongomock for better integration testing.
"""
import pytest
from bson import ObjectId
from mongomock.mongo_client import MongoClient
from app.models.auth import UserRole
from app.models.user import UserCreate, UserUpdate, UserCreateNoValidation
from app.services.user_service import UserService
@pytest.fixture
def in_memory_database():
    """Provide a fresh in-memory (mongomock) database for each test."""
    return MongoClient().test_database
@pytest.fixture
def user_service(in_memory_database):
    """Return a fully initialized UserService wired to the in-memory database."""
    return UserService(in_memory_database).initialize()
@pytest.fixture
def sample_user_data():
    """Sample user payload used across the tests."""
    return dict(
        username="testuser",
        email="testuser@example.com",
        password="SecureP@ssw0rd123",
    )
@pytest.fixture
def sample_user_data_2():
    """A second, distinct user payload for multi-user scenarios."""
    return dict(
        username="anotheruser",
        email="anotheruser@example.com",
        password="AnotherP@ssw0rd456",
    )
class TestCreateUser:
    """Tests for create_user method."""

    def test_i_can_create_user_with_valid_data(
        self,
        user_service,
        sample_user_data
    ):
        """Test creating user with valid data."""
        created = user_service.create_user(UserCreate(**sample_user_data))

        # The returned model mirrors the input, minus the raw password
        assert created is not None
        assert created.username == sample_user_data["username"]
        assert created.email == sample_user_data["email"]
        assert created.hashed_password is not None
        assert created.hashed_password != sample_user_data["password"]
        assert created.role == UserRole.USER
        assert created.is_active is True
        assert created.preferences == {}
        assert created.created_at is not None
        assert created.updated_at is not None

        # Round-trip through the database to confirm persistence
        persisted = user_service.get_user_by_id(str(created.id))
        assert persisted is not None
        assert persisted.id == created.id
        assert persisted.username == sample_user_data["username"]

    def test_i_cannot_create_user_with_duplicate_username(
        self,
        user_service,
        sample_user_data
    ):
        """Test that duplicate username raises ValueError."""
        user_service.create_user(UserCreate(**sample_user_data))

        # Same username with a different email must still be rejected
        conflicting = {**sample_user_data, "email": "different@example.com"}
        with pytest.raises(ValueError) as exc_info:
            user_service.create_user(UserCreate(**conflicting))

        message = str(exc_info.value)
        assert "already exists" in message
        assert sample_user_data["username"] in message

    def test_i_cannot_create_user_with_duplicate_email(
        self,
        user_service,
        sample_user_data
    ):
        """Test that duplicate email raises ValueError."""
        user_service.create_user(UserCreate(**sample_user_data))

        # Same email with a different username must still be rejected
        conflicting = {**sample_user_data, "username": "differentuser"}
        with pytest.raises(ValueError) as exc_info:
            user_service.create_user(UserCreate(**conflicting))

        message = str(exc_info.value)
        assert "already exists" in message
        assert sample_user_data["email"] in message
class TestGetUserMethods:
    """Tests for user retrieval methods."""

    def test_i_can_get_user_by_username(self, user_service, sample_user_data):
        """Test retrieving user by username."""
        created = user_service.create_user(UserCreate(**sample_user_data))

        fetched = user_service.get_user_by_username(sample_user_data["username"])

        assert fetched is not None
        assert fetched.id == created.id
        assert fetched.username == sample_user_data["username"]
        assert fetched.email == sample_user_data["email"]

    def test_i_can_get_user_by_id(self, user_service, sample_user_data):
        """Test retrieving user by ID."""
        created = user_service.create_user(UserCreate(**sample_user_data))

        fetched = user_service.get_user_by_id(str(created.id))

        assert fetched is not None
        assert fetched.id == created.id
        assert fetched.username == sample_user_data["username"]
        assert fetched.email == sample_user_data["email"]

    def test_i_can_check_user_exists(self, user_service, sample_user_data):
        """Test checking if user exists."""
        # Absent before creation, present afterwards
        assert user_service.user_exists(sample_user_data["username"]) is False
        user_service.create_user(UserCreate(**sample_user_data))
        assert user_service.user_exists(sample_user_data["username"]) is True

    def test_i_cannot_get_nonexistent_user_by_username(self, user_service):
        """Test retrieving nonexistent user by username returns None."""
        assert user_service.get_user_by_username("nonexistentuser") is None

    def test_i_cannot_get_nonexistent_user_by_id(self, user_service):
        """Test retrieving nonexistent user by ID returns None."""
        # A random ObjectId cannot match any stored user
        assert user_service.get_user_by_id(str(ObjectId())) is None
class TestAuthenticateUser:
    """Tests for authenticate_user method."""

    def test_i_can_authenticate_user_with_valid_credentials(
        self, user_service, sample_user_data
    ):
        """Test authenticating user with valid credentials."""
        created = user_service.create_user(UserCreate(**sample_user_data))

        authenticated = user_service.authenticate_user(
            sample_user_data["username"],
            sample_user_data["password"],
        )

        assert authenticated is not None
        assert authenticated.id == created.id
        assert authenticated.username == sample_user_data["username"]

    def test_i_cannot_authenticate_user_with_wrong_password(
        self, user_service, sample_user_data
    ):
        """Test authenticating user with wrong password returns None."""
        user_service.create_user(UserCreate(**sample_user_data))

        outcome = user_service.authenticate_user(
            sample_user_data["username"],
            "WrongP@ssw0rd123",
        )

        assert outcome is None

    def test_i_cannot_authenticate_user_with_wrong_username(
        self, user_service, sample_user_data
    ):
        """Test authenticating user with wrong username returns None."""
        user_service.create_user(UserCreate(**sample_user_data))

        outcome = user_service.authenticate_user(
            "wrongusername",
            sample_user_data["password"],
        )

        assert outcome is None

    def test_i_cannot_authenticate_inactive_user(
        self, user_service, sample_user_data
    ):
        """Test authenticating inactive user returns None."""
        created = user_service.create_user(UserCreate(**sample_user_data))
        # Deactivated accounts must not authenticate even with good credentials
        user_service.update_user(str(created.id), UserUpdate(is_active=False))

        outcome = user_service.authenticate_user(
            sample_user_data["username"],
            sample_user_data["password"],
        )

        assert outcome is None
class TestUpdateUser:
    """Tests for update_user method."""

    def test_i_can_update_user_username(
        self,
        user_service,
        sample_user_data
    ):
        """Test updating user username."""
        # Create a user
        user_create = UserCreate(**sample_user_data)
        created_user = user_service.create_user(user_create)
        # Execute update
        new_username = "updatedusername"
        result = user_service.update_user(
            str(created_user.id),
            UserUpdate(username=new_username)
        )
        # Verify the returned model reflects the change
        assert result is not None
        assert result.username == new_username
        # Verify the change was persisted
        updated_user = user_service.get_user_by_id(str(created_user.id))
        assert updated_user.username == new_username

    def test_i_can_update_user_email(
        self,
        user_service,
        sample_user_data
    ):
        """Test updating user email."""
        # Create a user
        user_create = UserCreate(**sample_user_data)
        created_user = user_service.create_user(user_create)
        # Execute update
        new_email = "newemail@example.com"
        result = user_service.update_user(
            str(created_user.id),
            UserUpdate(email=new_email)
        )
        # Verify the returned model reflects the change
        assert result is not None
        assert result.email == new_email
        # Verify the change was persisted
        updated_user = user_service.get_user_by_id(str(created_user.id))
        assert updated_user.email == new_email

    def test_i_can_update_user_role(
        self,
        user_service,
        sample_user_data
    ):
        """Test updating user role."""
        # Create a user
        user_create = UserCreate(**sample_user_data)
        created_user = user_service.create_user(user_create)
        # Execute update
        result = user_service.update_user(
            str(created_user.id),
            UserUpdate(role=UserRole.ADMIN)
        )
        # Verify the returned model reflects the change
        assert result is not None
        assert result.role == UserRole.ADMIN
        # Verify the change was persisted
        updated_user = user_service.get_user_by_id(str(created_user.id))
        assert updated_user.role == UserRole.ADMIN

    def test_i_can_update_user_is_active(
        self,
        user_service,
        sample_user_data
    ):
        """Test updating user is_active status."""
        # Create a user
        user_create = UserCreate(**sample_user_data)
        created_user = user_service.create_user(user_create)
        # Execute update
        result = user_service.update_user(
            str(created_user.id),
            UserUpdate(is_active=False)
        )
        # Verify the returned model reflects the change
        assert result is not None
        assert result.is_active is False
        # Verify the change was persisted
        updated_user = user_service.get_user_by_id(str(created_user.id))
        assert updated_user.is_active is False

    def test_i_cannot_update_user_with_duplicate_username(
        self,
        user_service,
        sample_user_data,
        sample_user_data_2
    ):
        """Test that updating to existing username raises ValueError."""
        # Create two users; only the second one is updated below
        user_service.create_user(UserCreate(**sample_user_data))
        user_2 = user_service.create_user(UserCreate(**sample_user_data_2))
        # Try to update user_2 with the first user's username
        with pytest.raises(ValueError) as exc_info:
            user_service.update_user(
                str(user_2.id),
                UserUpdate(username=sample_user_data["username"])
            )
        assert "already taken" in str(exc_info.value)

    def test_i_cannot_update_user_with_duplicate_email(
        self,
        user_service,
        sample_user_data,
        sample_user_data_2
    ):
        """Test that updating to existing email raises ValueError."""
        # Create two users; only the second one is updated below
        user_service.create_user(UserCreate(**sample_user_data))
        user_2 = user_service.create_user(UserCreate(**sample_user_data_2))
        # Try to update user_2 with the first user's email
        with pytest.raises(ValueError) as exc_info:
            user_service.update_user(
                str(user_2.id),
                UserUpdate(email=sample_user_data["email"])
            )
        assert "already taken" in str(exc_info.value)

    def test_i_cannot_update_nonexistent_user(
        self,
        user_service
    ):
        """Test updating nonexistent user returns None."""
        # Execute update with a random, unused ObjectId
        result = user_service.update_user(
            str(ObjectId()),
            UserUpdate(username="newusername")
        )
        # Verify no user was updated
        assert result is None
class TestDeleteUser:
    """Tests for delete_user method."""

    def test_i_can_delete_existing_user(self, user_service, sample_user_data):
        """Test deleting an existing user."""
        created = user_service.create_user(UserCreate(**sample_user_data))
        user_id = str(created.id)

        # Sanity check: the user is retrievable before deletion
        assert user_service.get_user_by_id(user_id) is not None

        # Delete and confirm the operation reported success
        assert user_service.delete_user(user_id) is True

        # The user must no longer be retrievable
        assert user_service.get_user_by_id(user_id) is None

    def test_i_cannot_delete_nonexistent_user(self, user_service):
        """Test deleting a nonexistent user returns False."""
        # A random ObjectId matches nothing, so deletion must report failure
        assert user_service.delete_user(str(ObjectId())) is False
class TestListAndCountMethods:
    """Tests for list_users and count_users methods."""

    def test_i_can_list_users(
        self,
        user_service,
        sample_user_data,
        sample_user_data_2
    ):
        """Test listing all users."""
        # Create multiple users (return values are not needed here)
        user_service.create_user(UserCreate(**sample_user_data))
        user_service.create_user(UserCreate(**sample_user_data_2))
        # Execute
        result = user_service.list_users()
        # Verify both users are present
        assert len(result) == 2
        usernames = [user.username for user in result]
        assert sample_user_data["username"] in usernames
        assert sample_user_data_2["username"] in usernames

    def test_i_can_list_users_with_pagination(
        self,
        user_service
    ):
        """Test listing users with pagination."""
        # Create 5 users (UserCreateNoValidation skips password validation)
        for i in range(5):
            user_data = UserCreateNoValidation(
                username=f"user{i}",
                email=f"user{i}@example.com",
                password="SecureP@ssw0rd123"
            )
            user_service.create_user(user_data)
        # Test skip and limit across three pages (2 + 2 + 1)
        result_page_1 = user_service.list_users(skip=0, limit=2)
        assert len(result_page_1) == 2
        result_page_2 = user_service.list_users(skip=2, limit=2)
        assert len(result_page_2) == 2
        result_page_3 = user_service.list_users(skip=4, limit=2)
        assert len(result_page_3) == 1
        # Verify the pages do not contain the same users
        page_1_usernames = [user.username for user in result_page_1]
        page_2_usernames = [user.username for user in result_page_2]
        assert page_1_usernames != page_2_usernames

    def test_i_can_count_users(
        self,
        user_service,
        sample_user_data,
        sample_user_data_2
    ):
        """Test counting users."""
        # Initially no users
        assert user_service.count_users() == 0
        # The count grows with each created user
        user_service.create_user(UserCreate(**sample_user_data))
        assert user_service.count_users() == 1
        user_service.create_user(UserCreate(**sample_user_data_2))
        assert user_service.count_users() == 2

    def test_list_users_returns_empty_list_when_no_users(
        self,
        user_service
    ):
        """Test listing users returns empty list when no users exist."""
        # Execute against an empty database
        result = user_service.list_users()
        # Verify
        assert result == []
class TestUserPreferences:
    """Tests for user preferences methods."""

    def test_i_can_get_user_preference(self, user_service, sample_user_data):
        """Test getting user preference."""
        created = user_service.create_user(UserCreate(**sample_user_data))
        user_id = str(created.id)

        # Store a preference, then read it back
        user_service.set_preference(user_id, "theme", "dark")
        assert user_service.get_preference(user_id, "theme") == "dark"

    def test_i_can_set_user_preference(self, user_service, sample_user_data):
        """Test setting user preference."""
        created = user_service.create_user(UserCreate(**sample_user_data))
        user_id = str(created.id)

        updated = user_service.set_preference(user_id, "language", "fr")

        # The returned model carries the new preference value
        assert updated is not None
        assert updated.preferences.get("language") == "fr"
        # And the value is persisted in the database
        reloaded = user_service.get_user_by_id(user_id)
        assert reloaded.preferences.get("language") == "fr"

    def test_i_cannot_get_preference_for_nonexistent_user(self, user_service):
        """Test getting preference for nonexistent user returns None."""
        assert user_service.get_preference(str(ObjectId()), "theme") is None

    def test_i_cannot_set_preference_for_nonexistent_user(self, user_service):
        """Test setting preference for nonexistent user returns None."""
        assert user_service.set_preference(str(ObjectId()), "theme", "dark") is None

    def test_get_preference_returns_none_for_nonexistent_key(
        self, user_service, sample_user_data
    ):
        """Test getting nonexistent preference key returns None."""
        created = user_service.create_user(UserCreate(**sample_user_data))

        # No such key was ever set on this user
        assert user_service.get_preference(str(created.id), "nonexistent_key") is None
class TestUserLifecycle:
    """Tests for complete user lifecycle scenarios."""

    def test_complete_user_lifecycle(self, user_service, sample_user_data):
        """Test complete user lifecycle: create → authenticate → update → preferences → delete."""
        # Create
        created = user_service.create_user(UserCreate(**sample_user_data))
        assert created is not None
        assert created.username == sample_user_data["username"]
        user_id = str(created.id)

        # Authenticate with the original credentials
        authenticated = user_service.authenticate_user(
            sample_user_data["username"],
            sample_user_data["password"],
        )
        assert authenticated is not None
        assert authenticated.id == created.id

        # Promote to admin
        promoted = user_service.update_user(user_id, UserUpdate(role=UserRole.ADMIN))
        assert promoted.role == UserRole.ADMIN

        # Store a preference and read it back
        with_pref = user_service.set_preference(user_id, "theme", "dark")
        assert with_pref.preferences.get("theme") == "dark"
        assert user_service.get_preference(user_id, "theme") == "dark"

        # Delete and confirm removal
        assert user_service.delete_user(user_id) is True
        assert user_service.get_user_by_id(user_id) is None

    def test_user_operations_with_empty_database(self, user_service):
        """Test user operations when database is empty."""
        # Every lookup/mutation against an empty database degrades gracefully
        assert user_service.get_user_by_id(str(ObjectId())) is None
        assert user_service.get_user_by_username("nonexistent") is None
        assert user_service.list_users() == []
        assert user_service.count_users() == 0
        assert user_service.delete_user(str(ObjectId())) is False
        assert user_service.user_exists("nonexistent") is False

View File

@@ -0,0 +1,55 @@
import shutil
import tempfile
from pathlib import Path
import pytest
from app.utils.pdf_converter import TextToPdfConverter, ImageToPdfConverter, WordToPdfConverter
@pytest.fixture
def temp_dir():
    """Yield a throwaway directory path for output PDFs; removed after the test."""
    path = tempfile.mkdtemp()
    try:
        yield path
    finally:
        shutil.rmtree(path)
def test_i_can_convert_text_to_pdf(temp_dir):
    # Prepare a small plain-text source file
    source = Path(temp_dir) / "test.txt"
    source.write_text("Hello World!\nThis is a test.")

    converter = TextToPdfConverter(str(source), output_dir=temp_dir)
    converter.convert()

    # The converter must produce an existing .pdf file
    produced = Path(converter.output_path)
    assert produced.exists()
    assert str(produced).endswith(".pdf")
def test_i_can_convert_image_to_pdf(temp_dir):
    from PIL import Image

    # Prepare a small solid-red PNG as the source image
    source = Path(temp_dir) / "image.png"
    Image.new("RGB", (100, 100), color="red").save(source)

    converter = ImageToPdfConverter(str(source), output_dir=temp_dir)
    converter.convert()

    # The converter must produce an existing .pdf file
    produced = Path(converter.output_path)
    assert produced.exists()
    assert str(produced).endswith(".pdf")
def test_i_can_convert_word_to_pdf(temp_dir):
    import docx

    # Prepare a one-paragraph Word document as the source
    source = Path(temp_dir) / "document.docx"
    document = docx.Document()
    document.add_paragraph("Hello Word!")
    document.save(source)

    converter = WordToPdfConverter(str(source), output_dir=temp_dir)
    converter.convert()

    # The converter must produce an existing .pdf file
    produced = Path(converter.output_path)
    assert produced.exists()
    assert str(produced).endswith(".pdf")