Compare commits
12 Commits
master
...
AddingUser
| Author | SHA1 | Date | |
|---|---|---|---|
| 264dac077c | |||
| 707507b128 | |||
| 477d6bf538 | |||
| 79bfae4ba8 | |||
| 8ae9754fde | |||
| bd52f2d296 | |||
| 62c7e46a88 | |||
| 06549c0d02 | |||
| f5e909463a | |||
| 78181e71be | |||
| 56dec3a619 | |||
| fc2e9e621e |
156
.gitignore
vendored
156
.gitignore
vendored
@@ -216,3 +216,159 @@ __marimo__/
|
|||||||
|
|
||||||
# Streamlit
|
# Streamlit
|
||||||
.streamlit/secrets.toml
|
.streamlit/secrets.toml
|
||||||
|
|
||||||
|
### react ###
|
||||||
|
.DS_*
|
||||||
|
*.log
|
||||||
|
logs
|
||||||
|
**/*.backup.*
|
||||||
|
**/*.back.*
|
||||||
|
|
||||||
|
node_modules
|
||||||
|
bower_components
|
||||||
|
|
||||||
|
*.sublime*
|
||||||
|
|
||||||
|
psd
|
||||||
|
thumb
|
||||||
|
sketch
|
||||||
|
|
||||||
|
### Node ###
|
||||||
|
# Logs
|
||||||
|
logs
|
||||||
|
*.log
|
||||||
|
npm-debug.log*
|
||||||
|
yarn-debug.log*
|
||||||
|
yarn-error.log*
|
||||||
|
lerna-debug.log*
|
||||||
|
.pnpm-debug.log*
|
||||||
|
|
||||||
|
# Diagnostic reports (https://nodejs.org/api/report.html)
|
||||||
|
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
|
||||||
|
|
||||||
|
# Runtime data
|
||||||
|
pids
|
||||||
|
*.pid
|
||||||
|
*.seed
|
||||||
|
*.pid.lock
|
||||||
|
|
||||||
|
# Directory for instrumented libs generated by jscoverage/JSCover
|
||||||
|
lib-cov
|
||||||
|
|
||||||
|
# Coverage directory used by tools like istanbul
|
||||||
|
coverage
|
||||||
|
*.lcov
|
||||||
|
|
||||||
|
# nyc test coverage
|
||||||
|
.nyc_output
|
||||||
|
|
||||||
|
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
|
||||||
|
.grunt
|
||||||
|
|
||||||
|
# Bower dependency directory (https://bower.io/)
|
||||||
|
bower_components
|
||||||
|
|
||||||
|
# node-waf configuration
|
||||||
|
.lock-wscript
|
||||||
|
|
||||||
|
# Compiled binary addons (https://nodejs.org/api/addons.html)
|
||||||
|
build/Release
|
||||||
|
|
||||||
|
# Dependency directories
|
||||||
|
node_modules/
|
||||||
|
jspm_packages/
|
||||||
|
|
||||||
|
# Snowpack dependency directory (https://snowpack.dev/)
|
||||||
|
web_modules/
|
||||||
|
|
||||||
|
# TypeScript cache
|
||||||
|
*.tsbuildinfo
|
||||||
|
|
||||||
|
# Optional npm cache directory
|
||||||
|
.npm
|
||||||
|
|
||||||
|
# Optional eslint cache
|
||||||
|
.eslintcache
|
||||||
|
|
||||||
|
# Optional stylelint cache
|
||||||
|
.stylelintcache
|
||||||
|
|
||||||
|
# Microbundle cache
|
||||||
|
.rpt2_cache/
|
||||||
|
.rts2_cache_cjs/
|
||||||
|
.rts2_cache_es/
|
||||||
|
.rts2_cache_umd/
|
||||||
|
|
||||||
|
# Optional REPL history
|
||||||
|
.node_repl_history
|
||||||
|
|
||||||
|
# Output of 'npm pack'
|
||||||
|
*.tgz
|
||||||
|
|
||||||
|
# Yarn Integrity file
|
||||||
|
.yarn-integrity
|
||||||
|
|
||||||
|
# dotenv environment variable files
|
||||||
|
.env
|
||||||
|
.env.development.local
|
||||||
|
.env.test.local
|
||||||
|
.env.production.local
|
||||||
|
.env.local
|
||||||
|
|
||||||
|
# parcel-bundler cache (https://parceljs.org/)
|
||||||
|
.cache
|
||||||
|
.parcel-cache
|
||||||
|
|
||||||
|
# Next.js build output
|
||||||
|
.next
|
||||||
|
out
|
||||||
|
|
||||||
|
# Nuxt.js build / generate output
|
||||||
|
.nuxt
|
||||||
|
dist
|
||||||
|
|
||||||
|
# Gatsby files
|
||||||
|
.cache/
|
||||||
|
# Comment in the public line in if your project uses Gatsby and not Next.js
|
||||||
|
# https://nextjs.org/blog/next-9-1#public-directory-support
|
||||||
|
# public
|
||||||
|
|
||||||
|
# vuepress build output
|
||||||
|
.vuepress/dist
|
||||||
|
|
||||||
|
# vuepress v2.x temp and cache directory
|
||||||
|
.temp
|
||||||
|
|
||||||
|
# Docusaurus cache and generated files
|
||||||
|
.docusaurus
|
||||||
|
|
||||||
|
# Serverless directories
|
||||||
|
.serverless/
|
||||||
|
|
||||||
|
# FuseBox cache
|
||||||
|
.fusebox/
|
||||||
|
|
||||||
|
# DynamoDB Local files
|
||||||
|
.dynamodb/
|
||||||
|
|
||||||
|
# TernJS port file
|
||||||
|
.tern-port
|
||||||
|
|
||||||
|
# Stores VSCode versions used for testing VSCode extensions
|
||||||
|
.vscode-test
|
||||||
|
|
||||||
|
# yarn v2
|
||||||
|
.yarn/cache
|
||||||
|
.yarn/unplugged
|
||||||
|
.yarn/build-state.yml
|
||||||
|
.yarn/install-state.gz
|
||||||
|
.pnp.*
|
||||||
|
|
||||||
|
### Node Patch ###
|
||||||
|
# Serverless Webpack directories
|
||||||
|
.webpack/
|
||||||
|
|
||||||
|
# Optional stylelint cache
|
||||||
|
|
||||||
|
# SvelteKit build / generate output
|
||||||
|
.svelte-kit
|
||||||
32
Makefile
Normal file
32
Makefile
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
.PHONY: init up down restart logs clean
|
||||||
|
|
||||||
|
init:
|
||||||
|
@echo "Creating directories and setting permissions..."
|
||||||
|
@mkdir -p ./volumes/watched_files ./volumes/objects
|
||||||
|
@chown -R 1002:1002 ./volumes/watched_files ./volumes/objects
|
||||||
|
@echo "✓ Directories initialized"
|
||||||
|
|
||||||
|
up: init
|
||||||
|
@echo "Starting services..."
|
||||||
|
@docker-compose up -d
|
||||||
|
@echo "✓ Services started"
|
||||||
|
|
||||||
|
down:
|
||||||
|
@docker-compose down
|
||||||
|
|
||||||
|
restart:
|
||||||
|
@docker-compose restart
|
||||||
|
|
||||||
|
logs:
|
||||||
|
@docker-compose logs -f
|
||||||
|
|
||||||
|
clean: down
|
||||||
|
@echo "Cleaning volumes..."
|
||||||
|
@sudo rm -rf ./volumes
|
||||||
|
@echo "✓ Volumes cleaned"
|
||||||
|
|
||||||
|
rebuild: clean init
|
||||||
|
@echo "Rebuilding images..."
|
||||||
|
@docker-compose build --no-cache
|
||||||
|
@docker-compose up -d
|
||||||
|
@echo "✓ Services rebuilt and started"
|
||||||
99
Readme.md
99
Readme.md
@@ -348,14 +348,6 @@ class ProcessingJob(BaseModel):
|
|||||||
- **Rationale**: Full compatibility with Celery workers and simplified workflow
|
- **Rationale**: Full compatibility with Celery workers and simplified workflow
|
||||||
- **Implementation**: All repositories and services operate synchronously for seamless integration
|
- **Implementation**: All repositories and services operate synchronously for seamless integration
|
||||||
|
|
||||||
### Implementation Status
|
|
||||||
|
|
||||||
1. ✅ Pydantic models for MongoDB collections
|
|
||||||
2. ✅ Repository layer for data access (files + processing_jobs + users + documents) - synchronous
|
|
||||||
3. ✅ Service layer for business logic (auth, user, document, job) - synchronous
|
|
||||||
4. ✅ Celery tasks for document processing
|
|
||||||
5. ✅ Watchdog file monitoring implementation
|
|
||||||
6. ✅ FastAPI integration and startup coordination
|
|
||||||
|
|
||||||
## Job Management Layer
|
## Job Management Layer
|
||||||
|
|
||||||
@@ -493,15 +485,88 @@ src/file-processor/app/
|
|||||||
|
|
||||||
### Next Implementation Steps
|
### Next Implementation Steps
|
||||||
|
|
||||||
1. **TODO**: Complete file processing pipeline =>
|
1. Build React Login Page
|
||||||
1. ✅ Create Pydantic models for files and processing_jobs collections
|
2. Build React Registration Page
|
||||||
2. ✅ Implement repository layer for file and processing job data access (synchronous)
|
3. Build React Default Dashboard
|
||||||
3. ✅ Implement service layer for business logic (synchronous)
|
4. Build React User Management Pages
|
||||||
4. ✅ Create Celery tasks for document processing (.txt, .pdf, .docx)
|
|
||||||
5. ✅ Implement Watchdog file monitoring with dedicated observer
|
#### Validated Folders and files
|
||||||
6. ✅ Integrate file watcher with FastAPI startup
|
```
|
||||||
2. Create protected API routes for user management
|
src/frontend/src/
|
||||||
3. Build React monitoring interface with authentication
|
├── components/
|
||||||
|
│ ├── auth/
|
||||||
|
│ │ ├── LoginForm.jsx # Composant formulaire de login => Done
|
||||||
|
│ │ └── AuthLayout.jsx # Layout pour les pages d'auth => Done
|
||||||
|
│ └── common/
|
||||||
|
│ ├── Header.jsx # Header commun => TODO
|
||||||
|
│ ├── Layout.jsx # Header commun => TODO
|
||||||
|
│ └── ProtectedRoutes.jsx # Done
|
||||||
|
├── contexts/
|
||||||
|
│ └── AuthContext.jsx # Done
|
||||||
|
├── pages/
|
||||||
|
│ ├── LoginPage.jsx # Page complète de login => Done
|
||||||
|
│ └── DashboardPage.jsx # Page tableau de bord (exemple) => TODO
|
||||||
|
├── services/
|
||||||
|
│ └── authService.js # Service API pour auth => Done
|
||||||
|
├── hooks/
|
||||||
|
│ └── useAuth.js # Hook React pour gestion auth => TODO
|
||||||
|
├── utils/
|
||||||
|
│ └── api.js # Configuration axios/fetch => Done
|
||||||
|
├── App.jsx # Needs to be updated => TODO
|
||||||
|
```
|
||||||
|
#### Choices already made
|
||||||
|
* Pour la gestion des requêtes API et de l'état d'authentification, je propose
|
||||||
|
* axios (plus de fonctionnalités) :
|
||||||
|
* Installation d'axios pour les requêtes HTTP
|
||||||
|
* Intercepteurs pour gestion automatique du token
|
||||||
|
* Gestion d'erreurs centralisée
|
||||||
|
* Pour la gestion de l'état d'authentification et la navigation : Option A + C en même temps
|
||||||
|
* Option A - Context React + React Router :
|
||||||
|
* React Context pour l'état global d'auth (user, token, isAuthenticated)
|
||||||
|
* React Router pour la navigation entre pages
|
||||||
|
* Routes protégées automatiques
|
||||||
|
* Option C - Context + localStorage pour persistance :
|
||||||
|
* Token sauvegardé en localStorage pour rester connecté
|
||||||
|
* Context qui se recharge au démarrage de l'app
|
||||||
|
* CSS : Utilisation de daisyUI
|
||||||
|
|
||||||
|
#### Package.json
|
||||||
|
```
|
||||||
|
{
|
||||||
|
"name": "frontend",
|
||||||
|
"private": true,
|
||||||
|
"version": "0.0.0",
|
||||||
|
"type": "module",
|
||||||
|
"scripts": {
|
||||||
|
"dev": "vite",
|
||||||
|
"build": "vite build",
|
||||||
|
"lint": "eslint .",
|
||||||
|
"preview": "vite preview"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"@tailwindcss/vite": "^4.1.13",
|
||||||
|
"axios": "^1.12.2",
|
||||||
|
"react": "^19.1.1",
|
||||||
|
"react-dom": "^19.1.1",
|
||||||
|
"react-router-dom": "^7.9.3"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@eslint/js": "^9.33.0",
|
||||||
|
"@types/react": "^19.1.10",
|
||||||
|
"@types/react-dom": "^19.1.7",
|
||||||
|
"@vitejs/plugin-react": "^5.0.0",
|
||||||
|
"autoprefixer": "^10.4.21",
|
||||||
|
"daisyui": "^5.1.23",
|
||||||
|
"eslint": "^9.33.0",
|
||||||
|
"eslint-plugin-react-hooks": "^5.2.0",
|
||||||
|
"eslint-plugin-react-refresh": "^0.4.20",
|
||||||
|
"globals": "^16.3.0",
|
||||||
|
"postcss": "^8.5.6",
|
||||||
|
"tailwindcss": "^4.1.13",
|
||||||
|
"vite": "^7.1.2"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
## Annexes
|
## Annexes
|
||||||
|
|
||||||
|
|||||||
@@ -40,6 +40,8 @@ services:
|
|||||||
- ./src/worker/tasks:/app/tasks # <- Added: shared access to worker tasks
|
- ./src/worker/tasks:/app/tasks # <- Added: shared access to worker tasks
|
||||||
- ./volumes/watched_files:/watched_files
|
- ./volumes/watched_files:/watched_files
|
||||||
- ./volumes/objects:/objects
|
- ./volumes/objects:/objects
|
||||||
|
- ./volumes/errors:/errors
|
||||||
|
- ./volumes/ignored:/ignored
|
||||||
depends_on:
|
depends_on:
|
||||||
- redis
|
- redis
|
||||||
- mongodb
|
- mongodb
|
||||||
@@ -61,6 +63,9 @@ services:
|
|||||||
- ./src/worker:/app
|
- ./src/worker:/app
|
||||||
- ./src/file-processor/app:/app/app # <- Added: shared access file-processor app
|
- ./src/file-processor/app:/app/app # <- Added: shared access file-processor app
|
||||||
- ./volumes/watched_files:/watched_files
|
- ./volumes/watched_files:/watched_files
|
||||||
|
- ./volumes/objects:/objects
|
||||||
|
- ./volumes/errors:/errors
|
||||||
|
- ./volumes/ignored:/ignored
|
||||||
depends_on:
|
depends_on:
|
||||||
- redis
|
- redis
|
||||||
- mongodb
|
- mongodb
|
||||||
|
|||||||
31
package-lock.json
generated
Normal file
31
package-lock.json
generated
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
{
|
||||||
|
"name": "MyDocManager",
|
||||||
|
"lockfileVersion": 3,
|
||||||
|
"requires": true,
|
||||||
|
"packages": {
|
||||||
|
"": {
|
||||||
|
"dependencies": {
|
||||||
|
"react-icons": "^5.5.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/react": {
|
||||||
|
"version": "19.1.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/react/-/react-19.1.1.tgz",
|
||||||
|
"integrity": "sha512-w8nqGImo45dmMIfljjMwOGtbmC/mk4CMYhWIicdSflH91J9TyCyczcPFXJzrZ/ZXcgGRFeP6BU0BEJTw6tZdfQ==",
|
||||||
|
"license": "MIT",
|
||||||
|
"peer": true,
|
||||||
|
"engines": {
|
||||||
|
"node": ">=0.10.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/react-icons": {
|
||||||
|
"version": "5.5.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/react-icons/-/react-icons-5.5.0.tgz",
|
||||||
|
"integrity": "sha512-MEFcXdkP3dLo8uumGI5xN3lDFNsRtrjbOEKDLD7yv76v4wpnEq2Lt2qeHaQOr34I/wPN3s3+N08WkQ+CW37Xiw==",
|
||||||
|
"license": "MIT",
|
||||||
|
"peerDependencies": {
|
||||||
|
"react": "*"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
5
package.json
Normal file
5
package.json
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
{
|
||||||
|
"dependencies": {
|
||||||
|
"react-icons": "^5.5.0"
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -7,11 +7,13 @@ billiard==4.2.1
|
|||||||
celery==5.5.3
|
celery==5.5.3
|
||||||
certifi==2025.8.3
|
certifi==2025.8.3
|
||||||
cffi==2.0.0
|
cffi==2.0.0
|
||||||
|
charset-normalizer==3.4.3
|
||||||
click==8.2.1
|
click==8.2.1
|
||||||
click-didyoumean==0.3.1
|
click-didyoumean==0.3.1
|
||||||
click-plugins==1.1.1.2
|
click-plugins==1.1.1.2
|
||||||
click-repl==0.3.0
|
click-repl==0.3.0
|
||||||
cryptography==46.0.1
|
cryptography==46.0.1
|
||||||
|
Deprecated==1.2.18
|
||||||
dnspython==2.8.0
|
dnspython==2.8.0
|
||||||
ecdsa==0.19.1
|
ecdsa==0.19.1
|
||||||
email-validator==2.3.0
|
email-validator==2.3.0
|
||||||
@@ -26,10 +28,13 @@ importlib_metadata==8.7.0
|
|||||||
iniconfig==2.1.0
|
iniconfig==2.1.0
|
||||||
izulu==0.50.0
|
izulu==0.50.0
|
||||||
kombu==5.5.4
|
kombu==5.5.4
|
||||||
|
lxml==6.0.2
|
||||||
mongomock==4.3.0
|
mongomock==4.3.0
|
||||||
mongomock-motor==0.0.36
|
mongomock-motor==0.0.36
|
||||||
motor==3.7.1
|
motor==3.7.1
|
||||||
packaging==25.0
|
packaging==25.0
|
||||||
|
pikepdf==9.11.0
|
||||||
|
pillow==11.3.0
|
||||||
pipdeptree==2.28.0
|
pipdeptree==2.28.0
|
||||||
pluggy==1.6.0
|
pluggy==1.6.0
|
||||||
prompt_toolkit==3.0.52
|
prompt_toolkit==3.0.52
|
||||||
@@ -41,15 +46,20 @@ pydantic_core==2.33.2
|
|||||||
Pygments==2.19.2
|
Pygments==2.19.2
|
||||||
PyJWT==2.10.1
|
PyJWT==2.10.1
|
||||||
pymongo==4.15.1
|
pymongo==4.15.1
|
||||||
|
PyMuPDF==1.26.4
|
||||||
|
pypandoc==1.15
|
||||||
pytest==8.4.2
|
pytest==8.4.2
|
||||||
pytest-asyncio==1.2.0
|
pytest-asyncio==1.2.0
|
||||||
pytest-mock==3.15.1
|
pytest-mock==3.15.1
|
||||||
python-dateutil==2.9.0.post0
|
python-dateutil==2.9.0.post0
|
||||||
|
python-docx==1.2.0
|
||||||
python-dotenv==1.1.1
|
python-dotenv==1.1.1
|
||||||
python-magic==0.4.27
|
python-magic==0.4.27
|
||||||
|
python-multipart==0.0.20
|
||||||
pytz==2025.2
|
pytz==2025.2
|
||||||
PyYAML==6.0.2
|
PyYAML==6.0.2
|
||||||
redis==6.4.0
|
redis==6.4.0
|
||||||
|
reportlab==4.4.4
|
||||||
rsa==4.9.1
|
rsa==4.9.1
|
||||||
sentinels==1.1.1
|
sentinels==1.1.1
|
||||||
six==1.17.0
|
six==1.17.0
|
||||||
@@ -65,4 +75,5 @@ watchdog==6.0.0
|
|||||||
watchfiles==1.1.0
|
watchfiles==1.1.0
|
||||||
wcwidth==0.2.13
|
wcwidth==0.2.13
|
||||||
websockets==15.0.1
|
websockets==15.0.1
|
||||||
|
wrapt==1.17.3
|
||||||
zipp==3.23.0
|
zipp==3.23.0
|
||||||
|
|||||||
@@ -7,12 +7,19 @@ WORKDIR /app
|
|||||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||||
libmagic1 \
|
libmagic1 \
|
||||||
file \
|
file \
|
||||||
|
pandoc \
|
||||||
|
ghostscript \
|
||||||
|
texlive-xetex \
|
||||||
&& rm -rf /var/lib/apt/lists/*
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
|
||||||
# Copy requirements and install dependencies
|
# Copy requirements and install dependencies
|
||||||
COPY requirements.txt .
|
COPY requirements.txt .
|
||||||
RUN pip install --no-cache-dir -r requirements.txt
|
RUN pip install --no-cache-dir -r requirements.txt
|
||||||
|
|
||||||
|
# Change the user
|
||||||
|
USER 1002:1002
|
||||||
|
|
||||||
# Copy application code
|
# Copy application code
|
||||||
COPY . .
|
COPY . .
|
||||||
|
|
||||||
@@ -21,5 +28,6 @@ ENV PYTHONPATH=/app
|
|||||||
# Expose port
|
# Expose port
|
||||||
EXPOSE 8000
|
EXPOSE 8000
|
||||||
|
|
||||||
|
|
||||||
# Command will be overridden by docker-compose
|
# Command will be overridden by docker-compose
|
||||||
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
|
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
|
||||||
@@ -9,6 +9,7 @@ from app.database.connection import get_database
|
|||||||
from app.models.auth import UserRole
|
from app.models.auth import UserRole
|
||||||
from app.models.user import UserInDB
|
from app.models.user import UserInDB
|
||||||
from app.services.auth_service import AuthService
|
from app.services.auth_service import AuthService
|
||||||
|
from app.services.document_service import DocumentService
|
||||||
from app.services.user_service import UserService
|
from app.services.user_service import UserService
|
||||||
|
|
||||||
security = HTTPBearer()
|
security = HTTPBearer()
|
||||||
@@ -25,6 +26,12 @@ def get_user_service() -> UserService:
|
|||||||
return UserService(database)
|
return UserService(database)
|
||||||
|
|
||||||
|
|
||||||
|
def get_document_service() -> DocumentService:
|
||||||
|
"""Dependency to get DocumentService instance."""
|
||||||
|
database = get_database()
|
||||||
|
return DocumentService(database)
|
||||||
|
|
||||||
|
|
||||||
def get_current_user(
|
def get_current_user(
|
||||||
credentials: HTTPAuthorizationCredentials = Depends(security),
|
credentials: HTTPAuthorizationCredentials = Depends(security),
|
||||||
user_service: UserService = Depends(get_user_service)
|
user_service: UserService = Depends(get_user_service)
|
||||||
@@ -79,7 +86,7 @@ def get_current_user(
|
|||||||
return user
|
return user
|
||||||
|
|
||||||
|
|
||||||
def get_admin_user(current_user: UserInDB = Depends(get_current_user)) -> UserInDB:
|
def get_admin_user(current_user: UserInDB = Depends(get_current_user)) -> UserInDB:
|
||||||
"""
|
"""
|
||||||
Dependency to ensure current user has admin role.
|
Dependency to ensure current user has admin role.
|
||||||
|
|
||||||
|
|||||||
243
src/file-processor/app/api/routes/document.py
Normal file
243
src/file-processor/app/api/routes/document.py
Normal file
@@ -0,0 +1,243 @@
|
|||||||
|
"""
|
||||||
|
Document API routes.
|
||||||
|
|
||||||
|
This module provides REST endpoints for document management operations.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
from typing import List, Optional
|
||||||
|
|
||||||
|
import fitz # PyMuPDF
|
||||||
|
from fastapi import APIRouter, Depends, HTTPException, Query, status, Path
|
||||||
|
from starlette.responses import Response
|
||||||
|
|
||||||
|
from app.api.dependencies import get_document_service, get_current_user
|
||||||
|
from app.models.document import DocumentResponse, FileDocument
|
||||||
|
from app.models.user import UserInDB
|
||||||
|
from app.services.document_service import DocumentService
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
router = APIRouter(tags=["Documents"])
|
||||||
|
|
||||||
|
|
||||||
|
def _count_pdf_pages(pdf_file_path: str) -> int:
|
||||||
|
"""
|
||||||
|
Count the number of pages in a PDF file using PyMuPDF.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
pdf_file_path: Path to the PDF file
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Number of pages in the PDF, or 0 if file cannot be read
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
with fitz.open(pdf_file_path) as doc:
|
||||||
|
return doc.page_count
|
||||||
|
except Exception as e:
|
||||||
|
logger.warning(f"Could not count pages for PDF {pdf_file_path}: {e}")
|
||||||
|
return 0
|
||||||
|
|
||||||
|
|
||||||
|
def _build_object_url(file_hash: Optional[str]) -> Optional[str]:
|
||||||
|
"""
|
||||||
|
Build object URL from file hash.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
file_hash: SHA256 hash of the file
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
URL string or None if hash is not provided
|
||||||
|
"""
|
||||||
|
if not file_hash:
|
||||||
|
return None
|
||||||
|
return f"/api/objects/{file_hash}"
|
||||||
|
|
||||||
|
|
||||||
|
def _extract_metadata_field(metadata: dict, field_name: str) -> List[str]:
|
||||||
|
"""
|
||||||
|
Extract a list field from metadata dictionary.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
metadata: Document metadata dictionary
|
||||||
|
field_name: Name of the field to extract
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
List of strings, empty list if field doesn't exist or is not a list
|
||||||
|
"""
|
||||||
|
field_value = metadata.get(field_name, [])
|
||||||
|
if isinstance(field_value, list):
|
||||||
|
return [str(item) for item in field_value]
|
||||||
|
return []
|
||||||
|
|
||||||
|
|
||||||
|
def _map_file_document_to_response(
|
||||||
|
document: FileDocument,
|
||||||
|
document_service: DocumentService
|
||||||
|
) -> DocumentResponse:
|
||||||
|
"""
|
||||||
|
Map FileDocument to DocumentResponse format.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
document: FileDocument instance from database
|
||||||
|
document_service: Document service for file operations
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
DocumentResponse instance ready for API response
|
||||||
|
"""
|
||||||
|
# Calculate page count for PDF files
|
||||||
|
page_count = 0
|
||||||
|
if document.pdf_file_hash and document_service.exists(document.pdf_file_hash):
|
||||||
|
pdf_path = document_service.get_document_path(document.pdf_file_hash)
|
||||||
|
page_count = _count_pdf_pages(pdf_path)
|
||||||
|
|
||||||
|
# Build URLs
|
||||||
|
thumbnail_url = _build_object_url(document.thumbnail_file_hash)
|
||||||
|
pdf_url = _build_object_url(document.pdf_file_hash)
|
||||||
|
|
||||||
|
# Extract tags and categories from metadata
|
||||||
|
tags = _extract_metadata_field(document.metadata, "tags")
|
||||||
|
categories = _extract_metadata_field(document.metadata, "categories")
|
||||||
|
|
||||||
|
# Format created_at timestamp
|
||||||
|
created_at = document.detected_at.isoformat() if document.detected_at else ""
|
||||||
|
|
||||||
|
as_dict = {
|
||||||
|
"id": str(document.id),
|
||||||
|
"name": document.filename,
|
||||||
|
"original_file_type": document.file_type.value.upper(),
|
||||||
|
"created_at": created_at,
|
||||||
|
"file_size": document.file_size,
|
||||||
|
"page_count": page_count,
|
||||||
|
"thumbnail_url": thumbnail_url,
|
||||||
|
"pdf_url": pdf_url,
|
||||||
|
"tags": tags,
|
||||||
|
"categories": categories
|
||||||
|
}
|
||||||
|
logger.info(f"Document: {as_dict}")
|
||||||
|
|
||||||
|
return DocumentResponse(**as_dict)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/documents", response_model=List[DocumentResponse])
|
||||||
|
def list_documents(
|
||||||
|
skip: int = Query(0, ge=0, description="Number of documents to skip"),
|
||||||
|
limit: int = Query(100, ge=1, le=1000, description="Maximum number of documents to return"),
|
||||||
|
user: UserInDB = Depends(get_current_user),
|
||||||
|
document_service: DocumentService = Depends(get_document_service)
|
||||||
|
) -> List[DocumentResponse]:
|
||||||
|
"""
|
||||||
|
Retrieve a paginated list of documents.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
skip: Number of documents to skip for pagination
|
||||||
|
limit: Maximum number of documents to return
|
||||||
|
document_service: Document service instance
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
List of documents in API response format
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
HTTPException: If database operation fails
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
# Get documents from service
|
||||||
|
documents = document_service.list_documents(skip=skip, limit=limit)
|
||||||
|
|
||||||
|
# Map to response format
|
||||||
|
document_responses = [
|
||||||
|
_map_file_document_to_response(doc, document_service)
|
||||||
|
for doc in documents
|
||||||
|
]
|
||||||
|
|
||||||
|
return document_responses
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to list documents: {e}")
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||||
|
detail="Failed to retrieve documents"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/objects/{file_hash}")
|
||||||
|
async def get_object_by_hash(
|
||||||
|
file_hash: str = Path(..., description="SHA256 hash of the object to retrieve"),
|
||||||
|
document_service: DocumentService = Depends(get_document_service),
|
||||||
|
user: UserInDB = Depends(get_current_user),
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Serve object content by its hash.
|
||||||
|
|
||||||
|
This endpoint serves files (original documents, PDFs, thumbnails) by their
|
||||||
|
SHA256 hash. It supports all file types stored in the objects folder.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
file_hash: SHA256 hash of the object
|
||||||
|
document_service: Document service dependency
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
FileResponse with the requested object content
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
HTTPException: If object not found (404) or server error (500)
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
# Check if object exists
|
||||||
|
if not document_service.exists(file_hash):
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_404_NOT_FOUND,
|
||||||
|
detail="Object not found"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Get file path
|
||||||
|
file_path = document_service.get_document_path(file_hash)
|
||||||
|
|
||||||
|
# Verify file exists on disk
|
||||||
|
if not os.path.exists(file_path):
|
||||||
|
logger.error(f"Object {file_hash} registered but file not found at {file_path}")
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_404_NOT_FOUND,
|
||||||
|
detail="Object file not found on disk"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Determine media type based on file content
|
||||||
|
try:
|
||||||
|
file_content = document_service.get_document_content_by_hash(file_hash)
|
||||||
|
if not file_content:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_404_NOT_FOUND,
|
||||||
|
detail="Object content not available"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Detect MIME type
|
||||||
|
import magic
|
||||||
|
mime_type = magic.from_buffer(file_content, mime=True)
|
||||||
|
|
||||||
|
# Return file content with appropriate headers
|
||||||
|
return Response(
|
||||||
|
content=file_content,
|
||||||
|
media_type=mime_type,
|
||||||
|
headers={
|
||||||
|
"Content-Length": str(len(file_content)),
|
||||||
|
"Cache-Control": "public, max-age=3600" # Cache for 1 hour
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error reading object content for hash {file_hash}: {str(e)}")
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||||
|
detail="Failed to read object content"
|
||||||
|
)
|
||||||
|
|
||||||
|
except HTTPException:
|
||||||
|
# Re-raise HTTP exceptions as-is
|
||||||
|
raise
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Unexpected error serving object {file_hash}: {str(e)}")
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||||
|
detail="Internal server error while serving object"
|
||||||
|
)
|
||||||
@@ -34,22 +34,6 @@ def get_redis_url() -> str:
|
|||||||
return os.getenv("REDIS_URL", "redis://localhost:6379/0")
|
return os.getenv("REDIS_URL", "redis://localhost:6379/0")
|
||||||
|
|
||||||
|
|
||||||
# def get_redis_host() -> str:
|
|
||||||
# redis_url = get_redis_url()
|
|
||||||
# if redis_url.startswith("redis://"):
|
|
||||||
# return redis_url.split("redis://")[1].split("/")[0]
|
|
||||||
# else:
|
|
||||||
# return redis_url
|
|
||||||
#
|
|
||||||
#
|
|
||||||
# def get_redis_port() -> int:
|
|
||||||
# redis_url = get_redis_url()
|
|
||||||
# if redis_url.startswith("redis://"):
|
|
||||||
# return int(redis_url.split("redis://")[1].split("/")[0].split(":")[1])
|
|
||||||
# else:
|
|
||||||
# return int(redis_url.split(":")[1])
|
|
||||||
|
|
||||||
|
|
||||||
def get_jwt_secret_key() -> str:
|
def get_jwt_secret_key() -> str:
|
||||||
"""
|
"""
|
||||||
Get JWT secret key from environment variables.
|
Get JWT secret key from environment variables.
|
||||||
@@ -114,6 +98,21 @@ def get_objects_folder() -> str:
|
|||||||
return os.getenv("OBJECTS_FOLDER", "/objects")
|
return os.getenv("OBJECTS_FOLDER", "/objects")
|
||||||
|
|
||||||
|
|
||||||
def watch_directory() -> str:
|
def get_watch_folder() -> str:
|
||||||
"""Directory to monitor for new files"""
|
"""Directory to monitor for new files"""
|
||||||
return os.getenv("WATCH_DIRECTORY", "/watched_files")
|
return os.getenv("WATCH_DIRECTORY", "/watched_files")
|
||||||
|
|
||||||
|
|
||||||
|
def get_temp_folder() -> str:
|
||||||
|
"""Directory to store temporary files"""
|
||||||
|
return os.getenv("TEMP_DIRECTORY", "/tmp")
|
||||||
|
|
||||||
|
|
||||||
|
def get_errors_folder() -> str:
|
||||||
|
"""Directory to store temporary files"""
|
||||||
|
return os.getenv("ERRORS_DIRECTORY", "/errors")
|
||||||
|
|
||||||
|
|
||||||
|
def get_ignored_folder() -> str:
|
||||||
|
"""Directory to store temporary files"""
|
||||||
|
return os.getenv("IGNORED_DIRECTORY", "/ignored")
|
||||||
|
|||||||
@@ -4,7 +4,7 @@ MongoDB database connection management.
|
|||||||
This module handles MongoDB connection with fail-fast approach.
|
This module handles MongoDB connection with fail-fast approach.
|
||||||
The application will terminate if MongoDB is not accessible at startup.
|
The application will terminate if MongoDB is not accessible at startup.
|
||||||
"""
|
"""
|
||||||
|
import logging
|
||||||
import sys
|
import sys
|
||||||
from typing import Optional
|
from typing import Optional
|
||||||
|
|
||||||
@@ -13,11 +13,14 @@ from pymongo.database import Database
|
|||||||
from pymongo.errors import ConnectionFailure, ServerSelectionTimeoutError
|
from pymongo.errors import ConnectionFailure, ServerSelectionTimeoutError
|
||||||
|
|
||||||
from app.config.settings import get_mongodb_url, get_mongodb_database_name
|
from app.config.settings import get_mongodb_url, get_mongodb_database_name
|
||||||
|
from app.utils.security import safe_connection_string
|
||||||
|
|
||||||
# Global variables for singleton pattern
|
# Global variables for singleton pattern
|
||||||
_client: Optional[MongoClient] = None
|
_client: Optional[MongoClient] = None
|
||||||
_database: Optional[Database] = None
|
_database: Optional[Database] = None
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
def create_mongodb_client() -> MongoClient:
|
def create_mongodb_client() -> MongoClient:
|
||||||
"""
|
"""
|
||||||
@@ -43,16 +46,16 @@ def create_mongodb_client() -> MongoClient:
|
|||||||
# Test connection by running admin command
|
# Test connection by running admin command
|
||||||
client.admin.command('ping')
|
client.admin.command('ping')
|
||||||
|
|
||||||
print(f"Successfully connected to MongoDB at {mongodb_url}")
|
logger.info(f"Successfully connected to MongoDB at {safe_connection_string(mongodb_url)}")
|
||||||
return client
|
return client
|
||||||
|
|
||||||
except (ConnectionFailure, ServerSelectionTimeoutError) as e:
|
except (ConnectionFailure, ServerSelectionTimeoutError) as e:
|
||||||
print(f"ERROR: Failed to connect to MongoDB at {mongodb_url}")
|
logger.error(f"ERROR: Failed to connect to MongoDB at {safe_connection_string(mongodb_url)}")
|
||||||
print(f"Connection error: {str(e)}")
|
logger.error(f"Connection error: {str(e)}")
|
||||||
print("MongoDB is required for this application. Please ensure MongoDB is running and accessible.")
|
logger.error("MongoDB is required for this application. Please ensure MongoDB is running and accessible.")
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print(f"ERROR: Unexpected error connecting to MongoDB: {str(e)}")
|
logger.error(f"ERROR: Unexpected error connecting to MongoDB: {str(e)}")
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
@@ -74,7 +77,7 @@ def get_database() -> Database:
|
|||||||
|
|
||||||
database_name = get_mongodb_database_name()
|
database_name = get_mongodb_database_name()
|
||||||
_database = _client[database_name]
|
_database = _client[database_name]
|
||||||
print(f"Connected to database: {database_name}")
|
logger.info(f"Connected to database: {database_name}")
|
||||||
|
|
||||||
return _database
|
return _database
|
||||||
|
|
||||||
@@ -92,7 +95,7 @@ def close_database_connection():
|
|||||||
_client.close()
|
_client.close()
|
||||||
_client = None
|
_client = None
|
||||||
_database = None
|
_database = None
|
||||||
print("MongoDB connection closed")
|
logger.info("MongoDB connection closed")
|
||||||
|
|
||||||
|
|
||||||
def get_mongodb_client() -> Optional[MongoClient]:
|
def get_mongodb_client() -> Optional[MongoClient]:
|
||||||
|
|||||||
@@ -130,6 +130,47 @@ class FileDocumentRepository:
|
|||||||
except PyMongoError:
|
except PyMongoError:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
def find_document_with_pdf_hash(self, file_hash: str) -> Optional[FileDocument]:
|
||||||
|
"""
|
||||||
|
Find file document by file hash with a pdf_file_hash set (not None).
|
||||||
|
|
||||||
|
Args:
|
||||||
|
file_hash (str): SHA256 hash of file content
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
FileDocument or None: File document if found, None otherwise
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
file_doc = self.collection.find_one({"file_hash": file_hash,
|
||||||
|
"pdf_file_hash": {"$ne": None}})
|
||||||
|
if file_doc:
|
||||||
|
return FileDocument(**file_doc)
|
||||||
|
return None
|
||||||
|
|
||||||
|
except PyMongoError:
|
||||||
|
return None
|
||||||
|
|
||||||
|
def find_same_document(self, filename: str, file_hash: str):
|
||||||
|
"""
|
||||||
|
Find document with the same file_name and the same file hash
|
||||||
|
|
||||||
|
Args:
|
||||||
|
filename (str):
|
||||||
|
file_hash (str): SHA256 hash of file content
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
FileDocument or None: File document if found, None otherwise
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
file_doc = self.collection.find_one({"file_hash": file_hash,
|
||||||
|
"filename": filename})
|
||||||
|
if file_doc:
|
||||||
|
return FileDocument(**file_doc)
|
||||||
|
return None
|
||||||
|
|
||||||
|
except PyMongoError:
|
||||||
|
return None
|
||||||
|
|
||||||
def find_document_by_filepath(self, filepath: str) -> Optional[FileDocument]:
|
def find_document_by_filepath(self, filepath: str) -> Optional[FileDocument]:
|
||||||
"""
|
"""
|
||||||
Find file document by exact filepath.
|
Find file document by exact filepath.
|
||||||
|
|||||||
@@ -174,6 +174,8 @@ class UserRepository:
|
|||||||
update_data["role"] = user_update.role
|
update_data["role"] = user_update.role
|
||||||
if user_update.is_active is not None:
|
if user_update.is_active is not None:
|
||||||
update_data["is_active"] = user_update.is_active
|
update_data["is_active"] = user_update.is_active
|
||||||
|
if user_update.preferences is not None:
|
||||||
|
update_data["preferences"] = user_update.preferences
|
||||||
|
|
||||||
# Remove None values from update data
|
# Remove None values from update data
|
||||||
clean_update_data = {k: v for k, v in update_data.items() if v is not None}
|
clean_update_data = {k: v for k, v in update_data.items() if v is not None}
|
||||||
|
|||||||
@@ -30,7 +30,7 @@ class DocumentFileEventHandler(FileSystemEventHandler):
|
|||||||
dispatching Celery tasks, and managing processing jobs.
|
dispatching Celery tasks, and managing processing jobs.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
SUPPORTED_EXTENSIONS = {'.txt', '.pdf', '.docx'}
|
SUPPORTED_EXTENSIONS = {'.txt', '.pdf', '.docx', '.jpg', '.png', '.jpeg'}
|
||||||
|
|
||||||
def __init__(self, document_service: DocumentService, job_service: JobService):
|
def __init__(self, document_service: DocumentService, job_service: JobService):
|
||||||
"""
|
"""
|
||||||
@@ -59,21 +59,20 @@ class DocumentFileEventHandler(FileSystemEventHandler):
|
|||||||
|
|
||||||
if file_extension not in self.SUPPORTED_EXTENSIONS:
|
if file_extension not in self.SUPPORTED_EXTENSIONS:
|
||||||
logger.info(f"Ignoring unsupported file type: {filepath}")
|
logger.info(f"Ignoring unsupported file type: {filepath}")
|
||||||
|
self.document_service.move_to_ignored(filepath, "unsupported file type")
|
||||||
return
|
return
|
||||||
|
|
||||||
logger.info(f"Processing new file: {filepath}")
|
logger.info(f"Processing new file: {filepath}")
|
||||||
|
|
||||||
# try:
|
try:
|
||||||
from tasks.document_processing import process_document
|
from tasks.document_processing import process_document
|
||||||
task_result = process_document.delay(filepath)
|
task_result = process_document.delay(filepath)
|
||||||
print(task_result)
|
task_id = task_result.task_id
|
||||||
print("hello world")
|
logger.info(f"Dispatched Celery task with ID: {task_id}")
|
||||||
# task_id = task_result.task_id
|
|
||||||
# logger.info(f"Dispatched Celery task with ID: {task_id}")
|
|
||||||
|
|
||||||
# except Exception as e:
|
except Exception as e:
|
||||||
# logger.error(f"Failed to process file {filepath}: {str(e)}")
|
logger.error(f"Failed to process file {filepath}: {str(e)}")
|
||||||
# # Note: We don't re-raise the exception to keep the watcher running
|
# Note: We don't re-raise the exception to keep the watcher running
|
||||||
|
|
||||||
|
|
||||||
class FileWatcher:
|
class FileWatcher:
|
||||||
|
|||||||
@@ -17,6 +17,7 @@ from fastapi.middleware.cors import CORSMiddleware
|
|||||||
|
|
||||||
from app.api.routes.auth import router as auth_router
|
from app.api.routes.auth import router as auth_router
|
||||||
from app.api.routes.users import router as users_router
|
from app.api.routes.users import router as users_router
|
||||||
|
from app.api.routes.document import router as documents_router
|
||||||
from app.config import settings
|
from app.config import settings
|
||||||
from app.database.connection import get_database
|
from app.database.connection import get_database
|
||||||
from app.file_watcher import create_file_watcher, FileWatcher
|
from app.file_watcher import create_file_watcher, FileWatcher
|
||||||
@@ -65,12 +66,12 @@ async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]:
|
|||||||
|
|
||||||
# Create and start file watcher
|
# Create and start file watcher
|
||||||
file_watcher = create_file_watcher(
|
file_watcher = create_file_watcher(
|
||||||
watch_directory=settings.watch_directory(),
|
watch_directory=settings.get_watch_folder(),
|
||||||
document_service=document_service,
|
document_service=document_service,
|
||||||
job_service=job_service
|
job_service=job_service
|
||||||
)
|
)
|
||||||
file_watcher.start()
|
file_watcher.start()
|
||||||
logger.info(f"FileWatcher started for directory: {settings.watch_directory()}")
|
logger.info(f"FileWatcher started for directory: {settings.get_watch_folder()}")
|
||||||
|
|
||||||
logger.info("Application startup completed successfully")
|
logger.info("Application startup completed successfully")
|
||||||
|
|
||||||
@@ -102,7 +103,7 @@ app = FastAPI(
|
|||||||
# Configure CORS
|
# Configure CORS
|
||||||
app.add_middleware(
|
app.add_middleware(
|
||||||
CORSMiddleware,
|
CORSMiddleware,
|
||||||
allow_origins=["http://localhost:5173"], # React frontend
|
allow_origins=["http://localhost:5173", "http://localhost:5174"], # React frontend
|
||||||
allow_credentials=True,
|
allow_credentials=True,
|
||||||
allow_methods=["*"],
|
allow_methods=["*"],
|
||||||
allow_headers=["*"],
|
allow_headers=["*"],
|
||||||
@@ -111,7 +112,7 @@ app.add_middleware(
|
|||||||
# Include routers
|
# Include routers
|
||||||
app.include_router(auth_router, prefix="/auth", tags=["Authentication"])
|
app.include_router(auth_router, prefix="/auth", tags=["Authentication"])
|
||||||
app.include_router(users_router, prefix="/users", tags=["User Management"])
|
app.include_router(users_router, prefix="/users", tags=["User Management"])
|
||||||
# app.include_router(documents_router, prefix="/documents", tags=["Documents"])
|
app.include_router(documents_router, prefix="/api", tags=["Documents"])
|
||||||
# app.include_router(jobs_router, prefix="/jobs", tags=["Processing Jobs"])
|
# app.include_router(jobs_router, prefix="/jobs", tags=["Processing Jobs"])
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -7,10 +7,9 @@ stored in MongoDB collections.
|
|||||||
|
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from enum import Enum
|
from enum import Enum
|
||||||
from typing import Any, Dict, Optional
|
from typing import Any, Dict, List, Optional
|
||||||
|
|
||||||
from bson import ObjectId
|
from pydantic import BaseModel, Field, field_validator, ConfigDict
|
||||||
from pydantic import BaseModel, Field, field_validator
|
|
||||||
|
|
||||||
from app.models.types import PyObjectId
|
from app.models.types import PyObjectId
|
||||||
|
|
||||||
@@ -49,6 +48,8 @@ class FileDocument(BaseModel):
|
|||||||
metadata: Dict[str, Any] = Field(default_factory=dict, description="File-specific metadata")
|
metadata: Dict[str, Any] = Field(default_factory=dict, description="File-specific metadata")
|
||||||
detected_at: Optional[datetime] = Field(default=None, description="Timestamp when file was detected")
|
detected_at: Optional[datetime] = Field(default=None, description="Timestamp when file was detected")
|
||||||
file_hash: Optional[str] = Field(default=None, description="SHA256 hash of file content")
|
file_hash: Optional[str] = Field(default=None, description="SHA256 hash of file content")
|
||||||
|
pdf_file_hash: Optional[str] = Field(default=None, description="SHA256 hash of the associated pdf file content")
|
||||||
|
thumbnail_file_hash: Optional[str] = Field(default=None, description="SHA256 hash of the thumbnail")
|
||||||
encoding: str = Field(default="utf-8", description="Character encoding for text files")
|
encoding: str = Field(default="utf-8", description="Character encoding for text files")
|
||||||
file_size: int = Field(..., ge=0, description="File size in bytes")
|
file_size: int = Field(..., ge=0, description="File size in bytes")
|
||||||
mime_type: str = Field(..., description="MIME type detected")
|
mime_type: str = Field(..., description="MIME type detected")
|
||||||
@@ -68,3 +69,28 @@ class FileDocument(BaseModel):
|
|||||||
if not v.strip():
|
if not v.strip():
|
||||||
raise ValueError("Filename cannot be empty")
|
raise ValueError("Filename cannot be empty")
|
||||||
return v.strip()
|
return v.strip()
|
||||||
|
|
||||||
|
|
||||||
|
class DocumentResponse(BaseModel):
|
||||||
|
"""
|
||||||
|
Response model for document API endpoints.
|
||||||
|
|
||||||
|
Represents a document in the format expected by the frontend application.
|
||||||
|
Field names are automatically converted from snake_case to camelCase.
|
||||||
|
"""
|
||||||
|
|
||||||
|
model_config = ConfigDict(alias_generator=lambda field_name: ''.join(
|
||||||
|
word.capitalize() if i > 0 else word
|
||||||
|
for i, word in enumerate(field_name.split('_'))
|
||||||
|
), populate_by_name=True)
|
||||||
|
|
||||||
|
id: str = Field(..., description="Document unique identifier")
|
||||||
|
name: str = Field(..., description="Document filename")
|
||||||
|
original_file_type: str = Field(..., description="Original file type before conversion")
|
||||||
|
created_at: str = Field(..., description="ISO timestamp when document was created")
|
||||||
|
file_size: int = Field(..., description="File size in bytes")
|
||||||
|
page_count: int = Field(..., description="Number of pages in the document")
|
||||||
|
thumbnail_url: Optional[str] = Field(default=None, description="URL to document thumbnail")
|
||||||
|
pdf_url: Optional[str] = Field(default=None, description="URL to PDF version of document")
|
||||||
|
tags: List[str] = Field(default_factory=list, description="Document tags")
|
||||||
|
categories: List[str] = Field(default_factory=list, description="Document categories")
|
||||||
|
|||||||
@@ -14,6 +14,9 @@ class ProcessingStatus(str, Enum):
|
|||||||
PENDING = "pending"
|
PENDING = "pending"
|
||||||
PROCESSING = "processing"
|
PROCESSING = "processing"
|
||||||
COMPLETED = "completed"
|
COMPLETED = "completed"
|
||||||
|
SAVING_OBJECT = "saving_object"
|
||||||
|
SAVING_PDF = "saving_pdf"
|
||||||
|
CREATING_THUMBNAIL = "creating_thumbnail"
|
||||||
FAILED = "failed"
|
FAILED = "failed"
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -105,6 +105,7 @@ class UserUpdate(BaseModel):
|
|||||||
password: Optional[str] = None
|
password: Optional[str] = None
|
||||||
role: Optional[UserRole] = None
|
role: Optional[UserRole] = None
|
||||||
is_active: Optional[bool] = None
|
is_active: Optional[bool] = None
|
||||||
|
preferences: Optional[dict] = None
|
||||||
|
|
||||||
@field_validator('username')
|
@field_validator('username')
|
||||||
@classmethod
|
@classmethod
|
||||||
@@ -130,6 +131,7 @@ class UserInDB(BaseModel):
|
|||||||
hashed_password: str
|
hashed_password: str
|
||||||
role: UserRole
|
role: UserRole
|
||||||
is_active: bool = True
|
is_active: bool = True
|
||||||
|
preferences: dict = Field(default_factory=dict)
|
||||||
created_at: datetime
|
created_at: datetime
|
||||||
updated_at: datetime
|
updated_at: datetime
|
||||||
|
|
||||||
|
|||||||
@@ -6,7 +6,9 @@ while maintaining data consistency through MongoDB transactions.
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
import hashlib
|
import hashlib
|
||||||
|
import logging
|
||||||
import os
|
import os
|
||||||
|
import shutil
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from typing import List, Optional, Dict, Any
|
from typing import List, Optional, Dict, Any
|
||||||
@@ -14,13 +16,28 @@ from typing import List, Optional, Dict, Any
|
|||||||
import magic
|
import magic
|
||||||
from pymongo.errors import PyMongoError
|
from pymongo.errors import PyMongoError
|
||||||
|
|
||||||
from app.config.settings import get_objects_folder
|
from app.config.settings import get_objects_folder, get_temp_folder, get_errors_folder, get_ignored_folder
|
||||||
from app.database.repositories.document_repository import FileDocumentRepository
|
from app.database.repositories.document_repository import FileDocumentRepository
|
||||||
from app.models.document import (
|
from app.models.document import (
|
||||||
FileDocument,
|
FileDocument,
|
||||||
FileType,
|
FileType,
|
||||||
)
|
)
|
||||||
from app.models.types import PyObjectId
|
from app.models.types import PyObjectId
|
||||||
|
from app.utils.pdf_converter import convert_to_pdf
|
||||||
|
from app.utils.pdf_thumbmail import PDFThumbnailGenerator
|
||||||
|
from app.utils.security import generate_uuid_filename
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class DocumentAlreadyExists(Exception):
|
||||||
|
def __init__(self, message):
|
||||||
|
self.message = message
|
||||||
|
|
||||||
|
|
||||||
|
class DocumentProcessingError(Exception):
|
||||||
|
def __init__(self, message):
|
||||||
|
self.message = message
|
||||||
|
|
||||||
|
|
||||||
class DocumentService:
|
class DocumentService:
|
||||||
@@ -31,7 +48,11 @@ class DocumentService:
|
|||||||
and their content while ensuring data consistency through transactions.
|
and their content while ensuring data consistency through transactions.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, database, objects_folder: str = None):
|
def __init__(self, database,
|
||||||
|
objects_folder: str = None,
|
||||||
|
temp_folder: str = None,
|
||||||
|
errors_folder: str = None,
|
||||||
|
ignored_folder: str = None):
|
||||||
"""
|
"""
|
||||||
Initialize the document service with repository dependencies.
|
Initialize the document service with repository dependencies.
|
||||||
|
|
||||||
@@ -43,6 +64,9 @@ class DocumentService:
|
|||||||
self.db = database
|
self.db = database
|
||||||
self.document_repository = FileDocumentRepository(self.db)
|
self.document_repository = FileDocumentRepository(self.db)
|
||||||
self.objects_folder = objects_folder or get_objects_folder()
|
self.objects_folder = objects_folder or get_objects_folder()
|
||||||
|
self.temp_folder = temp_folder or get_temp_folder()
|
||||||
|
self.errors_folder = errors_folder or get_errors_folder()
|
||||||
|
self.ignored_folder = ignored_folder or get_ignored_folder()
|
||||||
|
|
||||||
def initialize(self):
|
def initialize(self):
|
||||||
self.document_repository.initialize()
|
self.document_repository.initialize()
|
||||||
@@ -117,7 +141,40 @@ class DocumentService:
|
|||||||
|
|
||||||
return path.read_bytes()
|
return path.read_bytes()
|
||||||
|
|
||||||
def _get_document_path(self, file_hash):
|
@staticmethod
|
||||||
|
def _get_safe_path(file_path):
|
||||||
|
"""
|
||||||
|
If the path already exists, add a suffix to the filename.
|
||||||
|
Increment the suffix until a safe path is found.
|
||||||
|
:param file_path:
|
||||||
|
:return:
|
||||||
|
"""
|
||||||
|
path = Path(file_path)
|
||||||
|
|
||||||
|
# If the path doesn't exist, return it as is
|
||||||
|
if not path.exists():
|
||||||
|
return file_path
|
||||||
|
|
||||||
|
# Split the filename and extension
|
||||||
|
stem = path.stem
|
||||||
|
suffix = path.suffix
|
||||||
|
directory = path.parent
|
||||||
|
|
||||||
|
# Try incrementing numbers until a unique path is found
|
||||||
|
counter = 1
|
||||||
|
while True:
|
||||||
|
# Create new filename with counter
|
||||||
|
new_filename = f"{stem}_{counter}{suffix}"
|
||||||
|
new_path = os.path.join(directory, new_filename)
|
||||||
|
|
||||||
|
# Check if this new path exists
|
||||||
|
if not os.path.exists(new_path):
|
||||||
|
return new_path
|
||||||
|
|
||||||
|
# Increment counter for next attempt
|
||||||
|
counter += 1
|
||||||
|
|
||||||
|
def get_document_path(self, file_hash):
|
||||||
"""
|
"""
|
||||||
|
|
||||||
:param file_hash:
|
:param file_hash:
|
||||||
@@ -125,8 +182,13 @@ class DocumentService:
|
|||||||
"""
|
"""
|
||||||
return os.path.join(self.objects_folder, file_hash[:24], file_hash)
|
return os.path.join(self.objects_folder, file_hash[:24], file_hash)
|
||||||
|
|
||||||
|
def exists(self, file_hash):
|
||||||
|
if file_hash is None:
|
||||||
|
return False
|
||||||
|
return os.path.exists(self.get_document_path(file_hash))
|
||||||
|
|
||||||
def save_content_if_needed(self, file_hash, content: bytes):
|
def save_content_if_needed(self, file_hash, content: bytes):
|
||||||
target_path = self._get_document_path(file_hash)
|
target_path = self.get_document_path(file_hash)
|
||||||
if os.path.exists(target_path):
|
if os.path.exists(target_path):
|
||||||
return
|
return
|
||||||
|
|
||||||
@@ -136,6 +198,19 @@ class DocumentService:
|
|||||||
with open(target_path, "wb") as f:
|
with open(target_path, "wb") as f:
|
||||||
f.write(content)
|
f.write(content)
|
||||||
|
|
||||||
|
def move_to_errors(self, document_id, file_path):
|
||||||
|
logger.info(f"Moving file {file_path} to error folder")
|
||||||
|
error_file_name = f"{document_id}_{os.path.basename(file_path)}"
|
||||||
|
error_file_path = self._get_safe_path(os.path.join(self.errors_folder, error_file_name))
|
||||||
|
shutil.move(file_path, error_file_path)
|
||||||
|
|
||||||
|
def move_to_ignored(self, file_path, reason="Unknown"):
|
||||||
|
logger.info(f"Moving file {file_path} to ignored folder")
|
||||||
|
ignored_file_name = datetime.now().strftime("%Y-%m-%d_%H-%M-%S") + f"_### {reason} ###_" + os.path.basename(
|
||||||
|
file_path)
|
||||||
|
ignored_file_path = self._get_safe_path(os.path.join(self.ignored_folder, ignored_file_name))
|
||||||
|
shutil.move(file_path, ignored_file_path)
|
||||||
|
|
||||||
def create_document(
|
def create_document(
|
||||||
self,
|
self,
|
||||||
file_path: str,
|
file_path: str,
|
||||||
@@ -171,7 +246,16 @@ class DocumentService:
|
|||||||
detected_at = datetime.now()
|
detected_at = datetime.now()
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
logger.info(f'Creating Document for "{file_path}"')
|
||||||
|
# Skip the document if it already exists
|
||||||
|
same_document = self.document_repository.find_same_document(filename, file_hash)
|
||||||
|
if same_document is not None:
|
||||||
|
logger.info(f" Document with same hash already exists. Skipping...")
|
||||||
|
self.move_to_ignored(file_path, f"already exists ({same_document.id})")
|
||||||
|
raise DocumentAlreadyExists(f"Document with same hash already exists ({same_document.id})")
|
||||||
|
|
||||||
self.save_content_if_needed(file_hash, file_bytes)
|
self.save_content_if_needed(file_hash, file_bytes)
|
||||||
|
logger.info(f" Saved content to {self.get_document_path(file_hash)}")
|
||||||
|
|
||||||
# Create FileDocument
|
# Create FileDocument
|
||||||
file_data = FileDocument(
|
            file_data = FileDocument(
@@ -187,14 +271,90 @@ class DocumentService:
                mime_type=mime_type
            )

-            created_file = self.document_repository.create_document(file_data)
+            created_document = self.document_repository.create_document(file_data)
+            logger.info(f" Created document with id '{created_document.id}'")

-            return created_file
+            return created_document

+        except DocumentAlreadyExists as e:
+            raise e
        except Exception as e:
            # Transaction will automatically rollback if supported
            raise PyMongoError(f"Failed to create document: {str(e)}")

+    def create_pdf(self, document_id: PyObjectId):
+        """
+        For all files, a controlled pdf version will be created for standard visualization and action
+        :return:
+        """
+        logger.info(f"Creating PDF document for {document_id}")
+        document = self.get_document_by_id(document_id)
+        if document is None:
+            logger.error(f" Document not found")
+            raise DocumentProcessingError(f"Document {document_id} not found.")
+
+        # try to find another document that has the same hash
+        document_with_same_hash = self.get_document_with_pdf_hash(document.file_hash)
+
+        # the pdf will be created only if it does not exist yet
+        if document_with_same_hash and self.exists(document_with_same_hash.pdf_file_hash):
+            logger.info(f'Found document with same hash. Will use pdf "{document_with_same_hash.pdf_file_hash}".')
+            self.update_document(document_id, {"pdf_file_hash": document_with_same_hash.pdf_file_hash})
+            return
+
+        # get the content of the file
+        logger.info(f" No document with same hash and valid pdf found. Will create new pdf content.")
+        file_bytes = self.get_document_content_by_hash(document.file_hash)
+        if file_bytes is None:
+            logger.error(f'Content for document "{document_id}" not found. hash = "{document.file_hash}".')
+            raise DocumentProcessingError(f'Content for document "{document_id}" not found. hash = "{document.file_hash}".')
+
+        # create the pdf file
+        temp_pdf_file = convert_to_pdf(self.get_document_path(document.file_hash), self.temp_folder)
+        pdf_file_hash = self._calculate_file_hash(self._read_file_bytes(temp_pdf_file))
+        self.save_content_if_needed(pdf_file_hash, self._read_file_bytes(temp_pdf_file))
+        os.remove(temp_pdf_file)  # remove the temporary file
+        logger.info(f' Created new pdf file with hash "{pdf_file_hash}"')
+
+        # update the document
+        self.update_document(document_id, {"pdf_file_hash": pdf_file_hash})
+
+    def create_thumbnail(self, document_id: PyObjectId):
+        logger.info(f'Creating thumbnail document for "{document_id}"')
+        document = self.get_document_by_id(document_id)
+        if document is None:
+            logger.error(f" Document not found !")
+            raise DocumentProcessingError(f"Document {document_id} not found.")
+
+        # try to find another document that has the same hash
+        document_with_same_hash = self.get_document_with_pdf_hash(document.file_hash)
+
+        # We will use the thumbnail of the pdf if it exists
+        if document_with_same_hash and self.exists(document_with_same_hash.thumbnail_file_hash):
+            logger.info(f" Found document with same hash. Will use thumbnail {document_with_same_hash.thumbnail_file_hash}")
+            self.update_document(document_id, {"thumbnail_file_hash": document_with_same_hash.thumbnail_file_hash})
+            return
+
+        logger.info(f" No document with same hash and valid thumbnail found. Will create new thumbnail")
+
+        if not self.exists(document.pdf_file_hash):
+            logger.error(f" PDF file not found.")
+            raise DocumentProcessingError(f"PDF file for document {document_id} not found")
+
+        tmp_thumbnail_path = os.path.join(self.temp_folder, f"{generate_uuid_filename()}.png")
+        with PDFThumbnailGenerator(self.get_document_path(document.pdf_file_hash)) as gen:
+            # create the thumbnail
+            gen.create_thumbnail(tmp_thumbnail_path, page_num=0, width=200)
+            thumbnail_file_hash = self._calculate_file_hash(self._read_file_bytes(tmp_thumbnail_path))
+
+            # save the thumbnail to the objects folder
+            self.save_content_if_needed(thumbnail_file_hash, self._read_file_bytes(tmp_thumbnail_path))
+            os.remove(tmp_thumbnail_path)
+
+            # update the document
+            self.update_document(document_id, {"thumbnail_file_hash": thumbnail_file_hash})
+            logger.info(f" Created thumbnail {thumbnail_file_hash}")

    def get_document_by_id(self, document_id: PyObjectId) -> Optional[FileDocument]:
        """
        Retrieve a document by its ID.
@@ -219,6 +379,9 @@ class DocumentService:
        """
        return self.document_repository.find_document_by_hash(file_hash)

+    def get_document_with_pdf_hash(self, file_hash) -> Optional[FileDocument]:
+        return self.document_repository.find_document_with_pdf_hash(file_hash)
+
    def get_document_by_filepath(self, filepath: str) -> Optional[FileDocument]:
        """
        Retrieve a document by its file path.
@@ -232,7 +395,7 @@ class DocumentService:
        return self.document_repository.find_document_by_filepath(filepath)

    def get_document_content_by_hash(self, file_hash):
-        target_path = self._get_document_path(file_hash)
+        target_path = self.get_document_path(file_hash)
        if not os.path.exists(target_path):
            return None

@@ -323,7 +486,7 @@ class DocumentService:
        # If no other files reference this content, delete it
        if not remaining_files:
            try:
-                os.remove(self._get_document_path(document.file_hash))
+                os.remove(self.get_document_path(document.file_hash))
            except Exception:
                pass

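The new `create_pdf` and `create_thumbnail` methods deduplicate derived files by content hash: if another document already produced a PDF for the same `file_hash`, its `pdf_file_hash` is reused instead of converting again, and thumbnails are handled the same way. A minimal sketch of how a worker task might drive this flow; the `document_service` wiring (repository, temp folder) is an assumption, not taken from this diff:

```python
# Hypothetical worker-side driver for the derived-file flow shown above.
def build_derived_files(document_service, document_id):
    # Reuses an existing PDF when another document shares the same file_hash,
    # otherwise converts the source, hashes the result and stores it once.
    document_service.create_pdf(document_id)

    # The thumbnail is rendered from the stored PDF (page 0, 200 px wide)
    # and is likewise stored under its own content hash.
    document_service.create_thumbnail(document_id)
```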
@@ -111,7 +111,9 @@ class JobService:
        current_job = self.repository.find_job_by_id(job_id)

        # Validate status transition
-        if current_job.status != ProcessingStatus.PROCESSING:
+        if current_job.status in (ProcessingStatus.PENDING,
+                                  ProcessingStatus.COMPLETED,
+                                  ProcessingStatus.FAILED):
            raise InvalidStatusTransitionError(current_job.status, ProcessingStatus.COMPLETED)

        # Update status
@@ -141,7 +143,7 @@ class JobService:
        current_job = self.repository.find_job_by_id(job_id)

        # Validate status transition
-        if current_job.status != ProcessingStatus.PROCESSING:
+        if current_job.status in (ProcessingStatus.PENDING, ProcessingStatus.COMPLETED, ProcessingStatus.FAILED):
            raise InvalidStatusTransitionError(current_job.status, ProcessingStatus.FAILED)

        # Update status with error message
@@ -151,6 +153,11 @@ class JobService:
            error_message
        )

+    def update_job_status(self, job_id: PyObjectId,
+                          status: ProcessingStatus,
+                          error_message: str = None) -> ProcessingJob:
+        return self.repository.update_job_status(job_id, status, error_message)
+
    def delete_job(self, job_id: PyObjectId) -> bool:
        """
        Delete a job from the database.

@@ -184,3 +184,18 @@ class UserService:
            bool: True if user exists, False otherwise
        """
        return self.user_repository.user_exists(username)
+
+    def get_preference(self, user_id: str, preference):
+        user = self.get_user_by_id(user_id)
+        if user is None:
+            return None
+        return user.preferences.get(preference, None)
+
+    def set_preference(self, user_id: str, preference, value):
+        user = self.get_user_by_id(user_id)
+        if user is None:
+            return None
+
+        user.preferences[preference] = value
+        self.user_repository.update_user(user_id, UserUpdate(preferences=user.preferences))
+        return self.get_user_by_id(user_id)

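The new preference helpers store arbitrary key/value pairs on the user document and return the refreshed user. A short, hedged sketch of how a caller might use them; the `user_service` instance and the `"theme"` key are illustrative assumptions:

```python
# Illustrative only: persist the UI theme picked in the frontend ThemeSwitcher.
updated_user = user_service.set_preference(user_id, "theme", "dark")
if updated_user is not None:
    assert user_service.get_preference(user_id, "theme") == "dark"
```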
241 src/file-processor/app/utils/pdf_annotation.py Normal file
@@ -0,0 +1,241 @@
import fitz  # PyMuPDF


class PDFAnnotator:
    def __init__(self, pdf_path):
        self.doc = fitz.open(pdf_path)

    def add_highlight(self, rect, page_num=0, color=(1, 1, 0)):
        """
        Add highlight annotation

        Args:
            rect: (x0, y0, x1, y1) coordinates or fitz.Rect object
            page_num: Page number (0-indexed), default first page
            color: RGB tuple (0-1 range), default yellow
        """
        page = self.doc[page_num]
        annot = page.add_highlight_annot(rect)
        annot.set_colors(stroke=color)
        annot.update()
        return annot

    def add_rectangle(self, rect, page_num=0, color=(1, 0, 0), width=2):
        """
        Add rectangle annotation (border only)

        Args:
            rect: (x0, y0, x1, y1) coordinates or fitz.Rect object
            page_num: Page number (0-indexed), default first page
            color: RGB tuple (0-1 range), default red
            width: Line width in points
        """
        page = self.doc[page_num]
        annot = page.add_rect_annot(rect)
        annot.set_colors(stroke=color)
        annot.set_border(width=width)
        annot.update()
        return annot

    def add_text_note(self, point, text, page_num=0, icon="Note"):
        """
        Add sticky note annotation

        Args:
            point: (x, y) position tuple
            text: Note content string
            page_num: Page number (0-indexed), default first page
            icon: "Note", "Comment", "Help", "Insert", "Key", etc.
        """
        page = self.doc[page_num]
        annot = page.add_text_annot(point, text, icon=icon)
        annot.update()
        return annot

    def add_free_text(self, rect, text, page_num=0, fontsize=12, color=(0, 0, 0)):
        """
        Add free text annotation (visible text box)

        Args:
            rect: (x0, y0, x1, y1) bounding box tuple or fitz.Rect
            text: Text content string
            page_num: Page number (0-indexed), default first page
            fontsize: Font size in points
            color: Text color RGB tuple (0-1 range)
        """
        page = self.doc[page_num]
        annot = page.add_freetext_annot(
            rect,
            text,
            fontsize=fontsize,
            text_color=color
        )
        annot.update()
        return annot

    def add_arrow(self, start_point, end_point, page_num=0, color=(1, 0, 0), width=2):
        """
        Add arrow annotation

        Args:
            start_point: (x, y) tuple for arrow start
            end_point: (x, y) tuple for arrow end
            page_num: Page number (0-indexed), default first page
            color: Arrow color RGB tuple (0-1 range), default red
            width: Line width in points
        """
        page = self.doc[page_num]
        annot = page.add_line_annot(start_point, end_point)
        annot.set_colors(stroke=color)
        annot.set_border(width=width)
        # Set arrow at end - use integer constant
        annot.set_line_ends(0, 1)  # 1 = ClosedArrow
        annot.update()
        return annot

    def add_stamp(self, rect, page_num=0, stamp_type=0):
        """
        Add stamp annotation

        Args:
            rect: (x0, y0, x1, y1) bounding box tuple or fitz.Rect
            page_num: Page number (0-indexed), default first page
            stamp_type: Integer for stamp type:
                0=Approved, 1=AsIs, 2=Confidential,
                3=Departmental, 4=Draft, 5=Experimental,
                6=Expired, 7=Final, 8=ForComment,
                9=ForPublicRelease, 10=NotApproved, etc.
        """
        page = self.doc[page_num]
        annot = page.add_stamp_annot(rect, stamp=stamp_type)
        annot.update()
        return annot

    def add_redaction(self, rect, page_num=0, fill_color=(0, 0, 0)):
        """
        Add redaction annotation (marks area for redaction)
        Note: Use apply_redactions() to permanently remove content

        Args:
            rect: (x0, y0, x1, y1) area to redact, tuple or fitz.Rect
            page_num: Page number (0-indexed), default first page
            fill_color: RGB tuple (0-1 range) for redacted area, default black
        """
        page = self.doc[page_num]
        annot = page.add_redact_annot(rect, fill=fill_color)
        annot.update()
        return annot

    def apply_redactions(self, page_num=0, images=2, graphics=2, text=2):
        """
        Apply all redaction annotations on a page (permanent removal)

        Args:
            page_num: Page number (0-indexed), default first page
            images: 2=remove, 1=blank, 0=ignore
            graphics: 2=remove, 1=blank, 0=ignore
            text: 2=remove, 1=blank, 0=ignore

        Returns:
            True if redactions were applied, False otherwise
        """
        page = self.doc[page_num]
        # Check if page has redaction annotations
        has_redactions = any(annot.type[0] == 12 for annot in page.annots())

        if has_redactions:
            page.apply_redactions(images=images, graphics=graphics, text=text)
            return True
        return False

    def get_all_annotations(self, page_num=0):
        """
        Retrieve all annotations from a page

        Args:
            page_num: Page number (0-indexed), default first page

        Returns:
            List of dicts with annotation information
        """
        page = self.doc[page_num]
        annotations = []

        for annot in page.annots():
            info = {
                'type': annot.type[1],  # Annotation type name
                'rect': annot.rect,
                'content': annot.info.get('content', ''),
                'author': annot.info.get('title', ''),
                'created': annot.info.get('creationDate', ''),
                'colors': annot.colors
            }
            annotations.append(info)

        return annotations

    def remove_all_annotations(self, page_num=0):
        """
        Remove all annotations from a page

        Args:
            page_num: Page number (0-indexed), default first page
        """
        page = self.doc[page_num]
        for annot in page.annots():
            page.delete_annot(annot)

    def save(self, output_path):
        """Save the annotated PDF"""
        self.doc.save(output_path)

    def close(self):
        self.doc.close()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()


# Example usage
if __name__ == "__main__":
    with PDFAnnotator("input.pdf") as annotator:
        # Add yellow highlight
        annotator.add_highlight((100, 100, 300, 120), page_num=0, color=(1, 1, 0))

        # Add red rectangle border
        annotator.add_rectangle((100, 150, 300, 250), page_num=0, color=(1, 0, 0), width=3)

        # Add sticky note
        annotator.add_text_note((400, 100), "This is important!", page_num=0, icon="Comment")

        # Add visible text box
        annotator.add_free_text((100, 300, 400, 350), "DRAFT VERSION", page_num=0, fontsize=20, color=(1, 0, 0))

        # Add arrow pointing to something
        annotator.add_arrow((450, 100), (500, 200), page_num=0, color=(0, 0, 1), width=2)

        # Add "Approved" stamp
        annotator.add_stamp((450, 300, 550, 350), page_num=0, stamp_type=0)

        # Add redaction (black box over sensitive info)
        annotator.add_redaction((100, 400, 300, 420), page_num=0)
        annotator.apply_redactions(page_num=0)

        # List all annotations
        annots = annotator.get_all_annotations(page_num=0)
        print(f"Found {len(annots)} annotations:")
        for a in annots:
            print(f"  - {a['type']} at {a['rect']}")

        # Save annotated PDF
        annotator.save("output_annotated.pdf")

210 src/file-processor/app/utils/pdf_converter.py Normal file
@@ -0,0 +1,210 @@
import datetime
import hashlib
import os
import uuid
from abc import ABC
from pathlib import Path
from typing import Self

import pikepdf
import pypandoc
from PIL import Image
from reportlab.lib.pagesizes import A4
from reportlab.pdfgen import canvas

from tasks.common.converter_utils import detect_file_type


class BaseConverter(ABC):
    """Abstract base class for file converters to PDF."""

    def __init__(self, input_path: str, output_dir: str = ".") -> None:
        self.input_path = Path(input_path)
        self.output_dir = Path(output_dir)
        self.output_path = self.output_dir / f"{self.generate_uuid_filename()}.pdf"

    def convert(self) -> Self:
        """Convert input file to PDF and return the output path."""
        pass

    @staticmethod
    def generate_uuid_filename() -> str:
        """Generate a unique filename using UUID4."""
        return str(uuid.uuid4())

    def get_deterministic_date(self) -> str:
        """
        Generate a deterministic date based on file content.
        This ensures the same file always produces the same PDF.
        """
        # Option 1: Use a fixed date
        # return "D:20000101000000"

        # Option 2: Generate date from content hash (recommended)
        with open(self.input_path, 'rb') as f:
            content = f.read()
        content_hash = hashlib.sha256(content).hexdigest()

        # Use first 14 characters of hash to create a valid date
        # Format: D:YYYYMMDDHHmmss
        hash_int = int(content_hash[:14], 16)

        # Create a date between 2000-2099 to keep it reasonable
        year = 2000 + (hash_int % 100)
        month = 1 + (hash_int % 12)
        day = 1 + (hash_int % 28)  # Stay safe with 28 days
        hour = hash_int % 24
        minute = hash_int % 60
        second = hash_int % 60

        return f"D:{year:04d}{month:02d}{day:02d}{hour:02d}{minute:02d}{second:02d}"

    def get_file_creation_date(self):
        # Get file creation time (or modification time)
        ts = os.path.getctime(self.input_path)  # getmtime(self.input_path) for last modification
        dt = datetime.datetime.fromtimestamp(ts)

        # PDF expects format D:YYYYMMDDHHmmss
        creation_date = dt.strftime("D:%Y%m%d%H%M%S")
        return creation_date

    def clean_pdf(self) -> Self:
        """Remove all non-deterministic metadata from PDF."""
        with pikepdf.open(self.output_path, allow_overwriting_input=True) as pdf:
            # Remove XMP metadata if it exists
            if hasattr(pdf.Root, 'Metadata'):
                del pdf.Root.Metadata

            # Clear all document info by deleting each key
            for key in list(pdf.docinfo.keys()):
                del pdf.docinfo[key]

            # Set deterministic metadata
            pdf.docinfo["/Producer"] = "MyConverter"
            pdf.docinfo["/Creator"] = "MyConverter"
            pdf.docinfo["/CreationDate"] = self.get_deterministic_date()
            pdf.docinfo["/ModDate"] = self.get_deterministic_date()
            pdf.docinfo["/Title"] = self.input_path.name

            # Save with deterministic IDs
            # compress=True ensures consistent compression
            # deterministic_id=True (if available) or static_id=True
            pdf.save(
                self.output_path,
                fix_metadata_version=True,
                compress_streams=True,
                stream_decode_level=pikepdf.StreamDecodeLevel.generalized,
                object_stream_mode=pikepdf.ObjectStreamMode.disable,
                deterministic_id=True  # Use this if pikepdf >= 8.0.0, otherwise use static_id=True
            )

        return self


class TextToPdfConverter(BaseConverter):
    """Converter for text files to PDF."""

    def convert(self) -> Self:
        c = canvas.Canvas(str(self.output_path), pagesize=A4)

        # Fix metadata with deterministic values
        info = c._doc.info
        info.producer = "MyConverter"
        info.creationDate = self.get_file_creation_date()
        info.title = os.path.basename(self.input_path)

        width, height = A4
        with open(self.input_path, "r", encoding="utf-8") as f:
            y = height - 50
            for line in f:
                c.drawString(50, y, line.strip())
                y -= 15
                if y < 50:
                    c.showPage()
                    y = height - 50

        c.save()
        return self


class PdfToPdfConverter(BaseConverter):
    """Converter for PDF files to PDF."""

    def convert(self) -> Self:
        # copy self.input_path to self.output_path
        os.system(f"cp {self.input_path} {self.output_path}")
        return self


class ImageToPdfConverter(BaseConverter):
    """Converter for image files to PDF."""

    def convert(self) -> Self:
        image = Image.open(self.input_path)
        rgb_image = image.convert("RGB")
        rgb_image.save(self.output_path)
        return self


class WordToPdfConverter(BaseConverter):
    """Converter for Word files (.docx) to PDF using pypandoc."""

    def convert(self) -> Self:
        pypandoc.convert_file(
            str(self.input_path), "pdf", outputfile=str(self.output_path)
        )
        return self


# Placeholders for future extensions
class HtmlToPdfConverter(BaseConverter):
    """Placeholder for HTML to PDF converter."""

    def convert(self) -> Self:
        raise NotImplementedError("HTML to PDF conversion not implemented.")


class ExcelToPdfConverter(BaseConverter):
    """Placeholder for Excel to PDF converter."""

    def convert(self) -> Self:
        raise NotImplementedError("Excel to PDF conversion not implemented.")


class MarkdownToPdfConverter(BaseConverter):
    """Placeholder for Markdown to PDF converter."""

    def convert(self) -> Self:
        raise NotImplementedError("Markdown to PDF conversion not implemented.")


def convert_to_pdf(filepath: str, output_dir: str = ".") -> str:
    """
    Convert any supported file to PDF.

    Args:
        filepath (str): Path to the input file.
        output_dir (str): Directory to save the output PDF.

    Returns:
        str: Path to the generated PDF.

    Raises:
        UnsupportedFileTypeError: If the input file type is not supported.
    """
    file_type = detect_file_type(filepath)

    if file_type == "text":
        converter = TextToPdfConverter(filepath, output_dir=output_dir)
    elif file_type == "image":
        converter = ImageToPdfConverter(filepath, output_dir=output_dir)
    elif file_type == "word":
        converter = WordToPdfConverter(filepath, output_dir=output_dir)
    elif file_type == "pdf":
        converter = PdfToPdfConverter(filepath, output_dir=output_dir)
    else:
        raise ValueError(f"Unsupported file type: {file_type}")

    converter.convert()
    converter.clean_pdf()
    return str(converter.output_path)

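A brief, hedged usage sketch for `convert_to_pdf`; the input file and output directory are illustrative, and the import path is assumed from the file location above. Since `clean_pdf()` rewrites the PDF metadata from the source content, repeated conversions of the same file are intended to be reproducible apart from the UUID-based output name:

```python
# Hypothetical caller; "contract.docx" and the temp directory are assumptions.
from app.utils.pdf_converter import convert_to_pdf  # import path assumed

pdf_path = convert_to_pdf("contract.docx", output_dir="/tmp/mydocmanager")
print(f"Controlled PDF written to {pdf_path}")
```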
167 src/file-processor/app/utils/pdf_thumbmail.py Normal file
@@ -0,0 +1,167 @@
from pathlib import Path

import fitz  # PyMuPDF


class PDFThumbnailGenerator:
    def __init__(self, pdf_path):
        """
        Initialize PDF thumbnail generator

        Args:
            pdf_path: Path to the PDF file (string or Path object)
        """
        self.pdf_path = pdf_path
        self.doc = fitz.open(pdf_path)

    def create_thumbnail(self, output_path, page_num=0, width=200, rotation=0, zoom_factor=1.0):
        """
        Create a thumbnail with zoom and rotation

        Args:
            output_path: Path to save the thumbnail (string or Path)
            page_num: Page number (0-indexed), default first page
            width: Desired width in pixels, default 200
            rotation: Rotation angle in degrees (0, 90, 180, 270), default 0
            zoom_factor: Additional zoom multiplier (1.0 = normal, 2.0 = 2x), default 1.0

        Returns:
            Dict with thumbnail info (width, height, rotation, zoom)
        """
        page = self.doc[page_num]

        # Apply rotation to page
        page.set_rotation(rotation)

        # Calculate zoom to achieve desired width
        base_zoom = width / page.rect.width
        final_zoom = base_zoom * zoom_factor

        # Create transformation matrix
        mat = fitz.Matrix(final_zoom, final_zoom)

        # Render page to pixmap
        pix = page.get_pixmap(matrix=mat, alpha=False)

        # Save thumbnail
        pix.save(output_path)

        return {
            'width': pix.width,
            'height': pix.height,
            'rotation': rotation,
            'zoom': zoom_factor
        }

    def create_cropped_thumbnail(self, output_path, crop_rect=None, page_num=0, width=200):
        """
        Create a thumbnail of a specific region (zoom on area)

        Args:
            output_path: Path to save the thumbnail (string or Path)
            crop_rect: Tuple (x0, y0, x1, y1) in PDF coordinates for cropping,
                or None for full page, default None
            page_num: Page number (0-indexed), default first page
            width: Desired width in pixels, default 200

        Returns:
            Tuple (width, height) of the generated thumbnail
        """
        page = self.doc[page_num]

        if crop_rect:
            # Create rectangle for cropping
            rect = fitz.Rect(crop_rect)
            zoom = width / rect.width
        else:
            rect = page.rect
            zoom = width / page.rect.width

        mat = fitz.Matrix(zoom, zoom)

        # Render only the specified rectangle
        pix = page.get_pixmap(matrix=mat, clip=rect)
        pix.save(output_path)

        return pix.width, pix.height

    def get_page_info(self, page_num=0):
        """
        Get information about a specific page

        Args:
            page_num: Page number (0-indexed), default first page

        Returns:
            Dict with page information (width, height, rotation, number, total_pages)
        """
        page = self.doc[page_num]
        return {
            'width': page.rect.width,
            'height': page.rect.height,
            'rotation': page.rotation,
            'number': page_num + 1,
            'total_pages': len(self.doc)
        }

    def create_multi_resolution_thumbnails(self, output_folder, page_num=0, sizes=(150, 300, 600)):
        """
        Create multiple thumbnails at different resolutions

        Args:
            output_folder: Folder path to save thumbnails (string or Path)
            page_num: Page number (0-indexed), default first page
            sizes: List of widths in pixels, default [150, 300, 600]

        Returns:
            Dict mapping each size to thumbnail info
        """
        output_folder = Path(output_folder)
        output_folder.mkdir(exist_ok=True, parents=True)

        results = {}
        for size in sizes:
            output_path = output_folder / f"thumb_{size}px.png"
            info = self.create_thumbnail(output_path, page_num=page_num, width=size)
            results[size] = info

        return results

    def close(self):
        """Close the PDF document and free resources"""
        self.doc.close()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()


# Example usage
if __name__ == "__main__":
    # Basic usage with context manager
    with PDFThumbnailGenerator("example.pdf") as gen:
        # Standard thumbnail
        gen.create_thumbnail("thumb_standard.png", page_num=0, width=200)

        # Rotated thumbnail
        gen.create_thumbnail("thumb_rotated.png", page_num=0, width=200, rotation=90)

        # Zoomed thumbnail (2x zoom)
        gen.create_thumbnail("thumb_zoomed.png", page_num=0, width=200, zoom_factor=2.0)

        # Cropped/zoomed on specific area (x0, y0, x1, y1)
        gen.create_cropped_thumbnail("thumb_crop.png", crop_rect=(100, 100, 400, 400), page_num=0, width=300)

        # Multiple resolutions
        gen.create_multi_resolution_thumbnails("thumbnails/", page_num=0, sizes=[150, 300, 600])

        # Get page information
        info = gen.get_page_info(page_num=0)
        print(f"Page info: {info}")

@@ -4,9 +4,10 @@ Password security utilities using bcrypt for secure password hashing.
This module provides secure password hashing and verification functions
using the bcrypt algorithm with automatic salt generation.
"""
+import re
+import uuid
+
import bcrypt
-from typing import Union


def hash_password(password: str) -> str:
@@ -72,3 +73,32 @@ def verify_password(password: str, hashed_password: str) -> bool:
        raise RuntimeError(f"Invalid hash format: {str(e)}")
    except Exception as e:
        raise RuntimeError(f"Failed to verify password: {str(e)}")
+
+
+def generate_uuid_filename() -> str:
+    """Generate a unique filename using UUID4."""
+    return str(uuid.uuid4())
+
+
+def safe_connection_string(connection_string: str) -> str:
+    """
+    Mask the password in a MongoDB connection string.
+
+    Args:
+        connection_string (str): The complete MongoDB connection string
+
+    Returns:
+        str: The connection string with password replaced by asterisks
+
+    Example:
+        >>> safe_connection_string("mongodb://admin:password123@mongodb:27017/mydocmanager?authSource=admin")
+        "mongodb://admin:*****@mongodb:27017/mydocmanager?authSource=admin"
+    """
+    # Pattern to detect password in MongoDB URL
+    # Format: mongodb://username:password@host:port/database
+    pattern = r'(mongodb://[^:]+:)([^@]+)(@.*)'
+
+    # Replace password with asterisks
+    masked_string = re.sub(pattern, r'\1*****\3', connection_string)
+
+    return masked_string

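A quick, hedged example of the connection-string masking helper as it might be used around logging; the URI is illustrative and the import path is assumed from the module above:

```python
from app.utils.security import safe_connection_string  # import path assumed

# Illustrative only: keep credentials out of application logs.
uri = "mongodb://admin:password123@mongodb:27017/mydocmanager?authSource=admin"
print(safe_connection_string(uri))
# -> mongodb://admin:*****@mongodb:27017/mydocmanager?authSource=admin
```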
@@ -5,10 +5,16 @@ email-validator==2.3.0
fastapi==0.116.1
httptools==0.6.4
motor==3.7.1
+pikepdf==9.11.0
+pillow==11.3.0
pydantic==2.11.9
PyJWT==2.10.1
pymongo==4.15.0
+PyMuPDF==1.26.4
+pypandoc==1.15
+python-multipart==0.0.20
redis==6.4.0
+reportlab==4.4.4
uvicorn==0.35.0
python-magic==0.4.27
watchdog==6.0.0

@@ -1,12 +1,93 @@
-# React + Vite
-
-This template provides a minimal setup to get React working in Vite with HMR and some ESLint rules.
-
-Currently, two official plugins are available:
-
-- [@vitejs/plugin-react](https://github.com/vitejs/vite-plugin-react/blob/main/packages/plugin-react) uses [Babel](https://babeljs.io/) for Fast Refresh
-- [@vitejs/plugin-react-swc](https://github.com/vitejs/vite-plugin-react/blob/main/packages/plugin-react-swc) uses [SWC](https://swc.rs/) for Fast Refresh
-
-## Expanding the ESLint configuration
-
-If you are developing a production application, we recommend using TypeScript with type-aware lint rules enabled. Check out the [TS template](https://github.com/vitejs/vite/tree/main/packages/create-vite/template-react-ts) for information on how to integrate TypeScript and [`typescript-eslint`](https://typescript-eslint.io) in your project.
+# MyDocManager Frontend
+
+## Overview
+MyDocManager Frontend is a modern web application built with React and Vite that serves as the user interface for the MyDocManager document management system. The application provides a seamless experience for users to manage, process, and organize their documents with an intuitive and responsive interface.
+
+## Project Structure
+frontend/
+├── public/              # Public assets and static files
+├── src/                 # Source code
+│   ├── assets/          # Icons, images, and other static assets
+│   ├── components/      # Reusable UI components
+│   │   ├── auth/        # Authentication-related components
+│   │   └── common/      # Shared components (Header, Layout, etc.)
+│   ├── contexts/        # React contexts for state management
+│   ├── hooks/           # Custom React hooks
+│   ├── pages/           # Page components representing full views
+│   ├── services/        # API service interfaces
+│   └── utils/           # Utility functions and helpers
+├── Dockerfile           # Container configuration for deployment
+├── package.json         # Dependencies and scripts
+├── tailwind.config.js   # Tailwind CSS configuration
+└── vite.config.js       # Vite bundler configuration
+
+## Key Components
+
+### Authentication
+- **AuthContext**: Provides authentication state and methods throughout the application
+- **AuthLayout**: Layout wrapper specifically for authentication screens
+- **LoginForm**: Form component for user authentication
+- **ProtectedRoute**: Route guard that ensures authenticated access to protected pages
+
+### UI Components
+- **Layout**: Main application layout structure with menu and content areas
+- **Header**: Application header with navigation and user controls
+- **Menu**: Side navigation menu with application links
+- **ThemeSwitcher**: Toggle for switching between light and dark themes
+
+### Pages
+- **LoginPage**: User authentication page
+- **DashboardPage**: Main dashboard view for authenticated users
+
+### Services
+- **authService**: Handles API communication for authentication operations
+- **api**: Base API utility for making HTTP requests to the backend
+
+## Getting Started
+
+### Prerequisites
+- Node.js (latest LTS version)
+- npm or yarn package manager
+
+### Installation
+1. Clone the repository
+2. Navigate to the frontend directory
+3. Install dependencies:
+```
+npm install
+```
+
+### Development
+Run the development server:
+```
+npm run dev
+```
+This will start the application in development mode at http://localhost:5173
+
+### Building for Production
+Create a production build:
+```
+npm run build
+```
+
+## Technologies
+- React 19.1.1
+- Vite 7.1.2
+- Tailwind CSS 4.1.13
+- DaisyUI 5.1.24
+- React Router 7.9.3
+- Axios for API requests
+
+## Features
+- Responsive design with Tailwind CSS
+- Authentication and authorization
+- Light/dark theme support
+- Document management interface
+- Secure API communication
+
+## Project Integration
+This frontend application works in conjunction with the backend services and workers defined in other parts of the MyDocManager project to provide a complete document management solution.

@@ -1,10 +1,10 @@
<!doctype html>
-<html lang="en">
+<html lang="en" data-theme="dark">
  <head>
    <meta charset="UTF-8" />
    <link rel="icon" type="image/svg+xml" href="/vite.svg" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <title>Vite + React</title>
+    <title>My Documents Manager</title>
  </head>
  <body>
    <div id="root"></div>

1126 src/frontend/package-lock.json generated
File diff suppressed because it is too large.
@@ -10,18 +10,26 @@
    "preview": "vite preview"
  },
  "dependencies": {
+    "@tailwindcss/vite": "^4.1.13",
+    "axios": "^1.12.2",
    "react": "^19.1.1",
-    "react-dom": "^19.1.1"
+    "react-dom": "^19.1.1",
+    "react-icons": "^5.5.0",
+    "react-router-dom": "^7.9.3"
  },
  "devDependencies": {
    "@eslint/js": "^9.33.0",
    "@types/react": "^19.1.10",
    "@types/react-dom": "^19.1.7",
    "@vitejs/plugin-react": "^5.0.0",
+    "autoprefixer": "^10.4.21",
+    "daisyui": "^5.1.24",
    "eslint": "^9.33.0",
    "eslint-plugin-react-hooks": "^5.2.0",
    "eslint-plugin-react-refresh": "^0.4.20",
    "globals": "^16.3.0",
+    "postcss": "^8.5.6",
+    "tailwindcss": "^4.1.13",
    "vite": "^7.1.2"
  }
}

@@ -1,42 +1,6 @@
+@import "tailwindcss";
+
#root {
  max-width: 1280px;
  margin: 0 auto;
-  padding: 2rem;
-  text-align: center;
-}
-
-.logo {
-  height: 6em;
-  padding: 1.5em;
-  will-change: filter;
-  transition: filter 300ms;
-}
-.logo:hover {
-  filter: drop-shadow(0 0 2em #646cffaa);
-}
-.logo.react:hover {
-  filter: drop-shadow(0 0 2em #61dafbaa);
-}
-
-@keyframes logo-spin {
-  from {
-    transform: rotate(0deg);
-  }
-  to {
-    transform: rotate(360deg);
-  }
-}
-
-@media (prefers-reduced-motion: no-preference) {
-  a:nth-of-type(2) .logo {
-    animation: logo-spin infinite 20s linear;
-  }
-}
-
-.card {
-  padding: 2em;
-}
-
-.read-the-docs {
-  color: #888;
-}
}

@@ -1,35 +1,36 @@
-import { useState } from 'react'
-import reactLogo from './assets/react.svg'
-import viteLogo from '/vite.svg'
-import './App.css'
+import { BrowserRouter as Router, Routes, Route, Navigate } from 'react-router-dom';
+import { AuthProvider } from './contexts/AuthContext';
+import ProtectedRoute from './components/common/ProtectedRoute';
+import Layout from './components/common/Layout';
+import LoginPage from './pages/LoginPage';
+import DashboardPage from './pages/DashboardPage';
+import DocumentsPage from './pages/DocumentsPage';

function App() {
-  const [count, setCount] = useState(0)

  return (
-    <>
-      <div>
-        <a href="https://vite.dev" target="_blank">
-          <img src={viteLogo} className="logo" alt="Vite logo" />
-        </a>
-        <a href="https://react.dev" target="_blank">
-          <img src={reactLogo} className="logo react" alt="React logo" />
-        </a>
-      </div>
-      <h1>Vite + React</h1>
-      <div className="card">
-        <button onClick={() => setCount((count) => count + 1)}>
-          count is {count}
-        </button>
-        <p>
-          Edit <code>src/App.jsx</code> and save to test HMR
-        </p>
-      </div>
-      <p className="read-the-docs">
-        Click on the Vite and React logos to learn more
-      </p>
-    </>
-  )
+    <AuthProvider>
+      <Router>
+        <div className="App">
+          <Routes>
+            {/* Public Routes */}
+            <Route path="/login" element={<LoginPage />} />
+
+            {/* Protected Routes */}
+            <Route path="/" element={<ProtectedRoute><Layout /></ProtectedRoute>}>
+              <Route index element={<Navigate to="/documents" replace />} />
+              <Route path="documents" element={<DocumentsPage />} />
+              <Route path="dashboard" element={<DashboardPage />} />
+              <Route path="documents" element={<div>Documents Page - Coming Soon</div>} />
+              <Route path="users" element={<div>User Management - Coming Soon</div>} />
+            </Route>
+
+            {/* Catch all route */}
+            <Route path="*" element={<Navigate to="/dashboard" replace />} />
+          </Routes>
+        </div>
+      </Router>
+    </AuthProvider>
+  );
}

-export default App
+export default App;

35 src/frontend/src/assets/icons.jsx Normal file
@@ -0,0 +1,35 @@
// src/assets/icons.jsx

export const SunIcon = (
  <svg
    xmlns="http://www.w3.org/2000/svg"
    className="h-6 w-6"
    fill="none"
    viewBox="0 0 24 24"
    stroke="currentColor"
  >
    <path
      strokeLinecap="round"
      strokeLinejoin="round"
      strokeWidth="2"
      d="M12 3v1m0 16v1m8.66-9h-1M4.34 12h-1m15.36 6.36l-.7-.7M6.34 6.34l-.7-.7m12.02 12.02l-.7-.7M6.34 17.66l-.7-.7M16 12a4 4 0 11-8 0 4 4 0 018 0z"
    />
  </svg>
);

export const MoonIcon = (
  <svg
    xmlns="http://www.w3.org/2000/svg"
    className="h-6 w-6"
    fill="none"
    viewBox="0 0 24 24"
    stroke="currentColor"
  >
    <path
      strokeLinecap="round"
      strokeLinejoin="round"
      strokeWidth="2"
      d="M21 12.79A9 9 0 1111.21 3a7 7 0 0010.79 9.79z"
    />
  </svg>
);

41 src/frontend/src/components/auth/AuthLayout.jsx Normal file
@@ -0,0 +1,41 @@
import React from 'react';
import ThemeSwitcher from "../common/ThemeSwither.jsx";

/**
 * AuthLayout component for authentication pages
 * Provides centered layout with background and responsive design
 *
 * @param {Object} props - Component props
 * @param {React.ReactNode} props.children - Child components to render
 */

const AuthHeader = () => {
    return (
        <div className="navbar bg-base-100 shadow-lg">
            <div className="navbar-start">
                <h1 className="text-xl font-bold">MyDocManager</h1>
            </div>
            <div className="navbar-end">
                <ThemeSwitcher/>
            </div>
        </div>
    )
}

function AuthLayout({children}) {
    return (
        <div className="min-h-screen bg-gradient-to-br from-primary/10 via-base-200 to-secondary/10">
            <AuthHeader/>
            {/* Main container with flex centering */}
            <div className="min-h-screen flex items-center justify-center p-4">
                {/* Content wrapper for responsive spacing */}
                <div>
                    {children}
                </div>
            </div>

        </div>
    );
}

export default AuthLayout;

202 src/frontend/src/components/auth/LoginForm.jsx Normal file
@@ -0,0 +1,202 @@
import React, {useEffect, useState} from 'react';
import {useAuth} from '../../contexts/AuthContext';

/**
 * LoginForm component with DaisyUI styling
 * Handles user authentication with form validation and error display
 */
function LoginForm() {
    const {login, loading, error, clearError} = useAuth();
    const [formData, setFormData] = useState({
        username: '',
        password: '',
    });
    const [formErrors, setFormErrors] = useState({});

    // Clear errors when component mounts or form data changes
    useEffect(() => {
        if (error) {
            const timer = setTimeout(() => {
                clearError();
            }, 5000); // Clear error after 5 seconds

            return () => clearTimeout(timer);
        }
    }, [error, clearError]);

    /**
     * Handle input changes and clear related errors
     * @param {Event} e - Input change event
     */
    const handleInputChange = (e) => {
        const {name, value} = e.target;

        setFormData(prev => ({
            ...prev,
            [name]: value,
        }));

        // Clear field error when user starts typing
        if (formErrors[name]) {
            setFormErrors(prev => ({
                ...prev,
                [name]: '',
            }));
        }

        // Clear global error when user modifies form
        if (error) {
            clearError();
        }
    };

    /**
     * Validate form data before submission
     * @returns {boolean} True if form is valid
     */
    const validateForm = () => {
        const errors = {};

        if (!formData.username.trim()) {
            errors.username = 'Username is required';
        }

        if (!formData.password.trim()) {
            errors.password = 'Password is required';
        } else if (formData.password.length < 3) {
            errors.password = 'Password must be at least 3 characters';
        }

        setFormErrors(errors);
        return Object.keys(errors).length === 0;
    };

    /**
     * Handle form submission
     * @param {Event} e - Form submission event
     */
    const handleSubmit = async (e) => {
        e.preventDefault();

        if (!validateForm()) {
            return;
        }

        const success = await login(formData.username, formData.password);

        if (success) {
            // Reset form on successful login
            setFormData({username: '', password: ''});
            setFormErrors({});
        }
    };

    return (
        <div className="card max-w-md shadow-xl bg-base-100">
            <div className="card-body">
                {/* Card Header */}
                <div className="text-center mb-6">
                    <p className="text-base-content/70 mt-2">Sign in to your account</p>
                </div>

                {/* Global Error Alert */}
                {error && (
                    <div className="alert alert-error mb-4">
                        <svg
                            xmlns="http://www.w3.org/2000/svg"
                            className="stroke-current shrink-0 h-6 w-6"
                            fill="none"
                            viewBox="0 0 24 24"
                        >
                            <path
                                strokeLinecap="round"
                                strokeLinejoin="round"
                                strokeWidth="2"
                                d="M10 14l2-2m0 0l2-2m-2 2l-2-2m2 2l2 2m7-2a9 9 0 11-18 0 9 9 0 0118 0z"
                            />
                        </svg>
                        <span>{error}</span>
                    </div>
                )}

                {/* Login Form */}
                <form onSubmit={handleSubmit}>
                    {/* Username Field */}
                    <div id="username">
                        <label className="label">
                            <span className="label-text font-medium">Username</span>
                        </label>
                        <input
                            type="text"
                            name="username"
                            value={formData.username}
                            onChange={handleInputChange}
                            placeholder="Enter your username"
                            className={`input input-bordered w-full ${
                                formErrors.username ? 'input-error' : ''
                            }`}
                            disabled={loading}
                            autoComplete="username"
                        />
                        {formErrors.username && (
                            <label className="label">
                                <span className="label-text-alt text-error">{formErrors.username}</span>
                            </label>
                        )}
                    </div>

                    {/* Password Field */}
                    <div id="password">
                        <label className="label">
                            <span className="label-text font-medium">Password</span>
                        </label>
                        <input
                            type="password"
                            name="password"
                            value={formData.password}
                            onChange={handleInputChange}
                            placeholder="Enter your password"
                            className={`input input-bordered ${
                                formErrors.password ? 'input-error' : ''
                            }`}
                            disabled={loading}
                            autoComplete="current-password"
                        />
                        {formErrors.password && (
                            <label className="label">
                                <span className="label-text-alt text-error">{formErrors.password}</span>
                            </label>
                        )}
                    </div>

                    {/* Submit Button */}
                    <div className="form-control mt-6">
                        <button
                            type="submit"
                            className={`btn btn-primary w-1/3 btn-hover-effect ${loading ? 'loading' : ''}`}
                            disabled={loading}
                        >
                            {loading ? (
                                <>
                                    <span className="loading loading-spinner loading-sm"></span>
                                    Signing in...
                                </>
                            ) : (
                                'Sign In'
                            )}
                        </button>
                    </div>
                </form>

                {/* Additional Info */}
                <div className="text-center mt-4">
                    <p className="text-sm text-base-content/60">
                        Don't have an account? Contact your administrator.
                    </p>
                </div>
            </div>
        </div>
    );
}

export default LoginForm;

49 src/frontend/src/components/common/Header.jsx Normal file
@@ -0,0 +1,49 @@
import {useAuth} from '../../hooks/useAuth';
import {useNavigate} from 'react-router-dom';
import ThemeSwitcher from "./ThemeSwither.jsx";
import React from "react";

const Header = () => {
    const {user, logout} = useAuth();
    const navigate = useNavigate();

    const handleLogout = async () => {
        await logout();
        navigate('/login');
    };

    return (
        <div className="navbar bg-base-100">
            <div className="navbar-start">
                <h1 className="text-xl font-bold">MyDocManager</h1>
            </div>
            <div className="navbar-end">
                <div className="dropdown dropdown-end">
                    <div tabIndex={0} role="button" className="btn btn-ghost btn-circle avatar">
                        <div className="w-10 rounded-full bg-primary text-primary-content flex items-center justify-center">
                            <span className="text-sm font-medium">
                                {user?.username?.charAt(0).toUpperCase()}
                            </span>
                        </div>
                    </div>
                    <ul tabIndex={0} className="menu menu-sm dropdown-content bg-base-100 rounded-box z-[1] mt-3 w-52 p-2 shadow">
                        <li>
                            <div className="justify-between">
                                Profile
                                <span className="badge badge-sm">{user?.role}</span>
                            </div>
                        </li>
                        <li><a>Settings</a></li>
                        <li><ThemeSwitcher/></li>
                        <li>
                            <button onClick={handleLogout}>Logout</button>
                        </li>
                    </ul>
                </div>

            </div>
        </div>
    );
};

export default Header;

24 src/frontend/src/components/common/Layout.jsx Normal file
@@ -0,0 +1,24 @@
import Header from './Header';
import {Outlet} from 'react-router-dom';
import Menu from "./Menu.jsx";
import styles from './Layout.module.css';

const Layout = () => {
    return (
        <div className={styles.layoutContainer}>
            <Header/>
            <div className="flex flex-1 overflow-hidden">
                <aside className={styles.sidebar}>
                    <Menu/>
                </aside>
                <main className={styles.mainContent}>
                    <div className={styles.mainContentInner}>
                        <Outlet/>
                    </div>
                </main>
            </div>
        </div>
    );
};

export default Layout;

36 src/frontend/src/components/common/Layout.module.css Normal file
@@ -0,0 +1,36 @@
/* Layout Container */
.layoutContainer {
    height: 100vh;
    display: flex;
    flex-direction: column;
    background-color: var(--color-base-200);
}


/* Sidebar */
.sidebar {
    width: 16rem; /* 16rem = 256px */
    background-color: var(--color-base-100);
    box-shadow: 0 10px 15px -3px rgb(0 0 0 / 0.1), 0 4px 6px -4px rgb(0 0 0 / 0.1);
    overflow-y: auto;
}

/* Main Content Area */
.mainContent {
    flex: 1;
    display: flex;
    flex-direction: column;
    min-height: 0; /* Important for flex to work properly with scrolling */
}

/* Main Content Inner Container */
.mainContentInner {
    max-width: 80rem; /* container max-width */
    margin-left: auto;
    margin-right: auto;
    padding: 0.5rem 1rem;
    flex: 1;
    display: flex;
    flex-direction: column;
    min-height: 0; /* Important for flex to work properly with scrolling */
}

18 src/frontend/src/components/common/Menu.jsx Normal file
@@ -0,0 +1,18 @@
import {FaBuffer, FaPlus} from "react-icons/fa6";
import { Link } from "react-router-dom";

const Menu = () => {
    return (
        <div className="p-4">
            <ul className="menu">
                <li className="menu-title">Exploration</li>
                <li><Link to="/dashboard"><FaBuffer/>Dashboard</Link></li>
                <li><Link to="/documents"><FaBuffer/>To Review</Link></li>
                <li className="menu-title mt-4">Categories</li>
                <li><a><i className="fas fa-plus"></i>Item</a></li>
            </ul>
        </div>
    )
}

export default Menu;

69
src/frontend/src/components/common/ProtectedRoute.jsx
Normal file
69
src/frontend/src/components/common/ProtectedRoute.jsx
Normal file
@@ -0,0 +1,69 @@
|
|||||||
|
import React from 'react';
|
||||||
|
import {Navigate, useLocation} from 'react-router-dom';
|
||||||
|
import {useAuth} from '../../contexts/AuthContext';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* ProtectedRoute component to guard routes that require authentication
|
||||||
|
* Redirects to login if user is not authenticated, preserving intended destination
|
||||||
|
*
|
||||||
|
* @param {Object} props - Component props
|
||||||
|
* @param {React.ReactNode} props.children - Child components to render if authenticated
|
||||||
|
* @param {string[]} props.allowedRoles - Array of roles allowed to access this route (optional)
|
||||||
|
*/
|
||||||
|
function ProtectedRoute({children, allowedRoles = []}) {
|
||||||
|
const {isAuthenticated, loading, user} = useAuth();
|
||||||
|
const location = useLocation();
|
||||||
|
|
||||||
|
// Show loading spinner while checking authentication
|
||||||
|
if (loading) {
|
||||||
|
return (
|
||||||
|
<div className="min-h-screen flex items-center justify-center bg-base-200">
|
||||||
|
<div className="text-center">
|
||||||
|
<span className="loading loading-spinner loading-lg text-primary"></span>
|
||||||
|
<p className="text-base-content/70 mt-4">Checking authentication...</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Redirect to login if not authenticated
|
||||||
|
if (!isAuthenticated) {
|
||||||
|
return (
|
||||||
|
<Navigate
|
||||||
|
to="/login"
|
||||||
|
state={{from: location}}
|
||||||
|
replace
|
||||||
|
/>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check role-based access if allowedRoles is specified
|
||||||
|
if (allowedRoles.length > 0 && user && !allowedRoles.includes(user.role)) {
|
||||||
|
return (
|
||||||
|
<div className="min-h-screen flex items-center justify-center bg-base-200">
|
||||||
|
<div className="card w-full max-w-md shadow-xl bg-base-100">
|
||||||
|
<div className="card-body text-center">
|
||||||
|
<div className="text-6xl mb-4">🚫</div>
|
||||||
|
<h2 className="card-title justify-center text-error">Access Denied</h2>
|
||||||
|
<p className="text-base-content/70 mb-4">
|
||||||
|
You don't have permission to access this page.
|
||||||
|
</p>
|
||||||
|
<div className="card-actions justify-center">
|
||||||
|
<button
|
||||||
|
className="btn btn-primary"
|
||||||
|
onClick={() => window.history.back()}
|
||||||
|
>
|
||||||
|
Go Back
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// User is authenticated and authorized, render children
|
||||||
|
return children;
|
||||||
|
}
|
||||||
|
|
||||||
|
export default ProtectedRoute;
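
// Not part of this branch: a minimal sketch of how ProtectedRoute is meant to be wired
// into a react-router-dom v6 tree. Route paths and the login placeholder are assumptions;
// the real route configuration lives elsewhere in the app.
import {BrowserRouter, Routes, Route} from 'react-router-dom';
import {AuthProvider} from '../../contexts/AuthContext';
import ProtectedRoute from './ProtectedRoute';
import Layout from './Layout';

const AppRoutesSketch = () => (
    <AuthProvider>
        <BrowserRouter>
            <Routes>
                {/* Public route */}
                <Route path="/login" element={<div>Login page</div>}/>
                {/* Everything nested under Layout requires authentication */}
                <Route element={<ProtectedRoute><Layout/></ProtectedRoute>}>
                    <Route path="/dashboard" element={<div>Dashboard</div>}/>
                    {/* Role-restricted example using allowedRoles */}
                    <Route path="/admin" element={
                        <ProtectedRoute allowedRoles={['admin']}>
                            <div>Admin area</div>
                        </ProtectedRoute>
                    }/>
                </Route>
            </Routes>
        </BrowserRouter>
    </AuthProvider>
);

export default AppRoutesSketch;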
29 src/frontend/src/components/common/ThemeSwither.jsx (Normal file)
@@ -0,0 +1,29 @@
import {useEffect, useState} from "react";
import {MoonIcon, SunIcon} from "../../assets/icons.jsx";

function ThemeSwitcher() {
    // State to store current theme
    const [theme, setTheme] = useState("light");

    // When theme changes, apply it to <html data-theme="">
    useEffect(() => {
        document.querySelector("html").setAttribute("data-theme", theme);
    }, [theme]);

    // Toggle between light and dark
    const toggleTheme = () => {
        setTheme(theme === "light" ? "dark" : "light");
    };

    return (
        <button
            onClick={toggleTheme}
            className="btn btn-ghost btn-circle"
        >
            {theme === "light" ? MoonIcon : SunIcon}
        </button>
    );
}

export default ThemeSwitcher;
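
// Illustration only, not in this branch: because the theme lives in component state, it
// resets to "light" on reload. A localStorage-backed variant could look like the hook
// below (the "theme" storage key is an assumption).
import {useEffect, useState} from "react";

function usePersistedTheme(storageKey = "theme") {
    // Initialize from localStorage, falling back to the light theme
    const [theme, setTheme] = useState(() => localStorage.getItem(storageKey) || "light");

    useEffect(() => {
        // Apply to <html data-theme=""> and persist the choice
        document.querySelector("html").setAttribute("data-theme", theme);
        localStorage.setItem(storageKey, theme);
    }, [theme, storageKey]);

    return [theme, setTheme];
}

export default usePersistedTheme;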
68 src/frontend/src/components/documents/DeleteConfirmModal.jsx (Normal file)
@@ -0,0 +1,68 @@
/**
 * DeleteConfirmModal Component
 * Modal dialog to confirm document deletion
 */

import React from 'react';

/**
 * DeleteConfirmModal component
 * @param {Object} props
 * @param {boolean} props.isOpen - Whether the modal is open
 * @param {Object|null} props.document - Document to delete
 * @param {function(): void} props.onClose - Callback when modal is closed
 * @param {function(): void} props.onConfirm - Callback when deletion is confirmed
 * @param {boolean} props.isDeleting - Whether deletion is in progress
 * @returns {JSX.Element}
 */
const DeleteConfirmModal = ({
    isOpen,
    document,
    onClose,
    onConfirm,
    isDeleting = false
}) => {
    if (!isOpen || !document) return null;

    return (
        <dialog className="modal modal-open">
            <div className="modal-box">
                <h3 className="font-bold text-lg">Confirm Deletion</h3>
                <p className="py-4">
                    Are you sure you want to delete <span className="font-semibold">"{document.name}"</span>?
                </p>
                <p className="text-sm text-gray-500">
                    This action cannot be undone.
                </p>
                <div className="modal-action">
                    <button
                        className="btn btn-ghost"
                        onClick={onClose}
                        disabled={isDeleting}
                    >
                        Cancel
                    </button>
                    <button
                        className="btn btn-error"
                        onClick={onConfirm}
                        disabled={isDeleting}
                    >
                        {isDeleting ? (
                            <>
                                <span className="loading loading-spinner loading-sm"></span>
                                Deleting...
                            </>
                        ) : (
                            'Delete'
                        )}
                    </button>
                </div>
            </div>
            <form method="dialog" className="modal-backdrop" onClick={onClose}>
                <button disabled={isDeleting}>close</button>
            </form>
        </dialog>
    );
};

export default DeleteConfirmModal;
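
// DocumentGallery (further down in this diff) is the real consumer; as a stand-alone
// sketch only, the open/confirm flow looks roughly like this. The documents array and
// the onDeleted callback are placeholders, not part of this branch.
import {useState} from 'react';
import DeleteConfirmModal from './DeleteConfirmModal';

const DeleteFlowSketch = ({documents, onDeleted}) => {
    const [target, setTarget] = useState(null);       // document pending deletion
    const [isDeleting, setIsDeleting] = useState(false);

    const confirm = async () => {
        setIsDeleting(true);
        await onDeleted(target.id);                    // placeholder for the real delete call
        setIsDeleting(false);
        setTarget(null);                               // closing the modal
    };

    return (
        <>
            {documents.map(doc => (
                <button key={doc.id} className="btn btn-sm" onClick={() => setTarget(doc)}>
                    Delete {doc.name}
                </button>
            ))}
            <DeleteConfirmModal
                isOpen={!!target}
                document={target}
                onClose={() => setTarget(null)}
                onConfirm={confirm}
                isDeleting={isDeleting}
            />
        </>
    );
};

export default DeleteFlowSketch;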
285 src/frontend/src/components/documents/DocumentCard.jsx (Normal file)
@@ -0,0 +1,285 @@
|
|||||||
|
/**
|
||||||
|
* DocumentCard Component
|
||||||
|
* Displays a document as a DaisyUI card with thumbnail and metadata
|
||||||
|
* Supports different view modes: small, large, and detail
|
||||||
|
*/
|
||||||
|
|
||||||
|
import React, {memo, useState, useEffect} from 'react';
|
||||||
|
import {API_BASE_URL} from "../../utils/api.js";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Formats file size to human-readable format
|
||||||
|
* @param {number} bytes - File size in bytes
|
||||||
|
* @returns {string} Formatted file size
|
||||||
|
*/
|
||||||
|
const formatFileSize = (bytes) => {
|
||||||
|
if (bytes === 0) return '0 Bytes';
|
||||||
|
const k = 1024;
|
||||||
|
const sizes = ['Bytes', 'KB', 'MB', 'GB'];
|
||||||
|
const i = Math.floor(Math.log(bytes) / Math.log(k));
|
||||||
|
return Math.round((bytes / Math.pow(k, i)) * 100) / 100 + ' ' + sizes[i];
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Formats date to localized string
|
||||||
|
* @param {string} dateString - ISO date string
|
||||||
|
* @returns {string} Formatted date
|
||||||
|
*/
|
||||||
|
const formatDate = (dateString) => {
|
||||||
|
return new Date(dateString).toLocaleDateString('en-US', {
|
||||||
|
year: 'numeric',
|
||||||
|
month: 'short',
|
||||||
|
day: 'numeric'
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Builds full URL from relative path
|
||||||
|
* @param {string} relativePath - Relative API path
|
||||||
|
* @returns {string} Full URL
|
||||||
|
*/
|
||||||
|
const buildFullUrl = (relativePath) => {
|
||||||
|
if (!relativePath) return '';
|
||||||
|
const baseUrl = import.meta.env.VITE_API_BASE_URL || API_BASE_URL;
|
||||||
|
return `${baseUrl}${relativePath}`;
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Hook to load protected images with bearer token
|
||||||
|
* @param {string} url - Image URL
|
||||||
|
* @returns {Object} { imageSrc, loading, error }
|
||||||
|
*/
|
||||||
|
const useProtectedImage = (url) => {
|
||||||
|
const [imageSrc, setImageSrc] = useState(null);
|
||||||
|
const [loading, setLoading] = useState(true);
|
||||||
|
const [error, setError] = useState(false);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
if (!url) {
|
||||||
|
setLoading(false);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
let objectUrl;
|
||||||
|
|
||||||
|
const fetchImage = async () => {
|
||||||
|
try {
|
||||||
|
const token = localStorage.getItem('access_token');
|
||||||
|
const fullUrl = buildFullUrl(url);
|
||||||
|
|
||||||
|
const response = await fetch(fullUrl, {
|
||||||
|
headers: {
|
||||||
|
'Authorization': `Bearer ${token}`
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
throw new Error('Failed to load image');
|
||||||
|
}
|
||||||
|
|
||||||
|
const blob = await response.blob();
|
||||||
|
objectUrl = URL.createObjectURL(blob);
|
||||||
|
setImageSrc(objectUrl);
|
||||||
|
setLoading(false);
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Error loading thumbnail:', err);
|
||||||
|
setError(true);
|
||||||
|
setLoading(false);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
fetchImage();
|
||||||
|
|
||||||
|
// Cleanup: revoke object URL on unmount
|
||||||
|
return () => {
|
||||||
|
if (objectUrl) {
|
||||||
|
URL.revokeObjectURL(objectUrl);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}, [url]);
|
||||||
|
|
||||||
|
return { imageSrc, loading, error };
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* DocumentCard component
|
||||||
|
* @param {Object} props
|
||||||
|
* @param {Object} props.document - Document object
|
||||||
|
* @param {'small'|'large'|'detail'} props.viewMode - Current view mode
|
||||||
|
* @param {function(): void} props.onEdit - Callback when edit is clicked
|
||||||
|
* @param {function(): void} props.onDelete - Callback when delete is clicked
|
||||||
|
* @returns {JSX.Element}
|
||||||
|
*/
|
||||||
|
const DocumentCard = memo(({document, viewMode, onEdit, onDelete}) => {
|
||||||
|
const {name, originalFileType, thumbnailUrl, pageCount, fileSize, createdAt, tags, categories} = document;
|
||||||
|
|
||||||
|
// Load protected image
|
||||||
|
const { imageSrc, loading, error } = useProtectedImage(thumbnailUrl);
|
||||||
|
|
||||||
|
// Determine card classes based on view mode
|
||||||
|
const getCardClasses = () => {
|
||||||
|
const baseClasses = 'card bg-base-100 shadow-xl hover:shadow-2xl transition-shadow group relative';
|
||||||
|
|
||||||
|
switch (viewMode) {
|
||||||
|
case 'small':
|
||||||
|
return `${baseClasses} w-full`;
|
||||||
|
case 'large':
|
||||||
|
return `${baseClasses} w-full`;
|
||||||
|
case 'detail':
|
||||||
|
return `${baseClasses} w-full`;
|
||||||
|
default:
|
||||||
|
return baseClasses;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Render thumbnail with hover actions
|
||||||
|
const renderThumbnail = () => {
|
||||||
|
const heightClass = viewMode === 'small' ? 'h-48' : viewMode === 'large' ? 'h-64' : 'h-64';
|
||||||
|
|
||||||
|
return (
|
||||||
|
<figure className="relative overflow-hidden">
|
||||||
|
{loading ? (
|
||||||
|
<div className={`w-[200px] ${heightClass} bg-gray-200 animate-pulse flex items-center justify-center`}>
|
||||||
|
<svg className="w-8 h-8 text-gray-400" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||||
|
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M4 16l4.586-4.586a2 2 0 012.828 0L16 16m-2-2l1.586-1.586a2 2 0 012.828 0L20 14m-6-6h.01M6 20h12a2 2 0 002-2V6a2 2 0 00-2-2H6a2 2 0 00-2 2v12a2 2 0 002 2z" />
|
||||||
|
</svg>
|
||||||
|
</div>
|
||||||
|
) : error ? (
|
||||||
|
<div className={`w-[200px] ${heightClass} bg-gray-300 flex flex-col items-center justify-center`}>
|
||||||
|
<svg className="w-8 h-8 text-gray-500 mb-2" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||||
|
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M12 8v4m0 4h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z" />
|
||||||
|
</svg>
|
||||||
|
<span className="text-gray-500 text-xs">Failed to load</span>
|
||||||
|
</div>
|
||||||
|
) : (
|
||||||
|
<img
|
||||||
|
src={imageSrc}
|
||||||
|
alt={`${name}`}
|
||||||
|
className={`object-cover ${heightClass}`}
|
||||||
|
/>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* Hover overlay with actions */}
|
||||||
|
<div className="absolute top-2 right-2 flex gap-2 opacity-0 group-hover:opacity-100 transition-opacity">
|
||||||
|
<button
|
||||||
|
className="btn btn-sm btn-circle btn-primary"
|
||||||
|
onClick={onEdit}
|
||||||
|
aria-label="Edit document"
|
||||||
|
title="Edit"
|
||||||
|
>
|
||||||
|
<svg xmlns="http://www.w3.org/2000/svg" className="h-4 w-4" fill="none" viewBox="0 0 24 24"
|
||||||
|
stroke="currentColor">
|
||||||
|
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2}
|
||||||
|
d="M11 5H6a2 2 0 00-2 2v11a2 2 0 002 2h11a2 2 0 002-2v-5m-1.414-9.414a2 2 0 112.828 2.828L11.828 15H9v-2.828l8.586-8.586z"/>
|
||||||
|
</svg>
|
||||||
|
</button>
|
||||||
|
<button
|
||||||
|
className="btn btn-sm btn-circle btn-error"
|
||||||
|
onClick={onDelete}
|
||||||
|
aria-label="Delete document"
|
||||||
|
title="Delete"
|
||||||
|
>
|
||||||
|
<svg xmlns="http://www.w3.org/2000/svg" className="h-4 w-4" fill="none" viewBox="0 0 24 24"
|
||||||
|
stroke="currentColor">
|
||||||
|
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2}
|
||||||
|
d="M19 7l-.867 12.142A2 2 0 0116.138 21H7.862a2 2 0 01-1.995-1.858L5 7m5 4v6m4-6v6m1-10V4a1 1 0 00-1-1h-4a1 1 0 00-1 1v3M4 7h16"/>
|
||||||
|
</svg>
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* File type badge */}
|
||||||
|
<div className="absolute bottom-2 left-2">
|
||||||
|
<span className="badge badge-accent badge-sm">{originalFileType}</span>
|
||||||
|
</div>
|
||||||
|
</figure>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
// Render card body based on view mode
|
||||||
|
const renderCardBody = () => {
|
||||||
|
if (viewMode === 'small') {
|
||||||
|
return (
|
||||||
|
<div className="card-body p-3">
|
||||||
|
<h3 className="card-title text-sm truncate" title={name}>{name}</h3>
|
||||||
|
<p className="text-xs text-gray-500">{pageCount} page{pageCount > 1 ? 's' : ''}</p>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (viewMode === 'large') {
|
||||||
|
return (
|
||||||
|
<div className="card-body p-4">
|
||||||
|
<h3 className="card-title text-base truncate" title={name}>{name}</h3>
|
||||||
|
<div className="flex flex-wrap gap-1 mb-2">
|
||||||
|
{tags.slice(0, 3).map(tag => (
|
||||||
|
<span key={tag} className="badge badge-primary badge-xs">{tag}</span>
|
||||||
|
))}
|
||||||
|
{tags.length > 3 && (
|
||||||
|
<span className="badge badge-ghost badge-xs">+{tags.length - 3}</span>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
<div className="text-sm space-y-1">
|
||||||
|
<p className="text-gray-500">{pageCount} page{pageCount > 1 ? 's' : ''}</p>
|
||||||
|
<p className="text-gray-500">{formatFileSize(fileSize)}</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Detail mode
|
||||||
|
return (
|
||||||
|
<div className="card-body">
|
||||||
|
<h3 className="card-title text-lg" title={name}>{name}</h3>
|
||||||
|
|
||||||
|
{/* Tags */}
|
||||||
|
{tags.length > 0 && (
|
||||||
|
<div className="flex flex-wrap gap-1 mb-2">
|
||||||
|
{tags.map(tag => (
|
||||||
|
<span key={tag} className="badge badge-primary badge-sm">{tag}</span>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* Categories */}
|
||||||
|
{categories.length > 0 && (
|
||||||
|
<div className="flex flex-wrap gap-1 mb-3">
|
||||||
|
{categories.map(category => (
|
||||||
|
<span key={category} className="badge badge-secondary badge-sm">{category}</span>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* Metadata */}
|
||||||
|
<div className="grid grid-cols-2 gap-2 text-sm">
|
||||||
|
<div>
|
||||||
|
<span className="font-semibold">Pages:</span>
|
||||||
|
<span className="ml-2 text-gray-500">{pageCount}</span>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<span className="font-semibold">Size:</span>
|
||||||
|
<span className="ml-2 text-gray-500">{formatFileSize(fileSize)}</span>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<span className="font-semibold">Type:</span>
|
||||||
|
<span className="ml-2 text-gray-500">{originalFileType}</span>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<span className="font-semibold">Date:</span>
|
||||||
|
<span className="ml-2 text-gray-500">{formatDate(createdAt)}</span>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className={getCardClasses()}>
|
||||||
|
{renderThumbnail()}
|
||||||
|
{renderCardBody()}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
DocumentCard.displayName = 'DocumentCard';
|
||||||
|
|
||||||
|
export default DocumentCard;
164 src/frontend/src/components/documents/DocumentDetailView.jsx (Normal file)
@@ -0,0 +1,164 @@
|
|||||||
|
/**
|
||||||
|
* DocumentDetailView Component
|
||||||
|
* Displays a document in detail mode with all pages visible
|
||||||
|
* This is a placeholder that shows multiple page thumbnails
|
||||||
|
* When real PDF backend is ready, this can be replaced with actual PDF rendering
|
||||||
|
*/
|
||||||
|
|
||||||
|
import React from 'react';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Formats file size to human-readable format
|
||||||
|
* @param {number} bytes - File size in bytes
|
||||||
|
* @returns {string} Formatted file size
|
||||||
|
*/
|
||||||
|
const formatFileSize = (bytes) => {
|
||||||
|
if (bytes === 0) return '0 Bytes';
|
||||||
|
const k = 1024;
|
||||||
|
const sizes = ['Bytes', 'KB', 'MB', 'GB'];
|
||||||
|
const i = Math.floor(Math.log(bytes) / Math.log(k));
|
||||||
|
return Math.round((bytes / Math.pow(k, i)) * 100) / 100 + ' ' + sizes[i];
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Formats date to localized string
|
||||||
|
* @param {string} dateString - ISO date string
|
||||||
|
* @returns {string} Formatted date
|
||||||
|
*/
|
||||||
|
const formatDate = (dateString) => {
|
||||||
|
return new Date(dateString).toLocaleDateString('en-US', {
|
||||||
|
year: 'numeric',
|
||||||
|
month: 'long',
|
||||||
|
day: 'numeric',
|
||||||
|
hour: '2-digit',
|
||||||
|
minute: '2-digit'
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* DocumentDetailView component
|
||||||
|
* @param {Object} props
|
||||||
|
* @param {Object} props.document - Document object
|
||||||
|
* @param {function(): void} props.onEdit - Callback when edit is clicked
|
||||||
|
* @param {function(): void} props.onDelete - Callback when delete is clicked
|
||||||
|
* @returns {JSX.Element}
|
||||||
|
*/
|
||||||
|
const DocumentDetailView = ({ document, onEdit, onDelete }) => {
|
||||||
|
const {
|
||||||
|
name,
|
||||||
|
originalFileType,
|
||||||
|
thumbnailUrl,
|
||||||
|
pageCount,
|
||||||
|
fileSize,
|
||||||
|
createdAt,
|
||||||
|
tags,
|
||||||
|
categories
|
||||||
|
} = document;
|
||||||
|
|
||||||
|
// Generate placeholder pages (in real implementation, these would be actual PDF pages)
|
||||||
|
const pages = Array.from({ length: pageCount }, (_, i) => ({
|
||||||
|
pageNumber: i + 1,
|
||||||
|
thumbnailUrl: thumbnailUrl.replace('Page+1', `Page+${i + 1}`)
|
||||||
|
}));
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="card bg-base-100 shadow-xl">
|
||||||
|
{/* Header with actions */}
|
||||||
|
<div className="card-body">
|
||||||
|
<div className="flex justify-between items-start mb-4">
|
||||||
|
<div className="flex-1">
|
||||||
|
<h2 className="card-title text-2xl mb-2">{name}</h2>
|
||||||
|
|
||||||
|
{/* Tags */}
|
||||||
|
{tags.length > 0 && (
|
||||||
|
<div className="flex flex-wrap gap-2 mb-2">
|
||||||
|
<span className="text-sm font-semibold text-gray-600">Tags:</span>
|
||||||
|
{tags.map(tag => (
|
||||||
|
<span key={tag} className="badge badge-primary">{tag}</span>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* Categories */}
|
||||||
|
{categories.length > 0 && (
|
||||||
|
<div className="flex flex-wrap gap-2 mb-3">
|
||||||
|
<span className="text-sm font-semibold text-gray-600">Categories:</span>
|
||||||
|
{categories.map(category => (
|
||||||
|
<span key={category} className="badge badge-secondary">{category}</span>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Action buttons */}
|
||||||
|
<div className="flex gap-2">
|
||||||
|
<button
|
||||||
|
className="btn btn-primary btn-sm"
|
||||||
|
onClick={onEdit}
|
||||||
|
aria-label="Edit document"
|
||||||
|
>
|
||||||
|
<svg xmlns="http://www.w3.org/2000/svg" className="h-4 w-4" fill="none" viewBox="0 0 24 24" stroke="currentColor">
|
||||||
|
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M11 5H6a2 2 0 00-2 2v11a2 2 0 002 2h11a2 2 0 002-2v-5m-1.414-9.414a2 2 0 112.828 2.828L11.828 15H9v-2.828l8.586-8.586z" />
|
||||||
|
</svg>
|
||||||
|
Edit
|
||||||
|
</button>
|
||||||
|
<button
|
||||||
|
className="btn btn-error btn-sm"
|
||||||
|
onClick={onDelete}
|
||||||
|
aria-label="Delete document"
|
||||||
|
>
|
||||||
|
<svg xmlns="http://www.w3.org/2000/svg" className="h-4 w-4" fill="none" viewBox="0 0 24 24" stroke="currentColor">
|
||||||
|
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M19 7l-.867 12.142A2 2 0 0116.138 21H7.862a2 2 0 01-1.995-1.858L5 7m5 4v6m4-6v6m1-10V4a1 1 0 00-1-1h-4a1 1 0 00-1 1v3M4 7h16" />
|
||||||
|
</svg>
|
||||||
|
Delete
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Metadata grid */}
|
||||||
|
<div className="grid grid-cols-2 md:grid-cols-4 gap-4 mb-6 p-4 bg-base-200 rounded-lg">
|
||||||
|
<div>
|
||||||
|
<span className="text-sm font-semibold text-gray-600">Original Type</span>
|
||||||
|
<p className="text-lg">{originalFileType}</p>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<span className="text-sm font-semibold text-gray-600">Pages</span>
|
||||||
|
<p className="text-lg">{pageCount}</p>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<span className="text-sm font-semibold text-gray-600">File Size</span>
|
||||||
|
<p className="text-lg">{formatFileSize(fileSize)}</p>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<span className="text-sm font-semibold text-gray-600">Created</span>
|
||||||
|
<p className="text-lg">{formatDate(createdAt)}</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Pages preview */}
|
||||||
|
<div>
|
||||||
|
<h3 className="text-lg font-semibold mb-4">Document Pages ({pageCount})</h3>
|
||||||
|
<div className="grid grid-cols-2 md:grid-cols-3 lg:grid-cols-4 gap-4">
|
||||||
|
{pages.map((page) => (
|
||||||
|
<div key={page.pageNumber} className="relative group">
|
||||||
|
<div className="aspect-[3/4] bg-base-200 rounded-lg overflow-hidden shadow-md hover:shadow-xl transition-shadow">
|
||||||
|
<img
|
||||||
|
src={page.thumbnailUrl}
|
||||||
|
alt={`Page ${page.pageNumber}`}
|
||||||
|
className="w-full h-full object-cover"
|
||||||
|
loading="lazy"
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
<div className="text-center mt-2">
|
||||||
|
<span className="text-sm text-gray-600">Page {page.pageNumber}</span>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
export default DocumentDetailView;
181 src/frontend/src/components/documents/DocumentGallery.jsx (Normal file)
@@ -0,0 +1,181 @@
|
|||||||
|
/**
|
||||||
|
* DocumentGallery Component
|
||||||
|
* Main container for displaying documents in different view modes
|
||||||
|
*/
|
||||||
|
|
||||||
|
import React, { useState } from 'react';
|
||||||
|
import DocumentCard from './DocumentCard';
|
||||||
|
import DocumentDetailView from './DocumentDetailView';
|
||||||
|
import ViewModeSwitcher from './ViewModeSwitcher';
|
||||||
|
import EditDocumentModal from './EditDocumentModal';
|
||||||
|
import DeleteConfirmModal from './DeleteConfirmModal';
|
||||||
|
import { useDocuments } from '../../hooks/useDocuments';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* DocumentGallery component
|
||||||
|
* @returns {JSX.Element}
|
||||||
|
*/
|
||||||
|
const DocumentGallery = () => {
|
||||||
|
const { documents, loading, error, updateDocument, deleteDocument } = useDocuments();
|
||||||
|
const [viewMode, setViewMode] = useState('large');
|
||||||
|
const [editingDocument, setEditingDocument] = useState(null);
|
||||||
|
const [deletingDocument, setDeletingDocument] = useState(null);
|
||||||
|
const [isSaving, setIsSaving] = useState(false);
|
||||||
|
const [isDeleting, setIsDeleting] = useState(false);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handles opening the edit modal
|
||||||
|
* @param {Object} document - Document to edit
|
||||||
|
*/
|
||||||
|
const handleEditClick = (document) => {
|
||||||
|
setEditingDocument(document);
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handles opening the delete confirmation modal
|
||||||
|
* @param {Object} document - Document to delete
|
||||||
|
*/
|
||||||
|
const handleDeleteClick = (document) => {
|
||||||
|
setDeletingDocument(document);
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handles saving document changes
|
||||||
|
* @param {Object} updates - Updates object with tags and categories
|
||||||
|
*/
|
||||||
|
const handleSaveEdit = async (updates) => {
|
||||||
|
if (!editingDocument) return;
|
||||||
|
|
||||||
|
setIsSaving(true);
|
||||||
|
const success = await updateDocument(editingDocument.id, updates);
|
||||||
|
setIsSaving(false);
|
||||||
|
|
||||||
|
if (success) {
|
||||||
|
setEditingDocument(null);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handles confirming document deletion
|
||||||
|
*/
|
||||||
|
const handleConfirmDelete = async () => {
|
||||||
|
if (!deletingDocument) return;
|
||||||
|
|
||||||
|
setIsDeleting(true);
|
||||||
|
const success = await deleteDocument(deletingDocument.id);
|
||||||
|
setIsDeleting(false);
|
||||||
|
|
||||||
|
if (success) {
|
||||||
|
setDeletingDocument(null);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Gets grid classes based on view mode
|
||||||
|
* @returns {string} Tailwind CSS classes
|
||||||
|
*/
|
||||||
|
const getGridClasses = () => {
|
||||||
|
switch (viewMode) {
|
||||||
|
case 'small':
|
||||||
|
return 'grid grid-cols-2 sm:grid-cols-3 md:grid-cols-4 lg:grid-cols-5 xl:grid-cols-6 gap-4';
|
||||||
|
case 'large':
|
||||||
|
return 'grid grid-cols-1 sm:grid-cols-2 lg:grid-cols-3 gap-6';
|
||||||
|
case 'detail':
|
||||||
|
return 'flex flex-col gap-6';
|
||||||
|
default:
|
||||||
|
return 'grid grid-cols-1 gap-4';
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Loading state
|
||||||
|
if (loading) {
|
||||||
|
return (
|
||||||
|
<div className="flex justify-center items-center min-h-[400px] ">
|
||||||
|
<span className="loading loading-spinner loading-lg"></span>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Error state
|
||||||
|
if (error) {
|
||||||
|
return (
|
||||||
|
<div className="alert alert-error">
|
||||||
|
<svg xmlns="http://www.w3.org/2000/svg" className="stroke-current shrink-0 h-6 w-6" fill="none" viewBox="0 0 24 24">
|
||||||
|
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth="2" d="M10 14l2-2m0 0l2-2m-2 2l-2-2m2 2l2 2m7-2a9 9 0 11-18 0 9 9 0 0118 0z" />
|
||||||
|
</svg>
|
||||||
|
<span>Error loading documents: {error}</span>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Empty state
|
||||||
|
if (documents.length === 0) {
|
||||||
|
return (
|
||||||
|
<div className="flex flex-col items-center justify-center min-h-[400px] text-center">
|
||||||
|
<svg xmlns="http://www.w3.org/2000/svg" className="h-24 w-24 text-gray-300 mb-4" fill="none" viewBox="0 0 24 24" stroke="currentColor">
|
||||||
|
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={1.5} d="M9 12h6m-6 4h6m2 5H7a2 2 0 01-2-2V5a2 2 0 012-2h5.586a1 1 0 01.707.293l5.414 5.414a1 1 0 01.293.707V19a2 2 0 01-2 2z" />
|
||||||
|
</svg>
|
||||||
|
<h3 className="text-xl font-semibold mb-2">No documents yet</h3>
|
||||||
|
<p className="text-gray-500">Upload your first document to get started</p>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="h-full flex flex-col">
|
||||||
|
{/* Header with view mode switcher - Always visible */}
|
||||||
|
<div className="flex justify-between items-center mb-6 flex-shrink-0">
|
||||||
|
<div>
|
||||||
|
<p className="text-gray-500">{documents.length} document{documents.length !== 1 ? 's' : ''}</p>
|
||||||
|
</div>
|
||||||
|
<ViewModeSwitcher
|
||||||
|
currentMode={viewMode}
|
||||||
|
onModeChange={setViewMode}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Document grid/list - Scrollable */}
|
||||||
|
<div className="flex-1 overflow-y-auto">
|
||||||
|
<div className={getGridClasses()}>
|
||||||
|
{documents.map(document => (
|
||||||
|
viewMode === 'detail' ? (
|
||||||
|
<DocumentDetailView
|
||||||
|
key={document.id}
|
||||||
|
document={document}
|
||||||
|
onEdit={() => handleEditClick(document)}
|
||||||
|
onDelete={() => handleDeleteClick(document)}
|
||||||
|
/>
|
||||||
|
) : (
|
||||||
|
<DocumentCard
|
||||||
|
key={document.id}
|
||||||
|
document={document}
|
||||||
|
viewMode={viewMode}
|
||||||
|
onEdit={() => handleEditClick(document)}
|
||||||
|
onDelete={() => handleDeleteClick(document)}
|
||||||
|
/>
|
||||||
|
)
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Modals */}
|
||||||
|
<EditDocumentModal
|
||||||
|
isOpen={!!editingDocument}
|
||||||
|
document={editingDocument}
|
||||||
|
onClose={() => setEditingDocument(null)}
|
||||||
|
onSave={handleSaveEdit}
|
||||||
|
isSaving={isSaving}
|
||||||
|
/>
|
||||||
|
|
||||||
|
<DeleteConfirmModal
|
||||||
|
isOpen={!!deletingDocument}
|
||||||
|
document={deletingDocument}
|
||||||
|
onClose={() => setDeletingDocument(null)}
|
||||||
|
onConfirm={handleConfirmDelete}
|
||||||
|
isDeleting={isDeleting}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
export default DocumentGallery;
225 src/frontend/src/components/documents/EditDocumentModal.jsx (Normal file)
@@ -0,0 +1,225 @@
|
|||||||
|
/**
|
||||||
|
* EditDocumentModal Component
|
||||||
|
* Modal dialog for editing document tags and categories
|
||||||
|
*/
|
||||||
|
|
||||||
|
import React, { useState, useEffect } from 'react';
|
||||||
|
import { getAvailableTags, getAvailableCategories } from '../../services/documentService';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* EditDocumentModal component
|
||||||
|
* @param {Object} props
|
||||||
|
* @param {boolean} props.isOpen - Whether the modal is open
|
||||||
|
* @param {Object|null} props.document - Document to edit
|
||||||
|
* @param {function(): void} props.onClose - Callback when modal is closed
|
||||||
|
* @param {function(Object): void} props.onSave - Callback when changes are saved
|
||||||
|
* @param {boolean} props.isSaving - Whether save is in progress
|
||||||
|
* @returns {JSX.Element}
|
||||||
|
*/
|
||||||
|
const EditDocumentModal = ({
|
||||||
|
isOpen,
|
||||||
|
document,
|
||||||
|
onClose,
|
||||||
|
onSave,
|
||||||
|
isSaving = false
|
||||||
|
}) => {
|
||||||
|
const [selectedTags, setSelectedTags] = useState([]);
|
||||||
|
const [selectedCategories, setSelectedCategories] = useState([]);
|
||||||
|
const [availableTags, setAvailableTags] = useState([]);
|
||||||
|
const [availableCategories, setAvailableCategories] = useState([]);
|
||||||
|
const [newTag, setNewTag] = useState('');
|
||||||
|
const [newCategory, setNewCategory] = useState('');
|
||||||
|
|
||||||
|
// Load available tags and categories
|
||||||
|
useEffect(() => {
|
||||||
|
const loadOptions = async () => {
|
||||||
|
const [tags, categories] = await Promise.all([
|
||||||
|
getAvailableTags(),
|
||||||
|
getAvailableCategories()
|
||||||
|
]);
|
||||||
|
setAvailableTags(tags);
|
||||||
|
setAvailableCategories(categories);
|
||||||
|
};
|
||||||
|
loadOptions();
|
||||||
|
}, []);
|
||||||
|
|
||||||
|
// Initialize selected values when document changes
|
||||||
|
useEffect(() => {
|
||||||
|
if (document) {
|
||||||
|
setSelectedTags(document.tags || []);
|
||||||
|
setSelectedCategories(document.categories || []);
|
||||||
|
}
|
||||||
|
}, [document]);
|
||||||
|
|
||||||
|
const handleAddTag = (tag) => {
|
||||||
|
if (tag && !selectedTags.includes(tag)) {
|
||||||
|
setSelectedTags([...selectedTags, tag]);
|
||||||
|
}
|
||||||
|
setNewTag('');
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleRemoveTag = (tag) => {
|
||||||
|
setSelectedTags(selectedTags.filter(t => t !== tag));
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleAddCategory = (category) => {
|
||||||
|
if (category && !selectedCategories.includes(category)) {
|
||||||
|
setSelectedCategories([...selectedCategories, category]);
|
||||||
|
}
|
||||||
|
setNewCategory('');
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleRemoveCategory = (category) => {
|
||||||
|
setSelectedCategories(selectedCategories.filter(c => c !== category));
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleSave = () => {
|
||||||
|
onSave({
|
||||||
|
tags: selectedTags,
|
||||||
|
categories: selectedCategories
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
if (!isOpen || !document) return null;
|
||||||
|
|
||||||
|
return (
|
||||||
|
<dialog className="modal modal-open">
|
||||||
|
<div className="modal-box max-w-2xl">
|
||||||
|
<h3 className="font-bold text-lg mb-4">Edit Document</h3>
|
||||||
|
|
||||||
|
<div className="mb-4">
|
||||||
|
<p className="text-sm text-gray-500">
|
||||||
|
Document: <span className="font-semibold">{document.name}</span>
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Tags Section */}
|
||||||
|
<div className="mb-6">
|
||||||
|
<label className="label">
|
||||||
|
<span className="label-text font-semibold">Tags</span>
|
||||||
|
</label>
|
||||||
|
|
||||||
|
{/* Selected Tags */}
|
||||||
|
<div className="flex flex-wrap gap-2 mb-3">
|
||||||
|
{selectedTags.map(tag => (
|
||||||
|
<div key={tag} className="badge badge-primary gap-2">
|
||||||
|
{tag}
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
className="btn btn-ghost btn-xs"
|
||||||
|
onClick={() => handleRemoveTag(tag)}
|
||||||
|
disabled={isSaving}
|
||||||
|
>
|
||||||
|
✕
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Add Tag */}
|
||||||
|
<div className="flex gap-2">
|
||||||
|
<select
|
||||||
|
className="select select-bordered flex-1"
|
||||||
|
value={newTag}
|
||||||
|
onChange={(e) => setNewTag(e.target.value)}
|
||||||
|
disabled={isSaving}
|
||||||
|
>
|
||||||
|
<option value="">Select a tag...</option>
|
||||||
|
{availableTags
|
||||||
|
.filter(tag => !selectedTags.includes(tag))
|
||||||
|
.map(tag => (
|
||||||
|
<option key={tag} value={tag}>{tag}</option>
|
||||||
|
))
|
||||||
|
}
|
||||||
|
</select>
|
||||||
|
<button
|
||||||
|
className="btn btn-primary"
|
||||||
|
onClick={() => handleAddTag(newTag)}
|
||||||
|
disabled={!newTag || isSaving}
|
||||||
|
>
|
||||||
|
Add
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Categories Section */}
|
||||||
|
<div className="mb-6">
|
||||||
|
<label className="label">
|
||||||
|
<span className="label-text font-semibold">Categories</span>
|
||||||
|
</label>
|
||||||
|
|
||||||
|
{/* Selected Categories */}
|
||||||
|
<div className="flex flex-wrap gap-2 mb-3">
|
||||||
|
{selectedCategories.map(category => (
|
||||||
|
<div key={category} className="badge badge-secondary gap-2">
|
||||||
|
{category}
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
className="btn btn-ghost btn-xs"
|
||||||
|
onClick={() => handleRemoveCategory(category)}
|
||||||
|
disabled={isSaving}
|
||||||
|
>
|
||||||
|
✕
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Add Category */}
|
||||||
|
<div className="flex gap-2">
|
||||||
|
<select
|
||||||
|
className="select select-bordered flex-1"
|
||||||
|
value={newCategory}
|
||||||
|
onChange={(e) => setNewCategory(e.target.value)}
|
||||||
|
disabled={isSaving}
|
||||||
|
>
|
||||||
|
<option value="">Select a category...</option>
|
||||||
|
{availableCategories
|
||||||
|
.filter(cat => !selectedCategories.includes(cat))
|
||||||
|
.map(cat => (
|
||||||
|
<option key={cat} value={cat}>{cat}</option>
|
||||||
|
))
|
||||||
|
}
|
||||||
|
</select>
|
||||||
|
<button
|
||||||
|
className="btn btn-secondary"
|
||||||
|
onClick={() => handleAddCategory(newCategory)}
|
||||||
|
disabled={!newCategory || isSaving}
|
||||||
|
>
|
||||||
|
Add
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="modal-action">
|
||||||
|
<button
|
||||||
|
className="btn btn-ghost"
|
||||||
|
onClick={onClose}
|
||||||
|
disabled={isSaving}
|
||||||
|
>
|
||||||
|
Cancel
|
||||||
|
</button>
|
||||||
|
<button
|
||||||
|
className="btn btn-primary"
|
||||||
|
onClick={handleSave}
|
||||||
|
disabled={isSaving}
|
||||||
|
>
|
||||||
|
{isSaving ? (
|
||||||
|
<>
|
||||||
|
<span className="loading loading-spinner loading-sm"></span>
|
||||||
|
Saving...
|
||||||
|
</>
|
||||||
|
) : (
|
||||||
|
'Save Changes'
|
||||||
|
)}
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<form method="dialog" className="modal-backdrop" onClick={onClose}>
|
||||||
|
<button disabled={isSaving}>close</button>
|
||||||
|
</form>
|
||||||
|
</dialog>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
export default EditDocumentModal;
51 src/frontend/src/components/documents/ViewModeSwitcher.jsx (Normal file)
@@ -0,0 +1,51 @@
/**
 * ViewModeSwitcher Component
 * Allows users to switch between different view modes (small, large, detail)
 */

import React from 'react';
import {FaList} from "react-icons/fa6";
import {FaTh, FaThLarge} from "react-icons/fa";

/**
 * @typedef {'small' | 'large' | 'detail'} ViewMode
 */

/**
 * ViewModeSwitcher component
 * @param {Object} props
 * @param {ViewMode} props.currentMode - Current active view mode
 * @param {function(ViewMode): void} props.onModeChange - Callback when mode changes
 * @returns {JSX.Element}
 */
const ViewModeSwitcher = ({ currentMode, onModeChange }) => {
    const modes = [
        { id: 'small', label: 'Small', icon: FaTh },
        { id: 'large', label: 'Large', icon: FaThLarge },
        { id: 'detail', label: 'Detail', icon: FaList }
    ];

    return (
        <div className="flex gap-2">
            {modes.map(mode => {
                const IconComponent = mode.icon;
                return (
                    <button
                        key={mode.id}
                        onClick={() => onModeChange(mode.id)}
                        className={`btn btn-sm ${
                            currentMode === mode.id ? 'btn-primary' : 'btn-ghost'
                        }`}
                        aria-label={`Switch to ${mode.label} view`}
                        title={`${mode.label} view`}
                    >
                        <IconComponent />
                        <span className="hidden sm:inline ml-1">{mode.label}</span>
                    </button>
                );
            })}
        </div>
    );
};

export default ViewModeSwitcher;
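
// Illustration only: the switcher is fully controlled, so a host component just keeps the
// current mode in state. DocumentGallery above does exactly this; the component name here
// is made up.
import {useState} from 'react';
import ViewModeSwitcher from './ViewModeSwitcher';

const GalleryToolbarSketch = () => {
    const [viewMode, setViewMode] = useState('large');   // 'small' | 'large' | 'detail'
    return <ViewModeSwitcher currentMode={viewMode} onModeChange={setViewMode}/>;
};

export default GalleryToolbarSketch;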
205 src/frontend/src/contexts/AuthContext.jsx (Normal file)
@@ -0,0 +1,205 @@
|
|||||||
|
import React, {createContext, useContext, useEffect, useReducer} from 'react';
|
||||||
|
import authService from '../services/authService';
|
||||||
|
|
||||||
|
// Auth state actions
|
||||||
|
const AUTH_ACTIONS = {
|
||||||
|
LOGIN_START: 'LOGIN_START',
|
||||||
|
LOGIN_SUCCESS: 'LOGIN_SUCCESS',
|
||||||
|
LOGIN_FAILURE: 'LOGIN_FAILURE',
|
||||||
|
LOGOUT: 'LOGOUT',
|
||||||
|
LOAD_USER: 'LOAD_USER',
|
||||||
|
CLEAR_ERROR: 'CLEAR_ERROR',
|
||||||
|
};
|
||||||
|
|
||||||
|
// Initial state
|
||||||
|
const initialState = {
|
||||||
|
user: null,
|
||||||
|
token: null,
|
||||||
|
isAuthenticated: false,
|
||||||
|
loading: true, // Loading true initially to check stored auth
|
||||||
|
error: null,
|
||||||
|
};
|
||||||
|
|
||||||
|
// Auth reducer to manage state transitions
|
||||||
|
function authReducer(state, action) {
|
||||||
|
switch (action.type) {
|
||||||
|
case AUTH_ACTIONS.LOGIN_START:
|
||||||
|
return {
|
||||||
|
...state,
|
||||||
|
loading: true,
|
||||||
|
error: null,
|
||||||
|
};
|
||||||
|
|
||||||
|
case AUTH_ACTIONS.LOGIN_SUCCESS:
|
||||||
|
return {
|
||||||
|
...state,
|
||||||
|
user: action.payload.user,
|
||||||
|
token: action.payload.token,
|
||||||
|
isAuthenticated: true,
|
||||||
|
loading: false,
|
||||||
|
error: null,
|
||||||
|
};
|
||||||
|
|
||||||
|
case AUTH_ACTIONS.LOGIN_FAILURE:
|
||||||
|
return {
|
||||||
|
...state,
|
||||||
|
user: null,
|
||||||
|
token: null,
|
||||||
|
isAuthenticated: false,
|
||||||
|
loading: false,
|
||||||
|
error: action.payload.error,
|
||||||
|
};
|
||||||
|
|
||||||
|
case AUTH_ACTIONS.LOGOUT:
|
||||||
|
return {
|
||||||
|
...state,
|
||||||
|
user: null,
|
||||||
|
token: null,
|
||||||
|
isAuthenticated: false,
|
||||||
|
loading: false,
|
||||||
|
error: null,
|
||||||
|
};
|
||||||
|
|
||||||
|
case AUTH_ACTIONS.LOAD_USER:
|
||||||
|
return {
|
||||||
|
...state,
|
||||||
|
user: action.payload.user,
|
||||||
|
token: action.payload.token,
|
||||||
|
isAuthenticated: !!action.payload.token,
|
||||||
|
loading: false,
|
||||||
|
error: null,
|
||||||
|
};
|
||||||
|
|
||||||
|
case AUTH_ACTIONS.CLEAR_ERROR:
|
||||||
|
return {
|
||||||
|
...state,
|
||||||
|
error: null,
|
||||||
|
};
|
||||||
|
|
||||||
|
default:
|
||||||
|
return state;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create context
|
||||||
|
const AuthContext = createContext(null);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* AuthProvider component to wrap the app and provide authentication state
|
||||||
|
* @param {Object} props - Component props
|
||||||
|
* @param {React.ReactNode} props.children - Child components
|
||||||
|
*/
|
||||||
|
export function AuthProvider({children}) {
|
||||||
|
const [state, dispatch] = useReducer(authReducer, initialState);
|
||||||
|
|
||||||
|
// Load stored authentication data on app startup
|
||||||
|
useEffect(() => {
|
||||||
|
const loadStoredAuth = () => {
|
||||||
|
const token = authService.getStoredToken();
|
||||||
|
const user = authService.getStoredUser();
|
||||||
|
|
||||||
|
dispatch({
|
||||||
|
type: AUTH_ACTIONS.LOAD_USER,
|
||||||
|
payload: {user, token},
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
loadStoredAuth();
|
||||||
|
}, []);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Login function to authenticate user
|
||||||
|
* @param {string} username - User's username
|
||||||
|
* @param {string} password - User's password
|
||||||
|
* @returns {Promise<boolean>} True if login successful
|
||||||
|
*/
|
||||||
|
const login = async (username, password) => {
|
||||||
|
try {
|
||||||
|
dispatch({type: AUTH_ACTIONS.LOGIN_START});
|
||||||
|
|
||||||
|
const {access_token, user} = await authService.login(username, password);
|
||||||
|
|
||||||
|
dispatch({
|
||||||
|
type: AUTH_ACTIONS.LOGIN_SUCCESS,
|
||||||
|
payload: {user, token: access_token},
|
||||||
|
});
|
||||||
|
|
||||||
|
return true;
|
||||||
|
} catch (error) {
|
||||||
|
dispatch({
|
||||||
|
type: AUTH_ACTIONS.LOGIN_FAILURE,
|
||||||
|
payload: {error: error.message},
|
||||||
|
});
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Logout function to clear authentication state
|
||||||
|
*/
|
||||||
|
const logout = () => {
|
||||||
|
authService.logout();
|
||||||
|
dispatch({type: AUTH_ACTIONS.LOGOUT});
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Clear error message from state
|
||||||
|
*/
|
||||||
|
const clearError = () => {
|
||||||
|
dispatch({type: AUTH_ACTIONS.CLEAR_ERROR});
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Refresh user data from API
|
||||||
|
*/
|
||||||
|
const refreshUser = async () => {
|
||||||
|
try {
|
||||||
|
const user = await authService.getCurrentUser();
|
||||||
|
dispatch({
|
||||||
|
type: AUTH_ACTIONS.LOGIN_SUCCESS,
|
||||||
|
payload: {user, token: state.token},
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Failed to refresh user data:', error);
|
||||||
|
// Don't logout on refresh failure, just log error
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Context value object
|
||||||
|
const value = {
|
||||||
|
// State
|
||||||
|
user: state.user,
|
||||||
|
token: state.token,
|
||||||
|
isAuthenticated: state.isAuthenticated,
|
||||||
|
loading: state.loading,
|
||||||
|
error: state.error,
|
||||||
|
|
||||||
|
// Actions
|
||||||
|
login,
|
||||||
|
logout,
|
||||||
|
clearError,
|
||||||
|
refreshUser,
|
||||||
|
};
|
||||||
|
|
||||||
|
return (
|
||||||
|
<AuthContext.Provider value={value}>
|
||||||
|
{children}
|
||||||
|
</AuthContext.Provider>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Custom hook to use authentication context
|
||||||
|
* @returns {Object} Auth context value
|
||||||
|
* @throws {Error} If used outside AuthProvider
|
||||||
|
*/
|
||||||
|
export function useAuth() {
|
||||||
|
const context = useContext(AuthContext);
|
||||||
|
|
||||||
|
if (!context) {
|
||||||
|
throw new Error('useAuth must be used within an AuthProvider');
|
||||||
|
}
|
||||||
|
|
||||||
|
return context;
|
||||||
|
}
|
||||||
|
export { AuthContext };
12 src/frontend/src/hooks/useAuth.js (Normal file)
@@ -0,0 +1,12 @@
import {useContext} from 'react';
import {AuthContext} from '../contexts/AuthContext';

export const useAuth = () => {
    const context = useContext(AuthContext);

    if (!context) {
        throw new Error('useAuth must be used within an AuthProvider');
    }

    return context;
};
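
// Not part of this branch: a minimal sketch of consuming the hook in a login form. The
// component name and post-login navigation are assumptions; login, loading, and error
// come from the AuthContext value defined above.
import {useState} from 'react';
import {useAuth} from '../hooks/useAuth';

const LoginFormSketch = () => {
    const {login, loading, error} = useAuth();
    const [username, setUsername] = useState('');
    const [password, setPassword] = useState('');

    const handleSubmit = async (e) => {
        e.preventDefault();
        const ok = await login(username, password);
        if (ok) {
            // navigate to the intended destination, e.g. with useNavigate()
        }
    };

    return (
        <form onSubmit={handleSubmit}>
            <input value={username} onChange={(e) => setUsername(e.target.value)}/>
            <input type="password" value={password} onChange={(e) => setPassword(e.target.value)}/>
            {error && <p className="text-error">{error}</p>}
            <button className="btn btn-primary" disabled={loading}>Sign in</button>
        </form>
    );
};

export default LoginFormSketch;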
85 src/frontend/src/hooks/useDocuments.js (Normal file)
@@ -0,0 +1,85 @@
/**
 * Custom hook for managing documents
 * Handles fetching, updating, and deleting documents
 */

import { useState, useEffect, useCallback } from 'react';
import * as documentService from '../services/documentService';

/**
 * Hook for managing documents state and operations
 * @returns {Object} Documents state and operations
 */
export const useDocuments = () => {
    const [documents, setDocuments] = useState([]);
    const [loading, setLoading] = useState(true);
    const [error, setError] = useState(null);

    /**
     * Fetches all documents from the service
     */
    const fetchDocuments = useCallback(async () => {
        try {
            setLoading(true);
            setError(null);
            const data = await documentService.getAllDocuments();
            setDocuments(data);
        } catch (err) {
            setError(err.message);
            console.error('Error fetching documents:', err);
        } finally {
            setLoading(false);
        }
    }, []);

    /**
     * Updates a document's tags and categories
     * @param {string} id - Document ID
     * @param {Object} updates - Updates object
     * @returns {Promise<boolean>} Success status
     */
    const updateDocument = useCallback(async (id, updates) => {
        try {
            const updatedDoc = await documentService.updateDocument(id, updates);
            setDocuments(prevDocs =>
                prevDocs.map(doc => (doc.id === id ? updatedDoc : doc))
            );
            return true;
        } catch (err) {
            setError(err.message);
            console.error('Error updating document:', err);
            return false;
        }
    }, []);

    /**
     * Deletes a document
     * @param {string} id - Document ID
     * @returns {Promise<boolean>} Success status
     */
    const deleteDocument = useCallback(async (id) => {
        try {
            await documentService.deleteDocument(id);
            setDocuments(prevDocs => prevDocs.filter(doc => doc.id !== id));
            return true;
        } catch (err) {
            setError(err.message);
            console.error('Error deleting document:', err);
            return false;
        }
    }, []);

    // Fetch documents on mount
    useEffect(() => {
        fetchDocuments();
    }, [fetchDocuments]);

    return {
        documents,
        loading,
        error,
        fetchDocuments,
        updateDocument,
        deleteDocument
    };
};
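
// Illustration only, not in this branch: callers get both the document list and the
// mutators, as DocumentGallery earlier in this diff shows. A tiny sketch of pushing a tag
// update through the hook (the component name and the 'invoice' tag are made up):
import {useDocuments} from '../hooks/useDocuments';

const AddInvoiceTagButtonSketch = ({doc}) => {
    const {updateDocument} = useDocuments();

    const addTag = async () => {
        // Merge a new tag into the existing ones; updateDocument refreshes local state on success.
        const ok = await updateDocument(doc.id, {
            tags: [...doc.tags, 'invoice'],
            categories: doc.categories,
        });
        if (!ok) console.warn('Update failed; see the hook error state');
    };

    return <button className="btn btn-sm" onClick={addTag}>Tag as invoice</button>;
};

export default AddInvoiceTagButtonSketch;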
@@ -1,68 +1,11 @@
-:root {
-  font-family: system-ui, Avenir, Helvetica, Arial, sans-serif;
-  line-height: 1.5;
-  font-weight: 400;
-
-  color-scheme: light dark;
-  color: rgba(255, 255, 255, 0.87);
-  background-color: #242424;
-
-  font-synthesis: none;
-  text-rendering: optimizeLegibility;
-  -webkit-font-smoothing: antialiased;
-  -moz-osx-font-smoothing: grayscale;
-}
-
-a {
-  font-weight: 500;
-  color: #646cff;
-  text-decoration: inherit;
-}
-a:hover {
-  color: #535bf2;
-}
-
-body {
-  margin: 0;
-  display: flex;
-  place-items: center;
-  min-width: 320px;
-  min-height: 100vh;
-}
-
-h1 {
-  font-size: 3.2em;
-  line-height: 1.1;
-}
-
-button {
-  border-radius: 8px;
-  border: 1px solid transparent;
-  padding: 0.6em 1.2em;
-  font-size: 1em;
-  font-weight: 500;
-  font-family: inherit;
-  background-color: #1a1a1a;
-  cursor: pointer;
-  transition: border-color 0.25s;
-}
-button:hover {
-  border-color: #646cff;
-}
-button:focus,
-button:focus-visible {
-  outline: 4px auto -webkit-focus-ring-color;
-}
-
-@media (prefers-color-scheme: light) {
-  :root {
-    color: #213547;
-    background-color: #ffffff;
-  }
-  a:hover {
-    color: #747bff;
-  }
-  button {
-    background-color: #f9f9f9;
-  }
-}
+@tailwind base;
+@tailwind components;
+@tailwind utilities;
+@plugin "daisyui";
+
+/* Custom styles for the application */
+body {
+  font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, 'Helvetica Neue', Arial, sans-serif;
+  margin: 0;
+}
@@ -1,6 +1,7 @@
 import { StrictMode } from 'react'
 import { createRoot } from 'react-dom/client'
 import './index.css'
+import './App.css'
 import App from './App.jsx'

 createRoot(document.getElementById('root')).render(
239 src/frontend/src/pages/DashboardPage.jsx (Normal file)
@@ -0,0 +1,239 @@
|
|||||||
|
import {useEffect, useState} from 'react';
|
||||||
|
import {useAuth} from '../hooks/useAuth';
|
||||||
|
|
||||||
|
const DashboardPage = () => {
|
||||||
|
const {user} = useAuth();
|
||||||
|
const [stats, setStats] = useState({
|
||||||
|
    totalDocuments: 0,
    processingJobs: 0,
    completedJobs: 0,
    failedJobs: 0
  });

  const [recentFiles, setRecentFiles] = useState([]);
  const [loading, setLoading] = useState(true);

  useEffect(() => {
    // Simulate API calls for dashboard data
    const fetchDashboardData = async () => {
      try {
        // TODO: Replace with actual API calls
        setTimeout(() => {
          setStats({
            totalDocuments: 42,
            processingJobs: 3,
            completedJobs: 38,
            failedJobs: 1
          });

          setRecentFiles([
            {
              id: 1,
              filename: 'invoice_2024.pdf',
              status: 'completed',
              processedAt: '2024-01-15 14:30:00',
              fileType: 'pdf'
            },
            {
              id: 2,
              filename: 'contract_draft.docx',
              status: 'processing',
              processedAt: '2024-01-15 14:25:00',
              fileType: 'docx'
            },
            {
              id: 3,
              filename: 'receipt_scan.jpg',
              status: 'completed',
              processedAt: '2024-01-15 14:20:00',
              fileType: 'image'
            }
          ]);

          setLoading(false);
        }, 1000);
      } catch (error) {
        console.error('Error fetching dashboard data:', error);
        setLoading(false);
      }
    };

    fetchDashboardData();
  }, []);

  const getStatusBadge = (status) => {
    const statusColors = {
      completed: 'badge-success',
      processing: 'badge-warning',
      failed: 'badge-error',
      pending: 'badge-info'
    };

    return `badge ${statusColors[status] || 'badge-neutral'}`;
  };

  const getFileTypeIcon = (fileType) => {
    const icons = {
      pdf: '📄',
      docx: '📝',
      image: '🖼️',
      txt: '📄'
    };

    return icons[fileType] || '📄';
  };

  if (loading) {
    return (
      <div className="flex justify-center items-center h-64">
        <span className="loading loading-spinner loading-lg"></span>
      </div>
    );
  }

  return (
    <div className="space-y-6">
      {/* Welcome Header */}
      <div className="bg-base-100 rounded-lg shadow p-6">
        <h1 className="text-3xl font-bold text-base-content">
          Welcome back, {user?.username}!
        </h1>
        <p className="text-base-content/60 mt-2">
          Here's your document processing overview
        </p>
      </div>

      {/* Stats Cards */}
      <div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-4 gap-4">
        <div className="stat bg-base-100 rounded-lg shadow">
          <div className="stat-figure text-primary">
            <svg className="w-8 h-8" fill="none" stroke="currentColor" viewBox="0 0 24 24">
              <path strokeLinecap="round" strokeLinejoin="round" strokeWidth="2"
                    d="M9 12h6m-6 4h6m2 5H7a2 2 0 01-2-2V5a2 2 0 012-2h5.586a1 1 0 01.707.293l5.414 5.414a1 1 0 01.293.707V19a2 2 0 01-2 2z"/>
            </svg>
          </div>
          <div className="stat-title">Total Documents</div>
          <div className="stat-value text-primary">{stats.totalDocuments}</div>
        </div>

        <div className="stat bg-base-100 rounded-lg shadow">
          <div className="stat-figure text-warning">
            <svg className="w-8 h-8" fill="none" stroke="currentColor" viewBox="0 0 24 24">
              <path strokeLinecap="round" strokeLinejoin="round" strokeWidth="2"
                    d="M12 8v4l3 3m6-3a9 9 0 11-18 0 9 9 0 0118 0z"/>
            </svg>
          </div>
          <div className="stat-title">Processing</div>
          <div className="stat-value text-warning">{stats.processingJobs}</div>
        </div>

        <div className="stat bg-base-100 rounded-lg shadow">
          <div className="stat-figure text-success">
            <svg className="w-8 h-8" fill="none" stroke="currentColor" viewBox="0 0 24 24">
              <path strokeLinecap="round" strokeLinejoin="round" strokeWidth="2"
                    d="M9 12l2 2 4-4m6 2a9 9 0 11-18 0 9 9 0 0118 0z"/>
            </svg>
          </div>
          <div className="stat-title">Completed</div>
          <div className="stat-value text-success">{stats.completedJobs}</div>
        </div>

        <div className="stat bg-base-100 rounded-lg shadow">
          <div className="stat-figure text-error">
            <svg className="w-8 h-8" fill="none" stroke="currentColor" viewBox="0 0 24 24">
              <path strokeLinecap="round" strokeLinejoin="round" strokeWidth="2"
                    d="M12 8v4m0 4h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z"/>
            </svg>
          </div>
          <div className="stat-title">Failed</div>
          <div className="stat-value text-error">{stats.failedJobs}</div>
        </div>
      </div>

      {/* Recent Files */}
      <div className="bg-base-100 rounded-lg shadow">
        <div className="p-6 border-b border-base-300">
          <h2 className="text-xl font-semibold">Recent Files</h2>
        </div>
        <div className="overflow-x-auto">
          <table className="table table-zebra">
            <thead>
              <tr>
                <th>File</th>
                <th>Type</th>
                <th>Status</th>
                <th>Processed At</th>
                <th>Actions</th>
              </tr>
            </thead>
            <tbody>
              {recentFiles.map((file) => (
                <tr key={file.id}>
                  <td>
                    <div className="flex items-center space-x-3">
                      <div className="text-2xl">
                        {getFileTypeIcon(file.fileType)}
                      </div>
                      <div className="font-medium">{file.filename}</div>
                    </div>
                  </td>
                  <td>
                    <span className="badge badge-outline">
                      {file.fileType.toUpperCase()}
                    </span>
                  </td>
                  <td>
                    <span className={getStatusBadge(file.status)}>
                      {file.status.charAt(0).toUpperCase() + file.status.slice(1)}
                    </span>
                  </td>
                  <td>{file.processedAt}</td>
                  <td>
                    <div className="flex space-x-2">
                      <button className="btn btn-sm btn-ghost">View</button>
                      <button className="btn btn-sm btn-ghost">Download</button>
                    </div>
                  </td>
                </tr>
              ))}
            </tbody>
          </table>
        </div>
      </div>

      {/* Quick Actions */}
      <div className="bg-base-100 rounded-lg shadow p-6">
        <h2 className="text-xl font-semibold mb-4">Quick Actions</h2>
        <div className="flex flex-wrap gap-4">
          <button className="btn btn-primary">
            <svg className="w-5 h-5 mr-2" fill="none" stroke="currentColor" viewBox="0 0 24 24">
              <path strokeLinecap="round" strokeLinejoin="round" strokeWidth="2"
                    d="M7 16a4 4 0 01-.88-7.903A5 5 0 1115.9 6L16 6a5 5 0 011 9.9M15 13l-3-3m0 0l-3 3m3-3v12"/>
            </svg>
            Upload Documents
          </button>

          <button className="btn btn-outline">
            <svg className="w-5 h-5 mr-2" fill="none" stroke="currentColor" viewBox="0 0 24 24">
              <path strokeLinecap="round" strokeLinejoin="round" strokeWidth="2"
                    d="M9 17v-2m3 2v-4m3 4v-6m2 10H7a2 2 0 01-2-2V5a2 2 0 012-2h5.586a1 1 0 01.707.293l5.414 5.414a1 1 0 01.293.707V19a2 2 0 01-2 2z"/>
            </svg>
            View Reports
          </button>

          {user?.role === 'admin' && (
            <button className="btn btn-outline">
              <svg className="w-5 h-5 mr-2" fill="none" stroke="currentColor" viewBox="0 0 24 24">
                <path strokeLinecap="round" strokeLinejoin="round" strokeWidth="2"
                      d="M12 4.354a4 4 0 110 5.292M15 21H3v-1a6 6 0 0112 0v1zm0 0h6v-1a6 6 0 00-9-5.197m13.5-9a2.5 2.5 0 11-5 0 2.5 2.5 0 015 0z"/>
              </svg>
              Manage Users
            </button>
          )}
        </div>
      </div>
    </div>
  );
};

export default DashboardPage;

21 src/frontend/src/pages/DocumentsPage.jsx Normal file
@@ -0,0 +1,21 @@
/**
 * DocumentsPage Component
 * Main page for displaying and managing documents
 */

import React from 'react';
import DocumentGallery from '../components/documents/DocumentGallery';

/**
 * DocumentsPage component
 * @returns {JSX.Element}
 */
const DocumentsPage = () => {
  return (
    <div className="h-full flex flex-col">
      <DocumentGallery />
    </div>
  );
};

export default DocumentsPage;

48 src/frontend/src/pages/LoginPage.jsx Normal file
@@ -0,0 +1,48 @@
import React, {useEffect} from 'react';
import {useNavigate} from 'react-router-dom';
import {useAuth} from '../contexts/AuthContext';
import AuthLayout from '../components/auth/AuthLayout';
import LoginForm from '../components/auth/LoginForm';

/**
 * LoginPage component
 * Full page component that handles login functionality and redirects
 */
function LoginPage() {
  const {isAuthenticated, loading} = useAuth();
  const navigate = useNavigate();

  // Redirect to dashboard if already authenticated
  useEffect(() => {
    if (!loading && isAuthenticated) {
      navigate('/dashboard', {replace: true});
    }
  }, [isAuthenticated, loading, navigate]);

  // Show loading spinner while checking authentication
  if (loading) {
    return (
      <AuthLayout>
        <div className="card w-full max-w-md shadow-xl bg-base-100">
          <div className="card-body items-center">
            <span className="loading loading-spinner loading-lg text-primary"></span>
            <p className="text-base-content/70 mt-4">Loading...</p>
          </div>
        </div>
      </AuthLayout>
    );
  }

  // Don't render login form if user is authenticated (prevents flash)
  if (isAuthenticated) {
    return null;
  }

  return (
    <AuthLayout>
      <LoginForm/>
    </AuthLayout>
  );
}

export default LoginPage;

101 src/frontend/src/services/authService.js Normal file
@@ -0,0 +1,101 @@
import api from '../utils/api';

/**
 * Authentication service for handling login, logout, and user profile operations
 */
class AuthService {
  /**
   * Login user with username and password
   * @param {string} username - User's username
   * @param {string} password - User's password
   * @returns {Promise<{access_token: string, user: Object}>} Login response with token and user data
   */
  async login(username, password) {
    try {
      // FastAPI expects form data for OAuth2PasswordRequestForm
      const formData = new FormData();
      formData.append('username', username);
      formData.append('password', password);

      const response = await api.post('/auth/login', formData, {
        headers: {
          'Content-Type': 'multipart/form-data',
        },
      });

      const {access_token, user} = response.data;

      // Store token and user data in localStorage
      localStorage.setItem('access_token', access_token);
      localStorage.setItem('user', JSON.stringify(user));

      return {access_token, user};
    } catch (error) {
      // Extract error message from response
      const errorMessage = error.response?.data?.detail || 'Login failed';
      throw new Error(errorMessage);
    }
  }

  /**
   * Logout user by clearing stored data
   */
  logout() {
    localStorage.removeItem('access_token');
    localStorage.removeItem('user');
  }

  /**
   * Get current user profile from API
   * @returns {Promise<Object>} Current user profile
   */
  async getCurrentUser() {
    try {
      const response = await api.get('/auth/me');
      const user = response.data;

      // Update stored user data
      localStorage.setItem('user', JSON.stringify(user));

      return user;
    } catch (error) {
      const errorMessage = error.response?.data?.detail || 'Failed to get user profile';
      throw new Error(errorMessage);
    }
  }

  /**
   * Check if user is authenticated by verifying token existence
   * @returns {boolean} True if user has valid token
   */
  isAuthenticated() {
    const token = localStorage.getItem('access_token');
    return !!token;
  }

  /**
   * Get stored user data from localStorage
   * @returns {Object|null} User data or null if not found
   */
  getStoredUser() {
    try {
      const userStr = localStorage.getItem('user');
      return userStr ? JSON.parse(userStr) : null;
    } catch (error) {
      console.error('Error parsing stored user data:', error);
      return null;
    }
  }

  /**
   * Get stored access token from localStorage
   * @returns {string|null} Access token or null if not found
   */
  getStoredToken() {
    return localStorage.getItem('access_token');
  }
}

// Export singleton instance
const authService = new AuthService();
export default authService;

97 src/frontend/src/services/documentService.js Normal file
@@ -0,0 +1,97 @@
/**
 * Document Service
 * Handles all API calls related to documents
 * Currently using mock data for development
 */

import { mockDocuments, availableTags, availableCategories } from '../utils/mockData';
import api from '../utils/api';

// Simulate network delay
const delay = (ms) => new Promise(resolve => setTimeout(resolve, ms));

/**
 * Fetches all documents from the API
 * @returns {Promise<Array>} Array of document objects
 */
export const getAllDocuments = async () => {
  try {
    const response = await api.get('/api/documents');
    return response.data;
  } catch (error) {
    console.error('Failed to fetch documents:', error);
    // Fallback to mock data in case of API error during development
    console.warn('Falling back to mock data');
    return [...mockDocuments];
  }
};

/**
 * Fetches a single document by ID
 * @param {string} id - Document ID
 * @returns {Promise<Object|null>} Document object or null if not found
 */
export const getDocumentById = async (id) => {
  await delay(300);
  const document = mockDocuments.find(doc => doc.id === id);
  return document || null;
};

/**
 * Updates a document's tags and categories
 * @param {string} id - Document ID
 * @param {Object} updates - Object containing tags and/or categories
 * @param {Array<string>} updates.tags - New tags array
 * @param {Array<string>} updates.categories - New categories array
 * @returns {Promise<Object>} Updated document object
 */
export const updateDocument = async (id, updates) => {
  await delay(400);

  const index = mockDocuments.findIndex(doc => doc.id === id);
  if (index === -1) {
    throw new Error('Document not found');
  }

  // Update the document
  mockDocuments[index] = {
    ...mockDocuments[index],
    ...updates
  };

  return mockDocuments[index];
};

/**
 * Deletes a document
 * @param {string} id - Document ID
 * @returns {Promise<boolean>} True if deletion was successful
 */
export const deleteDocument = async (id) => {
  await delay(300);

  const index = mockDocuments.findIndex(doc => doc.id === id);
  if (index === -1) {
    throw new Error('Document not found');
  }

  mockDocuments.splice(index, 1);
  return true;
};

/**
 * Gets all available tags
 * @returns {Promise<Array<string>>} Array of tag strings
 */
export const getAvailableTags = async () => {
  await delay(200);
  return [...availableTags];
};

/**
 * Gets all available categories
 * @returns {Promise<Array<string>>} Array of category strings
 */
export const getAvailableCategories = async () => {
  await delay(200);
  return [...availableCategories];
};

57 src/frontend/src/utils/api.js Normal file
@@ -0,0 +1,57 @@
import axios from 'axios';

// Base API configuration
const API_BASE_URL = 'http://localhost:8000';

// Create axios instance with default configuration
const api = axios.create({
  baseURL: API_BASE_URL,
  timeout: 10000, // 10 seconds timeout
  headers: {
    'Content-Type': 'application/json',
  },
});

export { API_BASE_URL };

// Request interceptor to add authentication token
api.interceptors.request.use(
  (config) => {
    // Get token from localStorage
    const token = localStorage.getItem('access_token');
    if (token) {
      config.headers.Authorization = `Bearer ${token}`;
    }
    return config;
  },
  (error) => {
    return Promise.reject(error);
  }
);

// Response interceptor to handle common errors
api.interceptors.response.use(
  (response) => {
    return response;
  },
  (error) => {
    // Handle 401 errors (unauthorized)
    if (error.response?.status === 401) {
      // Clear token from localStorage on 401
      localStorage.removeItem('access_token');
      localStorage.removeItem('user');

      // Redirect to login page
      window.location.href = '/login';
    }

    // Handle other common errors
    if (error.response?.status >= 500) {
      console.error('Server error:', error.response.data);
    }

    return Promise.reject(error);
  }
);

export default api;

155 src/frontend/src/utils/mockData.js Normal file
@@ -0,0 +1,155 @@
/**
 * Mock data for PDF documents
 * This file provides sample data for development and testing purposes
 */

/**
 * Generates a placeholder thumbnail URL
 * @param {number} index - Document index for unique colors
 * @returns {string} Placeholder image URL
 */
const generateThumbnailUrl = (index) => {
  const colors = ['3B82F6', '10B981', 'F59E0B', 'EF4444', '8B5CF6', 'EC4899'];
  const color = colors[index % colors.length];
  return `https://via.placeholder.com/300x400/${color}/FFFFFF?text=Page+1`;
};

/**
 * Mock documents data
 * @type {Array<Object>}
 */
export const mockDocuments = [
  {
    id: 'doc-001',
    name: 'Contrat-2025.pdf',
    originalFileType: 'DOCX',
    createdAt: '2025-10-01T10:30:00Z',
    fileSize: 2048576, // 2 MB
    pageCount: 12,
    thumbnailUrl: generateThumbnailUrl(0),
    pdfUrl: '/mock/contrat-2025.pdf',
    tags: ['contrat', '2025'],
    categories: ['legal']
  },
  {
    id: 'doc-002',
    name: 'Facture-Janvier.pdf',
    originalFileType: 'XLSX',
    createdAt: '2025-09-15T14:20:00Z',
    fileSize: 512000, // 512 KB
    pageCount: 3,
    thumbnailUrl: generateThumbnailUrl(1),
    pdfUrl: '/mock/facture-janvier.pdf',
    tags: ['facture', 'comptabilité'],
    categories: ['finance']
  },
  {
    id: 'doc-003',
    name: 'Présentation-Projet.pdf',
    originalFileType: 'PPTX',
    createdAt: '2025-09-28T09:15:00Z',
    fileSize: 5242880, // 5 MB
    pageCount: 24,
    thumbnailUrl: generateThumbnailUrl(2),
    pdfUrl: '/mock/presentation-projet.pdf',
    tags: ['présentation', 'projet'],
    categories: ['marketing']
  },
  {
    id: 'doc-004',
    name: 'Photo-Identité.pdf',
    originalFileType: 'JPG',
    createdAt: '2025-10-05T16:45:00Z',
    fileSize: 204800, // 200 KB
    pageCount: 1,
    thumbnailUrl: generateThumbnailUrl(3),
    pdfUrl: '/mock/photo-identite.pdf',
    tags: ['photo', 'identité'],
    categories: ['personnel']
  },
  {
    id: 'doc-005',
    name: 'Manuel-Utilisateur.pdf',
    originalFileType: 'PDF',
    createdAt: '2025-09-20T11:00:00Z',
    fileSize: 3145728, // 3 MB
    pageCount: 45,
    thumbnailUrl: generateThumbnailUrl(4),
    pdfUrl: '/mock/manuel-utilisateur.pdf',
    tags: ['manuel', 'documentation'],
    categories: ['technique']
  },
  {
    id: 'doc-006',
    name: 'Rapport-Annuel.pdf',
    originalFileType: 'DOCX',
    createdAt: '2025-08-30T13:30:00Z',
    fileSize: 4194304, // 4 MB
    pageCount: 67,
    thumbnailUrl: generateThumbnailUrl(5),
    pdfUrl: '/mock/rapport-annuel.pdf',
    tags: ['rapport', 'annuel'],
    categories: ['finance', 'management']
  },
  {
    id: 'doc-007',
    name: 'CV-Candidat.pdf',
    originalFileType: 'DOCX',
    createdAt: '2025-10-02T08:00:00Z',
    fileSize: 153600, // 150 KB
    pageCount: 2,
    thumbnailUrl: generateThumbnailUrl(0),
    pdfUrl: '/mock/cv-candidat.pdf',
    tags: ['cv', 'recrutement'],
    categories: ['rh']
  },
  {
    id: 'doc-008',
    name: 'Devis-Travaux.pdf',
    originalFileType: 'XLSX',
    createdAt: '2025-09-25T15:20:00Z',
    fileSize: 409600, // 400 KB
    pageCount: 5,
    thumbnailUrl: generateThumbnailUrl(1),
    pdfUrl: '/mock/devis-travaux.pdf',
    tags: ['devis', 'travaux'],
    categories: ['finance']
  }
];

/**
 * Available tags for documents
 * @type {Array<string>}
 */
export const availableTags = [
  'contrat',
  'facture',
  'présentation',
  'photo',
  'manuel',
  'rapport',
  'cv',
  'devis',
  'comptabilité',
  'projet',
  'identité',
  'documentation',
  'annuel',
  'recrutement',
  'travaux',
  '2025'
];

/**
 * Available categories for documents
 * @type {Array<string>}
 */
export const availableCategories = [
  'legal',
  'finance',
  'marketing',
  'personnel',
  'technique',
  'management',
  'rh'
];

15 src/frontend/tailwind.config.js Normal file
@@ -0,0 +1,15 @@
/** @type {import('tailwindcss').Config} */
export default {
  content: [
    "./index.html",
    "./src/**/*.{js,ts,jsx,tsx}",
  ],
  theme: {
    extend: {},
  },
  plugins: [require("daisyui")],
  daisyui: {
    themes: ["light", "dark", "cupcake"],
    darkTheme: "dark",
  },
}

@@ -1,7 +1,8 @@
-import { defineConfig } from 'vite'
+import {defineConfig} from 'vite'
+import tailwindcss from '@tailwindcss/vite'
 import react from '@vitejs/plugin-react'
 
 // https://vite.dev/config/
 export default defineConfig({
-  plugins: [react()],
+  plugins: [tailwindcss(), react()],
 })
@@ -7,14 +7,22 @@ WORKDIR /app
 RUN apt-get update && apt-get install -y --no-install-recommends \
     libmagic1 \
     file \
+    pandoc \
+    ghostscript \
+    texlive-xetex \
     && rm -rf /var/lib/apt/lists/*
 
+
 # Copy requirements and install dependencies
 COPY requirements.txt .
 RUN pip install --no-cache-dir -r requirements.txt
 
+# Change the user
+USER 1002:1002
+
 # Copy application code
 COPY . .
 
+
 # Command will be overridden by docker-compose
 CMD ["celery", "-A", "main", "worker", "--loglevel=info"]
@@ -5,9 +5,16 @@ email-validator==2.3.0
 fastapi==0.116.1
 httptools==0.6.4
 motor==3.7.1
-pymongo==4.15.0
+pikepdf==9.11.0
+pillow==11.3.0
 pydantic==2.11.9
+PyJWT==2.10.1
+pymongo==4.15.0
+PyMuPDF==1.26.4
+pypandoc==1.15
+python-multipart==0.0.20
 redis==6.4.0
+reportlab==4.4.4
 uvicorn==0.35.0
 python-magic==0.4.27
 watchdog==6.0.0

0 src/worker/tasks/common/__init__.py Normal file

73 src/worker/tasks/common/converter_utils.py Normal file
@@ -0,0 +1,73 @@
import subprocess
from pathlib import Path

import magic  # python-magic


class UnsupportedFileTypeError(Exception):
    """Exception raised when a file type is not supported."""
    pass


def detect_file_type(file_path: str) -> str:
    """
    Detect the type of file using python-magic.

    Returns:
        'text', 'image', 'word'

    Raises:
        UnsupportedFileTypeError: If file type is not supported.
    """
    mime = magic.from_file(file_path, mime=True)
    extension = Path(file_path).suffix
    if mime.startswith("text/"):
        return "text"
    elif mime.startswith("image/"):
        return "image"
    elif mime in ("application/vnd.openxmlformats-officedocument.wordprocessingml.document",):
        return "word"
    elif mime == "application/pdf":
        return "pdf"
    elif mime == "application/vnd.ms-powerpoint":
        return "powerpoint"
    elif mime == "application/octet-stream" and extension in (".jpg", ".jpeg", ".png", ".gif"):
        return "image"
    else:
        raise UnsupportedFileTypeError(f"Unsupported file type: {mime}")


def compress_pdf(input_pdf: str, output_pdf: str, quality: str = "ebook") -> None:
    """
    Compress a PDF using Ghostscript.

    Args:
        input_pdf (str): Path to the input PDF.
        output_pdf (str): Path to save the compressed PDF.
        quality (str): Ghostscript PDFSETTINGS option: screen, ebook, printer, prepress.

    Raises:
        FileNotFoundError: If input PDF does not exist.
        RuntimeError: If Ghostscript returns an error.
    """
    input_path = Path(input_pdf)
    output_path = Path(output_pdf)

    if not input_path.exists():
        raise FileNotFoundError(f"Input PDF not found: {input_pdf}")

    cmd = [
        "gs",
        "-sDEVICE=pdfwrite",
        "-dCompatibilityLevel=1.4",
        f"-dPDFSETTINGS=/{quality}",
        "-dNOPAUSE",
        "-dQUIET",
        "-dBATCH",
        f"-sOutputFile={str(output_path)}",
        str(input_path),
    ]

    result = subprocess.run(cmd)
    if result.returncode != 0:
        raise RuntimeError(f"Ghostscript failed with return code {result.returncode}")
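
Not part of the commit: a minimal usage sketch of the two helpers above, importing them the same way the new tests later in this diff do (from tasks.common.converter_utils). The sample path and output name are hypothetical, and Ghostscript must be installed for compress_pdf to succeed.

# Usage sketch (assumes tasks.common.converter_utils is importable and Ghostscript is on PATH).
from tasks.common.converter_utils import UnsupportedFileTypeError, compress_pdf, detect_file_type


def shrink_if_pdf(path: str) -> None:
    """Compress the file in place only when python-magic reports it as a PDF."""
    try:
        kind = detect_file_type(path)  # 'text', 'image', 'word', 'pdf', 'powerpoint'
    except UnsupportedFileTypeError as exc:
        print(f"Skipping {path}: {exc}")
        return
    if kind == "pdf":
        compress_pdf(path, path + ".small.pdf", quality="screen")  # hypothetical output name


if __name__ == "__main__":
    shrink_if_pdf("/tmp/example.pdf")  # hypothetical sample file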

64 src/worker/tasks/common/document_utils.py Normal file
@@ -0,0 +1,64 @@
import hashlib
import logging
import os
from pathlib import Path

from app.config import settings

logger = logging.getLogger(__name__)


def get_file_hash(file_bytes: bytes) -> str:
    """
    Calculate SHA256 hash of file content.

    Args:
        file_bytes: Raw file content as bytes

    Returns:
        Hexadecimal SHA256 hash string
    """
    return hashlib.sha256(file_bytes).hexdigest()


def get_object_path(file_hash):
    """
    Build the on-disk path of the object identified by the given hash.

    :param file_hash: SHA256 hash of the file content
    :return: path under the objects folder (prefix directory + full hash)
    """
    root = settings.get_objects_folder()
    return os.path.join(root, file_hash[:24], file_hash)


def save_as_object(file_path, remove_on_success=True) -> str:
    """
    Read the file, get the hash and save using the hash as the filename.
    :param file_path:
    :param remove_on_success:
    :return: hash of the file
    """
    logger.info(f"Saving file {file_path} as object")
    path = Path(file_path)
    as_bytes = path.read_bytes()

    file_hash = get_file_hash(as_bytes)
    logger.info(f"File hash: {file_hash}")

    object_path = get_object_path(file_hash)
    if os.path.exists(object_path):
        logger.info(f"Object already exists: {object_path}")
        return file_hash

    if not os.path.exists(os.path.dirname(object_path)):
        os.makedirs(os.path.dirname(object_path))

    logger.info(f"Saving object to: {object_path}")
    with open(object_path, "wb") as f:
        f.write(as_bytes)

    if remove_on_success:
        logger.info(f"Removing file: {file_path}")
        path.unlink()

    return file_hash
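
Not part of the commit: a self-contained sketch of the content-addressed layout the module above uses, objects/<first 24 hex chars of the SHA256>/<full SHA256>. A temporary directory stands in for settings.get_objects_folder() so the snippet runs without the app package.

# Self-contained sketch of the same content-addressed layout (standard library only).
import hashlib
import os
import tempfile


def object_path_for(root: str, file_bytes: bytes) -> str:
    file_hash = hashlib.sha256(file_bytes).hexdigest()    # same as get_file_hash()
    return os.path.join(root, file_hash[:24], file_hash)  # same shape as get_object_path()


root = tempfile.mkdtemp()          # stand-in for the configured objects folder
payload = b"hello MyDocManager"
target = object_path_for(root, payload)
os.makedirs(os.path.dirname(target), exist_ok=True)
with open(target, "wb") as f:
    f.write(payload)
print(target)  # .../<24-char prefix>/<64-char sha256>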
@@ -6,16 +6,28 @@ and update processing job statuses throughout the task lifecycle.
 """
 
 import logging
+import os
 from typing import Any, Dict
 
 from app.config import settings
 from app.database.connection import get_database
-from app.services.document_service import DocumentService
+from app.models.job import ProcessingStatus
+from app.services.document_service import DocumentService, DocumentAlreadyExists
+from app.services.job_service import JobService
 from tasks.main import celery_app
 
 logger = logging.getLogger(__name__)
 
-@celery_app.task(bind=True, autoretry_for=(Exception,), retry_kwargs={'max_retries': 3, 'countdown': 60})
+
+def get_services():
+    database = get_database()
+    document_service = DocumentService(database=database, objects_folder=settings.get_objects_folder())
+    job_service = JobService(database=database)
+    return document_service, job_service
+
+
+# @celery_app.task(bind=True, autoretry_for=(Exception,), retry_kwargs={'max_retries': 3, 'countdown': 60})
+@celery_app.task(bind=True)
 def process_document(self, filepath: str) -> Dict[str, Any]:
     """
     Process a document file and extract its content.
@@ -36,29 +48,34 @@ def process_document(self, filepath: str) -> Dict[str, Any]:
         Exception: Any processing error (will trigger retry)
     """
     task_id = self.request.id
-    logger.info(f"Starting document processing task {task_id} for file: {filepath}")
+    logger.info(f'Task {task_id} : Starting document processing for file: "{filepath}"')
 
-    database = get_database()
-    document_service = DocumentService(database=database, objects_folder=settings.get_objects_folder())
-    from app.services.job_service import JobService
-    job_service = JobService(database=database)
+    # get services
+    document_service, job_service = get_services()
 
     job = None
+    document = None
     try:
-        # Step 1: Insert the document in DB
+        # Step 1: Create the document and a new job record for the document
        document = document_service.create_document(filepath)
-        logger.info(f"Job {task_id} created for document {document.id} with file path: {filepath}")
-
-        # Step 2: Create a new job record for the document
         job = job_service.create_job(task_id=task_id, document_id=document.id)
-
-        # Step 3: Mark job as started
         job_service.mark_job_as_started(job_id=job.id)
-        logger.info(f"Job {task_id} marked as PROCESSING")
+        logger.info(f'Task {task_id} : Created document "{document.id}". Started job "{job.id}"')
 
-        # Step 4: Mark job as completed
+        logger.info(f"Task {task_id} : Creating associated PDF")
+        job_service.update_job_status(job_id=job.id, status=ProcessingStatus.SAVING_PDF)
+        document_service.create_pdf(document.id)
+
+        logger.info(f"Task {task_id} : Creating thumbnail")
+        job_service.update_job_status(job_id=job.id, status=ProcessingStatus.CREATING_THUMBNAIL)
+        document_service.create_thumbnail(document.id)
+
+        # remove the file from the watch folder
+        os.remove(filepath)
+
+        # Step x: Mark job as completed
         job_service.mark_job_as_completed(job_id=job.id)
-        logger.info(f"Job {task_id} marked as COMPLETED")
+        logger.info(f"Task {task_id} marked as COMPLETED")
 
         return {
             "task_id": task_id,
@@ -66,6 +83,19 @@ def process_document(self, filepath: str) -> Dict[str, Any]:
             "status": "completed",
         }
 
+    except DocumentAlreadyExists as e:
+        logger.info(f"Task {task_id} completed: {str(e)}")
+        if job is not None:
+            job_service.mark_job_as_completed(job_id=job.id)
+            logger.info(f"Job {task_id} marked as COMPLETED")
+
+        return {
+            "task_id": task_id,
+            "filepath": filepath,
+            "status": "completed",
+            "message": str(e),
+        }
+
     except Exception as e:
         error_message = f"Document processing failed: {str(e)}"
         logger.error(f"Task {task_id} failed: {error_message}")
@@ -77,9 +107,13 @@ def process_document(self, filepath: str) -> Dict[str, Any]:
                 logger.info(f"Job {task_id} marked as FAILED")
             else:
                 logger.error(f"Failed to process {filepath}. error = {str(e)}")
 
+            if document is not None:
+                document_service.move_to_errors(document.id, filepath)
+                logger.info(f"Moved file {filepath} to errors/{document.id}")
+
         except Exception as job_error:
             logger.error(f"Failed to update job status for task {task_id}: {str(job_error)}")
 
         # Re-raise the exception to trigger Celery retry mechanism
         raise
@@ -3,13 +3,19 @@ Celery worker for MyDocManager document processing tasks.
 
 This module contains all Celery tasks for processing documents.
 """
+import logging
 import os
 
 from celery import Celery
+from celery.signals import worker_process_init
+
+from app.config import settings
 
 # Environment variables
-REDIS_URL = os.getenv("REDIS_URL", "redis://localhost:6379/0")
-MONGODB_URL = os.getenv("MONGODB_URL", "mongodb://localhost:27017")
+REDIS_URL = settings.get_redis_url()
+MONGODB_URL = settings.get_mongodb_url()
+
+logger = logging.getLogger(__name__)
 
 # Initialize Celery app
 celery_app = Celery(
@@ -28,9 +34,20 @@ celery_app.conf.update(
     timezone="UTC",
     enable_utc=True,
     task_track_started=True,
     task_time_limit=300, # 5 minutes
     task_soft_time_limit=240, # 4 minutes
 )
 
+
+def global_init(**kwargs):
+    """Initialize global variables."""
+    logger.info(f"{'*' * 45}")
+    logger.info(f"{'--' * 5}" + " Starting MyDocManager worker " + f"{'--' * 5}")
+    logger.info(f"{'*' * 45}")
+
+
+global_init()
+
 if __name__ == "__main__":
+    global_init()
     celery_app.start()
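
Not part of the commit: the new worker module imports worker_process_init but calls global_init() at import time rather than connecting the signal. For reference, a minimal sketch of the usual signal-based wiring, with a stand-in handler and a hypothetical broker URL so the snippet is self-contained.

# Sketch only: how worker_process_init is typically connected in a Celery worker module.
import logging

from celery import Celery
from celery.signals import worker_process_init

logger = logging.getLogger(__name__)
app = Celery("sketch", broker="redis://localhost:6379/0")  # hypothetical broker URL


@worker_process_init.connect
def announce_worker(**kwargs):
    """Runs once in every worker process right after it is forked."""
    logger.info("worker process initialised")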

0 tests/common/__init__.py Normal file

52 tests/common/test_utils.py Normal file
@@ -0,0 +1,52 @@
import shutil
import tempfile
from pathlib import Path

import pytest

from tasks.common.converter_utils import detect_file_type, UnsupportedFileTypeError


@pytest.fixture
def temp_dir():
    """Create a temporary directory for output PDFs."""
    dir_path = tempfile.mkdtemp()
    yield dir_path
    shutil.rmtree(dir_path)


def test_i_can_detect_text_file(temp_dir):
    txt_file = Path(temp_dir) / "sample.txt"
    txt_file.write_text("Sample text content")
    detected_type = detect_file_type(str(txt_file))
    assert detected_type == "text"


def test_i_can_detect_image_file(temp_dir):
    from PIL import Image

    img_file = Path(temp_dir) / "sample.jpg"
    image = Image.new("RGB", (50, 50), color="blue")
    image.save(img_file)

    detected_type = detect_file_type(str(img_file))
    assert detected_type == "image"


def test_i_can_detect_word_file(temp_dir):
    import docx

    docx_file = Path(temp_dir) / "sample.docx"
    doc = docx.Document()
    doc.add_paragraph("Sample content")
    doc.save(docx_file)

    detected_type = detect_file_type(str(docx_file))
    assert detected_type == "word"


def test_i_cannot_detect_unsupported_file(temp_dir):
    exe_file = Path(temp_dir) / "sample.exe"
    exe_file.write_bytes(b'\x4D\x5A\x90\x00\x03\x00\x00\x00')
    with pytest.raises(UnsupportedFileTypeError):
        detect_file_type(str(exe_file))

@@ -568,3 +568,137 @@
         """Test unsupported file type raises ValueError."""
         with pytest.raises(ValueError, match="Unsupported file type"):
             document_service._detect_file_type("/path/to/document.xyz")
+
+
+class TestCreatePdf:
+    """Tests for create_pdf method."""
+
+    @patch('app.services.document_service.convert_to_pdf')
+    @patch('app.services.document_service.magic.from_buffer')
+    def test_i_can_create_pdf_successfully(
+            self,
+            mock_magic,
+            mock_convert_to_pdf,
+            document_service,
+            sample_file_bytes
+    ):
+        """Test creating PDF from an existing document."""
+        # Setup
+        mock_magic.return_value = "text/plain"
+
+        # Create a document first
+        created_doc = document_service.create_document(
+            "/test/test.txt",
+            sample_file_bytes,
+            "utf-8"
+        )
+
+        # Mock the PDF conversion
+        pdf_path = os.path.join(document_service.temp_folder, "converted.pdf")
+        mock_convert_to_pdf.return_value = pdf_path
+
+        # Write a sample PDF file that the conversion would create
+        pdf_content = b"This is PDF content"
+        os.makedirs(os.path.dirname(pdf_path), exist_ok=True)
+        with open(pdf_path, "wb") as f:
+            f.write(pdf_content)
+
+        # Execute
+        result = document_service.create_pdf(created_doc.id)
+
+        # Verify
+        assert result is True
+
+        # Get the updated document
+        updated_doc = document_service.get_document_by_id(created_doc.id)
+        assert updated_doc.pdf_file_hash is not None
+
+        # Verify the PDF content was saved
+        pdf_hash = document_service._calculate_file_hash(pdf_content)
+        assert updated_doc.pdf_file_hash == pdf_hash
+
+        # Verify convert_to_pdf was called with correct arguments
+        doc_path = document_service.get_document_path(created_doc.file_hash)
+        mock_convert_to_pdf.assert_called_once_with(doc_path, document_service.temp_folder)
+
+        # Verify content exists on disk
+        validate_file_saved(document_service, pdf_hash, pdf_content)
+
+        # Verify PDF hash was added to document
+        updated_doc = document_service.get_document_by_id(created_doc.id)
+        pdf_hash = document_service._calculate_file_hash(pdf_content)
+        assert updated_doc.pdf_file_hash == pdf_hash
+
+    @patch('app.services.document_service.convert_to_pdf')
+    @patch('app.services.document_service.magic.from_buffer')
+    def test_i_can_reuse_existing_pdf(
+            self,
+            mock_magic,
+            mock_convert_to_pdf,
+            document_service,
+            sample_file_bytes
+    ):
+        """Test that if PDF already exists, it doesn't recreate it."""
+        # Setup
+        mock_magic.return_value = "text/plain"
+
+        # Create a document first
+        created_doc = document_service.create_document(
+            "/test/test.txt",
+            sample_file_bytes,
+            "utf-8"
+        )
+
+        # Create a fake PDF file and update the document
+        pdf_content = b"This is PDF content"
+        pdf_hash = document_service._calculate_file_hash(pdf_content)
+        document_service.save_content_if_needed(pdf_hash, pdf_content)
+        document_service.update_document(created_doc.id, {"pdf_file_hash": pdf_hash})
+
+        # Execute
+        result = document_service.create_pdf(created_doc.id)
+
+        # Verify
+        assert result is True
+
+        # Verify convert_to_pdf was NOT called
+        mock_convert_to_pdf.assert_not_called()
+
+    def test_i_cannot_create_pdf_for_nonexistent_document(
+            self,
+            document_service
+    ):
+        """Test behavior when document ID doesn't exist."""
+        # Execute with random ObjectId
+        result = document_service.create_pdf(ObjectId())
+
+        # Verify
+        assert result is False
+
+    @patch('app.services.document_service.magic.from_buffer')
+    def test_i_cannot_create_pdf_when_file_content_missing(
+            self,
+            mock_magic,
+            document_service,
+            sample_file_bytes
+    ):
+        """Test behavior when file content doesn't exist."""
+        # Setup
+        mock_magic.return_value = "text/plain"
+
+        # Create a document
+        created_doc = document_service.create_document(
+            "/test/test.txt",
+            sample_file_bytes,
+            "utf-8"
+        )
+
+        # Simulate missing content by removing file
+        file_path = document_service.get_document_path(created_doc.file_hash)
+        os.remove(file_path)
+
+        # Execute
+        result = document_service.create_pdf(created_doc.id)
+
+        # Verify
+        assert result is False
@@ -418,6 +418,25 @@
         assert exc_info.value.current_status == ProcessingStatus.FAILED
         assert exc_info.value.target_status == ProcessingStatus.FAILED
+
+    def test_i_can_update_job_status(
+            self,
+            job_service,
+            sample_document_id,
+            sample_task_id
+    ):
+        """Test that a started job can be moved to an intermediate status."""
+        # Create and start a job
+        created_job = job_service.create_job(sample_document_id, sample_task_id)
+        job_service.mark_job_as_started(created_job.id)
+
+        # Execute without error message
+        result = job_service.update_job_status(created_job.id, ProcessingStatus.SAVING_OBJECT)
+
+        # Verify status transition
+        assert result is not None
+        assert result.status == ProcessingStatus.SAVING_OBJECT
+        assert result.error_message is None
 
 
 class TestDeleteJob:
     """Tests for delete_job method."""
739
tests/services/test_user_service.py
Normal file
739
tests/services/test_user_service.py
Normal file
@@ -0,0 +1,739 @@
|
|||||||
|
"""
|
||||||
|
Unit tests for UserService using in-memory MongoDB.
|
||||||
|
|
||||||
|
Tests the business logic operations with real MongoDB operations
|
||||||
|
using mongomock for better integration testing.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from bson import ObjectId
|
||||||
|
from mongomock.mongo_client import MongoClient
|
||||||
|
|
||||||
|
from app.models.auth import UserRole
|
||||||
|
from app.models.user import UserCreate, UserUpdate, UserCreateNoValidation
|
||||||
|
from app.services.user_service import UserService
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def in_memory_database():
|
||||||
|
"""Create an in-memory database for testing."""
|
||||||
|
client = MongoClient()
|
||||||
|
return client.test_database
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def user_service(in_memory_database):
|
||||||
|
"""Create UserService with in-memory repositories."""
|
||||||
|
service = UserService(in_memory_database).initialize()
|
||||||
|
return service
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def sample_user_data():
|
||||||
|
"""Sample user data for testing."""
|
||||||
|
return {
|
||||||
|
"username": "testuser",
|
||||||
|
"email": "testuser@example.com",
|
||||||
|
"password": "SecureP@ssw0rd123"
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def sample_user_data_2():
|
||||||
|
"""Second sample user data for testing."""
|
||||||
|
return {
|
||||||
|
"username": "anotheruser",
|
||||||
|
"email": "anotheruser@example.com",
|
||||||
|
"password": "AnotherP@ssw0rd456"
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class TestCreateUser:
|
||||||
|
"""Tests for create_user method."""
|
||||||
|
|
||||||
|
def test_i_can_create_user_with_valid_data(
|
||||||
|
self,
|
||||||
|
user_service,
|
||||||
|
sample_user_data
|
||||||
|
):
|
||||||
|
"""Test creating user with valid data."""
|
||||||
|
# Execute
|
||||||
|
user_create = UserCreate(**sample_user_data)
|
||||||
|
result = user_service.create_user(user_create)
|
||||||
|
|
||||||
|
# Verify user creation
|
||||||
|
assert result is not None
|
||||||
|
assert result.username == sample_user_data["username"]
|
||||||
|
assert result.email == sample_user_data["email"]
|
||||||
|
assert result.hashed_password is not None
|
||||||
|
assert result.hashed_password != sample_user_data["password"]
|
||||||
|
assert result.role == UserRole.USER
|
||||||
|
assert result.is_active is True
|
||||||
|
assert result.preferences == {}
|
||||||
|
assert result.created_at is not None
|
||||||
|
assert result.updated_at is not None
|
||||||
|
|
||||||
|
# Verify user exists in database
|
||||||
|
user_in_db = user_service.get_user_by_id(str(result.id))
|
||||||
|
assert user_in_db is not None
|
||||||
|
assert user_in_db.id == result.id
|
||||||
|
assert user_in_db.username == sample_user_data["username"]
|
||||||
|
|
||||||
|
def test_i_cannot_create_user_with_duplicate_username(
|
||||||
|
self,
|
||||||
|
user_service,
|
||||||
|
sample_user_data
|
||||||
|
):
|
||||||
|
"""Test that duplicate username raises ValueError."""
|
||||||
|
# Create first user
|
||||||
|
user_create = UserCreate(**sample_user_data)
|
||||||
|
user_service.create_user(user_create)
|
||||||
|
|
||||||
|
# Try to create user with same username but different email
|
||||||
|
duplicate_user_data = sample_user_data.copy()
|
||||||
|
duplicate_user_data["email"] = "different@example.com"
|
||||||
|
duplicate_user_create = UserCreate(**duplicate_user_data)
|
||||||
|
|
||||||
|
# Execute and verify exception
|
||||||
|
with pytest.raises(ValueError) as exc_info:
|
||||||
|
user_service.create_user(duplicate_user_create)
|
||||||
|
|
||||||
|
assert "already exists" in str(exc_info.value)
|
||||||
|
assert sample_user_data["username"] in str(exc_info.value)
|
||||||
|
|
||||||
|
def test_i_cannot_create_user_with_duplicate_email(
|
||||||
|
self,
|
||||||
|
user_service,
|
||||||
|
sample_user_data
|
||||||
|
):
|
||||||
|
"""Test that duplicate email raises ValueError."""
|
||||||
|
# Create first user
|
||||||
|
user_create = UserCreate(**sample_user_data)
|
||||||
|
user_service.create_user(user_create)
|
||||||
|
|
||||||
|
# Try to create user with same email but different username
|
||||||
|
duplicate_user_data = sample_user_data.copy()
|
||||||
|
duplicate_user_data["username"] = "differentuser"
|
||||||
|
duplicate_user_create = UserCreate(**duplicate_user_data)
|
||||||
|
|
||||||
|
# Execute and verify exception
|
||||||
|
with pytest.raises(ValueError) as exc_info:
|
||||||
|
user_service.create_user(duplicate_user_create)
|
||||||
|
|
||||||
|
assert "already exists" in str(exc_info.value)
|
||||||
|
assert sample_user_data["email"] in str(exc_info.value)
|
||||||
|
|
||||||
|
|
||||||
|
class TestGetUserMethods:
|
||||||
|
"""Tests for user retrieval methods."""
|
||||||
|
|
||||||
|
def test_i_can_get_user_by_username(
|
||||||
|
self,
|
||||||
|
user_service,
|
||||||
|
sample_user_data
|
||||||
|
):
|
||||||
|
"""Test retrieving user by username."""
|
||||||
|
# Create a user first
|
||||||
|
user_create = UserCreate(**sample_user_data)
|
||||||
|
created_user = user_service.create_user(user_create)
|
||||||
|
|
||||||
|
# Execute
|
||||||
|
result = user_service.get_user_by_username(sample_user_data["username"])
|
||||||
|
|
||||||
|
# Verify
|
||||||
|
assert result is not None
|
||||||
|
assert result.id == created_user.id
|
||||||
|
assert result.username == sample_user_data["username"]
|
||||||
|
assert result.email == sample_user_data["email"]
|
||||||
|
|
||||||
|
def test_i_can_get_user_by_id(
|
||||||
|
self,
|
||||||
|
user_service,
|
||||||
|
sample_user_data
|
||||||
|
):
|
||||||
|
"""Test retrieving user by ID."""
|
||||||
|
# Create a user first
|
||||||
|
user_create = UserCreate(**sample_user_data)
|
||||||
|
created_user = user_service.create_user(user_create)
|
||||||
|
|
||||||
|
# Execute
|
||||||
|
result = user_service.get_user_by_id(str(created_user.id))
|
||||||
|
|
||||||
|
# Verify
|
||||||
|
assert result is not None
|
||||||
|
assert result.id == created_user.id
|
||||||
|
assert result.username == sample_user_data["username"]
|
||||||
|
assert result.email == sample_user_data["email"]
|
||||||
|
|
||||||
|
def test_i_can_check_user_exists(
|
||||||
|
self,
|
||||||
|
user_service,
|
||||||
|
sample_user_data
|
||||||
|
):
|
||||||
|
"""Test checking if user exists."""
|
||||||
|
# Initially should not exist
|
||||||
|
assert user_service.user_exists(sample_user_data["username"]) is False
|
||||||
|
|
||||||
|
# Create a user
|
||||||
|
user_create = UserCreate(**sample_user_data)
|
||||||
|
user_service.create_user(user_create)
|
||||||
|
|
||||||
|
# Now should exist
|
||||||
|
assert user_service.user_exists(sample_user_data["username"]) is True
|
||||||
|
|
||||||
|
def test_i_cannot_get_nonexistent_user_by_username(
|
||||||
|
self,
|
||||||
|
user_service
|
||||||
|
):
|
||||||
|
"""Test retrieving nonexistent user by username returns None."""
|
||||||
|
# Execute
|
||||||
|
result = user_service.get_user_by_username("nonexistentuser")
|
||||||
|
|
||||||
|
# Verify
|
||||||
|
assert result is None
|
||||||
|
|
||||||
|
def test_i_cannot_get_nonexistent_user_by_id(
|
||||||
|
self,
|
||||||
|
user_service
|
||||||
|
):
|
||||||
|
"""Test retrieving nonexistent user by ID returns None."""
|
||||||
|
# Execute with random ObjectId
|
||||||
|
result = user_service.get_user_by_id(str(ObjectId()))
|
||||||
|
|
||||||
|
# Verify
|
||||||
|
assert result is None
|
||||||
|
|
||||||
|
|
||||||
|
class TestAuthenticateUser:
    """Tests for authenticate_user method."""

    def test_i_can_authenticate_user_with_valid_credentials(
        self,
        user_service,
        sample_user_data
    ):
        """Test authenticating user with valid credentials."""
        # Create a user
        user_create = UserCreate(**sample_user_data)
        created_user = user_service.create_user(user_create)

        # Execute authentication
        result = user_service.authenticate_user(
            sample_user_data["username"],
            sample_user_data["password"]
        )

        # Verify
        assert result is not None
        assert result.id == created_user.id
        assert result.username == sample_user_data["username"]

    def test_i_cannot_authenticate_user_with_wrong_password(
        self,
        user_service,
        sample_user_data
    ):
        """Test authenticating user with wrong password returns None."""
        # Create a user
        user_create = UserCreate(**sample_user_data)
        user_service.create_user(user_create)

        # Execute authentication with wrong password
        result = user_service.authenticate_user(
            sample_user_data["username"],
            "WrongP@ssw0rd123"
        )

        # Verify
        assert result is None

    def test_i_cannot_authenticate_user_with_wrong_username(
        self,
        user_service,
        sample_user_data
    ):
        """Test authenticating user with wrong username returns None."""
        # Create a user
        user_create = UserCreate(**sample_user_data)
        user_service.create_user(user_create)

        # Execute authentication with wrong username
        result = user_service.authenticate_user(
            "wrongusername",
            sample_user_data["password"]
        )

        # Verify
        assert result is None

    def test_i_cannot_authenticate_inactive_user(
        self,
        user_service,
        sample_user_data
    ):
        """Test authenticating inactive user returns None."""
        # Create a user
        user_create = UserCreate(**sample_user_data)
        created_user = user_service.create_user(user_create)

        # Deactivate the user
        user_service.update_user(str(created_user.id), UserUpdate(is_active=False))

        # Execute authentication
        result = user_service.authenticate_user(
            sample_user_data["username"],
            sample_user_data["password"]
        )

        # Verify
        assert result is None


class TestUpdateUser:
    """Tests for update_user method."""

    def test_i_can_update_user_username(
        self,
        user_service,
        sample_user_data
    ):
        """Test updating user username."""
        # Create a user
        user_create = UserCreate(**sample_user_data)
        created_user = user_service.create_user(user_create)

        # Execute update
        new_username = "updatedusername"
        result = user_service.update_user(
            str(created_user.id),
            UserUpdate(username=new_username)
        )

        # Verify
        assert result is not None
        assert result.username == new_username

        # Verify in database
        updated_user = user_service.get_user_by_id(str(created_user.id))
        assert updated_user.username == new_username

    def test_i_can_update_user_email(
        self,
        user_service,
        sample_user_data
    ):
        """Test updating user email."""
        # Create a user
        user_create = UserCreate(**sample_user_data)
        created_user = user_service.create_user(user_create)

        # Execute update
        new_email = "newemail@example.com"
        result = user_service.update_user(
            str(created_user.id),
            UserUpdate(email=new_email)
        )

        # Verify
        assert result is not None
        assert result.email == new_email

        # Verify in database
        updated_user = user_service.get_user_by_id(str(created_user.id))
        assert updated_user.email == new_email

    def test_i_can_update_user_role(
        self,
        user_service,
        sample_user_data
    ):
        """Test updating user role."""
        # Create a user
        user_create = UserCreate(**sample_user_data)
        created_user = user_service.create_user(user_create)

        # Execute update
        result = user_service.update_user(
            str(created_user.id),
            UserUpdate(role=UserRole.ADMIN)
        )

        # Verify
        assert result is not None
        assert result.role == UserRole.ADMIN

        # Verify in database
        updated_user = user_service.get_user_by_id(str(created_user.id))
        assert updated_user.role == UserRole.ADMIN

    def test_i_can_update_user_is_active(
        self,
        user_service,
        sample_user_data
    ):
        """Test updating user is_active status."""
        # Create a user
        user_create = UserCreate(**sample_user_data)
        created_user = user_service.create_user(user_create)

        # Execute update
        result = user_service.update_user(
            str(created_user.id),
            UserUpdate(is_active=False)
        )

        # Verify
        assert result is not None
        assert result.is_active is False

        # Verify in database
        updated_user = user_service.get_user_by_id(str(created_user.id))
        assert updated_user.is_active is False

    def test_i_cannot_update_user_with_duplicate_username(
        self,
        user_service,
        sample_user_data,
        sample_user_data_2
    ):
        """Test that updating to existing username raises ValueError."""
        # Create two users
        user_create_1 = UserCreate(**sample_user_data)
        user_1 = user_service.create_user(user_create_1)

        user_create_2 = UserCreate(**sample_user_data_2)
        user_2 = user_service.create_user(user_create_2)

        # Try to update user_2 with user_1's username
        with pytest.raises(ValueError) as exc_info:
            user_service.update_user(
                str(user_2.id),
                UserUpdate(username=sample_user_data["username"])
            )

        assert "already taken" in str(exc_info.value)

    def test_i_cannot_update_user_with_duplicate_email(
        self,
        user_service,
        sample_user_data,
        sample_user_data_2
    ):
        """Test that updating to existing email raises ValueError."""
        # Create two users
        user_create_1 = UserCreate(**sample_user_data)
        user_1 = user_service.create_user(user_create_1)

        user_create_2 = UserCreate(**sample_user_data_2)
        user_2 = user_service.create_user(user_create_2)

        # Try to update user_2 with user_1's email
        with pytest.raises(ValueError) as exc_info:
            user_service.update_user(
                str(user_2.id),
                UserUpdate(email=sample_user_data["email"])
            )

        assert "already taken" in str(exc_info.value)

    def test_i_cannot_update_nonexistent_user(
        self,
        user_service
    ):
        """Test updating nonexistent user returns None."""
        # Execute update with random ObjectId
        result = user_service.update_user(
            str(ObjectId()),
            UserUpdate(username="newusername")
        )

        # Verify
        assert result is None


class TestDeleteUser:
    """Tests for delete_user method."""

    def test_i_can_delete_existing_user(
        self,
        user_service,
        sample_user_data
    ):
        """Test deleting an existing user."""
        # Create a user
        user_create = UserCreate(**sample_user_data)
        created_user = user_service.create_user(user_create)

        # Verify user exists
        user_before_delete = user_service.get_user_by_id(str(created_user.id))
        assert user_before_delete is not None

        # Execute deletion
        result = user_service.delete_user(str(created_user.id))

        # Verify deletion
        assert result is True

        # Verify user no longer exists
        deleted_user = user_service.get_user_by_id(str(created_user.id))
        assert deleted_user is None

    def test_i_cannot_delete_nonexistent_user(
        self,
        user_service
    ):
        """Test deleting a nonexistent user returns False."""
        # Execute deletion with random ObjectId
        result = user_service.delete_user(str(ObjectId()))

        # Verify
        assert result is False


class TestListAndCountMethods:
    """Tests for list_users and count_users methods."""

    def test_i_can_list_users(
        self,
        user_service,
        sample_user_data,
        sample_user_data_2
    ):
        """Test listing all users."""
        # Create multiple users
        user_create_1 = UserCreate(**sample_user_data)
        user_1 = user_service.create_user(user_create_1)

        user_create_2 = UserCreate(**sample_user_data_2)
        user_2 = user_service.create_user(user_create_2)

        # Execute
        result = user_service.list_users()

        # Verify
        assert len(result) == 2
        usernames = [user.username for user in result]
        assert sample_user_data["username"] in usernames
        assert sample_user_data_2["username"] in usernames

    def test_i_can_list_users_with_pagination(
        self,
        user_service
    ):
        """Test listing users with pagination."""
        # Create 5 users
        for i in range(5):
            user_data = UserCreateNoValidation(
                username=f"user{i}",
                email=f"user{i}@example.com",
                password="SecureP@ssw0rd123"
            )
            user_service.create_user(user_data)

        # Test skip and limit
        result_page_1 = user_service.list_users(skip=0, limit=2)
        assert len(result_page_1) == 2

        result_page_2 = user_service.list_users(skip=2, limit=2)
        assert len(result_page_2) == 2

        result_page_3 = user_service.list_users(skip=4, limit=2)
        assert len(result_page_3) == 1

        # Verify different users in each page
        page_1_usernames = [user.username for user in result_page_1]
        page_2_usernames = [user.username for user in result_page_2]
        assert page_1_usernames != page_2_usernames

    def test_i_can_count_users(
        self,
        user_service,
        sample_user_data,
        sample_user_data_2
    ):
        """Test counting users."""
        # Initially no users
        assert user_service.count_users() == 0

        # Create first user
        user_create_1 = UserCreate(**sample_user_data)
        user_service.create_user(user_create_1)
        assert user_service.count_users() == 1

        # Create second user
        user_create_2 = UserCreate(**sample_user_data_2)
        user_service.create_user(user_create_2)
        assert user_service.count_users() == 2

    def test_list_users_returns_empty_list_when_no_users(
        self,
        user_service
    ):
        """Test listing users returns empty list when no users exist."""
        # Execute
        result = user_service.list_users()

        # Verify
        assert result == []


class TestUserPreferences:
    """Tests for user preferences methods."""

    def test_i_can_get_user_preference(
        self,
        user_service,
        sample_user_data
    ):
        """Test getting user preference."""
        # Create a user with preferences
        user_create = UserCreate(**sample_user_data)
        created_user = user_service.create_user(user_create)

        # Set a preference
        user_service.set_preference(str(created_user.id), "theme", "dark")

        # Execute
        result = user_service.get_preference(str(created_user.id), "theme")

        # Verify
        assert result == "dark"

    def test_i_can_set_user_preference(
        self,
        user_service,
        sample_user_data
    ):
        """Test setting user preference."""
        # Create a user
        user_create = UserCreate(**sample_user_data)
        created_user = user_service.create_user(user_create)

        # Execute
        result = user_service.set_preference(str(created_user.id), "language", "fr")

        # Verify
        assert result is not None
        assert result.preferences.get("language") == "fr"

        # Verify in database
        updated_user = user_service.get_user_by_id(str(created_user.id))
        assert updated_user.preferences.get("language") == "fr"

    def test_i_cannot_get_preference_for_nonexistent_user(
        self,
        user_service
    ):
        """Test getting preference for nonexistent user returns None."""
        # Execute with random ObjectId
        result = user_service.get_preference(str(ObjectId()), "theme")

        # Verify
        assert result is None

    def test_i_cannot_set_preference_for_nonexistent_user(
        self,
        user_service
    ):
        """Test setting preference for nonexistent user returns None."""
        # Execute with random ObjectId
        result = user_service.set_preference(str(ObjectId()), "theme", "dark")

        # Verify
        assert result is None

    def test_get_preference_returns_none_for_nonexistent_key(
        self,
        user_service,
        sample_user_data
    ):
        """Test getting nonexistent preference key returns None."""
        # Create a user
        user_create = UserCreate(**sample_user_data)
        created_user = user_service.create_user(user_create)

        # Execute
        result = user_service.get_preference(str(created_user.id), "nonexistent_key")

        # Verify
        assert result is None


class TestUserLifecycle:
    """Tests for complete user lifecycle scenarios."""

    def test_complete_user_lifecycle(
        self,
        user_service,
        sample_user_data
    ):
        """Test complete user lifecycle: create → authenticate → update → preferences → delete."""
        # Create user
        user_create = UserCreate(**sample_user_data)
        created_user = user_service.create_user(user_create)
        assert created_user is not None
        assert created_user.username == sample_user_data["username"]

        # Authenticate user
        authenticated_user = user_service.authenticate_user(
            sample_user_data["username"],
            sample_user_data["password"]
        )
        assert authenticated_user is not None
        assert authenticated_user.id == created_user.id

        # Update user
        updated_user = user_service.update_user(
            str(created_user.id),
            UserUpdate(role=UserRole.ADMIN)
        )
        assert updated_user.role == UserRole.ADMIN

        # Set preference
        user_with_pref = user_service.set_preference(
            str(created_user.id),
            "theme",
            "dark"
        )
        assert user_with_pref.preferences.get("theme") == "dark"

        # Get preference
        pref_value = user_service.get_preference(str(created_user.id), "theme")
        assert pref_value == "dark"

        # Delete user
        delete_result = user_service.delete_user(str(created_user.id))
        assert delete_result is True

        # Verify user no longer exists
        deleted_user = user_service.get_user_by_id(str(created_user.id))
        assert deleted_user is None

    def test_user_operations_with_empty_database(
        self,
        user_service
    ):
        """Test user operations when database is empty."""
        # Try to get nonexistent user
        result = user_service.get_user_by_id(str(ObjectId()))
        assert result is None

        # Try to get user by username
        result = user_service.get_user_by_username("nonexistent")
        assert result is None

        # Try to list users
        users = user_service.list_users()
        assert users == []

        # Try to count users
        count = user_service.count_users()
        assert count == 0

        # Try to delete nonexistent user
        delete_result = user_service.delete_user(str(ObjectId()))
        assert delete_result is False

        # Try to check user existence
        exists = user_service.user_exists("nonexistent")
        assert exists is False
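Note: the tests above depend on `user_service`, `sample_user_data`, `sample_user_data_2`, and `UserCreateNoValidation`, which are defined elsewhere in this branch and are not part of this file. The following is a minimal, hypothetical conftest.py sketch of what those fixtures might look like, assuming UserService can be pointed at an in-memory mongomock database; the import paths, constructor signature, and sample values are illustrative assumptions, not the branch's actual fixtures.

# Hypothetical conftest.py sketch (not part of this diff).
# The UserService import path and constructor signature are assumptions.
import mongomock
import pytest

from app.services.user_service import UserService  # assumed import path


@pytest.fixture
def user_service():
    """A UserService backed by a fresh in-memory mongomock database."""
    client = mongomock.MongoClient()
    return UserService(client["test_db"])  # assumed constructor argument


@pytest.fixture
def sample_user_data():
    # Keys match what the tests index: username, email, password.
    return {
        "username": "testuser",
        "email": "testuser@example.com",
        "password": "SecureP@ssw0rd123",
    }


@pytest.fixture
def sample_user_data_2():
    return {
        "username": "testuser2",
        "email": "testuser2@example.com",
        "password": "SecureP@ssw0rd123",
    }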
55
tests/utils/test_pdf_converter.py
Normal file
@@ -0,0 +1,55 @@
import shutil
import tempfile
from pathlib import Path

import pytest

from app.utils.pdf_converter import TextToPdfConverter, ImageToPdfConverter, WordToPdfConverter


@pytest.fixture
def temp_dir():
    """Create a temporary directory for output PDFs."""
    dir_path = tempfile.mkdtemp()
    yield dir_path
    shutil.rmtree(dir_path)


def test_i_can_convert_text_to_pdf(temp_dir):
    input_txt = Path(temp_dir) / "test.txt"
    input_txt.write_text("Hello World!\nThis is a test.")

    converter = TextToPdfConverter(str(input_txt), output_dir=temp_dir)
    converter.convert()

    assert Path(converter.output_path).exists()
    assert str(converter.output_path).endswith(".pdf")


def test_i_can_convert_image_to_pdf(temp_dir):
    from PIL import Image

    input_img = Path(temp_dir) / "image.png"
    image = Image.new("RGB", (100, 100), color="red")
    image.save(input_img)

    converter = ImageToPdfConverter(str(input_img), output_dir=temp_dir)
    converter.convert()

    assert Path(converter.output_path).exists()
    assert str(converter.output_path).endswith(".pdf")


def test_i_can_convert_word_to_pdf(temp_dir):
    import docx

    input_docx = Path(temp_dir) / "document.docx"
    doc = docx.Document()
    doc.add_paragraph("Hello Word!")
    doc.save(input_docx)

    converter = WordToPdfConverter(str(input_docx), output_dir=temp_dir)
    converter.convert()

    assert Path(converter.output_path).exists()
    assert str(converter.output_path).endswith(".pdf")
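As exercised by these tests, each converter takes an input file path plus an output_dir, exposes convert(), and records the result in output_path. A brief usage sketch, with illustrative file names that are not part of the repository:

# Usage sketch based only on the calls exercised in the tests above.
from app.utils.pdf_converter import TextToPdfConverter

converter = TextToPdfConverter("notes.txt", output_dir="converted")  # paths are illustrative
converter.convert()            # writes the PDF into output_dir
print(converter.output_path)   # path of the generated .pdf file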