commit 49d91ec4e9

@@ -0,0 +1,59 @@
name: Build and deploy

on:
  push:
    branches:
      - master
  workflow_dispatch:

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Login to registry
        run: echo "${{ secrets.TOKEN }}" | docker login ${{ secrets.GIT_HOST }} -u ${{ secrets.USERNAME }} --password-stdin

      - name: Build and push app
        run: |
          docker build -t ${{ secrets.GIT_HOST }}/${{ gitea.repository }}:app -f app/Dockerfile .
          docker push ${{ secrets.GIT_HOST }}/${{ gitea.repository }}:app

      - name: Build and push migrations image
        run: |
          docker build -t ${{ secrets.GIT_HOST }}/${{ gitea.repository }}:migrations -f migrations/Dockerfile .
          docker push ${{ secrets.GIT_HOST }}/${{ gitea.repository }}:migrations

  deploy:
    runs-on: ubuntu-latest
    needs: build
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Install SSH key
        uses: webfactory/ssh-agent@v0.9.0
        with:
          ssh-private-key: ${{ secrets.DEPLOY_SSH_KEY }}

      - name: Add host to known_hosts
        run: ssh-keyscan -H ${{ secrets.LXC_HOST }} >> ~/.ssh/known_hosts

      - name: Create remote deployment directory
        run: ssh ${{ secrets.LXC_USER }}@${{ secrets.LXC_HOST }} "mkdir -p /srv/app"

      - name: Deploy docker-compose-ci.yml
        run: scp docker-compose-ci.yml ${{ secrets.LXC_USER }}@${{ secrets.LXC_HOST }}:/srv/app/docker-compose.yml

      - name: Restart services
        run: |
          ssh ${{ secrets.LXC_USER }}@${{ secrets.LXC_HOST }} << 'EOF'
          echo "${{ secrets.TOKEN }}" | docker login ${{ secrets.GIT_HOST }} -u ${{ secrets.USERNAME }} --password-stdin
          docker pull ${{ secrets.GIT_HOST }}/${{ gitea.repository }}:app
          docker pull ${{ secrets.GIT_HOST }}/${{ gitea.repository }}:migrations
          cd /srv/app
          docker compose up -d --force-recreate
          docker image prune -f
          EOF
@@ -0,0 +1,43 @@
name: Test

on:
  push:
    branches:
      - "**"
    paths:
      - '**.py'
      - 'pyproject.toml'
      - 'poetry.lock'
      - 'tests/**'
  pull_request:
    branches:
      - "**"
    paths:
      - '**.py'
      - 'pyproject.toml'
      - 'poetry.lock'
      - 'tests/**'

jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.14'

      - name: Install uv
        run: pip install uv

      - name: Sync dependencies
        run: uv sync --dev

      - name: Run tests
        run: uv run pytest

@@ -160,3 +160,4 @@ cython_debug/
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/

task.instructions.md
README.md

@@ -27,3 +27,14 @@ app/

Add new routers under `app/api/v1`, repositories under `app/repositories`, and keep business rules inside `app/services`.

## Redis analytics cache

Analytics endpoints can use a Redis cache (TTL 120 seconds). The cache is disabled by default, so the service falls back to the database.

1. Start Redis and set the following variables (see the sketch below):
   - `REDIS_ENABLED=true`
   - `REDIS_URL=redis://localhost:6379/0`
   - `ANALYTICS_CACHE_TTL_SECONDS` (optional, defaults to 120)
   - `ANALYTICS_CACHE_BACKOFF_MS` (maximum delay for write/delete retries, defaults to 200)
2. When Redis becomes unavailable, middleware logs the degradation and responses transparently fall back to database queries until connectivity is restored.
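A minimal sketch of the toggle and fallback from Python, assuming the `app.core.config.Settings` and `app.core.cache` modules added in this commit (values shown are the documented defaults, not real credentials):

import os

os.environ["REDIS_ENABLED"] = "true"               # the cache stays off unless this is set
os.environ["REDIS_URL"] = "redis://localhost:6379/0"

from app.core.config import Settings
from app.core.cache import get_cache_client

settings = Settings()                              # re-reads the environment
print(settings.redis_enabled)                      # True
print(settings.analytics_cache_ttl_seconds)        # 120 unless ANALYTICS_CACHE_TTL_SECONDS overrides it

# Until init_cache() has connected (it runs in the FastAPI lifespan), get_cache_client()
# returns None, so the analytics services read straight from the database.
print(get_cache_client())                          # None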
alembic.ini

@@ -1,30 +1,34 @@
[alembic]
script_location = migrations
file_template = %%(year)d%%(month)02d%%(day)02d_%%(hour)02d%%(minute)02d%%(second)d_%%(rev)s_%%(slug)s
prepend_sys_path = .

# SQLAlchemy database URL is injected from app.core.config.Settings (see migrations/env.py).
sqlalchemy.url =

[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[post_write_hooks]
hooks = ruff
ruff.type = exec
ruff.executable = %(here)s/.venv/bin/ruff
ruff.options = format REVISION_SCRIPT_FILENAME

[formatters]
keys = generic

[logger_root]
level = WARN
level = DEBUG
handlers = console

[logger_sqlalchemy]
level = WARN
level = DEBUG
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
level = DEBUG
handlers = console
qualname = alembic
app/api/deps.py

@@ -1,14 +1,38 @@
"""Reusable FastAPI dependencies."""
from collections.abc import AsyncGenerator

from fastapi import Depends
import jwt
from fastapi import Depends, Header, HTTPException, status
from fastapi.security import OAuth2PasswordBearer
from sqlalchemy.ext.asyncio import AsyncSession

from app.core.cache import get_cache_client
from app.core.config import settings
from app.core.database import get_session
from app.core.security import jwt_service, password_hasher
from app.models.user import User
from app.repositories.activity_repo import ActivityRepository
from app.repositories.analytics_repo import AnalyticsRepository
from app.repositories.contact_repo import ContactRepository
from app.repositories.deal_repo import DealRepository
from app.repositories.org_repo import OrganizationRepository
from app.repositories.task_repo import TaskRepository
from app.repositories.user_repo import UserRepository
from app.services.analytics_service import AnalyticsService
from app.services.auth_service import AuthService
from app.services.user_service import UserService
from app.services.activity_service import ActivityService
from app.services.contact_service import ContactService
from app.services.deal_service import DealService
from app.services.organization_service import (
    OrganizationAccessDeniedError,
    OrganizationContext,
    OrganizationContextMissingError,
    OrganizationService,
)
from app.services.task_service import TaskService
from redis.asyncio.client import Redis

oauth2_scheme = OAuth2PasswordBearer(tokenUrl=f"{settings.api_v1_prefix}/auth/token")


async def get_db_session() -> AsyncGenerator[AsyncSession, None]:

@@ -21,8 +45,43 @@ def get_user_repository(session: AsyncSession = Depends(get_db_session)) -> User
    return UserRepository(session=session)


def get_user_service(repo: UserRepository = Depends(get_user_repository)) -> UserService:
    return UserService(user_repository=repo, password_hasher=password_hasher)
def get_organization_repository(session: AsyncSession = Depends(get_db_session)) -> OrganizationRepository:
    return OrganizationRepository(session=session)


def get_deal_repository(session: AsyncSession = Depends(get_db_session)) -> DealRepository:
    return DealRepository(session=session)


def get_contact_repository(session: AsyncSession = Depends(get_db_session)) -> ContactRepository:
    return ContactRepository(session=session)


def get_task_repository(session: AsyncSession = Depends(get_db_session)) -> TaskRepository:
    return TaskRepository(session=session)


def get_activity_repository(session: AsyncSession = Depends(get_db_session)) -> ActivityRepository:
    return ActivityRepository(session=session)


def get_analytics_repository(session: AsyncSession = Depends(get_db_session)) -> AnalyticsRepository:
    return AnalyticsRepository(session=session)


def get_cache_backend() -> Redis | None:
    return get_cache_client()


def get_deal_service(
    repo: DealRepository = Depends(get_deal_repository),
    cache: Redis | None = Depends(get_cache_backend),
) -> DealService:
    return DealService(
        repository=repo,
        cache=cache,
        cache_backoff_ms=settings.analytics_cache_backoff_ms,
    )


def get_auth_service(

@@ -33,3 +92,79 @@ def get_auth_service(
        password_hasher=password_hasher,
        jwt_service=jwt_service,
    )


def get_organization_service(
    repo: OrganizationRepository = Depends(get_organization_repository),
) -> OrganizationService:
    return OrganizationService(repository=repo)


def get_activity_service(
    repo: ActivityRepository = Depends(get_activity_repository),
) -> ActivityService:
    return ActivityService(repository=repo)


def get_analytics_service(
    repo: AnalyticsRepository = Depends(get_analytics_repository),
    cache: Redis | None = Depends(get_cache_backend),
) -> AnalyticsService:
    return AnalyticsService(
        repository=repo,
        cache=cache,
        ttl_seconds=settings.analytics_cache_ttl_seconds,
        backoff_ms=settings.analytics_cache_backoff_ms,
    )


def get_contact_service(
    repo: ContactRepository = Depends(get_contact_repository),
) -> ContactService:
    return ContactService(repository=repo)


def get_task_service(
    task_repo: TaskRepository = Depends(get_task_repository),
    activity_repo: ActivityRepository = Depends(get_activity_repository),
) -> TaskService:
    return TaskService(task_repository=task_repo, activity_repository=activity_repo)


async def get_current_user(
    token: str = Depends(oauth2_scheme),
    repo: UserRepository = Depends(get_user_repository),
) -> User:
    credentials_exception = HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,
        detail="Could not validate credentials",
    )
    try:
        payload = jwt_service.decode(token)
        sub = payload.get("sub")
        if sub is None:
            raise credentials_exception
        scope = payload.get("scope", "access")
        if scope != "access":
            raise credentials_exception
        user_id = int(sub)
    except (jwt.PyJWTError, TypeError, ValueError):
        raise credentials_exception from None

    user = await repo.get_by_id(user_id)
    if user is None:
        raise credentials_exception
    return user


async def get_organization_context(
    x_organization_id: int | None = Header(default=None, alias="X-Organization-Id"),
    current_user: User = Depends(get_current_user),
    service: OrganizationService = Depends(get_organization_service),
) -> OrganizationContext:
    try:
        return await service.get_context(user_id=current_user.id, organization_id=x_organization_id)
    except OrganizationContextMissingError as exc:
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(exc)) from exc
    except OrganizationAccessDeniedError as exc:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)) from exc
@@ -1,9 +1,22 @@
"""Root API router that aggregates versioned routers."""
from fastapi import APIRouter

from app.api.v1 import auth, users
from app.api.v1 import (
    activities,
    analytics,
    auth,
    contacts,
    deals,
    organizations,
    tasks,
)
from app.core.config import settings

api_router = APIRouter()
api_router.include_router(users.router, prefix=settings.api_v1_prefix)
api_router.include_router(auth.router, prefix=settings.api_v1_prefix)
api_router.include_router(organizations.router, prefix=settings.api_v1_prefix)
api_router.include_router(contacts.router, prefix=settings.api_v1_prefix)
api_router.include_router(deals.router, prefix=settings.api_v1_prefix)
api_router.include_router(tasks.router, prefix=settings.api_v1_prefix)
api_router.include_router(activities.router, prefix=settings.api_v1_prefix)
api_router.include_router(analytics.router, prefix=settings.api_v1_prefix)
@@ -1 +1,20 @@
"""Version 1 API routers."""
from . import (
    activities,
    analytics,
    auth,
    contacts,
    deals,
    organizations,
    tasks,
)

__all__ = [
    "activities",
    "analytics",
    "auth",
    "contacts",
    "deals",
    "organizations",
    "tasks",
]
@@ -0,0 +1,75 @@
"""Activity timeline endpoints and payload schemas."""
from __future__ import annotations

from typing import Literal

from fastapi import APIRouter, Depends, HTTPException, Query, status
from pydantic import BaseModel, Field

from app.api.deps import get_activity_service, get_organization_context
from app.models.activity import ActivityRead
from app.services.activity_service import (
    ActivityForbiddenError,
    ActivityListFilters,
    ActivityService,
    ActivityValidationError,
)
from app.services.organization_service import OrganizationContext


class ActivityCommentBody(BaseModel):
    text: str = Field(..., min_length=1, max_length=2000)


class ActivityCommentPayload(BaseModel):
    type: Literal["comment"] = "comment"
    payload: ActivityCommentBody

    def extract_text(self) -> str:
        return self.payload.text.strip()


router = APIRouter(prefix="/deals/{deal_id}/activities", tags=["activities"])


@router.get("/", response_model=list[ActivityRead])
async def list_activities(
    deal_id: int,
    limit: int = Query(50, ge=1, le=200),
    offset: int = Query(0, ge=0),
    context: OrganizationContext = Depends(get_organization_context),
    service: ActivityService = Depends(get_activity_service),
) -> list[ActivityRead]:
    """Fetch paginated activities for the deal within the current organization."""

    filters = ActivityListFilters(deal_id=deal_id, limit=limit, offset=offset)
    try:
        activities = await service.list_activities(filters=filters, context=context)
    except ActivityForbiddenError as exc:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)) from exc

    return [ActivityRead.model_validate(activity) for activity in activities]


@router.post("/", response_model=ActivityRead, status_code=status.HTTP_201_CREATED)
async def create_activity_comment(
    deal_id: int,
    payload: ActivityCommentPayload,
    context: OrganizationContext = Depends(get_organization_context),
    service: ActivityService = Depends(get_activity_service),
) -> ActivityRead:
    """Add a comment to the deal timeline."""

    try:
        activity = await service.add_comment(
            deal_id=deal_id,
            author_id=context.user_id,
            text=payload.extract_text(),
            context=context,
        )
    except ActivityValidationError as exc:
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(exc)) from exc
    except ActivityForbiddenError as exc:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)) from exc

    return ActivityRead.model_validate(activity)
@@ -0,0 +1,95 @@
"""Analytics API endpoints for summaries and funnels."""
from __future__ import annotations

from decimal import Decimal

from fastapi import APIRouter, Depends, Query
from pydantic import BaseModel, ConfigDict, field_serializer

from app.api.deps import get_analytics_service, get_organization_context
from app.models.deal import DealStage, DealStatus
from app.services.analytics_service import AnalyticsService, DealSummary, StageBreakdown
from app.services.organization_service import OrganizationContext


def _decimal_to_str(value: Decimal) -> str:
    normalized = value.normalize()
    return format(normalized, "f")


router = APIRouter(prefix="/analytics", tags=["analytics"])


class StatusSummaryModel(BaseModel):
    model_config = ConfigDict(from_attributes=True)

    status: DealStatus
    count: int
    amount_sum: Decimal

    @field_serializer("amount_sum")
    def serialize_amount_sum(self, value: Decimal) -> str:
        return _decimal_to_str(value)


class WonStatisticsModel(BaseModel):
    model_config = ConfigDict(from_attributes=True)

    count: int
    amount_sum: Decimal
    average_amount: Decimal

    @field_serializer("amount_sum", "average_amount")
    def serialize_decimal_fields(self, value: Decimal) -> str:
        return _decimal_to_str(value)


class NewDealsWindowModel(BaseModel):
    model_config = ConfigDict(from_attributes=True)

    days: int
    count: int


class DealSummaryResponse(BaseModel):
    model_config = ConfigDict(from_attributes=True)

    by_status: list[StatusSummaryModel]
    won: WonStatisticsModel
    new_deals: NewDealsWindowModel
    total_deals: int


class StageBreakdownModel(BaseModel):
    model_config = ConfigDict(from_attributes=True)

    stage: DealStage
    total: int
    by_status: dict[DealStatus, int]
    conversion_to_next: float | None


class DealFunnelResponse(BaseModel):
    stages: list[StageBreakdownModel]


@router.get("/deals/summary", response_model=DealSummaryResponse)
async def deals_summary(
    days: int = Query(30, ge=1, le=180),
    context: OrganizationContext = Depends(get_organization_context),
    service: AnalyticsService = Depends(get_analytics_service),
) -> DealSummaryResponse:
    """Return aggregated deal statistics for the current organization."""

    summary: DealSummary = await service.get_deal_summary(context.organization_id, days=days)
    return DealSummaryResponse.model_validate(summary)


@router.get("/deals/funnel", response_model=DealFunnelResponse)
async def deals_funnel(
    context: OrganizationContext = Depends(get_organization_context),
    service: AnalyticsService = Depends(get_analytics_service),
) -> DealFunnelResponse:
    """Return funnel breakdown by stages and statuses."""

    breakdowns: list[StageBreakdown] = await service.get_deal_funnel(context.organization_id)
    return DealFunnelResponse(stages=[StageBreakdownModel.model_validate(item) for item in breakdowns])
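The `_decimal_to_str` helper serializes Decimal aggregates without trailing zeros or scientific notation. A quick standalone check of that behaviour (the helper is copied here for illustration):

# Illustrative check, not part of the commit; mirrors _decimal_to_str above.
from decimal import Decimal

def decimal_to_str(value: Decimal) -> str:
    return format(value.normalize(), "f")

print(decimal_to_str(Decimal("120.50")))  # 120.5
print(decimal_to_str(Decimal("100.00")))  # 100  (normalize() yields 1E+2, "f" expands it)
print(decimal_to_str(Decimal("0.00")))    # 0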
@@ -1,15 +1,95 @@
"""Authentication API endpoints."""
"""Authentication API endpoints and payloads."""
from __future__ import annotations

from pydantic import BaseModel, EmailStr
from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy import select
from sqlalchemy.exc import IntegrityError

from app.api.deps import get_auth_service, get_user_repository
from app.core.security import password_hasher
from app.models.organization import Organization
from app.models.organization_member import OrganizationMember, OrganizationRole
from app.models.token import LoginRequest, RefreshRequest, TokenResponse
from app.models.user import UserCreate
from app.repositories.user_repo import UserRepository
from app.services.auth_service import AuthService, InvalidCredentialsError, InvalidRefreshTokenError


class RegisterRequest(BaseModel):
    email: EmailStr
    password: str
    name: str
    organization_name: str | None = None

from app.api.deps import get_auth_service
from app.models.token import LoginRequest, TokenResponse
from app.services.auth_service import AuthService, InvalidCredentialsError

router = APIRouter(prefix="/auth", tags=["auth"])


@router.post("/register", response_model=TokenResponse, status_code=status.HTTP_201_CREATED)
async def register_user(
    payload: RegisterRequest,
    repo: UserRepository = Depends(get_user_repository),
    auth_service: AuthService = Depends(get_auth_service),
) -> TokenResponse:
    """Register a new owner along with the first organization and return JWT."""

    existing = await repo.get_by_email(payload.email)
    if existing is not None:
        raise HTTPException(status_code=status.HTTP_409_CONFLICT, detail="User already exists")

    organization: Organization | None = None
    if payload.organization_name:
        existing_org = await repo.session.scalar(
            select(Organization).where(Organization.name == payload.organization_name)
        )
        if existing_org is not None:
            raise HTTPException(
                status_code=status.HTTP_409_CONFLICT,
                detail="Organization already exists",
            )

        organization = Organization(name=payload.organization_name)
        repo.session.add(organization)
        await repo.session.flush()

    user_data = UserCreate(email=payload.email, password=payload.password, name=payload.name)
    hashed_password = password_hasher.hash(payload.password)

    try:
        user = await repo.create(data=user_data, hashed_password=hashed_password)
        if organization is not None:
            membership = OrganizationMember(
                organization_id=organization.id,
                user_id=user.id,
                role=OrganizationRole.OWNER,
            )
            repo.session.add(membership)
        await repo.session.commit()
    except IntegrityError as exc:
        await repo.session.rollback()
        raise HTTPException(
            status_code=status.HTTP_409_CONFLICT,
            detail="Organization or user already exists",
        ) from exc

    await repo.session.refresh(user)
    return auth_service.issue_tokens(user)


@router.post("/login", response_model=TokenResponse)
async def login(
    credentials: LoginRequest,
    service: AuthService = Depends(get_auth_service),
) -> TokenResponse:
    """Authenticate user credentials and issue a JWT."""
    try:
        user = await service.authenticate(credentials.email, credentials.password)
    except InvalidCredentialsError as exc:  # pragma: no cover - thin API
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=str(exc)) from exc
    return service.issue_tokens(user)


@router.post("/token", response_model=TokenResponse)
async def login_for_access_token(
    credentials: LoginRequest,

@@ -19,4 +99,15 @@ async def login_for_access_token(
        user = await service.authenticate(credentials.email, credentials.password)
    except InvalidCredentialsError as exc:  # pragma: no cover - thin API
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=str(exc)) from exc
    return service.create_access_token(user)
    return service.issue_tokens(user)


@router.post("/refresh", response_model=TokenResponse)
async def refresh_tokens(
    payload: RefreshRequest,
    service: AuthService = Depends(get_auth_service),
) -> TokenResponse:
    try:
        return await service.refresh_tokens(payload.refresh_token)
    except InvalidRefreshTokenError as exc:
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=str(exc)) from exc
@@ -0,0 +1,127 @@
"""Contact API endpoints."""
from __future__ import annotations

from fastapi import APIRouter, Depends, HTTPException, Query, status
from pydantic import BaseModel, ConfigDict, EmailStr

from app.api.deps import get_contact_service, get_organization_context
from app.models.contact import ContactCreate, ContactRead
from app.services.contact_service import (
    ContactDeletionError,
    ContactForbiddenError,
    ContactListFilters,
    ContactNotFoundError,
    ContactOrganizationError,
    ContactService,
    ContactUpdateData,
)
from app.services.organization_service import OrganizationContext


class ContactCreatePayload(BaseModel):
    name: str
    email: EmailStr | None = None
    phone: str | None = None
    owner_id: int | None = None

    def to_domain(self, *, organization_id: int, fallback_owner: int) -> ContactCreate:
        return ContactCreate(
            organization_id=organization_id,
            owner_id=self.owner_id or fallback_owner,
            name=self.name,
            email=self.email,
            phone=self.phone,
        )


class ContactUpdatePayload(BaseModel):
    model_config = ConfigDict(extra="forbid")

    name: str | None = None
    email: EmailStr | None = None
    phone: str | None = None

    def to_update_data(self) -> ContactUpdateData:
        dump = self.model_dump(exclude_unset=True)
        return ContactUpdateData(
            name=dump.get("name"),
            email=dump.get("email"),
            phone=dump.get("phone"),
        )


router = APIRouter(prefix="/contacts", tags=["contacts"])


@router.get("/", response_model=list[ContactRead])
async def list_contacts(
    page: int = Query(1, ge=1),
    page_size: int = Query(20, ge=1, le=100),
    search: str | None = Query(default=None, min_length=1),
    owner_id: int | None = None,
    context: OrganizationContext = Depends(get_organization_context),
    service: ContactService = Depends(get_contact_service),
) -> list[ContactRead]:
    filters = ContactListFilters(
        page=page,
        page_size=page_size,
        search=search,
        owner_id=owner_id,
    )
    try:
        contacts = await service.list_contacts(filters=filters, context=context)
    except ContactForbiddenError as exc:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=str(exc)) from exc
    return [ContactRead.model_validate(contact) for contact in contacts]


@router.post("/", response_model=ContactRead, status_code=status.HTTP_201_CREATED)
async def create_contact(
    payload: ContactCreatePayload,
    context: OrganizationContext = Depends(get_organization_context),
    service: ContactService = Depends(get_contact_service),
) -> ContactRead:
    data = payload.to_domain(organization_id=context.organization_id, fallback_owner=context.user_id)
    try:
        contact = await service.create_contact(data, context=context)
    except ContactForbiddenError as exc:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=str(exc)) from exc
    except ContactOrganizationError as exc:
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(exc)) from exc
    return ContactRead.model_validate(contact)


@router.patch("/{contact_id}", response_model=ContactRead)
async def update_contact(
    contact_id: int,
    payload: ContactUpdatePayload,
    context: OrganizationContext = Depends(get_organization_context),
    service: ContactService = Depends(get_contact_service),
) -> ContactRead:
    try:
        contact = await service.get_contact(contact_id, context=context)
        updated = await service.update_contact(contact, payload.to_update_data(), context=context)
    except ContactNotFoundError as exc:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)) from exc
    except ContactForbiddenError as exc:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=str(exc)) from exc
    except ContactOrganizationError as exc:
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(exc)) from exc
    return ContactRead.model_validate(updated)


@router.delete("/{contact_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_contact(
    contact_id: int,
    context: OrganizationContext = Depends(get_organization_context),
    service: ContactService = Depends(get_contact_service),
) -> None:
    try:
        contact = await service.get_contact(contact_id, context=context)
        await service.delete_contact(contact, context=context)
    except ContactNotFoundError as exc:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)) from exc
    except ContactForbiddenError as exc:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=str(exc)) from exc
    except ContactDeletionError as exc:
        raise HTTPException(status_code=status.HTTP_409_CONFLICT, detail=str(exc)) from exc
@@ -0,0 +1,145 @@
"""Deal API endpoints backed by DealService with inline payload schemas."""
from __future__ import annotations

from decimal import Decimal

from fastapi import APIRouter, Depends, HTTPException, Query, status
from pydantic import BaseModel

from app.api.deps import get_deal_repository, get_deal_service, get_organization_context
from app.models.deal import DealCreate, DealRead, DealStage, DealStatus
from app.repositories.deal_repo import DealRepository, DealAccessError, DealQueryParams
from app.services.deal_service import (
    DealService,
    DealStageTransitionError,
    DealStatusValidationError,
    DealUpdateData,
)
from app.services.organization_service import OrganizationContext


class DealCreatePayload(BaseModel):
    contact_id: int
    title: str
    amount: Decimal | None = None
    currency: str | None = None
    owner_id: int | None = None

    def to_domain(self, *, organization_id: int, fallback_owner: int) -> DealCreate:
        return DealCreate(
            organization_id=organization_id,
            contact_id=self.contact_id,
            owner_id=self.owner_id or fallback_owner,
            title=self.title,
            amount=self.amount,
            currency=self.currency,
        )


class DealUpdatePayload(BaseModel):
    status: DealStatus | None = None
    stage: DealStage | None = None
    amount: Decimal | None = None
    currency: str | None = None


router = APIRouter(prefix="/deals", tags=["deals"])


@router.get("/", response_model=list[DealRead])
async def list_deals(
    page: int = Query(1, ge=1),
    page_size: int = Query(20, ge=1, le=100),
    status_filter: list[str] | None = Query(default=None, alias="status"),
    min_amount: Decimal | None = None,
    max_amount: Decimal | None = None,
    stage: str | None = None,
    owner_id: int | None = None,
    order_by: str | None = None,
    order: str | None = Query(default="desc", pattern="^(asc|desc)$"),
    context: OrganizationContext = Depends(get_organization_context),
    repo: DealRepository = Depends(get_deal_repository),
) -> list[DealRead]:
    """List deals for the current organization with optional filters."""

    try:
        statuses_value = [DealStatus(value) for value in status_filter] if status_filter else None
        stage_value = DealStage(stage) if stage else None
    except ValueError as exc:
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid deal filter") from exc

    params = DealQueryParams(
        organization_id=context.organization_id,
        page=page,
        page_size=page_size,
        statuses=statuses_value,
        stage=stage_value,
        owner_id=owner_id,
        min_amount=min_amount,
        max_amount=max_amount,
        order_by=order_by,
        order_desc=(order != "asc"),
    )
    try:
        deals = await repo.list(params=params, role=context.role, user_id=context.user_id)
    except DealAccessError as exc:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=str(exc)) from exc

    return [DealRead.model_validate(deal) for deal in deals]


@router.post("/", response_model=DealRead, status_code=status.HTTP_201_CREATED)
async def create_deal(
    payload: DealCreatePayload,
    context: OrganizationContext = Depends(get_organization_context),
    service: DealService = Depends(get_deal_service),
) -> DealRead:
    """Create a new deal within the current organization."""

    data = payload.to_domain(organization_id=context.organization_id, fallback_owner=context.user_id)
    try:
        deal = await service.create_deal(data, context=context)
    except DealAccessError as exc:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=str(exc)) from exc
    except DealStatusValidationError as exc:  # pragma: no cover - creation shouldn't trigger
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(exc)) from exc

    return DealRead.model_validate(deal)


@router.patch("/{deal_id}", response_model=DealRead)
async def update_deal(
    deal_id: int,
    payload: DealUpdatePayload,
    context: OrganizationContext = Depends(get_organization_context),
    repo: DealRepository = Depends(get_deal_repository),
    service: DealService = Depends(get_deal_service),
) -> DealRead:
    """Update deal status, stage, or financial data."""

    existing = await repo.get(
        deal_id,
        organization_id=context.organization_id,
        role=context.role,
        user_id=context.user_id,
    )
    if existing is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Deal not found")

    updates = DealUpdateData(
        status=payload.status,
        stage=payload.stage,
        amount=payload.amount,
        currency=payload.currency,
    )

    try:
        deal = await service.update_deal(existing, updates, context=context)
    except DealAccessError as exc:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=str(exc)) from exc
    except DealStageTransitionError as exc:
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(exc)) from exc
    except DealStatusValidationError as exc:
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(exc)) from exc

    return DealRead.model_validate(deal)
@@ -0,0 +1,65 @@
"""Organization-related API endpoints."""
from __future__ import annotations

from fastapi import APIRouter, Depends, HTTPException, status
from pydantic import BaseModel, EmailStr

from app.api.deps import (
    get_current_user,
    get_organization_context,
    get_organization_repository,
    get_organization_service,
    get_user_repository,
)
from app.models.organization import OrganizationRead
from app.models.organization_member import OrganizationMemberRead, OrganizationRole
from app.models.user import User
from app.repositories.org_repo import OrganizationRepository
from app.repositories.user_repo import UserRepository
from app.services.organization_service import (
    OrganizationContext,
    OrganizationForbiddenError,
    OrganizationMemberAlreadyExistsError,
    OrganizationService,
)

router = APIRouter(prefix="/organizations", tags=["organizations"])


class AddMemberPayload(BaseModel):
    email: EmailStr
    role: OrganizationRole = OrganizationRole.MEMBER


@router.get("/me", response_model=list[OrganizationRead])
async def list_user_organizations(
    current_user: User = Depends(get_current_user),
    repo: OrganizationRepository = Depends(get_organization_repository),
) -> list[OrganizationRead]:
    """Return organizations the authenticated user belongs to."""

    organizations = await repo.list_for_user(current_user.id)
    return [OrganizationRead.model_validate(org) for org in organizations]


@router.post("/members", response_model=OrganizationMemberRead, status_code=status.HTTP_201_CREATED)
async def add_member_to_organization(
    payload: AddMemberPayload,
    context: OrganizationContext = Depends(get_organization_context),
    service: OrganizationService = Depends(get_organization_service),
    user_repo: UserRepository = Depends(get_user_repository),
) -> OrganizationMemberRead:
    """Allow owners/admins to add existing users to their organization."""

    user = await user_repo.get_by_email(payload.email)
    if user is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="User not found")

    try:
        membership = await service.add_member(context=context, user_id=user.id, role=payload.role)
    except OrganizationMemberAlreadyExistsError as exc:
        raise HTTPException(status_code=status.HTTP_409_CONFLICT, detail=str(exc)) from exc
    except OrganizationForbiddenError as exc:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=str(exc)) from exc

    return OrganizationMemberRead.model_validate(membership)
@@ -0,0 +1,90 @@
"""Task API endpoints with inline schemas."""
from __future__ import annotations

from datetime import date, datetime, time, timezone

from fastapi import APIRouter, Depends, HTTPException, Query, status
from pydantic import BaseModel

from app.api.deps import get_organization_context, get_task_service
from app.models.task import TaskCreate, TaskRead
from app.services.organization_service import OrganizationContext
from app.services.task_service import (
    TaskDueDateError,
    TaskForbiddenError,
    TaskListFilters,
    TaskOrganizationError,
    TaskService,
)


class TaskCreatePayload(BaseModel):
    deal_id: int
    title: str
    description: str | None = None
    due_date: date | None = None

    def to_domain(self) -> TaskCreate:
        return TaskCreate(
            deal_id=self.deal_id,
            title=self.title,
            description=self.description,
            due_date=_date_to_datetime(self.due_date) if self.due_date else None,
        )


def to_range_boundary(value: date | None, *, end_of_day: bool) -> datetime | None:
    """Convert a date query param to an inclusive datetime boundary."""

    if value is None:
        return None
    boundary_time = time(23, 59, 59, 999999) if end_of_day else time(0, 0, 0)
    return datetime.combine(value, boundary_time, tzinfo=timezone.utc)


def _date_to_datetime(value: date) -> datetime:
    return datetime.combine(value, time(0, 0, 0), tzinfo=timezone.utc)


router = APIRouter(prefix="/tasks", tags=["tasks"])


@router.get("/", response_model=list[TaskRead])
async def list_tasks(
    deal_id: int | None = None,
    only_open: bool = False,
    due_before: date | None = Query(default=None),
    due_after: date | None = Query(default=None),
    context: OrganizationContext = Depends(get_organization_context),
    service: TaskService = Depends(get_task_service),
) -> list[TaskRead]:
    """Filter tasks by deal, state, or due date range."""

    filters = TaskListFilters(
        deal_id=deal_id,
        only_open=only_open,
        due_before=to_range_boundary(due_before, end_of_day=True),
        due_after=to_range_boundary(due_after, end_of_day=False),
    )
    tasks = await service.list_tasks(filters=filters, context=context)
    return [TaskRead.model_validate(task) for task in tasks]


@router.post("/", response_model=TaskRead, status_code=status.HTTP_201_CREATED)
async def create_task(
    payload: TaskCreatePayload,
    context: OrganizationContext = Depends(get_organization_context),
    service: TaskService = Depends(get_task_service),
) -> TaskRead:
    """Create a task ensuring due-date and ownership constraints."""

    try:
        task = await service.create_task(payload.to_domain(), context=context)
    except TaskDueDateError as exc:
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(exc)) from exc
    except TaskForbiddenError as exc:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=str(exc)) from exc
    except TaskOrganizationError as exc:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)) from exc

    return TaskRead.model_validate(task)
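For reference, `to_range_boundary` turns a bare query date into an inclusive full-day boundary in UTC. A quick standalone check (dates are illustrative, the helper is copied here so the snippet runs on its own):

# Standalone copy of the helper above, for a quick boundary check.
from datetime import date, datetime, time, timezone

def to_range_boundary(value: date | None, *, end_of_day: bool) -> datetime | None:
    if value is None:
        return None
    boundary_time = time(23, 59, 59, 999999) if end_of_day else time(0, 0, 0)
    return datetime.combine(value, boundary_time, tzinfo=timezone.utc)

print(to_range_boundary(date(2024, 5, 1), end_of_day=True))   # 2024-05-01 23:59:59.999999+00:00 -> due_before covers the whole day
print(to_range_boundary(date(2024, 5, 1), end_of_day=False))  # 2024-05-01 00:00:00+00:00        -> due_after starts at midnight UTC
print(to_range_boundary(None, end_of_day=True))               # None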
@@ -1,37 +0,0 @@
"""User API endpoints."""
from __future__ import annotations

from fastapi import APIRouter, Depends, HTTPException, status

from app.api.deps import get_user_service
from app.models.user import UserCreate, UserRead
from app.services.user_service import UserAlreadyExistsError, UserNotFoundError, UserService

router = APIRouter(prefix="/users", tags=["users"])


@router.get("/", response_model=list[UserRead])
async def list_users(service: UserService = Depends(get_user_service)) -> list[UserRead]:
    users = await service.list_users()
    return [UserRead.model_validate(user) for user in users]


@router.post("/", response_model=UserRead, status_code=status.HTTP_201_CREATED)
async def create_user(
    user_in: UserCreate,
    service: UserService = Depends(get_user_service),
) -> UserRead:
    try:
        user = await service.create_user(user_in)
    except UserAlreadyExistsError as exc:  # pragma: no cover - thin API layer
        raise HTTPException(status_code=status.HTTP_409_CONFLICT, detail=str(exc)) from exc
    return UserRead.model_validate(user)


@router.get("/{user_id}", response_model=UserRead)
async def get_user(user_id: int, service: UserService = Depends(get_user_service)) -> UserRead:
    try:
        user = await service.get_user(user_id)
    except UserNotFoundError as exc:  # pragma: no cover - thin API layer
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)) from exc
    return UserRead.model_validate(user)
@@ -0,0 +1,160 @@
"""Redis cache utilities and availability tracking."""
from __future__ import annotations

import asyncio
import json
import logging
from typing import Any, Awaitable, Callable, Optional

import redis.asyncio as redis
from redis.asyncio.client import Redis
from redis.exceptions import RedisError

from app.core.config import settings

logger = logging.getLogger(__name__)


class RedisCacheManager:
    """Manages lifecycle and availability of the Redis cache client."""

    def __init__(self) -> None:
        self._client: Redis | None = None
        self._available: bool = False
        self._lock = asyncio.Lock()

    @property
    def is_enabled(self) -> bool:
        return settings.redis_enabled

    @property
    def is_available(self) -> bool:
        return self._available and self._client is not None

    def get_client(self) -> Redis | None:
        if not self.is_enabled:
            return None
        if self.is_available:
            return self._client
        return None

    async def startup(self) -> None:
        if not self.is_enabled:
            return
        async with self._lock:
            if self._client is not None:
                return
            self._client = redis.from_url(settings.redis_url, encoding="utf-8", decode_responses=False)
            await self._refresh_availability()

    async def shutdown(self) -> None:
        async with self._lock:
            if self._client is not None:
                await self._client.close()
            self._client = None
            self._available = False

    async def reconnect(self) -> None:
        if not self.is_enabled:
            return
        async with self._lock:
            if self._client is None:
                self._client = redis.from_url(settings.redis_url, encoding="utf-8", decode_responses=False)
            await self._refresh_availability()

    async def _refresh_availability(self) -> None:
        if self._client is None:
            self._available = False
            return
        try:
            await self._client.ping()
        except RedisError as exc:  # pragma: no cover - logging only
            self._available = False
            logger.warning("Redis ping failed: %s", exc)
        else:
            self._available = True

    def mark_unavailable(self) -> None:
        self._available = False

    def mark_available(self) -> None:
        if self._client is not None:
            self._available = True


cache_manager = RedisCacheManager()


async def init_cache() -> None:
    """Initialize Redis cache connection if enabled."""
    await cache_manager.startup()


async def shutdown_cache() -> None:
    """Close Redis cache connection."""
    await cache_manager.shutdown()


def get_cache_client() -> Optional[Redis]:
    """Expose the active Redis client for dependency injection."""
    return cache_manager.get_client()


async def read_json(client: Redis, key: str) -> Any | None:
    """Read and decode JSON payload from Redis."""
    try:
        raw = await client.get(key)
    except RedisError as exc:  # pragma: no cover - network errors
        cache_manager.mark_unavailable()
        logger.debug("Redis GET failed for %s: %s", key, exc)
        return None
    if raw is None:
        return None
    cache_manager.mark_available()
    try:
        return json.loads(raw.decode("utf-8"))
    except (UnicodeDecodeError, json.JSONDecodeError) as exc:  # pragma: no cover - malformed payloads
        logger.warning("Discarding malformed cache entry %s: %s", key, exc)
        return None


async def write_json(client: Redis, key: str, value: Any, ttl_seconds: int, backoff_ms: int) -> None:
    """Serialize data to JSON and store it with TTL using retry/backoff."""
    payload = json.dumps(value, separators=(",", ":"), ensure_ascii=True).encode("utf-8")

    async def _operation() -> Any:
        return await client.set(name=key, value=payload, ex=ttl_seconds)

    await _run_with_retry(_operation, backoff_ms)


async def delete_keys(client: Redis, keys: list[str], backoff_ms: int) -> None:
    """Delete cache keys with retry/backoff semantics."""
    if not keys:
        return

    async def _operation() -> Any:
        return await client.delete(*keys)

    await _run_with_retry(_operation, backoff_ms)


async def _run_with_retry(operation: Callable[[], Awaitable[Any]], max_sleep_ms: int) -> None:
    try:
        await operation()
        cache_manager.mark_available()
        return
    except RedisError as exc:  # pragma: no cover - network errors
        cache_manager.mark_unavailable()
        logger.debug("Redis cache operation failed: %s", exc)
    if max_sleep_ms <= 0:
        return
    sleep_seconds = min(max_sleep_ms / 1000, 0.1)
    await asyncio.sleep(sleep_seconds)
    await cache_manager.reconnect()
    try:
        await operation()
        cache_manager.mark_available()
    except RedisError as exc:  # pragma: no cover - repeated network errors
        cache_manager.mark_unavailable()
        logger.warning("Redis cache operation failed after retry: %s", exc)
@@ -11,14 +11,39 @@ class Settings(BaseSettings):
    project_name: str = "Test Task CRM"
    version: str = "0.1.0"
    api_v1_prefix: str = "/api/v1"
    database_url: str = Field(
        default="postgresql+asyncpg://postgres:postgres@localhost:5432/test_task_crm",
        description="SQLAlchemy async connection string",
    db_host: str = Field(default="localhost", description="Database hostname")
    db_port: int = Field(default=5432, description="Database port")
    db_name: str = Field(default="test_task_crm", description="Database name")
    db_user: str = Field(default="postgres", description="Database user")
    db_password: SecretStr = Field(default=SecretStr("postgres"), description="Database user password")
    database_url_override: str | None = Field(
        default=None,
        alias="DATABASE_URL",
        description="Optional full SQLAlchemy URL override",
    )
    sqlalchemy_echo: bool = False
    jwt_secret_key: SecretStr = Field(default=SecretStr("change-me"))
    jwt_algorithm: str = "HS256"
    access_token_expire_minutes: int = 30
    refresh_token_expire_days: int = 7
    redis_enabled: bool = Field(default=False, description="Toggle Redis-backed cache usage")
    redis_url: str = Field(default="redis://localhost:6379/0", description="Redis connection URL")
    analytics_cache_ttl_seconds: int = Field(default=120, ge=1, description="TTL for cached analytics responses")
    analytics_cache_backoff_ms: int = Field(
        default=200,
        ge=0,
        description="Maximum backoff (ms) for retrying cache writes/invalidation",
    )

    @property
    def database_url(self) -> str:
        if self.database_url_override:
            return self.database_url_override
        password = self.db_password.get_secret_value()
        return (
            f"postgresql+asyncpg://{self.db_user}:{password}@"
            f"{self.db_host}:{self.db_port}/{self.db_name}"
        )


settings = Settings()
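A small sketch of how the assembled URL behaves with the defaults above versus the `DATABASE_URL` override (values are the documented defaults, not real secrets; passing the alias as an init kwarg is assumed to work as with standard pydantic-settings behaviour):

# Illustrative only; mirrors the database_url property defined above.
from app.core.config import Settings

s = Settings()  # no environment overrides
print(s.database_url)
# postgresql+asyncpg://postgres:postgres@localhost:5432/test_task_crm

s = Settings(DATABASE_URL="postgresql+asyncpg://crm:secret@db:5432/crm")  # override wins over db_* parts
print(s.database_url)
# postgresql+asyncpg://crm:secret@db:5432/crm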
@@ -14,4 +14,9 @@ AsyncSessionMaker = async_sessionmaker(bind=engine, expire_on_commit=False)
async def get_session() -> AsyncGenerator[AsyncSession, None]:
    """Yield an async database session for request scope."""
    async with AsyncSessionMaker() as session:
        try:
            yield session
            await session.commit()
        except Exception:  # pragma: no cover - defensive cleanup
            await session.rollback()
            raise
@@ -0,0 +1 @@
"""Application middleware components."""

@@ -0,0 +1,38 @@
"""Middleware that logs cache availability transitions."""
from __future__ import annotations

import logging
from starlette.types import ASGIApp, Receive, Scope, Send

from app.core.cache import cache_manager
from app.core.config import settings

logger = logging.getLogger(__name__)


class CacheAvailabilityMiddleware:
    """Logs when Redis cache becomes unavailable or recovers."""

    def __init__(self, app: ASGIApp) -> None:
        self.app = app
        self._last_state: bool | None = None

    async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
        if scope["type"] == "http" and settings.redis_enabled:
            self._log_transition()
        await self.app(scope, receive, send)

    def _log_transition(self) -> None:
        available = cache_manager.is_available
        if self._last_state is None:
            self._last_state = available
            if not available:
                logger.warning("Redis cache unavailable, serving responses without cache")
            return
        if available == self._last_state:
            return
        if available:
            logger.info("Redis cache connectivity restored; caching re-enabled")
        else:
            logger.warning("Redis cache unavailable, serving responses without cache")
        self._last_state = available
@@ -5,7 +5,7 @@ from datetime import datetime, timedelta, timezone
from typing import Any, Mapping

import jwt
from passlib.context import CryptContext
from passlib.context import CryptContext  # type: ignore

from app.core.config import settings


@@ -14,7 +14,7 @@ class PasswordHasher:
    """Wraps passlib context to hash and verify secrets."""

    def __init__(self) -> None:
        self._context = CryptContext(schemes=["bcrypt"], deprecated="auto")
        self._context = CryptContext(schemes=["pbkdf2_sha256"], deprecated="auto")

    def hash(self, password: str) -> str:
        return self._context.hash(password)
app/main.py

@@ -1,14 +1,39 @@
"""FastAPI application factory."""
from __future__ import annotations

from collections.abc import AsyncIterator
from contextlib import asynccontextmanager

from fastapi import FastAPI

from app.api.routes import api_router
from app.core.cache import init_cache, shutdown_cache
from app.core.config import settings
from app.core.middleware.cache_monitor import CacheAvailabilityMiddleware
from fastapi.middleware.cors import CORSMiddleware


def create_app() -> FastAPI:
    """Build FastAPI application instance."""
    application = FastAPI(title=settings.project_name, version=settings.version)
    @asynccontextmanager
    async def lifespan(_: FastAPI) -> AsyncIterator[None]:
        await init_cache()
        try:
            yield
        finally:
            await shutdown_cache()

    application = FastAPI(title=settings.project_name, version=settings.version, lifespan=lifespan)
    application.include_router(api_router)
    application.add_middleware(CacheAvailabilityMiddleware)
    application.add_middleware(
        CORSMiddleware,
        allow_origins=["https://kitchen-crm.k1nq.tech", "http://192.168.31.51"],
        allow_credentials=True,
        allow_methods=["*"],  # allow all HTTP methods
        allow_headers=["*"],  # allow all headers
    )
    return application
@@ -8,18 +8,34 @@ from typing import Any
from pydantic import BaseModel, ConfigDict, Field
from sqlalchemy import DateTime, Enum as SqlEnum, ForeignKey, Integer, func, text
from sqlalchemy.dialects.postgresql import JSONB
from sqlalchemy.types import JSON as GenericJSON, TypeDecorator
from sqlalchemy.orm import Mapped, mapped_column, relationship

from app.models.base import Base
from app.models.base import Base, enum_values


class ActivityType(StrEnum):
    COMMENT = "comment"
    STATUS_CHANGED = "status_changed"
    STAGE_CHANGED = "stage_changed"
    TASK_CREATED = "task_created"
    SYSTEM = "system"


class JSONBCompat(TypeDecorator):
    """Uses JSONB on Postgres and plain JSON elsewhere for testability."""

    impl = JSONB
    cache_ok = True

    def load_dialect_impl(self, dialect):  # type: ignore[override]
        if dialect.name == "sqlite":
            from sqlalchemy.dialects.sqlite import JSON as SQLiteJSON  # local import

            return dialect.type_descriptor(SQLiteJSON())
        return dialect.type_descriptor(JSONB())


class Activity(Base):
    """Represents a timeline event for a deal."""


@@ -30,11 +46,13 @@ class Activity(Base):
    author_id: Mapped[int | None] = mapped_column(
        ForeignKey("users.id", ondelete="SET NULL"), nullable=True
    )
    type: Mapped[ActivityType] = mapped_column(SqlEnum(ActivityType, name="activity_type"), nullable=False)
    type: Mapped[ActivityType] = mapped_column(
        SqlEnum(ActivityType, name="activity_type", values_callable=enum_values), nullable=False
    )
    payload: Mapped[dict[str, Any]] = mapped_column(
        JSONB,
        JSONBCompat().with_variant(GenericJSON(), "sqlite"),
        nullable=False,
        server_default=text("'{}'::jsonb"),
        server_default=text("'{}'"),
    )
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), server_default=func.now(), nullable=False
@@ -1,6 +1,13 @@
|
|||
"""Declarative base for SQLAlchemy models."""
|
||||
from __future__ import annotations
|
||||
|
||||
from enum import StrEnum
|
||||
from typing import TypeVar
|
||||
|
||||
from sqlalchemy.orm import DeclarativeBase, declared_attr
|
||||
|
||||
EnumT = TypeVar("EnumT", bound=StrEnum)
|
||||
|
||||
|
||||
class Base(DeclarativeBase):
|
||||
"""Base class that configures naming conventions."""
|
||||
|
|
@@ -8,3 +15,9 @@ class Base(DeclarativeBase):
|
|||
@declared_attr.directive
|
||||
def __tablename__(cls) -> str: # type: ignore[misc]
|
||||
return cls.__name__.lower()
|
||||
|
||||
|
||||
def enum_values(enum_cls: type[EnumT]) -> list[str]:
|
||||
"""Return enum member values to keep DB representation stable."""
|
||||
|
||||
return [member.value for member in enum_cls]
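The helper exists because SQLAlchemy's `Enum` persists member *names* by default; passing it via `values_callable` persists the lowercase *values* instead, which is what keeps the database representation stable. A standalone sketch with an illustrative enum (not a project model):

```python
from enum import StrEnum

from sqlalchemy import Enum as SqlEnum

from app.models.base import enum_values


class Color(StrEnum):
    RED = "red"
    BLUE = "blue"


by_name = SqlEnum(Color, name="color_by_name")
by_value = SqlEnum(Color, name="color_by_value", values_callable=enum_values)

print(by_name.enums)   # member names: RED, BLUE
print(by_value.enums)  # member values: red, blue
```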
|
||||
|
|
|
|||
|
|
@@ -9,7 +9,7 @@ from pydantic import BaseModel, ConfigDict
|
|||
from sqlalchemy import DateTime, Enum as SqlEnum, ForeignKey, Integer, Numeric, String, func
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from app.models.base import Base
|
||||
from app.models.base import Base, enum_values
|
||||
|
||||
|
||||
class DealStatus(StrEnum):
|
||||
|
|
@@ -39,10 +39,14 @@ class Deal(Base):
|
|||
amount: Mapped[Decimal | None] = mapped_column(Numeric(12, 2), nullable=True)
|
||||
currency: Mapped[str | None] = mapped_column(String(8), nullable=True)
|
||||
status: Mapped[DealStatus] = mapped_column(
|
||||
SqlEnum(DealStatus, name="deal_status"), nullable=False, default=DealStatus.NEW
|
||||
SqlEnum(DealStatus, name="deal_status", values_callable=enum_values),
|
||||
nullable=False,
|
||||
default=DealStatus.NEW,
|
||||
)
|
||||
stage: Mapped[DealStage] = mapped_column(
|
||||
SqlEnum(DealStage, name="deal_stage"), nullable=False, default=DealStage.QUALIFICATION
|
||||
SqlEnum(DealStage, name="deal_stage", values_callable=enum_values),
|
||||
nullable=False,
|
||||
default=DealStage.QUALIFICATION,
|
||||
)
|
||||
created_at: Mapped[datetime] = mapped_column(
|
||||
DateTime(timezone=True), server_default=func.now(), nullable=False
|
||||
|
|
|
|||
|
|
@@ -8,7 +8,7 @@ from pydantic import BaseModel, ConfigDict
|
|||
from sqlalchemy import DateTime, Enum as SqlEnum, ForeignKey, Integer, UniqueConstraint, func
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from app.models.base import Base
|
||||
from app.models.base import Base, enum_values
|
||||
|
||||
|
||||
class OrganizationRole(StrEnum):
|
||||
|
|
@@ -30,7 +30,11 @@ class OrganizationMember(Base):
|
|||
organization_id: Mapped[int] = mapped_column(ForeignKey("organizations.id", ondelete="CASCADE"))
|
||||
user_id: Mapped[int] = mapped_column(ForeignKey("users.id", ondelete="CASCADE"))
|
||||
role: Mapped[OrganizationRole] = mapped_column(
|
||||
SqlEnum(OrganizationRole, name="organization_role"),
|
||||
SqlEnum(
|
||||
OrganizationRole,
|
||||
name="organization_role",
|
||||
values_callable=enum_values,
|
||||
),
|
||||
nullable=False,
|
||||
default=OrganizationRole.MEMBER,
|
||||
)
|
||||
|
|
|
|||
|
|
@@ -14,10 +14,16 @@ class TokenPayload(BaseModel):
|
|||
|
||||
class TokenResponse(BaseModel):
|
||||
access_token: str
|
||||
refresh_token: str
|
||||
token_type: str = "bearer"
|
||||
expires_in: int
|
||||
refresh_expires_in: int
|
||||
|
||||
|
||||
class LoginRequest(BaseModel):
|
||||
email: EmailStr
|
||||
password: str
|
||||
|
||||
|
||||
class RefreshRequest(BaseModel):
|
||||
refresh_token: str
|
||||
|
|
|
|||
|
|
@@ -0,0 +1,68 @@
|
|||
"""Repository helpers for deal activities."""
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Sequence
|
||||
from dataclasses import dataclass
|
||||
|
||||
from sqlalchemy import Select, select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.models.activity import Activity, ActivityCreate
|
||||
from app.models.deal import Deal
|
||||
|
||||
|
||||
class ActivityOrganizationMismatchError(Exception):
|
||||
"""Raised when a deal/activity pair targets another organization."""
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class ActivityQueryParams:
|
||||
"""Filtering options for fetching activities."""
|
||||
|
||||
organization_id: int
|
||||
deal_id: int
|
||||
limit: int | None = None
|
||||
offset: int = 0
|
||||
|
||||
|
||||
class ActivityRepository:
|
||||
"""Provides CRUD helpers for Activity model."""
|
||||
|
||||
def __init__(self, session: AsyncSession) -> None:
|
||||
self._session = session
|
||||
|
||||
@property
|
||||
def session(self) -> AsyncSession:
|
||||
return self._session
|
||||
|
||||
async def list(self, *, params: ActivityQueryParams) -> Sequence[Activity]:
|
||||
stmt = (
|
||||
select(Activity)
|
||||
.join(Deal, Deal.id == Activity.deal_id)
|
||||
.where(Activity.deal_id == params.deal_id, Deal.organization_id == params.organization_id)
|
||||
.order_by(Activity.created_at)
|
||||
)
|
||||
stmt = self._apply_window(stmt, params)
|
||||
result = await self._session.scalars(stmt)
|
||||
return result.all()
|
||||
|
||||
async def create(self, data: ActivityCreate, *, organization_id: int) -> Activity:
|
||||
deal = await self._session.get(Deal, data.deal_id)
|
||||
if deal is None or deal.organization_id != organization_id:
|
||||
raise ActivityOrganizationMismatchError("Deal belongs to another organization")
|
||||
|
||||
activity = Activity(**data.model_dump())
|
||||
self._session.add(activity)
|
||||
await self._session.flush()
|
||||
return activity
|
||||
|
||||
def _apply_window(
|
||||
self,
|
||||
stmt: Select[tuple[Activity]],
|
||||
params: ActivityQueryParams,
|
||||
) -> Select[tuple[Activity]]:
|
||||
if params.offset:
|
||||
stmt = stmt.offset(params.offset)
|
||||
if params.limit is not None:
|
||||
stmt = stmt.limit(params.limit)
|
||||
return stmt
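A usage sketch for the repository, assuming an `AsyncSession` is already available from the project's session factory; the function itself is illustrative, not part of the repo:

```python
from sqlalchemy.ext.asyncio import AsyncSession

from app.repositories.activity_repo import ActivityQueryParams, ActivityRepository


async def first_page_of_timeline(session: AsyncSession, organization_id: int, deal_id: int):
    repo = ActivityRepository(session)
    params = ActivityQueryParams(
        organization_id=organization_id,
        deal_id=deal_id,
        limit=20,   # window applied by _apply_window
        offset=0,
    )
    # Rows come back ordered by created_at and scoped to the organization
    # via the join on Deal inside list().
    return await repo.list(params=params)
```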
|
||||
|
|
@@ -0,0 +1,93 @@
|
|||
"""Analytics-specific data access helpers."""
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
from decimal import Decimal
|
||||
from typing import Any
|
||||
|
||||
from sqlalchemy import Select, func, select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.models.deal import Deal, DealStage, DealStatus
|
||||
|
||||
|
||||
@dataclass(slots=True, frozen=True)
|
||||
class StatusRollup:
|
||||
status: DealStatus
|
||||
deal_count: int
|
||||
amount_sum: Decimal
|
||||
amount_count: int
|
||||
|
||||
|
||||
@dataclass(slots=True, frozen=True)
|
||||
class StageStatusRollup:
|
||||
stage: DealStage
|
||||
status: DealStatus
|
||||
deal_count: int
|
||||
|
||||
|
||||
class AnalyticsRepository:
|
||||
"""Provides aggregate queries for analytics endpoints."""
|
||||
|
||||
def __init__(self, session: AsyncSession) -> None:
|
||||
self._session = session
|
||||
|
||||
@property
|
||||
def session(self) -> AsyncSession:
|
||||
return self._session
|
||||
|
||||
async def fetch_status_rollup(self, organization_id: int) -> list[StatusRollup]:
|
||||
stmt: Select[tuple[Any, ...]] = (
|
||||
select(
|
||||
Deal.status,
|
||||
func.count(Deal.id),
|
||||
func.coalesce(func.sum(Deal.amount), 0),
|
||||
func.count(Deal.amount),
|
||||
)
|
||||
.where(Deal.organization_id == organization_id)
|
||||
.group_by(Deal.status)
|
||||
)
|
||||
result = await self._session.execute(stmt)
|
||||
rows = result.all()
|
||||
rollup: list[StatusRollup] = []
|
||||
for status, count, amount_sum, amount_count in rows:
|
||||
rollup.append(
|
||||
StatusRollup(
|
||||
status=status,
|
||||
deal_count=int(count or 0),
|
||||
amount_sum=_to_decimal(amount_sum),
|
||||
amount_count=int(amount_count or 0),
|
||||
)
|
||||
)
|
||||
return rollup
|
||||
|
||||
async def count_new_deals_since(self, organization_id: int, threshold: datetime) -> int:
|
||||
stmt = select(func.count(Deal.id)).where(
|
||||
Deal.organization_id == organization_id,
|
||||
Deal.created_at >= threshold,
|
||||
)
|
||||
result = await self._session.execute(stmt)
|
||||
value = result.scalar_one()
|
||||
return int(value or 0)
|
||||
|
||||
async def fetch_stage_status_rollup(self, organization_id: int) -> list[StageStatusRollup]:
|
||||
stmt: Select[tuple[Any, ...]] = (
|
||||
select(Deal.stage, Deal.status, func.count(Deal.id))
|
||||
.where(Deal.organization_id == organization_id)
|
||||
.group_by(Deal.stage, Deal.status)
|
||||
)
|
||||
result = await self._session.execute(stmt)
|
||||
rows = result.all()
|
||||
return [
|
||||
StageStatusRollup(stage=stage, status=status, deal_count=int(count or 0))
|
||||
for stage, status, count in rows
|
||||
]
|
||||
|
||||
|
||||
def _to_decimal(value: Any) -> Decimal:
|
||||
if isinstance(value, Decimal):
|
||||
return value
|
||||
if value is None:
|
||||
return Decimal("0")
|
||||
return Decimal(str(value))
|
||||
|
|
@@ -0,0 +1,126 @@
|
|||
"""Repository helpers for contacts with role-aware access."""
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Mapping, Sequence
|
||||
from dataclasses import dataclass
|
||||
from typing import Any
|
||||
|
||||
from sqlalchemy import Select, func, select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.models.contact import Contact, ContactCreate
|
||||
from app.models.organization_member import OrganizationRole
|
||||
|
||||
|
||||
class ContactAccessError(Exception):
|
||||
"""Raised when attempting operations without sufficient permissions."""
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class ContactQueryParams:
|
||||
"""Filters accepted by contact list queries."""
|
||||
|
||||
organization_id: int
|
||||
page: int = 1
|
||||
page_size: int = 20
|
||||
search: str | None = None
|
||||
owner_id: int | None = None
|
||||
|
||||
|
||||
class ContactRepository:
|
||||
"""Provides CRUD helpers for Contact entities."""
|
||||
|
||||
def __init__(self, session: AsyncSession) -> None:
|
||||
self._session = session
|
||||
|
||||
@property
|
||||
def session(self) -> AsyncSession:
|
||||
return self._session
|
||||
|
||||
async def list(
|
||||
self,
|
||||
*,
|
||||
params: ContactQueryParams,
|
||||
role: OrganizationRole,
|
||||
user_id: int,
|
||||
) -> Sequence[Contact]:
|
||||
stmt: Select[tuple[Contact]] = select(Contact).where(Contact.organization_id == params.organization_id)
|
||||
stmt = self._apply_filters(stmt, params, role, user_id)
|
||||
offset = (max(params.page, 1) - 1) * params.page_size
|
||||
stmt = stmt.order_by(Contact.created_at.desc()).offset(offset).limit(params.page_size)
|
||||
result = await self._session.scalars(stmt)
|
||||
return result.all()
|
||||
|
||||
async def get(
|
||||
self,
|
||||
contact_id: int,
|
||||
*,
|
||||
organization_id: int,
|
||||
role: OrganizationRole,
|
||||
user_id: int,
|
||||
) -> Contact | None:
|
||||
stmt = select(Contact).where(Contact.id == contact_id, Contact.organization_id == organization_id)
|
||||
result = await self._session.scalars(stmt)
|
||||
return result.first()
|
||||
|
||||
async def create(
|
||||
self,
|
||||
data: ContactCreate,
|
||||
*,
|
||||
role: OrganizationRole,
|
||||
user_id: int,
|
||||
) -> Contact:
|
||||
if role == OrganizationRole.MEMBER and data.owner_id != user_id:
|
||||
raise ContactAccessError("Members can only create contacts they own")
|
||||
contact = Contact(**data.model_dump())
|
||||
self._session.add(contact)
|
||||
await self._session.flush()
|
||||
return contact
|
||||
|
||||
async def update(
|
||||
self,
|
||||
contact: Contact,
|
||||
updates: Mapping[str, Any],
|
||||
*,
|
||||
role: OrganizationRole,
|
||||
user_id: int,
|
||||
) -> Contact:
|
||||
if role == OrganizationRole.MEMBER and contact.owner_id != user_id:
|
||||
raise ContactAccessError("Members can only modify their own contacts")
|
||||
for field, value in updates.items():
|
||||
if hasattr(contact, field):
|
||||
setattr(contact, field, value)
|
||||
await self._session.flush()
|
||||
await self._session.refresh(contact)
|
||||
return contact
|
||||
|
||||
async def delete(
|
||||
self,
|
||||
contact: Contact,
|
||||
*,
|
||||
role: OrganizationRole,
|
||||
user_id: int,
|
||||
) -> None:
|
||||
if role == OrganizationRole.MEMBER and contact.owner_id != user_id:
|
||||
raise ContactAccessError("Members can only delete their own contacts")
|
||||
await self._session.delete(contact)
|
||||
await self._session.flush()
|
||||
|
||||
def _apply_filters(
|
||||
self,
|
||||
stmt: Select[tuple[Contact]],
|
||||
params: ContactQueryParams,
|
||||
role: OrganizationRole,
|
||||
user_id: int,
|
||||
) -> Select[tuple[Contact]]:
|
||||
if params.search:
|
||||
pattern = f"%{params.search.lower()}%"
|
||||
stmt = stmt.where(
|
||||
func.lower(Contact.name).like(pattern)
|
||||
| func.lower(func.coalesce(Contact.email, "")).like(pattern)
|
||||
)
|
||||
if params.owner_id is not None:
|
||||
if role == OrganizationRole.MEMBER:
|
||||
raise ContactAccessError("Members cannot filter by owner")
|
||||
stmt = stmt.where(Contact.owner_id == params.owner_id)
|
||||
return stmt
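A sketch of the role check above: a MEMBER who tries to filter by owner is rejected with `ContactAccessError` before any SQL is issued. The session argument is assumed to come from the project's session factory.

```python
from sqlalchemy.ext.asyncio import AsyncSession

from app.models.organization_member import OrganizationRole
from app.repositories.contact_repo import (
    ContactAccessError,
    ContactQueryParams,
    ContactRepository,
)


async def member_cannot_filter_by_owner(session: AsyncSession) -> None:
    repo = ContactRepository(session)
    params = ContactQueryParams(organization_id=1, owner_id=42, search="anna")
    try:
        await repo.list(params=params, role=OrganizationRole.MEMBER, user_id=7)
    except ContactAccessError as exc:
        print(exc)  # Members cannot filter by owner
```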
|
||||
|
|
@@ -0,0 +1,153 @@
|
|||
"""Deal repository with access-aware CRUD helpers."""
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Mapping, Sequence
|
||||
from dataclasses import dataclass
|
||||
from decimal import Decimal
|
||||
from typing import Any
|
||||
|
||||
from sqlalchemy import Select, asc, desc, select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.models.deal import Deal, DealCreate, DealStage, DealStatus
|
||||
from app.models.organization_member import OrganizationRole
|
||||
|
||||
|
||||
ORDERABLE_COLUMNS: dict[str, Any] = {
|
||||
"created_at": Deal.created_at,
|
||||
"amount": Deal.amount,
|
||||
"title": Deal.title,
|
||||
}
|
||||
|
||||
|
||||
class DealAccessError(Exception):
|
||||
"""Raised when a user attempts an operation without sufficient permissions."""
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class DealQueryParams:
|
||||
"""Filters supported by list queries."""
|
||||
|
||||
organization_id: int
|
||||
page: int = 1
|
||||
page_size: int = 20
|
||||
statuses: Sequence[DealStatus] | None = None
|
||||
stage: DealStage | None = None
|
||||
owner_id: int | None = None
|
||||
min_amount: Decimal | None = None
|
||||
max_amount: Decimal | None = None
|
||||
order_by: str | None = None
|
||||
order_desc: bool = True
|
||||
|
||||
|
||||
class DealRepository:
|
||||
"""Provides CRUD helpers for deals with role-aware filtering."""
|
||||
|
||||
def __init__(self, session: AsyncSession) -> None:
|
||||
self._session = session
|
||||
|
||||
@property
|
||||
def session(self) -> AsyncSession:
|
||||
return self._session
|
||||
|
||||
async def list(
|
||||
self,
|
||||
*,
|
||||
params: DealQueryParams,
|
||||
role: OrganizationRole,
|
||||
user_id: int,
|
||||
) -> Sequence[Deal]:
|
||||
stmt = select(Deal).where(Deal.organization_id == params.organization_id)
|
||||
stmt = self._apply_filters(stmt, params, role, user_id)
|
||||
stmt = self._apply_ordering(stmt, params)
|
||||
|
||||
offset = (max(params.page, 1) - 1) * params.page_size
|
||||
stmt = stmt.offset(offset).limit(params.page_size)
|
||||
result = await self._session.scalars(stmt)
|
||||
return result.all()
|
||||
|
||||
async def get(
|
||||
self,
|
||||
deal_id: int,
|
||||
*,
|
||||
organization_id: int,
|
||||
role: OrganizationRole,
|
||||
user_id: int,
|
||||
require_owner: bool = False,
|
||||
) -> Deal | None:
|
||||
stmt = select(Deal).where(Deal.id == deal_id, Deal.organization_id == organization_id)
|
||||
stmt = self._apply_role_clause(stmt, role, user_id, require_owner=require_owner)
|
||||
result = await self._session.scalars(stmt)
|
||||
return result.first()
|
||||
|
||||
async def create(
|
||||
self,
|
||||
data: DealCreate,
|
||||
*,
|
||||
role: OrganizationRole,
|
||||
user_id: int,
|
||||
) -> Deal:
|
||||
if role == OrganizationRole.MEMBER and data.owner_id != user_id:
|
||||
raise DealAccessError("Members can only create deals they own")
|
||||
deal = Deal(**data.model_dump())
|
||||
self._session.add(deal)
|
||||
await self._session.flush()
|
||||
return deal
|
||||
|
||||
async def update(
|
||||
self,
|
||||
deal: Deal,
|
||||
updates: Mapping[str, Any],
|
||||
*,
|
||||
role: OrganizationRole,
|
||||
user_id: int,
|
||||
) -> Deal:
|
||||
if role == OrganizationRole.MEMBER and deal.owner_id != user_id:
|
||||
raise DealAccessError("Members can only modify their own deals")
|
||||
for field, value in updates.items():
|
||||
if hasattr(deal, field):
|
||||
setattr(deal, field, value)
|
||||
await self._session.flush()
|
||||
await self._session.refresh(deal)
|
||||
return deal
|
||||
|
||||
def _apply_filters(
|
||||
self,
|
||||
stmt: Select[tuple[Deal]],
|
||||
params: DealQueryParams,
|
||||
role: OrganizationRole,
|
||||
user_id: int,
|
||||
) -> Select[tuple[Deal]]:
|
||||
if params.statuses:
|
||||
stmt = stmt.where(Deal.status.in_(params.statuses))
|
||||
if params.stage:
|
||||
stmt = stmt.where(Deal.stage == params.stage)
|
||||
if params.owner_id is not None:
|
||||
if role == OrganizationRole.MEMBER and params.owner_id != user_id:
|
||||
raise DealAccessError("Members cannot filter by other owners")
|
||||
stmt = stmt.where(Deal.owner_id == params.owner_id)
|
||||
if params.min_amount is not None:
|
||||
stmt = stmt.where(Deal.amount >= params.min_amount)
|
||||
if params.max_amount is not None:
|
||||
stmt = stmt.where(Deal.amount <= params.max_amount)
|
||||
|
||||
return self._apply_role_clause(stmt, role, user_id)
|
||||
|
||||
def _apply_role_clause(
|
||||
self,
|
||||
stmt: Select[tuple[Deal]],
|
||||
role: OrganizationRole,
|
||||
user_id: int,
|
||||
*,
|
||||
require_owner: bool = False,
|
||||
) -> Select[tuple[Deal]]:
|
||||
if role in {OrganizationRole.OWNER, OrganizationRole.ADMIN, OrganizationRole.MANAGER}:
|
||||
return stmt
|
||||
if require_owner:
|
||||
return stmt.where(Deal.owner_id == user_id)
|
||||
return stmt
|
||||
|
||||
def _apply_ordering(self, stmt: Select[tuple[Deal]], params: DealQueryParams) -> Select[tuple[Deal]]:
|
||||
column = ORDERABLE_COLUMNS.get(params.order_by or "created_at", Deal.created_at)
|
||||
order_func = desc if params.order_desc else asc
|
||||
return stmt.order_by(order_func(column))
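A usage sketch combining the filters and ordering helpers, assuming an `AsyncSession` from the project's session factory; the concrete numbers are placeholders:

```python
from decimal import Decimal

from sqlalchemy.ext.asyncio import AsyncSession

from app.models.deal import DealStatus
from app.models.organization_member import OrganizationRole
from app.repositories.deal_repo import DealQueryParams, DealRepository


async def biggest_new_deals(session: AsyncSession, organization_id: int, user_id: int):
    repo = DealRepository(session)
    params = DealQueryParams(
        organization_id=organization_id,
        page=1,
        page_size=10,
        statuses=[DealStatus.NEW],
        min_amount=Decimal("1000"),
        order_by="amount",   # must be one of ORDERABLE_COLUMNS
        order_desc=True,
    )
    # MANAGER passes _apply_role_clause unrestricted, so no owner filter is added.
    return await repo.list(params=params, role=OrganizationRole.MANAGER, user_id=user_id)
```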
|
||||
|
|
@@ -0,0 +1,62 @@
|
|||
"""Organization repository for database operations."""
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Sequence
|
||||
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.orm import selectinload
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.models.organization import Organization, OrganizationCreate
|
||||
from app.models.organization_member import OrganizationMember
|
||||
|
||||
|
||||
class OrganizationRepository:
|
||||
"""Provides CRUD helpers for Organization model."""
|
||||
|
||||
def __init__(self, session: AsyncSession) -> None:
|
||||
self._session = session
|
||||
|
||||
@property
|
||||
def session(self) -> AsyncSession:
|
||||
return self._session
|
||||
|
||||
async def list(self) -> Sequence[Organization]:
|
||||
result = await self._session.scalars(select(Organization))
|
||||
return result.all()
|
||||
|
||||
async def get_by_id(self, organization_id: int) -> Organization | None:
|
||||
return await self._session.get(Organization, organization_id)
|
||||
|
||||
async def get_by_name(self, name: str) -> Organization | None:
|
||||
stmt = select(Organization).where(Organization.name == name)
|
||||
result = await self._session.scalars(stmt)
|
||||
return result.first()
|
||||
|
||||
async def list_for_user(self, user_id: int) -> Sequence[Organization]:
|
||||
stmt = (
|
||||
select(Organization)
|
||||
.join(OrganizationMember, OrganizationMember.organization_id == Organization.id)
|
||||
.where(OrganizationMember.user_id == user_id)
|
||||
.order_by(Organization.id)
|
||||
)
|
||||
result = await self._session.scalars(stmt)
|
||||
return result.unique().all()
|
||||
|
||||
async def get_membership(self, organization_id: int, user_id: int) -> OrganizationMember | None:
|
||||
stmt = (
|
||||
select(OrganizationMember)
|
||||
.where(
|
||||
OrganizationMember.organization_id == organization_id,
|
||||
OrganizationMember.user_id == user_id,
|
||||
)
|
||||
.options(selectinload(OrganizationMember.organization))
|
||||
)
|
||||
result = await self._session.scalars(stmt)
|
||||
return result.first()
|
||||
|
||||
async def create(self, data: OrganizationCreate) -> Organization:
|
||||
organization = Organization(name=data.name)
|
||||
self._session.add(organization)
|
||||
await self._session.flush()
|
||||
return organization
|
||||
|
|
@@ -0,0 +1,123 @@
|
|||
"""Task repository providing role-aware CRUD helpers."""
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Mapping, Sequence
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
from typing import Any
|
||||
|
||||
from sqlalchemy import Select, select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.orm import selectinload
|
||||
|
||||
from app.models.deal import Deal
|
||||
from app.models.organization_member import OrganizationRole
|
||||
from app.models.task import Task, TaskCreate
|
||||
|
||||
|
||||
class TaskAccessError(Exception):
|
||||
"""Raised when a user attempts to modify a forbidden task."""
|
||||
|
||||
|
||||
class TaskOrganizationMismatchError(Exception):
|
||||
"""Raised when a task or deal belongs to another organization."""
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class TaskQueryParams:
|
||||
"""Filtering options supported by list queries."""
|
||||
|
||||
organization_id: int
|
||||
deal_id: int | None = None
|
||||
only_open: bool = False
|
||||
due_before: datetime | None = None
|
||||
due_after: datetime | None = None
|
||||
|
||||
|
||||
class TaskRepository:
|
||||
"""Encapsulates database access for Task entities."""
|
||||
|
||||
def __init__(self, session: AsyncSession) -> None:
|
||||
self._session = session
|
||||
|
||||
@property
|
||||
def session(self) -> AsyncSession:
|
||||
return self._session
|
||||
|
||||
async def list(self, *, params: TaskQueryParams) -> Sequence[Task]:
|
||||
stmt = (
|
||||
select(Task)
|
||||
.join(Deal, Deal.id == Task.deal_id)
|
||||
.where(Deal.organization_id == params.organization_id)
|
||||
.options(selectinload(Task.deal))
|
||||
.order_by(Task.due_date.is_(None), Task.due_date, Task.id)
|
||||
)
|
||||
stmt = self._apply_filters(stmt, params)
|
||||
result = await self._session.scalars(stmt)
|
||||
return result.all()
|
||||
|
||||
async def get(self, task_id: int, *, organization_id: int) -> Task | None:
|
||||
stmt = (
|
||||
select(Task)
|
||||
.join(Deal, Deal.id == Task.deal_id)
|
||||
.where(Task.id == task_id, Deal.organization_id == organization_id)
|
||||
.options(selectinload(Task.deal))
|
||||
)
|
||||
result = await self._session.scalars(stmt)
|
||||
return result.first()
|
||||
|
||||
async def create(
|
||||
self,
|
||||
data: TaskCreate,
|
||||
*,
|
||||
organization_id: int,
|
||||
role: OrganizationRole,
|
||||
user_id: int,
|
||||
) -> Task:
|
||||
deal = await self._session.get(Deal, data.deal_id)
|
||||
if deal is None or deal.organization_id != organization_id:
|
||||
raise TaskOrganizationMismatchError("Deal belongs to another organization")
|
||||
if role == OrganizationRole.MEMBER and deal.owner_id != user_id:
|
||||
raise TaskAccessError("Members can only create tasks for their own deals")
|
||||
|
||||
task = Task(**data.model_dump())
|
||||
self._session.add(task)
|
||||
await self._session.flush()
|
||||
return task
|
||||
|
||||
async def update(
|
||||
self,
|
||||
task: Task,
|
||||
updates: Mapping[str, Any],
|
||||
*,
|
||||
role: OrganizationRole,
|
||||
user_id: int,
|
||||
) -> Task:
|
||||
owner_id = await self._resolve_task_owner(task)
|
||||
if owner_id is None:
|
||||
raise TaskOrganizationMismatchError("Task is missing an owner context")
|
||||
if role == OrganizationRole.MEMBER and owner_id != user_id:
|
||||
raise TaskAccessError("Members can only modify their own tasks")
|
||||
|
||||
for field, value in updates.items():
|
||||
if hasattr(task, field):
|
||||
setattr(task, field, value)
|
||||
await self._session.flush()
|
||||
return task
|
||||
|
||||
def _apply_filters(self, stmt: Select[tuple[Task]], params: TaskQueryParams) -> Select[tuple[Task]]:
|
||||
if params.deal_id is not None:
|
||||
stmt = stmt.where(Task.deal_id == params.deal_id)
|
||||
if params.only_open:
|
||||
stmt = stmt.where(Task.is_done.is_(False))
|
||||
if params.due_before is not None:
|
||||
stmt = stmt.where(Task.due_date <= params.due_before)
|
||||
if params.due_after is not None:
|
||||
stmt = stmt.where(Task.due_date >= params.due_after)
|
||||
return stmt
|
||||
|
||||
async def _resolve_task_owner(self, task: Task) -> int | None:
|
||||
if task.deal is not None:
|
||||
return task.deal.owner_id
|
||||
stmt = select(Deal.owner_id).where(Deal.id == task.deal_id)
|
||||
return await self._session.scalar(stmt)
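A usage sketch for the task queries, assuming the module above is importable as `app.repositories.task_repo` (the path is not shown in this hunk) and an `AsyncSession` is at hand:

```python
from datetime import datetime, timedelta, timezone

from sqlalchemy.ext.asyncio import AsyncSession

# Assumed module path for the repository defined above.
from app.repositories.task_repo import TaskQueryParams, TaskRepository


async def tasks_due_soon(session: AsyncSession, organization_id: int):
    repo = TaskRepository(session)
    params = TaskQueryParams(
        organization_id=organization_id,
        only_open=True,                                             # is_done == False
        due_before=datetime.now(timezone.utc) + timedelta(days=3),  # due within 3 days
    )
    # Undated tasks sort last because of the due_date.is_(None) ordering key.
    return await repo.list(params=params)
```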
|
||||
|
|
@@ -35,7 +35,7 @@ class UserRepository:
|
|||
user = User(
|
||||
email=data.email,
|
||||
hashed_password=hashed_password,
|
||||
full_name=data.full_name,
|
||||
name=data.name,
|
||||
is_active=data.is_active,
|
||||
)
|
||||
self._session.add(user)
|
||||
|
|
|
|||
|
|
@@ -1 +1,25 @@
|
|||
"""Business logic services."""
|
||||
from .activity_service import ( # noqa: F401
|
||||
ActivityForbiddenError,
|
||||
ActivityListFilters,
|
||||
ActivityService,
|
||||
ActivityServiceError,
|
||||
ActivityValidationError,
|
||||
)
|
||||
from .auth_service import AuthService # noqa: F401
|
||||
from .organization_service import ( # noqa: F401
|
||||
OrganizationAccessDeniedError,
|
||||
OrganizationContext,
|
||||
OrganizationContextMissingError,
|
||||
OrganizationService,
|
||||
)
|
||||
from .task_service import ( # noqa: F401
|
||||
TaskDueDateError,
|
||||
TaskForbiddenError,
|
||||
TaskListFilters,
|
||||
TaskNotFoundError,
|
||||
TaskOrganizationError,
|
||||
TaskService,
|
||||
TaskServiceError,
|
||||
TaskUpdateData,
|
||||
)
|
||||
|
|
@@ -0,0 +1,104 @@
|
|||
"""Business logic for timeline activities."""
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Sequence
|
||||
from dataclasses import dataclass
|
||||
from typing import Any
|
||||
|
||||
from app.models.activity import Activity, ActivityCreate, ActivityType
|
||||
from app.models.deal import Deal
|
||||
from app.repositories.activity_repo import (
|
||||
ActivityOrganizationMismatchError,
|
||||
ActivityQueryParams,
|
||||
ActivityRepository,
|
||||
)
|
||||
from app.services.organization_service import OrganizationContext
|
||||
|
||||
|
||||
class ActivityServiceError(Exception):
|
||||
"""Base class for activity service errors."""
|
||||
|
||||
|
||||
class ActivityValidationError(ActivityServiceError):
|
||||
"""Raised when payload does not satisfy business constraints."""
|
||||
|
||||
|
||||
class ActivityForbiddenError(ActivityServiceError):
|
||||
"""Raised when a user accesses activities from another organization."""
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class ActivityListFilters:
|
||||
"""Filtering helpers for listing activities."""
|
||||
|
||||
deal_id: int
|
||||
limit: int | None = None
|
||||
offset: int = 0
|
||||
|
||||
|
||||
class ActivityService:
|
||||
"""Encapsulates timeline-specific workflows."""
|
||||
|
||||
def __init__(self, repository: ActivityRepository) -> None:
|
||||
self._repository = repository
|
||||
|
||||
async def list_activities(
|
||||
self,
|
||||
*,
|
||||
filters: ActivityListFilters,
|
||||
context: OrganizationContext,
|
||||
) -> Sequence[Activity]:
|
||||
await self._ensure_deal_in_context(filters.deal_id, context)
|
||||
params = ActivityQueryParams(
|
||||
organization_id=context.organization_id,
|
||||
deal_id=filters.deal_id,
|
||||
limit=filters.limit,
|
||||
offset=max(filters.offset, 0),
|
||||
)
|
||||
return await self._repository.list(params=params)
|
||||
|
||||
async def add_comment(
|
||||
self,
|
||||
*,
|
||||
deal_id: int,
|
||||
author_id: int,
|
||||
text: str,
|
||||
context: OrganizationContext,
|
||||
) -> Activity:
|
||||
normalized = text.strip()
|
||||
if not normalized:
|
||||
raise ActivityValidationError("Comment text cannot be empty")
|
||||
return await self.record_activity(
|
||||
deal_id=deal_id,
|
||||
activity_type=ActivityType.COMMENT,
|
||||
payload={"text": normalized},
|
||||
author_id=author_id,
|
||||
context=context,
|
||||
)
|
||||
|
||||
async def record_activity(
|
||||
self,
|
||||
*,
|
||||
deal_id: int,
|
||||
activity_type: ActivityType,
|
||||
context: OrganizationContext,
|
||||
payload: dict[str, Any] | None = None,
|
||||
author_id: int | None = None,
|
||||
) -> Activity:
|
||||
await self._ensure_deal_in_context(deal_id, context)
|
||||
data = ActivityCreate(
|
||||
deal_id=deal_id,
|
||||
author_id=author_id,
|
||||
type=activity_type,
|
||||
payload=payload or {},
|
||||
)
|
||||
try:
|
||||
return await self._repository.create(data, organization_id=context.organization_id)
|
||||
except ActivityOrganizationMismatchError as exc: # pragma: no cover - defensive
|
||||
raise ActivityForbiddenError("Deal belongs to another organization") from exc
|
||||
|
||||
async def _ensure_deal_in_context(self, deal_id: int, context: OrganizationContext) -> Deal:
|
||||
deal = await self._repository.session.get(Deal, deal_id)
|
||||
if deal is None or deal.organization_id != context.organization_id:
|
||||
raise ActivityForbiddenError("Deal not found in current organization")
|
||||
return deal
|
||||
|
|
@@ -0,0 +1,341 @@
|
|||
"""Analytics-related business logic."""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from decimal import Decimal, InvalidOperation
|
||||
from typing import Any, Iterable
|
||||
|
||||
from redis.asyncio.client import Redis
|
||||
from redis.exceptions import RedisError
|
||||
|
||||
from app.core.cache import cache_manager, delete_keys, read_json, write_json
|
||||
from app.models.deal import DealStage, DealStatus
|
||||
from app.repositories.analytics_repo import AnalyticsRepository, StageStatusRollup
|
||||
|
||||
_STAGE_ORDER: list[DealStage] = [
|
||||
DealStage.QUALIFICATION,
|
||||
DealStage.PROPOSAL,
|
||||
DealStage.NEGOTIATION,
|
||||
DealStage.CLOSED,
|
||||
]
|
||||
|
||||
|
||||
@dataclass(slots=True, frozen=True)
|
||||
class StatusSummary:
|
||||
status: DealStatus
|
||||
count: int
|
||||
amount_sum: Decimal
|
||||
|
||||
|
||||
@dataclass(slots=True, frozen=True)
|
||||
class WonStatistics:
|
||||
count: int
|
||||
amount_sum: Decimal
|
||||
average_amount: Decimal
|
||||
|
||||
|
||||
@dataclass(slots=True, frozen=True)
|
||||
class NewDealsWindow:
|
||||
days: int
|
||||
count: int
|
||||
|
||||
|
||||
@dataclass(slots=True, frozen=True)
|
||||
class DealSummary:
|
||||
by_status: list[StatusSummary]
|
||||
won: WonStatistics
|
||||
new_deals: NewDealsWindow
|
||||
total_deals: int
|
||||
|
||||
|
||||
@dataclass(slots=True, frozen=True)
|
||||
class StageBreakdown:
|
||||
stage: DealStage
|
||||
total: int
|
||||
by_status: dict[DealStatus, int]
|
||||
conversion_to_next: float | None
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
_SUMMARY_CACHE_PREFIX = "analytics:summary"
|
||||
_FUNNEL_CACHE_PREFIX = "analytics:funnel"
|
||||
|
||||
|
||||
class AnalyticsService:
|
||||
"""Provides aggregated analytics for deals."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
repository: AnalyticsRepository,
|
||||
cache: Redis | None = None,
|
||||
*,
|
||||
ttl_seconds: int = 0,
|
||||
backoff_ms: int = 0,
|
||||
) -> None:
|
||||
self._repository = repository
|
||||
self._cache = cache
|
||||
self._ttl_seconds = ttl_seconds
|
||||
self._backoff_ms = backoff_ms
|
||||
|
||||
async def get_deal_summary(self, organization_id: int, *, days: int) -> DealSummary:
|
||||
cached = await self._fetch_cached_summary(organization_id, days)
|
||||
if cached is not None:
|
||||
return cached
|
||||
|
||||
status_rollup = await self._repository.fetch_status_rollup(organization_id)
|
||||
status_map = {item.status: item for item in status_rollup}
|
||||
|
||||
summaries: list[StatusSummary] = []
|
||||
total_deals = 0
|
||||
won_amount_sum = Decimal("0")
|
||||
won_amount_count = 0
|
||||
won_count = 0
|
||||
|
||||
for status in DealStatus:
|
||||
row = status_map.get(status)
|
||||
count = row.deal_count if row else 0
|
||||
amount_sum = row.amount_sum if row else Decimal("0")
|
||||
summaries.append(StatusSummary(status=status, count=count, amount_sum=amount_sum))
|
||||
total_deals += count
|
||||
if status is DealStatus.WON and row:
|
||||
won_amount_sum = row.amount_sum
|
||||
won_amount_count = row.amount_count
|
||||
won_count = row.deal_count
|
||||
|
||||
won_average = (
|
||||
(won_amount_sum / won_amount_count) if won_amount_count > 0 else Decimal("0")
|
||||
)
|
||||
|
||||
window_threshold = _threshold_from_days(days)
|
||||
new_deals = await self._repository.count_new_deals_since(organization_id, window_threshold)
|
||||
|
||||
summary = DealSummary(
|
||||
by_status=summaries,
|
||||
won=WonStatistics(
|
||||
count=won_count,
|
||||
amount_sum=won_amount_sum,
|
||||
average_amount=won_average,
|
||||
),
|
||||
new_deals=NewDealsWindow(days=days, count=new_deals),
|
||||
total_deals=total_deals,
|
||||
)
|
||||
|
||||
await self._store_summary_cache(organization_id, days, summary)
|
||||
return summary
|
||||
|
||||
async def get_deal_funnel(self, organization_id: int) -> list[StageBreakdown]:
|
||||
cached = await self._fetch_cached_funnel(organization_id)
|
||||
if cached is not None:
|
||||
return cached
|
||||
|
||||
rollup = await self._repository.fetch_stage_status_rollup(organization_id)
|
||||
stage_map = _build_stage_map(rollup)
|
||||
|
||||
breakdowns: list[StageBreakdown] = []
|
||||
totals = {stage: sum(by_status.values()) for stage, by_status in stage_map.items()}
|
||||
for index, stage in enumerate(_STAGE_ORDER):
|
||||
by_status = stage_map.get(stage, {status: 0 for status in DealStatus})
|
||||
total = totals.get(stage, 0)
|
||||
conversion = None
|
||||
if index < len(_STAGE_ORDER) - 1:
|
||||
next_stage = _STAGE_ORDER[index + 1]
|
||||
next_total = totals.get(next_stage, 0)
|
||||
if total > 0:
|
||||
conversion = float(round((next_total / total) * 100, 2))
|
||||
breakdowns.append(
|
||||
StageBreakdown(
|
||||
stage=stage,
|
||||
total=total,
|
||||
by_status=by_status,
|
||||
conversion_to_next=conversion,
|
||||
)
|
||||
)
|
||||
await self._store_funnel_cache(organization_id, breakdowns)
|
||||
return breakdowns
|
||||
|
||||
def _is_cache_enabled(self) -> bool:
|
||||
return self._cache is not None and self._ttl_seconds > 0
|
||||
|
||||
async def _fetch_cached_summary(self, organization_id: int, days: int) -> DealSummary | None:
|
||||
if not self._is_cache_enabled() or self._cache is None:
|
||||
return None
|
||||
key = _summary_cache_key(organization_id, days)
|
||||
payload = await read_json(self._cache, key)
|
||||
if payload is None:
|
||||
return None
|
||||
return _deserialize_summary(payload)
|
||||
|
||||
async def _store_summary_cache(self, organization_id: int, days: int, summary: DealSummary) -> None:
|
||||
if not self._is_cache_enabled() or self._cache is None:
|
||||
return
|
||||
key = _summary_cache_key(organization_id, days)
|
||||
payload = _serialize_summary(summary)
|
||||
await write_json(self._cache, key, payload, self._ttl_seconds, self._backoff_ms)
|
||||
|
||||
async def _fetch_cached_funnel(self, organization_id: int) -> list[StageBreakdown] | None:
|
||||
if not self._is_cache_enabled() or self._cache is None:
|
||||
return None
|
||||
key = _funnel_cache_key(organization_id)
|
||||
payload = await read_json(self._cache, key)
|
||||
if payload is None:
|
||||
return None
|
||||
return _deserialize_funnel(payload)
|
||||
|
||||
async def _store_funnel_cache(self, organization_id: int, breakdowns: list[StageBreakdown]) -> None:
|
||||
if not self._is_cache_enabled() or self._cache is None:
|
||||
return
|
||||
key = _funnel_cache_key(organization_id)
|
||||
payload = _serialize_funnel(breakdowns)
|
||||
await write_json(self._cache, key, payload, self._ttl_seconds, self._backoff_ms)
|
||||
|
||||
|
||||
def _threshold_from_days(days: int) -> datetime:
|
||||
return datetime.now(timezone.utc) - timedelta(days=days)
|
||||
|
||||
|
||||
def _build_stage_map(rollup: Iterable[StageStatusRollup]) -> dict[DealStage, dict[DealStatus, int]]:
|
||||
stage_map: dict[DealStage, dict[DealStatus, int]] = {
|
||||
stage: {status: 0 for status in DealStatus}
|
||||
for stage in _STAGE_ORDER
|
||||
}
|
||||
for item in rollup:
|
||||
stage_map.setdefault(item.stage, {status: 0 for status in DealStatus})
|
||||
stage_map[item.stage][item.status] = item.deal_count
|
||||
return stage_map
|
||||
|
||||
|
||||
def _summary_cache_key(organization_id: int, days: int) -> str:
|
||||
return f"{_SUMMARY_CACHE_PREFIX}:{organization_id}:{days}"
|
||||
|
||||
|
||||
def summary_cache_pattern(organization_id: int) -> str:
|
||||
return f"{_SUMMARY_CACHE_PREFIX}:{organization_id}:*"
|
||||
|
||||
|
||||
def _funnel_cache_key(organization_id: int) -> str:
|
||||
return f"{_FUNNEL_CACHE_PREFIX}:{organization_id}"
|
||||
|
||||
|
||||
def funnel_cache_key(organization_id: int) -> str:
|
||||
return _funnel_cache_key(organization_id)
|
||||
|
||||
|
||||
def _serialize_summary(summary: DealSummary) -> dict[str, Any]:
|
||||
return {
|
||||
"by_status": [
|
||||
{
|
||||
"status": item.status.value,
|
||||
"count": item.count,
|
||||
"amount_sum": str(item.amount_sum),
|
||||
}
|
||||
for item in summary.by_status
|
||||
],
|
||||
"won": {
|
||||
"count": summary.won.count,
|
||||
"amount_sum": str(summary.won.amount_sum),
|
||||
"average_amount": str(summary.won.average_amount),
|
||||
},
|
||||
"new_deals": {
|
||||
"days": summary.new_deals.days,
|
||||
"count": summary.new_deals.count,
|
||||
},
|
||||
"total_deals": summary.total_deals,
|
||||
}
|
||||
|
||||
|
||||
def _deserialize_summary(payload: Any) -> DealSummary | None:
|
||||
try:
|
||||
by_status_payload = payload["by_status"]
|
||||
won_payload = payload["won"]
|
||||
new_deals_payload = payload["new_deals"]
|
||||
total_deals = int(payload["total_deals"])
|
||||
except (KeyError, TypeError, ValueError):
|
||||
return None
|
||||
|
||||
summaries: list[StatusSummary] = []
|
||||
try:
|
||||
for item in by_status_payload:
|
||||
summaries.append(
|
||||
StatusSummary(
|
||||
status=DealStatus(item["status"]),
|
||||
count=int(item["count"]),
|
||||
amount_sum=Decimal(item["amount_sum"]),
|
||||
)
|
||||
)
|
||||
won = WonStatistics(
|
||||
count=int(won_payload["count"]),
|
||||
amount_sum=Decimal(won_payload["amount_sum"]),
|
||||
average_amount=Decimal(won_payload["average_amount"]),
|
||||
)
|
||||
new_deals = NewDealsWindow(
|
||||
days=int(new_deals_payload["days"]),
|
||||
count=int(new_deals_payload["count"]),
|
||||
)
|
||||
except (KeyError, TypeError, ValueError, InvalidOperation):
|
||||
return None
|
||||
|
||||
return DealSummary(by_status=summaries, won=won, new_deals=new_deals, total_deals=total_deals)
|
||||
|
||||
|
||||
def _serialize_funnel(breakdowns: list[StageBreakdown]) -> list[dict[str, Any]]:
|
||||
serialized: list[dict[str, Any]] = []
|
||||
for item in breakdowns:
|
||||
serialized.append(
|
||||
{
|
||||
"stage": item.stage.value,
|
||||
"total": item.total,
|
||||
"by_status": {status.value: count for status, count in item.by_status.items()},
|
||||
"conversion_to_next": item.conversion_to_next,
|
||||
}
|
||||
)
|
||||
return serialized
|
||||
|
||||
|
||||
def _deserialize_funnel(payload: Any) -> list[StageBreakdown] | None:
|
||||
if not isinstance(payload, list):
|
||||
return None
|
||||
breakdowns: list[StageBreakdown] = []
|
||||
try:
|
||||
for item in payload:
|
||||
by_status_payload = item["by_status"]
|
||||
by_status = {DealStatus(key): int(value) for key, value in by_status_payload.items()}
|
||||
breakdowns.append(
|
||||
StageBreakdown(
|
||||
stage=DealStage(item["stage"]),
|
||||
total=int(item["total"]),
|
||||
by_status=by_status,
|
||||
conversion_to_next=float(item["conversion_to_next"]) if item["conversion_to_next"] is not None else None,
|
||||
)
|
||||
)
|
||||
except (KeyError, TypeError, ValueError):
|
||||
return None
|
||||
return breakdowns
|
||||
|
||||
|
||||
async def invalidate_analytics_cache(cache: Redis | None, organization_id: int, backoff_ms: int) -> None:
|
||||
"""Remove cached analytics payloads for the organization."""
|
||||
|
||||
if cache is None:
|
||||
return
|
||||
|
||||
summary_pattern = summary_cache_pattern(organization_id)
|
||||
keys: list[str] = [funnel_cache_key(organization_id)]
|
||||
try:
|
||||
async for raw_key in cache.scan_iter(match=summary_pattern):
|
||||
if isinstance(raw_key, bytes):
|
||||
keys.append(raw_key.decode("utf-8"))
|
||||
else:
|
||||
keys.append(str(raw_key))
|
||||
except RedisError as exc: # pragma: no cover - network errors
|
||||
cache_manager.mark_unavailable()
|
||||
logger.warning(
|
||||
"Failed to enumerate summary cache keys for organization %s: %s",
|
||||
organization_id,
|
||||
exc,
|
||||
)
|
||||
return
|
||||
|
||||
await delete_keys(cache, keys, backoff_ms)
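A wiring sketch for the service, showing both the cache-less default and a cached instance; the Redis URL, TTL, and backoff values are placeholders, not project settings:

```python
from redis.asyncio import Redis
from sqlalchemy.ext.asyncio import AsyncSession

from app.repositories.analytics_repo import AnalyticsRepository
from app.services.analytics_service import AnalyticsService


def build_analytics_service(session: AsyncSession, cache_enabled: bool) -> AnalyticsService:
    repository = AnalyticsRepository(session)
    if not cache_enabled:
        # ttl_seconds defaults to 0, so _is_cache_enabled() is False and every
        # call goes straight to the database.
        return AnalyticsService(repository)
    cache = Redis.from_url("redis://localhost:6379/0")  # placeholder URL
    return AnalyticsService(repository, cache, ttl_seconds=120, backoff_ms=50)
```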
|
||||
|
|
@@ -2,6 +2,9 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
from typing import Any
|
||||
|
||||
import jwt
|
||||
|
||||
from app.core.config import settings
|
||||
from app.core.security import JWTService, PasswordHasher
|
||||
|
|
@@ -14,6 +17,10 @@ class InvalidCredentialsError(Exception):
|
|||
"""Raised when user authentication fails."""
|
||||
|
||||
|
||||
class InvalidRefreshTokenError(Exception):
|
||||
"""Raised when refresh token validation fails."""
|
||||
|
||||
|
||||
class AuthService:
|
||||
"""Handles authentication flows and token issuance."""
|
||||
|
||||
|
|
@@ -33,11 +40,47 @@ class AuthService:
|
|||
raise InvalidCredentialsError("Invalid email or password")
|
||||
return user
|
||||
|
||||
def create_access_token(self, user: User) -> TokenResponse:
|
||||
expires_delta = timedelta(minutes=settings.access_token_expire_minutes)
|
||||
token = self._jwt_service.create_access_token(
|
||||
def issue_tokens(self, user: User) -> TokenResponse:
|
||||
access_expires = timedelta(minutes=settings.access_token_expire_minutes)
|
||||
refresh_expires = timedelta(days=settings.refresh_token_expire_days)
|
||||
access_token = self._jwt_service.create_access_token(
|
||||
subject=str(user.id),
|
||||
expires_delta=expires_delta,
|
||||
claims={"email": user.email},
|
||||
expires_delta=access_expires,
|
||||
claims={"email": user.email, "scope": "access"},
|
||||
)
|
||||
return TokenResponse(access_token=token, expires_in=int(expires_delta.total_seconds()))
|
||||
refresh_token = self._jwt_service.create_access_token(
|
||||
subject=str(user.id),
|
||||
expires_delta=refresh_expires,
|
||||
claims={"scope": "refresh"},
|
||||
)
|
||||
return TokenResponse(
|
||||
access_token=access_token,
|
||||
refresh_token=refresh_token,
|
||||
expires_in=int(access_expires.total_seconds()),
|
||||
refresh_expires_in=int(refresh_expires.total_seconds()),
|
||||
)
|
||||
|
||||
async def refresh_tokens(self, refresh_token: str) -> TokenResponse:
|
||||
payload = self._decode_refresh_token(refresh_token)
|
||||
sub = payload.get("sub")
|
||||
if sub is None:
|
||||
raise InvalidRefreshTokenError("Invalid refresh token")
|
||||
|
||||
try:
|
||||
user_id = int(sub)
|
||||
except (TypeError, ValueError) as exc: # pragma: no cover - defensive
|
||||
raise InvalidRefreshTokenError("Invalid refresh token") from exc
|
||||
|
||||
user = await self._user_repository.get_by_id(user_id)
|
||||
if user is None:
|
||||
raise InvalidRefreshTokenError("Invalid refresh token")
|
||||
return self.issue_tokens(user)
|
||||
|
||||
def _decode_refresh_token(self, token: str) -> dict[str, Any]:
|
||||
try:
|
||||
payload = self._jwt_service.decode(token)
|
||||
except jwt.PyJWTError as exc:
|
||||
raise InvalidRefreshTokenError("Invalid refresh token") from exc
|
||||
if payload.get("scope") != "refresh":
|
||||
raise InvalidRefreshTokenError("Invalid refresh token")
|
||||
return payload
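A flow sketch for the token pair, assuming an already-constructed `AuthService` instance and an authenticated `user` (the authentication method itself sits outside this hunk):

```python
async def rotate_tokens(auth, user):
    tokens = auth.issue_tokens(user)  # access + refresh JWTs with scope claims

    # Later, exchange the refresh token for a fresh pair. Passing the access
    # token here would fail: its scope claim is "access", not "refresh",
    # so _decode_refresh_token raises InvalidRefreshTokenError.
    renewed = await auth.refresh_tokens(tokens.refresh_token)
    return renewed.expires_in, renewed.refresh_expires_in
```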
|
||||
|
|
|
|||
|
|
@@ -0,0 +1,155 @@
|
|||
"""Business logic for contact workflows."""
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Sequence
|
||||
from dataclasses import dataclass
|
||||
|
||||
from sqlalchemy import select
|
||||
|
||||
from app.models.contact import Contact, ContactCreate
|
||||
from app.models.deal import Deal
|
||||
from app.repositories.contact_repo import ContactAccessError, ContactQueryParams, ContactRepository
|
||||
from app.services.organization_service import OrganizationContext
|
||||
|
||||
|
||||
class ContactServiceError(Exception):
|
||||
"""Base error for contact workflows."""
|
||||
|
||||
|
||||
class ContactNotFoundError(ContactServiceError):
|
||||
"""Raised when contact cannot be found within organization."""
|
||||
|
||||
|
||||
class ContactForbiddenError(ContactServiceError):
|
||||
"""Raised when user lacks permissions for the operation."""
|
||||
|
||||
|
||||
class ContactOrganizationError(ContactServiceError):
|
||||
"""Raised when attempting to operate outside current organization."""
|
||||
|
||||
|
||||
class ContactDeletionError(ContactServiceError):
|
||||
"""Raised when contact cannot be deleted due to business constraints."""
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class ContactListFilters:
|
||||
"""Filters accepted by contact list endpoint."""
|
||||
|
||||
page: int = 1
|
||||
page_size: int = 20
|
||||
search: str | None = None
|
||||
owner_id: int | None = None
|
||||
|
||||
|
||||
class _UnsetType:
|
||||
__slots__ = ()
|
||||
|
||||
|
||||
UNSET = _UnsetType()
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class ContactUpdateData:
|
||||
"""Subset of fields allowed during contact update."""
|
||||
|
||||
name: str | None | _UnsetType = UNSET
|
||||
email: str | None | _UnsetType = UNSET
|
||||
phone: str | None | _UnsetType = UNSET
|
||||
|
||||
|
||||
class ContactService:
|
||||
"""Encapsulates contact-specific business rules."""
|
||||
|
||||
def __init__(self, repository: ContactRepository) -> None:
|
||||
self._repository = repository
|
||||
|
||||
async def list_contacts(
|
||||
self,
|
||||
*,
|
||||
filters: ContactListFilters,
|
||||
context: OrganizationContext,
|
||||
) -> Sequence[Contact]:
|
||||
params = ContactQueryParams(
|
||||
organization_id=context.organization_id,
|
||||
page=filters.page,
|
||||
page_size=filters.page_size,
|
||||
search=filters.search,
|
||||
owner_id=filters.owner_id,
|
||||
)
|
||||
try:
|
||||
return await self._repository.list(params=params, role=context.role, user_id=context.user_id)
|
||||
except ContactAccessError as exc:
|
||||
raise ContactForbiddenError(str(exc)) from exc
|
||||
|
||||
async def create_contact(
|
||||
self,
|
||||
data: ContactCreate,
|
||||
*,
|
||||
context: OrganizationContext,
|
||||
) -> Contact:
|
||||
self._ensure_same_organization(data.organization_id, context)
|
||||
try:
|
||||
return await self._repository.create(data, role=context.role, user_id=context.user_id)
|
||||
except ContactAccessError as exc:
|
||||
raise ContactForbiddenError(str(exc)) from exc
|
||||
|
||||
async def get_contact(
|
||||
self,
|
||||
contact_id: int,
|
||||
*,
|
||||
context: OrganizationContext,
|
||||
) -> Contact:
|
||||
contact = await self._repository.get(
|
||||
contact_id,
|
||||
organization_id=context.organization_id,
|
||||
role=context.role,
|
||||
user_id=context.user_id,
|
||||
)
|
||||
if contact is None:
|
||||
raise ContactNotFoundError("Contact not found")
|
||||
return contact
|
||||
|
||||
async def update_contact(
|
||||
self,
|
||||
contact: Contact,
|
||||
updates: ContactUpdateData,
|
||||
*,
|
||||
context: OrganizationContext,
|
||||
) -> Contact:
|
||||
self._ensure_same_organization(contact.organization_id, context)
|
||||
payload = self._build_update_mapping(updates)
|
||||
if not payload:
|
||||
return contact
|
||||
try:
|
||||
return await self._repository.update(contact, payload, role=context.role, user_id=context.user_id)
|
||||
except ContactAccessError as exc:
|
||||
raise ContactForbiddenError(str(exc)) from exc
|
||||
|
||||
async def delete_contact(self, contact: Contact, *, context: OrganizationContext) -> None:
|
||||
self._ensure_same_organization(contact.organization_id, context)
|
||||
await self._ensure_no_related_deals(contact_id=contact.id)
|
||||
try:
|
||||
await self._repository.delete(contact, role=context.role, user_id=context.user_id)
|
||||
except ContactAccessError as exc:
|
||||
raise ContactForbiddenError(str(exc)) from exc
|
||||
|
||||
def _ensure_same_organization(self, organization_id: int, context: OrganizationContext) -> None:
|
||||
if organization_id != context.organization_id:
|
||||
raise ContactOrganizationError("Contact belongs to another organization")
|
||||
|
||||
def _build_update_mapping(self, updates: ContactUpdateData) -> dict[str, str | None]:
|
||||
payload: dict[str, str | None] = {}
|
||||
if updates.name is not UNSET:
|
||||
payload["name"] = updates.name
|
||||
if updates.email is not UNSET:
|
||||
payload["email"] = updates.email
|
||||
if updates.phone is not UNSET:
|
||||
payload["phone"] = updates.phone
|
||||
return payload
|
||||
|
||||
async def _ensure_no_related_deals(self, contact_id: int) -> None:
|
||||
stmt = select(Deal.id).where(Deal.contact_id == contact_id).limit(1)
|
||||
result = await self._repository.session.scalar(stmt)
|
||||
if result is not None:
|
||||
raise ContactDeletionError("Contact has related deals and cannot be deleted")
|
||||
|
|
@@ -0,0 +1,177 @@
|
|||
"""Business logic for deals."""
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Iterable
|
||||
from dataclasses import dataclass
|
||||
from decimal import Decimal
|
||||
|
||||
from redis.asyncio.client import Redis
|
||||
from sqlalchemy import func, select
|
||||
|
||||
from app.models.activity import Activity, ActivityType
|
||||
from app.models.contact import Contact
|
||||
from app.models.deal import Deal, DealCreate, DealStage, DealStatus
|
||||
from app.models.organization_member import OrganizationRole
|
||||
from app.repositories.deal_repo import DealRepository
|
||||
from app.services.analytics_service import invalidate_analytics_cache
|
||||
from app.services.organization_service import OrganizationContext
|
||||
|
||||
|
||||
STAGE_ORDER = {
|
||||
stage: index
|
||||
for index, stage in enumerate(
|
||||
[
|
||||
DealStage.QUALIFICATION,
|
||||
DealStage.PROPOSAL,
|
||||
DealStage.NEGOTIATION,
|
||||
DealStage.CLOSED,
|
||||
]
|
||||
)
|
||||
}
|
||||
|
||||
|
||||
class DealServiceError(Exception):
|
||||
"""Base class for deal service errors."""
|
||||
|
||||
|
||||
class DealOrganizationMismatchError(DealServiceError):
|
||||
"""Raised when attempting to use resources from another organization."""
|
||||
|
||||
|
||||
class DealStageTransitionError(DealServiceError):
|
||||
"""Raised when stage transition violates business rules."""
|
||||
|
||||
|
||||
class DealStatusValidationError(DealServiceError):
|
||||
"""Raised when invalid status transitions are requested."""
|
||||
|
||||
|
||||
class ContactHasDealsError(DealServiceError):
|
||||
"""Raised when attempting to delete a contact with active deals."""
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class DealUpdateData:
|
||||
"""Structured container for deal update operations."""
|
||||
|
||||
status: DealStatus | None = None
|
||||
stage: DealStage | None = None
|
||||
amount: Decimal | None = None
|
||||
currency: str | None = None
|
||||
|
||||
|
||||
class DealService:
|
||||
"""Encapsulates deal workflows and validations."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
repository: DealRepository,
|
||||
cache: Redis | None = None,
|
||||
*,
|
||||
cache_backoff_ms: int = 0,
|
||||
) -> None:
|
||||
self._repository = repository
|
||||
self._cache = cache
|
||||
self._cache_backoff_ms = cache_backoff_ms
|
||||
|
||||
async def create_deal(self, data: DealCreate, *, context: OrganizationContext) -> Deal:
|
||||
self._ensure_same_organization(data.organization_id, context)
|
||||
await self._ensure_contact_in_organization(data.contact_id, context.organization_id)
|
||||
deal = await self._repository.create(data=data, role=context.role, user_id=context.user_id)
|
||||
await invalidate_analytics_cache(self._cache, context.organization_id, self._cache_backoff_ms)
|
||||
return deal
|
||||
|
||||
async def update_deal(
|
||||
self,
|
||||
deal: Deal,
|
||||
updates: DealUpdateData,
|
||||
*,
|
||||
context: OrganizationContext,
|
||||
) -> Deal:
|
||||
self._ensure_same_organization(deal.organization_id, context)
|
||||
changes: dict[str, object] = {}
|
||||
stage_activity: tuple[ActivityType, dict[str, str]] | None = None
|
||||
status_activity: tuple[ActivityType, dict[str, str]] | None = None
|
||||
|
||||
if updates.amount is not None:
|
||||
changes["amount"] = updates.amount
|
||||
if updates.currency is not None:
|
||||
changes["currency"] = updates.currency
|
||||
|
||||
if updates.stage is not None and updates.stage != deal.stage:
|
||||
self._validate_stage_transition(deal.stage, updates.stage, context.role)
|
||||
changes["stage"] = updates.stage
|
||||
stage_activity = (
|
||||
ActivityType.STAGE_CHANGED,
|
||||
{"old_stage": deal.stage, "new_stage": updates.stage},
|
||||
)
|
||||
|
||||
if updates.status is not None and updates.status != deal.status:
|
||||
self._validate_status_transition(deal, updates)
|
||||
changes["status"] = updates.status
|
||||
status_activity = (
|
||||
ActivityType.STATUS_CHANGED,
|
||||
{"old_status": deal.status, "new_status": updates.status},
|
||||
)
|
||||
|
||||
if not changes:
|
||||
return deal
|
||||
|
||||
updated = await self._repository.update(deal, changes, role=context.role, user_id=context.user_id)
|
||||
await self._log_activities(
|
||||
deal_id=deal.id,
|
||||
author_id=context.user_id,
|
||||
activities=[activity for activity in [stage_activity, status_activity] if activity],
|
||||
)
|
||||
await invalidate_analytics_cache(self._cache, context.organization_id, self._cache_backoff_ms)
|
||||
return updated
|
||||
|
||||
async def ensure_contact_can_be_deleted(self, contact_id: int) -> None:
|
||||
stmt = select(func.count()).select_from(Deal).where(Deal.contact_id == contact_id)
|
||||
count = await self._repository.session.scalar(stmt)
|
||||
if count and count > 0:
|
||||
raise ContactHasDealsError("Contact has related deals and cannot be deleted")
|
||||
|
||||
async def _log_activities(
|
||||
self,
|
||||
*,
|
||||
deal_id: int,
|
||||
author_id: int,
|
||||
activities: Iterable[tuple[ActivityType, dict[str, str]]],
|
||||
) -> None:
|
||||
entries = list(activities)
|
||||
if not entries:
|
||||
return
|
||||
for activity_type, payload in entries:
|
||||
activity = Activity(deal_id=deal_id, author_id=author_id, type=activity_type, payload=payload)
|
||||
self._repository.session.add(activity)
|
||||
await self._repository.session.flush()
|
||||
|
||||
def _ensure_same_organization(self, organization_id: int, context: OrganizationContext) -> None:
|
||||
if organization_id != context.organization_id:
|
||||
raise DealOrganizationMismatchError("Operation targets a different organization")
|
||||
|
||||
async def _ensure_contact_in_organization(self, contact_id: int, organization_id: int) -> Contact:
|
||||
contact = await self._repository.session.get(Contact, contact_id)
|
||||
if contact is None or contact.organization_id != organization_id:
|
||||
raise DealOrganizationMismatchError("Contact belongs to another organization")
|
||||
return contact
|
||||
|
||||
def _validate_stage_transition(
|
||||
self,
|
||||
current_stage: DealStage,
|
||||
new_stage: DealStage,
|
||||
role: OrganizationRole,
|
||||
) -> None:
|
||||
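# STAGE_ORDER (defined or imported earlier in this module) maps each stage to its position in the pipeline; moving to a lower position is a rollback, which only owners and admins may perform.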
if STAGE_ORDER[new_stage] < STAGE_ORDER[current_stage] and role not in {
|
||||
OrganizationRole.OWNER,
|
||||
OrganizationRole.ADMIN,
|
||||
}:
|
||||
raise DealStageTransitionError("Stage rollback requires owner or admin role")
|
||||
|
||||
def _validate_status_transition(self, deal: Deal, updates: DealUpdateData) -> None:
|
||||
if updates.status != DealStatus.WON:
|
||||
return
|
||||
effective_amount = updates.amount if updates.amount is not None else deal.amount
|
||||
if effective_amount is None or Decimal(effective_amount) <= Decimal("0"):
|
||||
raise DealStatusValidationError("Amount must be greater than zero to mark a deal as won")
|
||||
|
|
@ -0,0 +1,116 @@
|
|||
"""Organization-related business rules."""
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
|
||||
from app.models.organization import Organization
|
||||
from app.models.organization_member import OrganizationMember, OrganizationRole
|
||||
from app.repositories.org_repo import OrganizationRepository
|
||||
|
||||
|
||||
class OrganizationServiceError(Exception):
|
||||
"""Base class for organization service errors."""
|
||||
|
||||
|
||||
class OrganizationContextMissingError(OrganizationServiceError):
|
||||
"""Raised when the request lacks organization context."""
|
||||
|
||||
|
||||
class OrganizationAccessDeniedError(OrganizationServiceError):
|
||||
"""Raised when a user tries to work with a foreign organization."""
|
||||
|
||||
|
||||
class OrganizationForbiddenError(OrganizationServiceError):
|
||||
"""Raised when a user does not have enough privileges."""
|
||||
|
||||
|
||||
class OrganizationMemberAlreadyExistsError(OrganizationServiceError):
|
||||
"""Raised when attempting to add a duplicate organization member."""
|
||||
|
||||
|
||||
@dataclass(slots=True, frozen=True)
|
||||
class OrganizationContext:
|
||||
"""Resolved organization and membership information for a request."""
|
||||
|
||||
organization: Organization
|
||||
membership: OrganizationMember
|
||||
|
||||
@property
|
||||
def organization_id(self) -> int:
|
||||
return self.organization.id
|
||||
|
||||
@property
|
||||
def role(self) -> OrganizationRole:
|
||||
return self.membership.role
|
||||
|
||||
@property
|
||||
def user_id(self) -> int:
|
||||
return self.membership.user_id
|
||||
|
||||
|
||||
class OrganizationService:
|
||||
"""Encapsulates organization-specific policies."""
|
||||
|
||||
def __init__(self, repository: OrganizationRepository) -> None:
|
||||
self._repository = repository
|
||||
|
||||
async def get_context(self, *, user_id: int, organization_id: int | None) -> OrganizationContext:
|
||||
"""Resolve request context ensuring the user belongs to the given organization."""
|
||||
|
||||
if organization_id is None:
|
||||
raise OrganizationContextMissingError("X-Organization-Id header is required")
|
||||
|
||||
membership = await self._repository.get_membership(organization_id, user_id)
|
||||
if membership is None or membership.organization is None:
|
||||
raise OrganizationAccessDeniedError("Organization not found")
|
||||
|
||||
return OrganizationContext(organization=membership.organization, membership=membership)
|
||||
|
||||
def ensure_entity_in_context(self, *, entity_organization_id: int, context: OrganizationContext) -> None:
|
||||
"""Make sure a resource belongs to the current organization."""
|
||||
|
||||
if entity_organization_id != context.organization_id:
|
||||
raise OrganizationAccessDeniedError("Resource belongs to another organization")
|
||||
|
||||
def ensure_can_manage_settings(self, context: OrganizationContext) -> None:
|
||||
"""Allow only owner/admin to change organization-level settings."""
|
||||
|
||||
if context.role not in {OrganizationRole.OWNER, OrganizationRole.ADMIN}:
|
||||
raise OrganizationForbiddenError("Only owner/admin can modify organization settings")
|
||||
|
||||
def ensure_can_manage_entity(self, context: OrganizationContext) -> None:
|
||||
"""Managers/admins/owners may manage entities; members are restricted."""
|
||||
|
||||
if context.role == OrganizationRole.MEMBER:
|
||||
raise OrganizationForbiddenError("Members cannot manage shared entities")
|
||||
|
||||
def ensure_member_owns_entity(self, *, context: OrganizationContext, owner_id: int) -> None:
|
||||
"""Members can only mutate entities they own (contacts/deals/tasks)."""
|
||||
|
||||
if context.role == OrganizationRole.MEMBER and owner_id != context.user_id:
|
||||
raise OrganizationForbiddenError("Members can only modify their own records")
|
||||
|
||||
async def add_member(
|
||||
self,
|
||||
*,
|
||||
context: OrganizationContext,
|
||||
user_id: int,
|
||||
role: OrganizationRole,
|
||||
) -> OrganizationMember:
|
||||
"""Add a user to the current organization enforced by permissions."""
|
||||
|
||||
self.ensure_can_manage_settings(context)
|
||||
|
||||
existing = await self._repository.get_membership(context.organization_id, user_id)
|
||||
if existing is not None:
|
||||
raise OrganizationMemberAlreadyExistsError("User already belongs to this organization")
|
||||
|
||||
membership = OrganizationMember(
|
||||
organization_id=context.organization_id,
|
||||
user_id=user_id,
|
||||
role=role,
|
||||
)
|
||||
self._repository.session.add(membership)
|
||||
await self._repository.session.commit()
|
||||
await self._repository.session.refresh(membership)
|
||||
return membership
|
||||
|
|
@ -0,0 +1,186 @@
|
|||
"""Business logic for tasks linked to deals."""
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Mapping, Sequence
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime, timezone
|
||||
from typing import Any
|
||||
|
||||
from app.models.activity import ActivityCreate, ActivityType
|
||||
from app.models.organization_member import OrganizationRole
|
||||
from app.models.task import Task, TaskCreate
|
||||
from app.repositories.activity_repo import ActivityRepository, ActivityOrganizationMismatchError
|
||||
from app.repositories.task_repo import (
|
||||
TaskAccessError as RepoTaskAccessError,
|
||||
TaskOrganizationMismatchError as RepoTaskOrganizationMismatchError,
|
||||
TaskQueryParams,
|
||||
TaskRepository,
|
||||
)
|
||||
from app.services.organization_service import OrganizationContext
|
||||
|
||||
|
||||
class TaskServiceError(Exception):
|
||||
"""Base class for task service errors."""
|
||||
|
||||
|
||||
class TaskDueDateError(TaskServiceError):
|
||||
"""Raised when due_date violates temporal constraints."""
|
||||
|
||||
|
||||
class TaskForbiddenError(TaskServiceError):
|
||||
"""Raised when the user lacks permissions for an operation."""
|
||||
|
||||
|
||||
class TaskOrganizationError(TaskServiceError):
|
||||
"""Raised when a task/deal belongs to another organization."""
|
||||
|
||||
|
||||
class TaskNotFoundError(TaskServiceError):
|
||||
"""Raised when task cannot be located in the current organization."""
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class TaskListFilters:
|
||||
"""Filters accepted by the task listing endpoint."""
|
||||
|
||||
deal_id: int | None = None
|
||||
only_open: bool = False
|
||||
due_before: datetime | None = None
|
||||
due_after: datetime | None = None
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class TaskUpdateData:
|
||||
"""Subset of fields allowed for partial updates."""
|
||||
|
||||
title: str | None = None
|
||||
description: str | None = None
|
||||
due_date: datetime | None = None
|
||||
is_done: bool | None = None
|
||||
|
||||
|
||||
class TaskService:
|
||||
"""Encapsulates task workflows and policy validations."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
task_repository: TaskRepository,
|
||||
activity_repository: ActivityRepository | None = None,
|
||||
) -> None:
|
||||
self._task_repository = task_repository
|
||||
self._activity_repository = activity_repository
|
||||
|
||||
async def list_tasks(
|
||||
self,
|
||||
*,
|
||||
filters: TaskListFilters,
|
||||
context: OrganizationContext,
|
||||
) -> Sequence[Task]:
|
||||
params = TaskQueryParams(
|
||||
organization_id=context.organization_id,
|
||||
deal_id=filters.deal_id,
|
||||
only_open=filters.only_open,
|
||||
due_before=filters.due_before,
|
||||
due_after=filters.due_after,
|
||||
)
|
||||
return await self._task_repository.list(params=params)
|
||||
|
||||
async def get_task(self, task_id: int, *, context: OrganizationContext) -> Task:
|
||||
task = await self._task_repository.get(task_id, organization_id=context.organization_id)
|
||||
if task is None:
|
||||
raise TaskNotFoundError("Task not found")
|
||||
return task
|
||||
|
||||
async def create_task(
|
||||
self,
|
||||
data: TaskCreate,
|
||||
*,
|
||||
context: OrganizationContext,
|
||||
) -> Task:
|
||||
self._validate_due_date(data.due_date)
|
||||
try:
|
||||
task = await self._task_repository.create(
|
||||
data,
|
||||
organization_id=context.organization_id,
|
||||
role=context.role,
|
||||
user_id=context.user_id,
|
||||
)
|
||||
except RepoTaskOrganizationMismatchError as exc:
|
||||
raise TaskOrganizationError("Deal belongs to another organization") from exc
|
||||
except RepoTaskAccessError as exc:
|
||||
raise TaskForbiddenError(str(exc)) from exc
|
||||
|
||||
await self._log_task_created(task, context=context)
|
||||
return task
|
||||
|
||||
async def update_task(
|
||||
self,
|
||||
task_id: int,
|
||||
updates: TaskUpdateData,
|
||||
*,
|
||||
context: OrganizationContext,
|
||||
) -> Task:
|
||||
task = await self.get_task(task_id, context=context)
|
||||
if updates.due_date is not None:
|
||||
self._validate_due_date(updates.due_date)
|
||||
|
||||
payload = self._build_update_mapping(updates)
|
||||
if not payload:
|
||||
return task
|
||||
|
||||
try:
|
||||
return await self._task_repository.update(
|
||||
task,
|
||||
payload,
|
||||
role=context.role,
|
||||
user_id=context.user_id,
|
||||
)
|
||||
except RepoTaskAccessError as exc:
|
||||
raise TaskForbiddenError(str(exc)) from exc
|
||||
|
||||
async def delete_task(self, task_id: int, *, context: OrganizationContext) -> None:
|
||||
task = await self.get_task(task_id, context=context)
|
||||
self._ensure_member_owns_task(task, context)
|
||||
await self._task_repository.session.delete(task)
|
||||
await self._task_repository.session.flush()
|
||||
|
||||
def _ensure_member_owns_task(self, task: Task, context: OrganizationContext) -> None:
|
||||
if context.role != OrganizationRole.MEMBER:
|
||||
return
|
||||
owner_id = task.deal.owner_id if task.deal is not None else None
|
||||
if owner_id is None or owner_id != context.user_id:
|
||||
raise TaskForbiddenError("Members can only modify their own tasks")
|
||||
|
||||
def _validate_due_date(self, due_date: datetime | None) -> None:
|
||||
if due_date is None:
|
||||
return
|
||||
today = datetime.now(timezone.utc).date()
|
||||
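# Naive datetimes are compared as-is; timezone-aware values are normalized to UTC before comparing calendar dates.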
value_date = (due_date.astimezone(timezone.utc) if due_date.tzinfo else due_date).date()
|
||||
if value_date < today:
|
||||
raise TaskDueDateError("Task due date cannot be in the past")
|
||||
|
||||
def _build_update_mapping(self, updates: TaskUpdateData) -> Mapping[str, Any]:
|
||||
payload: dict[str, Any] = {}
|
||||
if updates.title is not None:
|
||||
payload["title"] = updates.title
|
||||
if updates.description is not None:
|
||||
payload["description"] = updates.description
|
||||
if updates.due_date is not None:
|
||||
payload["due_date"] = updates.due_date
|
||||
if updates.is_done is not None:
|
||||
payload["is_done"] = updates.is_done
|
||||
return payload
|
||||
|
||||
async def _log_task_created(self, task: Task, *, context: OrganizationContext) -> None:
|
||||
if self._activity_repository is None:
|
||||
return
|
||||
data = ActivityCreate(
|
||||
deal_id=task.deal_id,
|
||||
author_id=context.user_id,
|
||||
type=ActivityType.TASK_CREATED,
|
||||
payload={"task_id": task.id, "title": task.title},
|
||||
)
|
||||
try:
|
||||
await self._activity_repository.create(data, organization_id=context.organization_id)
|
||||
except ActivityOrganizationMismatchError as exc:  # pragma: no cover - defensive
raise TaskOrganizationError("Activity target does not belong to organization") from exc
|
||||
|
|
@ -1,48 +0,0 @@
|
|||
"""User-related business logic."""
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Sequence
|
||||
|
||||
from app.core.security import PasswordHasher
|
||||
from app.models.user import User, UserCreate
|
||||
from app.repositories.user_repo import UserRepository
|
||||
|
||||
|
||||
class UserServiceError(Exception):
|
||||
"""Base class for user service errors."""
|
||||
|
||||
|
||||
class UserAlreadyExistsError(UserServiceError):
|
||||
"""Raised when attempting to create a user with duplicate email."""
|
||||
|
||||
|
||||
class UserNotFoundError(UserServiceError):
|
||||
"""Raised when user record cannot be located."""
|
||||
|
||||
|
||||
class UserService:
|
||||
"""Encapsulates user-related workflows."""
|
||||
|
||||
def __init__(self, user_repository: UserRepository, password_hasher: PasswordHasher) -> None:
|
||||
self._repository = user_repository
|
||||
self._password_hasher = password_hasher
|
||||
|
||||
async def list_users(self) -> Sequence[User]:
|
||||
return await self._repository.list()
|
||||
|
||||
async def get_user(self, user_id: int) -> User:
|
||||
user = await self._repository.get_by_id(user_id)
|
||||
if user is None:
|
||||
raise UserNotFoundError(f"User {user_id} not found")
|
||||
return user
|
||||
|
||||
async def create_user(self, data: UserCreate) -> User:
|
||||
existing = await self._repository.get_by_email(data.email)
|
||||
if existing is not None:
|
||||
raise UserAlreadyExistsError(f"User {data.email} already exists")
|
||||
|
||||
hashed_password = self._password_hasher.hash(data.password)
|
||||
user = await self._repository.create(data=data, hashed_password=hashed_password)
|
||||
await self._repository.session.commit()
|
||||
await self._repository.session.refresh(user)
|
||||
return user
|
||||
|
|
@ -0,0 +1,91 @@
|
|||
services:
|
||||
app:
|
||||
image: ${GIT_HOST}/${GIT_USER}/${GIT_REPO}:app
|
||||
restart: unless-stopped
|
||||
command: uvicorn app.main:app --host 0.0.0.0 --port 8000
|
||||
env_file:
|
||||
- .env
|
||||
environment:
|
||||
PROJECT_NAME: ${PROJECT_NAME}
|
||||
VERSION: ${VERSION}
|
||||
API_V1_PREFIX: ${API_V1_PREFIX}
|
||||
DB_HOST: postgres
|
||||
DB_PORT: ${DB_PORT}
|
||||
DB_NAME: ${DB_NAME}
|
||||
DB_USER: ${DB_USER}
|
||||
DB_PASSWORD: ${DB_PASSWORD}
|
||||
SQLALCHEMY_ECHO: ${SQLALCHEMY_ECHO}
|
||||
JWT_SECRET_KEY: ${JWT_SECRET_KEY}
|
||||
JWT_ALGORITHM: ${JWT_ALGORITHM}
|
||||
ACCESS_TOKEN_EXPIRE_MINUTES: ${ACCESS_TOKEN_EXPIRE_MINUTES}
|
||||
REFRESH_TOKEN_EXPIRE_DAYS: ${REFRESH_TOKEN_EXPIRE_DAYS}
|
||||
REDIS_ENABLED: ${REDIS_ENABLED}
|
||||
REDIS_URL: redis://redis:6379/0
|
||||
ANALYTICS_CACHE_TTL_SECONDS: ${ANALYTICS_CACHE_TTL_SECONDS}
|
||||
ANALYTICS_CACHE_BACKOFF_MS: ${ANALYTICS_CACHE_BACKOFF_MS}
|
||||
ports:
|
||||
- "80:8000"
|
||||
healthcheck:
|
||||
test: ["CMD", "wget", "-qO-", "http://localhost:8000/health"]
|
||||
interval: 30s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
start_period: 10s
|
||||
depends_on:
|
||||
postgres:
|
||||
condition: service_started
|
||||
redis:
|
||||
condition: service_started
|
||||
migrations:
|
||||
condition: service_completed_successfully
|
||||
|
||||
migrations:
|
||||
image: ${GIT_HOST}/${GIT_USER}/${GIT_REPO}:migrations
|
||||
restart: "no"
|
||||
env_file:
|
||||
- .env
|
||||
environment:
|
||||
DB_HOST: postgres
|
||||
REDIS_URL: redis://redis:6379/0
|
||||
depends_on:
|
||||
postgres:
|
||||
condition: service_started
|
||||
|
||||
postgres:
|
||||
image: postgres:16-alpine
|
||||
environment:
|
||||
POSTGRES_DB: ${DB_NAME}
|
||||
POSTGRES_USER: ${DB_USER}
|
||||
POSTGRES_PASSWORD: ${DB_PASSWORD}
|
||||
ports:
|
||||
- "5432:5432"
|
||||
volumes:
|
||||
- /mnt/data/postgres:/var/lib/postgresql/data
|
||||
restart: unless-stopped
|
||||
healthcheck:
|
||||
test:
|
||||
[
|
||||
"CMD",
|
||||
"pg_isready",
|
||||
"-U",
|
||||
"${DB_USER}",
|
||||
"-d",
|
||||
"${DB_NAME}",
|
||||
]
|
||||
interval: 30s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
start_period: 10s
|
||||
|
||||
redis:
|
||||
image: redis:7-alpine
|
||||
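# Run Redis as a pure in-memory cache: RDB snapshots and AOF persistence are both disabled.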
command: redis-server --save "" --appendonly no
|
||||
restart: unless-stopped
|
||||
ports:
|
||||
- "6379:6379"
|
||||
healthcheck:
|
||||
test: ["CMD", "redis-cli", "ping"]
|
||||
interval: 30s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
start_period: 5s
|
||||
|
|
@ -0,0 +1,51 @@
|
|||
services:
|
||||
app:
|
||||
build:
|
||||
context: .
|
||||
dockerfile: app/Dockerfile
|
||||
command: uvicorn app.main:app --host 0.0.0.0 --port 8000
|
||||
env_file:
|
||||
- .env
|
||||
environment:
|
||||
PROJECT_NAME: ${PROJECT_NAME}
|
||||
VERSION: ${VERSION}
|
||||
API_V1_PREFIX: ${API_V1_PREFIX}
|
||||
DB_HOST: ${DB_HOST:-postgres}
|
||||
DB_PORT: ${DB_PORT}
|
||||
DB_NAME: ${DB_NAME}
|
||||
DB_USER: ${DB_USER}
|
||||
DB_PASSWORD: ${DB_PASSWORD}
|
||||
SQLALCHEMY_ECHO: ${SQLALCHEMY_ECHO}
|
||||
JWT_SECRET_KEY: ${JWT_SECRET_KEY}
|
||||
JWT_ALGORITHM: ${JWT_ALGORITHM}
|
||||
ACCESS_TOKEN_EXPIRE_MINUTES: ${ACCESS_TOKEN_EXPIRE_MINUTES}
|
||||
REFRESH_TOKEN_EXPIRE_DAYS: ${REFRESH_TOKEN_EXPIRE_DAYS}
|
||||
REDIS_ENABLED: ${REDIS_ENABLED}
|
||||
REDIS_URL: ${REDIS_URL:-redis://redis:6379/0}
|
||||
ANALYTICS_CACHE_TTL_SECONDS: ${ANALYTICS_CACHE_TTL_SECONDS}
|
||||
ANALYTICS_CACHE_BACKOFF_MS: ${ANALYTICS_CACHE_BACKOFF_MS}
|
||||
ports:
|
||||
- "8000:8000"
|
||||
depends_on:
|
||||
- postgres
|
||||
- redis
|
||||
|
||||
postgres:
|
||||
image: postgres:16-alpine
|
||||
environment:
|
||||
POSTGRES_DB: ${DB_NAME}
|
||||
POSTGRES_USER: ${DB_USER}
|
||||
POSTGRES_PASSWORD: ${DB_PASSWORD}
|
||||
volumes:
|
||||
- postgres_data:/var/lib/postgresql/data
|
||||
ports:
|
||||
- "5432:5432"
|
||||
|
||||
redis:
|
||||
image: redis:7-alpine
|
||||
command: redis-server --save "" --appendonly no
|
||||
ports:
|
||||
- "6379:6379"
|
||||
|
||||
volumes:
|
||||
postgres_data:
|
||||
|
|
@ -1,28 +0,0 @@
|
|||
services:
|
||||
app:
|
||||
build:
|
||||
context: .
|
||||
dockerfile: app/Dockerfile
|
||||
command: uvicorn app.main:app --host 0.0.0.0 --port 8000
|
||||
env_file:
|
||||
- .env
|
||||
environment:
|
||||
DATABASE_URL: postgresql+asyncpg://postgres:postgres@postgres:5432/test_task_crm
|
||||
ports:
|
||||
- "8000:8000"
|
||||
depends_on:
|
||||
- postgres
|
||||
|
||||
postgres:
|
||||
image: postgres:16-alpine
|
||||
environment:
|
||||
POSTGRES_DB: test_task_crm
|
||||
POSTGRES_USER: postgres
|
||||
POSTGRES_PASSWORD: postgres
|
||||
volumes:
|
||||
- postgres_data:/var/lib/postgresql/data
|
||||
ports:
|
||||
- "5432:5432"
|
||||
|
||||
volumes:
|
||||
postgres_data:
|
||||
main.py
|
|
@ -1,11 +0,0 @@
|
|||
"""Entry point for running the FastAPI application with uvicorn."""
|
||||
import uvicorn
|
||||
|
||||
|
||||
def main() -> None:
|
||||
"""Run development server."""
|
||||
uvicorn.run("app.main:app", host="0.0.0.0", port=8000, reload=True)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
|
@ -0,0 +1,27 @@
|
|||
# syntax=docker/dockerfile:1.7
|
||||
|
||||
FROM ghcr.io/astral-sh/uv:python3.14-alpine AS builder
|
||||
WORKDIR /opt/app
|
||||
|
||||
COPY pyproject.toml uv.lock ./
|
||||
RUN uv sync --frozen --no-dev
|
||||
|
||||
COPY app ./app
|
||||
COPY migrations ./migrations
|
||||
COPY alembic.ini .
|
||||
|
||||
FROM python:3.14-alpine AS runtime
|
||||
|
||||
ENV PYTHONUNBUFFERED=1 PYTHONDONTWRITEBYTECODE=1
|
||||
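# Reuse the virtualenv built in the previous stage; putting its bin directory on PATH makes alembic and the project dependencies available without reinstalling them.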
ENV PATH="/opt/app/.venv/bin:${PATH}"
|
||||
|
||||
WORKDIR /opt/app
|
||||
|
||||
RUN apk add --no-cache libpq
|
||||
|
||||
COPY --from=builder /opt/app/.venv /opt/app/.venv
|
||||
COPY --from=builder /opt/app/app ./app
|
||||
COPY --from=builder /opt/app/migrations ./migrations
|
||||
COPY --from=builder /opt/app/alembic.ini .
|
||||
|
||||
ENTRYPOINT ["alembic", "upgrade", "head"]
|
||||
|
|
@ -28,8 +28,8 @@ def run_migrations_offline() -> None:
|
|||
target_metadata=target_metadata,
|
||||
literal_binds=True,
|
||||
dialect_opts={"paramstyle": "named"},
|
||||
compare_type=True,
|
||||
compare_server_default=True,
|
||||
# compare_type=True,
|
||||
# compare_server_default=True,
|
||||
)
|
||||
|
||||
with context.begin_transaction():
|
||||
|
|
@ -41,8 +41,8 @@ def do_run_migrations(connection: Connection) -> None:
|
|||
context.configure(
|
||||
connection=connection,
|
||||
target_metadata=target_metadata,
|
||||
compare_type=True,
|
||||
compare_server_default=True,
|
||||
# compare_type=True,
|
||||
# compare_server_default=True,
|
||||
)
|
||||
|
||||
with context.begin_transaction():
|
||||
|
|
@ -67,12 +67,7 @@ async def run_migrations_online() -> None:
|
|||
await connectable.dispose()
|
||||
|
||||
|
||||
def main() -> None:
|
||||
if context.is_offline_mode():
|
||||
if context.is_offline_mode():
|
||||
run_migrations_offline()
|
||||
else:
|
||||
else:
|
||||
asyncio.run(run_migrations_online())
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@ -13,20 +13,38 @@ depends_on: tuple[str, ...] | None = None
|
|||
|
||||
def upgrade() -> None:
|
||||
organization_role = sa.Enum(
|
||||
"owner", "admin", "manager", "member", name="organization_role"
|
||||
"owner",
|
||||
"admin",
|
||||
"manager",
|
||||
"member",
|
||||
name="organization_role",
|
||||
create_type=False,
|
||||
)
|
||||
deal_status = sa.Enum(
|
||||
"new",
|
||||
"in_progress",
|
||||
"won",
|
||||
"lost",
|
||||
name="deal_status",
|
||||
create_type=False,
|
||||
)
|
||||
deal_stage = sa.Enum(
|
||||
"qualification",
|
||||
"proposal",
|
||||
"negotiation",
|
||||
"closed",
|
||||
name="deal_stage",
|
||||
create_type=False,
|
||||
)
|
||||
deal_status = sa.Enum("new", "in_progress", "won", "lost", name="deal_status")
|
||||
deal_stage = sa.Enum("qualification", "proposal", "negotiation", "closed", name="deal_stage")
|
||||
activity_type = sa.Enum(
|
||||
"comment", "status_changed", "task_created", "system", name="activity_type"
|
||||
"comment",
|
||||
"status_changed",
|
||||
"task_created",
|
||||
"system",
|
||||
name="activity_type",
|
||||
create_type=False,
|
||||
)
|
||||
|
||||
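# The enum types are created explicitly below with checkfirst=True; create_type=False keeps SQLAlchemy from emitting a second CREATE TYPE when the tables that use them are created.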
bind = op.get_bind()
|
||||
organization_role.create(bind, checkfirst=True)
|
||||
deal_status.create(bind, checkfirst=True)
|
||||
deal_stage.create(bind, checkfirst=True)
|
||||
activity_type.create(bind, checkfirst=True)
|
||||
|
||||
op.create_table(
|
||||
"organizations",
|
||||
sa.Column("id", sa.Integer(), nullable=False),
|
||||
|
|
@ -212,12 +230,36 @@ def downgrade() -> None:
|
|||
op.drop_table("organizations")
|
||||
|
||||
organization_role = sa.Enum(
|
||||
"owner", "admin", "manager", "member", name="organization_role"
|
||||
"owner",
|
||||
"admin",
|
||||
"manager",
|
||||
"member",
|
||||
name="organization_role",
|
||||
create_type=False,
|
||||
)
|
||||
deal_status = sa.Enum(
|
||||
"new",
|
||||
"in_progress",
|
||||
"won",
|
||||
"lost",
|
||||
name="deal_status",
|
||||
create_type=False,
|
||||
)
|
||||
deal_stage = sa.Enum(
|
||||
"qualification",
|
||||
"proposal",
|
||||
"negotiation",
|
||||
"closed",
|
||||
name="deal_stage",
|
||||
create_type=False,
|
||||
)
|
||||
deal_status = sa.Enum("new", "in_progress", "won", "lost", name="deal_status")
|
||||
deal_stage = sa.Enum("qualification", "proposal", "negotiation", "closed", name="deal_stage")
|
||||
activity_type = sa.Enum(
|
||||
"comment", "status_changed", "task_created", "system", name="activity_type"
|
||||
"comment",
|
||||
"status_changed",
|
||||
"task_created",
|
||||
"system",
|
||||
name="activity_type",
|
||||
create_type=False,
|
||||
)
|
||||
|
||||
bind = op.get_bind()
|
||||
@ -0,0 +1,26 @@
|
|||
"""Add stage_changed activity type."""
|
||||
from __future__ import annotations
|
||||
|
||||
from alembic import op
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = "20251127_0002_stage_changed"
|
||||
down_revision: str | None = "20251122_0001"
|
||||
branch_labels: tuple[str, ...] | None = None
|
||||
depends_on: tuple[str, ...] | None = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
op.execute("ALTER TYPE activity_type ADD VALUE IF NOT EXISTS 'stage_changed';")
|
||||
|
||||
|
||||
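# PostgreSQL cannot drop a single enum value, so the downgrade remaps affected rows and rebuilds the type without 'stage_changed'.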
def downgrade() -> None:
|
||||
op.execute("UPDATE activities SET type = 'status_changed' WHERE type = 'stage_changed';")
|
||||
op.execute("ALTER TYPE activity_type RENAME TO activity_type_old;")
|
||||
op.execute(
|
||||
"CREATE TYPE activity_type AS ENUM ('comment','status_changed','task_created','system');"
|
||||
)
|
||||
op.execute(
|
||||
"ALTER TABLE activities ALTER COLUMN type TYPE activity_type USING type::text::activity_type;"
|
||||
)
|
||||
op.execute("DROP TYPE activity_type_old;")
|
||||
|
|
@ -12,6 +12,7 @@ dependencies = [
|
|||
"pyjwt>=2.9.0",
|
||||
"pydantic-settings>=2.12.0",
|
||||
"sqlalchemy>=2.0.44",
|
||||
"redis>=5.2.0",
|
||||
]
|
||||
|
||||
[dependency-groups]
|
||||
|
|
@ -19,4 +20,7 @@ dev = [
|
|||
"isort>=7.0.0",
|
||||
"mypy>=1.18.2",
|
||||
"ruff>=0.14.6",
|
||||
"pytest>=8.3.3",
|
||||
"pytest-asyncio>=0.25.0",
|
||||
"aiosqlite>=0.20.0",
|
||||
]
|
||||
@ -0,0 +1,65 @@
|
|||
TRUNCATE TABLE activities CASCADE;
|
||||
TRUNCATE TABLE contacts CASCADE;
|
||||
TRUNCATE TABLE deals CASCADE;
|
||||
TRUNCATE TABLE organization_members CASCADE;
|
||||
TRUNCATE TABLE organizations CASCADE;
|
||||
TRUNCATE TABLE tasks CASCADE;
|
||||
TRUNCATE TABLE users CASCADE;
|
||||
|
||||
-- Users
|
||||
INSERT INTO users (id, email, hashed_password, name, is_active, created_at)
|
||||
VALUES
|
||||
(1, 'owner@example.com', 'pbkdf2_sha256$260000$demo$Tk5YEtPJj6..', 'Alice Owner', TRUE, now()),
|
||||
(2, 'manager@example.com', 'pbkdf2_sha256$260000$demo$Tk5YEtPJj6..', 'Bob Manager', TRUE, now()),
|
||||
(3, 'member@example.com', 'pbkdf2_sha256$260000$demo$Tk5YEtPJj6..', 'Carol Member', TRUE, now());
|
||||
|
||||
-- Organizations
|
||||
INSERT INTO organizations (id, name, created_at)
|
||||
VALUES
|
||||
(1, 'Acme Corp', now()),
|
||||
(2, 'Beta LLC', now());
|
||||
|
||||
-- Organization members
|
||||
INSERT INTO organization_members (id, organization_id, user_id, role, created_at)
|
||||
VALUES
|
||||
(1, 1, 1, 'owner', now()),
|
||||
(2, 1, 2, 'manager', now()),
|
||||
(3, 1, 3, 'member', now()),
|
||||
(4, 2, 2, 'owner', now());
|
||||
|
||||
-- Contacts (within organization 1)
|
||||
INSERT INTO contacts (id, organization_id, owner_id, name, email, phone, created_at)
|
||||
VALUES
|
||||
(1, 1, 2, 'John Doe', 'john.doe@acme.com', '+1-202-555-0101', now()),
|
||||
(2, 1, 3, 'Jane Smith', 'jane.smith@acme.com', '+1-202-555-0102', now());
|
||||
|
||||
-- Deals
|
||||
INSERT INTO deals (
|
||||
id, organization_id, contact_id, owner_id, title, amount, currency,
|
||||
status, stage, created_at, updated_at
|
||||
) VALUES
|
||||
(1, 1, 1, 2, 'Website Redesign', 15000.00, 'USD', 'in_progress', 'proposal', now(), now()),
|
||||
(2, 1, 2, 3, 'Support Contract', 5000.00, 'USD', 'new', 'qualification', now(), now());
|
||||
|
||||
-- Tasks
|
||||
INSERT INTO tasks (
|
||||
id, deal_id, title, description, due_date, is_done, created_at
|
||||
) VALUES
|
||||
(1, 1, 'Prepare proposal', 'Draft technical scope', now() + interval '5 days', FALSE, now()),
|
||||
(2, 2, 'Call client', 'Discuss onboarding plan', now() + interval '3 days', FALSE, now());
|
||||
|
||||
-- Activities
|
||||
INSERT INTO activities (
|
||||
id, deal_id, author_id, type, payload, created_at
|
||||
) VALUES
|
||||
(1, 1, 2, 'comment', '{"text": "Kickoff meeting scheduled"}', now()),
|
||||
(2, 1, 2, 'status_changed', '{"old_status": "new", "new_status": "in_progress"}', now()),
|
||||
(3, 2, 3, 'task_created', '{"task_id": 2, "title": "Call client"}', now());
|
||||
|
||||
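-- Realign the identity sequences after the explicit-id inserts above; setval's third argument marks a sequence as used only when rows exist.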
SELECT setval('users_id_seq', COALESCE((SELECT MAX(id) FROM users), 0), (SELECT MAX(id) FROM users) IS NOT NULL);
|
||||
SELECT setval('organizations_id_seq', COALESCE((SELECT MAX(id) FROM organizations), 0), (SELECT MAX(id) FROM organizations) IS NOT NULL);
|
||||
SELECT setval('organization_members_id_seq', COALESCE((SELECT MAX(id) FROM organization_members), 0), (SELECT MAX(id) FROM organization_members) IS NOT NULL);
|
||||
SELECT setval('contacts_id_seq', COALESCE((SELECT MAX(id) FROM contacts), 0), (SELECT MAX(id) FROM contacts) IS NOT NULL);
|
||||
SELECT setval('deals_id_seq', COALESCE((SELECT MAX(id) FROM deals), 0), (SELECT MAX(id) FROM deals) IS NOT NULL);
|
||||
SELECT setval('tasks_id_seq', COALESCE((SELECT MAX(id) FROM tasks), 0), (SELECT MAX(id) FROM tasks) IS NOT NULL);
|
||||
SELECT setval('activities_id_seq', COALESCE((SELECT MAX(id) FROM activities), 0), (SELECT MAX(id) FROM activities) IS NOT NULL);
|
||||
|
|
@ -0,0 +1,67 @@
|
|||
"""Pytest fixtures shared across API v1 tests."""
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import AsyncGenerator
|
||||
|
||||
import pytest
|
||||
import pytest_asyncio
|
||||
from httpx import ASGITransport, AsyncClient
|
||||
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
|
||||
|
||||
from app.api.deps import get_cache_backend, get_db_session
|
||||
from app.core.security import password_hasher
|
||||
from app.main import create_app
|
||||
from app.models import Base
|
||||
from tests.utils.fake_redis import InMemoryRedis
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def stub_password_hasher(monkeypatch: pytest.MonkeyPatch) -> None:
|
||||
"""Replace bcrypt-dependent hashing with deterministic helpers for tests."""
|
||||
|
||||
def fake_hash(password: str) -> str:
|
||||
return f"hashed-{password}"
|
||||
|
||||
def fake_verify(password: str, hashed_password: str) -> bool:
|
||||
return hashed_password == f"hashed-{password}"
|
||||
|
||||
monkeypatch.setattr(password_hasher, "hash", fake_hash)
|
||||
monkeypatch.setattr(password_hasher, "verify", fake_verify)
|
||||
|
||||
|
||||
@pytest_asyncio.fixture()
|
||||
async def session_factory() -> AsyncGenerator[async_sessionmaker[AsyncSession], None]:
|
||||
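# Each test gets a throwaway in-memory SQLite database with the full schema created up front.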
engine = create_async_engine("sqlite+aiosqlite:///:memory:", future=True)
|
||||
async with engine.begin() as conn:
|
||||
await conn.run_sync(Base.metadata.create_all)
|
||||
factory = async_sessionmaker(engine, expire_on_commit=False)
|
||||
yield factory
|
||||
await engine.dispose()
|
||||
|
||||
|
||||
@pytest_asyncio.fixture()
|
||||
async def client(
|
||||
session_factory: async_sessionmaker[AsyncSession],
|
||||
cache_stub: InMemoryRedis,
|
||||
) -> AsyncGenerator[AsyncClient, None]:
|
||||
app = create_app()
|
||||
|
||||
async def _get_session_override() -> AsyncGenerator[AsyncSession, None]:
|
||||
async with session_factory() as session:
|
||||
try:
|
||||
yield session
|
||||
await session.commit()
|
||||
except Exception:
|
||||
await session.rollback()
|
||||
raise
|
||||
|
||||
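# Override the app's database session and cache backend dependencies with the in-memory test doubles.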
app.dependency_overrides[get_db_session] = _get_session_override
|
||||
app.dependency_overrides[get_cache_backend] = lambda: cache_stub
|
||||
transport = ASGITransport(app=app)
|
||||
async with AsyncClient(transport=transport, base_url="http://testserver") as test_client:
|
||||
yield test_client
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def cache_stub() -> InMemoryRedis:
|
||||
return InMemoryRedis()
|
||||
|
|
@ -0,0 +1,101 @@
|
|||
"""Shared helpers for task and activity API tests."""
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from datetime import timedelta
|
||||
|
||||
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker
|
||||
|
||||
from app.core.security import jwt_service
|
||||
from app.models.contact import Contact
|
||||
from app.models.deal import Deal
|
||||
from app.models.organization import Organization
|
||||
from app.models.organization_member import OrganizationMember, OrganizationRole
|
||||
from app.models.user import User
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class Scenario:
|
||||
"""Captures seeded entities for API tests."""
|
||||
|
||||
user_id: int
|
||||
user_email: str
|
||||
organization_id: int
|
||||
contact_id: int
|
||||
deal_id: int
|
||||
|
||||
|
||||
async def prepare_scenario(session_factory: async_sessionmaker[AsyncSession]) -> Scenario:
|
||||
async with session_factory() as session:
|
||||
user = User(email="owner@example.com", hashed_password="hashed", name="Owner", is_active=True)
|
||||
org = Organization(name="Acme LLC")
|
||||
session.add_all([user, org])
|
||||
await session.flush()
|
||||
|
||||
membership = OrganizationMember(
|
||||
organization_id=org.id,
|
||||
user_id=user.id,
|
||||
role=OrganizationRole.OWNER,
|
||||
)
|
||||
session.add(membership)
|
||||
|
||||
contact = Contact(
|
||||
organization_id=org.id,
|
||||
owner_id=user.id,
|
||||
name="John Doe",
|
||||
email="john@example.com",
|
||||
)
|
||||
session.add(contact)
|
||||
await session.flush()
|
||||
|
||||
deal = Deal(
|
||||
organization_id=org.id,
|
||||
contact_id=contact.id,
|
||||
owner_id=user.id,
|
||||
title="Website redesign",
|
||||
amount=None,
|
||||
)
|
||||
session.add(deal)
|
||||
await session.commit()
|
||||
|
||||
return Scenario(
|
||||
user_id=user.id,
|
||||
user_email=user.email,
|
||||
organization_id=org.id,
|
||||
contact_id=contact.id,
|
||||
deal_id=deal.id,
|
||||
)
|
||||
|
||||
|
||||
async def create_deal(
|
||||
session_factory: async_sessionmaker[AsyncSession],
|
||||
*,
|
||||
scenario: Scenario,
|
||||
title: str,
|
||||
) -> int:
|
||||
async with session_factory() as session:
|
||||
deal = Deal(
|
||||
organization_id=scenario.organization_id,
|
||||
contact_id=scenario.contact_id,
|
||||
owner_id=scenario.user_id,
|
||||
title=title,
|
||||
amount=None,
|
||||
)
|
||||
session.add(deal)
|
||||
await session.commit()
|
||||
return deal.id
|
||||
|
||||
|
||||
def auth_headers(token: str, scenario: Scenario) -> dict[str, str]:
|
||||
return {
|
||||
"Authorization": f"Bearer {token}",
|
||||
"X-Organization-Id": str(scenario.organization_id),
|
||||
}
|
||||
|
||||
|
||||
def make_token(user_id: int, email: str) -> str:
|
||||
return jwt_service.create_access_token(
|
||||
subject=str(user_id),
|
||||
expires_delta=timedelta(minutes=30),
|
||||
claims={"email": email},
|
||||
)
|
||||
|
|
@ -0,0 +1,63 @@
|
|||
"""API tests for activity endpoints."""
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime, timedelta, timezone
|
||||
|
||||
import pytest
|
||||
from httpx import AsyncClient
|
||||
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker
|
||||
|
||||
from app.models.activity import Activity, ActivityType
|
||||
|
||||
from tests.api.v1.task_activity_shared import auth_headers, make_token, prepare_scenario
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_create_activity_comment_endpoint(
|
||||
session_factory: async_sessionmaker[AsyncSession], client: AsyncClient
|
||||
) -> None:
|
||||
scenario = await prepare_scenario(session_factory)
|
||||
token = make_token(scenario.user_id, scenario.user_email)
|
||||
|
||||
response = await client.post(
|
||||
f"/api/v1/deals/{scenario.deal_id}/activities/",
|
||||
json={"type": "comment", "payload": {"text": " hello world "}},
|
||||
headers=auth_headers(token, scenario),
|
||||
)
|
||||
|
||||
assert response.status_code == 201
|
||||
payload = response.json()
|
||||
assert payload["payload"]["text"] == "hello world"
|
||||
assert payload["type"] == ActivityType.COMMENT.value
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_list_activities_endpoint_supports_pagination(
|
||||
session_factory: async_sessionmaker[AsyncSession], client: AsyncClient
|
||||
) -> None:
|
||||
scenario = await prepare_scenario(session_factory)
|
||||
token = make_token(scenario.user_id, scenario.user_email)
|
||||
|
||||
base_time = datetime.now(timezone.utc)
|
||||
async with session_factory() as session:
|
||||
for index in range(3):
|
||||
activity = Activity(
|
||||
deal_id=scenario.deal_id,
|
||||
author_id=scenario.user_id,
|
||||
type=ActivityType.COMMENT,
|
||||
payload={"text": f"Entry {index}"},
|
||||
created_at=base_time + timedelta(seconds=index),
|
||||
)
|
||||
session.add(activity)
|
||||
await session.commit()
|
||||
|
||||
response = await client.get(
|
||||
f"/api/v1/deals/{scenario.deal_id}/activities/?limit=2&offset=1",
|
||||
headers=auth_headers(token, scenario),
|
||||
)
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert len(data) == 2
|
||||
assert data[0]["payload"]["text"] == "Entry 1"
|
||||
assert data[1]["payload"]["text"] == "Entry 2"
|
||||
|
|
@ -0,0 +1,201 @@
|
|||
"""API tests for analytics endpoints."""
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from decimal import Decimal
|
||||
|
||||
import pytest
|
||||
from httpx import AsyncClient
|
||||
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker
|
||||
|
||||
from app.core.security import jwt_service
|
||||
from app.models.contact import Contact
|
||||
from app.models.deal import Deal, DealStage, DealStatus
|
||||
from app.models.organization import Organization
|
||||
from app.models.organization_member import OrganizationMember, OrganizationRole
|
||||
from app.models.user import User
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class AnalyticsScenario:
|
||||
organization_id: int
|
||||
user_id: int
|
||||
user_email: str
|
||||
token: str
|
||||
in_progress_deal_id: int
|
||||
|
||||
|
||||
async def prepare_analytics_scenario(session_factory: async_sessionmaker[AsyncSession]) -> AnalyticsScenario:
|
||||
async with session_factory() as session:
|
||||
org = Organization(name="Analytics Org")
|
||||
user = User(email="analytics@example.com", hashed_password="hashed", name="Analyst", is_active=True)
|
||||
session.add_all([org, user])
|
||||
await session.flush()
|
||||
|
||||
membership = OrganizationMember(
|
||||
organization_id=org.id,
|
||||
user_id=user.id,
|
||||
role=OrganizationRole.OWNER,
|
||||
)
|
||||
contact = Contact(
|
||||
organization_id=org.id,
|
||||
owner_id=user.id,
|
||||
name="Client",
|
||||
email="client@example.com",
|
||||
)
|
||||
session.add_all([membership, contact])
|
||||
await session.flush()
|
||||
|
||||
now = datetime.now(timezone.utc)
|
||||
deals = [
|
||||
Deal(
|
||||
organization_id=org.id,
|
||||
contact_id=contact.id,
|
||||
owner_id=user.id,
|
||||
title="Qual 1",
|
||||
amount=Decimal("100"),
|
||||
status=DealStatus.NEW,
|
||||
stage=DealStage.QUALIFICATION,
|
||||
created_at=now - timedelta(days=5),
|
||||
),
|
||||
Deal(
|
||||
organization_id=org.id,
|
||||
contact_id=contact.id,
|
||||
owner_id=user.id,
|
||||
title="Proposal",
|
||||
amount=Decimal("200"),
|
||||
status=DealStatus.IN_PROGRESS,
|
||||
stage=DealStage.PROPOSAL,
|
||||
created_at=now - timedelta(days=15),
|
||||
),
|
||||
Deal(
|
||||
organization_id=org.id,
|
||||
contact_id=contact.id,
|
||||
owner_id=user.id,
|
||||
title="Negotiation Won",
|
||||
amount=Decimal("500"),
|
||||
status=DealStatus.WON,
|
||||
stage=DealStage.NEGOTIATION,
|
||||
created_at=now - timedelta(days=2),
|
||||
),
|
||||
Deal(
|
||||
organization_id=org.id,
|
||||
contact_id=contact.id,
|
||||
owner_id=user.id,
|
||||
title="Closed Lost",
|
||||
amount=Decimal("300"),
|
||||
status=DealStatus.LOST,
|
||||
stage=DealStage.CLOSED,
|
||||
created_at=now - timedelta(days=40),
|
||||
),
|
||||
]
|
||||
session.add_all(deals)
|
||||
await session.commit()
|
||||
|
||||
token = jwt_service.create_access_token(
|
||||
subject=str(user.id),
|
||||
expires_delta=timedelta(minutes=30),
|
||||
claims={"email": user.email},
|
||||
)
|
||||
return AnalyticsScenario(
|
||||
organization_id=org.id,
|
||||
user_id=user.id,
|
||||
user_email=user.email,
|
||||
token=token,
|
||||
in_progress_deal_id=next(deal.id for deal in deals if deal.status is DealStatus.IN_PROGRESS),
|
||||
)
|
||||
|
||||
|
||||
def _headers(token: str, organization_id: int) -> dict[str, str]:
|
||||
return {"Authorization": f"Bearer {token}", "X-Organization-Id": str(organization_id)}
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_deals_summary_endpoint_returns_metrics(
|
||||
session_factory: async_sessionmaker[AsyncSession], client: AsyncClient
|
||||
) -> None:
|
||||
scenario = await prepare_analytics_scenario(session_factory)
|
||||
|
||||
response = await client.get(
|
||||
"/api/v1/analytics/deals/summary?days=30",
|
||||
headers=_headers(scenario.token, scenario.organization_id),
|
||||
)
|
||||
|
||||
assert response.status_code == 200
|
||||
payload = response.json()
|
||||
assert payload["total_deals"] == 4
|
||||
by_status = {entry["status"]: entry for entry in payload["by_status"]}
|
||||
assert by_status[DealStatus.NEW.value]["count"] == 1
|
||||
assert by_status[DealStatus.WON.value]["amount_sum"] == "500"
|
||||
assert payload["won"]["average_amount"] == "500"
|
||||
assert payload["new_deals"]["count"] == 3
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_deals_summary_respects_days_filter(
|
||||
session_factory: async_sessionmaker[AsyncSession], client: AsyncClient
|
||||
) -> None:
|
||||
scenario = await prepare_analytics_scenario(session_factory)
|
||||
|
||||
response = await client.get(
|
||||
"/api/v1/analytics/deals/summary?days=3",
|
||||
headers=_headers(scenario.token, scenario.organization_id),
|
||||
)
|
||||
|
||||
assert response.status_code == 200
|
||||
payload = response.json()
|
||||
assert payload["new_deals"]["count"] == 1 # только сделки моложе трёх дней
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_deals_funnel_returns_breakdown(
|
||||
session_factory: async_sessionmaker[AsyncSession], client: AsyncClient
|
||||
) -> None:
|
||||
scenario = await prepare_analytics_scenario(session_factory)
|
||||
|
||||
response = await client.get(
|
||||
"/api/v1/analytics/deals/funnel",
|
||||
headers=_headers(scenario.token, scenario.organization_id),
|
||||
)
|
||||
|
||||
assert response.status_code == 200
|
||||
payload = response.json()
|
||||
assert len(payload["stages"]) == 4
|
||||
qualification = next(item for item in payload["stages"] if item["stage"] == DealStage.QUALIFICATION.value)
|
||||
assert qualification["total"] == 1
|
||||
proposal = next(item for item in payload["stages"] if item["stage"] == DealStage.PROPOSAL.value)
|
||||
assert proposal["conversion_to_next"] == 100.0
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_deal_update_invalidates_cached_summary(
|
||||
session_factory: async_sessionmaker[AsyncSession],
|
||||
client: AsyncClient,
|
||||
cache_stub,
|
||||
) -> None:
|
||||
scenario = await prepare_analytics_scenario(session_factory)
|
||||
headers = _headers(scenario.token, scenario.organization_id)
|
||||
|
||||
first = await client.get(
|
||||
"/api/v1/analytics/deals/summary?days=30",
|
||||
headers=headers,
|
||||
)
|
||||
assert first.status_code == 200
|
||||
keys = [key async for key in cache_stub.scan_iter("analytics:summary:*")]
|
||||
assert keys, "cache should contain warmed summary"
|
||||
|
||||
patch_response = await client.patch(
|
||||
f"/api/v1/deals/{scenario.in_progress_deal_id}",
|
||||
json={"status": DealStatus.WON.value, "stage": DealStage.CLOSED.value},
|
||||
headers=headers,
|
||||
)
|
||||
assert patch_response.status_code == 200
|
||||
|
||||
refreshed = await client.get(
|
||||
"/api/v1/analytics/deals/summary?days=30",
|
||||
headers=headers,
|
||||
)
|
||||
assert refreshed.status_code == 200
|
||||
payload = refreshed.json()
|
||||
assert payload["won"]["count"] == 2
|
||||
|
|
@ -0,0 +1,197 @@
|
|||
"""API tests for authentication endpoints."""
|
||||
from __future__ import annotations
|
||||
|
||||
import pytest
|
||||
from httpx import AsyncClient
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker
|
||||
|
||||
from app.core.security import password_hasher
|
||||
from app.models.organization import Organization
|
||||
from app.models.organization_member import OrganizationMember, OrganizationRole
|
||||
from app.models.user import User
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_register_user_creates_organization_membership(
|
||||
session_factory: async_sessionmaker[AsyncSession],
|
||||
client: AsyncClient,
|
||||
) -> None:
|
||||
payload = {
|
||||
"email": "new-owner@example.com",
|
||||
"password": "StrongPass123!",
|
||||
"name": "Alice Owner",
|
||||
"organization_name": "Rocket LLC",
|
||||
}
|
||||
|
||||
response = await client.post("/api/v1/auth/register", json=payload)
|
||||
|
||||
assert response.status_code == 201
|
||||
body = response.json()
|
||||
assert body["token_type"] == "bearer"
|
||||
assert "access_token" in body
|
||||
assert "refresh_token" in body
|
||||
|
||||
async with session_factory() as session:
|
||||
user = await session.scalar(select(User).where(User.email == payload["email"]))
|
||||
assert user is not None
|
||||
|
||||
organization = await session.scalar(
|
||||
select(Organization).where(Organization.name == payload["organization_name"])
|
||||
)
|
||||
assert organization is not None
|
||||
|
||||
membership = await session.scalar(
|
||||
select(OrganizationMember).where(
|
||||
OrganizationMember.organization_id == organization.id,
|
||||
OrganizationMember.user_id == user.id,
|
||||
)
|
||||
)
|
||||
assert membership is not None
|
||||
assert membership.role == OrganizationRole.OWNER
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_register_user_without_organization_succeeds(
|
||||
session_factory: async_sessionmaker[AsyncSession],
|
||||
client: AsyncClient,
|
||||
) -> None:
|
||||
payload = {
|
||||
"email": "solo-user@example.com",
|
||||
"password": "StrongPass123!",
|
||||
"name": "Solo User",
|
||||
}
|
||||
|
||||
response = await client.post("/api/v1/auth/register", json=payload)
|
||||
|
||||
assert response.status_code == 201
|
||||
|
||||
async with session_factory() as session:
|
||||
user = await session.scalar(select(User).where(User.email == payload["email"]))
|
||||
assert user is not None
|
||||
|
||||
membership = await session.scalar(
|
||||
select(OrganizationMember).where(OrganizationMember.user_id == user.id)
|
||||
)
|
||||
assert membership is None
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_register_fails_when_organization_exists(
|
||||
client: AsyncClient,
|
||||
) -> None:
|
||||
payload = {
|
||||
"email": "owner-one@example.com",
|
||||
"password": "StrongPass123!",
|
||||
"name": "Owner One",
|
||||
"organization_name": "Duplicate Org",
|
||||
}
|
||||
response = await client.post("/api/v1/auth/register", json=payload)
|
||||
assert response.status_code == 201
|
||||
|
||||
duplicate_payload = {
|
||||
"email": "owner-two@example.com",
|
||||
"password": "StrongPass123!",
|
||||
"name": "Owner Two",
|
||||
"organization_name": "Duplicate Org",
|
||||
}
|
||||
|
||||
duplicate_response = await client.post("/api/v1/auth/register", json=duplicate_payload)
|
||||
|
||||
assert duplicate_response.status_code == 409
|
||||
assert duplicate_response.json()["detail"] == "Organization already exists"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_login_endpoint_returns_token_for_valid_credentials(
|
||||
session_factory: async_sessionmaker[AsyncSession],
|
||||
client: AsyncClient,
|
||||
) -> None:
|
||||
async with session_factory() as session:
|
||||
user = User(
|
||||
email="login-user@example.com",
|
||||
hashed_password=password_hasher.hash("Secret123!"),
|
||||
name="Login User",
|
||||
is_active=True,
|
||||
)
|
||||
session.add(user)
|
||||
await session.commit()
|
||||
|
||||
response = await client.post(
|
||||
"/api/v1/auth/login",
|
||||
json={"email": "login-user@example.com", "password": "Secret123!"},
|
||||
)
|
||||
|
||||
assert response.status_code == 200
|
||||
body = response.json()
|
||||
assert body["token_type"] == "bearer"
|
||||
assert "access_token" in body
|
||||
assert "refresh_token" in body
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_token_endpoint_rejects_invalid_credentials(
|
||||
session_factory: async_sessionmaker[AsyncSession],
|
||||
client: AsyncClient,
|
||||
) -> None:
|
||||
async with session_factory() as session:
|
||||
user = User(
|
||||
email="token-user@example.com",
|
||||
hashed_password=password_hasher.hash("SuperSecret123"),
|
||||
name="Token User",
|
||||
is_active=True,
|
||||
)
|
||||
session.add(user)
|
||||
await session.commit()
|
||||
|
||||
response = await client.post(
|
||||
"/api/v1/auth/token",
|
||||
json={"email": "token-user@example.com", "password": "wrong-pass"},
|
||||
)
|
||||
|
||||
assert response.status_code == 401
|
||||
assert response.json()["detail"] == "Invalid email or password"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_refresh_endpoint_returns_new_tokens(
|
||||
session_factory: async_sessionmaker[AsyncSession],
|
||||
client: AsyncClient,
|
||||
) -> None:
|
||||
async with session_factory() as session:
|
||||
user = User(
|
||||
email="refresh-user@example.com",
|
||||
hashed_password=password_hasher.hash("StrongPass123"),
|
||||
name="Refresh User",
|
||||
is_active=True,
|
||||
)
|
||||
session.add(user)
|
||||
await session.commit()
|
||||
|
||||
login_response = await client.post(
|
||||
"/api/v1/auth/login",
|
||||
json={"email": "refresh-user@example.com", "password": "StrongPass123"},
|
||||
)
|
||||
assert login_response.status_code == 200
|
||||
refresh_token = login_response.json()["refresh_token"]
|
||||
|
||||
response = await client.post(
|
||||
"/api/v1/auth/refresh",
|
||||
json={"refresh_token": refresh_token},
|
||||
)
|
||||
|
||||
assert response.status_code == 200
|
||||
body = response.json()
|
||||
assert "access_token" in body
|
||||
assert "refresh_token" in body
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_refresh_endpoint_rejects_invalid_token(client: AsyncClient) -> None:
|
||||
response = await client.post(
|
||||
"/api/v1/auth/refresh",
|
||||
json={"refresh_token": "not-a-jwt"},
|
||||
)
|
||||
|
||||
assert response.status_code == 401
|
||||
assert response.json()["detail"] == "Invalid refresh token"
|
||||
|
|
@ -0,0 +1,279 @@
|
|||
"""API tests for contact endpoints."""
|
||||
from __future__ import annotations
|
||||
|
||||
import pytest
|
||||
from httpx import AsyncClient
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker
|
||||
|
||||
from app.models.contact import Contact
|
||||
from app.models.organization_member import OrganizationMember, OrganizationRole
|
||||
from app.models.user import User
|
||||
|
||||
from tests.api.v1.task_activity_shared import auth_headers, make_token, prepare_scenario
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_list_contacts_supports_search_and_pagination(
|
||||
session_factory: async_sessionmaker[AsyncSession], client: AsyncClient
|
||||
) -> None:
|
||||
scenario = await prepare_scenario(session_factory)
|
||||
token = make_token(scenario.user_id, scenario.user_email)
|
||||
|
||||
async with session_factory() as session:
|
||||
session.add_all(
|
||||
[
|
||||
Contact(
|
||||
organization_id=scenario.organization_id,
|
||||
owner_id=scenario.user_id,
|
||||
name="Alpha Lead",
|
||||
email="alpha@example.com",
|
||||
phone=None,
|
||||
),
|
||||
Contact(
|
||||
organization_id=scenario.organization_id,
|
||||
owner_id=scenario.user_id,
|
||||
name="Beta Prospect",
|
||||
email="beta@example.com",
|
||||
phone=None,
|
||||
),
|
||||
]
|
||||
)
|
||||
await session.commit()
|
||||
|
||||
response = await client.get(
|
||||
"/api/v1/contacts/?page=1&page_size=10&search=alpha",
|
||||
headers=auth_headers(token, scenario),
|
||||
)
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert len(data) == 1
|
||||
assert data[0]["name"] == "Alpha Lead"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_create_contact_returns_created_payload(
|
||||
session_factory: async_sessionmaker[AsyncSession], client: AsyncClient
|
||||
) -> None:
|
||||
scenario = await prepare_scenario(session_factory)
|
||||
token = make_token(scenario.user_id, scenario.user_email)
|
||||
|
||||
response = await client.post(
|
||||
"/api/v1/contacts/",
|
||||
json={
|
||||
"name": "New Contact",
|
||||
"email": "new@example.com",
|
||||
"phone": "+123",
|
||||
"owner_id": scenario.user_id,
|
||||
},
|
||||
headers=auth_headers(token, scenario),
|
||||
)
|
||||
|
||||
assert response.status_code == 201
|
||||
payload = response.json()
|
||||
assert payload["name"] == "New Contact"
|
||||
assert payload["email"] == "new@example.com"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_member_cannot_assign_foreign_owner(
|
||||
session_factory: async_sessionmaker[AsyncSession], client: AsyncClient
|
||||
) -> None:
|
||||
scenario = await prepare_scenario(session_factory)
|
||||
token = make_token(scenario.user_id, scenario.user_email)
|
||||
|
||||
async with session_factory() as session:
|
||||
        membership = await session.scalar(
            select(OrganizationMember).where(
                OrganizationMember.organization_id == scenario.organization_id,
                OrganizationMember.user_id == scenario.user_id,
            )
        )
        assert membership is not None
        membership.role = OrganizationRole.MEMBER

        other_user = User(
            email="manager@example.com",
            hashed_password="hashed",
            name="Manager",
            is_active=True,
        )
        session.add(other_user)
        await session.flush()

        session.add(
            OrganizationMember(
                organization_id=scenario.organization_id,
                user_id=other_user.id,
                role=OrganizationRole.ADMIN,
            )
        )
        await session.commit()

    response = await client.post(
        "/api/v1/contacts/",
        json={
            "name": "Blocked",
            "email": "blocked@example.com",
            "owner_id": scenario.user_id + 1,
        },
        headers=auth_headers(token, scenario),
    )

    assert response.status_code == 403


@pytest.mark.asyncio
async def test_member_can_view_foreign_contacts(
    session_factory: async_sessionmaker[AsyncSession], client: AsyncClient
) -> None:
    scenario = await prepare_scenario(session_factory)
    token = make_token(scenario.user_id, scenario.user_email)

    async with session_factory() as session:
        membership = await session.scalar(
            select(OrganizationMember).where(
                OrganizationMember.organization_id == scenario.organization_id,
                OrganizationMember.user_id == scenario.user_id,
            )
        )
        assert membership is not None
        membership.role = OrganizationRole.MEMBER

        other_user = User(
            email="viewer@example.com",
            hashed_password="hashed",
            name="Viewer",
            is_active=True,
        )
        session.add(other_user)
        await session.flush()

        session.add(
            OrganizationMember(
                organization_id=scenario.organization_id,
                user_id=other_user.id,
                role=OrganizationRole.MANAGER,
            )
        )

        session.add(
            Contact(
                organization_id=scenario.organization_id,
                owner_id=other_user.id,
                name="Foreign Owner",
                email="foreign@example.com",
                phone=None,
            )
        )
        await session.commit()

    response = await client.get(
        "/api/v1/contacts/",
        headers=auth_headers(token, scenario),
    )

    assert response.status_code == 200
    names = {contact["name"] for contact in response.json()}
    assert {"John Doe", "Foreign Owner"}.issubset(names)


@pytest.mark.asyncio
async def test_member_patch_foreign_contact_forbidden(
    session_factory: async_sessionmaker[AsyncSession], client: AsyncClient
) -> None:
    scenario = await prepare_scenario(session_factory)
    token = make_token(scenario.user_id, scenario.user_email)

    async with session_factory() as session:
        membership = await session.scalar(
            select(OrganizationMember).where(
                OrganizationMember.organization_id == scenario.organization_id,
                OrganizationMember.user_id == scenario.user_id,
            )
        )
        assert membership is not None
        membership.role = OrganizationRole.MEMBER

        other_user = User(
            email="owner2@example.com",
            hashed_password="hashed",
            name="Owner2",
            is_active=True,
        )
        session.add(other_user)
        await session.flush()

        session.add(
            OrganizationMember(
                organization_id=scenario.organization_id,
                user_id=other_user.id,
                role=OrganizationRole.MANAGER,
            )
        )

        contact = Contact(
            organization_id=scenario.organization_id,
            owner_id=other_user.id,
            name="Locked Contact",
            email="locked@example.com",
            phone=None,
        )
        session.add(contact)
        await session.commit()
        contact_id = contact.id

    response = await client.patch(
        f"/api/v1/contacts/{contact_id}",
        json={"name": "Hacked"},
        headers=auth_headers(token, scenario),
    )

    assert response.status_code == 403


@pytest.mark.asyncio
async def test_patch_contact_updates_fields(
    session_factory: async_sessionmaker[AsyncSession], client: AsyncClient
) -> None:
    scenario = await prepare_scenario(session_factory)
    token = make_token(scenario.user_id, scenario.user_email)

    async with session_factory() as session:
        contact = Contact(
            organization_id=scenario.organization_id,
            owner_id=scenario.user_id,
            name="Old Name",
            email="old@example.com",
            phone="+111",
        )
        session.add(contact)
        await session.commit()
        contact_id = contact.id

    response = await client.patch(
        f"/api/v1/contacts/{contact_id}",
        json={"name": "Updated", "phone": None},
        headers=auth_headers(token, scenario),
    )

    assert response.status_code == 200
    payload = response.json()
    assert payload["name"] == "Updated"
    assert payload["phone"] is None


@pytest.mark.asyncio
async def test_delete_contact_with_deals_returns_conflict(
    session_factory: async_sessionmaker[AsyncSession], client: AsyncClient
) -> None:
    scenario = await prepare_scenario(session_factory)
    token = make_token(scenario.user_id, scenario.user_email)

    response = await client.delete(
        f"/api/v1/contacts/{scenario.contact_id}",
        headers=auth_headers(token, scenario),
    )

    assert response.status_code == 409
@@ -0,0 +1,113 @@
"""API tests for deal endpoints."""
from __future__ import annotations

from decimal import Decimal

import pytest
from httpx import AsyncClient
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker

from app.models.activity import Activity, ActivityType
from app.models.deal import Deal, DealStage, DealStatus

from tests.api.v1.task_activity_shared import auth_headers, make_token, prepare_scenario


@pytest.mark.asyncio
async def test_create_deal_endpoint_uses_context_owner(
    session_factory: async_sessionmaker[AsyncSession],
    client: AsyncClient,
) -> None:
    scenario = await prepare_scenario(session_factory)
    token = make_token(scenario.user_id, scenario.user_email)

    response = await client.post(
        "/api/v1/deals/",
        json={
            "contact_id": scenario.contact_id,
            "title": "Upsell Subscription",
            "amount": 2500.0,
            "currency": "USD",
        },
        headers=auth_headers(token, scenario),
    )

    assert response.status_code == 201
    payload = response.json()
    assert payload["owner_id"] == scenario.user_id
    assert payload["organization_id"] == scenario.organization_id
    assert payload["title"] == "Upsell Subscription"


@pytest.mark.asyncio
async def test_list_deals_endpoint_filters_by_status(
    session_factory: async_sessionmaker[AsyncSession],
    client: AsyncClient,
) -> None:
    scenario = await prepare_scenario(session_factory)
    token = make_token(scenario.user_id, scenario.user_email)

    async with session_factory() as session:
        base_deal = await session.get(Deal, scenario.deal_id)
        assert base_deal is not None
        base_deal.status = DealStatus.NEW

        won_deal = Deal(
            organization_id=scenario.organization_id,
            contact_id=scenario.contact_id,
            owner_id=scenario.user_id,
            title="Enterprise Upgrade",
            amount=Decimal("8000"),
            currency="USD",
            status=DealStatus.WON,
            stage=DealStage.CLOSED,
        )
        session.add(won_deal)
        await session.commit()

    response = await client.get(
        "/api/v1/deals/?status=won",
        headers=auth_headers(token, scenario),
    )

    assert response.status_code == 200
    data = response.json()
    assert len(data) == 1
    assert data[0]["title"] == "Enterprise Upgrade"
    assert data[0]["status"] == DealStatus.WON.value


@pytest.mark.asyncio
async def test_update_deal_endpoint_updates_stage_and_logs_activity(
    session_factory: async_sessionmaker[AsyncSession],
    client: AsyncClient,
) -> None:
    scenario = await prepare_scenario(session_factory)
    token = make_token(scenario.user_id, scenario.user_email)

    response = await client.patch(
        f"/api/v1/deals/{scenario.deal_id}",
        json={
            "stage": DealStage.PROPOSAL.value,
            "status": DealStatus.WON.value,
            "amount": 5000.0,
            "currency": "USD",
        },
        headers=auth_headers(token, scenario),
    )

    assert response.status_code == 200
    body = response.json()
    assert body["stage"] == DealStage.PROPOSAL.value
    assert body["status"] == DealStatus.WON.value
    assert Decimal(body["amount"]) == Decimal("5000")

    async with session_factory() as session:
        activity_types = await session.scalars(
            select(Activity.type).where(Activity.deal_id == scenario.deal_id)
        )
        collected = set(activity_types.all())

    assert ActivityType.STAGE_CHANGED in collected
    assert ActivityType.STATUS_CHANGED in collected
@@ -0,0 +1,289 @@
"""API tests for organization endpoints."""
from __future__ import annotations

from datetime import timedelta
from typing import AsyncGenerator, Sequence, cast

import pytest
import pytest_asyncio
from httpx import ASGITransport, AsyncClient
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
from sqlalchemy.schema import Table

from app.api.deps import get_db_session
from app.core.security import jwt_service
from app.main import create_app
from app.models import Base
from app.models.organization import Organization
from app.models.organization_member import OrganizationMember, OrganizationRole
from app.models.user import User

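# Builds an in-memory SQLite engine with only the User/Organization/OrganizationMember tables these endpoint tests touch.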
@pytest_asyncio.fixture()
async def session_factory() -> AsyncGenerator[async_sessionmaker[AsyncSession], None]:
    engine = create_async_engine("sqlite+aiosqlite:///:memory:", future=True)
    async with engine.begin() as conn:
        tables: Sequence[Table] = cast(
            Sequence[Table],
            (User.__table__, Organization.__table__, OrganizationMember.__table__),
        )
        await conn.run_sync(Base.metadata.create_all, tables=tables)
    SessionLocal = async_sessionmaker(engine, expire_on_commit=False)

    yield SessionLocal

    await engine.dispose()

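# Overrides the app's get_db_session dependency so every request in these tests runs against the in-memory test database.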
@pytest_asyncio.fixture()
async def client(
    session_factory: async_sessionmaker[AsyncSession],
) -> AsyncGenerator[AsyncClient, None]:
    app = create_app()

    async def _get_session_override() -> AsyncGenerator[AsyncSession, None]:
        async with session_factory() as session:
            yield session

    app.dependency_overrides[get_db_session] = _get_session_override

    transport = ASGITransport(app=app)
    async with AsyncClient(transport=transport, base_url="http://testserver") as test_client:
        yield test_client


@pytest.mark.asyncio
async def test_list_user_organizations_returns_memberships(
    session_factory: async_sessionmaker[AsyncSession], client: AsyncClient
) -> None:
    async with session_factory() as session:
        user = User(email="owner@example.com", hashed_password="hashed", name="Owner", is_active=True)
        session.add(user)
        await session.flush()

        org_1 = Organization(name="Alpha LLC")
        org_2 = Organization(name="Beta LLC")
        session.add_all([org_1, org_2])
        await session.flush()

        membership = OrganizationMember(
            organization_id=org_1.id,
            user_id=user.id,
            role=OrganizationRole.OWNER,
        )
        other_member = OrganizationMember(
            organization_id=org_2.id,
            user_id=user.id + 1,
            role=OrganizationRole.MEMBER,
        )
        session.add_all([membership, other_member])
        await session.commit()

    token = jwt_service.create_access_token(
        subject=str(user.id),
        expires_delta=timedelta(minutes=30),
        claims={"email": user.email},
    )

    response = await client.get(
        "/api/v1/organizations/me",
        headers={"Authorization": f"Bearer {token}"},
    )

    assert response.status_code == 200
    payload = response.json()
    assert len(payload) == 1
    assert payload[0]["id"] == org_1.id
    assert payload[0]["name"] == org_1.name


@pytest.mark.asyncio
async def test_list_user_organizations_requires_token(client: AsyncClient) -> None:
    response = await client.get("/api/v1/organizations/me")
    assert response.status_code == 401


@pytest.mark.asyncio
async def test_owner_can_add_member_to_organization(
    session_factory: async_sessionmaker[AsyncSession],
    client: AsyncClient,
) -> None:
    async with session_factory() as session:
        owner = User(email="owner-add@example.com", hashed_password="hashed", name="Owner", is_active=True)
        invitee = User(email="new-member@example.com", hashed_password="hashed", name="Member", is_active=True)
        session.add_all([owner, invitee])
        await session.flush()

        organization = Organization(name="Membership LLC")
        session.add(organization)
        await session.flush()

        membership = OrganizationMember(
            organization_id=organization.id,
            user_id=owner.id,
            role=OrganizationRole.OWNER,
        )
        session.add(membership)
        await session.commit()

    token = jwt_service.create_access_token(
        subject=str(owner.id),
        expires_delta=timedelta(minutes=30),
        claims={"email": owner.email},
    )

    response = await client.post(
        "/api/v1/organizations/members",
        headers={
            "Authorization": f"Bearer {token}",
            "X-Organization-Id": str(organization.id),
        },
        json={"email": invitee.email, "role": OrganizationRole.MANAGER.value},
    )

    assert response.status_code == 201
    payload = response.json()
    assert payload["organization_id"] == organization.id
    assert payload["user_id"] == invitee.id
    assert payload["role"] == OrganizationRole.MANAGER.value

    async with session_factory() as session:
        new_membership = await session.scalar(
            select(OrganizationMember).where(
                OrganizationMember.organization_id == organization.id,
                OrganizationMember.user_id == invitee.id,
            )
        )
        assert new_membership is not None
        assert new_membership.role == OrganizationRole.MANAGER


@pytest.mark.asyncio
async def test_add_member_requires_existing_user(
    session_factory: async_sessionmaker[AsyncSession],
    client: AsyncClient,
) -> None:
    async with session_factory() as session:
        owner = User(email="owner-missing@example.com", hashed_password="hashed", name="Owner", is_active=True)
        session.add(owner)
        await session.flush()

        organization = Organization(name="Missing LLC")
        session.add(organization)
        await session.flush()

        membership = OrganizationMember(
            organization_id=organization.id,
            user_id=owner.id,
            role=OrganizationRole.OWNER,
        )
        session.add(membership)
        await session.commit()

    token = jwt_service.create_access_token(
        subject=str(owner.id),
        expires_delta=timedelta(minutes=30),
        claims={"email": owner.email},
    )

    response = await client.post(
        "/api/v1/organizations/members",
        headers={
            "Authorization": f"Bearer {token}",
            "X-Organization-Id": str(organization.id),
        },
        json={"email": "ghost@example.com"},
    )

    assert response.status_code == 404
    assert response.json()["detail"] == "User not found"


@pytest.mark.asyncio
async def test_member_role_cannot_add_users(
    session_factory: async_sessionmaker[AsyncSession],
    client: AsyncClient,
) -> None:
    async with session_factory() as session:
        member_user = User(email="member@example.com", hashed_password="hashed", name="Member", is_active=True)
        invitee = User(email="invitee@example.com", hashed_password="hashed", name="Invitee", is_active=True)
        session.add_all([member_user, invitee])
        await session.flush()

        organization = Organization(name="Members Only LLC")
        session.add(organization)
        await session.flush()

        membership = OrganizationMember(
            organization_id=organization.id,
            user_id=member_user.id,
            role=OrganizationRole.MEMBER,
        )
        session.add(membership)
        await session.commit()

    token = jwt_service.create_access_token(
        subject=str(member_user.id),
        expires_delta=timedelta(minutes=30),
        claims={"email": member_user.email},
    )

    response = await client.post(
        "/api/v1/organizations/members",
        headers={
            "Authorization": f"Bearer {token}",
            "X-Organization-Id": str(organization.id),
        },
        json={"email": invitee.email},
    )

    assert response.status_code == 403
    assert response.json()["detail"] == "Only owner/admin can modify organization settings"


@pytest.mark.asyncio
async def test_cannot_add_duplicate_member(
    session_factory: async_sessionmaker[AsyncSession],
    client: AsyncClient,
) -> None:
    async with session_factory() as session:
        owner = User(email="dup-owner@example.com", hashed_password="hashed", name="Owner", is_active=True)
        invitee = User(email="dup-member@example.com", hashed_password="hashed", name="Invitee", is_active=True)
        session.add_all([owner, invitee])
        await session.flush()

        organization = Organization(name="Duplicate LLC")
        session.add(organization)
        await session.flush()

        owner_membership = OrganizationMember(
            organization_id=organization.id,
            user_id=owner.id,
            role=OrganizationRole.OWNER,
        )
        invitee_membership = OrganizationMember(
            organization_id=organization.id,
            user_id=invitee.id,
            role=OrganizationRole.MEMBER,
        )
        session.add_all([owner_membership, invitee_membership])
        await session.commit()

    token = jwt_service.create_access_token(
        subject=str(owner.id),
        expires_delta=timedelta(minutes=30),
        claims={"email": owner.email},
    )

    response = await client.post(
        "/api/v1/organizations/members",
        headers={
            "Authorization": f"Bearer {token}",
            "X-Organization-Id": str(organization.id),
        },
        json={"email": invitee.email},
    )

    assert response.status_code == 409
    assert response.json()["detail"] == "User already belongs to this organization"
@@ -0,0 +1,78 @@
"""API tests for task endpoints."""
from __future__ import annotations

from datetime import date, datetime, timedelta, timezone

import pytest
from httpx import AsyncClient
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker

from app.models.task import Task

from tests.api.v1.task_activity_shared import auth_headers, create_deal, make_token, prepare_scenario


@pytest.mark.asyncio
async def test_create_task_endpoint_creates_task_and_activity(
    session_factory: async_sessionmaker[AsyncSession], client: AsyncClient
) -> None:
    scenario = await prepare_scenario(session_factory)
    token = make_token(scenario.user_id, scenario.user_email)
    due_date = (date.today() + timedelta(days=5)).isoformat()

    response = await client.post(
        "/api/v1/tasks/",
        json={
            "deal_id": scenario.deal_id,
            "title": "Prepare proposal",
            "description": "Send draft",
            "due_date": due_date,
        },
        headers=auth_headers(token, scenario),
    )

    assert response.status_code == 201
    payload = response.json()
    assert payload["deal_id"] == scenario.deal_id
    assert payload["title"] == "Prepare proposal"
    assert payload["is_done"] is False


@pytest.mark.asyncio
async def test_list_tasks_endpoint_filters_by_deal(
    session_factory: async_sessionmaker[AsyncSession], client: AsyncClient
) -> None:
    scenario = await prepare_scenario(session_factory)
    token = make_token(scenario.user_id, scenario.user_email)
    other_deal_id = await create_deal(session_factory, scenario=scenario, title="Renewal")

    async with session_factory() as session:
        session.add_all(
            [
                Task(
                    deal_id=scenario.deal_id,
                    title="Task A",
                    description=None,
                    due_date=datetime.now(timezone.utc) + timedelta(days=2),
                    is_done=False,
                ),
                Task(
                    deal_id=other_deal_id,
                    title="Task B",
                    description=None,
                    due_date=datetime.now(timezone.utc) + timedelta(days=3),
                    is_done=False,
                ),
            ]
        )
        await session.commit()

    response = await client.get(
        f"/api/v1/tasks/?deal_id={scenario.deal_id}",
        headers=auth_headers(token, scenario),
    )

    assert response.status_code == 200
    data = response.json()
    assert len(data) == 1
    assert data[0]["title"] == "Task A"
@@ -0,0 +1,10 @@
"""Pytest configuration & shared fixtures."""
from __future__ import annotations

import sys
from pathlib import Path

# Ensure project root is on sys.path so that `app` package imports succeed during tests.
PROJECT_ROOT = Path(__file__).resolve().parents[1]
if str(PROJECT_ROOT) not in sys.path:
    sys.path.insert(0, str(PROJECT_ROOT))
@@ -0,0 +1,32 @@
"""Regression tests ensuring Enum mappings store lowercase values."""
from __future__ import annotations

from enum import StrEnum

from app.models.activity import Activity, ActivityType
from app.models.deal import Deal, DealStage, DealStatus
from app.models.organization_member import OrganizationMember, OrganizationRole


def _values(enum_cls: type[StrEnum]) -> list[str]:
    return [member.value for member in enum_cls]


def test_organization_role_column_uses_value_strings() -> None:
    role_type = OrganizationMember.__table__.c.role.type  # noqa: SLF001 - runtime inspection
    assert role_type.enums == _values(OrganizationRole)


def test_deal_status_column_uses_value_strings() -> None:
    status_type = Deal.__table__.c.status.type  # noqa: SLF001 - runtime inspection
    assert status_type.enums == _values(DealStatus)


def test_deal_stage_column_uses_value_strings() -> None:
    stage_type = Deal.__table__.c.stage.type  # noqa: SLF001 - runtime inspection
    assert stage_type.enums == _values(DealStage)


def test_activity_type_column_uses_value_strings() -> None:
    activity_type = Activity.__table__.c.type.type  # noqa: SLF001 - runtime inspection
    assert activity_type.enums == _values(ActivityType)
@@ -0,0 +1,164 @@
"""Unit tests for ActivityService."""
from __future__ import annotations

from collections.abc import AsyncGenerator
import uuid

import pytest
import pytest_asyncio
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
from sqlalchemy.pool import StaticPool

from app.models.activity import Activity, ActivityType
from app.models.base import Base
from app.models.contact import Contact
from app.models.deal import Deal
from app.models.organization import Organization
from app.models.organization_member import OrganizationMember, OrganizationRole
from app.models.user import User
from app.repositories.activity_repo import ActivityRepository
from app.services.activity_service import (
    ActivityForbiddenError,
    ActivityListFilters,
    ActivityService,
    ActivityValidationError,
)
from app.services.organization_service import OrganizationContext


@pytest_asyncio.fixture()
async def session() -> AsyncGenerator[AsyncSession, None]:
    engine = create_async_engine(
        "sqlite+aiosqlite:///:memory:",
        future=True,
        poolclass=StaticPool,
    )
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)
    session_factory = async_sessionmaker(engine, expire_on_commit=False)
    async with session_factory() as session:
        yield session
    await engine.dispose()


def _make_user(suffix: str) -> User:
    return User(
        email=f"user-{suffix}@example.com",
        hashed_password="hashed",
        name="Test",
        is_active=True,
    )

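# Helper: persists an organization, owner, contact, and deal, then returns the caller's context, the activity repository, the deal id, and the organization.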
async def _prepare_deal(
    session: AsyncSession,
    *,
    role: OrganizationRole = OrganizationRole.MANAGER,
) -> tuple[OrganizationContext, ActivityRepository, int, Organization]:
    org = Organization(name=f"Org-{uuid.uuid4()}"[:8])
    user = _make_user("owner")
    session.add_all([org, user])
    await session.flush()

    contact = Contact(
        organization_id=org.id,
        owner_id=user.id,
        name="Alice",
        email="alice@example.com",
    )
    session.add(contact)
    await session.flush()

    deal = Deal(
        organization_id=org.id,
        contact_id=contact.id,
        owner_id=user.id,
        title="Activity",
        amount=None,
    )
    session.add(deal)
    await session.flush()

    membership = OrganizationMember(organization_id=org.id, user_id=user.id, role=role)
    context = OrganizationContext(organization=org, membership=membership)
    return context, ActivityRepository(session=session), deal.id, org


@pytest.mark.asyncio
async def test_list_activities_returns_only_current_deal(session: AsyncSession) -> None:
    context, repo, deal_id, _ = await _prepare_deal(session)
    service = ActivityService(repository=repo)

    session.add_all(
        [
            Activity(deal_id=deal_id, author_id=context.user_id, type=ActivityType.COMMENT, payload={"text": "hi"}),
            Activity(deal_id=deal_id + 1, author_id=context.user_id, type=ActivityType.SYSTEM, payload={}),
        ]
    )
    await session.flush()

    activities = await service.list_activities(
        filters=ActivityListFilters(deal_id=deal_id, limit=10, offset=0),
        context=context,
    )

    assert len(activities) == 1
    assert activities[0].deal_id == deal_id


@pytest.mark.asyncio
async def test_add_comment_rejects_empty_text(session: AsyncSession) -> None:
    context, repo, deal_id, _ = await _prepare_deal(session)
    service = ActivityService(repository=repo)

    with pytest.raises(ActivityValidationError):
        await service.add_comment(deal_id=deal_id, author_id=context.user_id, text=" ", context=context)


@pytest.mark.asyncio
async def test_record_activity_blocks_foreign_deal(session: AsyncSession) -> None:
    context, repo, _deal_id, _ = await _prepare_deal(session)
    service = ActivityService(repository=repo)
    # Create a second deal in another organization
    other_org = Organization(name="External")
    other_user = _make_user("external")
    session.add_all([other_org, other_user])
    await session.flush()
    other_contact = Contact(
        organization_id=other_org.id,
        owner_id=other_user.id,
        name="Bob",
        email="bob@example.com",
    )
    session.add(other_contact)
    await session.flush()
    other_deal = Deal(
        organization_id=other_org.id,
        contact_id=other_contact.id,
        owner_id=other_user.id,
        title="Foreign",
        amount=None,
    )
    session.add(other_deal)
    await session.flush()

    with pytest.raises(ActivityForbiddenError):
        await service.list_activities(
            filters=ActivityListFilters(deal_id=other_deal.id),
            context=context,
        )


@pytest.mark.asyncio
async def test_add_comment_trims_payload_text(session: AsyncSession) -> None:
    context, repo, deal_id, _ = await _prepare_deal(session)
    service = ActivityService(repository=repo)

    activity = await service.add_comment(
        deal_id=deal_id,
        author_id=context.user_id,
        text=" trimmed text ",
        context=context,
    )

    assert activity.payload["text"] == "trimmed text"
@@ -0,0 +1,234 @@
"""Unit tests for AnalyticsService."""
from __future__ import annotations

from collections.abc import AsyncGenerator
from datetime import datetime, timedelta, timezone
from decimal import Decimal

import pytest
import pytest_asyncio
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
from sqlalchemy.pool import StaticPool

from app.models import Base
from app.models.contact import Contact
from app.models.deal import Deal, DealStage, DealStatus
from app.models.organization import Organization
from app.models.organization_member import OrganizationMember, OrganizationRole
from app.models.user import User
from app.repositories.analytics_repo import AnalyticsRepository
from app.services.analytics_service import AnalyticsService, invalidate_analytics_cache
from tests.utils.fake_redis import InMemoryRedis


@pytest_asyncio.fixture()
async def session() -> AsyncGenerator[AsyncSession, None]:
    engine = create_async_engine(
        "sqlite+aiosqlite:///:memory:", future=True, poolclass=StaticPool
    )
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)
    Session = async_sessionmaker(engine, expire_on_commit=False)
    async with Session() as session:
        yield session
    await engine.dispose()

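# Seeds one organization with six deals: two NEW deals in qualification, one IN_PROGRESS proposal,
# two WON deals in negotiation (one of them without an amount), and one LOST deal created 40 days ago.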
async def _seed_data(session: AsyncSession) -> tuple[int, int, int]:
    org = Organization(name="Analytics Org")
    user = User(email="analytics@example.com", hashed_password="hashed", name="Analyst", is_active=True)
    session.add_all([org, user])
    await session.flush()

    member = OrganizationMember(organization_id=org.id, user_id=user.id, role=OrganizationRole.OWNER)
    contact = Contact(organization_id=org.id, owner_id=user.id, name="Client", email="client@example.com")
    session.add_all([member, contact])
    await session.flush()

    now = datetime.now(timezone.utc)
    deals = [
        Deal(
            organization_id=org.id,
            contact_id=contact.id,
            owner_id=user.id,
            title="Qual 1",
            amount=Decimal("100"),
            status=DealStatus.NEW,
            stage=DealStage.QUALIFICATION,
            created_at=now - timedelta(days=5),
        ),
        Deal(
            organization_id=org.id,
            contact_id=contact.id,
            owner_id=user.id,
            title="Qual 2",
            amount=Decimal("150"),
            status=DealStatus.NEW,
            stage=DealStage.QUALIFICATION,
            created_at=now - timedelta(days=3),
        ),
        Deal(
            organization_id=org.id,
            contact_id=contact.id,
            owner_id=user.id,
            title="Proposal",
            amount=Decimal("200"),
            status=DealStatus.IN_PROGRESS,
            stage=DealStage.PROPOSAL,
            created_at=now - timedelta(days=15),
        ),
        Deal(
            organization_id=org.id,
            contact_id=contact.id,
            owner_id=user.id,
            title="Negotiation Won",
            amount=Decimal("500"),
            status=DealStatus.WON,
            stage=DealStage.NEGOTIATION,
            created_at=now - timedelta(days=2),
        ),
        Deal(
            organization_id=org.id,
            contact_id=contact.id,
            owner_id=user.id,
            title="Negotiation Won No Amount",
            amount=None,
            status=DealStatus.WON,
            stage=DealStage.NEGOTIATION,
            created_at=now - timedelta(days=1),
        ),
        Deal(
            organization_id=org.id,
            contact_id=contact.id,
            owner_id=user.id,
            title="Closed Lost",
            amount=Decimal("300"),
            status=DealStatus.LOST,
            stage=DealStage.CLOSED,
            created_at=now - timedelta(days=40),
        ),
    ]
    session.add_all(deals)
    await session.commit()
    return org.id, user.id, contact.id


@pytest.mark.asyncio
async def test_deal_summary_returns_expected_metrics(session: AsyncSession) -> None:
    org_id, _, _ = await _seed_data(session)
    service = AnalyticsService(repository=AnalyticsRepository(session))

    summary = await service.get_deal_summary(org_id, days=30)

    assert summary.total_deals == 6
    status_map = {item.status: item for item in summary.by_status}
    assert status_map[DealStatus.NEW].count == 2
    assert Decimal(status_map[DealStatus.NEW].amount_sum) == Decimal("250")
    assert status_map[DealStatus.WON].count == 2
    assert Decimal(summary.won.amount_sum) == Decimal("500")
    assert Decimal(summary.won.average_amount) == Decimal("500")
    assert summary.new_deals.count == 5  # every deal except the old closed one
    assert summary.new_deals.days == 30


@pytest.mark.asyncio
async def test_funnel_breakdown_contains_stage_conversions(session: AsyncSession) -> None:
    org_id, _, _ = await _seed_data(session)
    service = AnalyticsService(repository=AnalyticsRepository(session))

    funnel = await service.get_deal_funnel(org_id)

    assert len(funnel) == 4
    qual = next(item for item in funnel if item.stage == DealStage.QUALIFICATION)
    assert qual.total == 2
    assert qual.by_status[DealStatus.NEW] == 2
    assert qual.conversion_to_next == 50.0

    proposal = next(item for item in funnel if item.stage == DealStage.PROPOSAL)
    assert proposal.total == 1
    assert proposal.by_status[DealStatus.IN_PROGRESS] == 1
    assert proposal.conversion_to_next == 200.0

    last_stage = next(item for item in funnel if item.stage == DealStage.CLOSED)
    assert last_stage.conversion_to_next is None
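# Repository double whose methods always raise; swapping it in after the first call proves that a warm cache short-circuits the database.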
class _ExplodingRepository(AnalyticsRepository):
    async def fetch_status_rollup(self, organization_id: int):  # type: ignore[override]
        raise AssertionError("cache not used for status rollup")

    async def count_new_deals_since(self, organization_id: int, threshold):  # type: ignore[override]
        raise AssertionError("cache not used for new deal count")

    async def fetch_stage_status_rollup(self, organization_id: int):  # type: ignore[override]
        raise AssertionError("cache not used for funnel rollup")


@pytest.mark.asyncio
async def test_summary_reads_from_cache_when_available(session: AsyncSession) -> None:
    org_id, _, _ = await _seed_data(session)
    cache = InMemoryRedis()
    service = AnalyticsService(
        repository=AnalyticsRepository(session),
        cache=cache,
        ttl_seconds=60,
        backoff_ms=0,
    )

    await service.get_deal_summary(org_id, days=30)
    service._repository = _ExplodingRepository(session)

    cached = await service.get_deal_summary(org_id, days=30)
    assert cached.total_deals == 6


@pytest.mark.asyncio
async def test_invalidation_refreshes_cached_summary(session: AsyncSession) -> None:
    org_id, _, contact_id = await _seed_data(session)
    cache = InMemoryRedis()
    service = AnalyticsService(
        repository=AnalyticsRepository(session),
        cache=cache,
        ttl_seconds=60,
        backoff_ms=0,
    )

    await service.get_deal_summary(org_id, days=30)

    deal = Deal(
        organization_id=org_id,
        contact_id=contact_id,
        owner_id=1,
        title="New",
        amount=Decimal("50"),
        status=DealStatus.NEW,
        stage=DealStage.QUALIFICATION,
        created_at=datetime.now(timezone.utc),
    )
    session.add(deal)
    await session.commit()

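    # The summary is still served from the warm cache, so the freshly committed deal is not visible yet.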
    cached = await service.get_deal_summary(org_id, days=30)
    assert cached.total_deals == 6

    await invalidate_analytics_cache(cache, org_id, backoff_ms=0)
    refreshed = await service.get_deal_summary(org_id, days=30)
    assert refreshed.total_deals == 7


@pytest.mark.asyncio
async def test_funnel_reads_from_cache_when_available(session: AsyncSession) -> None:
    org_id, _, _ = await _seed_data(session)
    cache = InMemoryRedis()
    service = AnalyticsService(
        repository=AnalyticsRepository(session),
        cache=cache,
        ttl_seconds=60,
        backoff_ms=0,
    )

    await service.get_deal_funnel(org_id)
    service._repository = _ExplodingRepository(session)

    cached = await service.get_deal_funnel(org_id)
    assert len(cached) == 4
@@ -0,0 +1,126 @@
"""Unit tests for AuthService."""
from __future__ import annotations

from typing import cast
from unittest.mock import MagicMock

import pytest  # type: ignore[import-not-found]
from sqlalchemy.ext.asyncio import AsyncSession

from app.core.security import JWTService, PasswordHasher
from app.models.user import User
from app.repositories.user_repo import UserRepository
from app.services.auth_service import AuthService, InvalidCredentialsError, InvalidRefreshTokenError


class StubUserRepository(UserRepository):
    """In-memory stand-in for UserRepository."""

    def __init__(self, user: User | None) -> None:
        super().__init__(session=MagicMock(spec=AsyncSession))
        self._user = user

    async def get_by_email(self, email: str) -> User | None:  # pragma: no cover - helper
        if self._user and self._user.email == email:
            return self._user
        return None

    async def get_by_id(self, user_id: int) -> User | None:  # pragma: no cover - helper
        if self._user and self._user.id == user_id:
            return self._user
        return None


@pytest.fixture()
def password_hasher() -> PasswordHasher:
    class DummyPasswordHasher:
        def hash(self, password: str) -> str:  # pragma: no cover - trivial
            return f"hashed::{password}"

        def verify(self, password: str, hashed_password: str) -> bool:  # pragma: no cover - trivial
            return hashed_password == self.hash(password)

    return cast(PasswordHasher, DummyPasswordHasher())


@pytest.fixture()
def jwt_service() -> JWTService:
    return JWTService(secret_key="unit-test-secret", algorithm="HS256")


@pytest.mark.asyncio
async def test_authenticate_success(password_hasher: PasswordHasher, jwt_service: JWTService) -> None:
    hashed = password_hasher.hash("StrongPass123")
    user = User(email="user@example.com", hashed_password=hashed, name="Alice", is_active=True)
    user.id = 1
    repo = StubUserRepository(user)
    service = AuthService(repo, password_hasher, jwt_service)

    authenticated = await service.authenticate("user@example.com", "StrongPass123")

    assert authenticated is user


@pytest.mark.asyncio
async def test_authenticate_invalid_credentials(
    password_hasher: PasswordHasher,
    jwt_service: JWTService,
) -> None:
    hashed = password_hasher.hash("StrongPass123")
    user = User(email="user@example.com", hashed_password=hashed, name="Alice", is_active=True)
    user.id = 1
    repo = StubUserRepository(user)
    service = AuthService(repo, password_hasher, jwt_service)

    with pytest.raises(InvalidCredentialsError):
        await service.authenticate("user@example.com", "wrong-pass")


def test_issue_tokens_contains_user_claims(
    password_hasher: PasswordHasher,
    jwt_service: JWTService,
) -> None:
    user = User(email="user@example.com", hashed_password="hashed", name="Alice", is_active=True)
    user.id = 42
    service = AuthService(StubUserRepository(user), password_hasher, jwt_service)

    token_pair = service.issue_tokens(user)
    payload = jwt_service.decode(token_pair.access_token)

    assert payload["sub"] == str(user.id)
    assert payload["email"] == user.email
    assert payload["scope"] == "access"
    assert token_pair.refresh_token
    assert token_pair.expires_in > 0
    assert token_pair.refresh_expires_in > token_pair.expires_in


@pytest.mark.asyncio
async def test_refresh_tokens_returns_new_pair(
    password_hasher: PasswordHasher,
    jwt_service: JWTService,
) -> None:
    user = User(email="refresh@example.com", hashed_password="hashed", name="Refresh", is_active=True)
    user.id = 7
    service = AuthService(StubUserRepository(user), password_hasher, jwt_service)

    initial = service.issue_tokens(user)
    refreshed = await service.refresh_tokens(initial.refresh_token)

    assert refreshed.access_token
    assert refreshed.refresh_token


@pytest.mark.asyncio
async def test_refresh_tokens_rejects_access_token(
    password_hasher: PasswordHasher,
    jwt_service: JWTService,
) -> None:
    user = User(email="refresh@example.com", hashed_password="hashed", name="Refresh", is_active=True)
    user.id = 9
    service = AuthService(StubUserRepository(user), password_hasher, jwt_service)

    pair = service.issue_tokens(user)

    with pytest.raises(InvalidRefreshTokenError):
        await service.refresh_tokens(pair.access_token)
@@ -0,0 +1,262 @@
"""Unit tests for ContactService."""
from __future__ import annotations

from collections.abc import AsyncGenerator
import uuid

import pytest
import pytest_asyncio
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
from sqlalchemy.pool import StaticPool

from app.models.base import Base
from app.models.contact import Contact, ContactCreate
from app.models.deal import Deal
from app.models.organization import Organization
from app.models.organization_member import OrganizationMember, OrganizationRole
from app.models.user import User
from app.repositories.contact_repo import ContactRepository
from app.services.contact_service import (
    ContactDeletionError,
    ContactForbiddenError,
    ContactListFilters,
    ContactService,
    ContactUpdateData,
)
from app.services.organization_service import OrganizationContext


@pytest_asyncio.fixture()
async def session() -> AsyncGenerator[AsyncSession, None]:
    engine = create_async_engine(
        "sqlite+aiosqlite:///:memory:",
        future=True,
        poolclass=StaticPool,
    )
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)
    factory = async_sessionmaker(engine, expire_on_commit=False)
    async with factory() as session:
        yield session
    await engine.dispose()


def _make_user(label: str) -> User:
    return User(
        email=f"{label}-{uuid.uuid4()}@example.com",
        hashed_password="hashed",
        name=f"{label.title()} User",
        is_active=True,
    )


def _context_for(
    *,
    organization: Organization,
    user: User,
    role: OrganizationRole,
) -> OrganizationContext:
    membership = OrganizationMember(organization_id=organization.id, user_id=user.id, role=role)
    return OrganizationContext(organization=organization, membership=membership)

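# Helper: persists an organization with a single contact and returns the acting user's context, the contact repository, and that contact.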
async def _setup_contact(
    session: AsyncSession,
    *,
    role: OrganizationRole = OrganizationRole.MANAGER,
    owner: User | None = None,
    context_user: User | None = None,
) -> tuple[OrganizationContext, ContactRepository, Contact]:
    organization = Organization(name=f"Org-{uuid.uuid4()}"[:8])
    owner_user = owner or _make_user("owner")
    ctx_user = context_user or owner_user
    session.add_all([organization, owner_user])
    if ctx_user is not owner_user:
        session.add(ctx_user)
    await session.flush()

    contact = Contact(
        organization_id=organization.id,
        owner_id=owner_user.id,
        name="John Doe",
        email="john.doe@example.com",
        phone="+100000000",
    )
    session.add(contact)
    await session.flush()

    context = _context_for(organization=organization, user=ctx_user, role=role)
    repo = ContactRepository(session=session)
    return context, repo, contact


@pytest.mark.asyncio
async def test_create_contact_honors_owner_override(session: AsyncSession) -> None:
    context, repo, _ = await _setup_contact(session)
    other_user = _make_user("other")
    session.add(other_user)
    await session.flush()

    service = ContactService(repository=repo)
    contact = await service.create_contact(
        ContactCreate(
            organization_id=context.organization_id,
            owner_id=other_user.id,
            name="Alice",
            email="alice@example.com",
            phone=None,
        ),
        context=context,
    )

    assert contact.owner_id == other_user.id
    assert contact.name == "Alice"


@pytest.mark.asyncio
async def test_member_cannot_create_foreign_owner(session: AsyncSession) -> None:
    owner = _make_user("owner")
    member = _make_user("member")
    context, repo, _ = await _setup_contact(
        session,
        role=OrganizationRole.MEMBER,
        owner=owner,
        context_user=member,
    )
    service = ContactService(repository=repo)

    with pytest.raises(ContactForbiddenError):
        await service.create_contact(
            ContactCreate(
                organization_id=context.organization_id,
                owner_id=owner.id,
                name="Restricted",
                email=None,
                phone=None,
            ),
            context=context,
        )


@pytest.mark.asyncio
async def test_list_contacts_supports_search(session: AsyncSession) -> None:
    context, repo, base_contact = await _setup_contact(session)
    service = ContactService(repository=repo)

    another = Contact(
        organization_id=context.organization_id,
        owner_id=base_contact.owner_id,
        name="Searchable",
        email="findme@example.com",
        phone=None,
    )
    session.add(another)
    await session.flush()

    contacts = await service.list_contacts(
        filters=ContactListFilters(search="search"),
        context=context,
    )

    assert len(contacts) == 1
    assert contacts[0].id == another.id


@pytest.mark.asyncio
async def test_member_owner_filter_forbidden(session: AsyncSession) -> None:
    owner = _make_user("owner")
    member = _make_user("member")
    context, repo, _ = await _setup_contact(
        session,
        role=OrganizationRole.MEMBER,
        owner=owner,
        context_user=member,
    )
    service = ContactService(repository=repo)

    with pytest.raises(ContactForbiddenError):
        await service.list_contacts(
            filters=ContactListFilters(owner_id=owner.id),
            context=context,
        )


@pytest.mark.asyncio
async def test_member_can_view_foreign_contacts(session: AsyncSession) -> None:
    owner = _make_user("owner")
    member = _make_user("member")
    context, repo, contact = await _setup_contact(
        session,
        role=OrganizationRole.MEMBER,
        owner=owner,
        context_user=member,
    )
    service = ContactService(repository=repo)

    contacts = await service.list_contacts(filters=ContactListFilters(), context=context)

    assert contacts and contacts[0].id == contact.id
    assert contacts[0].owner_id == owner.id != context.user_id


@pytest.mark.asyncio
async def test_member_cannot_update_foreign_contact(session: AsyncSession) -> None:
    owner = _make_user("owner")
    member = _make_user("member")
    context, repo, contact = await _setup_contact(
        session,
        role=OrganizationRole.MEMBER,
        owner=owner,
        context_user=member,
    )
    service = ContactService(repository=repo)

    with pytest.raises(ContactForbiddenError):
        await service.update_contact(contact, ContactUpdateData(name="Blocked"), context=context)


@pytest.mark.asyncio
async def test_update_contact_allows_nullifying_fields(session: AsyncSession) -> None:
    context, repo, contact = await _setup_contact(session)
    service = ContactService(repository=repo)

    updated = await service.update_contact(
        contact,
        ContactUpdateData(name="Updated", email=None, phone=None),
        context=context,
    )

    assert updated.name == "Updated"
    assert updated.email is None
    assert updated.phone is None


@pytest.mark.asyncio
async def test_delete_contact_blocks_when_deals_exist(session: AsyncSession) -> None:
    context, repo, contact = await _setup_contact(session)
    service = ContactService(repository=repo)

    session.add(
        Deal(
            organization_id=context.organization_id,
            contact_id=contact.id,
            owner_id=contact.owner_id,
            title="Pending",
            amount=None,
        )
    )
    await session.flush()

    with pytest.raises(ContactDeletionError):
        await service.delete_contact(contact, context=context)


@pytest.mark.asyncio
async def test_delete_contact_succeeds_without_deals(session: AsyncSession) -> None:
    context, repo, contact = await _setup_contact(session)
    service = ContactService(repository=repo)

    await service.delete_contact(contact, context=context)
    result = await session.scalar(select(Contact).where(Contact.id == contact.id))
    assert result is None
@@ -0,0 +1,295 @@
"""Unit tests for DealService."""
from __future__ import annotations

from collections.abc import AsyncGenerator
from decimal import Decimal
import uuid

import pytest  # type: ignore[import-not-found]
import pytest_asyncio  # type: ignore[import-not-found]
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
from sqlalchemy.pool import StaticPool

from app.models.activity import Activity, ActivityType
from app.models.base import Base
from app.models.contact import Contact
from app.models.deal import DealCreate, DealStage, DealStatus
from app.models.organization import Organization
from app.models.organization_member import OrganizationMember, OrganizationRole
from app.models.user import User
from app.repositories.deal_repo import DealRepository
from app.services.deal_service import (
    ContactHasDealsError,
    DealOrganizationMismatchError,
    DealService,
    DealStageTransitionError,
    DealStatusValidationError,
    DealUpdateData,
)
from app.services.organization_service import OrganizationContext


@pytest_asyncio.fixture()
async def session() -> AsyncGenerator[AsyncSession, None]:
    engine = create_async_engine(
        "sqlite+aiosqlite:///:memory:",
        future=True,
        poolclass=StaticPool,
    )
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)
    async_session = async_sessionmaker(engine, expire_on_commit=False)
    async with async_session() as session:
        yield session
    await engine.dispose()


def _make_organization(name: str) -> Organization:
    org = Organization(name=name)
    return org


def _make_user(email_suffix: str) -> User:
    return User(
        email=f"user-{email_suffix}@example.com",
        hashed_password="hashed",
        name="Test User",
        is_active=True,
    )


def _make_context(org: Organization, user: User, role: OrganizationRole) -> OrganizationContext:
    membership = OrganizationMember(organization_id=org.id, user_id=user.id, role=role)
    return OrganizationContext(organization=org, membership=membership)

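# Helper: persists an organization, a user, and a contact, then returns the caller's context, the contact, and the deal repository.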
async def _persist_base(session: AsyncSession, *, role: OrganizationRole = OrganizationRole.MANAGER) -> tuple[
|
||||
OrganizationContext,
|
||||
Contact,
|
||||
DealRepository,
|
||||
]:
|
||||
org = _make_organization(name=f"Org-{uuid.uuid4()}"[:8])
|
||||
user = _make_user(email_suffix=str(uuid.uuid4())[:8])
|
||||
session.add_all([org, user])
|
||||
await session.flush()
|
||||
|
||||
contact = Contact(
|
||||
organization_id=org.id,
|
||||
owner_id=user.id,
|
||||
name="John Doe",
|
||||
email="john@example.com",
|
||||
)
|
||||
session.add(contact)
|
||||
await session.flush()
|
||||
|
||||
context = _make_context(org, user, role)
|
||||
repo = DealRepository(session=session)
|
||||
return context, contact, repo
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_create_deal_rejects_foreign_contact(session: AsyncSession) -> None:
|
||||
context, contact, repo = await _persist_base(session)
|
||||
|
||||
other_org = _make_organization(name="Other")
|
||||
other_user = _make_user(email_suffix="other")
|
||||
session.add_all([other_org, other_user])
|
||||
await session.flush()
|
||||
|
||||
service = DealService(repository=repo)
|
||||
payload = DealCreate(
|
||||
organization_id=other_org.id,
|
||||
contact_id=contact.id,
|
||||
owner_id=context.user_id,
|
||||
title="Website Redesign",
|
||||
amount=None,
|
||||
)
|
||||
|
||||
other_context = _make_context(other_org, other_user, OrganizationRole.MANAGER)
|
||||
|
||||
with pytest.raises(DealOrganizationMismatchError):
|
||||
await service.create_deal(payload, context=other_context)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_stage_rollback_requires_admin(session: AsyncSession) -> None:
|
||||
context, contact, repo = await _persist_base(session, role=OrganizationRole.MANAGER)
|
||||
service = DealService(repository=repo)
|
||||
|
||||
deal = await service.create_deal(
|
||||
DealCreate(
|
||||
organization_id=context.organization_id,
|
||||
contact_id=contact.id,
|
||||
owner_id=context.user_id,
|
||||
title="Migration",
|
||||
amount=Decimal("5000"),
|
||||
),
|
||||
        context=context,
    )
    deal.stage = DealStage.PROPOSAL

    with pytest.raises(DealStageTransitionError):
        await service.update_deal(
            deal,
            DealUpdateData(stage=DealStage.QUALIFICATION),
            context=context,
        )


@pytest.mark.asyncio
async def test_stage_rollback_allowed_for_admin(session: AsyncSession) -> None:
    context, contact, repo = await _persist_base(session, role=OrganizationRole.ADMIN)
    service = DealService(repository=repo)

    deal = await service.create_deal(
        DealCreate(
            organization_id=context.organization_id,
            contact_id=contact.id,
            owner_id=context.user_id,
            title="Rollout",
            amount=Decimal("1000"),
        ),
        context=context,
    )
    deal.stage = DealStage.NEGOTIATION

    updated = await service.update_deal(
        deal,
        DealUpdateData(stage=DealStage.PROPOSAL),
        context=context,
    )

    assert updated.stage == DealStage.PROPOSAL


@pytest.mark.asyncio
async def test_stage_rollback_allowed_for_owner(session: AsyncSession) -> None:
    context, contact, repo = await _persist_base(session, role=OrganizationRole.OWNER)
    service = DealService(repository=repo)

    deal = await service.create_deal(
        DealCreate(
            organization_id=context.organization_id,
            contact_id=contact.id,
            owner_id=context.user_id,
            title="Owner Rollback",
            amount=Decimal("2500"),
        ),
        context=context,
    )
    deal.stage = DealStage.CLOSED

    updated = await service.update_deal(
        deal,
        DealUpdateData(stage=DealStage.NEGOTIATION),
        context=context,
    )

    assert updated.stage == DealStage.NEGOTIATION


@pytest.mark.asyncio
async def test_stage_forward_allowed_for_member(session: AsyncSession) -> None:
    context, contact, repo = await _persist_base(session, role=OrganizationRole.MEMBER)
    service = DealService(repository=repo)

    deal = await service.create_deal(
        DealCreate(
            organization_id=context.organization_id,
            contact_id=contact.id,
            owner_id=context.user_id,
            title="Forward Move",
            amount=Decimal("1000"),
        ),
        context=context,
    )

    updated = await service.update_deal(
        deal,
        DealUpdateData(stage=DealStage.PROPOSAL),
        context=context,
    )

    assert updated.stage == DealStage.PROPOSAL


@pytest.mark.asyncio
async def test_status_won_requires_positive_amount(session: AsyncSession) -> None:
    context, contact, repo = await _persist_base(session)
    service = DealService(repository=repo)

    deal = await service.create_deal(
        DealCreate(
            organization_id=context.organization_id,
            contact_id=contact.id,
            owner_id=context.user_id,
            title="Zero",
            amount=None,
        ),
        context=context,
    )

    with pytest.raises(DealStatusValidationError):
        await service.update_deal(
            deal,
            DealUpdateData(status=DealStatus.WON),
            context=context,
        )


@pytest.mark.asyncio
async def test_updates_create_activity_records(session: AsyncSession) -> None:
    context, contact, repo = await _persist_base(session)
    service = DealService(repository=repo)

    deal = await service.create_deal(
        DealCreate(
            organization_id=context.organization_id,
            contact_id=contact.id,
            owner_id=context.user_id,
            title="Activity",
            amount=Decimal("100"),
        ),
        context=context,
    )

    await service.update_deal(
        deal,
        DealUpdateData(
            stage=DealStage.PROPOSAL,
            status=DealStatus.WON,
            amount=Decimal("5000"),
        ),
        context=context,
    )

    result = await session.scalars(select(Activity).where(Activity.deal_id == deal.id))
    activity_types = {activity.type for activity in result.all()}
    assert ActivityType.STAGE_CHANGED in activity_types
    assert ActivityType.STATUS_CHANGED in activity_types


@pytest.mark.asyncio
async def test_contact_delete_guard(session: AsyncSession) -> None:
    context, contact, repo = await _persist_base(session)
    service = DealService(repository=repo)

    deal = await service.create_deal(
        DealCreate(
            organization_id=context.organization_id,
            contact_id=contact.id,
            owner_id=context.user_id,
            title="To Delete",
            amount=Decimal("100"),
        ),
        context=context,
    )

    with pytest.raises(ContactHasDealsError):
        await service.ensure_contact_can_be_deleted(contact.id)

    await session.delete(deal)
    await session.flush()

    await service.ensure_contact_can_be_deleted(contact.id)
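
A note on what the stage tests above pin down: anyone may move a deal forward through the pipeline, while rolling a deal back to an earlier stage is reserved for ADMIN and OWNER. The toy model below restates that guard; the stage ordering and the privileged-role set are inferred from the assertions, not taken from the real DealService.

from enum import Enum


class Stage(str, Enum):
    # Declaration order is assumed to match the pipeline order used in the tests.
    QUALIFICATION = "qualification"
    PROPOSAL = "proposal"
    NEGOTIATION = "negotiation"
    CLOSED = "closed"


_PIPELINE = list(Stage)


def stage_change_allowed(current: Stage, new: Stage, *, is_privileged: bool) -> bool:
    """Forward (or same-stage) moves are always allowed; rollbacks need ADMIN/OWNER."""
    if _PIPELINE.index(new) >= _PIPELINE.index(current):
        return True
    return is_privileged
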
@@ -0,0 +1,194 @@
"""Unit tests for OrganizationService."""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import cast
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
import pytest # type: ignore[import-not-found]
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.models.organization import Organization
|
||||
from app.models.organization_member import OrganizationMember, OrganizationRole
|
||||
from app.repositories.org_repo import OrganizationRepository
|
||||
from app.services.organization_service import (
|
||||
OrganizationAccessDeniedError,
|
||||
OrganizationContext,
|
||||
OrganizationContextMissingError,
|
||||
OrganizationForbiddenError,
|
||||
OrganizationMemberAlreadyExistsError,
|
||||
OrganizationService,
|
||||
)
|
||||
|
||||
|
||||
class StubOrganizationRepository(OrganizationRepository):
|
||||
"""Simple in-memory stand-in for OrganizationRepository."""
|
||||
|
||||
def __init__(self, membership: OrganizationMember | None) -> None:
|
||||
super().__init__(session=MagicMock(spec=AsyncSession))
|
||||
self._membership = membership
|
||||
|
||||
async def get_membership(self, organization_id: int, user_id: int) -> OrganizationMember | None: # pragma: no cover - helper
|
||||
if (
|
||||
self._membership
|
||||
and self._membership.organization_id == organization_id
|
||||
and self._membership.user_id == user_id
|
||||
):
|
||||
return self._membership
|
||||
return None
|
||||
|
||||
|
||||
def make_membership(role: OrganizationRole, *, organization_id: int = 1, user_id: int = 10) -> OrganizationMember:
|
||||
organization = Organization(name="Acme Inc")
|
||||
organization.id = organization_id
|
||||
membership = OrganizationMember(
|
||||
organization_id=organization_id,
|
||||
user_id=user_id,
|
||||
role=role,
|
||||
)
|
||||
membership.organization = organization
|
||||
return membership
|
||||
|
||||
|
||||
class SessionStub:
|
||||
"""Minimal async session stub capturing writes."""
|
||||
|
||||
def __init__(self) -> None:
|
||||
self.added: list[OrganizationMember] = []
|
||||
self.committed: bool = False
|
||||
self.refreshed: list[OrganizationMember] = []
|
||||
|
||||
def add(self, obj: OrganizationMember) -> None:
|
||||
self.added.append(obj)
|
||||
|
||||
async def commit(self) -> None:
|
||||
self.committed = True
|
||||
|
||||
async def refresh(self, obj: OrganizationMember) -> None:
|
||||
self.refreshed.append(obj)
|
||||
|
||||
|
||||
class MembershipRepositoryStub(OrganizationRepository):
|
||||
"""Repository stub that can emulate duplicate checks for add_member."""
|
||||
|
||||
def __init__(self, memberships: dict[tuple[int, int], OrganizationMember] | None = None) -> None:
|
||||
self._session_stub = SessionStub()
|
||||
super().__init__(session=cast(AsyncSession, self._session_stub))
|
||||
self._memberships = memberships or {}
|
||||
|
||||
@property
|
||||
def session_stub(self) -> SessionStub:
|
||||
return self._session_stub
|
||||
|
||||
async def get_membership(self, organization_id: int, user_id: int) -> OrganizationMember | None:
|
||||
return self._memberships.get((organization_id, user_id))
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_get_context_success() -> None:
|
||||
membership = make_membership(OrganizationRole.MANAGER)
|
||||
service = OrganizationService(StubOrganizationRepository(membership))
|
||||
|
||||
context = await service.get_context(user_id=membership.user_id, organization_id=membership.organization_id)
|
||||
|
||||
assert context.organization_id == membership.organization_id
|
||||
assert context.role == OrganizationRole.MANAGER
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_get_context_missing_header() -> None:
|
||||
service = OrganizationService(StubOrganizationRepository(None))
|
||||
|
||||
with pytest.raises(OrganizationContextMissingError):
|
||||
await service.get_context(user_id=1, organization_id=None)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_get_context_access_denied() -> None:
|
||||
service = OrganizationService(StubOrganizationRepository(None))
|
||||
|
||||
with pytest.raises(OrganizationAccessDeniedError):
|
||||
await service.get_context(user_id=1, organization_id=99)
|
||||
|
||||
|
||||
def test_ensure_can_manage_settings_blocks_manager() -> None:
|
||||
membership = make_membership(OrganizationRole.MANAGER)
|
||||
organization = membership.organization
|
||||
assert organization is not None
|
||||
context = OrganizationContext(organization=organization, membership=membership)
|
||||
service = OrganizationService(StubOrganizationRepository(membership))
|
||||
|
||||
with pytest.raises(OrganizationForbiddenError):
|
||||
service.ensure_can_manage_settings(context)
|
||||
|
||||
|
||||
def test_member_must_own_entity() -> None:
|
||||
membership = make_membership(OrganizationRole.MEMBER)
|
||||
organization = membership.organization
|
||||
assert organization is not None
|
||||
context = OrganizationContext(organization=organization, membership=membership)
|
||||
service = OrganizationService(StubOrganizationRepository(membership))
|
||||
|
||||
with pytest.raises(OrganizationForbiddenError):
|
||||
service.ensure_member_owns_entity(context=context, owner_id=999)
|
||||
|
||||
# Same owner should pass silently.
|
||||
service.ensure_member_owns_entity(context=context, owner_id=membership.user_id)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_add_member_succeeds_for_owner() -> None:
|
||||
owner_membership = make_membership(OrganizationRole.OWNER, organization_id=7, user_id=1)
|
||||
organization = owner_membership.organization
|
||||
assert organization is not None
|
||||
context = OrganizationContext(organization=organization, membership=owner_membership)
|
||||
|
||||
repo = MembershipRepositoryStub()
|
||||
service = OrganizationService(repo)
|
||||
|
||||
result = await service.add_member(context=context, user_id=42, role=OrganizationRole.MANAGER)
|
||||
|
||||
assert result.organization_id == organization.id
|
||||
assert result.user_id == 42
|
||||
assert result.role == OrganizationRole.MANAGER
|
||||
|
||||
session_stub = repo.session_stub
|
||||
assert session_stub.committed is True
|
||||
assert session_stub.added and session_stub.added[0] is result
|
||||
assert session_stub.refreshed and session_stub.refreshed[0] is result
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_add_member_rejects_duplicate_membership() -> None:
|
||||
owner_membership = make_membership(OrganizationRole.OWNER, organization_id=5, user_id=10)
|
||||
organization = owner_membership.organization
|
||||
assert organization is not None
|
||||
context = OrganizationContext(organization=organization, membership=owner_membership)
|
||||
|
||||
duplicate_user_id = 55
|
||||
existing = OrganizationMember(
|
||||
organization_id=organization.id,
|
||||
user_id=duplicate_user_id,
|
||||
role=OrganizationRole.MEMBER,
|
||||
)
|
||||
repo = MembershipRepositoryStub({(organization.id, duplicate_user_id): existing})
|
||||
service = OrganizationService(repo)
|
||||
|
||||
with pytest.raises(OrganizationMemberAlreadyExistsError):
|
||||
await service.add_member(context=context, user_id=duplicate_user_id, role=OrganizationRole.MANAGER)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_add_member_requires_privileged_role() -> None:
|
||||
member_context = make_membership(OrganizationRole.MEMBER, organization_id=3, user_id=77)
|
||||
organization = member_context.organization
|
||||
assert organization is not None
|
||||
context = OrganizationContext(organization=organization, membership=member_context)
|
||||
|
||||
repo = MembershipRepositoryStub()
|
||||
service = OrganizationService(repo)
|
||||
|
||||
with pytest.raises(OrganizationForbiddenError):
|
||||
await service.add_member(context=context, user_id=99, role=OrganizationRole.MANAGER)
|
||||
|
||||
# Ensure DB work not attempted when permissions fail.
|
||||
assert repo.session_stub.committed is False
|
||||
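
Read together, these cases imply a small permission matrix: managing organization settings and adding members require a privileged role, a MANAGER is explicitly blocked from settings, and a plain MEMBER may only act on entities it owns. The helper below is a hypothetical restatement of that matrix; whether ADMIN may add members is not exercised by these tests and is an assumption here.

from app.models.organization_member import OrganizationRole

# Assumed privileged set; only OWNER is confirmed for add_member by the tests above.
_PRIVILEGED = {OrganizationRole.OWNER, OrganizationRole.ADMIN}


def can_manage_settings(role: OrganizationRole) -> bool:
    return role in _PRIVILEGED


def can_add_member(role: OrganizationRole) -> bool:
    return role in _PRIVILEGED


def must_own_entity(role: OrganizationRole) -> bool:
    # Only plain members are restricted to entities they own.
    return role is OrganizationRole.MEMBER
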
@@ -0,0 +1,199 @@
"""Unit tests for TaskService."""
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import AsyncGenerator
|
||||
from datetime import datetime, timedelta, timezone
|
||||
import uuid
|
||||
|
||||
import pytest
|
||||
import pytest_asyncio
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
|
||||
from sqlalchemy.pool import StaticPool
|
||||
|
||||
from app.models.activity import Activity, ActivityType
|
||||
from app.models.base import Base
|
||||
from app.models.contact import Contact
|
||||
from app.models.deal import Deal
|
||||
from app.models.organization import Organization
|
||||
from app.models.organization_member import OrganizationMember, OrganizationRole
|
||||
from app.models.task import TaskCreate
|
||||
from app.models.user import User
|
||||
from app.repositories.activity_repo import ActivityRepository
|
||||
from app.repositories.task_repo import TaskRepository
|
||||
from app.services.organization_service import OrganizationContext
|
||||
from app.services.task_service import (
|
||||
TaskDueDateError,
|
||||
TaskForbiddenError,
|
||||
TaskService,
|
||||
TaskUpdateData,
|
||||
)
|
||||
|
||||
|
||||
@pytest_asyncio.fixture()
|
||||
async def session() -> AsyncGenerator[AsyncSession, None]:
|
||||
engine = create_async_engine(
|
||||
"sqlite+aiosqlite:///:memory:",
|
||||
future=True,
|
||||
poolclass=StaticPool,
|
||||
)
|
||||
async with engine.begin() as conn:
|
||||
await conn.run_sync(Base.metadata.create_all)
|
||||
session_factory = async_sessionmaker(engine, expire_on_commit=False)
|
||||
async with session_factory() as session:
|
||||
yield session
|
||||
await engine.dispose()
|
||||
|
||||
|
||||
def _make_user(suffix: str) -> User:
|
||||
return User(
|
||||
email=f"user-{suffix}@example.com",
|
||||
hashed_password="hashed",
|
||||
name="Test User",
|
||||
is_active=True,
|
||||
)
|
||||
|
||||
|
||||
async def _setup_environment(
|
||||
session: AsyncSession,
|
||||
*,
|
||||
role: OrganizationRole = OrganizationRole.MANAGER,
|
||||
context_user: User | None = None,
|
||||
owner_user: User | None = None,
|
||||
) -> tuple[OrganizationContext, User, User, int, TaskRepository, ActivityRepository]:
|
||||
org = Organization(name=f"Org-{uuid.uuid4()}"[:8])
|
||||
owner = owner_user or _make_user("owner")
|
||||
ctx_user = context_user or owner
|
||||
session.add_all([org, owner])
|
||||
if ctx_user is not owner:
|
||||
session.add(ctx_user)
|
||||
await session.flush()
|
||||
|
||||
contact = Contact(
|
||||
organization_id=org.id,
|
||||
owner_id=owner.id,
|
||||
name="John Doe",
|
||||
email="john@example.com",
|
||||
)
|
||||
session.add(contact)
|
||||
await session.flush()
|
||||
|
||||
deal = Deal(
|
||||
organization_id=org.id,
|
||||
contact_id=contact.id,
|
||||
owner_id=owner.id,
|
||||
title="Implementation",
|
||||
amount=None,
|
||||
)
|
||||
session.add(deal)
|
||||
await session.flush()
|
||||
|
||||
membership = OrganizationMember(organization_id=org.id, user_id=ctx_user.id, role=role)
|
||||
context = OrganizationContext(organization=org, membership=membership)
|
||||
task_repo = TaskRepository(session=session)
|
||||
activity_repo = ActivityRepository(session=session)
|
||||
return context, owner, ctx_user, deal.id, task_repo, activity_repo
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_create_task_logs_activity(session: AsyncSession) -> None:
|
||||
context, owner, _, deal_id, task_repo, activity_repo = await _setup_environment(session)
|
||||
service = TaskService(task_repository=task_repo, activity_repository=activity_repo)
|
||||
|
||||
due_date = datetime.now(timezone.utc) + timedelta(days=2)
|
||||
task = await service.create_task(
|
||||
TaskCreate(
|
||||
deal_id=deal_id,
|
||||
title="Follow up",
|
||||
description="Call client",
|
||||
due_date=due_date,
|
||||
),
|
||||
context=context,
|
||||
)
|
||||
|
||||
result = await session.scalars(select(Activity).where(Activity.deal_id == deal_id))
|
||||
activities = result.all()
|
||||
assert len(activities) == 1
|
||||
assert activities[0].type == ActivityType.TASK_CREATED
|
||||
assert activities[0].payload["task_id"] == task.id
|
||||
assert activities[0].payload["title"] == task.title
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_member_cannot_create_task_for_foreign_deal(session: AsyncSession) -> None:
|
||||
owner = _make_user("owner")
|
||||
member = _make_user("member")
|
||||
context, _, _, deal_id, task_repo, activity_repo = await _setup_environment(
|
||||
session,
|
||||
role=OrganizationRole.MEMBER,
|
||||
context_user=member,
|
||||
owner_user=owner,
|
||||
)
|
||||
service = TaskService(task_repository=task_repo, activity_repository=activity_repo)
|
||||
|
||||
with pytest.raises(TaskForbiddenError):
|
||||
await service.create_task(
|
||||
TaskCreate(
|
||||
deal_id=deal_id,
|
||||
title="Follow up",
|
||||
description=None,
|
||||
due_date=datetime.now(timezone.utc) + timedelta(days=1),
|
||||
),
|
||||
context=context,
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_due_date_cannot_be_in_past(session: AsyncSession) -> None:
|
||||
context, _, _, deal_id, task_repo, activity_repo = await _setup_environment(session)
|
||||
service = TaskService(task_repository=task_repo, activity_repository=activity_repo)
|
||||
|
||||
with pytest.raises(TaskDueDateError):
|
||||
await service.create_task(
|
||||
TaskCreate(
|
||||
deal_id=deal_id,
|
||||
title="Late",
|
||||
description=None,
|
||||
due_date=datetime.now(timezone.utc) - timedelta(days=1),
|
||||
),
|
||||
context=context,
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_member_cannot_update_foreign_task(session: AsyncSession) -> None:
|
||||
# First create a task as the owner
|
||||
owner = _make_user("owner")
|
||||
context_owner, _, _, deal_id, task_repo, activity_repo = await _setup_environment(
|
||||
session,
|
||||
context_user=owner,
|
||||
owner_user=owner,
|
||||
)
|
||||
service = TaskService(task_repository=task_repo, activity_repository=activity_repo)
|
||||
task = await service.create_task(
|
||||
TaskCreate(
|
||||
deal_id=deal_id,
|
||||
title="Prepare deck",
|
||||
description=None,
|
||||
due_date=datetime.now(timezone.utc) + timedelta(days=5),
|
||||
),
|
||||
context=context_owner,
|
||||
)
|
||||
|
||||
# Attempt to update it as another member
|
||||
member = _make_user("member")
|
||||
session.add(member)
|
||||
await session.flush()
|
||||
membership = OrganizationMember(
|
||||
organization_id=context_owner.organization_id,
|
||||
user_id=member.id,
|
||||
role=OrganizationRole.MEMBER,
|
||||
)
|
||||
member_context = OrganizationContext(organization=context_owner.organization, membership=membership)
|
||||
|
||||
with pytest.raises(TaskForbiddenError):
|
||||
await service.update_task(
|
||||
task.id,
|
||||
TaskUpdateData(is_done=True),
|
||||
context=member_context,
|
||||
)
|
||||
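
These tests lean on two validations in TaskService: a task's due date must lie in the future, and a MEMBER may only create or update tasks on deals they own; task creation is also expected to log a TASK_CREATED activity carrying the task id and title. The snippet below sketches only the due-date check, assuming a strict comparison against UTC now; the real service may treat the boundary differently.

from datetime import datetime, timezone

from app.services.task_service import TaskDueDateError


def validate_due_date(due_date: datetime) -> None:
    # Strictness at the boundary (<= vs <) is an assumption.
    if due_date <= datetime.now(timezone.utc):
        raise TaskDueDateError("due_date must be in the future")
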
@@ -0,0 +1,57 @@
"""Simple in-memory Redis replacement for tests."""
|
||||
from __future__ import annotations
|
||||
|
||||
import fnmatch
|
||||
import time
|
||||
from collections.abc import AsyncIterator
|
||||
|
||||
|
||||
class InMemoryRedis:
|
||||
"""Subset of redis.asyncio.Redis API backed by an in-memory dict."""
|
||||
|
||||
def __init__(self) -> None:
|
||||
self._store: dict[str, bytes] = {}
|
||||
self._expirations: dict[str, float] = {}
|
||||
|
||||
async def ping(self) -> bool: # pragma: no cover - compatibility shim
|
||||
return True
|
||||
|
||||
async def get(self, name: str) -> bytes | None:
|
||||
self._purge_if_expired(name)
|
||||
return self._store.get(name)
|
||||
|
||||
async def set(self, name: str, value: bytes, ex: int | None = None) -> None:
|
||||
self._store[name] = value
|
||||
if ex is not None:
|
||||
self._expirations[name] = time.monotonic() + ex
|
||||
elif name in self._expirations:
|
||||
self._expirations.pop(name, None)
|
||||
|
||||
async def delete(self, *names: str) -> int:
|
||||
removed = 0
|
||||
for name in names:
|
||||
if name in self._store:
|
||||
del self._store[name]
|
||||
removed += 1
|
||||
self._expirations.pop(name, None)
|
||||
return removed
|
||||
|
||||
async def close(self) -> None: # pragma: no cover - interface completeness
|
||||
self._store.clear()
|
||||
self._expirations.clear()
|
||||
|
||||
async def scan_iter(self, match: str) -> AsyncIterator[str]:
|
||||
pattern = match or "*"
|
||||
for key in list(self._store.keys()):
|
||||
self._purge_if_expired(key)
|
||||
for key in self._store.keys():
|
||||
if fnmatch.fnmatch(key, pattern):
|
||||
yield key
|
||||
|
||||
def _purge_if_expired(self, name: str) -> None:
|
||||
expires_at = self._expirations.get(name)
|
||||
if expires_at is None:
|
||||
return
|
||||
if expires_at <= time.monotonic():
|
||||
self._store.pop(name, None)
|
||||
self._expirations.pop(name, None)
|
||||
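
One plausible way to use this stub is as a pytest-asyncio fixture that stands in for the real redis.asyncio client in cache-related tests. The import path below is an assumption; the diff does not show where this helper module lives.

from collections.abc import AsyncIterator

import pytest_asyncio

from tests.utils.in_memory_redis import InMemoryRedis  # assumed module path


@pytest_asyncio.fixture()
async def fake_redis() -> AsyncIterator[InMemoryRedis]:
    client = InMemoryRedis()
    try:
        yield client
    finally:
        await client.close()
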
uv.lock
@@ -2,6 +2,18 @@ version = 1
revision = 3
requires-python = ">=3.14"

[[package]]
name = "aiosqlite"
version = "0.21.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/13/7d/8bca2bf9a247c2c5dfeec1d7a5f40db6518f88d314b8bca9da29670d2671/aiosqlite-0.21.0.tar.gz", hash = "sha256:131bb8056daa3bc875608c631c678cda73922a2d4ba8aec373b19f18c17e7aa3", size = 13454, upload-time = "2025-02-03T07:30:16.235Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/f5/10/6c25ed6de94c49f88a91fa5018cb4c0f3625f31d5be9f771ebe5cc7cd506/aiosqlite-0.21.0-py3-none-any.whl", hash = "sha256:2549cf4057f95f53dcba16f2b64e8e2791d7e1adedb13197dd8ed77bb226d7d0", size = 15792, upload-time = "2025-02-03T07:30:13.6Z" },
]

[[package]]
name = "alembic"
version = "1.17.2"

@@ -354,6 +366,15 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" },
]

[[package]]
name = "iniconfig"
version = "2.3.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" },
]

[[package]]
name = "isort"
version = "7.0.0"

@@ -467,6 +488,15 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" },
]

[[package]]
name = "packaging"
version = "25.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" },
]

[[package]]
name = "passlib"
version = "1.7.4"

@@ -490,6 +520,15 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" },
]

[[package]]
name = "pluggy"
version = "1.6.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" },
]

[[package]]
name = "pydantic"
version = "2.12.4"

@@ -581,6 +620,34 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/61/ad/689f02752eeec26aed679477e80e632ef1b682313be70793d798c1d5fc8f/PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb", size = 22997, upload-time = "2024-11-28T03:43:27.893Z" },
]

[[package]]
name = "pytest"
version = "9.0.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "colorama", marker = "sys_platform == 'win32'" },
    { name = "iniconfig" },
    { name = "packaging" },
    { name = "pluggy" },
    { name = "pygments" },
]
sdist = { url = "https://files.pythonhosted.org/packages/07/56/f013048ac4bc4c1d9be45afd4ab209ea62822fb1598f40687e6bf45dcea4/pytest-9.0.1.tar.gz", hash = "sha256:3e9c069ea73583e255c3b21cf46b8d3c56f6e3a1a8f6da94ccb0fcf57b9d73c8", size = 1564125, upload-time = "2025-11-12T13:05:09.333Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/0b/8b/6300fb80f858cda1c51ffa17075df5d846757081d11ab4aa35cef9e6258b/pytest-9.0.1-py3-none-any.whl", hash = "sha256:67be0030d194df2dfa7b556f2e56fb3c3315bd5c8822c6951162b92b32ce7dad", size = 373668, upload-time = "2025-11-12T13:05:07.379Z" },
]

[[package]]
name = "pytest-asyncio"
version = "1.3.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "pytest" },
]
sdist = { url = "https://files.pythonhosted.org/packages/90/2c/8af215c0f776415f3590cac4f9086ccefd6fd463befeae41cd4d3f193e5a/pytest_asyncio-1.3.0.tar.gz", hash = "sha256:d7f52f36d231b80ee124cd216ffb19369aa168fc10095013c6b014a34d3ee9e5", size = 50087, upload-time = "2025-11-10T16:07:47.256Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/e5/35/f8b19922b6a25bc0880171a2f1a003eaeb93657475193ab516fd87cac9da/pytest_asyncio-1.3.0-py3-none-any.whl", hash = "sha256:611e26147c7f77640e6d0a92a38ed17c3e9848063698d5c93d5aa7aa11cebff5", size = 15075, upload-time = "2025-11-10T16:07:45.537Z" },
]

[[package]]
name = "python-dotenv"
version = "1.2.1"

@@ -625,6 +692,15 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" },
]

[[package]]
name = "redis"
version = "7.1.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/43/c8/983d5c6579a411d8a99bc5823cc5712768859b5ce2c8afe1a65b37832c81/redis-7.1.0.tar.gz", hash = "sha256:b1cc3cfa5a2cb9c2ab3ba700864fb0ad75617b41f01352ce5779dabf6d5f9c3c", size = 4796669, upload-time = "2025-11-19T15:54:39.961Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/89/f0/8956f8a86b20d7bb9d6ac0187cf4cd54d8065bc9a1a09eb8011d4d326596/redis-7.1.0-py3-none-any.whl", hash = "sha256:23c52b208f92b56103e17c5d06bdc1a6c2c0b3106583985a76a18f83b265de2b", size = 354159, upload-time = "2025-11-19T15:54:38.064Z" },
]

[[package]]
name = "rich"
version = "14.2.0"

@@ -783,13 +859,17 @@ dependencies = [
    { name = "passlib", extra = ["bcrypt"] },
    { name = "pydantic-settings" },
    { name = "pyjwt" },
    { name = "redis" },
    { name = "sqlalchemy" },
]

[package.dev-dependencies]
dev = [
    { name = "aiosqlite" },
    { name = "isort" },
    { name = "mypy" },
    { name = "pytest" },
    { name = "pytest-asyncio" },
    { name = "ruff" },
]

@@ -801,13 +881,17 @@ requires-dist = [
    { name = "passlib", extras = ["bcrypt"], specifier = ">=1.7.4" },
    { name = "pydantic-settings", specifier = ">=2.12.0" },
    { name = "pyjwt", specifier = ">=2.9.0" },
    { name = "redis", specifier = ">=5.2.0" },
    { name = "sqlalchemy", specifier = ">=2.0.44" },
]

[package.metadata.requires-dev]
dev = [
    { name = "aiosqlite", specifier = ">=0.20.0" },
    { name = "isort", specifier = ">=7.0.0" },
    { name = "mypy", specifier = ">=1.18.2" },
    { name = "pytest", specifier = ">=8.3.3" },
    { name = "pytest-asyncio", specifier = ">=0.25.0" },
    { name = "ruff", specifier = ">=0.14.6" },
]