diff --git a/CLAUDE.md b/CLAUDE.md index 3c1f7d62..f602398a 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -55,6 +55,28 @@ bin/rails db:migrate bin/rails db:setup ``` +### Git Commit Guidelines + +**IMPORTANT:** When committing changes, follow these rules: + +```bash +# ✅ CORRECT - Simple commit message +git commit -m "Add Story 1.1: Database Schema Extension" + +# ❌ INCORRECT - Do NOT add Co-Authored-By +git commit -m "Add Story 1.1: Database Schema Extension + +Co-Authored-By: Claude " +``` + +**Rules:** +- ✅ Use simple, descriptive commit messages +- ✅ Format: `git commit -m "Add Story X.X: [Story Title]"` +- ✅ Include story summary in commit body (optional) +- ❌ **NEVER** add `Co-Authored-By: Claude ` or similar +- ❌ **NEVER** add AI assistant attribution in commit messages +- ✅ One story per commit for traceability + ### Production ```bash # Precompile assets @@ -559,143 +581,451 @@ describe('[Component]', () => { 9. **Always** use code blocks for Vue components, Pinia stores, API layers, and type definitions 10. **Always** reference design system compliance per FR28 -**Commit Workflow:** -- **After each story is approved by user**: Commit the story to git before writing the next story -- **Commit message format**: `git commit -m "Add Story X.X: [Story Title]"` -- **Purpose**: Preserve each story independently, allow rollback if needed, maintain clear git history -- **Command sequence**: - 1. Write story to `docs/prd.md` - 2. User reviews and approves - 3. `git add docs/prd.md` - 4. `git commit -m "Add Story X.X: [Title]"` - 5. Proceed to next story +**Story Creation Workflow (NEW):** + +**MANDATORY WORKFLOW:** `create → review → approve → commit` + +1. **Create**: SM creates story using `/sm *draft` + - Story written to `docs/stories/` directory + - Follows `.bmad-core/templates/story-tmpl.yaml` structure + - Populated from PRD + Architecture docs + +2. 
**Review**: User reviews the created story draft + - Check for completeness, accuracy, and template compliance + - Verify all sections present (User Story, Background, Tasks, Acceptance Criteria, etc.) + - Confirm technical details match PRD requirements + +3. **Approve**: User provides explicit approval + - **CRITICAL**: Do NOT commit until user says "approved" or similar + - User may request changes - return to step 1 + +4. **Commit**: After explicit approval only + ```bash + git add docs/stories/1.1.database-schema-extension.md + git commit -m "Add Story 1.1: Database Schema Extension" + ``` + - Commit message format: `git commit -m "Add Story X.X: [Story Title]"` + - Purpose: Preserve each story independently, allow rollback, clear git history + +**Key Rules:** +- ✅ **NEVER commit without explicit user approval** +- ✅ **Stories go in `docs/stories/`, NOT in `docs/prd.md`** +- ✅ **One story per commit for traceability** +- ✅ **If user requests changes, edit and re-submit for review** + +**Example Session:** +```bash +# 1. Create story +@sm *draft +# Story created at docs/stories/1.1.database-schema-extension.md + +# 2. User reviews (reads the file) -## Enhanced IDE Development Workflow +# 3. User provides feedback or approval +# "Looks good, approved" → Proceed to commit +# "Change X in section Y" → Edit and re-submit for review -**MANDATORY**: All FloDoc development must follow the BMAD Enhanced IDE Development Workflow defined in `.bmad-core/enhanced-ide-development-workflow.md`. +# 4. Commit ONLY after approval +git add docs/stories/1.1.database-schema-extension.md +git commit -m "Add Story 1.1: Database Schema Extension" +``` + +## Enhanced IDE Development Workflow (STRICT BMAD CORE CYCLE) + +**MANDATORY**: All FloDoc development MUST follow the BMad Core Development Cycle from `.bmad-core/user-guide.md`. This is **SUPER CRITICAL** for brownfield projects. 
+ +### 🚨 CRITICAL WORKFLOW RULES + +**Violating these rules will result in broken code in master:** + +1. **NEVER commit story implementation files until QA approves** +2. **ALWAYS create a git branch for each story after PO approval** +3. **QA must approve story implementation with >80% test pass rate** +4. **Only merge to master after QA approval** +5. **Delete branch after successful merge** +6. **Repeat cycle for each new story** + +### The BMad Core Development Cycle (MANDATORY) + +**Source:** `.bmad-core/user-guide.md` - "The Core Development Cycle (IDE)" diagram + +``` +┌─────────────────────────────────────────────────────────────┐ +│ 1. SM: Reviews Previous Story Dev/QA Notes │ +│ 2. SM: Drafts Next Story from Sharded Epic + Architecture │ +│ 3. QA: *risk + *design (for brownfield/high-risk) │ +│ 4. PO: Validate Story Draft (Optional) │ +│ 5. User: APPROVE STORY │ +└─────────────────────────────────────────────────────────────┘ + ↓ +┌─────────────────────────────────────────────────────────────┐ +│ 6. DEV: Create Story Branch (YOUR REQUIREMENT) │ +│ git checkout -b story/1.1-database-schema │ +│ ⚠️ BRANCH CREATED ONLY AFTER USER APPROVAL │ +└─────────────────────────────────────────────────────────────┘ + ↓ +┌─────────────────────────────────────────────────────────────┐ +│ 7. DEV: Implement Tasks + Write Tests │ +│ - Use subtasks as TODO list │ +│ - Mark [ ] → [x] in story file after each subtask │ +│ - Update Dev Agent Record with progress │ +│ 8. DEV: Run All Validations (rspec, lint, etc.) │ +│ - Must achieve >80% test pass rate │ +│ 9. DEV: Mark Ready for Review + Add Notes │ +│ - Update story Status to "Ready for Review" │ +└─────────────────────────────────────────────────────────────┘ + ↓ +┌─────────────────────────────────────────────────────────────┐ +│ 10. User Verification │ +│ └─ Request QA Review? 
│ +│ ├─ YES → Go to Step 11 │ +│ └─ NO → Skip to Step 12 (Verify tests yourself) │ +└─────────────────────────────────────────────────────────────┘ + ↓ +┌─────────────────────────────────────────────────────────────┐ +│ 11. QA: Test Architect Review + Quality Gate │ +│ - *review: Comprehensive assessment │ +│ - Requirements traceability │ +│ - Test coverage analysis │ +│ - Active refactoring (when safe) │ +│ - Quality Gate Decision │ +└─────────────────────────────────────────────────────────────┘ + ↓ +┌─────────────────────────────────────────────────────────────┐ +│ 12. QA Decision: │ +│ ├─ PASS → Continue to Step 13 │ +│ ├─ CONCERNS → Team review, decide │ +│ ├─ FAIL → Return to Dev (Step 7) │ +│ └─ WAIVED → Document reasoning │ +└─────────────────────────────────────────────────────────────┘ + ↓ +┌─────────────────────────────────────────────────────────────┐ +│ 13. IMPORTANT: Verify All Regression Tests and Linting │ +│ Are Passing (>80% pass rate required) │ +│ ⚠️ CRITICAL: NO COMMIT WITHOUT QA APPROVAL │ +└─────────────────────────────────────────────────────────────┘ + ↓ +┌─────────────────────────────────────────────────────────────┐ +│ 14. IMPORTANT: COMMIT YOUR CHANGES BEFORE PROCEEDING! │ +│ git add . │ +│ git commit -m "Add Story 1.1: Database Schema Extension"│ +│ git push origin story/1.1-database-schema │ +└─────────────────────────────────────────────────────────────┘ + ↓ +┌─────────────────────────────────────────────────────────────┐ +│ 15. Gate Update Needed? │ +│ └─ YES → QA: *gate to Update Status │ +│ └─ NO → Continue │ +└─────────────────────────────────────────────────────────────┘ + ↓ +┌─────────────────────────────────────────────────────────────┐ +│ 16. MERGE TO MASTER (Your Requirement) │ +│ git checkout master │ +│ git merge story/1.1-database-schema │ +│ git push origin master │ +└─────────────────────────────────────────────────────────────┘ + ↓ +┌─────────────────────────────────────────────────────────────┐ +│ 17. 
DELETE BRANCH (Your Requirement) │ +│ git branch -d story/1.1-database-schema │ +│ git push origin --delete story/1.1-database-schema │ +└─────────────────────────────────────────────────────────────┘ + ↓ +┌─────────────────────────────────────────────────────────────┐ +│ 18. Mark Story as Done │ +│ 19. SM: Start Next Story Cycle │ +│ Compact conversation, begin fresh │ +└─────────────────────────────────────────────────────────────┘ +``` + +**Key Points from Core Cycle:** +- **Step H (User Verification)**: User decides if QA review is needed +- **Step M**: Verify all tests pass BEFORE commit +- **Step N**: COMMIT happens AFTER QA approval +- **Step K**: Mark story as done, then loop back + +**Your Additions:** +- **Step 6**: Branch creation (not in core diagram) +- **Step 16**: Merge to master (not in core diagram) +- **Step 17**: Delete branch (not in core diagram) +- **Step 7.1**: Mark subtasks [x] in story file +- **Step 7.2**: Update Dev Agent Record +- **Step 8.1**: >80% test pass rate required + +### Branch Management Workflow + +**For Each Story:** + +```bash +# After PO approves story draft: +git checkout -b story/1.1-database-schema -### Workflow Overview +# ... development work ... -The workflow integrates the Test Architect (QA agent) throughout the development lifecycle to ensure quality, prevent regressions, and maintain high standards. +# After QA approves implementation: +git add . +git commit -m "Add Story 1.1: Database Schema Extension" +git push origin story/1.1-database-schema -### Complete Development Cycle Flow +# Merge to master (via PR or direct): +git checkout master +git merge story/1.1-database-schema +git push origin master +# Delete branch: +git branch -d story/1.1-database-schema +git push origin --delete story/1.1-database-schema ``` -1. SM: Create next story → Review → Approve -2. QA (Optional): Risk assessment (*risk) → Test design (*design) -3. Dev: Implement story → Write tests → Complete -4. 
QA (Optional): Mid-dev checks (*trace, *nfr) -5. Dev: Mark Ready for Review -6. QA (Required): Review story (*review) → Gate decision -7. Dev (If needed): Address issues -8. QA (If needed): Update gate (*gate) -9. Commit: All changes -10. Push: To remote -11. Continue: Until all features implemented + +### Dev Agent Task Management (CRITICAL) + +**During story implementation, Dev agent MUST:** + +1. **Use story subtasks as TODO list** + - Read all subtasks from `Tasks / Subtasks` section + - Treat each subtask as a development task to complete + +2. **Mark subtasks complete in story file** + - When subtask is done: `[ ]` → `[x]` + - Update story file immediately after completing each subtask + - Example: + ```markdown + - [x] Create migration file + - [x] Create institutions table + - [x] Create cohorts table + - [x] Create cohort_enrollments table + ``` + +3. **Track progress in Dev Agent Record** + - Add completion notes for each major subtask + - Update File List with all created/modified files + - Log any issues or blockers in Debug Log References + +4. 
**Update story Status** + - Change from "Draft" → "In Progress" when dev starts + - Change to "Ready for Review" when all tasks complete and tests pass + +**Story File Update Rules:** +- ✅ **ALLOWED**: Tasks/Subtasks checkboxes, Dev Agent Record sections, Status +- ❌ **NOT ALLOWED**: User Story, Background, Acceptance Criteria, Testing sections + +**Example Dev Agent Record Update:** +```markdown +### Dev Agent Record + +### Agent Model Used +James (Full Stack Developer) + +### Debug Log References +- Migration created successfully +- All foreign keys validated + +### Completion Notes List +- [x] Subtask 1.1: Created migration file +- [x] Subtask 1.2: Created institutions table schema +- [x] Subtask 1.3: Created cohorts table schema +- [x] Subtask 1.4: Created cohort_enrollments table schema +- [x] Subtask 1.5: Added all indexes +- [x] Subtask 1.6: Added all foreign keys + +### File List +- db/migrate/20260114000001_create_flo_doc_tables.rb +- spec/migrations/20260114000001_create_flo_doc_tables_spec.rb +- spec/integration/cohort_workflow_spec.rb + +### Change Log +| Date | Action | Author | +|------|--------|--------| +| 2026-01-14 | Created migration and tests | James | ``` ### Test Architect Command Reference -| Stage | Command | Purpose | Output | Priority | -|-------|---------|---------|--------|----------| -| After Story Approval | `*risk` | Identify integration & regression risks | `docs/qa/assessments/{epic}.{story}-risk-{YYYYMMDD}.md` | High for complex/brownfield | -| | `*design` | Create test strategy for dev | `docs/qa/assessments/{epic}.{story}-test-design-{YYYYMMDD}.md` | High for new features | -| During Development | `*trace` | Verify test coverage | `docs/qa/assessments/{epic}.{story}-trace-{YYYYMMDD}.md` | Medium | -| | `*nfr` | Validate quality attributes | `docs/qa/assessments/{epic}.{story}-nfr-{YYYYMMDD}.md` | High for critical features | -| After Development | `*review` | Comprehensive assessment | QA Results in story + 
`docs/qa/gates/{epic}.{story}-{slug}.yml` | **Required** | -| Post-Review | `*gate` | Update quality decision | Updated `docs/qa/gates/{epic}.{story}-{slug}.yml` | As needed | - -### When to Use Test Architect - -| Scenario | Before Dev | During Dev | After Dev | -|----------|------------|------------|-----------| -| Simple bug fix | Optional | Optional | Required `*review` | -| New feature | Recommended `*risk`, `*design` | Optional `*trace` | Required `*review` | -| **Brownfield change** | **Required** `*risk`, `*design` | Recommended `*trace`, `*nfr` | Required `*review` | -| **API modification** | **Required** `*risk`, `*design` | **Required** `*trace` | Required `*review` | -| Performance-critical | Recommended `*design` | **Required** `*nfr` | Required `*review` | -| Data migration | **Required** `*risk`, `*design` | **Required** `*trace` | Required `*review` + `*gate` | - -### Quality Gate Decisions - -| Status | Meaning | Action Required | Can Proceed? | -|--------|---------|-----------------|--------------| -| **PASS** | All critical requirements met | None | ✅ Yes | -| **CONCERNS** | Non-critical issues found | Team review recommended | ⚠️ With caution | -| **FAIL** | Critical issues (security, missing P0 tests) | Must fix | ❌ No | -| **WAIVED** | Issues acknowledged and accepted | Document reasoning | ✅ With approval | - -### Key Principles for FloDoc +| Stage | Command | Purpose | Output | When to Use | +|-------|---------|---------|--------|-------------| +| **After PO Approval** | `*risk` | Identify integration & regression risks | `docs/qa/assessments/{epic}.{story}-risk-{YYYYMMDD}.md` | **REQUIRED for brownfield** | +| | `*design` | Create test strategy for dev | `docs/qa/assessments/{epic}.{story}-test-design-{YYYYMMDD}.md` | **REQUIRED for brownfield** | +| **During Development** | `*trace` | Verify test coverage | `docs/qa/assessments/{epic}.{story}-trace-{YYYYMMDD}.md` | Recommended | +| | `*nfr` | Validate quality attributes | 
`docs/qa/assessments/{epic}.{story}-nfr-{YYYYMMDD}.md` | Recommended | +| **After Dev Complete** | `*review` | **COMPREHENSIVE ASSESSMENT** | QA Results + Gate file | **MANDATORY - NO COMMIT WITHOUT THIS** | +| **Post-Review** | `*gate` | Update quality decision | Updated gate file | If issues found | + +### Quality Gate Decisions (BLOCKING) + +| Status | Meaning | Action Required | Can Commit? | +|--------|---------|-----------------|-------------| +| **PASS** | All critical requirements met, >80% tests pass | None | ✅ **YES** | +| **CONCERNS** | Non-critical issues found | Team review recommended | ⚠️ With approval | +| **FAIL** | Critical issues (security, missing P0 tests, <80% pass) | **MUST FIX** | ❌ **NO - BLOCKED** | +| **WAIVED** | Issues acknowledged and accepted | Document reasoning + approval | ⚠️ With approval | + +### FloDoc-Specific Requirements **For FloDoc Enhancement (Brownfield), ALWAYS:** -1. **Run risk assessment first**: `@qa *risk {story}` before development -2. **Get test design**: `@qa *design {story}` to guide implementation -3. **Verify coverage mid-dev**: `@qa *trace {story}` after writing tests -4. **Check NFRs**: `@qa *nfr {story}` before marking ready -5. **Required review**: `@qa *review {story}` for all stories +1. **After PO approves story**: Run `@qa *risk` and `@qa *design` BEFORE writing code +2. **During development**: Run `@qa *trace` to verify coverage +3. **Before commit**: Run `@qa *review` - **MANDATORY** +4. **Test pass rate**: Must be >80% for QA approval +5. **Branch per story**: Create `story/{number}-{slug}` branch +6. 
**Delete after merge**: Clean up branches after successful merge -**Why this matters for FloDoc:** +**Why Strict Compliance Matters for FloDoc:** - Brownfield changes risk breaking existing DocuSeal functionality -- 3-portal integration is complex and requires thorough testing +- 3-portal integration complexity requires thorough validation - Security is critical (POPIA, sponsor portal access) - Performance must be maintained (NFR1: <20% degradation) +- Management demo depends on working system -### Documentation & Audit Trail +### Complete Example: Story 1.1 Implementation + +```bash +# === PHASE 1: STORY CREATION === +# 1. SM creates story +@sm *draft +# Creates: docs/stories/1.1.database-schema-extension.md -All Test Architect activities create permanent records: -- **Assessment Reports**: `docs/qa/assessments/` -- **Gate Files**: `docs/qa/gates/` -- **Story Updates**: QA Results sections in story files -- **Traceability**: Requirements to test mapping maintained +# 2. User reviews story +cat docs/stories/1.1.database-schema-extension.md -### Example FloDoc Development Session +# 3. User approves story +# User says: "Story 1.1 approved" -```bash -# 1. SM creates story (Story 8.0.1) -# User approves story +# === PHASE 2: QA PRE-ANALYSIS (REQUIRED) === +# 4. QA risk assessment +@qa *risk Story-1.1 +# Creates: docs/qa/assessments/flodoc.1.1-risk-20260114.md -# 2. QA risk assessment (RECOMMENDED for brownfield) -@qa *risk Story-8.0.1 -# Creates: docs/qa/assessments/flodoc.8.0.1-risk-20260114.md +# 5. QA test design +@qa *design Story-1.1 +# Creates: docs/qa/assessments/flodoc.1.1-test-design-20260114.md -# 3. QA test design (RECOMMENDED for new infrastructure) -@qa *design Story-8.0.1 -# Creates: docs/qa/assessments/flodoc.8.0.1-test-design-20260114.md +# === PHASE 3: BRANCH CREATION === +# 6. Dev creates story branch (AFTER PO APPROVAL) +git checkout -b story/1.1-database-schema -# 4. 
Dev implements story -# - Writes Docker Compose configs -# - Creates setup scripts -# - Writes tests +# === PHASE 4: IMPLEMENTATION === +# 7. Dev implements story +# - Create migration: db/migrate/20260114000001_create_flo_doc_tables.rb +# - Write migration spec: spec/migrations/20260114000001_create_flo_doc_tables_spec.rb +# - Write integration spec: spec/integration/cohort_workflow_spec.rb -# 5. QA mid-dev check (OPTIONAL but RECOMMENDED) -@qa *trace Story-8.0.1 +# 8. Dev runs tests +bundle exec rspec +# Must achieve >80% pass rate + +# 9. Dev runs QA mid-dev check (optional but recommended) +@qa *trace Story-1.1 # Verifies: All acceptance criteria have tests -# 6. Dev completes, marks ready for review +# 10. Dev marks ready for review +# Adds notes: "All tasks complete, tests written, ready for QA review" + +# === PHASE 5: QA REVIEW (MANDATORY) === +# 11. QA comprehensive review +@qa *review Story-1.1 +# Creates: docs/qa/gates/flodoc.1.1-database-schema.yml +# Adds: QA Results section to story file -# 7. QA full review (REQUIRED) -@qa *review Story-8.0.1 -# Creates: docs/qa/gates/flodoc.8.0.1-docker-mvp-setup.yml -# Adds: QA Results section to story +# 12. QA Decision: +# - PASS → Continue to commit +# - CONCERNS → Review with team +# - FAIL → Return to implementation -# 8. If issues found, dev addresses them +# === PHASE 6: ADDRESS ISSUES (IF NEEDED) === +# 13. If QA found issues: +# - Fix the issues +# - Re-run tests +# - Request QA to update gate: @qa *gate Story-1.1 -# 9. QA updates gate if needed -@qa *gate Story-8.0.1 +# === PHASE 7: COMMIT & MERGE (ONLY AFTER QA APPROVAL) === +# 14. QA approves (>80% tests pass) +# User says: "QA approved, ready to commit" -# 10. Commit and push +# 15. Commit changes git add . -git commit -m "Add Story 8.0.1: Management Demo Readiness & Validation" -git push origin feature/brownfield-prd +git commit -m "Add Story 1.1: Database Schema Extension" + +# 16. Push to branch +git push origin story/1.1-database-schema + +# 17. 
Merge to master +git checkout master +git merge story/1.1-database-schema +git push origin master + +# 18. Delete branch +git branch -d story/1.1-database-schema +git push origin --delete story/1.1-database-schema + +# === PHASE 8: NEXT STORY === +# 19. Compact conversation +# 20. Start next story cycle +``` + +### Common Pitfalls to Avoid + +❌ **DON'T:** +- Commit before QA approval +- Skip `*risk` and `*design` for brownfield stories +- Merge without >80% test pass rate +- Delete branch before successful merge +- Skip branch creation and commit directly to master + +✅ **DO:** +- Wait for explicit QA approval +- Create branch per story +- Run all required QA commands +- Verify tests pass before commit +- Clean up branches after merge + +### Documentation & Audit Trail + +**All QA activities create permanent records:** + +```text +docs/qa/assessments/ + └── flodoc.1.1-risk-20260114.md + └── flodoc.1.1-test-design-20260114.md + └── flodoc.1.1-trace-20260114.md + └── flodoc.1.1-nfr-20260114.md + +docs/qa/gates/ + └── flodoc.1.1-database-schema.yml + +docs/stories/ + └── 1.1.database-schema-extension.md (with QA Results section) ``` ### Success Metrics -The Test Architect helps achieve: -- **Zero regression defects** in production -- **100% requirements coverage** with tests -- **Clear quality gates** for go/no-go decisions -- **Documented risk acceptance** for technical debt -- **Consistent test quality** across the team -- **Shift-left testing** with early risk identification \ No newline at end of file +**The strict workflow ensures:** +- ✅ Zero regression defects in master +- ✅ 100% requirements coverage with tests +- ✅ Clear quality gates for go/no-go decisions +- ✅ Documented risk acceptance +- ✅ Consistent test quality +- ✅ Clean git history with per-story branches +- ✅ Management demo ready at all times + +### Emergency Rollback + +**If something breaks in master:** + +```bash +# 1. Identify the problematic story +git log --oneline + +# 2. 
Revert the merge commit +git revert + +# 3. Push the revert +git push origin master + +# 4. Re-open the story for fix +# Create new branch from master +git checkout -b story/1.1-fix +# Fix and re-run full QA cycle +``` + +**Remember: The workflow is SUPER CRITICAL. Violating it risks breaking the FloDoc enhancement project.** \ No newline at end of file diff --git a/db/migrate/20250103000002_create_institutions.rb b/db/migrate/20250103000002_create_institutions.rb deleted file mode 100644 index 780881c4..00000000 --- a/db/migrate/20250103000002_create_institutions.rb +++ /dev/null @@ -1,30 +0,0 @@ -# frozen_string_literal: true - -# Migration 2: Create institutions table -# Part of Winston's 4-layer data isolation foundation -class CreateInstitutions < ActiveRecord::Migration[7.0] - def change - create_table :institutions do |t| - # Core relationships - t.references :account, null: false, foreign_key: true, index: { unique: true } - t.references :super_admin, null: false, foreign_key: { to_table: :users } - - # Institution details - t.string :name, null: false - t.string :registration_number - t.text :address - t.string :contact_email - t.string :contact_phone - - # Settings (JSONB for flexibility) - t.jsonb :settings, null: false, default: {} - - # Timestamps - t.timestamps - end - - # Unique constraints - add_index :institutions, [:account_id, :registration_number], unique: true, where: 'registration_number IS NOT NULL' - add_index :institutions, :super_admin_id - end -end \ No newline at end of file diff --git a/db/schema.rb b/db/schema.rb index 96cf001e..1e2c8d7e 100644 --- a/db/schema.rb +++ b/db/schema.rb @@ -227,7 +227,7 @@ ActiveRecord::Schema[8.0].define(version: 2026_01_14_000001) do t.datetime "created_at", null: false t.index ["account_id", "event_datetime"], name: "index_email_events_on_account_id_and_event_datetime" t.index ["email"], name: "index_email_events_on_email" - t.index ["email"], name: "index_email_events_on_email_event_types", where: 
"((event_type)::text = ANY ((ARRAY['bounce'::character varying, 'soft_bounce'::character varying, 'permanent_bounce'::character varying, 'complaint'::character varying, 'soft_complaint'::character varying])::text[]))" + t.index ["email"], name: "index_email_events_on_email_event_types", where: "((event_type)::text = ANY (ARRAY[('bounce'::character varying)::text, ('soft_bounce'::character varying)::text, ('permanent_bounce'::character varying)::text, ('complaint'::character varying)::text, ('soft_complaint'::character varying)::text]))" t.index ["emailable_type", "emailable_id"], name: "index_email_events_on_emailable" t.index ["message_id"], name: "index_email_events_on_message_id" end @@ -282,7 +282,7 @@ ActiveRecord::Schema[8.0].define(version: 2026_01_14_000001) do t.string "event_name", null: false t.datetime "created_at", null: false t.datetime "updated_at", null: false - t.index ["event_name", "key"], name: "index_lock_events_on_event_name_and_key", unique: true, where: "((event_name)::text = ANY ((ARRAY['start'::character varying, 'complete'::character varying])::text[]))" + t.index ["event_name", "key"], name: "index_lock_events_on_event_name_and_key", unique: true, where: "((event_name)::text = ANY (ARRAY[('start'::character varying)::text, ('complete'::character varying)::text]))" t.index ["key"], name: "index_lock_events_on_key" end @@ -354,7 +354,7 @@ ActiveRecord::Schema[8.0].define(version: 2026_01_14_000001) do t.datetime "created_at", null: false t.datetime "updated_at", null: false t.bigint "account_id" - t.index ["account_id", "created_at"], name: "index_submissions_events_on_sms_event_types", where: "((event_type)::text = ANY ((ARRAY['send_sms'::character varying, 'send_2fa_sms'::character varying])::text[]))" + t.index ["account_id", "created_at"], name: "index_submissions_events_on_sms_event_types", where: "((event_type)::text = ANY (ARRAY[('send_sms'::character varying)::text, ('send_2fa_sms'::character varying)::text]))" t.index 
["account_id"], name: "index_submission_events_on_account_id" t.index ["created_at"], name: "index_submission_events_on_created_at" t.index ["submission_id"], name: "index_submission_events_on_submission_id" diff --git a/spec/integration/cohort_workflow_spec.rb b/spec/integration/cohort_workflow_spec.rb index 63baf2db..7f0643bc 100644 --- a/spec/integration/cohort_workflow_spec.rb +++ b/spec/integration/cohort_workflow_spec.rb @@ -179,18 +179,18 @@ RSpec.describe 'Cohort Workflow Integration', type: :integration do # Complex query: Get all active cohorts with their templates and enrollments results = Cohort - .joins(:template, :institution) - .where(status: 'active') - .includes(:cohort_enrollments) - .map do |c| - { - cohort_name: c.name, - template_name: c.template.name, - institution_name: c.institution.name, - enrollment_count: c.cohort_enrollments.count, - active_enrollments: c.cohort_enrollments.where(status: 'complete').count - } - end + .joins(:template, :institution) + .where(status: 'active') + .includes(:cohort_enrollments) + .map do |c| + { + cohort_name: c.name, + template_name: c.template.name, + institution_name: c.institution.name, + enrollment_count: c.cohort_enrollments.count, + active_enrollments: c.cohort_enrollments.where(status: 'complete').count + } + end expect(results.length).to eq(1) expect(results.first[:cohort_name]).to eq('Cohort 1') @@ -328,16 +328,16 @@ RSpec.describe 'Cohort Workflow Integration', type: :integration do # Query with joins - verify the query executes without error # Index usage depends on data size and query planner decisions results = Cohort - .joins(:cohort_enrollments) - .where(cohort_enrollments: { status: 'complete' }) - .to_a + .joins(:cohort_enrollments) + .where(cohort_enrollments: { status: 'complete' }) + .to_a expect(results.length).to be > 0 end it 'performs well with large datasets' do # Measure query time start_time = Time.current - results = Cohort + Cohort .joins(:institution, :template) .where(status: 
'active') .includes(:cohort_enrollments) @@ -360,8 +360,8 @@ RSpec.describe 'Cohort Workflow Integration', type: :integration do expect(submission_columns).to include('account_id', 'template_id', 'slug') # Verify no new columns were added to existing tables - expect(template_columns).to_not include('flo_doc_specific') - expect(submission_columns).to_not include('flo_doc_specific') + expect(template_columns).not_to include('flo_doc_specific') + expect(submission_columns).not_to include('flo_doc_specific') end it 'allows existing DocuSeal workflows to continue working' do @@ -380,7 +380,7 @@ RSpec.describe 'Cohort Workflow Integration', type: :integration do slug: "standard-slug-#{SecureRandom.hex(4)}", variables: '{}' ) - submitter = Submitter.create!( + Submitter.create!( account_id: account.id, submission_id: submission.id, email: 'submitter@example.com', diff --git a/spec/migrations/20260114000001_create_flo_doc_tables_spec.rb b/spec/migrations/20260114000001_create_flo_doc_tables_spec.rb index eda7379a..aad1c0b8 100644 --- a/spec/migrations/20260114000001_create_flo_doc_tables_spec.rb +++ b/spec/migrations/20260114000001_create_flo_doc_tables_spec.rb @@ -13,19 +13,19 @@ RSpec.describe CreateFloDocTables, type: :migration do # Helper to drop tables for testing def drop_tables_if_exist - [:cohort_enrollments, :cohorts, :institutions].each do |table| + %i[cohort_enrollments cohorts institutions].each do |table| conn.drop_table(table, if_exists: true) end end # Helper to drop FKs def drop_fks_if_exist - [:cohorts, :cohort_enrollments].each do |table| + %i[cohorts cohort_enrollments].each do |table| conn.foreign_keys(table).each do |fk| conn.remove_foreign_key(table, name: fk.name) end end - rescue => e + rescue StandardError # Ignore errors if FKs don't exist end @@ -60,24 +60,24 @@ RSpec.describe CreateFloDocTables, type: :migration do it 'has correct columns for institutions' do columns = conn.columns(:institutions).map(&:name) expect(columns).to include('name', 
'email', 'contact_person', 'phone', - 'settings', 'created_at', 'updated_at', 'deleted_at') + 'settings', 'created_at', 'updated_at', 'deleted_at') end it 'has correct columns for cohorts' do columns = conn.columns(:cohorts).map(&:name) expect(columns).to include('institution_id', 'template_id', 'name', 'program_type', - 'sponsor_email', 'required_student_uploads', 'cohort_metadata', - 'status', 'tp_signed_at', 'students_completed_at', - 'sponsor_completed_at', 'finalized_at', 'created_at', - 'updated_at', 'deleted_at') + 'sponsor_email', 'required_student_uploads', 'cohort_metadata', + 'status', 'tp_signed_at', 'students_completed_at', + 'sponsor_completed_at', 'finalized_at', 'created_at', + 'updated_at', 'deleted_at') end it 'has correct columns for cohort_enrollments' do columns = conn.columns(:cohort_enrollments).map(&:name) expect(columns).to include('cohort_id', 'submission_id', 'student_email', - 'student_name', 'student_surname', 'student_id', - 'status', 'role', 'uploaded_documents', 'values', - 'completed_at', 'created_at', 'updated_at', 'deleted_at') + 'student_name', 'student_surname', 'student_id', + 'status', 'role', 'uploaded_documents', 'values', + 'completed_at', 'created_at', 'updated_at', 'deleted_at') end end @@ -147,14 +147,14 @@ RSpec.describe CreateFloDocTables, type: :migration do before { migration.change } it 'creates correct indexes on cohorts' do - expect(conn.index_exists?(:cohorts, [:institution_id, :status])).to be true + expect(conn.index_exists?(:cohorts, %i[institution_id status])).to be true expect(conn.index_exists?(:cohorts, :template_id)).to be true expect(conn.index_exists?(:cohorts, :sponsor_email)).to be true end it 'creates correct indexes on cohort_enrollments' do - expect(conn.index_exists?(:cohort_enrollments, [:cohort_id, :status])).to be true - expect(conn.index_exists?(:cohort_enrollments, [:cohort_id, :student_email], unique: true)).to be true + expect(conn.index_exists?(:cohort_enrollments, %i[cohort_id 
status])).to be true + expect(conn.index_exists?(:cohort_enrollments, %i[cohort_id student_email], unique: true)).to be true expect(conn.index_exists?(:cohort_enrollments, [:submission_id], unique: true)).to be true end end @@ -183,7 +183,7 @@ RSpec.describe CreateFloDocTables, type: :migration do # Tables should not exist before running migration expect(conn.table_exists?(:institutions)).to be false - expect { migration.change }.to_not raise_error + expect { migration.change }.not_to raise_error migration.down expect(conn.table_exists?(:institutions)).to be false @@ -199,8 +199,8 @@ RSpec.describe CreateFloDocTables, type: :migration do migration.change migration.down - expect(conn.index_exists?(:cohorts, [:institution_id, :status])).to be false - expect(conn.index_exists?(:cohort_enrollments, [:cohort_id, :student_email], unique: true)).to be false + expect(conn.index_exists?(:cohorts, %i[institution_id status])).to be false + expect(conn.index_exists?(:cohort_enrollments, %i[cohort_id student_email], unique: true)).to be false end it 'removes foreign keys on rollback' do @@ -221,36 +221,54 @@ RSpec.describe CreateFloDocTables, type: :migration do it 'enforces NOT NULL via database constraints' do # Institutions - name - expect { - conn.execute("INSERT INTO institutions (email, created_at, updated_at) VALUES ('test@example.com', NOW(), NOW())") - }.to raise_error(ActiveRecord::StatementInvalid) + expect do + conn.execute( + 'INSERT INTO institutions (email, created_at, updated_at) ' \ + "VALUES ('test@example.com', NOW(), NOW())" + ) + end.to raise_error(ActiveRecord::StatementInvalid) # Institutions - email - expect { - conn.execute("INSERT INTO institutions (name, created_at, updated_at) VALUES ('Test', NOW(), NOW())") - }.to raise_error(ActiveRecord::StatementInvalid) + expect do + conn.execute( + 'INSERT INTO institutions (name, created_at, updated_at) ' \ + "VALUES ('Test', NOW(), NOW())" + ) + end.to
raise_error(ActiveRecord::StatementInvalid) # Cohorts - name (without required fields) - expect { - conn.execute("INSERT INTO cohorts (institution_id, template_id, program_type, sponsor_email, created_at, updated_at) VALUES (1, 1, 'learnership', 'test@example.com', NOW(), NOW())") - }.to raise_error(ActiveRecord::StatementInvalid) + expect do + conn.execute( + 'INSERT INTO cohorts (institution_id, template_id, program_type, sponsor_email, created_at, updated_at) ' \ + "VALUES (1, 1, 'learnership', 'test@example.com', NOW(), NOW())" + ) + end.to raise_error(ActiveRecord::StatementInvalid) # CohortEnrollments - student_email - expect { - conn.execute("INSERT INTO cohort_enrollments (cohort_id, submission_id, created_at, updated_at) VALUES (1, 1, NOW(), NOW())") - }.to raise_error(ActiveRecord::StatementInvalid) + expect do + conn.execute( + 'INSERT INTO cohort_enrollments (cohort_id, submission_id, created_at, updated_at) ' \ + 'VALUES (1, 1, NOW(), NOW())' + ) + end.to raise_error(ActiveRecord::StatementInvalid) end it 'prevents orphaned records via foreign keys' do # Try to create cohort with non-existent institution - expect { - conn.execute("INSERT INTO cohorts (institution_id, template_id, name, program_type, sponsor_email, created_at, updated_at) VALUES (999999, 1, 'Test', 'learnership', 'test@example.com', NOW(), NOW())") - }.to raise_error(ActiveRecord::StatementInvalid) + expect do + conn.execute( + 'INSERT INTO cohorts (institution_id, template_id, name, program_type, sponsor_email, created_at, updated_at) ' \ + "VALUES (999999, 1, 'Test', 'learnership', 'test@example.com', NOW(), NOW())" + ) + end.to raise_error(ActiveRecord::StatementInvalid) # Try to create enrollment with non-existent cohort - expect { - conn.execute("INSERT INTO cohort_enrollments (cohort_id, submission_id, student_email, created_at, updated_at) VALUES (999999, 1, 'test@example.com', NOW(), NOW())") - }.to
raise_error(ActiveRecord::StatementInvalid) + expect do + conn.execute( + 'INSERT INTO cohort_enrollments (cohort_id, submission_id, student_email, created_at, updated_at) ' \ + "VALUES (999999, 1, 'test@example.com', NOW(), NOW())" + ) + end.to raise_error(ActiveRecord::StatementInvalid) end end @@ -258,8 +276,11 @@ RSpec.describe CreateFloDocTables, type: :migration do before { migration.change } it 'creates institutions with correct defaults' do - conn.execute("INSERT INTO institutions (name, email, created_at, updated_at) VALUES ('Test', 'test@example.com', NOW(), NOW())") - result = conn.select_one("SELECT settings, deleted_at FROM institutions WHERE name = 'Test'") + conn.execute( + 'INSERT INTO institutions (name, email, created_at, updated_at) ' \ + "VALUES ('Test', 'test@example.com', NOW(), NOW())" + ) + result = conn.select_one("SELECT settings, deleted_at FROM institutions WHERE name = 'Test'") # JSONB returns string in raw SQL, but empty object expect(result['settings']).to be_in([{}, '{}']) expect(result['deleted_at']).to be_nil