diff --git a/.agents/skills/mework/SKILL.md b/.agents/skills/mework/SKILL.md new file mode 100644 index 0000000..a26498a --- /dev/null +++ b/.agents/skills/mework/SKILL.md @@ -0,0 +1,417 @@ +--- +name: mework-conventions +description: Development conventions and patterns for mework. Python project with conventional commits. +--- + +# Mework Conventions + +> Generated from [jaayslaughter-cpu/mework](https://github.com/jaayslaughter-cpu/mework) on 2026-03-20 + +## Overview + +This skill teaches Claude the development patterns and conventions used in mework. + +## Tech Stack + +- **Primary Language**: Python +- **Architecture**: hybrid module organization +- **Test Location**: separate + +## When to Use This Skill + +Activate this skill when: +- Making changes to this repository +- Adding new features following established patterns +- Writing tests that match project conventions +- Creating commits with proper message format + +## Commit Conventions + +Follow these commit message conventions based on 55 analyzed commits. 
+ +### Commit Style: Conventional Commits + +### Prefixes Used + +- `feat` +- `fix` +- `chore` + +### Message Guidelines + +- Average message length: ~58 characters +- Keep first line concise and descriptive +- Use imperative mood ("Add feature" not "Added feature") + + +*Commit message example* + +```text +feat: PropIQ complete implementation [fix/sync-worker-bugs] +``` + +*Commit message example* + +```text +fix(docker): Add REDIS_PASSWORD env var to hub service +``` + +*Commit message example* + +```text +chore: Tighten CORS origins, remove wildcard +``` + +*Commit message example* + +```text +Merge pull request #25 from jaayslaughter-cpu/ticket-6.4-defensive-contrast +``` + +*Commit message example* + +```text +Merge branch 'main' into ticket-6.4-defensive-contrast +``` + +*Commit message example* + +```text +chore: Remove Streamlit dashboard +``` + +*Commit message example* + +```text +feat(scripts): Enhanced training pipeline with multi-source data +``` + +*Commit message example* + +```text +feat(api): Defensive Contrast Engine for batted-ball profile mismatches +``` + +## Architecture + +### Project Structure: Single Package + +This project uses **hybrid** module organization. 
+ +### Configuration Files + +- `.github/workflows/npm-publish-github-packages.yml` +- `api/Dockerfile` +- `docker-compose.yml` +- `hub/Dockerfile` +- `hub/package.json` + +### Guidelines + +- This project uses a hybrid organization +- Follow existing patterns when adding new code + +## Code Style + +### Language: Python + +### Naming Conventions + +| Element | Convention | +|---------|------------| +| Files | snake_case | +| Functions | camelCase | +| Classes | PascalCase | +| Constants | SCREAMING_SNAKE_CASE | + +### Import Style: Mixed Style + +### Export Style: Mixed Style + + +## Error Handling + +### Error Handling Style: Try-Catch Blocks + + +*Standard error handling pattern* + +```typescript +try { + const result = await riskyOperation() + return result +} catch (error) { + console.error('Operation failed:', error) + throw new Error('User-friendly message') +} +``` + +## Common Workflows + +These workflows were detected from analyzing commit patterns. + +### Database Migration + +Database schema changes with migration files + +**Frequency**: ~10 times per month + +**Steps**: +1. Create migration file +2. Update schema definitions +3. Generate/update types + +**Example commit sequence**: +``` +fix: CI bot feedback - placeholder files, security, and schema fixes +Merge pull request #4 from jaayslaughter-cpu/ticket-1.4-bets-log-views-indexes +fix: change npm ci to npm install, reorder USER before EXPOSE +``` + +### Feature Development + +Standard feature implementation workflow + +**Frequency**: ~17 times per month + +**Steps**: +1. Add feature implementation +2. Add tests for feature +3. 
Update documentation + +**Files typically involved**: +- `**/api/**` + +**Example commit sequence**: +``` +fix: CI bot feedback - placeholder files, security, and schema fixes +Merge pull request #4 from jaayslaughter-cpu/ticket-1.4-bets-log-views-indexes +fix: change npm ci to npm install, reorder USER before EXPOSE +``` + +### Refactoring + +Code refactoring and cleanup workflow + +**Frequency**: ~2 times per month + +**Steps**: +1. Ensure tests pass before refactor +2. Refactor code structure +3. Verify tests still pass + +**Files typically involved**: +- `src/**/*` + +**Example commit sequence**: +``` +fix: 8 architecture improvements from CI bot review +fix: 5 architecture refinements from CI bot review +feat(api): FastAPI bootstrap with async SQLAlchemy +``` + +### Add Or Update Database Table Or Schema + +Adds or updates a database table, view, or index, often for new features or analytics. Includes SQL migration files and sometimes updates to related backend code. + +**Frequency**: ~4 times per month + +**Steps**: +1. Create or update SQL migration file in db/init/*.sql +2. Sometimes update related backend code (e.g., api/database.py, hub/src/sync.js) to use new/changed tables +3. Commit migration and related code + +**Files typically involved**: +- `db/init/01_core_reference.sql` +- `db/init/02_projection_market_layer.sql` +- `db/init/03_bets_log_views_indexes.sql` + +**Example commit sequence**: +``` +Create or update SQL migration file in db/init/*.sql +Sometimes update related backend code (e.g., api/database.py, hub/src/sync.js) to use new/changed tables +Commit migration and related code +``` + +### Add Or Enhance Api Endpoint + +Adds or updates FastAPI endpoints, including new routers, services, and sometimes model or requirements updates. + +**Frequency**: ~3 times per month + +**Steps**: +1. Create or update api/routers/*.py for endpoint logic +2. Create or update api/services/*.py for business logic +3. 
Update api/main.py to register new routers +4. Update api/requirements.txt if new dependencies are needed +5. Commit all related files + +**Files typically involved**: +- `api/routers/*.py` +- `api/services/*.py` +- `api/main.py` +- `api/requirements.txt` + +**Example commit sequence**: +``` +Create or update api/routers/*.py for endpoint logic +Create or update api/services/*.py for business logic +Update api/main.py to register new routers +Update api/requirements.txt if new dependencies are needed +Commit all related files +``` + +### Add Or Train Ml Model + +Adds new ML models, training scripts, and updates model artifacts for predictions. + +**Frequency**: ~2 times per month + +**Steps**: +1. Create or update scripts/train_model.py for training logic +2. Generate or update model artifact files in api/models/*.json +3. Update api/services/predictor.py to use new models if needed +4. Update api/requirements.txt if new ML dependencies are needed +5. Commit all related files + +**Files typically involved**: +- `scripts/train_model.py` +- `api/models/*.json` +- `api/services/predictor.py` +- `api/requirements.txt` + +**Example commit sequence**: +``` +Create or update scripts/train_model.py for training logic +Generate or update model artifact files in api/models/*.json +Update api/services/predictor.py to use new models if needed +Update api/requirements.txt if new ML dependencies are needed +Commit all related files +``` + +### Add Or Enhance Hub Sync Worker + +Implements or updates the Node.js hub's background sync worker for polling APIs and syncing betting markets. + +**Frequency**: ~2 times per month + +**Steps**: +1. Update or create hub/src/sync.js for polling logic +2. Update related fetchers (hub/src/fetchers/*.js) for new data sources +3. Update hub/src/server.js to integrate sync worker +4. Update docker-compose.yml or .env.example if new env vars are needed +5. 
Commit all related files + +**Files typically involved**: +- `hub/src/sync.js` +- `hub/src/fetchers/*.js` +- `hub/src/server.js` +- `docker-compose.yml` +- `.env.example` + +**Example commit sequence**: +``` +Update or create hub/src/sync.js for polling logic +Update related fetchers (hub/src/fetchers/*.js) for new data sources +Update hub/src/server.js to integrate sync worker +Update docker-compose.yml or .env.example if new env vars are needed +Commit all related files +``` + +### Add Or Enhance Dashboard Feature + +Adds or updates the Streamlit dashboard, including app logic, requirements, and Dockerfile. + +**Frequency**: ~2 times per month + +**Steps**: +1. Create or update dashboard/app.py for Streamlit UI +2. Update dashboard/requirements.txt for new dependencies +3. Update dashboard/Dockerfile if needed +4. Update docker-compose.yml if dashboard service changes +5. Commit all related files + +**Files typically involved**: +- `dashboard/app.py` +- `dashboard/requirements.txt` +- `dashboard/Dockerfile` +- `docker-compose.yml` + +**Example commit sequence**: +``` +Create or update dashboard/app.py for Streamlit UI +Update dashboard/requirements.txt for new dependencies +Update dashboard/Dockerfile if needed +Update docker-compose.yml if dashboard service changes +Commit all related files +``` + +### Add Or Update Docker Orchestration + +Updates Docker Compose and service Dockerfiles to orchestrate multi-service deployments, often when adding new services or changing environment variables. + +**Frequency**: ~2 times per month + +**Steps**: +1. Update docker-compose.yml with new/changed services or env vars +2. Update service Dockerfiles as needed (api/Dockerfile, dashboard/Dockerfile, hub/Dockerfile) +3. Update .env.example for new environment variables +4. 
Commit all related files + +**Files typically involved**: +- `docker-compose.yml` +- `api/Dockerfile` +- `dashboard/Dockerfile` +- `hub/Dockerfile` +- `.env.example` + +**Example commit sequence**: +``` +Update docker-compose.yml with new/changed services or env vars +Update service Dockerfiles as needed (api/Dockerfile, dashboard/Dockerfile, hub/Dockerfile) +Update .env.example for new environment variables +Commit all related files +``` + +### Add Or Update Backend Service Logic + +Implements or enhances backend service logic, especially in api/services/*.py, often for analytics engines (fatigue, usage vacuums, defensive contrast, etc). + +**Frequency**: ~3 times per month + +**Steps**: +1. Create or update api/services/*.py with new logic +2. Update api/services/predictor.py to integrate new logic +3. Update or create tests if needed +4. Commit all related files + +**Files typically involved**: +- `api/services/*.py` + +**Example commit sequence**: +``` +Create or update api/services/*.py with new logic +Update api/services/predictor.py to integrate new logic +Update or create tests if needed +Commit all related files +``` + + +## Best Practices + +Based on analysis of the codebase, follow these practices: + +### Do + +- Use conventional commit format (feat:, fix:, etc.) +- Use snake_case for file names +- Prefer mixed exports + +### Don't + +- Don't write vague commit messages +- Don't deviate from established patterns without discussion + +--- + +*This skill was auto-generated by [ECC Tools](https://ecc.tools). 
Review and customize as needed for your team.* diff --git a/.agents/skills/mework/agents/openai.yaml b/.agents/skills/mework/agents/openai.yaml new file mode 100644 index 0000000..15eb651 --- /dev/null +++ b/.agents/skills/mework/agents/openai.yaml @@ -0,0 +1,6 @@ +interface: + display_name: "Mework" + short_description: "Repo-specific patterns and workflows for mework" + default_prompt: "Use the mework repo skill to follow existing architecture, testing, and workflow conventions." +policy: + allow_implicit_invocation: true \ No newline at end of file diff --git a/.claude/commands/database-migration.md b/.claude/commands/database-migration.md new file mode 100644 index 0000000..4ca5825 --- /dev/null +++ b/.claude/commands/database-migration.md @@ -0,0 +1,35 @@ +--- +name: database-migration +description: Workflow command scaffold for database-migration in mework. +allowed_tools: ["Bash", "Read", "Write", "Grep", "Glob"] +--- + +# /database-migration + +Use this workflow when working on **database-migration** in `mework`. + +## Goal + +Database schema changes with migration files + +## Common Files + +- Inspect the files touched by the related commits before editing. + +## Suggested Sequence + +1. Understand the current state and failure mode before editing. +2. Make the smallest coherent change that satisfies the workflow goal. +3. Run the most relevant verification for touched files. +4. Summarize what changed and what still needs review. + +## Typical Commit Signals + +- Create migration file +- Update schema definitions +- Generate/update types + +## Notes + +- Treat this as a scaffold, not a hard-coded script. +- Update the command if the workflow evolves materially. 
\ No newline at end of file diff --git a/.claude/commands/feature-development.md b/.claude/commands/feature-development.md new file mode 100644 index 0000000..1dd3f71 --- /dev/null +++ b/.claude/commands/feature-development.md @@ -0,0 +1,35 @@ +--- +name: feature-development +description: Workflow command scaffold for feature-development in mework. +allowed_tools: ["Bash", "Read", "Write", "Grep", "Glob"] +--- + +# /feature-development + +Use this workflow when working on **feature-development** in `mework`. + +## Goal + +Standard feature implementation workflow + +## Common Files + +- `**/api/**` + +## Suggested Sequence + +1. Understand the current state and failure mode before editing. +2. Make the smallest coherent change that satisfies the workflow goal. +3. Run the most relevant verification for touched files. +4. Summarize what changed and what still needs review. + +## Typical Commit Signals + +- Add feature implementation +- Add tests for feature +- Update documentation + +## Notes + +- Treat this as a scaffold, not a hard-coded script. +- Update the command if the workflow evolves materially. \ No newline at end of file diff --git a/.claude/commands/refactoring.md b/.claude/commands/refactoring.md new file mode 100644 index 0000000..033ddc5 --- /dev/null +++ b/.claude/commands/refactoring.md @@ -0,0 +1,35 @@ +--- +name: refactoring +description: Workflow command scaffold for refactoring in mework. +allowed_tools: ["Bash", "Read", "Write", "Grep", "Glob"] +--- + +# /refactoring + +Use this workflow when working on **refactoring** in `mework`. + +## Goal + +Code refactoring and cleanup workflow + +## Common Files + +- `src/**/*` + +## Suggested Sequence + +1. Understand the current state and failure mode before editing. +2. Make the smallest coherent change that satisfies the workflow goal. +3. Run the most relevant verification for touched files. +4. Summarize what changed and what still needs review. 
+ +## Typical Commit Signals + +- Ensure tests pass before refactor +- Refactor code structure +- Verify tests still pass + +## Notes + +- Treat this as a scaffold, not a hard-coded script. +- Update the command if the workflow evolves materially. \ No newline at end of file diff --git a/.claude/ecc-tools.json b/.claude/ecc-tools.json new file mode 100644 index 0000000..5a21467 --- /dev/null +++ b/.claude/ecc-tools.json @@ -0,0 +1,261 @@ +{ + "version": "1.3", + "schemaVersion": "1.0", + "generatedBy": "ecc-tools", + "generatedAt": "2026-03-20T03:03:49.551Z", + "repo": "https://github.com/jaayslaughter-cpu/mework", + "profiles": { + "requested": "full", + "recommended": "full", + "effective": "developer", + "requestedAlias": "full", + "recommendedAlias": "full", + "effectiveAlias": "developer" + }, + "requestedProfile": "full", + "profile": "developer", + "recommendedProfile": "full", + "effectiveProfile": "developer", + "tier": "free", + "requestedComponents": [ + "repo-baseline", + "workflow-automation", + "security-audits", + "research-tooling", + "team-rollout", + "governance-controls" + ], + "selectedComponents": [ + "repo-baseline", + "workflow-automation" + ], + "requestedAddComponents": [], + "requestedRemoveComponents": [], + "blockedRemovalComponents": [], + "tierFilteredComponents": [ + "security-audits", + "research-tooling", + "team-rollout", + "governance-controls" + ], + "requestedRootPackages": [ + "runtime-core", + "workflow-pack", + "agentshield-pack", + "research-pack", + "team-config-sync", + "enterprise-controls" + ], + "selectedRootPackages": [ + "runtime-core", + "workflow-pack" + ], + "requestedPackages": [ + "runtime-core", + "workflow-pack", + "agentshield-pack", + "research-pack", + "team-config-sync", + "enterprise-controls" + ], + "requestedAddPackages": [], + "requestedRemovePackages": [], + "selectedPackages": [ + "runtime-core", + "workflow-pack" + ], + "packages": [ + "runtime-core", + "workflow-pack" + ], + 
"blockedRemovalPackages": [], + "tierFilteredRootPackages": [ + "agentshield-pack", + "research-pack", + "team-config-sync", + "enterprise-controls" + ], + "tierFilteredPackages": [ + "agentshield-pack", + "research-pack", + "team-config-sync", + "enterprise-controls" + ], + "conflictingPackages": [], + "dependencyGraph": { + "runtime-core": [], + "workflow-pack": [ + "runtime-core" + ] + }, + "resolutionOrder": [ + "runtime-core", + "workflow-pack" + ], + "requestedModules": [ + "runtime-core", + "workflow-pack", + "agentshield-pack", + "research-pack", + "team-config-sync", + "enterprise-controls" + ], + "selectedModules": [ + "runtime-core", + "workflow-pack" + ], + "modules": [ + "runtime-core", + "workflow-pack" + ], + "managedFiles": [ + ".claude/skills/mework/SKILL.md", + ".agents/skills/mework/SKILL.md", + ".agents/skills/mework/agents/openai.yaml", + ".claude/identity.json", + ".codex/config.toml", + ".codex/AGENTS.md", + ".codex/agents/explorer.toml", + ".codex/agents/reviewer.toml", + ".codex/agents/docs-researcher.toml", + ".claude/homunculus/instincts/inherited/mework-instincts.yaml", + ".claude/commands/database-migration.md", + ".claude/commands/feature-development.md", + ".claude/commands/refactoring.md" + ], + "packageFiles": { + "runtime-core": [ + ".claude/skills/mework/SKILL.md", + ".agents/skills/mework/SKILL.md", + ".agents/skills/mework/agents/openai.yaml", + ".claude/identity.json", + ".codex/config.toml", + ".codex/AGENTS.md", + ".codex/agents/explorer.toml", + ".codex/agents/reviewer.toml", + ".codex/agents/docs-researcher.toml", + ".claude/homunculus/instincts/inherited/mework-instincts.yaml" + ], + "workflow-pack": [ + ".claude/commands/database-migration.md", + ".claude/commands/feature-development.md", + ".claude/commands/refactoring.md" + ] + }, + "moduleFiles": { + "runtime-core": [ + ".claude/skills/mework/SKILL.md", + ".agents/skills/mework/SKILL.md", + ".agents/skills/mework/agents/openai.yaml", + ".claude/identity.json", + 
".codex/config.toml", + ".codex/AGENTS.md", + ".codex/agents/explorer.toml", + ".codex/agents/reviewer.toml", + ".codex/agents/docs-researcher.toml", + ".claude/homunculus/instincts/inherited/mework-instincts.yaml" + ], + "workflow-pack": [ + ".claude/commands/database-migration.md", + ".claude/commands/feature-development.md", + ".claude/commands/refactoring.md" + ] + }, + "files": [ + { + "moduleId": "runtime-core", + "path": ".claude/skills/mework/SKILL.md", + "description": "Repository-specific Claude Code skill generated from git history." + }, + { + "moduleId": "runtime-core", + "path": ".agents/skills/mework/SKILL.md", + "description": "Codex-facing copy of the generated repository skill." + }, + { + "moduleId": "runtime-core", + "path": ".agents/skills/mework/agents/openai.yaml", + "description": "Codex skill metadata so the repo skill appears cleanly in the skill interface." + }, + { + "moduleId": "runtime-core", + "path": ".claude/identity.json", + "description": "Suggested identity.json baseline derived from repository conventions." + }, + { + "moduleId": "runtime-core", + "path": ".codex/config.toml", + "description": "Repo-local Codex MCP and multi-agent baseline aligned with ECC defaults." + }, + { + "moduleId": "runtime-core", + "path": ".codex/AGENTS.md", + "description": "Codex usage guide that points at the generated repo skill and workflow bundle." + }, + { + "moduleId": "runtime-core", + "path": ".codex/agents/explorer.toml", + "description": "Read-only explorer role config for Codex multi-agent work." + }, + { + "moduleId": "runtime-core", + "path": ".codex/agents/reviewer.toml", + "description": "Read-only reviewer role config focused on correctness and security." + }, + { + "moduleId": "runtime-core", + "path": ".codex/agents/docs-researcher.toml", + "description": "Read-only docs researcher role config for API verification." 
+ }, + { + "moduleId": "runtime-core", + "path": ".claude/homunculus/instincts/inherited/mework-instincts.yaml", + "description": "Continuous-learning instincts derived from repository patterns." + }, + { + "moduleId": "workflow-pack", + "path": ".claude/commands/database-migration.md", + "description": "Workflow command scaffold for database-migration." + }, + { + "moduleId": "workflow-pack", + "path": ".claude/commands/feature-development.md", + "description": "Workflow command scaffold for feature-development." + }, + { + "moduleId": "workflow-pack", + "path": ".claude/commands/refactoring.md", + "description": "Workflow command scaffold for refactoring." + } + ], + "workflows": [ + { + "command": "database-migration", + "path": ".claude/commands/database-migration.md" + }, + { + "command": "feature-development", + "path": ".claude/commands/feature-development.md" + }, + { + "command": "refactoring", + "path": ".claude/commands/refactoring.md" + } + ], + "adapters": { + "claudeCode": { + "skillPath": ".claude/skills/mework/SKILL.md", + "identityPath": ".claude/identity.json", + "commandPaths": [ + ".claude/commands/database-migration.md", + ".claude/commands/feature-development.md", + ".claude/commands/refactoring.md" + ] + }, + "codex": { + "configPath": ".codex/config.toml", + "agentsGuidePath": ".codex/AGENTS.md", + "skillPath": ".agents/skills/mework/SKILL.md" + } + } +} \ No newline at end of file diff --git a/.claude/homunculus/instincts/inherited/mework-instincts.yaml b/.claude/homunculus/instincts/inherited/mework-instincts.yaml new file mode 100644 index 0000000..6da1677 --- /dev/null +++ b/.claude/homunculus/instincts/inherited/mework-instincts.yaml @@ -0,0 +1,357 @@ +# Instincts generated from https://github.com/jaayslaughter-cpu/mework +# Generated: 2026-03-20T03:04:10.701Z +# Version: 2.0 +# NOTE: This file supplements (does not replace) any existing curated instincts. 
+# High-confidence manually curated instincts should be preserved alongside these. + +--- +id: mework-commit-conventional +trigger: "when writing a commit message" +confidence: 0.85 +domain: git +source: repo-analysis +source_repo: https://github.com/jaayslaughter-cpu/mework +--- + +# Mework Commit Conventional + +## Action + +Use conventional commit format with prefixes: feat, fix, chore + +## Evidence + +- 55 commits analyzed +- Detected conventional commit pattern +- Examples: feat: PropIQ complete implementation [fix/sync-worker-bugs], fix(docker): Add REDIS_PASSWORD env var to hub service + +--- +id: mework-commit-length +trigger: "when writing a commit message" +confidence: 0.6 +domain: git +source: repo-analysis +source_repo: https://github.com/jaayslaughter-cpu/mework +--- + +# Mework Commit Length + +## Action + +Write moderate-length commit messages (~58 characters) + +## Evidence + +- Average commit message length: 58 chars +- Based on 55 commits + +--- +id: mework-naming-files +trigger: "when creating a new file" +confidence: 0.8 +domain: code-style +source: repo-analysis +source_repo: https://github.com/jaayslaughter-cpu/mework +--- + +# Mework Naming Files + +## Action + +Use snake_case naming convention + +## Evidence + +- Analyzed file naming patterns in repository +- Dominant pattern: snake_case + +--- +id: mework-export-style +trigger: "when exporting from a module" +confidence: 0.7 +domain: code-style +source: repo-analysis +source_repo: https://github.com/jaayslaughter-cpu/mework +--- + +# Mework Export Style + +## Action + +Prefer mixed exports + +## Evidence + +- Export pattern analysis +- Dominant style: mixed + +--- +id: mework-test-separate +trigger: "when writing tests" +confidence: 0.8 +domain: testing +source: repo-analysis +source_repo: https://github.com/jaayslaughter-cpu/mework +--- + +# Mework Test Separate + +## Action + +Place tests in the tests/ or __tests__/ directory, mirroring src structure + +## Evidence + +- Separate test 
directory pattern detected +- Tests live in dedicated test folders + +--- +id: mework-workflow-database-migration +trigger: "when modifying the database schema or adding tables" +confidence: 0.9 +domain: workflow +source: repo-analysis +source_repo: https://github.com/jaayslaughter-cpu/mework +--- + +# Mework Workflow Database Migration + +## Action + +Follow the database-migration workflow: +1. Create migration file +2. Update schema definitions +3. Generate/update types + +## Evidence + +- Workflow detected from commit patterns +- Frequency: ~10x per month +- Files: + +--- +id: mework-workflow-feature-development +trigger: "when implementing a new feature" +confidence: 0.9 +domain: workflow +source: repo-analysis +source_repo: https://github.com/jaayslaughter-cpu/mework +--- + +# Mework Workflow Feature Development + +## Action + +Follow the feature-development workflow: +1. Add feature implementation +2. Add tests for feature +3. Update documentation + +## Evidence + +- Workflow detected from commit patterns +- Frequency: ~17x per month +- Files: **/api/** + +--- +id: mework-workflow-refactoring +trigger: "when refactoring code" +confidence: 0.6 +domain: workflow +source: repo-analysis +source_repo: https://github.com/jaayslaughter-cpu/mework +--- + +# Mework Workflow Refactoring + +## Action + +Follow the refactoring workflow: +1. Ensure tests pass before refactor +2. Refactor code structure +3. Verify tests still pass + +## Evidence + +- Workflow detected from commit patterns +- Frequency: ~2x per month +- Files: src/**/* + +--- +id: mework-workflow-add-or-update-database-table-or-schema +trigger: "when doing add or update database table or schema" +confidence: 0.7 +domain: workflow +source: repo-analysis +source_repo: https://github.com/jaayslaughter-cpu/mework +--- + +# Mework Workflow Add Or Update Database Table Or Schema + +## Action + +Follow the add-or-update-database-table-or-schema workflow: +1. Create or update SQL migration file in db/init/*.sql +2. 
Sometimes update related backend code (e.g., api/database.py, hub/src/sync.js) to use new/changed tables +3. Commit migration and related code + +## Evidence + +- Workflow detected from commit patterns +- Frequency: ~4x per month +- Files: db/init/01_core_reference.sql, db/init/02_projection_market_layer.sql, db/init/03_bets_log_views_indexes.sql + +--- +id: mework-workflow-add-or-enhance-api-endpoint +trigger: "when doing add or enhance api endpoint" +confidence: 0.65 +domain: workflow +source: repo-analysis +source_repo: https://github.com/jaayslaughter-cpu/mework +--- + +# Mework Workflow Add Or Enhance Api Endpoint + +## Action + +Follow the add-or-enhance-api-endpoint workflow: +1. Create or update api/routers/*.py for endpoint logic +2. Create or update api/services/*.py for business logic +3. Update api/main.py to register new routers +4. Update api/requirements.txt if new dependencies are needed +5. Commit all related files + +## Evidence + +- Workflow detected from commit patterns +- Frequency: ~3x per month +- Files: api/routers/*.py, api/services/*.py, api/main.py + +--- +id: mework-workflow-add-or-train-ml-model +trigger: "when doing add or train ml model" +confidence: 0.6 +domain: workflow +source: repo-analysis +source_repo: https://github.com/jaayslaughter-cpu/mework +--- + +# Mework Workflow Add Or Train Ml Model + +## Action + +Follow the add-or-train-ml-model workflow: +1. Create or update scripts/train_model.py for training logic +2. Generate or update model artifact files in api/models/*.json +3. Update api/services/predictor.py to use new models if needed +4. Update api/requirements.txt if new ML dependencies are needed +5. 
Commit all related files + +## Evidence + +- Workflow detected from commit patterns +- Frequency: ~2x per month +- Files: scripts/train_model.py, api/models/*.json, api/services/predictor.py + +--- +id: mework-workflow-add-or-enhance-hub-sync-worker +trigger: "when doing add or enhance hub sync worker" +confidence: 0.6 +domain: workflow +source: repo-analysis +source_repo: https://github.com/jaayslaughter-cpu/mework +--- + +# Mework Workflow Add Or Enhance Hub Sync Worker + +## Action + +Follow the add-or-enhance-hub-sync-worker workflow: +1. Update or create hub/src/sync.js for polling logic +2. Update related fetchers (hub/src/fetchers/*.js) for new data sources +3. Update hub/src/server.js to integrate sync worker +4. Update docker-compose.yml or .env.example if new env vars are needed +5. Commit all related files + +## Evidence + +- Workflow detected from commit patterns +- Frequency: ~2x per month +- Files: hub/src/sync.js, hub/src/fetchers/*.js, hub/src/server.js + +--- +id: mework-workflow-add-or-enhance-dashboard-feature +trigger: "when doing add or enhance dashboard feature" +confidence: 0.6 +domain: workflow +source: repo-analysis +source_repo: https://github.com/jaayslaughter-cpu/mework +--- + +# Mework Workflow Add Or Enhance Dashboard Feature + +## Action + +Follow the add-or-enhance-dashboard-feature workflow: +1. Create or update dashboard/app.py for Streamlit UI +2. Update dashboard/requirements.txt for new dependencies +3. Update dashboard/Dockerfile if needed +4. Update docker-compose.yml if dashboard service changes +5. 
Commit all related files + +## Evidence + +- Workflow detected from commit patterns +- Frequency: ~2x per month +- Files: dashboard/app.py, dashboard/requirements.txt, dashboard/Dockerfile + +--- +id: mework-workflow-add-or-update-docker-orchestration +trigger: "when doing add or update docker orchestration" +confidence: 0.6 +domain: workflow +source: repo-analysis +source_repo: https://github.com/jaayslaughter-cpu/mework +--- + +# Mework Workflow Add Or Update Docker Orchestration + +## Action + +Follow the add-or-update-docker-orchestration workflow: +1. Update docker-compose.yml with new/changed services or env vars +2. Update service Dockerfiles as needed (api/Dockerfile, dashboard/Dockerfile, hub/Dockerfile) +3. Update .env.example for new environment variables +4. Commit all related files + +## Evidence + +- Workflow detected from commit patterns +- Frequency: ~2x per month +- Files: docker-compose.yml, api/Dockerfile, dashboard/Dockerfile + +--- +id: mework-workflow-add-or-update-backend-service-logic +trigger: "when doing add or update backend service logic" +confidence: 0.65 +domain: workflow +source: repo-analysis +source_repo: https://github.com/jaayslaughter-cpu/mework +--- + +# Mework Workflow Add Or Update Backend Service Logic + +## Action + +Follow the add-or-update-backend-service-logic workflow: +1. Create or update api/services/*.py with new logic +2. Update api/services/predictor.py to integrate new logic +3. Update or create tests if needed +4. 
Commit all related files + +## Evidence + +- Workflow detected from commit patterns +- Frequency: ~3x per month +- Files: api/services/*.py + diff --git a/.claude/identity.json b/.claude/identity.json new file mode 100644 index 0000000..aea70de --- /dev/null +++ b/.claude/identity.json @@ -0,0 +1,14 @@ +{ + "version": "2.0", + "technicalLevel": "technical", + "preferredStyle": { + "verbosity": "moderate", + "codeComments": true, + "explanations": true + }, + "domains": [ + "python" + ], + "suggestedBy": "ecc-tools-repo-analysis", + "createdAt": "2026-03-20T03:04:10.701Z" +} \ No newline at end of file diff --git a/.claude/skills/mework/SKILL.md b/.claude/skills/mework/SKILL.md new file mode 100644 index 0000000..a26498a --- /dev/null +++ b/.claude/skills/mework/SKILL.md @@ -0,0 +1,417 @@ +--- +name: mework-conventions +description: Development conventions and patterns for mework. Python project with conventional commits. +--- + +# Mework Conventions + +> Generated from [jaayslaughter-cpu/mework](https://github.com/jaayslaughter-cpu/mework) on 2026-03-20 + +## Overview + +This skill teaches Claude the development patterns and conventions used in mework. + +## Tech Stack + +- **Primary Language**: Python +- **Architecture**: hybrid module organization +- **Test Location**: separate + +## When to Use This Skill + +Activate this skill when: +- Making changes to this repository +- Adding new features following established patterns +- Writing tests that match project conventions +- Creating commits with proper message format + +## Commit Conventions + +Follow these commit message conventions based on 55 analyzed commits. 
+ +### Commit Style: Conventional Commits + +### Prefixes Used + +- `feat` +- `fix` +- `chore` + +### Message Guidelines + +- Average message length: ~58 characters +- Keep first line concise and descriptive +- Use imperative mood ("Add feature" not "Added feature") + + +*Commit message example* + +```text +feat: PropIQ complete implementation [fix/sync-worker-bugs] +``` + +*Commit message example* + +```text +fix(docker): Add REDIS_PASSWORD env var to hub service +``` + +*Commit message example* + +```text +chore: Tighten CORS origins, remove wildcard +``` + +*Commit message example* + +```text +Merge pull request #25 from jaayslaughter-cpu/ticket-6.4-defensive-contrast +``` + +*Commit message example* + +```text +Merge branch 'main' into ticket-6.4-defensive-contrast +``` + +*Commit message example* + +```text +chore: Remove Streamlit dashboard +``` + +*Commit message example* + +```text +feat(scripts): Enhanced training pipeline with multi-source data +``` + +*Commit message example* + +```text +feat(api): Defensive Contrast Engine for batted-ball profile mismatches +``` + +## Architecture + +### Project Structure: Single Package + +This project uses **hybrid** module organization. 
+ +### Configuration Files + +- `.github/workflows/npm-publish-github-packages.yml` +- `api/Dockerfile` +- `docker-compose.yml` +- `hub/Dockerfile` +- `hub/package.json` + +### Guidelines + +- This project uses a hybrid organization +- Follow existing patterns when adding new code + +## Code Style + +### Language: Python + +### Naming Conventions + +| Element | Convention | +|---------|------------| +| Files | snake_case | +| Functions | camelCase | +| Classes | PascalCase | +| Constants | SCREAMING_SNAKE_CASE | + +### Import Style: Mixed Style + +### Export Style: Mixed Style + + +## Error Handling + +### Error Handling Style: Try-Catch Blocks + + +*Standard error handling pattern* + +```typescript +try { + const result = await riskyOperation() + return result +} catch (error) { + console.error('Operation failed:', error) + throw new Error('User-friendly message') +} +``` + +## Common Workflows + +These workflows were detected from analyzing commit patterns. + +### Database Migration + +Database schema changes with migration files + +**Frequency**: ~10 times per month + +**Steps**: +1. Create migration file +2. Update schema definitions +3. Generate/update types + +**Example commit sequence**: +``` +fix: CI bot feedback - placeholder files, security, and schema fixes +Merge pull request #4 from jaayslaughter-cpu/ticket-1.4-bets-log-views-indexes +fix: change npm ci to npm install, reorder USER before EXPOSE +``` + +### Feature Development + +Standard feature implementation workflow + +**Frequency**: ~17 times per month + +**Steps**: +1. Add feature implementation +2. Add tests for feature +3. 
Update documentation + +**Files typically involved**: +- `**/api/**` + +**Example commit sequence**: +``` +fix: CI bot feedback - placeholder files, security, and schema fixes +Merge pull request #4 from jaayslaughter-cpu/ticket-1.4-bets-log-views-indexes +fix: change npm ci to npm install, reorder USER before EXPOSE +``` + +### Refactoring + +Code refactoring and cleanup workflow + +**Frequency**: ~2 times per month + +**Steps**: +1. Ensure tests pass before refactor +2. Refactor code structure +3. Verify tests still pass + +**Files typically involved**: +- `src/**/*` + +**Example commit sequence**: +``` +fix: 8 architecture improvements from CI bot review +fix: 5 architecture refinements from CI bot review +feat(api): FastAPI bootstrap with async SQLAlchemy +``` + +### Add Or Update Database Table Or Schema + +Adds or updates a database table, view, or index, often for new features or analytics. Includes SQL migration files and sometimes updates to related backend code. + +**Frequency**: ~4 times per month + +**Steps**: +1. Create or update SQL migration file in db/init/*.sql +2. Sometimes update related backend code (e.g., api/database.py, hub/src/sync.js) to use new/changed tables +3. Commit migration and related code + +**Files typically involved**: +- `db/init/01_core_reference.sql` +- `db/init/02_projection_market_layer.sql` +- `db/init/03_bets_log_views_indexes.sql` + +**Example commit sequence**: +``` +Create or update SQL migration file in db/init/*.sql +Sometimes update related backend code (e.g., api/database.py, hub/src/sync.js) to use new/changed tables +Commit migration and related code +``` + +### Add Or Enhance Api Endpoint + +Adds or updates FastAPI endpoints, including new routers, services, and sometimes model or requirements updates. + +**Frequency**: ~3 times per month + +**Steps**: +1. Create or update api/routers/*.py for endpoint logic +2. Create or update api/services/*.py for business logic +3. 
Update api/main.py to register new routers +4. Update api/requirements.txt if new dependencies are needed +5. Commit all related files + +**Files typically involved**: +- `api/routers/*.py` +- `api/services/*.py` +- `api/main.py` +- `api/requirements.txt` + +**Example commit sequence**: +``` +Create or update api/routers/*.py for endpoint logic +Create or update api/services/*.py for business logic +Update api/main.py to register new routers +Update api/requirements.txt if new dependencies are needed +Commit all related files +``` + +### Add Or Train Ml Model + +Adds new ML models, training scripts, and updates model artifacts for predictions. + +**Frequency**: ~2 times per month + +**Steps**: +1. Create or update scripts/train_model.py for training logic +2. Generate or update model artifact files in api/models/*.json +3. Update api/services/predictor.py to use new models if needed +4. Update api/requirements.txt if new ML dependencies are needed +5. Commit all related files + +**Files typically involved**: +- `scripts/train_model.py` +- `api/models/*.json` +- `api/services/predictor.py` +- `api/requirements.txt` + +**Example commit sequence**: +``` +Create or update scripts/train_model.py for training logic +Generate or update model artifact files in api/models/*.json +Update api/services/predictor.py to use new models if needed +Update api/requirements.txt if new ML dependencies are needed +Commit all related files +``` + +### Add Or Enhance Hub Sync Worker + +Implements or updates the Node.js hub's background sync worker for polling APIs and syncing betting markets. + +**Frequency**: ~2 times per month + +**Steps**: +1. Update or create hub/src/sync.js for polling logic +2. Update related fetchers (hub/src/fetchers/*.js) for new data sources +3. Update hub/src/server.js to integrate sync worker +4. Update docker-compose.yml or .env.example if new env vars are needed +5. 
Commit all related files + +**Files typically involved**: +- `hub/src/sync.js` +- `hub/src/fetchers/*.js` +- `hub/src/server.js` +- `docker-compose.yml` +- `.env.example` + +**Example commit sequence**: +``` +Update or create hub/src/sync.js for polling logic +Update related fetchers (hub/src/fetchers/*.js) for new data sources +Update hub/src/server.js to integrate sync worker +Update docker-compose.yml or .env.example if new env vars are needed +Commit all related files +``` + +### Add Or Enhance Dashboard Feature + +Adds or updates the Streamlit dashboard, including app logic, requirements, and Dockerfile. + +**Frequency**: ~2 times per month + +**Steps**: +1. Create or update dashboard/app.py for Streamlit UI +2. Update dashboard/requirements.txt for new dependencies +3. Update dashboard/Dockerfile if needed +4. Update docker-compose.yml if dashboard service changes +5. Commit all related files + +**Files typically involved**: +- `dashboard/app.py` +- `dashboard/requirements.txt` +- `dashboard/Dockerfile` +- `docker-compose.yml` + +**Example commit sequence**: +``` +Create or update dashboard/app.py for Streamlit UI +Update dashboard/requirements.txt for new dependencies +Update dashboard/Dockerfile if needed +Update docker-compose.yml if dashboard service changes +Commit all related files +``` + +### Add Or Update Docker Orchestration + +Updates Docker Compose and service Dockerfiles to orchestrate multi-service deployments, often when adding new services or changing environment variables. + +**Frequency**: ~2 times per month + +**Steps**: +1. Update docker-compose.yml with new/changed services or env vars +2. Update service Dockerfiles as needed (api/Dockerfile, dashboard/Dockerfile, hub/Dockerfile) +3. Update .env.example for new environment variables +4. 
Commit all related files + +**Files typically involved**: +- `docker-compose.yml` +- `api/Dockerfile` +- `dashboard/Dockerfile` +- `hub/Dockerfile` +- `.env.example` + +**Example commit sequence**: +``` +Update docker-compose.yml with new/changed services or env vars +Update service Dockerfiles as needed (api/Dockerfile, dashboard/Dockerfile, hub/Dockerfile) +Update .env.example for new environment variables +Commit all related files +``` + +### Add Or Update Backend Service Logic + +Implements or enhances backend service logic, especially in api/services/*.py, often for analytics engines (fatigue, usage vacuums, defensive contrast, etc). + +**Frequency**: ~3 times per month + +**Steps**: +1. Create or update api/services/*.py with new logic +2. Update api/services/predictor.py to integrate new logic +3. Update or create tests if needed +4. Commit all related files + +**Files typically involved**: +- `api/services/*.py` + +**Example commit sequence**: +``` +Create or update api/services/*.py with new logic +Update api/services/predictor.py to integrate new logic +Update or create tests if needed +Commit all related files +``` + + +## Best Practices + +Based on analysis of the codebase, follow these practices: + +### Do + +- Use conventional commit format (feat:, fix:, etc.) +- Use snake_case for file names +- Prefer mixed exports + +### Don't + +- Don't write vague commit messages +- Don't deviate from established patterns without discussion + +--- + +*This skill was auto-generated by [ECC Tools](https://ecc.tools). Review and customize as needed for your team.* diff --git a/.codex/AGENTS.md b/.codex/AGENTS.md new file mode 100644 index 0000000..32b1802 --- /dev/null +++ b/.codex/AGENTS.md @@ -0,0 +1,28 @@ +# ECC for Codex CLI + +This supplements the root `AGENTS.md` with a repo-local ECC baseline. 
+ +## Repo Skill + +- Repo-generated Codex skill: `.agents/skills/mework/SKILL.md` +- Claude-facing companion skill: `.claude/skills/mework/SKILL.md` +- Keep user-specific credentials and private MCPs in `~/.codex/config.toml`, not in this repo. + +## MCP Baseline + +Treat `.codex/config.toml` as the default ECC-safe baseline for work in this repository. +The generated baseline enables GitHub, Context7, Exa, Memory, Playwright, and Sequential Thinking. + +## Multi-Agent Support + +- Explorer: read-only evidence gathering +- Reviewer: correctness, security, and regression review +- Docs researcher: API and release-note verification + +## Workflow Files + +- `.claude/commands/database-migration.md` +- `.claude/commands/feature-development.md` +- `.claude/commands/refactoring.md` + +Use these workflow files as reusable task scaffolds when the detected repository workflows recur. \ No newline at end of file diff --git a/.codex/agents/docs-researcher.toml b/.codex/agents/docs-researcher.toml new file mode 100644 index 0000000..0daae57 --- /dev/null +++ b/.codex/agents/docs-researcher.toml @@ -0,0 +1,9 @@ +model = "gpt-5.4" +model_reasoning_effort = "medium" +sandbox_mode = "read-only" + +developer_instructions = """ +Verify APIs, framework behavior, and release-note claims against primary documentation before changes land. +Cite the exact docs or file paths that support each claim. +Do not invent undocumented behavior. +""" \ No newline at end of file diff --git a/.codex/agents/explorer.toml b/.codex/agents/explorer.toml new file mode 100644 index 0000000..732df7a --- /dev/null +++ b/.codex/agents/explorer.toml @@ -0,0 +1,9 @@ +model = "gpt-5.4" +model_reasoning_effort = "medium" +sandbox_mode = "read-only" + +developer_instructions = """ +Stay in exploration mode. +Trace the real execution path, cite files and symbols, and avoid proposing fixes unless the parent agent asks for them. +Prefer targeted search and file reads over broad scans. 
+""" \ No newline at end of file diff --git a/.codex/agents/reviewer.toml b/.codex/agents/reviewer.toml new file mode 100644 index 0000000..b13ed9c --- /dev/null +++ b/.codex/agents/reviewer.toml @@ -0,0 +1,9 @@ +model = "gpt-5.4" +model_reasoning_effort = "high" +sandbox_mode = "read-only" + +developer_instructions = """ +Review like an owner. +Prioritize correctness, security, behavioral regressions, and missing tests. +Lead with concrete findings and avoid style-only feedback unless it hides a real bug. +""" \ No newline at end of file diff --git a/.codex/config.toml b/.codex/config.toml new file mode 100644 index 0000000..bc1ee67 --- /dev/null +++ b/.codex/config.toml @@ -0,0 +1,48 @@ +#:schema https://developers.openai.com/codex/config-schema.json + +# ECC Tools generated Codex baseline +approval_policy = "on-request" +sandbox_mode = "workspace-write" +web_search = "live" + +[mcp_servers.github] +command = "npx" +args = ["-y", "@modelcontextprotocol/server-github"] + +[mcp_servers.context7] +command = "npx" +args = ["-y", "@upstash/context7-mcp@latest"] + +[mcp_servers.exa] +url = "https://mcp.exa.ai/mcp" + +[mcp_servers.memory] +command = "npx" +args = ["-y", "@modelcontextprotocol/server-memory"] + +[mcp_servers.playwright] +command = "npx" +args = ["-y", "@playwright/mcp@latest", "--extension"] + +[mcp_servers.sequential-thinking] +command = "npx" +args = ["-y", "@modelcontextprotocol/server-sequential-thinking"] + +[features] +multi_agent = true + +[agents] +max_threads = 6 +max_depth = 1 + +[agents.explorer] +description = "Read-only codebase explorer for gathering evidence before changes are proposed." +config_file = "agents/explorer.toml" + +[agents.reviewer] +description = "PR reviewer focused on correctness, security, and missing tests." +config_file = "agents/reviewer.toml" + +[agents.docs_researcher] +description = "Documentation specialist that verifies APIs, framework behavior, and release notes." 
+config_file = "agents/docs-researcher.toml" \ No newline at end of file diff --git a/.env.example b/.env.example index d04b12e..65fa285 100644 --- a/.env.example +++ b/.env.example @@ -1,3 +1,31 @@ +# ─── PostgreSQL ────────────────────────────────────── +POSTGRES_DB=propiq +POSTGRES_USER=propiq_admin +POSTGRES_PASSWORD=REPLACE_ME +POSTGRES_HOST=postgres + +# ─── Redis ─────────────────────────────────────────── +REDIS_PASSWORD=REPLACE_ME +REDIS_URL=redis://:REPLACE_ME@redis:6379 + +# ─── SportsData.io ─────────────────────────────────── +SPORTSDATA_API_KEY=REPLACE_ME + +# ─── The Odds API ───────────────────────────────────── +ODDS_API_KEY=REPLACE_ME + +# ─── SportsBlaze Advanced Stats ─────────────────────── +SPORTSBLAZE_API_KEY=REPLACE_ME + +# ─── Tank01 (via RapidAPI) ─────────────────────────── +TANK01_RAPIDAPI_KEY=REPLACE_ME + +# ─── Apify (Web Scraping) ──────────────────────────── +APIFY_API_TOKEN=REPLACE_ME + +# ─── Next.js Frontend ──────────────────────────────── +NEXT_PUBLIC_HUB_URL=http://localhost:3002 +NEXT_PUBLIC_ENGINE_URL=http://localhost:8000 # PropIQ Agent Army — Environment Variables # Copy to .env and fill in your values diff --git a/api/requirements.txt b/api/requirements.txt index c710255..ce661de 100644 --- a/api/requirements.txt +++ b/api/requirements.txt @@ -4,7 +4,11 @@ sqlalchemy==2.0.28 asyncpg==0.29.0 pybaseball==2.2.7 pandas==2.2.1 +numpy==1.26.4 lxml==5.1.0 xgboost==2.0.3 scikit-learn==1.5.0 pydantic==2.6.3 +requests==2.31.0 +httpx==0.27.0 +python-dotenv==1.0.1 diff --git a/hub/src/sync.js b/hub/src/sync.js index a15bc1e..ba4aeb9 100644 --- a/hub/src/sync.js +++ b/hub/src/sync.js @@ -1,5 +1,5 @@ // hub/src/sync.js -// Unified 15-second polling loop for real-time data sync. +// Unified 60-second polling loop for real-time data sync. 
const { Pool } = require('pg'); const oddsapi = require('./fetchers/oddsapi'); @@ -7,20 +7,22 @@ const espn = require('./fetchers/espn'); const pool = new Pool({ user: process.env.POSTGRES_USER, - host: 'postgres', + host: process.env.POSTGRES_HOST || 'postgres', database: process.env.POSTGRES_DB, password: process.env.POSTGRES_PASSWORD, port: 5432, }); -async function syncLoop() { +const syncLoop = async () => { try { - // 1. Fetch ESPN Live Scores (Keeps cache piping hot for the REST API) - await espn.getLiveScores().catch(err => console.warn('[Sync] ESPN fetch warning:', err.message)); + // 1. Fetch ESPN Live Scores (Keeps cache hot for REST API) + await espn.getLiveScores().catch(() => {}); // 2. Fetch and UPSERT Odds API lines const events = await oddsapi.getMLBEvents().catch(() => []); - if (!events || events.length === 0) return; + if (!events || events.length === 0) { + return; + } for (const event of events) { try { @@ -32,12 +34,34 @@ async function syncLoop() { // 1. Aggregate Over/Under outcomes in JavaScript memory first for (const outcome of market.outcomes) { - const point = outcome.point ?? 0.5; - const marketId = `${event.id}_${book.key}_${market.key}_${outcome.description || 'base'}_${point}`.replace(/\s+/g, '_').toLowerCase(); - + const desc = outcome.description || 'base'; + if (!outcomeMap[desc]) { + outcomeMap[desc] = { over_odds: null, under_odds: null, point: outcome.point ?? 
0.5 };
+          }
+          // Map this outcome's price onto the aggregated row. (The previous
+          // draft looked this up via an undefined `oddsKeyMap` and repeated
+          // the Over/Under logic *after* the loop, where `outcome` and
+          // `desc` are out of scope — both replaced by this in-loop check.)
+          const nameLower = (outcome.name || '').toLowerCase();
+          if (nameLower === 'over') {
+            outcomeMap[desc].over_odds = outcome.price;
+          } else if (nameLower === 'under') {
+            outcomeMap[desc].under_odds = outcome.price;
+          }
+        }
+
+        // Now insert each aggregated market row. NOTE(review): this loop
+        // must stay inside the `market` loop so `event`, `book` and
+        // `market` remain in scope for marketId — confirm against the
+        // context lines elided by the hunk below.
+        for (const [desc, data] of Object.entries(outcomeMap)) {
+          if (data.over_odds === null && data.under_odds === null) continue;
+
+          const marketId = `${event.id}_${book.key}_${market.key}_${desc}_${data.point}`
+            .replace(/\s+/g, '_')
+            .toLowerCase();
+
           const query = `
             INSERT INTO betting_markets (
               market_id, game_id, pitcher_id, sportsbook, prop_category,
               line, over_odds, under_odds, updated_at
             ) VALUES (
               $1, $2, NULL, $3, $4, $5, $6, $7, NOW()
@@ -61,22 +85,19 @@
       }
     }
   } catch (error) {
+    // Keep logging: an empty catch here would hide every critical sync
+    // failure from the operator.
     console.error('[Sync] Critical error in unified loop:', error.message);
   }
 }
 
 async function startSyncWorker() {
   console.log('🚀 Starting Unified 60-Second Polling Loop...');
 
   async function runLoop() {
     const start = Date.now();
     await syncLoop();
     const elapsed = Date.now() - start;
+    // Re-arm the loop, compensating for time spent syncing; without this
+    // setTimeout the "polling loop" runs exactly once. (The patched
+    // `window.startSyncWorker = function ...` version also crashes in
+    // Node: `window` is undefined, and a named function expression's name
+    // is not bound in module scope for `module.exports` below.)
     setTimeout(runLoop, Math.max(0, 60000 - elapsed));
   }
 
   runLoop(); // Start first run immediately
 }
 
 module.exports = { startSyncWorker };